diff --git a/data_processing/.gitignore b/data_processing/.gitignore
new file mode 100644
index 000000000..2f7896d1d
--- /dev/null
+++ b/data_processing/.gitignore
@@ -0,0 +1 @@
+target/
diff --git a/data_processing/README.md b/data_processing/README.md
new file mode 100644
index 000000000..c17f8581f
--- /dev/null
+++ b/data_processing/README.md
@@ -0,0 +1,9 @@
+## Package
+```shell
+mvn clean scala:compile assembly:single
+```
+
+## Dependencies
+* Spark 3.0.3
+* Java 8
+* Scala 2.12
diff --git a/data_processing/pom.xml b/data_processing/pom.xml
new file mode 100644
index 000000000..4308b76ac
--- /dev/null
+++ b/data_processing/pom.xml
@@ -0,0 +1,115 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.bytedance.aml.enterprise</groupId>
+    <artifactId>sm4spark</artifactId>
+    <version>0.0.1-SNAPSHOT</version>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <spark.version>3.0.3</spark.version>
+
+        <java.version>1.8</java.version>
+        <scala.binary.version>2.12</scala.binary.version>
+        <scala.version>2.12.10</scala.version>
+
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+        <maven.compiler.release>8</maven.compiler.release>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>${scala.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>commons-codec</groupId>
+            <artifactId>commons-codec</artifactId>
+            <version>1.15</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-sql_2.12</artifactId>
+            <version>${spark.version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.google.guava</groupId>
+                    <artifactId>guava</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>${java.version}</source>
+                    <target>${java.version}</target>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>4.3.0</version>
+                <executions>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-test-compile</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <scalaVersion>${scala.version}</scalaVersion>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <finalName>${project.artifactId}-${project.version}-RELEASE</finalName>
+                    <archive>
+                        <manifest>
+                            <mainClass>fully.qualified.MainClass</mainClass>
+                        </manifest>
+                    </archive>
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/data_processing/src/main/scala/com/bytedance/aml/enterprise/sparkudaf/Hist.scala b/data_processing/src/main/scala/com/bytedance/aml/enterprise/sparkudaf/Hist.scala
new file mode 100644
index 000000000..5f1cfb38a
--- /dev/null
+++ b/data_processing/src/main/scala/com/bytedance/aml/enterprise/sparkudaf/Hist.scala
@@ -0,0 +1,25 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.bytedance.aml.enterprise.sparkudaf
+
+import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
+import org.apache.spark.sql.expressions.UserDefinedFunction
+import org.apache.spark.sql.functions
+
+object Hist {
+  def getFunc: UserDefinedFunction = functions.udaf(HistUDAF, ExpressionEncoder[HistIn])
+
+}
\ No newline at end of file
diff --git a/data_processing/src/main/scala/com/bytedance/aml/enterprise/sparkudaf/HistUDAF.scala b/data_processing/src/main/scala/com/bytedance/aml/enterprise/sparkudaf/HistUDAF.scala
new file mode 100644
index 000000000..44cabc257
--- /dev/null
+++ b/data_processing/src/main/scala/com/bytedance/aml/enterprise/sparkudaf/HistUDAF.scala
@@ -0,0 +1,65 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.bytedance.aml.enterprise.sparkudaf
+
+import org.apache.spark.sql.expressions.Aggregator
+import org.apache.spark.sql.{Encoder, Encoders}
+
+case class HistIn(var value: Double, var min: Double, var max: Double, var binsNum: Int, var interval: Double)
+case class Bucket(var bins: Array[Double], var counts: Array[Int])
+
+object HistUDAF extends Aggregator[HistIn, Bucket, Bucket] {
+
+  def zero: Bucket = Bucket(bins = new Array[Double](0), counts = new Array[Int](0))
+
+  def reduce(buffer: Bucket, data: HistIn): Bucket = {
+    if (buffer.bins.length == 0) {
+      buffer.bins = new Array[Double](data.binsNum + 1)
+      for (i <- 0 until data.binsNum) {
+        buffer.bins(i) = i * data.interval + data.min
+      }
+      buffer.bins(data.binsNum) = data.max
+      buffer.counts = new Array[Int](data.binsNum)
+    }
+    if (data.interval != 0.0) {
+      var bucket_idx = ((data.value - data.min) / data.interval).toInt
+      if (bucket_idx < 0) {
+        bucket_idx = 0
+      } else if (bucket_idx > (data.binsNum - 1)) {
+        bucket_idx = data.binsNum - 1
+      }
+      buffer.counts(bucket_idx) += 1
+    }
+    buffer
+  }
+
+
+  def merge(b1: Bucket, b2: Bucket): Bucket = {
+    (b1.bins.length, b2.bins.length) match {
+      case (_, 0) => b1
+      case (0, _) => b2
+      case _ => b1.counts = (b1.counts zip b2.counts) map (x => x._1 + x._2)
+        b1
+    }
+  }
+
+  def finish(reduction: Bucket): Bucket = reduction
+
+  def bufferEncoder: Encoder[Bucket] = Encoders.product
+
+  def outputEncoder: Encoder[Bucket] = Encoders.product
+
+}
\ No newline at end of file
diff --git a/docs/licenses/LICENCE-BurntSushi_toml.txt b/docs/licenses/LICENCE-BurntSushi_toml.txt
new file mode 100644
index 000000000..01b574320
--- /dev/null
+++ b/docs/licenses/LICENCE-BurntSushi_toml.txt
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 TOML authors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
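Usage note for the `HistUDAF` added above: `Hist.getFunc` wraps the typed `Aggregator` as an untyped UDAF via `functions.udaf`, so it can be applied directly to DataFrame columns, one column per `HistIn` field. The following is a minimal sketch of how it could be invoked; the local `SparkSession`, the `value` column, and the bucket parameters are illustrative assumptions, not part of this change:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit}
import com.bytedance.aml.enterprise.sparkudaf.Hist

// Hypothetical local driver for trying out the UDAF; not part of this patch.
object HistExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("hist-example")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    val df = Seq(1.0, 2.5, 3.7, 4.2, 9.9).toDF("value")

    // 5 equal-width bins over [0, 10); interval = (max - min) / binsNum.
    val (min, max, binsNum) = (0.0, 10.0, 5)
    val interval = (max - min) / binsNum

    // The UDAF takes the five HistIn fields as separate columns:
    // (value, min, max, binsNum, interval).
    val hist = Hist.getFunc
    df.select(hist(col("value"), lit(min), lit(max), lit(binsNum), lit(interval)).as("hist"))
      .show(truncate = false)

    spark.stop()
  }
}
```

Two behaviors worth noting from the implementation: `reduce` clamps out-of-range values into the first or last bucket rather than dropping them, and it ignores rows entirely when `interval` is `0.0` (the degenerate `min == max` case); `merge` assumes both partial buffers were built with the same `min`, `max`, and `binsNum`. The assembly jar produced by `mvn clean scala:compile assembly:single` would then be attached to a Spark job in the usual way (e.g. via `--jars`).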
diff --git a/docs/licenses/LICENCE-Go-Logrus.txt b/docs/licenses/LICENCE-Go-Logrus.txt new file mode 100644 index 000000000..f090cb42f --- /dev/null +++ b/docs/licenses/LICENCE-Go-Logrus.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Simon Eskildsen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/docs/licenses/LICENCE-Go-Testify.txt b/docs/licenses/LICENCE-Go-Testify.txt new file mode 100644 index 000000000..4b0421cf9 --- /dev/null +++ b/docs/licenses/LICENCE-Go-Testify.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/docs/licenses/LICENCE-GoDoc-Text.txt b/docs/licenses/LICENCE-GoDoc-Text.txt new file mode 100644 index 000000000..77113a54b --- /dev/null +++ b/docs/licenses/LICENCE-GoDoc-Text.txt @@ -0,0 +1,31 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: github.com/kr/text +Source: https://github.com/kr/text/ + +Files: * +Copyright: 2013 Keith Rarick +License: Expat + +Files: debian/* +Copyright: 2013 Tonnerre Lombard +License: Expat + +License: Expat + +Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + . + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + . + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE diff --git a/docs/licenses/LICENCE-Microsoft-go-winio.txt b/docs/licenses/LICENCE-Microsoft-go-winio.txt new file mode 100644 index 000000000..fa365be22 --- /dev/null +++ b/docs/licenses/LICENCE-Microsoft-go-winio.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-Python_six.txt b/docs/licenses/LICENCE-Python_six.txt new file mode 100644 index 000000000..01de9e22d --- /dev/null +++ b/docs/licenses/LICENCE-Python_six.txt @@ -0,0 +1,18 @@ +Copyright (c) 2010-2018 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/docs/licenses/LICENCE-armon_go-socks5.txt b/docs/licenses/LICENCE-armon_go-socks5.txt new file mode 100644 index 000000000..94fadc2a9 --- /dev/null +++ b/docs/licenses/LICENCE-armon_go-socks5.txt @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 Armon Dadgar + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/docs/licenses/LICENCE-benbjohnson-clock.txt b/docs/licenses/LICENCE-benbjohnson-clock.txt new file mode 100644 index 000000000..0dfeb1d6a --- /dev/null +++ b/docs/licenses/LICENCE-benbjohnson-clock.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Ben Johnson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-beorn7-perks.txt b/docs/licenses/LICENCE-beorn7-perks.txt new file mode 100644 index 000000000..9316a10d2 --- /dev/null +++ b/docs/licenses/LICENCE-beorn7-perks.txt @@ -0,0 +1,20 @@ +Copyright (C) 2013 Blake Mizerany + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/docs/licenses/LICENCE-cespare_xxhash.txt b/docs/licenses/LICENCE-cespare_xxhash.txt new file mode 100644 index 000000000..341bd91f0 --- /dev/null +++ b/docs/licenses/LICENCE-cespare_xxhash.txt @@ -0,0 +1,22 @@ +Copyright (c) 2016 Caleb Spare + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/docs/licenses/LICENCE-charset-normalizer.txt b/docs/licenses/LICENCE-charset-normalizer.txt new file mode 100644 index 000000000..a86dd9559 --- /dev/null +++ b/docs/licenses/LICENCE-charset-normalizer.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/docs/licenses/LICENCE-cpuguy83-go-md2man.txt b/docs/licenses/LICENCE-cpuguy83-go-md2man.txt new file mode 100644 index 000000000..1cade6cef --- /dev/null +++ b/docs/licenses/LICENCE-cpuguy83-go-md2man.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Brian Goff + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/docs/licenses/LICENCE-create-react-app.txt b/docs/licenses/LICENCE-create-react-app.txt new file mode 100644 index 000000000..a73b785a6 --- /dev/null +++ b/docs/licenses/LICENCE-create-react-app.txt @@ -0,0 +1,26 @@ +Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-dsnet_compress.txt b/docs/licenses/LICENCE-dsnet_compress.txt new file mode 100644 index 000000000..945b396cf --- /dev/null +++ b/docs/licenses/LICENCE-dsnet_compress.txt @@ -0,0 +1,24 @@ +Copyright © 2015, Joe Tsai and The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. +* Neither the copyright holder nor the names of its contributors may be used to +endorse or promote products derived from this software without specific prior +written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-evanphx_json-patch.txt b/docs/licenses/LICENCE-evanphx_json-patch.txt new file mode 100644 index 000000000..050fe60f0 --- /dev/null +++ b/docs/licenses/LICENCE-evanphx_json-patch.txt @@ -0,0 +1,25 @@ +Copyright (c) 2014, Evan Phoenix +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of the Evan Phoenix nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-frankban_quicktest.txt b/docs/licenses/LICENCE-frankban_quicktest.txt new file mode 100644 index 000000000..23a294c75 --- /dev/null +++ b/docs/licenses/LICENCE-frankban_quicktest.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Canonical Ltd. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/docs/licenses/LICENCE-fsnotify.txt b/docs/licenses/LICENCE-fsnotify.txt new file mode 100644 index 000000000..fb03ade75 --- /dev/null +++ b/docs/licenses/LICENCE-fsnotify.txt @@ -0,0 +1,25 @@ +Copyright © 2012 The Go Authors. All rights reserved. +Copyright © fsnotify Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. +* Neither the name of Google Inc. nor the names of its contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/docs/licenses/LICENCE-go-ansiterm.txt b/docs/licenses/LICENCE-go-ansiterm.txt new file mode 100644 index 000000000..b86c36e25 --- /dev/null +++ b/docs/licenses/LICENCE-go-ansiterm.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/docs/licenses/LICENCE-go-check-check.txt b/docs/licenses/LICENCE-go-check-check.txt new file mode 100644 index 000000000..9ac6ae0a6 --- /dev/null +++ b/docs/licenses/LICENCE-go-check-check.txt @@ -0,0 +1,23 @@ +BSD Two Clause License +====================== + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT +SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT +OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff --git a/docs/licenses/LICENCE-go-inf-inf.txt b/docs/licenses/LICENCE-go-inf-inf.txt new file mode 100644 index 000000000..e923f606e --- /dev/null +++ b/docs/licenses/LICENCE-go-inf-inf.txt @@ -0,0 +1,28 @@ +Copyright (c) 2012 Péter Surányi. Portions Copyright (c) 2009 The Go +Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-go-restful.txt b/docs/licenses/LICENCE-go-restful.txt new file mode 100644 index 000000000..812a5c834 --- /dev/null +++ b/docs/licenses/LICENCE-go-restful.txt @@ -0,0 +1,22 @@ +Copyright (c) 2012,2013 Ernest Micklei + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/docs/licenses/LICENCE-go-spew.txt b/docs/licenses/LICENCE-go-spew.txt new file mode 100644 index 000000000..223583735 --- /dev/null +++ b/docs/licenses/LICENCE-go-spew.txt @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2012-2016 Dave Collins + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE diff --git a/docs/licenses/LICENCE-go-tomb-tomb.txt b/docs/licenses/LICENCE-go-tomb-tomb.txt new file mode 100644 index 000000000..db0834849 --- /dev/null +++ b/docs/licenses/LICENCE-go-tomb-tomb.txt @@ -0,0 +1,27 @@ +Copyright (c) 2010-2011 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-go-zap.txt b/docs/licenses/LICENCE-go-zap.txt new file mode 100644 index 000000000..82a1dd0dc --- /dev/null +++ b/docs/licenses/LICENCE-go-zap.txt @@ -0,0 +1,19 @@ +Copyright (c) 2016-2017 Uber Technologies, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/docs/licenses/LICENCE-go_uber_org_goleak.txt b/docs/licenses/LICENCE-go_uber_org_goleak.txt new file mode 100644 index 000000000..a0e4cc690 --- /dev/null +++ b/docs/licenses/LICENCE-go_uber_org_goleak.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Uber Technologies, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/docs/licenses/LICENCE-go_uber_org_multierr.txt b/docs/licenses/LICENCE-go_uber_org_multierr.txt new file mode 100644 index 000000000..fe9e5258b --- /dev/null +++ b/docs/licenses/LICENCE-go_uber_org_multierr.txt @@ -0,0 +1,19 @@ +Copyright (c) 2017 Uber Technologies, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/docs/licenses/LICENCE-gogo-protobuf.txt b/docs/licenses/LICENCE-gogo-protobuf.txt new file mode 100644 index 000000000..748f3b3ee --- /dev/null +++ b/docs/licenses/LICENCE-gogo-protobuf.txt @@ -0,0 +1,28 @@ +Copyright 2010 The Go Authors. All rights reserved. +https://github.com/golang/protobuf + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-golang-github-spf13-pflag-dev.txt b/docs/licenses/LICENCE-golang-github-spf13-pflag-dev.txt new file mode 100644 index 000000000..e6a8ddc0d --- /dev/null +++ b/docs/licenses/LICENCE-golang-github-spf13-pflag-dev.txt @@ -0,0 +1,28 @@ +Copyright (c) 2012 Alex Ogier. All rights reserved. +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-golang-jwt_jwt.txt b/docs/licenses/LICENCE-golang-jwt_jwt.txt new file mode 100644 index 000000000..95135bb75 --- /dev/null +++ b/docs/licenses/LICENCE-golang-jwt_jwt.txt @@ -0,0 +1,8 @@ +Copyright (c) 2012 Dave Grijalva +Copyright (c) 2021 golang-jwt maintainers + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/docs/licenses/LICENCE-golang-protobuf.txt b/docs/licenses/LICENCE-golang-protobuf.txt new file mode 100644 index 000000000..ed122f2d6 --- /dev/null +++ b/docs/licenses/LICENCE-golang-protobuf.txt @@ -0,0 +1,27 @@ +Copyright 2010 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-golang-snappy-go-dev.txt b/docs/licenses/LICENCE-golang-snappy-go-dev.txt new file mode 100644 index 000000000..cf9059d9d --- /dev/null +++ b/docs/licenses/LICENCE-golang-snappy-go-dev.txt @@ -0,0 +1,27 @@ +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-golang_org_x_net.txt b/docs/licenses/LICENCE-golang_org_x_net.txt new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/docs/licenses/LICENCE-golang_org_x_net.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-golang_org_x_oauth2.txt b/docs/licenses/LICENCE-golang_org_x_oauth2.txt new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/docs/licenses/LICENCE-golang_org_x_oauth2.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-golang_org_x_sync.txt b/docs/licenses/LICENCE-golang_org_x_sync.txt new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/docs/licenses/LICENCE-golang_org_x_sync.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-golang_org_x_term.txt b/docs/licenses/LICENCE-golang_org_x_term.txt new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/docs/licenses/LICENCE-golang_org_x_term.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-golang_org_x_time.txt b/docs/licenses/LICENCE-golang_org_x_time.txt new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/docs/licenses/LICENCE-golang_org_x_time.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-golang_text.txt b/docs/licenses/LICENCE-golang_text.txt new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/docs/licenses/LICENCE-golang_text.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-gomega.txt b/docs/licenses/LICENCE-gomega.txt new file mode 100644 index 000000000..9415ee72c --- /dev/null +++ b/docs/licenses/LICENCE-gomega.txt @@ -0,0 +1,20 @@ +Copyright (c) 2013-2014 Onsi Fakhouri + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/docs/licenses/LICENCE-google_go-cmp.txt b/docs/licenses/LICENCE-google_go-cmp.txt new file mode 100644 index 000000000..32017f8fa --- /dev/null +++ b/docs/licenses/LICENCE-google_go-cmp.txt @@ -0,0 +1,27 @@ +Copyright (c) 2017 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-google_golang_org_protobuf.txt b/docs/licenses/LICENCE-google_golang_org_protobuf.txt new file mode 100644 index 000000000..0f646931a --- /dev/null +++ b/docs/licenses/LICENCE-google_golang_org_protobuf.txt @@ -0,0 +1,28 @@ +Copyright 2010 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/docs/licenses/LICENCE-google_uuid.txt b/docs/licenses/LICENCE-google_uuid.txt new file mode 100644 index 000000000..3726ed0a0 --- /dev/null +++ b/docs/licenses/LICENCE-google_uuid.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009,2014 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-goproxy.txt b/docs/licenses/LICENCE-goproxy.txt new file mode 100644 index 000000000..2067e567c --- /dev/null +++ b/docs/licenses/LICENCE-goproxy.txt @@ -0,0 +1,27 @@ +Copyright (c) 2012 Elazar Leibovich. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Elazar Leibovich. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/docs/licenses/LICENCE-gorilla_mux.txt b/docs/licenses/LICENCE-gorilla_mux.txt new file mode 100644 index 000000000..5da121e53 --- /dev/null +++ b/docs/licenses/LICENCE-gorilla_mux.txt @@ -0,0 +1,27 @@ +Copyright (c) 2012-2018 The Gorilla Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-idna.txt b/docs/licenses/LICENCE-idna.txt new file mode 100644 index 000000000..cc7d6baac --- /dev/null +++ b/docs/licenses/LICENCE-idna.txt @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2022, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/docs/licenses/LICENCE-josharian_intern.txt b/docs/licenses/LICENCE-josharian_intern.txt new file mode 100644 index 000000000..0096c79c6 --- /dev/null +++ b/docs/licenses/LICENCE-josharian_intern.txt @@ -0,0 +1,23 @@ +2020 Roger Shimizu +License: Expat +Comment: Debian packaging is licensed under the same terms as upstream + +License: Expat + +Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + . + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + . + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/docs/licenses/LICENCE-jsoniter-go.txt b/docs/licenses/LICENCE-jsoniter-go.txt new file mode 100644 index 000000000..f6dfb8773 --- /dev/null +++ b/docs/licenses/LICENCE-jsoniter-go.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 json-iterator + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-kr-fs.txt b/docs/licenses/LICENCE-kr-fs.txt new file mode 100644 index 000000000..76427ff52 --- /dev/null +++ b/docs/licenses/LICENCE-kr-fs.txt @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-kr_pretty.txt b/docs/licenses/LICENCE-kr_pretty.txt new file mode 100644 index 000000000..480a32805 --- /dev/null +++ b/docs/licenses/LICENCE-kr_pretty.txt @@ -0,0 +1,19 @@ +Copyright 2012 Keith Rarick + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/docs/licenses/LICENCE-mailru_easyjson.txt b/docs/licenses/LICENCE-mailru_easyjson.txt new file mode 100644 index 000000000..620fb1f5b --- /dev/null +++ b/docs/licenses/LICENCE-mailru_easyjson.txt @@ -0,0 +1,7 @@ +Copyright (c) 2016 Mail.Ru Group + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/docs/licenses/LICENCE-melbahja_goph.txt b/docs/licenses/LICENCE-melbahja_goph.txt new file mode 100644 index 000000000..42d540c38 --- /dev/null +++ b/docs/licenses/LICENCE-melbahja_goph.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020-present Mohamed El Bahja + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/docs/licenses/LICENCE-mergo.txt b/docs/licenses/LICENCE-mergo.txt new file mode 100644 index 000000000..068cab72d --- /dev/null +++ b/docs/licenses/LICENCE-mergo.txt @@ -0,0 +1,28 @@ +Copyright (c) 2013 Dario Castañé. All rights reserved. +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-mholt_archiver.txt b/docs/licenses/LICENCE-mholt_archiver.txt new file mode 100644 index 000000000..54bc89fa0 --- /dev/null +++ b/docs/licenses/LICENCE-mholt_archiver.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Matthew Holt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-morikuni_aec.txt b/docs/licenses/LICENCE-morikuni_aec.txt new file mode 100644 index 000000000..7504d0682 --- /dev/null +++ b/docs/licenses/LICENCE-morikuni_aec.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Taihei Morikuni + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-munnerz_goautoneg.txt b/docs/licenses/LICENCE-munnerz_goautoneg.txt new file mode 100644 index 000000000..bbc7b897c --- /dev/null +++ b/docs/licenses/LICENCE-munnerz_goautoneg.txt @@ -0,0 +1,31 @@ +Copyright (c) 2011, Open Knowledge Foundation Ltd. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + Neither the name of the Open Knowledge Foundation Ltd. nor the + names of its contributors may be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-nwaples_rardecode.txt b/docs/licenses/LICENCE-nwaples_rardecode.txt new file mode 100644 index 000000000..160337a36 --- /dev/null +++ b/docs/licenses/LICENCE-nwaples_rardecode.txt @@ -0,0 +1,23 @@ +Copyright (c) 2015, Nicholas Waples +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-nxadm_tail.txt b/docs/licenses/LICENCE-nxadm_tail.txt new file mode 100644 index 000000000..595de48cd --- /dev/null +++ b/docs/licenses/LICENCE-nxadm_tail.txt @@ -0,0 +1,21 @@ +# The MIT License (MIT) + +# © Copyright 2015 Hewlett Packard Enterprise Development LP +Copyright (c) 2014 ActiveState + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-onsi_ginkgo.txt b/docs/licenses/LICENCE-onsi_ginkgo.txt new file mode 100644 index 000000000..9415ee72c --- /dev/null +++ b/docs/licenses/LICENCE-onsi_ginkgo.txt @@ -0,0 +1,20 @@ +Copyright (c) 2013-2014 Onsi Fakhouri + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/docs/licenses/LICENCE-pierrec-lz4.txt b/docs/licenses/LICENCE-pierrec-lz4.txt new file mode 100644 index 000000000..bb8c35c0b --- /dev/null +++ b/docs/licenses/LICENCE-pierrec-lz4.txt @@ -0,0 +1,27 @@ +Copyright (c) 2015, Pierre Curto +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of xxHash nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-pkg_errors.txt b/docs/licenses/LICENCE-pkg_errors.txt new file mode 100644 index 000000000..141995377 --- /dev/null +++ b/docs/licenses/LICENCE-pkg_errors.txt @@ -0,0 +1,23 @@ +Copyright (c) 2015, Dave Cheney +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-pmezard-go-difflib.txt b/docs/licenses/LICENCE-pmezard-go-difflib.txt new file mode 100644 index 000000000..a635f8b06 --- /dev/null +++ b/docs/licenses/LICENCE-pmezard-go-difflib.txt @@ -0,0 +1,35 @@ +Copyright: 2013 Patrick Mézard +License: BSD-3-clause + +Files: debian/* +Copyright: 2016 Dmitry Smirnov +License: BSD-3-clause + +License: BSD-3-clause + +Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + . 
+ Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + . + Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + . + The names of its contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + . + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-purell.txt b/docs/licenses/LICENCE-purell.txt new file mode 100644 index 000000000..8cf42fe5b --- /dev/null +++ b/docs/licenses/LICENCE-purell.txt @@ -0,0 +1,12 @@ +Copyright (c) 2012, Martin Angers +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-pypi_setuptools.txt b/docs/licenses/LICENCE-pypi_setuptools.txt new file mode 100644 index 000000000..323d2c18e --- /dev/null +++ b/docs/licenses/LICENCE-pypi_setuptools.txt @@ -0,0 +1,19 @@ +Copyright (C) 2016 Jason R Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/docs/licenses/LICENCE-python-certifi.txt b/docs/licenses/LICENCE-python-certifi.txt new file mode 100644 index 000000000..383c7a63d --- /dev/null +++ b/docs/licenses/LICENCE-python-certifi.txt @@ -0,0 +1,409 @@ +Mozilla Public License +Version 2.0 +====================== + + +1. Definitions +-------------- + + 1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the creation + of, or owns Covered Software. + + 1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + + 1.3. "Contribution" + + means Covered Software of a particular Contributor. + + 1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the notice + in Exhibit A, the Executable Form of such Source Code Form, and Modifications + of such Source Code Form, in each case including portions thereof. + + 1.5. "Incompatible With Secondary Licenses" + + means + + a. + + that the initial Contributor has attached the notice described in Exhibit B + to the Covered Software; or + + b. + + that the Covered Software was made available under the terms of version 1.1 + or earlier of the License, but not also under the terms of a Secondary + License. + + 1.6. "Executable Form" + + means any form of the work other than Source Code Form. + + 1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + + 1.8. "License" + + means this document. + + 1.9. 
"Licensable" + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed + by this License. + + 1.10. "Modifications" + + means any of the following: + + a. + + any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. + + any new file in Source Code Form that contains any Covered Software. + + 1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, process, and + apparatus claims, in any patent Licensable by such Contributor that would be + infringed, but for the grant of the License, by the making, using, selling, + offering for sale, having made, import, or transfer of either its Contributions + or its Contributor Version. + + 1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public License, + Version 3.0, or any later versions of those licenses. + + 1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + + 1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this License. For + legal entities, "You" includes any entity that controls, is controlled by, or + is under common control with You. For purposes of this definition, "control" + means (a) the power, direct or indirect, to cause the direction or management + of such entity, whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial ownership of such + entity. + + +2. License Grants and Conditions +-------------------------------- + + + 2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive + license: + + a. + + under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, modify, + display, perform, distribute, and otherwise exploit its Contributions, + either on an unmodified basis, with Modifications, or as part of a Larger + Work; and + + b. + + under Patent Claims of such Contributor to make, use, sell, offer for sale, + have made, import, and otherwise transfer either its Contributions or its + Contributor Version. + + + 2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + + + 2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding + Section 2.1(b) above, no patent license is granted by a Contributor: + + a. + + for any code that a Contributor has removed from Covered Software; or + + b. + + for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. + + under Patent Claims infringed by Covered Software in the absence of its + Contributions. 
+ + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the notice + requirements in Section 3.4). + + + 2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to distribute + the Covered Software under a subsequent version of this License (see + Section 10.2) or under the terms of a Secondary License (if permitted under the + terms of Section 3.3). + + + 2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions are + its original creation(s) or it has sufficient rights to grant the rights to its + Contributions conveyed by this License. + + + 2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + + + 2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities +------------------- + + + 3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form of + the Covered Software is governed by the terms of this License, and how they can + obtain a copy of this License. You may not attempt to alter or restrict the + recipients' rights in the Source Code Form. + + + 3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. + + such Covered Software must also be made available in Source Code Form, as + described in Section 3.1, and You must inform recipients of the Executable + Form how they can obtain a copy of such Source Code Form by reasonable + means in a timely manner, at a charge no more than the cost of distribution + to the recipient; and + + b. + + You may distribute such Executable Form under the terms of this License, or + sublicense it under different terms, provided that the license for the + Executable Form does not attempt to limit or alter the recipients' rights + in the Source Code Form under this License. + + + 3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software with + a work governed by one or more Secondary Licenses, and the Covered Software is + not Incompatible With Secondary Licenses, this License permits You to + additionally distribute such Covered Software under the terms of such Secondary + License(s), so that the recipient of the Larger Work may, at their option, + further distribute the Covered Software under the terms of either this License + or such Secondary License(s). + + + 3.4. Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations of + liability) contained within the Source Code Form of the Covered Software, + except that You may alter any license notices to the extent required to remedy + known factual inaccuracies. + + + 3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, indemnity + or liability obligations to one or more recipients of Covered Software. 
+ However, You may do so only on Your own behalf, and not on behalf of any + Contributor. You must make it absolutely clear that any such warranty, support, + indemnity, or liability obligation is offered by You alone, and You hereby + agree to indemnify every Contributor for any liability incurred by such + Contributor as a result of warranty, support, indemnity or liability terms You + offer. You may include additional disclaimers of warranty and limitations of + liability specific to any jurisdiction. + + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this License with +respect to some or all of the Covered Software due to statute, judicial order, or +regulation then You must: (a) comply with the terms of this License to the +maximum extent possible; and (b) describe the limitations and the code they +affect. Such description must be placed in a text file included with all +distributions of the Covered Software under this License. Except to the extent +prohibited by statute or regulation, such description must be sufficiently +detailed for a recipient of ordinary skill to be able to understand it. + + +5. Termination +-------------- + + 5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, then + the rights granted under this License from a particular Contributor are + reinstated (a) provisionally, unless and until such Contributor explicitly and + finally terminates Your grants, and (b) on an ongoing basis, if such + Contributor fails to notify You of the non-compliance by some reasonable means + prior to 60 days after You have come back into compliance. Moreover, Your + grants from a particular Contributor are reinstated on an ongoing basis if such + Contributor notifies You of the non-compliance by some reasonable means, this + is the first time You have received notice of non-compliance with this License + from such Contributor, and You become compliant prior to 30 days after Your + receipt of the notice. + + 5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, and + cross-claims) alleging that a Contributor Version directly or indirectly + infringes any patent, then the rights granted to You by any and all + Contributors for the Covered Software under Section 2.1 of this License shall + terminate. + + 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + + +6. Disclaimer of Warranty +------------------------- + +Covered Software is provided under this License on an "as is" basis, without +warranty of any kind, either expressed, implied, or statutory, including, without +limitation, warranties that the Covered Software is free of defects, +merchantable, fit for a particular purpose or non-infringing. The entire risk as +to the quality and performance of the Covered Software is with You. Should any +Covered Software prove defective in any respect, You (not any Contributor) assume +the cost of any necessary servicing, repair, or correction. This disclaimer of +warranty constitutes an essential part of this License. 
No use of any Covered +Software is authorized under this License except under this disclaimer. + + +7. Limitation of Liability +-------------------------- + +Under no circumstances and under no legal theory, whether tort (including +negligence), contract, or otherwise, shall any Contributor, or anyone who +distributes Covered Software as permitted above, be liable to You for any direct, +indirect, special, incidental, or consequential damages of any character +including, without limitation, damages for lost profits, loss of goodwill, work +stoppage, computer failure or malfunction, or any and all other commercial +damages or losses, even if such party shall have been informed of the possibility +of such damages. This limitation of liability shall not apply to liability for +death or personal injury resulting from such party's negligence to the extent +applicable law prohibits such limitation. Some jurisdictions do not allow the +exclusion or limitation of incidental or consequential damages, so this exclusion +and limitation may not apply to You. + + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the courts of a +jurisdiction where the defendant maintains its principal place of business and +such litigation shall be governed by laws of that jurisdiction, without reference +to its conflict-of-law provisions. Nothing in this Section shall prevent a +party's ability to bring cross-claims or counter-claims. + + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject matter +hereof. If any provision of this License is held to be unenforceable, such +provision shall be reformed only to the extent necessary to make it enforceable. +Any law or regulation which provides that the language of a contract shall be +construed against the drafter shall not be used to construe this License against +a Contributor. + + +10. Versions of the License +--------------------------- + + + 10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section 10.3, + no one other than the license steward has the right to modify or publish new + versions of this License. Each version will be given a distinguishing version + number. + + + 10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of the + License under which You originally received the Covered Software, or under the + terms of any subsequent version published by the license steward. + + + 10.3. Modified Versions + + If you create software not governed by this License, and you want to create a + new license for such software, you may create and use a modified version of + this License if you rename the license and remove any references to the name of + the license steward (except to note that such modified license differs from + this License). + + + 10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses + + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the notice + described in Exhibit B of this License must be attached. + + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public License, + v. 2.0. If a copy of the MPL was not distributed with this file, You can + obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as defined + by the Mozilla Public License, v. 2.0. + diff --git a/docs/licenses/LICENCE-rogpeppe_go-internal.txt b/docs/licenses/LICENCE-rogpeppe_go-internal.txt new file mode 100644 index 000000000..49ea0f928 --- /dev/null +++ b/docs/licenses/LICENCE-rogpeppe_go-internal.txt @@ -0,0 +1,27 @@ +Copyright (c) 2018 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-sftp.txt b/docs/licenses/LICENCE-sftp.txt new file mode 100644 index 000000000..9ac6ae0a6 --- /dev/null +++ b/docs/licenses/LICENCE-sftp.txt @@ -0,0 +1,23 @@ +BSD Two Clause License +====================== + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT +SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT +OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff --git a/docs/licenses/LICENCE-sigs_k8s_io_json.txt b/docs/licenses/LICENCE-sigs_k8s_io_json.txt new file mode 100644 index 000000000..e5adf7f0c --- /dev/null +++ b/docs/licenses/LICENCE-sigs_k8s_io_json.txt @@ -0,0 +1,238 @@ +Files other than internal/golang/* licensed under: + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +------------------ + +internal/golang/* files licensed under: + + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/LICENCE-uber-go_atomic.txt b/docs/licenses/LICENCE-uber-go_atomic.txt new file mode 100644 index 000000000..12cd09580 --- /dev/null +++ b/docs/licenses/LICENCE-uber-go_atomic.txt @@ -0,0 +1,19 @@ +Copyright (c) 2016 Uber Technologies, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/docs/licenses/LICENCE-ulikunitz_xz.txt b/docs/licenses/LICENCE-ulikunitz_xz.txt new file mode 100644 index 000000000..d358ed04d --- /dev/null +++ b/docs/licenses/LICENCE-ulikunitz_xz.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-urfAVE_cli.txt b/docs/licenses/LICENCE-urfAVE_cli.txt new file mode 100644 index 000000000..2c84c78a1 --- /dev/null +++ b/docs/licenses/LICENCE-urfAVE_cli.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 urfave/cli maintainers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/docs/licenses/LICENCE-urlesc.txt b/docs/licenses/LICENCE-urlesc.txt new file mode 100644 index 000000000..76427ff52 --- /dev/null +++ b/docs/licenses/LICENCE-urlesc.txt @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/docs/licenses/LICENCE-urllib3.txt b/docs/licenses/LICENCE-urllib3.txt new file mode 100644 index 000000000..429a1767e --- /dev/null +++ b/docs/licenses/LICENCE-urllib3.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/docs/licenses/LICENCE-xrash_smetrics.txt b/docs/licenses/LICENCE-xrash_smetrics.txt new file mode 100644 index 000000000..80445682f --- /dev/null +++ b/docs/licenses/LICENCE-xrash_smetrics.txt @@ -0,0 +1,21 @@ +Copyright (C) 2016 Felipe da Cunha Gonçalves +All Rights Reserved. + +MIT LICENSE + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/docs/licenses/LICENCE-yaml-for-Go.txt b/docs/licenses/LICENCE-yaml-for-Go.txt new file mode 100644 index 000000000..2683e4bb1 --- /dev/null +++ b/docs/licenses/LICENCE-yaml-for-Go.txt @@ -0,0 +1,50 @@ + +This project is covered by two different licenses: MIT and Apache. 
+ +#### MIT License #### + +The following files were ported to Go from C files of libyaml, and thus +are still covered by their original MIT license, with the additional +copyright staring in 2011 when the project was ported over: + + apic.go emitterc.go parserc.go readerc.go scannerc.go + writerc.go yamlh.go yamlprivateh.go + +Copyright (c) 2006-2010 Kirill Simonov +Copyright (c) 2006-2011 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +### Apache License ### + +All the remaining project files are covered by the Apache license: + +Copyright (c) 2011-2019 Canonical Ltd + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/operator/.dockerignore b/operator/.dockerignore new file mode 100644 index 000000000..0f046820f --- /dev/null +++ b/operator/.dockerignore @@ -0,0 +1,4 @@ +# More info: https://docs.docker.com/engine/reference/builder/#dockerignore-file +# Ignore build and test binaries. 
+bin/ +testbin/ diff --git a/operator/.gitignore b/operator/.gitignore new file mode 100644 index 000000000..4db1fbd44 --- /dev/null +++ b/operator/.gitignore @@ -0,0 +1,25 @@ + +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +bin +testbin/* + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Kubernetes Generated files - skip generated files, except for vendored files +!vendor/**/zz_generated.* + +# editor and IDE paraphernalia +.idea +*.swp +*.swo +*~ +/config/ diff --git a/operator/BUILD.bazel b/operator/BUILD.bazel new file mode 100755 index 000000000..70ff958ed --- /dev/null +++ b/operator/BUILD.bazel @@ -0,0 +1,5 @@ +filegroup( + name = "srcs", + srcs = glob(["**"]), + visibility = ["//visibility:public"], +) diff --git a/operator/Dockerfile b/operator/Dockerfile new file mode 100644 index 000000000..4152680b7 --- /dev/null +++ b/operator/Dockerfile @@ -0,0 +1,27 @@ +# Build the manager binary +FROM golang:1.16 as builder + +WORKDIR /workspace +# Copy the Go Modules manifests +COPY go.mod go.mod +COPY go.sum go.sum +# cache deps before building and copying source so that we don't need to re-download as much +# and so that source changes don't invalidate our downloaded layer +RUN go mod download + +# Copy the go source +COPY main.go main.go +COPY api/ api/ +COPY controllers/ controllers/ + +# Build +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -a -o manager main.go + +# Use distroless as minimal base image to package the manager binary +# Refer to https://github.com/GoogleContainerTools/distroless for more details +FROM gcr.io/distroless/static:nonroot +WORKDIR / +COPY --from=builder /workspace/manager . +USER 65532:65532 + +ENTRYPOINT ["/manager"] diff --git a/operator/Makefile b/operator/Makefile new file mode 100644 index 000000000..fddbad234 --- /dev/null +++ b/operator/Makefile @@ -0,0 +1,130 @@ + +# Image URL to use all building/pushing image targets +IMG ?= controller:latest +# ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary. +ENVTEST_K8S_VERSION = 1.19.2 + +# Get the currently used golang install path (in GOPATH/bin, unless GOBIN is set) +ifeq (,$(shell go env GOBIN)) +GOBIN=$(shell go env GOPATH)/bin +else +GOBIN=$(shell go env GOBIN) +endif + +# Setting SHELL to bash allows bash commands to be executed by recipes. +# This is a requirement for 'setup-envtest.sh' in the test target. +# Options are set to exit when a recipe line exits non-zero or a piped command fails. +SHELL = /usr/bin/env bash -o pipefail +.SHELLFLAGS = -ec + +.PHONY: all +all: build + +##@ General + +# The help target prints out all targets with their descriptions organized +# beneath their categories. The categories are represented by '##@' and the +# target descriptions by '##'. The awk commands is responsible for reading the +# entire set of makefiles included in this invocation, looking for lines of the +# file as xyz: ## something, and then pretty-format the target and help. Then, +# if there's a line with ##@ something, that gets pretty-printed as a category. +# More info on the usage of ANSI control characters for terminal formatting: +# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters +# More info on the awk command: +# http://linuxcommand.org/lc3_adv_awk.php + +.PHONY: help +help: ## Display this help. 
+ @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) + +##@ Development + +.PHONY: manifests +manifests: controller-gen ## Generate WebhookConfiguration, ClusterRole and CustomResourceDefinition objects. + $(CONTROLLER_GEN) rbac:roleName=manager-role crd webhook paths="./..." output:crd:artifacts:config=config/crd/bases + +.PHONY: generate +generate: controller-gen ## Generate code containing DeepCopy, DeepCopyInto, and DeepCopyObject method implementations. + $(CONTROLLER_GEN) object:headerFile="hack/boilerplate.go.txt" paths="./..." + +.PHONY: fmt +fmt: ## Run go fmt against code. + go fmt ./... + +.PHONY: vet +vet: ## Run go vet against code. + go vet ./... + +.PHONY: test +test: manifests generate fmt vet envtest ## Run tests. + KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) -p path)" go test ./... -coverprofile cover.out + +##@ Build + +.PHONY: build +build: generate fmt vet ## Build manager binary. + go build -o bin/manager main.go + +.PHONY: run +run: manifests generate fmt vet ## Run a controller from your host. + go run ./main.go + +.PHONY: docker-build +docker-build: test ## Build docker image with the manager. + docker build -t ${IMG} . + +.PHONY: docker-push +docker-push: ## Push docker image with the manager. + docker push ${IMG} + +##@ Deployment + +ifndef ignore-not-found + ignore-not-found = false +endif + +.PHONY: install +install: manifests kustomize ## Install CRDs into the K8s cluster specified in ~/.kube/config. + $(KUSTOMIZE) build config/crd | kubectl apply -f - + +.PHONY: uninstall +uninstall: manifests kustomize ## Uninstall CRDs from the K8s cluster specified in ~/.kube/config. Call with ignore-not-found=true to ignore resource not found errors during deletion. + $(KUSTOMIZE) build config/crd | kubectl delete --ignore-not-found=$(ignore-not-found) -f - + +.PHONY: deploy +deploy: manifests kustomize ## Deploy controller to the K8s cluster specified in ~/.kube/config. + cd config/manager && $(KUSTOMIZE) edit set image controller=${IMG} + $(KUSTOMIZE) build config/default | kubectl apply -f - + +.PHONY: undeploy +undeploy: ## Undeploy controller from the K8s cluster specified in ~/.kube/config. Call with ignore-not-found=true to ignore resource not found errors during deletion. + $(KUSTOMIZE) build config/default | kubectl delete --ignore-not-found=$(ignore-not-found) -f - + +CONTROLLER_GEN = $(shell pwd)/bin/controller-gen +.PHONY: controller-gen +controller-gen: ## Download controller-gen locally if necessary. + $(call go-get-tool,$(CONTROLLER_GEN),sigs.k8s.io/controller-tools/cmd/controller-gen@v0.7.0) + +KUSTOMIZE = $(shell pwd)/bin/kustomize +.PHONY: kustomize +kustomize: ## Download kustomize locally if necessary. + $(call go-get-tool,$(KUSTOMIZE),sigs.k8s.io/kustomize/kustomize/v3@v3.8.7) + +ENVTEST = $(shell pwd)/bin/setup-envtest +.PHONY: envtest +envtest: ## Download envtest-setup locally if necessary. + $(call go-get-tool,$(ENVTEST),sigs.k8s.io/controller-runtime/tools/setup-envtest@latest) + +# go-get-tool will 'go get' any package $2 and install it to $1. 
+PROJECT_DIR := $(shell dirname $(abspath $(lastword $(MAKEFILE_LIST))))
+define go-get-tool
+@[ -f $(1) ] || { \
+set -e ;\
+TMP_DIR=$$(mktemp -d) ;\
+cd $$TMP_DIR ;\
+go mod init tmp ;\
+echo "Downloading $(2)" ;\
+GOBIN=$(PROJECT_DIR)/bin go get $(2) ;\
+rm -rf $$TMP_DIR ;\
+}
+endef
diff --git a/operator/PROJECT b/operator/PROJECT
new file mode 100644
index 000000000..067ff0f40
--- /dev/null
+++ b/operator/PROJECT
@@ -0,0 +1,16 @@
+domain: k8s.io
+layout:
+- go.kubebuilder.io/v3
+projectName: flapp-operator
+repo: fedlearner.net
+resources:
+- api:
+    crdVersion: v1
+    namespaced: true
+  controller: true
+  domain: k8s.io
+  group: fedlearner
+  kind: FedApp
+  path: fedlearner.net/operator/api/v1alpha1
+  version: v1alpha1
+version: "3"
diff --git a/operator/README.md b/operator/README.md
new file mode 100644
index 000000000..16ba93d46
--- /dev/null
+++ b/operator/README.md
@@ -0,0 +1,32 @@
+# Generate yamls
+`make manifests`
+
+Generates the yamls under ./config, such as ./config/crd/bases and ./config/rbac.
+
+After generation, add the annotation `api-approved.kubernetes.io: https://github.com/kubernetes/kubernetes/pull/78458` to avoid Kubernetes API-approval warnings.
+# Test Controller locally
+`make install`
+
+Installs the CRDs and RBAC into the cluster specified in your .kube config.
+
+
+`make run`
+
+Runs a controller locally in your terminal, watching and updating resources in the cluster from your .kube config.
+
+# Run in cluster
+`make docker-build docker-push IMG=<some-registry>/<project-name>:tag`
+`make deploy IMG=<some-registry>/<project-name>:tag`
+
+# Integration test
+`make test`
+
+# Further development
+Framework documentation: https://book.kubebuilder.io/
+
+Only /api/v1alpha1/fedapp_types.go (definitions) and /controllers/fedapp_controller.go (control logic) need to be read and modified.
+
+Future CRDs and their controllers all live in this directory (project); follow the commands in the documentation above to scaffold a new CRD.
+
+# Cluster dependencies
+- Versions above 0.1.2 use a non-headless Service, so to run TensorFlow the cluster needs hairpin mode enabled.
\ No newline at end of file
diff --git a/operator/api/v1alpha1/fedapp_types.go b/operator/api/v1alpha1/fedapp_types.go
new file mode 100644
index 000000000..fbfc74c03
--- /dev/null
+++ b/operator/api/v1alpha1/fedapp_types.go
@@ -0,0 +1,185 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package v1alpha1
+
+import (
+	corev1 "k8s.io/api/core/v1"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+)
+
+// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN!
+// NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized.
+
+// FedReplicaType can be any string.
+type FedReplicaType string
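Since FedReplicaType is a free-form string, replica-type names are whatever the submitting client chooses. As an illustration only (these constants are assumptions; the patch predeclares no values), typical federated-learning roles might be written as:

```go
// Illustrative only: FedReplicaType is intentionally open-ended, and this
// patch declares no canonical values. The names below are assumptions.
const (
	ReplicaTypeMaster FedReplicaType = "Master"
	ReplicaTypePS     FedReplicaType = "PS"
	ReplicaTypeWorker FedReplicaType = "Worker"
)
```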
+
+// RestartPolicy describes how the replicas should be restarted.
+// Only one of the following restart policies may be specified.
+// If none of the following policies is specified, the default one
+// is RestartPolicyOnFailure.
+type RestartPolicy string
+
+const (
+	RestartPolicyAlways    RestartPolicy = "Always"
+	RestartPolicyOnFailure RestartPolicy = "OnFailure"
+	RestartPolicyNever     RestartPolicy = "Never"
+	// RestartPolicyExitCode means that the user sets exit codes themselves,
+	// and the controller will check these exit codes to
+	// determine the behavior when an error occurs:
+	// - 1-127: permanent error, do not restart.
+	// - 128-255: retryable error, will restart the pod.
+	RestartPolicyExitCode RestartPolicy = "ExitCode"
+)
+
+// ReplicaSpec is a description of the replica
+type ReplicaSpec struct {
+	// Replicas is the desired number of replicas of the given template.
+	// +kubebuilder:default=0
+	// +kubebuilder:validation:Maximum=200
+	// +kubebuilder:validation:Minimum=0
+	Replicas *int64 `json:"replicas,omitempty"`
+
+	// Template is the object that describes the pod that
+	// will be created for this replica.
+	Template corev1.PodTemplateSpec `json:"template,omitempty"`
+
+	// Restart policy for all replicas within the app.
+	// One of Always, OnFailure, Never and ExitCode.
+	// +kubebuilder:default=OnFailure
+	RestartPolicy RestartPolicy `json:"restartPolicy,omitempty"`
+
+	// Optional number of retries before marking this job failed.
+	// +kubebuilder:default=1
+	// +kubebuilder:validation:Maximum=100
+	// +kubebuilder:validation:Minimum=1
+	BackoffLimit *int64 `json:"backoffLimit,omitempty"`
+
+	// Whether all pods of this replica must have succeeded for the FedApp to be marked as complete.
+	// +kubebuilder:default=true
+	MustSuccess *bool `json:"mustSuccess,omitempty"`
+
+	// +kubebuilder:default:={"containerPort": 50051, "name": "flapp-port", "protocol": "TCP"}
+	Port *corev1.ContainerPort `json:"port,omitempty"`
+}
+
+// FedReplicaSpecs is the mapping from FedReplicaType to ReplicaSpec
+type FedReplicaSpecs map[FedReplicaType]ReplicaSpec
+
+// FedAppSpec defines the desired state of FedApp
+type FedAppSpec struct {
+	// INSERT ADDITIONAL SPEC FIELDS - desired state of cluster
+	// Important: Run "make" to regenerate code after modifying this file
+	// FedReplicaSpecs defines the replica spec for each replica type
+	FedReplicaSpecs FedReplicaSpecs `json:"fedReplicaSpecs"`
+
+	// TTLSecondsAfterFinished is the TTL to clean up jobs.
+	// It may take extra ReconcilePeriod seconds for the cleanup, since
+	// reconcile gets called periodically.
+	// Defaults to 86400 (one day).
+	// +kubebuilder:default=86400
+	// +optional
+	TTLSecondsAfterFinished *int64 `json:"ttlSecondsAfterFinished,omitempty"`
+
+	// Specifies the duration in seconds relative to the startTime that the job may be active
+	// before the system tries to terminate it; value must be a positive integer.
+	// +optional
+	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty"`
+}
+
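To make the shape of the spec concrete, here is a minimal sketch of a FedAppSpec built in Go, assuming it lives alongside this package; the "Worker" replica-type name and the container image are hypothetical. Fields left unset fall back to the kubebuilder defaults above (RestartPolicy OnFailure, BackoffLimit 1, MustSuccess true, port 50051):

```go
// Sketch only: a FedAppSpec with two replicas of a hypothetical "Worker" type.
func exampleFedAppSpec() FedAppSpec {
	replicas := int64(2)
	deadline := int64(3600) // give up after one hour
	return FedAppSpec{
		ActiveDeadlineSeconds: &deadline,
		FedReplicaSpecs: FedReplicaSpecs{
			FedReplicaType("Worker"): ReplicaSpec{
				Replicas: &replicas,
				Template: corev1.PodTemplateSpec{
					Spec: corev1.PodSpec{
						Containers: []corev1.Container{
							{Name: "worker", Image: "example.invalid/worker:latest"},
						},
					},
				},
			},
		},
	}
}
```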
+// FedAppStatus defines the observed state of FedApp
+type FedAppStatus struct {
+	// INSERT ADDITIONAL STATUS FIELD - define observed state of cluster
+	// Important: Run "make" to regenerate code after modifying this file
+
+	// +optional
+	StartTime *metav1.Time `json:"startTime"`
+
+	Conditions []FedAppCondition `json:"conditions,omitempty"`
+
+	// Records the names of pods that have terminated; a hack for overly fast pod GC.
+	// TODO: when pod GC is so fast that the fedapp controller does not
+	// have enough time to record pods in the TerminatedPodsMap field,
+	// use a finalizer to avoid it.
+	// +optional
+	TerminatedPodsMap TerminatedPodsMap `json:"terminatedPodsMap,omitempty"`
+}
+type empty struct{}
+type PodSet map[string]empty
+type TerminatedPodsMap map[FedReplicaType]*TerminatedPods
+
+// TerminatedPods holds the names of Pods that have terminated.
+type TerminatedPods struct {
+	// Succeeded holds the names of succeeded Pods.
+	// +optional
+	Succeeded []PodSet `json:"succeeded,omitempty"`
+
+	// Failed holds the names of failed Pods.
+	// +optional
+	Failed []PodSet `json:"failed,omitempty"`
+}
+
+// FedAppConditionType is a valid value for FedAppCondition.Type
+type FedAppConditionType string
+
+// These are valid conditions of a job.
+const (
+	// Succeeded means the job has completed its execution.
+	// true: completed, false: failed, unknown: running
+	Succeeded FedAppConditionType = "succeeded"
+)
+
+// FedAppCondition describes the current state of a job.
+type FedAppCondition struct {
+	// Type of job condition.
+	Type FedAppConditionType `json:"type"`
+	// Status of the condition, one of True, False, Unknown.
+	Status corev1.ConditionStatus `json:"status"`
+
+	// Last time the condition transitioned from one status to another.
+	// +optional
+	LastTransitionTime metav1.Time `json:"lastTransitionTime"`
+	// (brief) reason for the condition's last transition.
+	// +optional
+	Reason string `json:"reason"`
+	// Human-readable message indicating details about the last transition.
+	// +optional
+	Message string `json:"message"`
+}
+
+//+kubebuilder:object:root=true
+//+kubebuilder:subresource:status
+
+// FedApp is the Schema for the fedapps API
+type FedApp struct {
+	metav1.TypeMeta   `json:",inline"`
+	metav1.ObjectMeta `json:"metadata,omitempty"`
+
+	Spec   FedAppSpec   `json:"spec,omitempty"`
+	Status FedAppStatus `json:"status,omitempty"`
+}
+
+//+kubebuilder:object:root=true
+
+// FedAppList contains a list of FedApp
+type FedAppList struct {
+	metav1.TypeMeta `json:",inline"`
+	metav1.ListMeta `json:"metadata,omitempty"`
+	Items           []FedApp `json:"items"`
+}
+
+func init() {
+	SchemeBuilder.Register(&FedApp{}, &FedAppList{})
+}
diff --git a/operator/api/v1alpha1/groupversion_info.go b/operator/api/v1alpha1/groupversion_info.go
new file mode 100644
index 000000000..d39699efd
--- /dev/null
+++ b/operator/api/v1alpha1/groupversion_info.go
@@ -0,0 +1,35 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +// Package v1alpha1 contains API Schema definitions for the fedlearner v1alpha1 API group +//+kubebuilder:object:generate=true +//+groupName=fedlearner.k8s.io +package v1alpha1 + +import ( + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // GroupVersion is group version used to register these objects + GroupVersion = schema.GroupVersion{Group: "fedlearner.k8s.io", Version: "v1alpha1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme + SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} + + // AddToScheme adds the types in this group-version to the given scheme. + AddToScheme = SchemeBuilder.AddToScheme +) diff --git a/operator/api/v1alpha1/zz_generated.deepcopy.go b/operator/api/v1alpha1/zz_generated.deepcopy.go new file mode 100644 index 000000000..9330b4627 --- /dev/null +++ b/operator/api/v1alpha1/zz_generated.deepcopy.go @@ -0,0 +1,322 @@ +//go:build !ignore_autogenerated +// +build !ignore_autogenerated + +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Code generated by controller-gen. DO NOT EDIT. + +package v1alpha1 + +import ( + "k8s.io/api/core/v1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *FedApp) DeepCopyInto(out *FedApp) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FedApp. +func (in *FedApp) DeepCopy() *FedApp { + if in == nil { + return nil + } + out := new(FedApp) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *FedApp) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *FedAppCondition) DeepCopyInto(out *FedAppCondition) { + *out = *in + in.LastTransitionTime.DeepCopyInto(&out.LastTransitionTime) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FedAppCondition. +func (in *FedAppCondition) DeepCopy() *FedAppCondition { + if in == nil { + return nil + } + out := new(FedAppCondition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *FedAppList) DeepCopyInto(out *FedAppList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]FedApp, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FedAppList. +func (in *FedAppList) DeepCopy() *FedAppList { + if in == nil { + return nil + } + out := new(FedAppList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *FedAppList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *FedAppSpec) DeepCopyInto(out *FedAppSpec) { + *out = *in + if in.FedReplicaSpecs != nil { + in, out := &in.FedReplicaSpecs, &out.FedReplicaSpecs + *out = make(FedReplicaSpecs, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } + if in.TTLSecondsAfterFinished != nil { + in, out := &in.TTLSecondsAfterFinished, &out.TTLSecondsAfterFinished + *out = new(int64) + **out = **in + } + if in.ActiveDeadlineSeconds != nil { + in, out := &in.ActiveDeadlineSeconds, &out.ActiveDeadlineSeconds + *out = new(int64) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FedAppSpec. +func (in *FedAppSpec) DeepCopy() *FedAppSpec { + if in == nil { + return nil + } + out := new(FedAppSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *FedAppStatus) DeepCopyInto(out *FedAppStatus) { + *out = *in + if in.StartTime != nil { + in, out := &in.StartTime, &out.StartTime + *out = (*in).DeepCopy() + } + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]FedAppCondition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.TerminatedPodsMap != nil { + in, out := &in.TerminatedPodsMap, &out.TerminatedPodsMap + *out = make(TerminatedPodsMap, len(*in)) + for key, val := range *in { + var outVal *TerminatedPods + if val == nil { + (*out)[key] = nil + } else { + in, out := &val, &outVal + *out = new(TerminatedPods) + (*in).DeepCopyInto(*out) + } + (*out)[key] = outVal + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FedAppStatus. +func (in *FedAppStatus) DeepCopy() *FedAppStatus { + if in == nil { + return nil + } + out := new(FedAppStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in FedReplicaSpecs) DeepCopyInto(out *FedReplicaSpecs) { + { + in := &in + *out = make(FedReplicaSpecs, len(*in)) + for key, val := range *in { + (*out)[key] = *val.DeepCopy() + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FedReplicaSpecs. +func (in FedReplicaSpecs) DeepCopy() FedReplicaSpecs { + if in == nil { + return nil + } + out := new(FedReplicaSpecs) + in.DeepCopyInto(out) + return *out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in PodSet) DeepCopyInto(out *PodSet) { + { + in := &in + *out = make(PodSet, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PodSet. +func (in PodSet) DeepCopy() PodSet { + if in == nil { + return nil + } + out := new(PodSet) + in.DeepCopyInto(out) + return *out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ReplicaSpec) DeepCopyInto(out *ReplicaSpec) { + *out = *in + if in.Replicas != nil { + in, out := &in.Replicas, &out.Replicas + *out = new(int64) + **out = **in + } + in.Template.DeepCopyInto(&out.Template) + if in.BackoffLimit != nil { + in, out := &in.BackoffLimit, &out.BackoffLimit + *out = new(int64) + **out = **in + } + if in.MustSuccess != nil { + in, out := &in.MustSuccess, &out.MustSuccess + *out = new(bool) + **out = **in + } + if in.Port != nil { + in, out := &in.Port, &out.Port + *out = new(v1.ContainerPort) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReplicaSpec. +func (in *ReplicaSpec) DeepCopy() *ReplicaSpec { + if in == nil { + return nil + } + out := new(ReplicaSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TerminatedPods) DeepCopyInto(out *TerminatedPods) { + *out = *in + if in.Succeeded != nil { + in, out := &in.Succeeded, &out.Succeeded + *out = make([]PodSet, len(*in)) + for i := range *in { + if (*in)[i] != nil { + in, out := &(*in)[i], &(*out)[i] + *out = make(PodSet, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + } + } + if in.Failed != nil { + in, out := &in.Failed, &out.Failed + *out = make([]PodSet, len(*in)) + for i := range *in { + if (*in)[i] != nil { + in, out := &(*in)[i], &(*out)[i] + *out = make(PodSet, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TerminatedPods. +func (in *TerminatedPods) DeepCopy() *TerminatedPods { + if in == nil { + return nil + } + out := new(TerminatedPods) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in TerminatedPodsMap) DeepCopyInto(out *TerminatedPodsMap) { + { + in := &in + *out = make(TerminatedPodsMap, len(*in)) + for key, val := range *in { + var outVal *TerminatedPods + if val == nil { + (*out)[key] = nil + } else { + in, out := &val, &outVal + *out = new(TerminatedPods) + (*in).DeepCopyInto(*out) + } + (*out)[key] = outVal + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TerminatedPodsMap. +func (in TerminatedPodsMap) DeepCopy() TerminatedPodsMap { + if in == nil { + return nil + } + out := new(TerminatedPodsMap) + in.DeepCopyInto(out) + return *out +} diff --git a/operator/controllers/cluster_spec.go b/operator/controllers/cluster_spec.go new file mode 100644 index 000000000..a30dc07aa --- /dev/null +++ b/operator/controllers/cluster_spec.go @@ -0,0 +1,59 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+	"encoding/json"
+	"fmt"
+	"strconv"
+	"strings"
+
+	fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+)
+
+const (
+	ServiceFormat = "%s.%s.svc"
+)
+
+// ClusterSpec maps each FedReplicaType to the list of service addresses of its replicas.
+type ClusterSpec struct {
+	Services map[fedlearnerv2.FedReplicaType][]string `json:"clusterSpec"`
+}
+
+// GenIndexName builds the per-replica resource name <app>-<replica-type>-<index>,
+// replacing any "/" so the result stays a valid name.
+func GenIndexName(appName string, rt string, index int) string {
+	n := appName + "-" + rt + "-" + strconv.Itoa(index)
+	return strings.Replace(n, "/", "-", -1)
+}
+
+// NewClusterSpec enumerates every replica of every replica type and records its
+// in-cluster service address ("<name>.<namespace>.svc:<port>").
+func NewClusterSpec(namespace string, app *fedlearnerv2.FedApp) ClusterSpec {
+	clusterSpec := ClusterSpec{
+		Services: make(map[fedlearnerv2.FedReplicaType][]string),
+	}
+	for rtype, spec := range app.Spec.FedReplicaSpecs {
+		rt := strings.ToLower(string(rtype))
+		replicas := int(*spec.Replicas)
+		port := spec.Port.ContainerPort
+
+		for index := 0; index < replicas; index++ {
+			serviceName := fmt.Sprintf(ServiceFormat, GenIndexName(app.Name, rt, index), namespace)
+			clusterSpec.Services[rtype] = append(clusterSpec.Services[rtype], fmt.Sprintf("%s:%d", serviceName, port))
+		}
+	}
+	return clusterSpec
+}
+
+// Marshal serializes the ClusterSpec to JSON, intended for the CLUSTER_SPEC
+// pod environment variable used by the controller.
+func (cs ClusterSpec) Marshal() ([]byte, error) {
+	return json.Marshal(cs)
+}
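To see what NewClusterSpec and Marshal produce end to end, here is a short sketch with hypothetical inputs (app name "demo" in namespace "default", one "Worker" replica type with *spec.Replicas == 2 and the default port 50051):

```go
// Sketch only: the app name, namespace, and replica count are assumptions;
// port 50051 is the kubebuilder default from ReplicaSpec.
func printClusterSpec(app *fedlearnerv2.FedApp) error {
	cs := NewClusterSpec("default", app)
	out, err := cs.Marshal()
	if err != nil {
		return err
	}
	fmt.Println(string(out))
	// With two "Worker" replicas of an app named "demo" this prints:
	// {"clusterSpec":{"Worker":["demo-worker-0.default.svc:50051","demo-worker-1.default.svc:50051"]}}
	return nil
}
```

Note that NewClusterSpec lowercases the replica type for the per-replica Service names, while the JSON keys keep the FedReplicaType's original casing.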
diff --git a/operator/controllers/fedapp_controller.go b/operator/controllers/fedapp_controller.go
new file mode 100644
index 000000000..d83033166
--- /dev/null
+++ b/operator/controllers/fedapp_controller.go
@@ -0,0 +1,346 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+	"context"
+	"time"
+
+	fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+	v1 "k8s.io/api/core/v1"
+	networking "k8s.io/api/networking/v1beta1"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+	"k8s.io/apimachinery/pkg/runtime"
+	ctrl "sigs.k8s.io/controller-runtime"
+	"sigs.k8s.io/controller-runtime/pkg/client"
+	"sigs.k8s.io/controller-runtime/pkg/log"
+)
+
+// FedAppReconciler reconciles a FedApp object
+type FedAppReconciler struct {
+	client.Client
+	Scheme *runtime.Scheme
+}
+
+const (
+	flReplicaTypeLabel  = "fl-replica-type"
+	flReplicaIndexLabel = "fl-replica-index"
+	AppNameLabel        = "app-name"
+
+	// Env keys in the pod
+	serviceID    = "SERVICE_ID"
+	clusterSpec  = "CLUSTER_SPEC"
+	replicaIndex = "INDEX"
+)
+
+//+kubebuilder:rbac:groups=fedlearner.k8s.io,resources=fedapps,verbs=get;list;watch;create;update;patch;delete
+//+kubebuilder:rbac:groups=fedlearner.k8s.io,resources=fedapps/status,verbs=get;update;patch
+//+kubebuilder:rbac:groups=fedlearner.k8s.io,resources=fedapps/finalizers,verbs=update
+//+kubebuilder:rbac:groups=core,resources=pods,verbs=get;list;watch;create;delete
+//+kubebuilder:rbac:groups=core,resources=services,verbs=get;list;create;delete
+//+kubebuilder:rbac:groups=networking,resources=ingress,verbs=get;create;delete
+
+// Reconcile is part of the main kubernetes reconciliation loop which aims to
+// move the current state of the cluster closer to the desired state. It
+// compares the state specified by the FedApp object against the actual
+// cluster state, and then performs operations to make the cluster state
+// reflect the state specified by the user.
+//
+// For more details, check Reconcile and its Result here:
+// - https://pkg.go.dev/sigs.k8s.io/controller-runtime@v0.10.0/pkg/reconcile
+func (r *FedAppReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {
+	log := log.FromContext(ctx)
+
+	startTime := time.Now()
+	defer func() {
+		log.Info("Finished syncing job", req.NamespacedName.Name, time.Since(startTime).String())
+	}()
+	var app fedlearnerv2.FedApp
+	if err := r.Get(ctx, req.NamespacedName, &app); err != nil {
+		log.Info("unable to fetch FedApp")
+		// we'll ignore not-found errors, since they can't be fixed by an immediate
+		// requeue (we'll need to wait for a new notification), and we can get them
+		// on deleted requests.
+		return ctrl.Result{}, client.IgnoreNotFound(err)
+	}
+
+	// if the job finished previously, we don't want to redo the termination
+	if isAppFinished(&app) {
+		// release all resources
+		// r.releaseAppResource(ctx, req)
+		// Complete TTL check
+		if app.Spec.TTLSecondsAfterFinished != nil {
+			now := metav1.Now()
+			var finished time.Time
+			for _, c := range app.Status.Conditions {
+				if c.Type == fedlearnerv2.Succeeded {
+					finished = c.LastTransitionTime.Time
+					break
+				}
+			}
+			duration := now.Time.Sub(finished)
+			allowedDuration := time.Duration(*app.Spec.TTLSecondsAfterFinished) * time.Second
+			if duration >= allowedDuration {
+				log.Info("FedApp TTLSecondsAfterFinished terminating")
+				if err := r.Delete(ctx, &app); client.IgnoreNotFound(err) != nil {
+					return ctrl.Result{}, err
+				}
+				return ctrl.Result{}, nil
+			}
+			return ctrl.Result{RequeueAfter: time.Duration(*app.Spec.TTLSecondsAfterFinished) * time.Second}, nil
+
+		}
+		return ctrl.Result{}, nil
+	}
+
+	if app.Status.TerminatedPodsMap == nil {
+		app.Status.TerminatedPodsMap = InitTerminatedPodsMap(app)
+	}
+
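InitTerminatedPodsMap is defined elsewhere in this patch and is not shown in this hunk. A plausible sketch, purely an assumption based on the types in fedapp_types.go, is to seed one empty TerminatedPods record per declared replica type so later bookkeeping can append without nil checks:

```go
// Assumed sketch only -- not necessarily the patch's actual implementation.
func InitTerminatedPodsMap(app fedlearnerv2.FedApp) fedlearnerv2.TerminatedPodsMap {
	m := make(fedlearnerv2.TerminatedPodsMap, len(app.Spec.FedReplicaSpecs))
	for rtype := range app.Spec.FedReplicaSpecs {
		m[rtype] = &fedlearnerv2.TerminatedPods{}
	}
	return m
}
```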
+	if app.Status.StartTime == nil {
+		// Check if pods of the last execution have all been deleted.
+		var childPods v1.PodList
+		if err := r.List(ctx, &childPods, client.InNamespace(req.Namespace), client.MatchingFields{ownerKey: req.Name}); err != nil {
+			log.Error(err, "unable to list child Pods")
+			return ctrl.Result{}, err
+		}
+		if len(childPods.Items) > 0 {
+			log.Info("Delete all pods for last Execution.")
+			for _, pod := range childPods.Items {
+				log.Info("Delete Pod", "Pod", pod.Name)
+				if err := r.Delete(ctx, &pod, client.PropagationPolicy(metav1.DeletePropagationBackground)); client.IgnoreNotFound(err) != nil {
+					log.Error(err, "Failed to delete pods")
+					return ctrl.Result{}, err
+				}
+			}
+
+			return ctrl.Result{}, nil
+		}
+		now := metav1.Now()
+		app.Status.StartTime = &now
+		if err := r.Status().Update(ctx, &app); err != nil {
+			log.Error(err, "unable to update FedApp status StartTime")
+			return ctrl.Result{}, err
+		}
+
+		// enqueue a sync to check if the job is past ActiveDeadlineSeconds
+		if app.Spec.ActiveDeadlineSeconds != nil {
+			log.Info("FedApp has ActiveDeadlineSeconds will sync after", "ActiveDeadlineSeconds", *app.Spec.ActiveDeadlineSeconds)
+			return ctrl.Result{RequeueAfter: time.Duration(*app.Spec.ActiveDeadlineSeconds) * time.Second}, nil
+		}
+		return ctrl.Result{}, nil
+	}
+	if app.Spec.ActiveDeadlineSeconds != nil && app.Status.StartTime != nil {
+		now := metav1.Now()
+		start := app.Status.StartTime.Time
+		duration := now.Time.Sub(start)
+		allowedDuration := time.Duration(*app.Spec.ActiveDeadlineSeconds) * time.Second
+		if duration >= allowedDuration {
+			log.Info("FedApp has been running longer than activeDeadlineSeconds")
+			app.Status.Conditions, _ = ensureConditionStatus(app.Status.Conditions, fedlearnerv2.Succeeded, v1.ConditionFalse, "DeadlineExceeded", "FedApp was active longer than specified deadline")
+			if err := r.Status().Update(ctx, &app); err != nil {
+				log.Error(err, "unable to update FedApp status DeadlineExceeded")
+				return ctrl.Result{}, err
+			}
+			// Release the resources on the next reconcile.
+			// We cannot release resources here synchronously, because the status update request's response can't ensure the status has been updated in etcd.
+			// And if we delete the pods synchronously, k8s can't promise the delete request lands after the status update request, so the next reconcile may create new pods.
+			return ctrl.Result{}, nil
+		}
+
+	}
+
+	// sync service
+	if err := r.syncServices(ctx, &app); err != nil {
+		log.Error(err, "unable to sync service")
+		return ctrl.Result{}, err
+	}
+	// sync ingress
+	if err := r.syncIngress(ctx, &app); err != nil {
+		log.Error(err, "unable to sync ingress")
+		return ctrl.Result{}, err
+	}
+	// sync pod
+	completed := true
+	var childPods v1.PodList
+	if err := r.List(ctx, &childPods, client.InNamespace(req.Namespace), client.MatchingFields{ownerKey: req.Name}); err != nil {
+		log.Error(err, "unable to list child Pods")
+		return ctrl.Result{}, err
+	}
+	for rtype, spec := range app.Spec.FedReplicaSpecs {
+		replicaResult, err := r.SyncReplicas(ctx, &app, rtype, &childPods, &spec)
+		if replicaResult.isFailed {
+			log.Info("FedApp failed")
+			// Don't clean resources synchronously, because we must wait for the
+			// update request to finish in order to keep reconcile idempotent.
+ return ctrl.Result{}, nil
+ }
+ if err != nil {
+ return ctrl.Result{}, err
+ }
+
+ completed = completed && replicaResult.isCompleted
+
+ }
+ if completed {
+ app.Status.Conditions, _ = ensureConditionStatus(app.Status.Conditions, fedlearnerv2.Succeeded, v1.ConditionTrue, "Completed", "")
+ }
+ // A single status update covers both the Completed condition (if set above)
+ // and the terminated-pods bookkeeping done by SyncReplicas.
+ if err := r.Status().Update(ctx, &app); err != nil {
+ log.Error(err, "unable to update FedApp status when reconcile finished")
+ return ctrl.Result{}, err
+ }
+ return ctrl.Result{}, nil
+}
+
+type ReplicaResult struct {
+ isFailed bool
+ isCompleted bool
+}
+
+// ensureConditionStatus appends or updates an existing condition of the
+// given type with the given status value. The function returns a bool to let the
+// caller know if the list was changed (either appended or updated).
+func ensureConditionStatus(list []fedlearnerv2.FedAppCondition, cType fedlearnerv2.FedAppConditionType, status v1.ConditionStatus, reason, message string) ([]fedlearnerv2.FedAppCondition, bool) {
+ for i := range list {
+ if list[i].Type == cType {
+ if list[i].Status != status || list[i].Reason != reason || list[i].Message != message {
+ list[i].Status = status
+ list[i].LastTransitionTime = metav1.Now()
+ list[i].Reason = reason
+ list[i].Message = message
+ return list, true
+ }
+ return list, false
+ }
+ }
+
+ return append(list, *newCondition(cType, status, reason, message)), true
+}
+
+func newCondition(conditionType fedlearnerv2.FedAppConditionType, status v1.ConditionStatus, reason, message string) *fedlearnerv2.FedAppCondition {
+ return &fedlearnerv2.FedAppCondition{
+ Type: conditionType,
+ Status: status,
+ LastTransitionTime: metav1.Now(),
+ Reason: reason,
+ Message: message,
+ }
+}
+
+// isAppFinished checks whether the given FedApp has finished execution.
+// It does not discriminate between successful and failed terminations.
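+// The Succeeded condition carries the terminal result: ConditionTrue means the
+// app succeeded, ConditionFalse means it failed.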
+func isAppFinished(app *fedlearnerv2.FedApp) bool {
+ for _, c := range app.Status.Conditions {
+ if c.Type == fedlearnerv2.Succeeded && (c.Status == v1.ConditionTrue || c.Status == v1.ConditionFalse) {
+ return true
+ }
+ }
+ return false
+}
+
+func (r *FedAppReconciler) releaseAppResource(ctx context.Context, req ctrl.Request) error {
+ log := log.FromContext(ctx)
+ var ingress networking.Ingress
+ err := r.Get(ctx, req.NamespacedName, &ingress)
+ if client.IgnoreNotFound(err) != nil {
+ log.Error(err, "Get Ingress failed")
+ return err
+ }
+ if err == nil {
+ log.Info("Delete Ingress")
+ if err := r.Delete(ctx, &ingress, client.PropagationPolicy(metav1.DeletePropagationBackground)); client.IgnoreNotFound(err) != nil {
+ log.Error(err, "Delete Ingress failed")
+ return err
+ }
+ }
+
+ var childPods v1.PodList
+ if err := r.List(ctx, &childPods, client.InNamespace(req.Namespace), client.MatchingFields{ownerKey: req.Name}); err != nil {
+ log.Error(err, "unable to list child Pods")
+ return err
+ }
+ for _, pod := range childPods.Items {
+ log.Info("Delete Pod", "Pod", pod.Name)
+ if err := r.Delete(ctx, &pod, client.PropagationPolicy(metav1.DeletePropagationBackground)); client.IgnoreNotFound(err) != nil {
+ log.Error(err, "Failed to delete pod")
+ return err
+ }
+ }
+ var childServices v1.ServiceList
+ if err := r.List(ctx, &childServices, client.InNamespace(req.Namespace), client.MatchingFields{ownerKey: req.Name}); err != nil {
+ log.Error(err, "unable to list child Services")
+ return err
+ }
+ for _, service := range childServices.Items {
+ log.Info("Delete Service", "Service", service.Name)
+ if err := r.Delete(ctx, &service, client.PropagationPolicy(metav1.DeletePropagationBackground)); client.IgnoreNotFound(err) != nil {
+ log.Error(err, "Failed to delete service")
+ return err
+ }
+ }
+ return nil
+}
+
+var (
+ ownerKey = ".metadata.controller"
+ apiGVStr = fedlearnerv2.GroupVersion.String()
+)
+
+// SetupWithManager sets up the controller with the Manager.
+func (r *FedAppReconciler) SetupWithManager(mgr ctrl.Manager) error {
+ // For more efficient lookups, Pods and Services are indexed locally by their controller's (FedApp's) name.
+ if err := mgr.GetFieldIndexer().IndexField(context.Background(), &v1.Pod{}, ownerKey, func(rawObj client.Object) []string {
+ // grab the pod object, extract the owner...
+ pod := rawObj.(*v1.Pod)
+ owner := metav1.GetControllerOf(pod)
+ if owner == nil {
+ return nil
+ }
+ // ...make sure it's a FedApp...
+ if owner.APIVersion != apiGVStr || owner.Kind != "FedApp" {
+ return nil
+ }
+
+ // ...and if so, return it
+ return []string{owner.Name}
+ }); err != nil {
+ return err
+ }
+ if err := mgr.GetFieldIndexer().IndexField(context.Background(), &v1.Service{}, ownerKey, func(rawObj client.Object) []string {
+ // grab the service object, extract the owner...
+ service := rawObj.(*v1.Service)
+ owner := metav1.GetControllerOf(service)
+ if owner == nil {
+ return nil
+ }
+ // ...make sure it's a FedApp...
+ if owner.APIVersion != apiGVStr || owner.Kind != "FedApp" {
+ return nil
+ }
+
+ // ...and if so, return it
+ return []string{owner.Name}
+ }); err != nil {
+ return err
+ }
+ return ctrl.NewControllerManagedBy(mgr).
+ For(&fedlearnerv2.FedApp{}).
+ Owns(&v1.Pod{}).
+ Complete(r)
+}
diff --git a/operator/controllers/fedapp_controller_test.go b/operator/controllers/fedapp_controller_test.go
new file mode 100644
index 000000000..cd33e07f8
--- /dev/null
+++ b/operator/controllers/fedapp_controller_test.go
@@ -0,0 +1,154 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+ "context"
+ "time"
+
+ fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+ . "github.com/onsi/ginkgo"
+ . "github.com/onsi/gomega"
+ corev1 "k8s.io/api/core/v1"
+ metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+ "k8s.io/apimachinery/pkg/types"
+)
+
+var _ = Describe("FedApp controller", func() {
+
+ // Define utility constants for object names and testing timeouts/durations and intervals.
+ const (
+ FedAppName = "test-fedapp"
+ FailedAppName = "failed-test-fedapp"
+ FedAppNamespace = "default"
+ FedReplicaType = "Worker"
+ timeout = time.Second * 10
+ interval = time.Millisecond * 250
+ )
+ var replicas int64 = 2
+ Context("When updating FedApp Status", func() {
+ It("Should create FedApp successfully", func() {
+ By("Creating a new FedApp")
+ ctx := context.Background()
+ fedapp := &fedlearnerv2.FedApp{
+ TypeMeta: metav1.TypeMeta{
+ APIVersion: "fedlearner.k8s.io/v1alpha1",
+ Kind: "FedApp",
+ },
+ ObjectMeta: metav1.ObjectMeta{
+ Name: FedAppName,
+ Namespace: FedAppNamespace,
+ },
+ Spec: fedlearnerv2.FedAppSpec{
+ FedReplicaSpecs: fedlearnerv2.FedReplicaSpecs{
+ FedReplicaType: fedlearnerv2.ReplicaSpec{
+ Replicas: &replicas,
+ Template: corev1.PodTemplateSpec{
+ Spec: corev1.PodSpec{
+ Containers: []corev1.Container{
+ {
+ Name: "test-container",
+ Image: "test-image",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+ Expect(k8sClient.Create(ctx, fedapp)).Should(Succeed())
+ fedappLookupKey := types.NamespacedName{Name: FedAppName, Namespace: FedAppNamespace}
+ createdFedApp := &fedlearnerv2.FedApp{}
+
+ // We'll need to retry getting this newly created FedApp, given that creation may not immediately happen.
+ Eventually(func() bool {
+ err := k8sClient.Get(ctx, fedappLookupKey, createdFedApp)
+ if err == nil && createdFedApp.Status.StartTime != nil {
+ return true
+ }
+ return false
+ }, timeout, interval).Should(BeTrue(), "should have startTime in the status")
+ // Make sure the default replica port from the CRD was properly applied.
+ Expect(createdFedApp.Spec.FedReplicaSpecs[FedReplicaType].Port.ContainerPort).Should(Equal(int32(50051)))
+ By("Checking Pods, Services and Ingress are created successfully")
+ var childPods corev1.PodList
+ Eventually(func() (int, error) {
+ err := k8sClient.List(ctx, &childPods)
+ if err != nil {
+ return -1, err
+ }
+ return len(childPods.Items), nil
+ }, timeout, interval).Should(Equal(2), "should create pods")
+
+ })
+ It("Should mark FedApp failed when pod creation fails", func() {
+ By("Creating a new FedApp")
+ ctx := context.Background()
+ fedapp := &fedlearnerv2.FedApp{
+ TypeMeta: metav1.TypeMeta{
+ APIVersion: "fedlearner.k8s.io/v1alpha1",
+ Kind: "FedApp",
+ },
+ ObjectMeta: metav1.ObjectMeta{
+ Name: FailedAppName,
+ Namespace: FedAppNamespace,
+ },
+ Spec: fedlearnerv2.FedAppSpec{
+ FedReplicaSpecs: fedlearnerv2.FedReplicaSpecs{
+ FedReplicaType: fedlearnerv2.ReplicaSpec{
+ Replicas: &replicas,
+ Template: corev1.PodTemplateSpec{
+ Spec: corev1.PodSpec{
+ Containers: []corev1.Container{
+ {
+ Name: "test-container",
+ // The leading space makes the image name invalid, so pod creation fails.
+ Image: " failed-image",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+ Expect(k8sClient.Create(ctx, fedapp)).Should(Succeed())
+ fedappLookupKey := types.NamespacedName{Name: FailedAppName, Namespace: FedAppNamespace}
+ createdFedApp := &fedlearnerv2.FedApp{}
+
+ // We'll need to retry getting this newly created FedApp, given that creation may not immediately happen.
+ Eventually(func() bool {
+ err := k8sClient.Get(ctx, fedappLookupKey, createdFedApp)
+ if err == nil {
+ conditions := createdFedApp.Status.Conditions
+ for i := range conditions {
+ if conditions[i].Type != fedlearnerv2.Succeeded {
+ continue
+ }
+ if conditions[i].Status != corev1.ConditionFalse {
+ break
+ }
+ if conditions[i].Reason == "CreatePodFailed" {
+ return true
+ }
+ }
+ }
+ return false
+ }, timeout, interval).Should(BeTrue(), "should fail because pod creation failed")
+ })
+ })
+
+})
diff --git a/operator/controllers/ingress.go b/operator/controllers/ingress.go
new file mode 100644
index 000000000..7d5ce6b11
--- /dev/null
+++ b/operator/controllers/ingress.go
@@ -0,0 +1,91 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package controllers + +import ( + "context" + "strings" + + fedlearnerv2 "fedlearner.net/operator/api/v1alpha1" + networking "k8s.io/api/networking/v1beta1" + "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + "k8s.io/apimachinery/pkg/util/intstr" + ctrl "sigs.k8s.io/controller-runtime" + "sigs.k8s.io/controller-runtime/pkg/log" +) + +func (r *FedAppReconciler) syncIngress(ctx context.Context, app *fedlearnerv2.FedApp) error { + log := log.FromContext(ctx) + var ingress networking.Ingress + err := r.Get(ctx, types.NamespacedName{Name: app.Name, Namespace: app.Namespace}, &ingress) + if errors.IsNotFound(err) { + ingressName := app.Name + labels := GenLabels(app) + annotations := map[string]string{ + //"kubernetes.io/ingress.class": ingressClassName, + "nginx.ingress.kubernetes.io/backend-protocol": "GRPC", + "nginx.ingress.kubernetes.io/configuration-snippet": "grpc_next_upstream_tries 5;", + "nginx.ingress.kubernetes.io/http2-insecure-port": "true", + } + + newIngress := &networking.Ingress{ + ObjectMeta: metav1.ObjectMeta{ + Name: ingressName, + Namespace: app.Namespace, + Labels: labels, + Annotations: annotations, + }, + // Explicitly set IngressClassName to nil for k8s backward compatibility + Spec: networking.IngressSpec{ + IngressClassName: nil, + }, + } + for rtype, spec := range app.Spec.FedReplicaSpecs { + replicas := int(*spec.Replicas) + rt := strings.ToLower(string(rtype)) + for index := 0; index < replicas; index++ { + path := networking.HTTPIngressPath{ + Backend: networking.IngressBackend{ + ServiceName: GenIndexName(app.Name, rt, index), + ServicePort: intstr.FromString(spec.Port.Name), + }, + } + host := GenIndexName(app.Name, rt, index) + IngressExtraHostSuffix + rule := networking.IngressRule{ + Host: host, + IngressRuleValue: networking.IngressRuleValue{ + HTTP: &networking.HTTPIngressRuleValue{ + Paths: []networking.HTTPIngressPath{path}, + }, + }, + } + newIngress.Spec.Rules = append(newIngress.Spec.Rules, rule) + } + } + if err := ctrl.SetControllerReference(app, newIngress, r.Scheme); err != nil { + return err + } + log.Info("Create Ingress", "Ingress", newIngress.Name) + err := r.Create(ctx, newIngress) + if err != nil && errors.IsAlreadyExists(err) { + return nil + } + return err + } + return err +} diff --git a/operator/controllers/options.go b/operator/controllers/options.go new file mode 100644 index 000000000..e046ad613 --- /dev/null +++ b/operator/controllers/options.go @@ -0,0 +1,20 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package controllers + +var ( + IngressExtraHostSuffix string +) diff --git a/operator/controllers/pod.go b/operator/controllers/pod.go new file mode 100644 index 000000000..217d08341 --- /dev/null +++ b/operator/controllers/pod.go @@ -0,0 +1,123 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+ "context"
+ "strconv"
+ "strings"
+
+ fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+ v1 "k8s.io/api/core/v1"
+ ctrl "sigs.k8s.io/controller-runtime"
+ "sigs.k8s.io/controller-runtime/pkg/log"
+)
+
+func (r *FedAppReconciler) CreatePod(ctx context.Context, app *fedlearnerv2.FedApp, spec *fedlearnerv2.ReplicaSpec, index int, rt string, podSliceHasFailed int) error {
+ log := log.FromContext(ctx)
+ podTemplate := spec.Template.DeepCopy()
+ labels := GenLabels(app)
+ labels[flReplicaTypeLabel] = rt
+ labels[flReplicaIndexLabel] = strconv.Itoa(index)
+ podTemplate.Name = GenIndexName(app.Name, rt, index) + "-retry-" + strconv.Itoa(podSliceHasFailed)
+ podTemplate.Namespace = app.Namespace
+ if podTemplate.Labels == nil {
+ podTemplate.Labels = make(map[string]string)
+ }
+ for key, value := range labels {
+ podTemplate.Labels[key] = value
+ }
+ // Set RestartPolicy to Never: the controller itself recreates pods according to the FedReplicaSpec.
+ podTemplate.Spec.RestartPolicy = v1.RestartPolicyNever
+
+ clusterSpecValue, err := makeClusterSpec(app.Namespace, app)
+ if err != nil {
+ log.Error(err, "unable to make cluster spec")
+ return err
+ }
+ for idx := range podTemplate.Spec.Containers {
+ container := &podTemplate.Spec.Containers[idx]
+ container.Env = ensureEnv(container.Env, v1.EnvVar{
+ Name: replicaIndex,
+ Value: strconv.Itoa(index),
+ })
+ container.Env = ensureEnv(container.Env, v1.EnvVar{
+ Name: serviceID,
+ Value: GenIndexName(app.Name, rt, index),
+ })
+ container.Env = ensureEnv(container.Env, v1.EnvVar{
+ Name: clusterSpec,
+ Value: clusterSpecValue,
+ })
+
+ // If the pod uses the host network, overwrite all ports with 0 to support auto port assignment.
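+ // (With hostNetwork the container shares the node's network namespace, so
+ // a fixed declared port could collide with other replicas on the same
+ // node; zeroing it presumably leaves the port choice to the process.)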
+ if podTemplate.Spec.HostNetwork {
+ for i := range container.Ports {
+ container.Ports[i].ContainerPort = 0
+ }
+ }
+
+ }
+
+ pod := &v1.Pod{
+ ObjectMeta: podTemplate.ObjectMeta,
+ Spec: podTemplate.Spec,
+ }
+ if err := ctrl.SetControllerReference(app, pod, r.Scheme); err != nil {
+ return err
+ }
+ log.Info("Create Pod", "Pod", pod.Name)
+ if err = r.Create(ctx, pod); err != nil {
+ return err
+ }
+ return nil
+}
+
+func ensureEnv(envVars []v1.EnvVar, item v1.EnvVar) []v1.EnvVar {
+ for idx := range envVars {
+ if envVars[idx].Name == item.Name {
+ envVars[idx] = item
+ return envVars
+ }
+ }
+ envVars = append(envVars, item)
+ return envVars
+}
+
+func GenLabels(app *fedlearnerv2.FedApp) map[string]string {
+ return map[string]string{
+ AppNameLabel: strings.Replace(app.Name, "/", "-", -1),
+ }
+}
+
+func makeClusterSpec(namespace string, app *fedlearnerv2.FedApp) (string, error) {
+ clusterSpec := NewClusterSpec(namespace, app)
+ bytes, err := clusterSpec.Marshal()
+ if err != nil {
+ return "", err
+ }
+ return string(bytes), nil
+}
+
+func AllPodsFailed(podSlice []*v1.Pod) bool {
+ for _, pod := range podSlice {
+ // TODO: support restart policy
+ if pod.Status.Phase != v1.PodFailed {
+ return false
+ }
+ }
+ return true
+}
diff --git a/operator/controllers/replica.go b/operator/controllers/replica.go
new file mode 100644
index 000000000..78b0aee42
--- /dev/null
+++ b/operator/controllers/replica.go
@@ -0,0 +1,124 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+ "context"
+ "strconv"
+ "strings"
+
+ fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+ v1 "k8s.io/api/core/v1"
+ "k8s.io/apimachinery/pkg/api/errors"
+ metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+ "k8s.io/apimachinery/pkg/labels"
+ "sigs.k8s.io/controller-runtime/pkg/log"
+)
+
+func (r *FedAppReconciler) SyncReplicas(ctx context.Context, app *fedlearnerv2.FedApp, rtype fedlearnerv2.FedReplicaType,
+ childPods *v1.PodList, spec *fedlearnerv2.ReplicaSpec) (ReplicaResult, error) {
+ log := log.FromContext(ctx)
+ rt := strings.ToLower(string(rtype))
+ pods, err := filterPodsForReplicaType(childPods, rt)
+ if err != nil {
+ log.Error(err, "failed to filter pods for replica type")
+ return ReplicaResult{}, err
+ }
+ replicas := int(*spec.Replicas)
+ podSlices := make([][]*v1.Pod, replicas)
+ for _, pod := range pods {
+ val, ok := pod.Labels[flReplicaIndexLabel]
+ if !ok {
+ log.Info("The pod does not have the index label.")
+ continue
+ }
+ index, err := strconv.Atoi(val)
+ if err != nil {
+ log.Error(err, "Error parsing the index label.")
+ continue
+ }
+ if index < 0 || index >= replicas {
+ log.Info("The label index is not expected", "index", index)
+ } else {
+ podSlices[index] = append(podSlices[index], pod)
+ }
+ }
+ SyncTerminatedPods(app, rtype, podSlices)
+ // Old FedApp CRDs in some environments do not have the terminatedPodsMap field.
+ // We can't update the status here, because terminatedPodsMap would be nil after such an update.
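+ // TerminatedPodsMap is initialized at the top of Reconcile before
+ // SyncReplicas is called, so the dereference below is safe.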
+ terminatedPods := *app.Status.TerminatedPodsMap[rtype]
+ failedPodsNames := GetAllFailedPodsNames(terminatedPods)
+ if len(failedPodsNames) >= int(*spec.BackoffLimit) {
+ // TODO(xiangyuxuan.prs): remove failed pod name, and add pod details in fedapp status.
+ app.Status.Conditions, _ = ensureConditionStatus(app.Status.Conditions, fedlearnerv2.Succeeded, v1.ConditionFalse, "BackoffLimitExceeded", "FedApp has reached the specified backoff limit: "+strings.Join(failedPodsNames, ", "))
+ if err := r.Status().Update(ctx, app); err != nil {
+ log.Error(err, "unable to update FedApp status BackoffLimitExceeded")
+ return ReplicaResult{}, err
+ }
+ // Requeue (via the status update event) to release the resources.
+ return ReplicaResult{isFailed: true}, nil
+ }
+
+ for index, podSlice := range podSlices {
+ if IfSliceHasSucceeded(terminatedPods, index) {
+ continue
+ }
+ needCreate := AllPodsFailed(podSlice)
+ if !needCreate {
+ continue
+ }
+ sliceHasFailedNum := len(terminatedPods.Failed[index])
+ // AlreadyExists is ignored: the pod may have been created by a previous reconcile.
+ if err := r.CreatePod(ctx, app, spec, index, rt, sliceHasFailedNum); !errors.IsAlreadyExists(err) {
+ if err == nil {
+ return ReplicaResult{}, nil
+ }
+ log.Error(err, "create Pod failed")
+ app.Status.Conditions, _ = ensureConditionStatus(app.Status.Conditions, fedlearnerv2.Succeeded, v1.ConditionFalse, "CreatePodFailed", err.Error())
+ if err := r.Status().Update(ctx, app); err != nil {
+ log.Error(err, "unable to update FedApp status CreatePodFailed")
+ return ReplicaResult{}, err
+ }
+ return ReplicaResult{isFailed: true}, nil
+ }
+
+ }
+ replicaCompleted := AllSliceCompletedOnce(terminatedPods, replicas)
+ return ReplicaResult{isCompleted: !*spec.MustSuccess || replicaCompleted}, nil
+}
+
+// filterPodsForReplicaType returns the pods belonging to a replicaType.
+func filterPodsForReplicaType(childPods *v1.PodList, replicaType string) ([]*v1.Pod, error) {
+ var result []*v1.Pod
+
+ replicaSelector := &metav1.LabelSelector{
+ MatchLabels: make(map[string]string),
+ }
+
+ replicaSelector.MatchLabels[flReplicaTypeLabel] = replicaType
+
+ selector, err := metav1.LabelSelectorAsSelector(replicaSelector)
+ if err != nil {
+ return nil, err
+ }
+ pods := childPods.Items
+ for i := range pods {
+ if !selector.Matches(labels.Set(pods[i].Labels)) {
+ continue
+ }
+ result = append(result, &pods[i])
+ }
+ return result, nil
+}
diff --git a/operator/controllers/service.go b/operator/controllers/service.go
new file mode 100644
index 000000000..be50d098e
--- /dev/null
+++ b/operator/controllers/service.go
@@ -0,0 +1,177 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+ "context"
+ "strconv"
+ "strings"
+
+ fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+ v1 "k8s.io/api/core/v1"
+ "k8s.io/apimachinery/pkg/api/errors"
+ metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+ "k8s.io/apimachinery/pkg/labels"
+ "k8s.io/apimachinery/pkg/util/intstr"
+ ctrl "sigs.k8s.io/controller-runtime"
+ "sigs.k8s.io/controller-runtime/pkg/client"
+ "sigs.k8s.io/controller-runtime/pkg/log"
+)
+
+func (r *FedAppReconciler) syncServices(ctx context.Context, app *fedlearnerv2.FedApp) error {
+ log := log.FromContext(ctx)
+ var services v1.ServiceList
+ if err := r.List(ctx, &services, client.InNamespace(app.Namespace), client.MatchingFields{ownerKey: app.Name}); err != nil {
+ log.Error(err, "unable to list child Services")
+ return err
+ }
+ for rtype, spec := range app.Spec.FedReplicaSpecs {
+ rt := strings.ToLower(string(rtype))
+ replicas := int(*spec.Replicas)
+ // Get all services for the type rt.
+ typeServices, err := filterServicesForReplicaType(&services, rt)
+ if err != nil {
+ return err
+ }
+ serviceSlices := makeServiceSlicesByIndex(ctx, typeServices, replicas)
+ for index, serviceSlice := range serviceSlices {
+ if len(serviceSlice) == 0 {
+ log.Info("need to create a new service", "replicaType", string(rtype), "index", index)
+ if err = r.createNewService(ctx, app, rtype, spec, index); err != nil {
+ return err
+ }
+ }
+ }
+
+ }
+ return nil
+}
+
+// createNewService creates a new service for the given index and type.
+func (r *FedAppReconciler) createNewService(ctx context.Context, app *fedlearnerv2.FedApp, rtype fedlearnerv2.FedReplicaType, spec fedlearnerv2.ReplicaSpec, index int) error {
+ log := log.FromContext(ctx)
+ rt := strings.ToLower(string(rtype))
+
+ // Append flReplicaTypeLabel and flReplicaIndexLabel labels.
+ labels := GenLabels(app)
+ labels[flReplicaTypeLabel] = rt
+ labels[flReplicaIndexLabel] = strconv.Itoa(index)
+ ports := GetPortsFromFedReplicaSpecs(app.Spec.FedReplicaSpecs[rtype])
+ var servicePorts []v1.ServicePort
+ for _, port := range ports {
+ servicePorts = append(servicePorts, v1.ServicePort{
+ Name: port.Name,
+ Port: port.ContainerPort,
+ TargetPort: intstr.FromString(port.Name),
+ })
+ }
+ service := &v1.Service{
+ Spec: v1.ServiceSpec{
+ Selector: labels,
+ Ports: servicePorts,
+ },
+ }
+
+ service.Name = GenIndexName(app.Name, rt, index)
+ service.Namespace = app.Namespace
+ service.Labels = labels
+ if err := ctrl.SetControllerReference(app, service, r.Scheme); err != nil {
+ return err
+ }
+ log.Info("Create Service", "Service", service.Name)
+ err := r.Create(ctx, service)
+ if err != nil && errors.IsAlreadyExists(err) {
+ return nil
+ }
+ return err
+}
+
+// GetPortsFromFedReplicaSpecs gets the ports of all containers in the replica spec, plus the default replica port.
+func GetPortsFromFedReplicaSpecs(replicaSpec fedlearnerv2.ReplicaSpec) []v1.ContainerPort {
+ var ports []v1.ContainerPort
+ containers := replicaSpec.Template.Spec.Containers
+ for _, container := range containers {
+ for _, port := range container.Ports {
+ if PortNotInPortList(port, ports) {
+ ports = append(ports, port)
+ }
+ }
+ }
+ if PortNotInPortList(*replicaSpec.Port, ports) {
+ ports = append(ports, *replicaSpec.Port)
+ }
+ return ports
+}
+
+func PortNotInPortList(port v1.ContainerPort, ports []v1.ContainerPort) bool {
+ for _, p := range ports {
+ if p.Name == port.Name || p.ContainerPort == port.ContainerPort {
+ return false
+ }
+ }
+ return true
+}
+
+// filterServicesForReplicaType returns the services belonging to a replicaType.
+func filterServicesForReplicaType(servicesList *v1.ServiceList, replicaType string) ([]*v1.Service, error) {
+ var result []*v1.Service
+ replicaSelector := &metav1.LabelSelector{
+ MatchLabels: make(map[string]string),
+ }
+
+ replicaSelector.MatchLabels[flReplicaTypeLabel] = replicaType
+ selector, err := metav1.LabelSelectorAsSelector(replicaSelector)
+ if err != nil {
+ return nil, err
+ }
+ services := servicesList.Items
+ for index := range services {
+ if !selector.Matches(labels.Set(services[index].Labels)) {
+ continue
+ }
+ result = append(result, &services[index])
+ }
+ return result, nil
+}
+
+// makeServiceSlicesByIndex returns a slice whose elements are slices of services.
+// Assume the return object is serviceSlices, then serviceSlices[i] is an
+// array of pointers to services corresponding to Services for replica i.
+func makeServiceSlicesByIndex(ctx context.Context, services []*v1.Service, replicas int) [][]*v1.Service {
+ log := log.FromContext(ctx)
+ serviceSlices := make([][]*v1.Service, replicas)
+ for _, service := range services {
+ if _, ok := service.Labels[flReplicaIndexLabel]; !ok {
+ log.Info("The service does not have the index label.")
+ continue
+ }
+ index, err := strconv.Atoi(service.Labels[flReplicaIndexLabel])
+ if err != nil {
+ log.Error(err, "Error parsing the index label.")
+ continue
+ }
+ if index < 0 || index >= replicas {
+ log.Info("The label index is not expected", "index", index, "replicas", replicas)
+ continue
+ }
+ serviceSlices[index] = append(serviceSlices[index], service)
+ }
+ return serviceSlices
+}
diff --git a/operator/controllers/suite_test.go b/operator/controllers/suite_test.go
new file mode 100644
index 000000000..552077940
--- /dev/null
+++ b/operator/controllers/suite_test.go
@@ -0,0 +1,98 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+ "path/filepath"
+ "testing"
+
+ ctrl "sigs.k8s.io/controller-runtime"
+
+ . "github.com/onsi/ginkgo"
+ . "github.com/onsi/gomega"
+ "k8s.io/client-go/kubernetes/scheme"
+ "k8s.io/client-go/rest"
+ "sigs.k8s.io/controller-runtime/pkg/client"
+ "sigs.k8s.io/controller-runtime/pkg/envtest"
+ "sigs.k8s.io/controller-runtime/pkg/envtest/printer"
+ logf "sigs.k8s.io/controller-runtime/pkg/log"
+ "sigs.k8s.io/controller-runtime/pkg/log/zap"
+
+ fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+ //+kubebuilder:scaffold:imports
+)
+
+// These tests use Ginkgo (BDD-style Go testing framework). Refer to
+// http://onsi.github.io/ginkgo/ to learn more about Ginkgo.
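+//
+// The suite boots a local control plane with envtest, installs the FedApp CRD
+// from deploy_charts/fedapp.yaml, and runs the controller against it.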
+
+var cfg *rest.Config
+var k8sClient client.Client
+var testEnv *envtest.Environment
+
+func TestAPIs(t *testing.T) {
+ RegisterFailHandler(Fail)
+
+ RunSpecsWithDefaultAndCustomReporters(t,
+ "Controller Suite",
+ []Reporter{printer.NewlineReporter{}})
+}
+
+var _ = BeforeSuite(func() {
+ logf.SetLogger(zap.New(zap.WriteTo(GinkgoWriter), zap.UseDevMode(true)))
+
+ By("bootstrapping test environment")
+ testEnv = &envtest.Environment{
+ CRDDirectoryPaths: []string{filepath.Join("..", "deploy_charts", "fedapp.yaml")},
+ ErrorIfCRDPathMissing: true,
+ }
+
+ // Assign the package-level cfg rather than shadowing it with :=.
+ var err error
+ cfg, err = testEnv.Start()
+ Expect(err).NotTo(HaveOccurred())
+ Expect(cfg).NotTo(BeNil())
+
+ err = fedlearnerv2.AddToScheme(scheme.Scheme)
+ Expect(err).NotTo(HaveOccurred())
+
+ //+kubebuilder:scaffold:scheme
+
+ k8sClient, err = client.New(cfg, client.Options{Scheme: scheme.Scheme})
+ Expect(err).NotTo(HaveOccurred())
+ Expect(k8sClient).NotTo(BeNil())
+
+ k8sManager, err := ctrl.NewManager(cfg, ctrl.Options{
+ Scheme: scheme.Scheme,
+ })
+ Expect(err).ToNot(HaveOccurred())
+
+ err = (&FedAppReconciler{
+ Client: k8sManager.GetClient(),
+ Scheme: k8sManager.GetScheme(),
+ }).SetupWithManager(k8sManager)
+ Expect(err).ToNot(HaveOccurred())
+
+ go func() {
+ defer GinkgoRecover()
+ err = k8sManager.Start(ctrl.SetupSignalHandler())
+ Expect(err).ToNot(HaveOccurred(), "failed to run manager")
+ }()
+
+}, 60)
+
+var _ = AfterSuite(func() {
+ By("tearing down the test environment")
+ err := testEnv.Stop()
+ Expect(err).NotTo(HaveOccurred())
+})
diff --git a/operator/controllers/terminated_pods.go b/operator/controllers/terminated_pods.go
new file mode 100644
index 000000000..06deb4161
--- /dev/null
+++ b/operator/controllers/terminated_pods.go
@@ -0,0 +1,90 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package controllers
+
+import (
+ fedlearnerv2 "fedlearner.net/operator/api/v1alpha1"
+ v1 "k8s.io/api/core/v1"
+)
+
+func GetAllFailedPodsNames(tPods fedlearnerv2.TerminatedPods) []string {
+ result := []string{}
+ for _, podSet := range tPods.Failed {
+ result = append(result, getPodNames(podSet)...)
+ } + return result +} + +func getPodNames(set fedlearnerv2.PodSet) []string { + result := []string{} + for p := range set { + result = append(result, p) + } + return result +} + +func AllSliceCompletedOnce(tPods fedlearnerv2.TerminatedPods, replicas int) bool { + for i := 0; i < replicas; i++ { + if !IfSliceHasSucceeded(tPods, i) { + return false + } + } + return true +} + +func IfSliceHasSucceeded(tPods fedlearnerv2.TerminatedPods, index int) bool { + return len(tPods.Succeeded[index]) > 0 +} + +func SyncTerminatedPods(app *fedlearnerv2.FedApp, rtype fedlearnerv2.FedReplicaType, podSlices [][]*v1.Pod) { + for index, podSlice := range podSlices { + for _, pod := range podSlice { + // TODO: support restart policy + if pod.Status.Phase == v1.PodSucceeded { + setAdd(app.Status.TerminatedPodsMap[rtype].Succeeded[index], pod.Name) + } + if pod.Status.Phase == v1.PodFailed { + setAdd(app.Status.TerminatedPodsMap[rtype].Failed[index], pod.Name) + } + + } + } +} + +func setAdd(set fedlearnerv2.PodSet, name string) { + // TODO: remove finalizer when pod created with finalizer. + set[name] = struct{}{} +} + +func InitTerminatedPodsMap(app fedlearnerv2.FedApp) map[fedlearnerv2.FedReplicaType]*fedlearnerv2.TerminatedPods { + terminatedPodsMap := fedlearnerv2.TerminatedPodsMap{} + + for rtype, spec := range app.Spec.FedReplicaSpecs { + replicas := int(*spec.Replicas) + succeeded := make([]fedlearnerv2.PodSet, replicas) + failed := make([]fedlearnerv2.PodSet, replicas) + for i := range succeeded { + succeeded[i] = fedlearnerv2.PodSet{} + } + for i := range failed { + failed[i] = fedlearnerv2.PodSet{} + } + terminatedPodsMap[rtype] = &fedlearnerv2.TerminatedPods{ + Succeeded: succeeded, Failed: failed, + } + } + return terminatedPodsMap +} diff --git a/operator/deploy_charts/fedapp.yaml b/operator/deploy_charts/fedapp.yaml new file mode 100644 index 000000000..eafb0eb2e --- /dev/null +++ b/operator/deploy_charts/fedapp.yaml @@ -0,0 +1,7508 @@ + +--- +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + annotations: + api-approved.kubernetes.io: https://github.com/kubernetes/kubernetes/pull/78458 + controller-gen.kubebuilder.io/version: v0.7.0 + creationTimestamp: null + name: fedapps.fedlearner.k8s.io +spec: + group: fedlearner.k8s.io + names: + kind: FedApp + listKind: FedAppList + plural: fedapps + singular: fedapp + scope: Namespaced + versions: + - name: v1alpha1 + schema: + openAPIV3Schema: + description: FedApp is the Schema for the fedapps API + properties: + apiVersion: + description: 'APIVersion defines the versioned schema of this representation + of an object. Servers should convert recognized schemas to the latest + internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' + type: string + kind: + description: 'Kind is a string value representing the REST resource this + object represents. Servers may infer this from the endpoint the client + submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' + type: string + metadata: + type: object + spec: + description: FedAppSpec defines the desired state of FedApp + properties: + activeDeadlineSeconds: + description: Specifies the duration in seconds relative to the startTime + that the job may be active before the system tries to terminate + it; value must be positive integer. 
+ format: int64
+ type: integer
+ fedReplicaSpecs:
+ additionalProperties:
+ description: ReplicaSpec is a description of the replica
+ properties:
+ backoffLimit:
+ default: 1
+ description: Optional number of retries before marking this
+ job failed.
+ format: int64
+ maximum: 100
+ minimum: 1
+ type: integer
+ mustSuccess:
+ default: true
+ description: Whether the success of all pods of this replica
+ is necessary for marking the FedApp as complete.
+ type: boolean
+ port:
+ default:
+ containerPort: 50051
+ name: flapp-port
+ protocol: TCP
+ description: ContainerPort represents a network port in a single
+ container.
+ properties:
+ containerPort:
+ description: Number of port to expose on the pod's IP address.
+ This must be a valid port number, 0 < x < 65536.
+ format: int32
+ type: integer
+ hostIP:
+ description: What host IP to bind the external port to.
+ type: string
+ hostPort:
+ description: Number of port to expose on the host. If specified,
+ this must be a valid port number, 0 < x < 65536. If HostNetwork
+ is specified, this must match ContainerPort. Most containers
+ do not need this.
+ format: int32
+ type: integer
+ name:
+ description: If specified, this must be an IANA_SVC_NAME
+ and unique within the pod. Each named port in a pod must
+ have a unique name. Name for the port that can be referred
+ to by services.
+ type: string
+ protocol:
+ default: TCP
+ description: Protocol for port. Must be UDP, TCP, or SCTP.
+ Defaults to "TCP".
+ type: string
+ required:
+ - containerPort
+ type: object
+ replicas:
+ default: 0
+ description: Replicas is the desired number of replicas of the
+ given template.
+ format: int64
+ maximum: 200
+ minimum: 0
+ type: integer
+ restartPolicy:
+ default: OnFailure
+ description: Restart policy for all replicas within the app.
+ One of Always, OnFailure, Never and ExitCode.
+ type: string
+ template:
+ description: Template is the object that describes the pod that
+ will be created for this replica.
+ properties:
+ metadata:
+ description: 'Standard object''s metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata'
+ type: object
+ spec:
+ description: 'Specification of the desired behavior of the
+ pod. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status'
+ properties:
+ activeDeadlineSeconds:
+ description: Optional duration in seconds the pod may
+ be active on the node relative to StartTime before
+ the system will actively try to mark it failed and
+ kill associated containers. Value must be a positive
+ integer.
+ format: int64
+ type: integer
+ affinity:
+ description: If specified, the pod's scheduling constraints
+ properties:
+ nodeAffinity:
+ description: Describes node affinity scheduling
+ rules for the pod.
+ properties:
+ preferredDuringSchedulingIgnoredDuringExecution:
+ description: The scheduler will prefer to schedule
+ pods to nodes that satisfy the affinity expressions
+ specified by this field, but it may choose
+ a node that violates one or more of the expressions.
+ The node that is most preferred is the one
+ with the greatest sum of weights, i.e. for
+ each node that meets all of the scheduling
+ requirements (resource request, requiredDuringScheduling
+ affinity expressions, etc.), compute a sum
+ by iterating through the elements of this
+ field and adding "weight" to the sum if the
+ node matches the corresponding matchExpressions;
+ the node(s) with the highest sum are the most
+ preferred.
+ items: + description: An empty preferred scheduling + term matches all objects with implicit weight + 0 (i.e. it's a no-op). A null preferred + scheduling term matches no objects (i.e. + is also a no-op). + properties: + preference: + description: A node selector term, associated + with the corresponding weight. + properties: + matchExpressions: + description: A list of node selector + requirements by node's labels. + items: + description: A node selector requirement + is a selector that contains values, + a key, and an operator that relates + the key and values. + properties: + key: + description: The label key that + the selector applies to. + type: string + operator: + description: Represents a key's + relationship to a set of values. + Valid operators are In, NotIn, + Exists, DoesNotExist. Gt, + and Lt. + type: string + values: + description: An array of string + values. If the operator is + In or NotIn, the values array + must be non-empty. If the + operator is Exists or DoesNotExist, + the values array must be empty. + If the operator is Gt or Lt, + the values array must have + a single element, which will + be interpreted as an integer. + This array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchFields: + description: A list of node selector + requirements by node's fields. + items: + description: A node selector requirement + is a selector that contains values, + a key, and an operator that relates + the key and values. + properties: + key: + description: The label key that + the selector applies to. + type: string + operator: + description: Represents a key's + relationship to a set of values. + Valid operators are In, NotIn, + Exists, DoesNotExist. Gt, + and Lt. + type: string + values: + description: An array of string + values. If the operator is + In or NotIn, the values array + must be non-empty. If the + operator is Exists or DoesNotExist, + the values array must be empty. + If the operator is Gt or Lt, + the values array must have + a single element, which will + be interpreted as an integer. + This array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + type: object + weight: + description: Weight associated with matching + the corresponding nodeSelectorTerm, + in the range 1-100. + format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + requiredDuringSchedulingIgnoredDuringExecution: + description: If the affinity requirements specified + by this field are not met at scheduling time, + the pod will not be scheduled onto the node. + If the affinity requirements specified by + this field cease to be met at some point during + pod execution (e.g. due to an update), the + system may or may not try to eventually evict + the pod from its node. + properties: + nodeSelectorTerms: + description: Required. A list of node selector + terms. The terms are ORed. + items: + description: A null or empty node selector + term matches no objects. The requirements + of them are ANDed. The TopologySelectorTerm + type implements a subset of the NodeSelectorTerm. + properties: + matchExpressions: + description: A list of node selector + requirements by node's labels. + items: + description: A node selector requirement + is a selector that contains values, + a key, and an operator that relates + the key and values. 
+ properties: + key: + description: The label key that + the selector applies to. + type: string + operator: + description: Represents a key's + relationship to a set of values. + Valid operators are In, NotIn, + Exists, DoesNotExist. Gt, + and Lt. + type: string + values: + description: An array of string + values. If the operator is + In or NotIn, the values array + must be non-empty. If the + operator is Exists or DoesNotExist, + the values array must be empty. + If the operator is Gt or Lt, + the values array must have + a single element, which will + be interpreted as an integer. + This array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchFields: + description: A list of node selector + requirements by node's fields. + items: + description: A node selector requirement + is a selector that contains values, + a key, and an operator that relates + the key and values. + properties: + key: + description: The label key that + the selector applies to. + type: string + operator: + description: Represents a key's + relationship to a set of values. + Valid operators are In, NotIn, + Exists, DoesNotExist. Gt, + and Lt. + type: string + values: + description: An array of string + values. If the operator is + In or NotIn, the values array + must be non-empty. If the + operator is Exists or DoesNotExist, + the values array must be empty. + If the operator is Gt or Lt, + the values array must have + a single element, which will + be interpreted as an integer. + This array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + type: object + type: array + required: + - nodeSelectorTerms + type: object + type: object + podAffinity: + description: Describes pod affinity scheduling rules + (e.g. co-locate this pod in the same node, zone, + etc. as some other pod(s)). + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: The scheduler will prefer to schedule + pods to nodes that satisfy the affinity expressions + specified by this field, but it may choose + a node that violates one or more of the expressions. + The node that is most preferred is the one + with the greatest sum of weights, i.e. for + each node that meets all of the scheduling + requirements (resource request, requiredDuringScheduling + affinity expressions, etc.), compute a sum + by iterating through the elements of this + field and adding "weight" to the sum if the + node has pods which matches the corresponding + podAffinityTerm; the node(s) with the highest + sum are the most preferred. + items: + description: The weights of all of the matched + WeightedPodAffinityTerm fields are added + per-node to find the most preferred node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity + term, associated with the corresponding + weight. + properties: + labelSelector: + description: A label query over a + set of resources, in this case pods. + properties: + matchExpressions: + description: matchExpressions + is a list of label selector + requirements. The requirements + are ANDed. + items: + description: A label selector + requirement is a selector + that contains values, a key, + and an operator that relates + the key and values. + properties: + key: + description: key is the + label key that the selector + applies to. 
+ type: string + operator: + description: operator represents + a key's relationship to + a set of values. Valid + operators are In, NotIn, + Exists and DoesNotExist. + type: string + values: + description: values is an + array of string values. + If the operator is In + or NotIn, the values array + must be non-empty. If + the operator is Exists + or DoesNotExist, the values + array must be empty. This + array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a + map of {key,value} pairs. A + single {key,value} in the matchLabels + map is equivalent to an element + of matchExpressions, whose key + field is "key", the operator + is "In", and the values array + contains only "value". The requirements + are ANDed. + type: object + type: object + namespaceSelector: + description: A label query over the + set of namespaces that the term + applies to. The term is applied + to the union of the namespaces selected + by this field and the ones listed + in the namespaces field. null selector + and null or empty namespaces list + means "this pod's namespace". An + empty selector ({}) matches all + namespaces. This field is beta-level + and is only honored when PodAffinityNamespaceSelector + feature is enabled. + properties: + matchExpressions: + description: matchExpressions + is a list of label selector + requirements. The requirements + are ANDed. + items: + description: A label selector + requirement is a selector + that contains values, a key, + and an operator that relates + the key and values. + properties: + key: + description: key is the + label key that the selector + applies to. + type: string + operator: + description: operator represents + a key's relationship to + a set of values. Valid + operators are In, NotIn, + Exists and DoesNotExist. + type: string + values: + description: values is an + array of string values. + If the operator is In + or NotIn, the values array + must be non-empty. If + the operator is Exists + or DoesNotExist, the values + array must be empty. This + array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a + map of {key,value} pairs. A + single {key,value} in the matchLabels + map is equivalent to an element + of matchExpressions, whose key + field is "key", the operator + is "In", and the values array + contains only "value". The requirements + are ANDed. + type: object + type: object + namespaces: + description: namespaces specifies + a static list of namespace names + that the term applies to. The term + is applied to the union of the namespaces + listed in this field and the ones + selected by namespaceSelector. null + or empty namespaces list and null + namespaceSelector means "this pod's + namespace" + items: + type: string + type: array + topologyKey: + description: This pod should be co-located + (affinity) or not co-located (anti-affinity) + with the pods matching the labelSelector + in the specified namespaces, where + co-located is defined as running + on a node whose value of the label + with key topologyKey matches that + of any node on which any of the + selected pods is running. Empty + topologyKey is not allowed. 
+ type: string + required: + - topologyKey + type: object + weight: + description: weight associated with matching + the corresponding podAffinityTerm, in + the range 1-100. + format: int32 + type: integer + required: + - podAffinityTerm + - weight + type: object + type: array + requiredDuringSchedulingIgnoredDuringExecution: + description: If the affinity requirements specified + by this field are not met at scheduling time, + the pod will not be scheduled onto the node. + If the affinity requirements specified by + this field cease to be met at some point during + pod execution (e.g. due to a pod label update), + the system may or may not try to eventually + evict the pod from its node. When there are + multiple elements, the lists of nodes corresponding + to each podAffinityTerm are intersected, i.e. + all terms must be satisfied. + items: + description: Defines a set of pods (namely + those matching the labelSelector relative + to the given namespace(s)) that this pod + should be co-located (affinity) or not co-located + (anti-affinity) with, where co-located is + defined as running on a node whose value + of the label with key matches + that of any node on which a pod of the set + of pods is running + properties: + labelSelector: + description: A label query over a set + of resources, in this case pods. + properties: + matchExpressions: + description: matchExpressions is a + list of label selector requirements. + The requirements are ANDed. + items: + description: A label selector requirement + is a selector that contains values, + a key, and an operator that relates + the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: operator represents + a key's relationship to a + set of values. Valid operators + are In, NotIn, Exists and + DoesNotExist. + type: string + values: + description: values is an array + of string values. If the operator + is In or NotIn, the values + array must be non-empty. If + the operator is Exists or + DoesNotExist, the values array + must be empty. This array + is replaced during a strategic + merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. A single {key,value} + in the matchLabels map is equivalent + to an element of matchExpressions, + whose key field is "key", the operator + is "In", and the values array contains + only "value". The requirements are + ANDed. + type: object + type: object + namespaceSelector: + description: A label query over the set + of namespaces that the term applies + to. The term is applied to the union + of the namespaces selected by this field + and the ones listed in the namespaces + field. null selector and null or empty + namespaces list means "this pod's namespace". + An empty selector ({}) matches all namespaces. + This field is beta-level and is only + honored when PodAffinityNamespaceSelector + feature is enabled. + properties: + matchExpressions: + description: matchExpressions is a + list of label selector requirements. + The requirements are ANDed. + items: + description: A label selector requirement + is a selector that contains values, + a key, and an operator that relates + the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. 
+ type: string + operator: + description: operator represents + a key's relationship to a + set of values. Valid operators + are In, NotIn, Exists and + DoesNotExist. + type: string + values: + description: values is an array + of string values. If the operator + is In or NotIn, the values + array must be non-empty. If + the operator is Exists or + DoesNotExist, the values array + must be empty. This array + is replaced during a strategic + merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. A single {key,value} + in the matchLabels map is equivalent + to an element of matchExpressions, + whose key field is "key", the operator + is "In", and the values array contains + only "value". The requirements are + ANDed. + type: object + type: object + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. The term is applied to the + union of the namespaces listed in this + field and the ones selected by namespaceSelector. + null or empty namespaces list and null + namespaceSelector means "this pod's + namespace" + items: + type: string + type: array + topologyKey: + description: This pod should be co-located + (affinity) or not co-located (anti-affinity) + with the pods matching the labelSelector + in the specified namespaces, where co-located + is defined as running on a node whose + value of the label with key topologyKey + matches that of any node on which any + of the selected pods is running. Empty + topologyKey is not allowed. + type: string + required: + - topologyKey + type: object + type: array + type: object + podAntiAffinity: + description: Describes pod anti-affinity scheduling + rules (e.g. avoid putting this pod in the same + node, zone, etc. as some other pod(s)). + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: The scheduler will prefer to schedule + pods to nodes that satisfy the anti-affinity + expressions specified by this field, but it + may choose a node that violates one or more + of the expressions. The node that is most + preferred is the one with the greatest sum + of weights, i.e. for each node that meets + all of the scheduling requirements (resource + request, requiredDuringScheduling anti-affinity + expressions, etc.), compute a sum by iterating + through the elements of this field and adding + "weight" to the sum if the node has pods which + matches the corresponding podAffinityTerm; + the node(s) with the highest sum are the most + preferred. + items: + description: The weights of all of the matched + WeightedPodAffinityTerm fields are added + per-node to find the most preferred node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity + term, associated with the corresponding + weight. + properties: + labelSelector: + description: A label query over a + set of resources, in this case pods. + properties: + matchExpressions: + description: matchExpressions + is a list of label selector + requirements. The requirements + are ANDed. + items: + description: A label selector + requirement is a selector + that contains values, a key, + and an operator that relates + the key and values. + properties: + key: + description: key is the + label key that the selector + applies to. + type: string + operator: + description: operator represents + a key's relationship to + a set of values. 
Valid + operators are In, NotIn, + Exists and DoesNotExist. + type: string + values: + description: values is an + array of string values. + If the operator is In + or NotIn, the values array + must be non-empty. If + the operator is Exists + or DoesNotExist, the values + array must be empty. This + array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a + map of {key,value} pairs. A + single {key,value} in the matchLabels + map is equivalent to an element + of matchExpressions, whose key + field is "key", the operator + is "In", and the values array + contains only "value". The requirements + are ANDed. + type: object + type: object + namespaceSelector: + description: A label query over the + set of namespaces that the term + applies to. The term is applied + to the union of the namespaces selected + by this field and the ones listed + in the namespaces field. null selector + and null or empty namespaces list + means "this pod's namespace". An + empty selector ({}) matches all + namespaces. This field is beta-level + and is only honored when PodAffinityNamespaceSelector + feature is enabled. + properties: + matchExpressions: + description: matchExpressions + is a list of label selector + requirements. The requirements + are ANDed. + items: + description: A label selector + requirement is a selector + that contains values, a key, + and an operator that relates + the key and values. + properties: + key: + description: key is the + label key that the selector + applies to. + type: string + operator: + description: operator represents + a key's relationship to + a set of values. Valid + operators are In, NotIn, + Exists and DoesNotExist. + type: string + values: + description: values is an + array of string values. + If the operator is In + or NotIn, the values array + must be non-empty. If + the operator is Exists + or DoesNotExist, the values + array must be empty. This + array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a + map of {key,value} pairs. A + single {key,value} in the matchLabels + map is equivalent to an element + of matchExpressions, whose key + field is "key", the operator + is "In", and the values array + contains only "value". The requirements + are ANDed. + type: object + type: object + namespaces: + description: namespaces specifies + a static list of namespace names + that the term applies to. The term + is applied to the union of the namespaces + listed in this field and the ones + selected by namespaceSelector. null + or empty namespaces list and null + namespaceSelector means "this pod's + namespace" + items: + type: string + type: array + topologyKey: + description: This pod should be co-located + (affinity) or not co-located (anti-affinity) + with the pods matching the labelSelector + in the specified namespaces, where + co-located is defined as running + on a node whose value of the label + with key topologyKey matches that + of any node on which any of the + selected pods is running. Empty + topologyKey is not allowed. + type: string + required: + - topologyKey + type: object + weight: + description: weight associated with matching + the corresponding podAffinityTerm, in + the range 1-100. 
+                                      format: int32
+                                      type: integer
+                                  required:
+                                  - podAffinityTerm
+                                  - weight
+                                  type: object
+                                type: array
+                              requiredDuringSchedulingIgnoredDuringExecution:
+                                description: If the anti-affinity requirements
+                                  specified by this field are not met at scheduling
+                                  time, the pod will not be scheduled onto the
+                                  node. If the anti-affinity requirements specified
+                                  by this field cease to be met at some point
+                                  during pod execution (e.g. due to a pod label
+                                  update), the system may or may not try to
+                                  eventually evict the pod from its node. When
+                                  there are multiple elements, the lists of
+                                  nodes corresponding to each podAffinityTerm
+                                  are intersected, i.e. all terms must be satisfied.
+                                items:
+                                  description: Defines a set of pods (namely
+                                    those matching the labelSelector relative
+                                    to the given namespace(s)) that this pod
+                                    should be co-located (affinity) or not co-located
+                                    (anti-affinity) with, where co-located is
+                                    defined as running on a node whose value
+                                    of the label with key <topologyKey> matches
+                                    that of any node on which a pod of the set
+                                    of pods is running
+                                  properties:
+                                    labelSelector:
+                                      description: A label query over a set
+                                        of resources, in this case pods.
+                                      properties:
+                                        matchExpressions:
+                                          description: matchExpressions is a
+                                            list of label selector requirements.
+                                            The requirements are ANDed.
+                                          items:
+                                            description: A label selector requirement
+                                              is a selector that contains values,
+                                              a key, and an operator that relates
+                                              the key and values.
+                                            properties:
+                                              key:
+                                                description: key is the label
+                                                  key that the selector applies
+                                                  to.
+                                                type: string
+                                              operator:
+                                                description: operator represents
+                                                  a key's relationship to a
+                                                  set of values. Valid operators
+                                                  are In, NotIn, Exists and
+                                                  DoesNotExist.
+                                                type: string
+                                              values:
+                                                description: values is an array
+                                                  of string values. If the operator
+                                                  is In or NotIn, the values
+                                                  array must be non-empty. If
+                                                  the operator is Exists or
+                                                  DoesNotExist, the values array
+                                                  must be empty. This array
+                                                  is replaced during a strategic
+                                                  merge patch.
+                                                items:
+                                                  type: string
+                                                type: array
+                                            required:
+                                            - key
+                                            - operator
+                                            type: object
+                                          type: array
+                                        matchLabels:
+                                          additionalProperties:
+                                            type: string
+                                          description: matchLabels is a map
+                                            of {key,value} pairs. A single {key,value}
+                                            in the matchLabels map is equivalent
+                                            to an element of matchExpressions,
+                                            whose key field is "key", the operator
+                                            is "In", and the values array contains
+                                            only "value". The requirements are
+                                            ANDed.
+                                          type: object
+                                      type: object
+                                    namespaceSelector:
+                                      description: A label query over the set
+                                        of namespaces that the term applies
+                                        to. The term is applied to the union
+                                        of the namespaces selected by this field
+                                        and the ones listed in the namespaces
+                                        field. null selector and null or empty
+                                        namespaces list means "this pod's namespace".
+                                        An empty selector ({}) matches all namespaces.
+                                        This field is beta-level and is only
+                                        honored when PodAffinityNamespaceSelector
+                                        feature is enabled.
+                                      properties:
+                                        matchExpressions:
+                                          description: matchExpressions is a
+                                            list of label selector requirements.
+                                            The requirements are ANDed.
+                                          items:
+                                            description: A label selector requirement
+                                              is a selector that contains values,
+                                              a key, and an operator that relates
+                                              the key and values.
+                                            properties:
+                                              key:
+                                                description: key is the label
+                                                  key that the selector applies
+                                                  to.
+                                                type: string
+                                              operator:
+                                                description: operator represents
+                                                  a key's relationship to a
+                                                  set of values. Valid operators
+                                                  are In, NotIn, Exists and
+                                                  DoesNotExist.
+ type: string + values: + description: values is an array + of string values. If the operator + is In or NotIn, the values + array must be non-empty. If + the operator is Exists or + DoesNotExist, the values array + must be empty. This array + is replaced during a strategic + merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. A single {key,value} + in the matchLabels map is equivalent + to an element of matchExpressions, + whose key field is "key", the operator + is "In", and the values array contains + only "value". The requirements are + ANDed. + type: object + type: object + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. The term is applied to the + union of the namespaces listed in this + field and the ones selected by namespaceSelector. + null or empty namespaces list and null + namespaceSelector means "this pod's + namespace" + items: + type: string + type: array + topologyKey: + description: This pod should be co-located + (affinity) or not co-located (anti-affinity) + with the pods matching the labelSelector + in the specified namespaces, where co-located + is defined as running on a node whose + value of the label with key topologyKey + matches that of any node on which any + of the selected pods is running. Empty + topologyKey is not allowed. + type: string + required: + - topologyKey + type: object + type: array + type: object + type: object + automountServiceAccountToken: + description: AutomountServiceAccountToken indicates + whether a service account token should be automatically + mounted. + type: boolean + containers: + description: List of containers belonging to the pod. + Containers cannot currently be added or removed. There + must be at least one container in a Pod. Cannot be + updated. + items: + description: A single application container that you + want to run within a pod. + properties: + args: + description: 'Arguments to the entrypoint. The + docker image''s CMD is used if this is not provided. + Variable references $(VAR_NAME) are expanded + using the container''s environment. If a variable + cannot be resolved, the reference in the input + string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the + $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will + produce the string literal "$(VAR_NAME)". Escaped + references will never be expanded, regardless + of whether the variable exists or not. Cannot + be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell' + items: + type: string + type: array + command: + description: 'Entrypoint array. Not executed within + a shell. The docker image''s ENTRYPOINT is used + if this is not provided. Variable references + $(VAR_NAME) are expanded using the container''s + environment. If a variable cannot be resolved, + the reference in the input string will be unchanged. + Double $$ are reduced to a single $, which allows + for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" + will produce the string literal "$(VAR_NAME)". + Escaped references will never be expanded, regardless + of whether the variable exists or not. Cannot + be updated. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell'
+                              items:
+                                type: string
+                              type: array
+                            env:
+                              description: List of environment variables to
+                                set in the container. Cannot be updated.
+                              items:
+                                description: EnvVar represents an environment
+                                  variable present in a Container.
+                                properties:
+                                  name:
+                                    description: Name of the environment variable.
+                                      Must be a C_IDENTIFIER.
+                                    type: string
+                                  value:
+                                    description: 'Variable references $(VAR_NAME)
+                                      are expanded using the previously defined
+                                      environment variables in the container
+                                      and any service environment variables.
+                                      If a variable cannot be resolved, the
+                                      reference in the input string will be
+                                      unchanged. Double $$ are reduced to a
+                                      single $, which allows for escaping the
+                                      $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)"
+                                      will produce the string literal "$(VAR_NAME)".
+                                      Escaped references will never be expanded,
+                                      regardless of whether the variable exists
+                                      or not. Defaults to "".'
+                                    type: string
+                                  valueFrom:
+                                    description: Source for the environment
+                                      variable's value. Cannot be used if value
+                                      is not empty.
+                                    properties:
+                                      configMapKeyRef:
+                                        description: Selects a key of a ConfigMap.
+                                        properties:
+                                          key:
+                                            description: The key to select.
+                                            type: string
+                                          name:
+                                            description: 'Name of the referent.
+                                              More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+                                              TODO: Add other useful fields.
+                                              apiVersion, kind, uid?'
+                                            type: string
+                                          optional:
+                                            description: Specify whether the
+                                              ConfigMap or its key must be defined
+                                            type: boolean
+                                        required:
+                                        - key
+                                        type: object
+                                      fieldRef:
+                                        description: 'Selects a field of the
+                                          pod: supports metadata.name, metadata.namespace,
+                                          `metadata.labels[''<KEY>'']`, `metadata.annotations[''<KEY>'']`,
+                                          spec.nodeName, spec.serviceAccountName,
+                                          status.hostIP, status.podIP, status.podIPs.'
+                                        properties:
+                                          apiVersion:
+                                            description: Version of the schema
+                                              the FieldPath is written in terms
+                                              of, defaults to "v1".
+                                            type: string
+                                          fieldPath:
+                                            description: Path of the field to
+                                              select in the specified API version.
+                                            type: string
+                                        required:
+                                        - fieldPath
+                                        type: object
+                                      resourceFieldRef:
+                                        description: 'Selects a resource of
+                                          the container: only resources limits
+                                          and requests (limits.cpu, limits.memory,
+                                          limits.ephemeral-storage, requests.cpu,
+                                          requests.memory and requests.ephemeral-storage)
+                                          are currently supported.'
+                                        properties:
+                                          containerName:
+                                            description: 'Container name: required
+                                              for volumes, optional for env
+                                              vars'
+                                            type: string
+                                          divisor:
+                                            anyOf:
+                                            - type: integer
+                                            - type: string
+                                            description: Specifies the output
+                                              format of the exposed resources,
+                                              defaults to "1"
+                                            pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                                            x-kubernetes-int-or-string: true
+                                          resource:
+                                            description: 'Required: resource
+                                              to select'
+                                            type: string
+                                        required:
+                                        - resource
+                                        type: object
+                                      secretKeyRef:
+                                        description: Selects a key of a secret
+                                          in the pod's namespace
+                                        properties:
+                                          key:
+                                            description: The key of the secret
+                                              to select from. Must be a valid
+                                              secret key.
+                                            type: string
+                                          name:
+                                            description: 'Name of the referent.
+                                              More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+                                              TODO: Add other useful fields.
+                                              apiVersion, kind, uid?'
+ type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + description: List of sources to populate environment + variables in the container. The keys defined + within a source must be a C_IDENTIFIER. All + invalid keys will be reported as an event when + the container is starting. When a key exists + in multiple sources, the value associated with + the last source will take precedence. Values + defined by an Env with a duplicate key will + take precedence. Cannot be updated. + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be + a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + type: object + type: array + image: + description: 'Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images + This field is optional to allow higher level + config management to default or override container + images in workload controllers like Deployments + and StatefulSets.' + type: string + imagePullPolicy: + description: 'Image pull policy. One of Always, + Never, IfNotPresent. Defaults to Always if :latest + tag is specified, or IfNotPresent otherwise. + Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images' + type: string + lifecycle: + description: Actions that the management system + should take in response to container lifecycle + events. Cannot be updated. + properties: + postStart: + description: 'PostStart is called immediately + after a container is created. If the handler + fails, the container is terminated and restarted + according to its restart policy. Other management + of the container blocks until the hook completes. + More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies + the action to take. + properties: + command: + description: Command is the command + line to execute inside the container, + the working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it + is not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to + explicitly call out to that shell. + Exit status of 0 is treated as live/healthy + and non-zero is unhealthy. + items: + type: string + type: array + type: object + httpGet: + description: HTTPGet specifies the http + request to perform. 
+ properties: + host: + description: Host name to connect + to, defaults to the pod IP. You + probably want to set "Host" in httpHeaders + instead. + type: string + httpHeaders: + description: Custom headers to set + in the request. HTTP allows repeated + headers. + items: + description: HTTPHeader describes + a custom header to be used in + HTTP probes + properties: + name: + description: The header field + name + type: string + value: + description: The header field + value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the + HTTP server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not + yet supported TODO: implement a realistic + TCP lifecycle hook' + properties: + host: + description: 'Optional: Host name + to connect to, defaults to the pod + IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + preStop: + description: 'PreStop is called immediately + before a container is terminated due to + an API request or management event such + as liveness/startup probe failure, preemption, + resource contention, etc. The handler is + not called if the container crashes or exits. + The reason for termination is passed to + the handler. The Pod''s termination grace + period countdown begins before the PreStop + hooked is executed. Regardless of the outcome + of the handler, the container will eventually + terminate within the Pod''s termination + grace period. Other management of the container + blocks until the hook completes or until + the termination grace period is reached. + More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies + the action to take. + properties: + command: + description: Command is the command + line to execute inside the container, + the working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it + is not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to + explicitly call out to that shell. + Exit status of 0 is treated as live/healthy + and non-zero is unhealthy. + items: + type: string + type: array + type: object + httpGet: + description: HTTPGet specifies the http + request to perform. + properties: + host: + description: Host name to connect + to, defaults to the pod IP. You + probably want to set "Host" in httpHeaders + instead. + type: string + httpHeaders: + description: Custom headers to set + in the request. HTTP allows repeated + headers. 
+ items: + description: HTTPHeader describes + a custom header to be used in + HTTP probes + properties: + name: + description: The header field + name + type: string + value: + description: The header field + value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the + HTTP server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not + yet supported TODO: implement a realistic + TCP lifecycle hook' + properties: + host: + description: 'Optional: Host name + to connect to, defaults to the pod + IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + description: 'Periodic probe of container liveness. + Container will be restarted if the probe fails. + Cannot be updated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. 
+ type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + name: + description: Name of the container specified as + a DNS_LABEL. Each container in a pod must have + a unique name (DNS_LABEL). Cannot be updated. + type: string + ports: + description: List of ports to expose from the + container. Exposing a port here gives the system + additional information about the network connections + a container uses, but is primarily informational. + Not specifying a port here DOES NOT prevent + that port from being exposed. Any port which + is listening on the default "0.0.0.0" address + inside a container will be accessible from the + network. Cannot be updated. + items: + description: ContainerPort represents a network + port in a single container. + properties: + containerPort: + description: Number of port to expose on + the pod's IP address. This must be a valid + port number, 0 < x < 65536. + format: int32 + type: integer + hostIP: + description: What host IP to bind the external + port to. + type: string + hostPort: + description: Number of port to expose on + the host. If specified, this must be a + valid port number, 0 < x < 65536. 
If HostNetwork + is specified, this must match ContainerPort. + Most containers do not need this. + format: int32 + type: integer + name: + description: If specified, this must be + an IANA_SVC_NAME and unique within the + pod. Each named port in a pod must have + a unique name. Name for the port that + can be referred to by services. + type: string + protocol: + default: TCP + description: Protocol for port. Must be + UDP, TCP, or SCTP. Defaults to "TCP". + type: string + required: + - containerPort + type: object + type: array + x-kubernetes-list-map-keys: + - containerPort + - protocol + x-kubernetes-list-type: map + readinessProbe: + description: 'Periodic probe of container service + readiness. Container will be removed from service + endpoints if the probe fails. Cannot be updated. + More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. 
TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + resources: + description: 'Compute Resources required by this + container. Cannot be updated. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Limits describes the maximum + amount of compute resources allowed. More + info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Requests describes the minimum + amount of compute resources required. If + Requests is omitted for a container, it + defaults to Limits if that is explicitly + specified, otherwise to an implementation-defined + value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + type: object + securityContext: + description: 'SecurityContext defines the security + options the container should be run with. If + set, the fields of SecurityContext override + the equivalent fields of PodSecurityContext. + More info: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/' + properties: + allowPrivilegeEscalation: + description: 'AllowPrivilegeEscalation controls + whether a process can gain more privileges + than its parent process. This bool directly + controls if the no_new_privs flag will be + set on the container process. 
AllowPrivilegeEscalation + is true always when the container is: 1) + run as Privileged 2) has CAP_SYS_ADMIN' + type: boolean + capabilities: + description: The capabilities to add/drop + when running containers. Defaults to the + default set of capabilities granted by the + container runtime. + properties: + add: + description: Added capabilities + items: + description: Capability represent POSIX + capabilities type + type: string + type: array + drop: + description: Removed capabilities + items: + description: Capability represent POSIX + capabilities type + type: string + type: array + type: object + privileged: + description: Run container in privileged mode. + Processes in privileged containers are essentially + equivalent to root on the host. Defaults + to false. + type: boolean + procMount: + description: procMount denotes the type of + proc mount to use for the containers. The + default is DefaultProcMount which uses the + container runtime defaults for readonly + paths and masked paths. This requires the + ProcMountType feature flag to be enabled. + type: string + readOnlyRootFilesystem: + description: Whether this container has a + read-only root filesystem. Default is false. + type: boolean + runAsGroup: + description: The GID to run the entrypoint + of the container process. Uses runtime default + if unset. May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container + must run as a non-root user. If true, the + Kubelet will validate the image at runtime + to ensure that it does not run as UID 0 + (root) and fail to start the container if + it does. If unset or false, no such validation + will be performed. May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + type: boolean + runAsUser: + description: The UID to run the entrypoint + of the container process. Defaults to user + specified in image metadata if unspecified. + May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied + to the container. If unspecified, the container + runtime will allocate a random SELinux context + for each container. May also be set in + PodSecurityContext. If set in both SecurityContext + and PodSecurityContext, the value specified + in SecurityContext takes precedence. + properties: + level: + description: Level is SELinux level label + that applies to the container. + type: string + role: + description: Role is a SELinux role label + that applies to the container. + type: string + type: + description: Type is a SELinux type label + that applies to the container. + type: string + user: + description: User is a SELinux user label + that applies to the container. + type: string + type: object + seccompProfile: + description: The seccomp options to use by + this container. If seccomp options are provided + at both the pod & container level, the container + options override the pod options. + properties: + localhostProfile: + description: localhostProfile indicates + a profile defined in a file on the node + should be used. The profile must be + preconfigured on the node to work. 
Must + be a descending path, relative to the + kubelet's configured seccomp profile + location. Must only be set if type is + "Localhost". + type: string + type: + description: "type indicates which kind + of seccomp profile will be applied. + Valid options are: \n Localhost - a + profile defined in a file on the node + should be used. RuntimeDefault - the + container runtime default profile should + be used. Unconfined - no profile should + be applied." + type: string + required: + - type + type: object + windowsOptions: + description: The Windows specific settings + applied to all containers. If unspecified, + the options from the PodSecurityContext + will be used. If set in both SecurityContext + and PodSecurityContext, the value specified + in SecurityContext takes precedence. + properties: + gmsaCredentialSpec: + description: GMSACredentialSpec is where + the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) + inlines the contents of the GMSA credential + spec named by the GMSACredentialSpecName + field. + type: string + gmsaCredentialSpecName: + description: GMSACredentialSpecName is + the name of the GMSA credential spec + to use. + type: string + hostProcess: + description: HostProcess determines if + a container should be run as a 'Host + Process' container. This field is alpha-level + and will only be honored by components + that enable the WindowsHostProcessContainers + feature flag. Setting this field without + the feature flag will result in errors + when validating the Pod. All of a Pod's + containers must have the same effective + HostProcess value (it is not allowed + to have a mix of HostProcess containers + and non-HostProcess containers). In + addition, if HostProcess is true then + HostNetwork must also be set to true. + type: boolean + runAsUserName: + description: The UserName in Windows to + run the entrypoint of the container + process. Defaults to the user specified + in image metadata if unspecified. May + also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext + takes precedence. + type: string + type: object + type: object + startupProbe: + description: 'StartupProbe indicates that the + Pod has successfully initialized. If specified, + no other probes are executed until this completes + successfully. If this probe fails, the Pod will + be restarted, just as if the livenessProbe failed. + This can be used to provide different probe + parameters at the beginning of a Pod''s lifecycle, + when it might take a long time to load data + or warm a cache, than during steady-state operation. + This cannot be updated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. 
Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + stdin: + description: Whether this container should allocate + a buffer for stdin in the container runtime. + If this is not set, reads from stdin in the + container will always result in EOF. Default + is false. + type: boolean + stdinOnce: + description: Whether the container runtime should + close the stdin channel after it has been opened + by a single attach. When stdin is true the stdin + stream will remain open across multiple attach + sessions. If stdinOnce is set to true, stdin + is opened on container start, is empty until + the first client attaches to stdin, and then + remains open and accepts data until the client + disconnects, at which time stdin is closed and + remains closed until the container is restarted. + If this flag is false, a container processes + that reads from stdin will never receive an + EOF. Default is false + type: boolean + terminationMessagePath: + description: 'Optional: Path at which the file + to which the container''s termination message + will be written is mounted into the container''s + filesystem. Message written is intended to be + brief final status, such as an assertion failure + message. Will be truncated by the node if greater + than 4096 bytes. The total message length across + all containers will be limited to 12kb. Defaults + to /dev/termination-log. Cannot be updated.' + type: string + terminationMessagePolicy: + description: Indicate how the termination message + should be populated. File will use the contents + of terminationMessagePath to populate the container + status message on both success and failure. + FallbackToLogsOnError will use the last chunk + of container log output if the termination message + file is empty and the container exited with + an error. The log output is limited to 2048 + bytes or 80 lines, whichever is smaller. Defaults + to File. Cannot be updated. + type: string + tty: + description: Whether this container should allocate + a TTY for itself, also requires 'stdin' to be + true. Default is false. + type: boolean + volumeDevices: + description: volumeDevices is the list of block + devices to be used by the container. + items: + description: volumeDevice describes a mapping + of a raw block device within a container. + properties: + devicePath: + description: devicePath is the path inside + of the container that the device will + be mapped to. + type: string + name: + description: name must match the name of + a persistentVolumeClaim in the pod + type: string + required: + - devicePath + - name + type: object + type: array + volumeMounts: + description: Pod volumes to mount into the container's + filesystem. Cannot be updated. + items: + description: VolumeMount describes a mounting + of a Volume within a container. + properties: + mountPath: + description: Path within the container at + which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: mountPropagation determines + how mounts are propagated from the host + to container and the other way around. + When not set, MountPropagationNone is + used. This field is beta in 1.10. + type: string + name: + description: This must match the Name of + a Volume. + type: string + readOnly: + description: Mounted read-only if true, + read-write otherwise (false or unspecified). + Defaults to false. 
+ type: boolean + subPath: + description: Path within the volume from + which the container's volume should be + mounted. Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should + be mounted. Behaves similarly to SubPath + but environment variable references $(VAR_NAME) + are expanded using the container's environment. + Defaults to "" (volume's root). SubPathExpr + and SubPath are mutually exclusive. + type: string + required: + - mountPath + - name + type: object + type: array + workingDir: + description: Container's working directory. If + not specified, the container runtime's default + will be used, which might be configured in the + container image. Cannot be updated. + type: string + required: + - name + type: object + type: array + dnsConfig: + description: Specifies the DNS parameters of a pod. + Parameters specified here will be merged to the generated + DNS configuration based on DNSPolicy. + properties: + nameservers: + description: A list of DNS name server IP addresses. + This will be appended to the base nameservers + generated from DNSPolicy. Duplicated nameservers + will be removed. + items: + type: string + type: array + options: + description: A list of DNS resolver options. This + will be merged with the base options generated + from DNSPolicy. Duplicated entries will be removed. + Resolution options given in Options will override + those that appear in the base DNSPolicy. + items: + description: PodDNSConfigOption defines DNS resolver + options of a pod. + properties: + name: + description: Required. + type: string + value: + type: string + type: object + type: array + searches: + description: A list of DNS search domains for host-name + lookup. This will be appended to the base search + paths generated from DNSPolicy. Duplicated search + paths will be removed. + items: + type: string + type: array + type: object + dnsPolicy: + description: Set DNS policy for the pod. Defaults to + "ClusterFirst". Valid values are 'ClusterFirstWithHostNet', + 'ClusterFirst', 'Default' or 'None'. DNS parameters + given in DNSConfig will be merged with the policy + selected with DNSPolicy. To have DNS options set along + with hostNetwork, you have to specify DNS policy explicitly + to 'ClusterFirstWithHostNet'. + type: string + enableServiceLinks: + description: 'EnableServiceLinks indicates whether information + about services should be injected into pod''s environment + variables, matching the syntax of Docker links. Optional: + Defaults to true.' + type: boolean + ephemeralContainers: + description: List of ephemeral containers run in this + pod. Ephemeral containers may be run in an existing + pod to perform user-initiated actions such as debugging. + This list cannot be specified when creating a pod, + and it cannot be modified by updating the pod spec. + In order to add an ephemeral container to an existing + pod, use the pod's ephemeralcontainers subresource. + This field is alpha-level and is only honored by servers + that enable the EphemeralContainers feature. + items: + description: An EphemeralContainer is a container + that may be added temporarily to an existing pod + for user-initiated activities such as debugging. + Ephemeral containers have no resource or scheduling + guarantees, and they will not be restarted when + they exit or when a pod is removed or restarted. + If an ephemeral container causes a pod to exceed + its resource allocation, the pod may be evicted. 
+                            Ephemeral containers may not be added by directly
+                            updating the pod spec. They must be added via the
+                            pod's ephemeralcontainers subresource, and they
+                            will appear in the pod spec once added. This is
+                            an alpha feature enabled by the EphemeralContainers
+                            feature flag.
+                          properties:
+                            args:
+                              description: 'Arguments to the entrypoint. The
+                                docker image''s CMD is used if this is not provided.
+                                Variable references $(VAR_NAME) are expanded
+                                using the container''s environment. If a variable
+                                cannot be resolved, the reference in the input
+                                string will be unchanged. Double $$ are reduced
+                                to a single $, which allows for escaping the
+                                $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will
+                                produce the string literal "$(VAR_NAME)". Escaped
+                                references will never be expanded, regardless
+                                of whether the variable exists or not. Cannot
+                                be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell'
+                              items:
+                                type: string
+                              type: array
+                            command:
+                              description: 'Entrypoint array. Not executed within
+                                a shell. The docker image''s ENTRYPOINT is used
+                                if this is not provided. Variable references
+                                $(VAR_NAME) are expanded using the container''s
+                                environment. If a variable cannot be resolved,
+                                the reference in the input string will be unchanged.
+                                Double $$ are reduced to a single $, which allows
+                                for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)"
+                                will produce the string literal "$(VAR_NAME)".
+                                Escaped references will never be expanded, regardless
+                                of whether the variable exists or not. Cannot
+                                be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell'
+                              items:
+                                type: string
+                              type: array
+                            env:
+                              description: List of environment variables to
+                                set in the container. Cannot be updated.
+                              items:
+                                description: EnvVar represents an environment
+                                  variable present in a Container.
+                                properties:
+                                  name:
+                                    description: Name of the environment variable.
+                                      Must be a C_IDENTIFIER.
+                                    type: string
+                                  value:
+                                    description: 'Variable references $(VAR_NAME)
+                                      are expanded using the previously defined
+                                      environment variables in the container
+                                      and any service environment variables.
+                                      If a variable cannot be resolved, the
+                                      reference in the input string will be
+                                      unchanged. Double $$ are reduced to a
+                                      single $, which allows for escaping the
+                                      $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)"
+                                      will produce the string literal "$(VAR_NAME)".
+                                      Escaped references will never be expanded,
+                                      regardless of whether the variable exists
+                                      or not. Defaults to "".'
+                                    type: string
+                                  valueFrom:
+                                    description: Source for the environment
+                                      variable's value. Cannot be used if value
+                                      is not empty.
+                                    properties:
+                                      configMapKeyRef:
+                                        description: Selects a key of a ConfigMap.
+                                        properties:
+                                          key:
+                                            description: The key to select.
+                                            type: string
+                                          name:
+                                            description: 'Name of the referent.
+                                              More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+                                              TODO: Add other useful fields.
+                                              apiVersion, kind, uid?'
+                                            type: string
+                                          optional:
+                                            description: Specify whether the
+                                              ConfigMap or its key must be defined
+                                            type: boolean
+                                        required:
+                                        - key
+                                        type: object
+                                      fieldRef:
+                                        description: 'Selects a field of the
+                                          pod: supports metadata.name, metadata.namespace,
+                                          `metadata.labels[''<KEY>'']`, `metadata.annotations[''<KEY>'']`,
+                                          spec.nodeName, spec.serviceAccountName,
+                                          status.hostIP, status.podIP, status.podIPs.'
+ properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + resourceFieldRef: + description: 'Selects a resource of + the container: only resources limits + and requests (limits.cpu, limits.memory, + limits.ephemeral-storage, requests.cpu, + requests.memory and requests.ephemeral-storage) + are currently supported.' + properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + secretKeyRef: + description: Selects a key of a secret + in the pod's namespace + properties: + key: + description: The key of the secret + to select from. Must be a valid + secret key. + type: string + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. + apiVersion, kind, uid?' + type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + description: List of sources to populate environment + variables in the container. The keys defined + within a source must be a C_IDENTIFIER. All + invalid keys will be reported as an event when + the container is starting. When a key exists + in multiple sources, the value associated with + the last source will take precedence. Values + defined by an Env with a duplicate key will + take precedence. Cannot be updated. + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be + a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + type: object + type: array + image: + description: 'Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images' + type: string + imagePullPolicy: + description: 'Image pull policy. One of Always, + Never, IfNotPresent. Defaults to Always if :latest + tag is specified, or IfNotPresent otherwise. + Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/containers/images#updating-images'
+ type: string
+ lifecycle:
+ description: Lifecycle is not allowed for ephemeral
+ containers.
+ properties:
+ postStart:
+ description: 'PostStart is called immediately
+ after a container is created. If the handler
+ fails, the container is terminated and restarted
+ according to its restart policy. Other management
+ of the container blocks until the hook completes.
+ More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks'
+ properties:
+ exec:
+ description: One and only one of the following
+ should be specified. Exec specifies
+ the action to take.
+ properties:
+ command:
+ description: Command is the command
+ line to execute inside the container,
+ the working directory for the command is
+ root ('/') in the container's filesystem.
+ The command is simply exec'd, it
+ is not run inside a shell, so traditional
+ shell instructions ('|', etc) won't
+ work. To use a shell, you need to
+ explicitly call out to that shell.
+ Exit status of 0 is treated as live/healthy
+ and non-zero is unhealthy.
+ items:
+ type: string
+ type: array
+ type: object
+ httpGet:
+ description: HTTPGet specifies the http
+ request to perform.
+ properties:
+ host:
+ description: Host name to connect
+ to, defaults to the pod IP. You
+ probably want to set "Host" in httpHeaders
+ instead.
+ type: string
+ httpHeaders:
+ description: Custom headers to set
+ in the request. HTTP allows repeated
+ headers.
+ items:
+ description: HTTPHeader describes
+ a custom header to be used in
+ HTTP probes
+ properties:
+ name:
+ description: The header field
+ name
+ type: string
+ value:
+ description: The header field
+ value
+ type: string
+ required:
+ - name
+ - value
+ type: object
+ type: array
+ path:
+ description: Path to access on the
+ HTTP server.
+ type: string
+ port:
+ anyOf:
+ - type: integer
+ - type: string
+ description: Name or number of the
+ port to access on the container.
+ Number must be in the range 1 to
+ 65535. Name must be an IANA_SVC_NAME.
+ x-kubernetes-int-or-string: true
+ scheme:
+ description: Scheme to use for connecting
+ to the host. Defaults to HTTP.
+ type: string
+ required:
+ - port
+ type: object
+ tcpSocket:
+ description: 'TCPSocket specifies an action
+ involving a TCP port. TCP hooks not
+ yet supported TODO: implement a realistic
+ TCP lifecycle hook'
+ properties:
+ host:
+ description: 'Optional: Host name
+ to connect to, defaults to the pod
+ IP.'
+ type: string
+ port:
+ anyOf:
+ - type: integer
+ - type: string
+ description: Number or name of the
+ port to access on the container.
+ Number must be in the range 1 to
+ 65535. Name must be an IANA_SVC_NAME.
+ x-kubernetes-int-or-string: true
+ required:
+ - port
+ type: object
+ type: object
+ preStop:
+ description: 'PreStop is called immediately
+ before a container is terminated due to
+ an API request or management event such
+ as liveness/startup probe failure, preemption,
+ resource contention, etc. The handler is
+ not called if the container crashes or exits.
+ The reason for termination is passed to
+ the handler. The Pod''s termination grace
+ period countdown begins before the PreStop
+ hook is executed. Regardless of the outcome
+ of the handler, the container will eventually
+ terminate within the Pod''s termination
+ grace period. Other management of the container
+ blocks until the hook completes or until
+ the termination grace period is reached.
+ More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies + the action to take. + properties: + command: + description: Command is the command + line to execute inside the container, + the working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it + is not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to + explicitly call out to that shell. + Exit status of 0 is treated as live/healthy + and non-zero is unhealthy. + items: + type: string + type: array + type: object + httpGet: + description: HTTPGet specifies the http + request to perform. + properties: + host: + description: Host name to connect + to, defaults to the pod IP. You + probably want to set "Host" in httpHeaders + instead. + type: string + httpHeaders: + description: Custom headers to set + in the request. HTTP allows repeated + headers. + items: + description: HTTPHeader describes + a custom header to be used in + HTTP probes + properties: + name: + description: The header field + name + type: string + value: + description: The header field + value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the + HTTP server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not + yet supported TODO: implement a realistic + TCP lifecycle hook' + properties: + host: + description: 'Optional: Host name + to connect to, defaults to the pod + IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + type: object + type: object + livenessProbe: + description: Probes are not allowed for ephemeral + containers. + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. 
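+ # Hedged example: probes are not allowed for ephemeral containers, but
+ # on a regular container an httpGet liveness probe of the shape described
+ # here would read as below; the path and port are hypothetical.
+ #   livenessProbe:
+ #     httpGet:
+ #       path: /healthz
+ #       port: 8080
+ #     initialDelaySeconds: 5
+ #     periodSeconds: 10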
+ type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + name: + description: Name of the ephemeral container specified + as a DNS_LABEL. This name must be unique among + all containers, init containers and ephemeral + containers. + type: string + ports: + description: Ports are not allowed for ephemeral + containers. 
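+ # Illustrative only: ports are rejected on ephemeral containers, but on
+ # a regular container a ContainerPort entry of the shape documented below
+ # would read as follows (the name "http" is a valid IANA_SVC_NAME):
+ #   ports:
+ #   - name: http
+ #     containerPort: 8080
+ #     protocol: TCP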
+ items: + description: ContainerPort represents a network + port in a single container. + properties: + containerPort: + description: Number of port to expose on + the pod's IP address. This must be a valid + port number, 0 < x < 65536. + format: int32 + type: integer + hostIP: + description: What host IP to bind the external + port to. + type: string + hostPort: + description: Number of port to expose on + the host. If specified, this must be a + valid port number, 0 < x < 65536. If HostNetwork + is specified, this must match ContainerPort. + Most containers do not need this. + format: int32 + type: integer + name: + description: If specified, this must be + an IANA_SVC_NAME and unique within the + pod. Each named port in a pod must have + a unique name. Name for the port that + can be referred to by services. + type: string + protocol: + default: TCP + description: Protocol for port. Must be + UDP, TCP, or SCTP. Defaults to "TCP". + type: string + required: + - containerPort + type: object + type: array + readinessProbe: + description: Probes are not allowed for ephemeral + containers. + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. 
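+ # Sketch under the same caveat (no probes on ephemeral containers): a
+ # tcpSocket readiness probe on a regular container; the port number is
+ # hypothetical.
+ #   readinessProbe:
+ #     tcpSocket:
+ #       port: 5432
+ #     periodSeconds: 10
+ #     failureThreshold: 3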
+ format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + resources: + description: Resources are not allowed for ephemeral + containers. Ephemeral containers use spare resources + already allocated to the pod. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Limits describes the maximum + amount of compute resources allowed. More + info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Requests describes the minimum + amount of compute resources required. If + Requests is omitted for a container, it + defaults to Limits if that is explicitly + specified, otherwise to an implementation-defined + value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + type: object + securityContext: + description: 'Optional: SecurityContext defines + the security options the ephemeral container + should be run with. If set, the fields of SecurityContext + override the equivalent fields of PodSecurityContext.' 
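+ # Illustrative container-level securityContext using the fields described
+ # below; a common hardening baseline, not a mandated configuration.
+ #   securityContext:
+ #     runAsNonRoot: true
+ #     allowPrivilegeEscalation: false
+ #     readOnlyRootFilesystem: true
+ #     capabilities:
+ #       drop:
+ #       - ALL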
+ properties: + allowPrivilegeEscalation: + description: 'AllowPrivilegeEscalation controls + whether a process can gain more privileges + than its parent process. This bool directly + controls if the no_new_privs flag will be + set on the container process. AllowPrivilegeEscalation + is true always when the container is: 1) + run as Privileged 2) has CAP_SYS_ADMIN' + type: boolean + capabilities: + description: The capabilities to add/drop + when running containers. Defaults to the + default set of capabilities granted by the + container runtime. + properties: + add: + description: Added capabilities + items: + description: Capability represent POSIX + capabilities type + type: string + type: array + drop: + description: Removed capabilities + items: + description: Capability represent POSIX + capabilities type + type: string + type: array + type: object + privileged: + description: Run container in privileged mode. + Processes in privileged containers are essentially + equivalent to root on the host. Defaults + to false. + type: boolean + procMount: + description: procMount denotes the type of + proc mount to use for the containers. The + default is DefaultProcMount which uses the + container runtime defaults for readonly + paths and masked paths. This requires the + ProcMountType feature flag to be enabled. + type: string + readOnlyRootFilesystem: + description: Whether this container has a + read-only root filesystem. Default is false. + type: boolean + runAsGroup: + description: The GID to run the entrypoint + of the container process. Uses runtime default + if unset. May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container + must run as a non-root user. If true, the + Kubelet will validate the image at runtime + to ensure that it does not run as UID 0 + (root) and fail to start the container if + it does. If unset or false, no such validation + will be performed. May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + type: boolean + runAsUser: + description: The UID to run the entrypoint + of the container process. Defaults to user + specified in image metadata if unspecified. + May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied + to the container. If unspecified, the container + runtime will allocate a random SELinux context + for each container. May also be set in + PodSecurityContext. If set in both SecurityContext + and PodSecurityContext, the value specified + in SecurityContext takes precedence. + properties: + level: + description: Level is SELinux level label + that applies to the container. + type: string + role: + description: Role is a SELinux role label + that applies to the container. + type: string + type: + description: Type is a SELinux type label + that applies to the container. + type: string + user: + description: User is a SELinux user label + that applies to the container. + type: string + type: object + seccompProfile: + description: The seccomp options to use by + this container. 
If seccomp options are provided + at both the pod & container level, the container + options override the pod options. + properties: + localhostProfile: + description: localhostProfile indicates + a profile defined in a file on the node + should be used. The profile must be + preconfigured on the node to work. Must + be a descending path, relative to the + kubelet's configured seccomp profile + location. Must only be set if type is + "Localhost". + type: string + type: + description: "type indicates which kind + of seccomp profile will be applied. + Valid options are: \n Localhost - a + profile defined in a file on the node + should be used. RuntimeDefault - the + container runtime default profile should + be used. Unconfined - no profile should + be applied." + type: string + required: + - type + type: object + windowsOptions: + description: The Windows specific settings + applied to all containers. If unspecified, + the options from the PodSecurityContext + will be used. If set in both SecurityContext + and PodSecurityContext, the value specified + in SecurityContext takes precedence. + properties: + gmsaCredentialSpec: + description: GMSACredentialSpec is where + the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) + inlines the contents of the GMSA credential + spec named by the GMSACredentialSpecName + field. + type: string + gmsaCredentialSpecName: + description: GMSACredentialSpecName is + the name of the GMSA credential spec + to use. + type: string + hostProcess: + description: HostProcess determines if + a container should be run as a 'Host + Process' container. This field is alpha-level + and will only be honored by components + that enable the WindowsHostProcessContainers + feature flag. Setting this field without + the feature flag will result in errors + when validating the Pod. All of a Pod's + containers must have the same effective + HostProcess value (it is not allowed + to have a mix of HostProcess containers + and non-HostProcess containers). In + addition, if HostProcess is true then + HostNetwork must also be set to true. + type: boolean + runAsUserName: + description: The UserName in Windows to + run the entrypoint of the container + process. Defaults to the user specified + in image metadata if unspecified. May + also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext + takes precedence. + type: string + type: object + type: object + startupProbe: + description: Probes are not allowed for ephemeral + containers. + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. 
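+ # Hedged sketch for a regular container: a startupProbe gives a slow
+ # starter up to failureThreshold * periodSeconds to come up, here
+ # 30 * 10 = 300 seconds; the path and port are hypothetical.
+ #   startupProbe:
+ #     httpGet:
+ #       path: /healthz
+ #       port: 8080
+ #     failureThreshold: 30
+ #     periodSeconds: 10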
+ properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + stdin: + description: Whether this container should allocate + a buffer for stdin in the container runtime. + If this is not set, reads from stdin in the + container will always result in EOF. 
Default
+ is false.
+ type: boolean
+ stdinOnce:
+ description: Whether the container runtime should
+ close the stdin channel after it has been opened
+ by a single attach. When stdin is true the stdin
+ stream will remain open across multiple attach
+ sessions. If stdinOnce is set to true, stdin
+ is opened on container start, is empty until
+ the first client attaches to stdin, and then
+ remains open and accepts data until the client
+ disconnects, at which time stdin is closed and
+ remains closed until the container is restarted.
+ If this flag is false, a container process
+ that reads from stdin will never receive an
+ EOF. Default is false.
+ type: boolean
+ targetContainerName:
+ description: If set, the name of the container
+ from PodSpec that this ephemeral container targets.
+ The ephemeral container will be run in the namespaces
+ (IPC, PID, etc) of this container. If not set
+ then the ephemeral container is run in whatever
+ namespaces are shared for the pod. Note that
+ the container runtime must support this feature.
+ type: string
+ terminationMessagePath:
+ description: 'Optional: Path at which the file
+ to which the container''s termination message
+ will be written is mounted into the container''s
+ filesystem. Message written is intended to be
+ brief final status, such as an assertion failure
+ message. Will be truncated by the node if greater
+ than 4096 bytes. The total message length across
+ all containers will be limited to 12kb. Defaults
+ to /dev/termination-log. Cannot be updated.'
+ type: string
+ terminationMessagePolicy:
+ description: Indicates how the termination message
+ should be populated. File will use the contents
+ of terminationMessagePath to populate the container
+ status message on both success and failure.
+ FallbackToLogsOnError will use the last chunk
+ of container log output if the termination message
+ file is empty and the container exited with
+ an error. The log output is limited to 2048
+ bytes or 80 lines, whichever is smaller. Defaults
+ to File. Cannot be updated.
+ type: string
+ tty:
+ description: Whether this container should allocate
+ a TTY for itself, also requires 'stdin' to be
+ true. Default is false.
+ type: boolean
+ volumeDevices:
+ description: volumeDevices is the list of block
+ devices to be used by the container.
+ items:
+ description: volumeDevice describes a mapping
+ of a raw block device within a container.
+ properties:
+ devicePath:
+ description: devicePath is the path inside
+ of the container that the device will
+ be mapped to.
+ type: string
+ name:
+ description: name must match the name of
+ a persistentVolumeClaim in the pod
+ type: string
+ required:
+ - devicePath
+ - name
+ type: object
+ type: array
+ volumeMounts:
+ description: Pod volumes to mount into the container's
+ filesystem. Cannot be updated.
+ items:
+ description: VolumeMount describes a mounting
+ of a Volume within a container.
+ properties:
+ mountPath:
+ description: Path within the container at
+ which the volume should be mounted. Must
+ not contain ':'.
+ type: string
+ mountPropagation:
+ description: mountPropagation determines
+ how mounts are propagated from the host
+ to container and the other way around.
+ When not set, MountPropagationNone is
+ used. This field is beta in 1.10.
+ type: string
+ name:
+ description: This must match the Name of
+ a Volume.
+ type: string
+ readOnly:
+ description: Mounted read-only if true,
+ read-write otherwise (false or unspecified).
+ Defaults to false.
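+ # Illustrative volumeMount matching the fields described here; the volume
+ # name and mountPath are hypothetical.
+ #   volumeMounts:
+ #   - name: config
+ #     mountPath: /etc/app
+ #     readOnly: true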
+ type: boolean
+ subPath:
+ description: Path within the volume from
+ which the container's volume should be
+ mounted. Defaults to "" (volume's root).
+ type: string
+ subPathExpr:
+ description: Expanded path within the volume
+ from which the container's volume should
+ be mounted. Behaves similarly to SubPath
+ but environment variable references $(VAR_NAME)
+ are expanded using the container's environment.
+ Defaults to "" (volume's root). SubPathExpr
+ and SubPath are mutually exclusive.
+ type: string
+ required:
+ - mountPath
+ - name
+ type: object
+ type: array
+ workingDir:
+ description: Container's working directory. If
+ not specified, the container runtime's default
+ will be used, which might be configured in the
+ container image. Cannot be updated.
+ type: string
+ required:
+ - name
+ type: object
+ type: array
+ hostAliases:
+ description: HostAliases is an optional list of hosts
+ and IPs that will be injected into the pod's hosts
+ file if specified. This is only valid for non-hostNetwork
+ pods.
+ items:
+ description: HostAlias holds the mapping between IP
+ and hostnames that will be injected as an entry
+ in the pod's hosts file.
+ properties:
+ hostnames:
+ description: Hostnames for the above IP address.
+ items:
+ type: string
+ type: array
+ ip:
+ description: IP address of the host file entry.
+ type: string
+ type: object
+ type: array
+ hostIPC:
+ description: 'Use the host''s ipc namespace. Optional:
+ Defaults to false.'
+ type: boolean
+ hostNetwork:
+ description: Host networking requested for this pod.
+ Use the host's network namespace. If this option is
+ set, the ports that will be used must be specified.
+ Defaults to false.
+ type: boolean
+ hostPID:
+ description: 'Use the host''s pid namespace. Optional:
+ Defaults to false.'
+ type: boolean
+ hostname:
+ description: Specifies the hostname of the Pod. If not
+ specified, the pod's hostname will be set to a system-defined
+ value.
+ type: string
+ imagePullSecrets:
+ description: 'ImagePullSecrets is an optional list of
+ references to secrets in the same namespace to use
+ for pulling any of the images used by this PodSpec.
+ If specified, these secrets will be passed to individual
+ puller implementations for them to use. For example,
+ in the case of docker, only DockerConfig type secrets
+ are honored. More info: https://kubernetes.io/docs/concepts/containers/images#specifying-imagepullsecrets-on-a-pod'
+ items:
+ description: LocalObjectReference contains enough
+ information to let you locate the referenced object
+ inside the same namespace.
+ properties:
+ name:
+ description: 'Name of the referent. More info:
+ https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+ TODO: Add other useful fields. apiVersion, kind,
+ uid?'
+ type: string
+ type: object
+ type: array
+ initContainers:
+ description: 'List of initialization containers belonging
+ to the pod. Init containers are executed in order
+ prior to containers being started. If any init container
+ fails, the pod is considered to have failed and is
+ handled according to its restartPolicy. The name for
+ an init container or normal container must be unique
+ among all containers. Init containers may not have
+ Lifecycle actions, Readiness probes, Liveness probes,
+ or Startup probes. 
The resourceRequirements of an
+ init container are taken into account during scheduling
+ by finding the highest request/limit for each resource
+ type, and then using the max of that value or the
+ sum of the normal containers. Limits are applied to
+ init containers in a similar fashion. Init containers
+ cannot currently be added or removed. Cannot be updated.
+ More info: https://kubernetes.io/docs/concepts/workloads/pods/init-containers/'
+ items:
+ description: A single application container that you
+ want to run within a pod.
+ properties:
+ args:
+ description: 'Arguments to the entrypoint. The
+ docker image''s CMD is used if this is not provided.
+ Variable references $(VAR_NAME) are expanded
+ using the container''s environment. If a variable
+ cannot be resolved, the reference in the input
+ string will be unchanged. Double $$ are reduced
+ to a single $, which allows for escaping the
+ $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will
+ produce the string literal "$(VAR_NAME)". Escaped
+ references will never be expanded, regardless
+ of whether the variable exists or not. Cannot
+ be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell'
+ items:
+ type: string
+ type: array
+ command:
+ description: 'Entrypoint array. Not executed within
+ a shell. The docker image''s ENTRYPOINT is used
+ if this is not provided. Variable references
+ $(VAR_NAME) are expanded using the container''s
+ environment. If a variable cannot be resolved,
+ the reference in the input string will be unchanged.
+ Double $$ are reduced to a single $, which allows
+ for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)"
+ will produce the string literal "$(VAR_NAME)".
+ Escaped references will never be expanded, regardless
+ of whether the variable exists or not. Cannot
+ be updated. More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell'
+ items:
+ type: string
+ type: array
+ env:
+ description: List of environment variables to
+ set in the container. Cannot be updated.
+ items:
+ description: EnvVar represents an environment
+ variable present in a Container.
+ properties:
+ name:
+ description: Name of the environment variable.
+ Must be a C_IDENTIFIER.
+ type: string
+ value:
+ description: 'Variable references $(VAR_NAME)
+ are expanded using the previously defined
+ environment variables in the container
+ and any service environment variables.
+ If a variable cannot be resolved, the
+ reference in the input string will be
+ unchanged. Double $$ are reduced to a
+ single $, which allows for escaping the
+ $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)"
+ will produce the string literal "$(VAR_NAME)".
+ Escaped references will never be expanded,
+ regardless of whether the variable exists
+ or not. Defaults to "".'
+ type: string
+ valueFrom:
+ description: Source for the environment
+ variable's value. Cannot be used if value
+ is not empty.
+ properties:
+ configMapKeyRef:
+ description: Selects a key of a ConfigMap.
+ properties:
+ key:
+ description: The key to select.
+ type: string
+ name:
+ description: 'Name of the referent.
+ More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+ TODO: Add other useful fields.
+ apiVersion, kind, uid?'
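+ # Illustrative only: an env var drawn from a ConfigMap key via
+ # configMapKeyRef as described above; all names are hypothetical.
+ #   env:
+ #   - name: LOG_LEVEL
+ #     valueFrom:
+ #       configMapKeyRef:
+ #         name: app-config
+ #         key: log.level
+ #         optional: true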
+ type: string + optional: + description: Specify whether the + ConfigMap or its key must be defined + type: boolean + required: + - key + type: object + fieldRef: + description: 'Selects a field of the + pod: supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.annotations['''']`, + spec.nodeName, spec.serviceAccountName, + status.hostIP, status.podIP, status.podIPs.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + resourceFieldRef: + description: 'Selects a resource of + the container: only resources limits + and requests (limits.cpu, limits.memory, + limits.ephemeral-storage, requests.cpu, + requests.memory and requests.ephemeral-storage) + are currently supported.' + properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + secretKeyRef: + description: Selects a key of a secret + in the pod's namespace + properties: + key: + description: The key of the secret + to select from. Must be a valid + secret key. + type: string + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. + apiVersion, kind, uid?' + type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + required: + - key + type: object + type: object + required: + - name + type: object + type: array + envFrom: + description: List of sources to populate environment + variables in the container. The keys defined + within a source must be a C_IDENTIFIER. All + invalid keys will be reported as an event when + the container is starting. When a key exists + in multiple sources, the value associated with + the last source will take precedence. Values + defined by an Env with a duplicate key will + take precedence. Cannot be updated. + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be + a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' 
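+ # Sketch of the envFrom shape described above (hypothetical names); when
+ # a key appears in several sources, the last source listed wins.
+ #   envFrom:
+ #   - configMapRef:
+ #       name: common-config
+ #   - prefix: DB_
+ #     secretRef:
+ #       name: db-credentials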
+ type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + type: object + type: array + image: + description: 'Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images + This field is optional to allow higher level + config management to default or override container + images in workload controllers like Deployments + and StatefulSets.' + type: string + imagePullPolicy: + description: 'Image pull policy. One of Always, + Never, IfNotPresent. Defaults to Always if :latest + tag is specified, or IfNotPresent otherwise. + Cannot be updated. More info: https://kubernetes.io/docs/concepts/containers/images#updating-images' + type: string + lifecycle: + description: Actions that the management system + should take in response to container lifecycle + events. Cannot be updated. + properties: + postStart: + description: 'PostStart is called immediately + after a container is created. If the handler + fails, the container is terminated and restarted + according to its restart policy. Other management + of the container blocks until the hook completes. + More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies + the action to take. + properties: + command: + description: Command is the command + line to execute inside the container, + the working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it + is not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to + explicitly call out to that shell. + Exit status of 0 is treated as live/healthy + and non-zero is unhealthy. + items: + type: string + type: array + type: object + httpGet: + description: HTTPGet specifies the http + request to perform. + properties: + host: + description: Host name to connect + to, defaults to the pod IP. You + probably want to set "Host" in httpHeaders + instead. + type: string + httpHeaders: + description: Custom headers to set + in the request. HTTP allows repeated + headers. + items: + description: HTTPHeader describes + a custom header to be used in + HTTP probes + properties: + name: + description: The header field + name + type: string + value: + description: The header field + value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the + HTTP server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not + yet supported TODO: implement a realistic + TCP lifecycle hook' + properties: + host: + description: 'Optional: Host name + to connect to, defaults to the pod + IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the + port to access on the container. + Number must be in the range 1 to + 65535. Name must be an IANA_SVC_NAME. 
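+ # Hedged example: init containers may not carry lifecycle actions, but on
+ # a regular container a preStop hook is commonly an exec action; since the
+ # command is simply exec'd, a shell is invoked explicitly here.
+ #   lifecycle:
+ #     preStop:
+ #       exec:
+ #         command: ["/bin/sh", "-c", "sleep 5"]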
+ x-kubernetes-int-or-string: true
+ required:
+ - port
+ type: object
+ type: object
+ preStop:
+ description: 'PreStop is called immediately
+ before a container is terminated due to
+ an API request or management event such
+ as liveness/startup probe failure, preemption,
+ resource contention, etc. The handler is
+ not called if the container crashes or exits.
+ The reason for termination is passed to
+ the handler. The Pod''s termination grace
+ period countdown begins before the PreStop
+ hook is executed. Regardless of the outcome
+ of the handler, the container will eventually
+ terminate within the Pod''s termination
+ grace period. Other management of the container
+ blocks until the hook completes or until
+ the termination grace period is reached.
+ More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks'
+ properties:
+ exec:
+ description: One and only one of the following
+ should be specified. Exec specifies
+ the action to take.
+ properties:
+ command:
+ description: Command is the command
+ line to execute inside the container,
+ the working directory for the command is
+ root ('/') in the container's filesystem.
+ The command is simply exec'd, it
+ is not run inside a shell, so traditional
+ shell instructions ('|', etc) won't
+ work. To use a shell, you need to
+ explicitly call out to that shell.
+ Exit status of 0 is treated as live/healthy
+ and non-zero is unhealthy.
+ items:
+ type: string
+ type: array
+ type: object
+ httpGet:
+ description: HTTPGet specifies the http
+ request to perform.
+ properties:
+ host:
+ description: Host name to connect
+ to, defaults to the pod IP. You
+ probably want to set "Host" in httpHeaders
+ instead.
+ type: string
+ httpHeaders:
+ description: Custom headers to set
+ in the request. HTTP allows repeated
+ headers.
+ items:
+ description: HTTPHeader describes
+ a custom header to be used in
+ HTTP probes
+ properties:
+ name:
+ description: The header field
+ name
+ type: string
+ value:
+ description: The header field
+ value
+ type: string
+ required:
+ - name
+ - value
+ type: object
+ type: array
+ path:
+ description: Path to access on the
+ HTTP server.
+ type: string
+ port:
+ anyOf:
+ - type: integer
+ - type: string
+ description: Name or number of the
+ port to access on the container.
+ Number must be in the range 1 to
+ 65535. Name must be an IANA_SVC_NAME.
+ x-kubernetes-int-or-string: true
+ scheme:
+ description: Scheme to use for connecting
+ to the host. Defaults to HTTP.
+ type: string
+ required:
+ - port
+ type: object
+ tcpSocket:
+ description: 'TCPSocket specifies an action
+ involving a TCP port. TCP hooks not
+ yet supported TODO: implement a realistic
+ TCP lifecycle hook'
+ properties:
+ host:
+ description: 'Optional: Host name
+ to connect to, defaults to the pod
+ IP.'
+ type: string
+ port:
+ anyOf:
+ - type: integer
+ - type: string
+ description: Number or name of the
+ port to access on the container.
+ Number must be in the range 1 to
+ 65535. Name must be an IANA_SVC_NAME.
+ x-kubernetes-int-or-string: true
+ required:
+ - port
+ type: object
+ type: object
+ type: object
+ livenessProbe:
+ description: 'Periodic probe of container liveness.
+ Container will be restarted if the probe fails.
+ Cannot be updated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes'
+ properties:
+ exec:
+ description: One and only one of the following
+ should be specified. Exec specifies the
+ action to take.
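+ # Illustrative exec liveness probe for a regular container (init
+ # containers may not have probes): healthy while the command exits 0;
+ # the file path is hypothetical.
+ #   livenessProbe:
+ #     exec:
+ #       command:
+ #       - cat
+ #       - /tmp/healthy
+ #     periodSeconds: 5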
+ properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. 
Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + name: + description: Name of the container specified as + a DNS_LABEL. Each container in a pod must have + a unique name (DNS_LABEL). Cannot be updated. + type: string + ports: + description: List of ports to expose from the + container. Exposing a port here gives the system + additional information about the network connections + a container uses, but is primarily informational. + Not specifying a port here DOES NOT prevent + that port from being exposed. Any port which + is listening on the default "0.0.0.0" address + inside a container will be accessible from the + network. Cannot be updated. + items: + description: ContainerPort represents a network + port in a single container. + properties: + containerPort: + description: Number of port to expose on + the pod's IP address. This must be a valid + port number, 0 < x < 65536. + format: int32 + type: integer + hostIP: + description: What host IP to bind the external + port to. + type: string + hostPort: + description: Number of port to expose on + the host. If specified, this must be a + valid port number, 0 < x < 65536. If HostNetwork + is specified, this must match ContainerPort. + Most containers do not need this. + format: int32 + type: integer + name: + description: If specified, this must be + an IANA_SVC_NAME and unique within the + pod. Each named port in a pod must have + a unique name. Name for the port that + can be referred to by services. + type: string + protocol: + default: TCP + description: Protocol for port. Must be + UDP, TCP, or SCTP. Defaults to "TCP". + type: string + required: + - containerPort + type: object + type: array + x-kubernetes-list-map-keys: + - containerPort + - protocol + x-kubernetes-list-type: map + readinessProbe: + description: 'Periodic probe of container service + readiness. Container will be removed from service + endpoints if the probe fails. Cannot be updated. + More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. 
+ properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. + format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + resources: + description: 'Compute Resources required by this + container. Cannot be updated. 
More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Limits describes the maximum + amount of compute resources allowed. More + info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Requests describes the minimum + amount of compute resources required. If + Requests is omitted for a container, it + defaults to Limits if that is explicitly + specified, otherwise to an implementation-defined + value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + type: object + securityContext: + description: 'SecurityContext defines the security + options the container should be run with. If + set, the fields of SecurityContext override + the equivalent fields of PodSecurityContext. + More info: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/' + properties: + allowPrivilegeEscalation: + description: 'AllowPrivilegeEscalation controls + whether a process can gain more privileges + than its parent process. This bool directly + controls if the no_new_privs flag will be + set on the container process. AllowPrivilegeEscalation + is true always when the container is: 1) + run as Privileged 2) has CAP_SYS_ADMIN' + type: boolean + capabilities: + description: The capabilities to add/drop + when running containers. Defaults to the + default set of capabilities granted by the + container runtime. + properties: + add: + description: Added capabilities + items: + description: Capability represent POSIX + capabilities type + type: string + type: array + drop: + description: Removed capabilities + items: + description: Capability represent POSIX + capabilities type + type: string + type: array + type: object + privileged: + description: Run container in privileged mode. + Processes in privileged containers are essentially + equivalent to root on the host. Defaults + to false. + type: boolean + procMount: + description: procMount denotes the type of + proc mount to use for the containers. The + default is DefaultProcMount which uses the + container runtime defaults for readonly + paths and masked paths. This requires the + ProcMountType feature flag to be enabled. + type: string + readOnlyRootFilesystem: + description: Whether this container has a + read-only root filesystem. Default is false. + type: boolean + runAsGroup: + description: The GID to run the entrypoint + of the container process. Uses runtime default + if unset. May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container + must run as a non-root user. If true, the + Kubelet will validate the image at runtime + to ensure that it does not run as UID 0 + (root) and fail to start the container if + it does. If unset or false, no such validation + will be performed. 
May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + type: boolean + runAsUser: + description: The UID to run the entrypoint + of the container process. Defaults to user + specified in image metadata if unspecified. + May also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied + to the container. If unspecified, the container + runtime will allocate a random SELinux context + for each container. May also be set in + PodSecurityContext. If set in both SecurityContext + and PodSecurityContext, the value specified + in SecurityContext takes precedence. + properties: + level: + description: Level is SELinux level label + that applies to the container. + type: string + role: + description: Role is a SELinux role label + that applies to the container. + type: string + type: + description: Type is a SELinux type label + that applies to the container. + type: string + user: + description: User is a SELinux user label + that applies to the container. + type: string + type: object + seccompProfile: + description: The seccomp options to use by + this container. If seccomp options are provided + at both the pod & container level, the container + options override the pod options. + properties: + localhostProfile: + description: localhostProfile indicates + a profile defined in a file on the node + should be used. The profile must be + preconfigured on the node to work. Must + be a descending path, relative to the + kubelet's configured seccomp profile + location. Must only be set if type is + "Localhost". + type: string + type: + description: "type indicates which kind + of seccomp profile will be applied. + Valid options are: \n Localhost - a + profile defined in a file on the node + should be used. RuntimeDefault - the + container runtime default profile should + be used. Unconfined - no profile should + be applied." + type: string + required: + - type + type: object + windowsOptions: + description: The Windows specific settings + applied to all containers. If unspecified, + the options from the PodSecurityContext + will be used. If set in both SecurityContext + and PodSecurityContext, the value specified + in SecurityContext takes precedence. + properties: + gmsaCredentialSpec: + description: GMSACredentialSpec is where + the GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) + inlines the contents of the GMSA credential + spec named by the GMSACredentialSpecName + field. + type: string + gmsaCredentialSpecName: + description: GMSACredentialSpecName is + the name of the GMSA credential spec + to use. + type: string + hostProcess: + description: HostProcess determines if + a container should be run as a 'Host + Process' container. This field is alpha-level + and will only be honored by components + that enable the WindowsHostProcessContainers + feature flag. Setting this field without + the feature flag will result in errors + when validating the Pod. All of a Pod's + containers must have the same effective + HostProcess value (it is not allowed + to have a mix of HostProcess containers + and non-HostProcess containers). In + addition, if HostProcess is true then + HostNetwork must also be set to true. 
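+            # Editor's note: illustrative sketch only. A locked-down container
+            # securityContext built from the fields documented above; all values
+            # are hypothetical:
+            #   securityContext:
+            #     allowPrivilegeEscalation: false
+            #     readOnlyRootFilesystem: true
+            #     runAsNonRoot: true
+            #     runAsUser: 1000
+            #     capabilities:
+            #       drop: ["ALL"]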
+ type: boolean + runAsUserName: + description: The UserName in Windows to + run the entrypoint of the container + process. Defaults to the user specified + in image metadata if unspecified. May + also be set in PodSecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext + takes precedence. + type: string + type: object + type: object + startupProbe: + description: 'StartupProbe indicates that the + Pod has successfully initialized. If specified, + no other probes are executed until this completes + successfully. If this probe fails, the Pod will + be restarted, just as if the livenessProbe failed. + This can be used to provide different probe + parameters at the beginning of a Pod''s lifecycle, + when it might take a long time to load data + or warm a cache, than during steady-state operation. + This cannot be updated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + properties: + exec: + description: One and only one of the following + should be specified. Exec specifies the + action to take. + properties: + command: + description: Command is the command line + to execute inside the container, the + working directory for the command is + root ('/') in the container's filesystem. + The command is simply exec'd, it is + not run inside a shell, so traditional + shell instructions ('|', etc) won't + work. To use a shell, you need to explicitly + call out to that shell. Exit status + of 0 is treated as live/healthy and + non-zero is unhealthy. + items: + type: string + type: array + type: object + failureThreshold: + description: Minimum consecutive failures + for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum + value is 1. + format: int32 + type: integer + httpGet: + description: HTTPGet specifies the http request + to perform. + properties: + host: + description: Host name to connect to, + defaults to the pod IP. You probably + want to set "Host" in httpHeaders instead. + type: string + httpHeaders: + description: Custom headers to set in + the request. HTTP allows repeated headers. + items: + description: HTTPHeader describes a + custom header to be used in HTTP probes + properties: + name: + description: The header field name + type: string + value: + description: The header field value + type: string + required: + - name + - value + type: object + type: array + path: + description: Path to access on the HTTP + server. + type: string + port: + anyOf: + - type: integer + - type: string + description: Name or number of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + scheme: + description: Scheme to use for connecting + to the host. Defaults to HTTP. + type: string + required: + - port + type: object + initialDelaySeconds: + description: 'Number of seconds after the + container has started before liveness probes + are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + periodSeconds: + description: How often (in seconds) to perform + the probe. Default to 10 seconds. Minimum + value is 1. + format: int32 + type: integer + successThreshold: + description: Minimum consecutive successes + for the probe to be considered successful + after having failed. Defaults to 1. Must + be 1 for liveness and startup. Minimum value + is 1. 
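+            # Editor's note: illustrative sketch only. A startupProbe gives a
+            # slow-starting app up to failureThreshold * periodSeconds
+            # (30 * 10 = 300s here) before the kubelet restarts it; path and
+            # port are hypothetical:
+            #   startupProbe:
+            #     httpGet:
+            #       path: /healthz
+            #       port: 8080
+            #     failureThreshold: 30
+            #     periodSeconds: 10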
+ format: int32 + type: integer + tcpSocket: + description: 'TCPSocket specifies an action + involving a TCP port. TCP hooks not yet + supported TODO: implement a realistic TCP + lifecycle hook' + properties: + host: + description: 'Optional: Host name to connect + to, defaults to the pod IP.' + type: string + port: + anyOf: + - type: integer + - type: string + description: Number or name of the port + to access on the container. Number must + be in the range 1 to 65535. Name must + be an IANA_SVC_NAME. + x-kubernetes-int-or-string: true + required: + - port + type: object + terminationGracePeriodSeconds: + description: Optional duration in seconds + the pod needs to terminate gracefully upon + probe failure. The grace period is the duration + in seconds after the processes running in + the pod are sent a termination signal and + the time when the processes are forcibly + halted with a kill signal. Set this value + longer than the expected cleanup time for + your process. If this value is nil, the + pod's terminationGracePeriodSeconds will + be used. Otherwise, this value overrides + the value provided by the pod spec. Value + must be non-negative integer. The value + zero indicates stop immediately via the + kill signal (no opportunity to shut down). + This is a beta field and requires enabling + ProbeTerminationGracePeriod feature gate. + Minimum value is 1. spec.terminationGracePeriodSeconds + is used if unset. + format: int64 + type: integer + timeoutSeconds: + description: 'Number of seconds after which + the probe times out. Defaults to 1 second. + Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes' + format: int32 + type: integer + type: object + stdin: + description: Whether this container should allocate + a buffer for stdin in the container runtime. + If this is not set, reads from stdin in the + container will always result in EOF. Default + is false. + type: boolean + stdinOnce: + description: Whether the container runtime should + close the stdin channel after it has been opened + by a single attach. When stdin is true the stdin + stream will remain open across multiple attach + sessions. If stdinOnce is set to true, stdin + is opened on container start, is empty until + the first client attaches to stdin, and then + remains open and accepts data until the client + disconnects, at which time stdin is closed and + remains closed until the container is restarted. + If this flag is false, a container process + that reads from stdin will never receive an + EOF. Default is false. + type: boolean + terminationMessagePath: + description: 'Optional: Path at which the file + to which the container''s termination message + will be written is mounted into the container''s + filesystem. Message written is intended to be + brief final status, such as an assertion failure + message. Will be truncated by the node if greater + than 4096 bytes. The total message length across + all containers will be limited to 12kb. Defaults + to /dev/termination-log. Cannot be updated.' + type: string + terminationMessagePolicy: + description: Indicates how the termination message + should be populated. File will use the contents + of terminationMessagePath to populate the container + status message on both success and failure. + FallbackToLogsOnError will use the last chunk + of container log output if the termination message + file is empty and the container exited with + an error.
The log output is limited to 2048 + bytes or 80 lines, whichever is smaller. Defaults + to File. Cannot be updated. + type: string + tty: + description: Whether this container should allocate + a TTY for itself, also requires 'stdin' to be + true. Default is false. + type: boolean + volumeDevices: + description: volumeDevices is the list of block + devices to be used by the container. + items: + description: volumeDevice describes a mapping + of a raw block device within a container. + properties: + devicePath: + description: devicePath is the path inside + of the container that the device will + be mapped to. + type: string + name: + description: name must match the name of + a persistentVolumeClaim in the pod + type: string + required: + - devicePath + - name + type: object + type: array + volumeMounts: + description: Pod volumes to mount into the container's + filesystem. Cannot be updated. + items: + description: VolumeMount describes a mounting + of a Volume within a container. + properties: + mountPath: + description: Path within the container at + which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: mountPropagation determines + how mounts are propagated from the host + to container and the other way around. + When not set, MountPropagationNone is + used. This field is beta in 1.10. + type: string + name: + description: This must match the Name of + a Volume. + type: string + readOnly: + description: Mounted read-only if true, + read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + subPath: + description: Path within the volume from + which the container's volume should be + mounted. Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should + be mounted. Behaves similarly to SubPath + but environment variable references $(VAR_NAME) + are expanded using the container's environment. + Defaults to "" (volume's root). SubPathExpr + and SubPath are mutually exclusive. + type: string + required: + - mountPath + - name + type: object + type: array + workingDir: + description: Container's working directory. If + not specified, the container runtime's default + will be used, which might be configured in the + container image. Cannot be updated. + type: string + required: + - name + type: object + type: array + nodeName: + description: NodeName is a request to schedule this + pod onto a specific node. If it is non-empty, the + scheduler simply schedules this pod onto that node, + assuming that it fits resource requirements. + type: string + nodeSelector: + additionalProperties: + type: string + description: 'NodeSelector is a selector which must + be true for the pod to fit on a node. Selector which + must match a node''s labels for the pod to be scheduled + on that node. More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/' + type: object + x-kubernetes-map-type: atomic + overhead: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Overhead represents the resource overhead + associated with running a pod for a given RuntimeClass. + This field will be autopopulated at admission time + by the RuntimeClass admission controller. 
If the RuntimeClass + admission controller is enabled, overhead must not + be set in Pod create requests. The RuntimeClass admission + controller will reject Pod create requests which have + the overhead already set. If RuntimeClass is configured + and selected in the PodSpec, Overhead will be set + to the value defined in the corresponding RuntimeClass, + otherwise it will remain unset and treated as zero. + More info: https://git.k8s.io/enhancements/keps/sig-node/688-pod-overhead/README.md + This field is beta-level as of Kubernetes v1.18, and + is only honored by servers that enable the PodOverhead + feature.' + type: object + preemptionPolicy: + description: PreemptionPolicy is the Policy for preempting + pods with lower priority. One of Never, PreemptLowerPriority. + Defaults to PreemptLowerPriority if unset. This field + is beta-level, gated by the NonPreemptingPriority + feature-gate. + type: string + priority: + description: The priority value. Various system components + use this field to find the priority of the pod. When + Priority Admission Controller is enabled, it prevents + users from setting this field. The admission controller + populates this field from PriorityClassName. The higher + the value, the higher the priority. + format: int32 + type: integer + priorityClassName: + description: If specified, indicates the pod's priority. + "system-node-critical" and "system-cluster-critical" + are two special keywords which indicate the highest + priorities with the former being the highest priority. + Any other name must be defined by creating a PriorityClass + object with that name. If not specified, the pod priority + will be default or zero if there is no default. + type: string + readinessGates: + description: 'If specified, all readiness gates will + be evaluated for pod readiness. A pod is ready when + all its containers are ready AND all conditions specified + in the readiness gates have status equal to "True" + More info: https://git.k8s.io/enhancements/keps/sig-network/580-pod-readiness-gates' + items: + description: PodReadinessGate contains the reference + to a pod condition + properties: + conditionType: + description: ConditionType refers to a condition + in the pod's condition list with matching type. + type: string + required: + - conditionType + type: object + type: array + restartPolicy: + description: 'Restart policy for all containers within + the pod. One of Always, OnFailure, Never. Default + to Always. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy' + type: string + runtimeClassName: + description: 'RuntimeClassName refers to a RuntimeClass + object in the node.k8s.io group, which should be used + to run this pod. If no RuntimeClass resource matches + the named class, the pod will not be run. If unset + or empty, the "legacy" RuntimeClass will be used, + which is an implicit class with an empty definition + that uses the default runtime handler. More info: + https://git.k8s.io/enhancements/keps/sig-node/585-runtime-class + This is a beta feature as of Kubernetes v1.14.' + type: string + schedulerName: + description: If specified, the pod will be dispatched + by specified scheduler. If not specified, the pod + will be dispatched by default scheduler. + type: string + securityContext: + description: 'SecurityContext holds pod-level security + attributes and common container settings. Optional: + Defaults to empty. See type description for default + values of each field.' 
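+            # Editor's note: illustrative sketch only. Scheduling-related fields
+            # described above; the PriorityClass name is hypothetical and must
+            # exist in the cluster:
+            #   priorityClassName: high-priority
+            #   preemptionPolicy: PreemptLowerPriority
+            #   restartPolicy: Always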
+ properties: + fsGroup: + description: "A special supplemental group that + applies to all containers in a pod. Some volume + types allow the Kubelet to change the ownership + of that volume to be owned by the pod: \n 1. The + owning GID will be the FSGroup 2. The setgid bit + is set (new files created in the volume will be + owned by FSGroup) 3. The permission bits are OR'd + with rw-rw---- \n If unset, the Kubelet will not + modify the ownership and permissions of any volume." + format: int64 + type: integer + fsGroupChangePolicy: + description: 'fsGroupChangePolicy defines behavior + of changing ownership and permission of the volume + before being exposed inside Pod. This field will + only apply to volume types which support fsGroup + based ownership(and permissions). It will have + no effect on ephemeral volume types such as: secret, + configmaps and emptydir. Valid values are "OnRootMismatch" + and "Always". If not specified, "Always" is used.' + type: string + runAsGroup: + description: The GID to run the entrypoint of the + container process. Uses runtime default if unset. + May also be set in SecurityContext. If set in + both SecurityContext and PodSecurityContext, the + value specified in SecurityContext takes precedence + for that container. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container must run + as a non-root user. If true, the Kubelet will + validate the image at runtime to ensure that it + does not run as UID 0 (root) and fail to start + the container if it does. If unset or false, no + such validation will be performed. May also be + set in SecurityContext. If set in both SecurityContext + and PodSecurityContext, the value specified in + SecurityContext takes precedence. + type: boolean + runAsUser: + description: The UID to run the entrypoint of the + container process. Defaults to user specified + in image metadata if unspecified. May also be + set in SecurityContext. If set in both SecurityContext + and PodSecurityContext, the value specified in + SecurityContext takes precedence for that container. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied to + all containers. If unspecified, the container + runtime will allocate a random SELinux context + for each container. May also be set in SecurityContext. If + set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes precedence + for that container. + properties: + level: + description: Level is SELinux level label that + applies to the container. + type: string + role: + description: Role is a SELinux role label that + applies to the container. + type: string + type: + description: Type is a SELinux type label that + applies to the container. + type: string + user: + description: User is a SELinux user label that + applies to the container. + type: string + type: object + seccompProfile: + description: The seccomp options to use by the containers + in this pod. + properties: + localhostProfile: + description: localhostProfile indicates a profile + defined in a file on the node should be used. + The profile must be preconfigured on the node + to work. Must be a descending path, relative + to the kubelet's configured seccomp profile + location. Must only be set if type is "Localhost". + type: string + type: + description: "type indicates which kind of seccomp + profile will be applied. Valid options are: + \n Localhost - a profile defined in a file + on the node should be used. 
RuntimeDefault + - the container runtime default profile should + be used. Unconfined - no profile should be + applied." + type: string + required: + - type + type: object + supplementalGroups: + description: A list of groups applied to the first + process run in each container, in addition to + the container's primary GID. If unspecified, + no groups will be added to any container. + items: + format: int64 + type: integer + type: array + sysctls: + description: Sysctls hold a list of namespaced sysctls + used for the pod. Pods with unsupported sysctls + (by the container runtime) might fail to launch. + items: + description: Sysctl defines a kernel parameter + to be set + properties: + name: + description: Name of a property to set + type: string + value: + description: Value of a property to set + type: string + required: + - name + - value + type: object + type: array + windowsOptions: + description: The Windows specific settings applied + to all containers. If unspecified, the options + within a container's SecurityContext will be used. + If set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes precedence. + properties: + gmsaCredentialSpec: + description: GMSACredentialSpec is where the + GMSA admission webhook (https://github.com/kubernetes-sigs/windows-gmsa) + inlines the contents of the GMSA credential + spec named by the GMSACredentialSpecName field. + type: string + gmsaCredentialSpecName: + description: GMSACredentialSpecName is the name + of the GMSA credential spec to use. + type: string + hostProcess: + description: HostProcess determines if a container + should be run as a 'Host Process' container. + This field is alpha-level and will only be + honored by components that enable the WindowsHostProcessContainers + feature flag. Setting this field without the + feature flag will result in errors when validating + the Pod. All of a Pod's containers must have + the same effective HostProcess value (it is + not allowed to have a mix of HostProcess containers + and non-HostProcess containers). In addition, + if HostProcess is true then HostNetwork must + also be set to true. + type: boolean + runAsUserName: + description: The UserName in Windows to run + the entrypoint of the container process. Defaults + to the user specified in image metadata if + unspecified. May also be set in PodSecurityContext. + If set in both SecurityContext and PodSecurityContext, + the value specified in SecurityContext takes + precedence. + type: string + type: object + type: object + serviceAccount: + description: 'DeprecatedServiceAccount is a deprecated + alias for ServiceAccountName. Deprecated: Use serviceAccountName + instead.' + type: string + serviceAccountName: + description: 'ServiceAccountName is the name of the + ServiceAccount to use to run this pod. More info: + https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/' + type: string + setHostnameAsFQDN: + description: If true the pod's hostname will be configured + as the pod's FQDN, rather than the leaf name (the + default). In Linux containers, this means setting + the FQDN in the hostname field of the kernel (the + nodename field of struct utsname). In Windows containers, + this means setting the registry value of hostname + for the registry key HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters + to FQDN. If a pod does not have FQDN, this has no + effect. Default to false.
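+            # Editor's note: illustrative sketch only. A pod-level
+            # securityContext combining the fields above; UIDs/GIDs are
+            # hypothetical:
+            #   securityContext:
+            #     runAsNonRoot: true
+            #     runAsUser: 1000
+            #     fsGroup: 2000
+            #     seccompProfile:
+            #       type: RuntimeDefault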
+ type: boolean + shareProcessNamespace: + description: 'Share a single process namespace between + all of the containers in a pod. When this is set containers + will be able to view and signal processes from other + containers in the same pod, and the first process + in each container will not be assigned PID 1. HostPID + and ShareProcessNamespace cannot both be set. Optional: + Default to false.' + type: boolean + subdomain: + description: If specified, the fully qualified Pod hostname + will be "<hostname>.<subdomain>.<pod namespace>.svc.<cluster domain>". If not specified, the pod will not have + a domainname at all. + type: string + terminationGracePeriodSeconds: + description: Optional duration in seconds the pod needs + to terminate gracefully. May be decreased in delete + request. Value must be non-negative integer. The value + zero indicates stop immediately via the kill signal + (no opportunity to shut down). If this value is nil, + the default grace period will be used instead. The + grace period is the duration in seconds after the + processes running in the pod are sent a termination + signal and the time when the processes are forcibly + halted with a kill signal. Set this value longer than + the expected cleanup time for your process. Defaults + to 30 seconds. + format: int64 + type: integer + tolerations: + description: If specified, the pod's tolerations. + items: + description: The pod this Toleration is attached to + tolerates any taint that matches the triple <key,value,effect> + using the matching operator <operator>. + properties: + effect: + description: Effect indicates the taint effect + to match. Empty means match all taint effects. + When specified, allowed values are NoSchedule, + PreferNoSchedule and NoExecute. + type: string + key: + description: Key is the taint key that the toleration + applies to. Empty means match all taint keys. + If the key is empty, operator must be Exists; + this combination means to match all values and + all keys. + type: string + operator: + description: Operator represents a key's relationship + to the value. Valid operators are Exists and + Equal. Defaults to Equal. Exists is equivalent + to wildcard for value, so that a pod can tolerate + all taints of a particular category. + type: string + tolerationSeconds: + description: TolerationSeconds represents the + period of time the toleration (which must be + of effect NoExecute, otherwise this field is + ignored) tolerates the taint. By default, it + is not set, which means tolerate the taint forever + (do not evict). Zero and negative values will + be treated as 0 (evict immediately) by the system. + format: int64 + type: integer + value: + description: Value is the taint value the toleration + matches to. If the operator is Exists, the value + should be empty, otherwise just a regular string. + type: string + type: object + type: array + topologySpreadConstraints: + description: TopologySpreadConstraints describes how + a group of pods ought to spread across topology domains. + Scheduler will schedule pods in a way which abides + by the constraints. All topologySpreadConstraints + are ANDed. + items: + description: TopologySpreadConstraint specifies how + to spread matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching + pods. Pods that match this label selector are + counted to determine the number of pods in their + corresponding topology domain. + properties: + matchExpressions: + description: matchExpressions is a list of + label selector requirements. The requirements + are ANDed.
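+            # Editor's note: illustrative sketch only. A toleration for a
+            # hypothetical dedicated-node taint, using the key/operator/value/
+            # effect semantics documented above:
+            #   tolerations:
+            #     - key: dedicated
+            #       operator: Equal
+            #       value: fedlearner
+            #       effect: NoSchedule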
+ items: + description: A label selector requirement + is a selector that contains values, a + key, and an operator that relates the + key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: operator represents a key's + relationship to a set of values. Valid + operators are In, NotIn, Exists and + DoesNotExist. + type: string + values: + description: values is an array of string + values. If the operator is In or NotIn, + the values array must be non-empty. + If the operator is Exists or DoesNotExist, + the values array must be empty. This + array is replaced during a strategic + merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. A single {key,value} in the matchLabels + map is equivalent to an element of matchExpressions, + whose key field is "key", the operator is + "In", and the values array contains only + "value". The requirements are ANDed. + type: object + type: object + maxSkew: + description: 'MaxSkew describes the degree to + which pods may be unevenly distributed. When + `whenUnsatisfiable=DoNotSchedule`, it is the + maximum permitted difference between the number + of matching pods in the target topology and + the global minimum. For example, in a 3-zone + cluster, MaxSkew is set to 1, and pods with + the same labelSelector spread as 1/1/0: | zone1 + | zone2 | zone3 | | P | P | | + - if MaxSkew is 1, incoming pod can only be + scheduled to zone3 to become 1/1/1; scheduling + it onto zone1(zone2) would make the ActualSkew(2-0) + on zone1(zone2) violate MaxSkew(1). - if MaxSkew + is 2, incoming pod can be scheduled onto any + zone. When `whenUnsatisfiable=ScheduleAnyway`, + it is used to give higher precedence to topologies + that satisfy it. It''s a required field. Default + value is 1 and 0 is not allowed.' + format: int32 + type: integer + topologyKey: + description: TopologyKey is the key of node labels. + Nodes that have a label with this key and identical + values are considered to be in the same topology. + We consider each <key, value> as a "bucket", + and try to put balanced number of pods into + each bucket. It's a required field. + type: string + whenUnsatisfiable: + description: 'WhenUnsatisfiable indicates how + to deal with a pod if it doesn''t satisfy the + spread constraint. - DoNotSchedule (default) + tells the scheduler not to schedule it. - ScheduleAnyway + tells the scheduler to schedule the pod in any + location, but giving higher precedence to + topologies that would help reduce the skew. + A constraint is considered "Unsatisfiable" for + an incoming pod if and only if every possible + node assignment for that pod would violate "MaxSkew" + on some topology. For example, in a 3-zone cluster, + MaxSkew is set to 1, and pods with the same + labelSelector spread as 3/1/1: | zone1 | zone2 + | zone3 | | P P P | P | P | If WhenUnsatisfiable + is set to DoNotSchedule, incoming pod can only + be scheduled to zone2(zone3) to become 3/2/1(3/1/2) + as ActualSkew(2-1) on zone2(zone3) satisfies + MaxSkew(1). In other words, the cluster can + still be imbalanced, but scheduler won''t make + it *more* imbalanced. It''s a required field.'
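+            # Editor's note: illustrative sketch only. A constraint spreading
+            # matching pods evenly across zones; the app label is hypothetical:
+            #   topologySpreadConstraints:
+            #     - maxSkew: 1
+            #       topologyKey: topology.kubernetes.io/zone
+            #       whenUnsatisfiable: DoNotSchedule
+            #       labelSelector:
+            #         matchLabels:
+            #           app: my-app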
+ type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array + x-kubernetes-list-map-keys: + - topologyKey + - whenUnsatisfiable + x-kubernetes-list-type: map + volumes: + description: 'List of volumes that can be mounted by + containers belonging to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes' + items: + description: Volume represents a named volume in a + pod that may be accessed by any container in the + pod. + properties: + awsElasticBlockStore: + description: 'AWSElasticBlockStore represents + an AWS Disk resource that is attached to a kubelet''s + host machine and then exposed to the pod. More + info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore' + properties: + fsType: + description: 'Filesystem type of the volume + that you want to mount. Tip: Ensure that + the filesystem type is supported by the + host operating system. Examples: "ext4", + "xfs", "ntfs". Implicitly inferred to be + "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + TODO: how do we prevent errors in the filesystem + from compromising the machine' + type: string + partition: + description: 'The partition in the volume + that you want to mount. If omitted, the + default is to mount by volume name. Examples: + For volume /dev/sda1, you specify the partition + as "1". Similarly, the volume partition + for /dev/sda is "0" (or you can leave the + property empty).' + format: int32 + type: integer + readOnly: + description: 'Specify "true" to force and + set the ReadOnly property in VolumeMounts + to "true". If omitted, the default is "false". + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore' + type: boolean + volumeID: + description: 'Unique ID of the persistent + disk resource in AWS (Amazon EBS volume). + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore' + type: string + required: + - volumeID + type: object + azureDisk: + description: AzureDisk represents an Azure Data + Disk mount on the host and bind mount to the + pod. + properties: + cachingMode: + description: 'Host Caching mode: None, Read + Only, Read Write.' + type: string + diskName: + description: The Name of the data disk in + the blob storage + type: string + diskURI: + description: The URI the data disk in the + blob storage + type: string + fsType: + description: Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". + Implicitly inferred to be "ext4" if unspecified. + type: string + kind: + description: 'Expected values Shared: multiple + blob disks per storage account Dedicated: + single blob disk per storage account Managed: + azure managed data disk (only in managed + availability set). defaults to shared' + type: string + readOnly: + description: Defaults to false (read/write). + ReadOnly here will force the ReadOnly setting + in VolumeMounts. + type: boolean + required: + - diskName + - diskURI + type: object + azureFile: + description: AzureFile represents an Azure File + Service mount on the host and bind mount to + the pod. + properties: + readOnly: + description: Defaults to false (read/write). + ReadOnly here will force the ReadOnly setting + in VolumeMounts. 
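+            # Editor's note: illustrative sketch only. An awsElasticBlockStore
+            # volume as documented above; the volume ID is a placeholder:
+            #   volumes:
+            #     - name: ebs-data
+            #       awsElasticBlockStore:
+            #         volumeID: vol-0123456789abcdef0
+            #         fsType: ext4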
+ type: boolean + secretName: + description: the name of secret that contains + Azure Storage Account Name and Key + type: string + shareName: + description: Share Name + type: string + required: + - secretName + - shareName + type: object + cephfs: + description: CephFS represents a Ceph FS mount + on the host that shares a pod's lifetime + properties: + monitors: + description: 'Required: Monitors is a collection + of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it' + items: + type: string + type: array + path: + description: 'Optional: Used as the mounted + root, rather than the full Ceph tree, default + is /' + type: string + readOnly: + description: 'Optional: Defaults to false + (read/write). ReadOnly here will force the + ReadOnly setting in VolumeMounts. More info: + https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it' + type: boolean + secretFile: + description: 'Optional: SecretFile is the + path to key ring for User, default is /etc/ceph/user.secret + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it' + type: string + secretRef: + description: 'Optional: SecretRef is reference + to the authentication secret for User, default + is empty. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it' + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + user: + description: 'Optional: User is the rados + user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it' + type: string + required: + - monitors + type: object + cinder: + description: 'Cinder represents a cinder volume + attached and mounted on kubelets host machine. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md' + properties: + fsType: + description: 'Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Examples: "ext4", "xfs", + "ntfs". Implicitly inferred to be "ext4" + if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md' + type: string + readOnly: + description: 'Optional: Defaults to false + (read/write). ReadOnly here will force the + ReadOnly setting in VolumeMounts. More info: + https://examples.k8s.io/mysql-cinder-pd/README.md' + type: boolean + secretRef: + description: 'Optional: points to a secret + object containing parameters used to connect + to OpenStack.' + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + volumeID: + description: 'volume id used to identify the + volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md' + type: string + required: + - volumeID + type: object + configMap: + description: ConfigMap represents a configMap + that should populate this volume + properties: + defaultMode: + description: 'Optional: mode bits used to + set permissions on created files by default. + Must be an octal value between 0000 and + 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, + JSON requires decimal values for mode bits. + Defaults to 0644. Directories within the + path are not affected by this setting. 
This + might be in conflict with other options + that affect the file mode, like fsGroup, + and the result can be other mode bits set.' + format: int32 + type: integer + items: + description: If unspecified, each key-value + pair in the Data field of the referenced + ConfigMap will be projected into the volume + as a file whose name is the key and content + is the value. If specified, the listed keys + will be projected into the specified paths, + and unlisted keys will not be present. If + a key is specified which is not present + in the ConfigMap, the volume setup will + error unless it is marked optional. Paths + must be relative and may not contain the + '..' path or start with '..'. + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: The key to project. + type: string + mode: + description: 'Optional: mode bits used + to set permissions on this file. Must + be an octal value between 0000 and + 0777 or a decimal value between 0 + and 511. YAML accepts both octal and + decimal values, JSON requires decimal + values for mode bits. If not specified, + the volume defaultMode will be used. + This might be in conflict with other + options that affect the file mode, + like fsGroup, and the result can be + other mode bits set.' + format: int32 + type: integer + path: + description: The relative path of the + file to map the key to. May not be + an absolute path. May not contain + the path element '..'. May not start + with the string '..'. + type: string + required: + - key + - path + type: object + type: array + name: + description: 'Name of the referent. More info: + https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + optional: + description: Specify whether the ConfigMap + or its keys must be defined + type: boolean + type: object + csi: + description: CSI (Container Storage Interface) + represents ephemeral storage that is handled + by certain external CSI drivers (Beta feature). + properties: + driver: + description: Driver is the name of the CSI + driver that handles this volume. Consult + with your admin for the correct name as + registered in the cluster. + type: string + fsType: + description: Filesystem type to mount. Ex. + "ext4", "xfs", "ntfs". If not provided, + the empty value is passed to the associated + CSI driver which will determine the default + filesystem to apply. + type: string + nodePublishSecretRef: + description: NodePublishSecretRef is a reference + to the secret object containing sensitive + information to pass to the CSI driver to + complete the CSI NodePublishVolume and NodeUnpublishVolume + calls. This field is optional, and may + be empty if no secret is required. If the + secret object contains more than one secret, + all secret references are passed. + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + readOnly: + description: Specifies a read-only configuration + for the volume. Defaults to false (read/write). + type: boolean + volumeAttributes: + additionalProperties: + type: string + description: VolumeAttributes stores driver-specific + properties that are passed to the CSI driver. + Consult your driver's documentation for + supported values. 
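+            # Editor's note: illustrative sketch only. A configMap volume using
+            # the items and defaultMode fields above; 420 decimal equals octal
+            # 0644, the form JSON-based clients must use. Names and paths are
+            # hypothetical:
+            #   volumes:
+            #     - name: app-config
+            #       configMap:
+            #         name: my-config
+            #         defaultMode: 420
+            #         items:
+            #           - key: settings.conf
+            #             path: settings.conf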
+ type: object + required: + - driver + type: object + downwardAPI: + description: DownwardAPI represents downward API + about the pod that should populate this volume + properties: + defaultMode: + description: 'Optional: mode bits used to set + permissions on created files by default. + Must be an octal value between 0000 and + 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, + JSON requires decimal values for mode bits. + Defaults to 0644. Directories within the + path are not affected by this setting. This + might be in conflict with other options + that affect the file mode, like fsGroup, + and the result can be other mode bits set.' + format: int32 + type: integer + items: + description: Items is a list of downward API + volume files + items: + description: DownwardAPIVolumeFile represents + information to create the file containing + the pod field + properties: + fieldRef: + description: 'Required: Selects a field + of the pod: only annotations, labels, + name and namespace are supported.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + mode: + description: 'Optional: mode bits used + to set permissions on this file, must + be an octal value between 0000 and + 0777 or a decimal value between 0 + and 511. YAML accepts both octal and + decimal values, JSON requires decimal + values for mode bits. If not specified, + the volume defaultMode will be used. + This might be in conflict with other + options that affect the file mode, + like fsGroup, and the result can be + other mode bits set.' + format: int32 + type: integer + path: + description: 'Required: Path is the + relative path name of the file to + be created. Must not be absolute or + contain the ''..'' path. Must be utf-8 + encoded. The first item of the relative + path must not start with ''..''' + type: string + resourceFieldRef: + description: 'Selects a resource of + the container: only resources limits + and requests (limits.cpu, limits.memory, + requests.cpu and requests.memory) + are currently supported.' + properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + required: + - path + type: object + type: array + type: object + emptyDir: + description: 'EmptyDir represents a temporary + directory that shares a pod''s lifetime. More + info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir' + properties: + medium: + description: 'What type of storage medium + should back this directory. The default + is "" which means to use the node''s default + medium. Must be an empty string (default) + or Memory.
More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir' + type: string + sizeLimit: + anyOf: + - type: integer + - type: string + description: 'Total amount of local storage + required for this EmptyDir volume. The size + limit is also applicable for memory medium. + The maximum usage on memory medium EmptyDir + would be the minimum value between the SizeLimit + specified here and the sum of memory limits + of all containers in a pod. The default + is nil which means that the limit is undefined. + More info: http://kubernetes.io/docs/user-guide/volumes#emptydir' + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + ephemeral: + description: "Ephemeral represents a volume that + is handled by a cluster storage driver. The + volume's lifecycle is tied to the pod that defines + it - it will be created before the pod starts, + and deleted when the pod is removed. \n Use + this if: a) the volume is only needed while + the pod runs, b) features of normal volumes + like restoring from snapshot or capacity tracking + are needed, c) the storage driver is specified + through a storage class, and d) the storage + driver supports dynamic volume provisioning + through a PersistentVolumeClaim (see EphemeralVolumeSource + for more information on the connection between + this volume type and PersistentVolumeClaim). + \n Use PersistentVolumeClaim or one of the vendor-specific + APIs for volumes that persist for longer than + the lifecycle of an individual pod. \n Use CSI + for light-weight local ephemeral volumes if + the CSI driver is meant to be used that way + - see the documentation of the driver for more + information. \n A pod can use both types of + ephemeral volumes and persistent volumes at + the same time. \n This is a beta feature and + only available when the GenericEphemeralVolume + feature gate is enabled." + properties: + volumeClaimTemplate: + description: "Will be used to create a stand-alone + PVC to provision the volume. The pod in + which this EphemeralVolumeSource is embedded + will be the owner of the PVC, i.e. the PVC + will be deleted together with the pod. The + name of the PVC will be `<pod name>-<volume name>` + where `<volume name>` is the name + from the `PodSpec.Volumes` array entry. + Pod validation will reject the pod if the + concatenated name is not valid for a PVC + (for example, too long). \n An existing + PVC with that name that is not owned by + the pod will *not* be used for the pod to + avoid using an unrelated volume by mistake. + Starting the pod is then blocked until the + unrelated PVC is removed. If such a pre-created + PVC is meant to be used by the pod, the + PVC has to be updated with an owner reference + to the pod once the pod exists. Normally + this should not be necessary, but it may + be useful when manually reconstructing a + broken cluster. \n This field is read-only + and no changes will be made by Kubernetes + to the PVC after it has been created. \n + Required, must not be nil." + properties: + metadata: + description: May contain labels and annotations + that will be copied into the PVC when + creating it. No other fields are allowed + and will be rejected during validation. + type: object + spec: + description: The specification for the + PersistentVolumeClaim. The entire content + is copied unchanged into the PVC that + gets created from this template. The + same fields as in a PersistentVolumeClaim + are also valid here.
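+            # Editor's note: illustrative sketch only. A generic ephemeral
+            # volume whose PVC is created with, owned by, and deleted with the
+            # pod, per the description above; storage class and size are
+            # hypothetical:
+            #   volumes:
+            #     - name: scratch
+            #       ephemeral:
+            #         volumeClaimTemplate:
+            #           spec:
+            #             accessModes: ["ReadWriteOnce"]
+            #             storageClassName: standard
+            #             resources:
+            #               requests:
+            #                 storage: 10Gi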
+ properties: + accessModes: + description: 'AccessModes contains + the desired access modes the volume + should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1' + items: + type: string + type: array + dataSource: + description: 'This field can be used + to specify either: * An existing + VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot) + * An existing PVC (PersistentVolumeClaim) + If the provisioner or an external + controller can support the specified + data source, it will create a new + volume based on the contents of + the specified data source. If the + AnyVolumeDataSource feature gate + is enabled, this field will always + have the same contents as the DataSourceRef + field.' + properties: + apiGroup: + description: APIGroup is the group + for the resource being referenced. + If APIGroup is not specified, + the specified Kind must be in + the core API group. For any + other third-party types, APIGroup + is required. + type: string + kind: + description: Kind is the type + of resource being referenced + type: string + name: + description: Name is the name + of resource being referenced + type: string + required: + - kind + - name + type: object + dataSourceRef: + description: 'Specifies the object + from which to populate the volume + with data, if a non-empty volume + is desired. This may be any local + object from a non-empty API group + (non core object) or a PersistentVolumeClaim + object. When this field is specified, + volume binding will only succeed + if the type of the specified object + matches some installed volume populator + or dynamic provisioner. This field + will replace the functionality of + the DataSource field and as such + if both fields are non-empty, they + must have the same value. For backwards + compatibility, both fields (DataSource + and DataSourceRef) will be set to + the same value automatically if + one of them is empty and the other + is non-empty. There are two important + differences between DataSource and + DataSourceRef: * While DataSource + only allows two specific types of + objects, DataSourceRef allows + any non-core object, as well as + PersistentVolumeClaim objects. * + While DataSource ignores disallowed + values (dropping them), DataSourceRef preserves + all values, and generates an error + if a disallowed value is specified. + (Alpha) Using this field requires + the AnyVolumeDataSource feature + gate to be enabled.' + properties: + apiGroup: + description: APIGroup is the group + for the resource being referenced. + If APIGroup is not specified, + the specified Kind must be in + the core API group. For any + other third-party types, APIGroup + is required. + type: string + kind: + description: Kind is the type + of resource being referenced + type: string + name: + description: Name is the name + of resource being referenced + type: string + required: + - kind + - name + type: object + resources: + description: 'Resources represents + the minimum resources the volume + should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources' + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Limits describes + the maximum amount of compute + resources allowed. 
More info: + https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: 'Requests describes + the minimum amount of compute + resources required. If Requests + is omitted for a container, + it defaults to Limits if that + is explicitly specified, otherwise + to an implementation-defined + value. More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' + type: object + type: object + selector: + description: A label query over volumes + to consider for binding. + properties: + matchExpressions: + description: matchExpressions + is a list of label selector + requirements. The requirements + are ANDed. + items: + description: A label selector + requirement is a selector + that contains values, a key, + and an operator that relates + the key and values. + properties: + key: + description: key is the + label key that the selector + applies to. + type: string + operator: + description: operator represents + a key's relationship to + a set of values. Valid + operators are In, NotIn, + Exists and DoesNotExist. + type: string + values: + description: values is an + array of string values. + If the operator is In + or NotIn, the values array + must be non-empty. If + the operator is Exists + or DoesNotExist, the values + array must be empty. This + array is replaced during + a strategic merge patch. + items: + type: string + type: array + required: + - key + - operator + type: object + type: array + matchLabels: + additionalProperties: + type: string + description: matchLabels is a + map of {key,value} pairs. A + single {key,value} in the matchLabels + map is equivalent to an element + of matchExpressions, whose key + field is "key", the operator + is "In", and the values array + contains only "value". The requirements + are ANDed. + type: object + type: object + storageClassName: + description: 'Name of the StorageClass + required by the claim. More info: + https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1' + type: string + volumeMode: + description: volumeMode defines what + type of volume is required by the + claim. Value of Filesystem is implied + when not included in claim spec. + type: string + volumeName: + description: VolumeName is the binding + reference to the PersistentVolume + backing this claim. + type: string + type: object + required: + - spec + type: object + type: object + fc: + description: FC represents a Fibre Channel resource + that is attached to a kubelet's host machine + and then exposed to the pod. + properties: + fsType: + description: 'Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". + Implicitly inferred to be "ext4" if unspecified. + TODO: how do we prevent errors in the filesystem + from compromising the machine' + type: string + lun: + description: 'Optional: FC target lun number' + format: int32 + type: integer + readOnly: + description: 'Optional: Defaults to false + (read/write). ReadOnly here will force the + ReadOnly setting in VolumeMounts.' 
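+            # Editor's note: illustrative sketch only. A Fibre Channel volume
+            # using the fields documented here; the WWN and lun are
+            # placeholders:
+            #   volumes:
+            #     - name: fc-data
+            #       fc:
+            #         targetWWNs: ["500a0981891b8dc5"]
+            #         lun: 0
+            #         fsType: ext4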
+ type: boolean + targetWWNs: + description: 'Optional: FC target worldwide + names (WWNs)' + items: + type: string + type: array + wwids: + description: 'Optional: FC volume world wide + identifiers (wwids) Either wwids or combination + of targetWWNs and lun must be set, but not + both simultaneously.' + items: + type: string + type: array + type: object + flexVolume: + description: FlexVolume represents a generic volume + resource that is provisioned/attached using + an exec based plugin. + properties: + driver: + description: Driver is the name of the driver + to use for this volume. + type: string + fsType: + description: Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". + The default filesystem depends on FlexVolume + script. + type: string + options: + additionalProperties: + type: string + description: 'Optional: Extra command options + if any.' + type: object + readOnly: + description: 'Optional: Defaults to false + (read/write). ReadOnly here will force the + ReadOnly setting in VolumeMounts.' + type: boolean + secretRef: + description: 'Optional: SecretRef is reference + to the secret object containing sensitive + information to pass to the plugin scripts. + This may be empty if no secret object is + specified. If the secret object contains + more than one secret, all secrets are passed + to the plugin scripts.' + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + required: + - driver + type: object + flocker: + description: Flocker represents a Flocker volume + attached to a kubelet's host machine. This depends + on the Flocker control service being running + properties: + datasetName: + description: Name of the dataset stored as + metadata -> name on the dataset for Flocker + should be considered as deprecated + type: string + datasetUUID: + description: UUID of the dataset. This is + unique identifier of a Flocker dataset + type: string + type: object + gcePersistentDisk: + description: 'GCEPersistentDisk represents a GCE + Disk resource that is attached to a kubelet''s + host machine and then exposed to the pod. More + info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk' + properties: + fsType: + description: 'Filesystem type of the volume + that you want to mount. Tip: Ensure that + the filesystem type is supported by the + host operating system. Examples: "ext4", + "xfs", "ntfs". Implicitly inferred to be + "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + TODO: how do we prevent errors in the filesystem + from compromising the machine' + type: string + partition: + description: 'The partition in the volume + that you want to mount. If omitted, the + default is to mount by volume name. Examples: + For volume /dev/sda1, you specify the partition + as "1". Similarly, the volume partition + for /dev/sda is "0" (or you can leave the + property empty). More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk' + format: int32 + type: integer + pdName: + description: 'Unique name of the PD resource + in GCE. Used to identify the disk in GCE. 
+ More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk' + type: string + readOnly: + description: 'ReadOnly here will force the + ReadOnly setting in VolumeMounts. Defaults + to false. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk' + type: boolean + required: + - pdName + type: object + gitRepo: + description: 'GitRepo represents a git repository + at a particular revision. DEPRECATED: GitRepo + is deprecated. To provision a container with + a git repo, mount an EmptyDir into an InitContainer + that clones the repo using git, then mount the + EmptyDir into the Pod''s container.' + properties: + directory: + description: Target directory name. Must not + contain or start with '..'. If '.' is supplied, + the volume directory will be the git repository. Otherwise, + if specified, the volume will contain the + git repository in the subdirectory with + the given name. + type: string + repository: + description: Repository URL + type: string + revision: + description: Commit hash for the specified + revision. + type: string + required: + - repository + type: object + glusterfs: + description: 'Glusterfs represents a Glusterfs + mount on the host that shares a pod''s lifetime. + More info: https://examples.k8s.io/volumes/glusterfs/README.md' + properties: + endpoints: + description: 'EndpointsName is the endpoint + name that details Glusterfs topology. More + info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod' + type: string + path: + description: 'Path is the Glusterfs volume + path. More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod' + type: string + readOnly: + description: 'ReadOnly here will force the + Glusterfs volume to be mounted with read-only + permissions. Defaults to false. More info: + https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod' + type: boolean + required: + - endpoints + - path + type: object + hostPath: + description: 'HostPath represents a pre-existing + file or directory on the host machine that is + directly exposed to the container. This is generally + used for system agents or other privileged things + that are allowed to see the host machine. Most + containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + --- TODO(jonesdl) We need to restrict who can + use host directory mounts and who can/can not + mount host directories as read/write.' + properties: + path: + description: 'Path of the directory on the + host. If the path is a symlink, it will + follow the link to the real path. More info: + https://kubernetes.io/docs/concepts/storage/volumes#hostpath' + type: string + type: + description: 'Type for HostPath Volume Defaults + to "" More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath' + type: string + required: + - path + type: object + iscsi: + description: 'ISCSI represents an ISCSI Disk resource + that is attached to a kubelet''s host machine + and then exposed to the pod. More info: https://examples.k8s.io/volumes/iscsi/README.md' + properties: + chapAuthDiscovery: + description: whether support iSCSI Discovery + CHAP authentication + type: boolean + chapAuthSession: + description: whether support iSCSI Session + CHAP authentication + type: boolean + fsType: + description: 'Filesystem type of the volume + that you want to mount. Tip: Ensure that + the filesystem type is supported by the + host operating system. Examples: "ext4", + "xfs", "ntfs". 
Implicitly inferred to be + "ext4" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi + TODO: how do we prevent errors in the filesystem + from compromising the machine' + type: string + initiatorName: + description: Custom iSCSI Initiator Name. + If initiatorName is specified with iscsiInterface + simultaneously, new iSCSI interface : will be created for + the connection. + type: string + iqn: + description: Target iSCSI Qualified Name. + type: string + iscsiInterface: + description: iSCSI Interface Name that uses + an iSCSI transport. Defaults to 'default' + (tcp). + type: string + lun: + description: iSCSI Target Lun number. + format: int32 + type: integer + portals: + description: iSCSI Target Portal List. The + portal is either an IP or ip_addr:port if + the port is other than default (typically + TCP ports 860 and 3260). + items: + type: string + type: array + readOnly: + description: ReadOnly here will force the + ReadOnly setting in VolumeMounts. Defaults + to false. + type: boolean + secretRef: + description: CHAP Secret for iSCSI target + and initiator authentication + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + targetPortal: + description: iSCSI Target Portal. The Portal + is either an IP or ip_addr:port if the port + is other than default (typically TCP ports + 860 and 3260). + type: string + required: + - iqn + - lun + - targetPortal + type: object + name: + description: 'Volume''s name. Must be a DNS_LABEL + and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' + type: string + nfs: + description: 'NFS represents an NFS mount on the + host that shares a pod''s lifetime More info: + https://kubernetes.io/docs/concepts/storage/volumes#nfs' + properties: + path: + description: 'Path that is exported by the + NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs' + type: string + readOnly: + description: 'ReadOnly here will force the + NFS export to be mounted with read-only + permissions. Defaults to false. More info: + https://kubernetes.io/docs/concepts/storage/volumes#nfs' + type: boolean + server: + description: 'Server is the hostname or IP + address of the NFS server. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs' + type: string + required: + - path + - server + type: object + persistentVolumeClaim: + description: 'PersistentVolumeClaimVolumeSource + represents a reference to a PersistentVolumeClaim + in the same namespace. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims' + properties: + claimName: + description: 'ClaimName is the name of a PersistentVolumeClaim + in the same namespace as the pod using this + volume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims' + type: string + readOnly: + description: Will force the ReadOnly setting + in VolumeMounts. Default false. + type: boolean + required: + - claimName + type: object + photonPersistentDisk: + description: PhotonPersistentDisk represents a + PhotonController persistent disk attached and + mounted on kubelets host machine + properties: + fsType: + description: Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". 
+ Implicitly inferred to be "ext4" if unspecified. + type: string + pdID: + description: ID that identifies Photon Controller + persistent disk + type: string + required: + - pdID + type: object + portworxVolume: + description: PortworxVolume represents a portworx + volume attached and mounted on kubelets host + machine + properties: + fsType: + description: FSType represents the filesystem + type to mount Must be a filesystem type + supported by the host operating system. + Ex. "ext4", "xfs". Implicitly inferred to + be "ext4" if unspecified. + type: string + readOnly: + description: Defaults to false (read/write). + ReadOnly here will force the ReadOnly setting + in VolumeMounts. + type: boolean + volumeID: + description: VolumeID uniquely identifies + a Portworx volume + type: string + required: + - volumeID + type: object + projected: + description: Items for all in one resources secrets, + configmaps, and downward API + properties: + defaultMode: + description: Mode bits used to set permissions + on created files by default. Must be an + octal value between 0000 and 0777 or a decimal + value between 0 and 511. YAML accepts both + octal and decimal values, JSON requires + decimal values for mode bits. Directories + within the path are not affected by this + setting. This might be in conflict with + other options that affect the file mode, + like fsGroup, and the result can be other + mode bits set. + format: int32 + type: integer + sources: + description: list of volume projections + items: + description: Projection that may be projected + along with other supported volume types + properties: + configMap: + description: information about the configMap + data to project + properties: + items: + description: If unspecified, each + key-value pair in the Data field + of the referenced ConfigMap will + be projected into the volume as + a file whose name is the key and + content is the value. If specified, + the listed keys will be projected + into the specified paths, and + unlisted keys will not be present. + If a key is specified which is + not present in the ConfigMap, + the volume setup will error unless + it is marked optional. Paths must + be relative and may not contain + the '..' path or start with '..'. + items: + description: Maps a string key + to a path within a volume. + properties: + key: + description: The key to project. + type: string + mode: + description: 'Optional: mode + bits used to set permissions + on this file. Must be an + octal value between 0000 + and 0777 or a decimal value + between 0 and 511. YAML + accepts both octal and decimal + values, JSON requires decimal + values for mode bits. If + not specified, the volume + defaultMode will be used. + This might be in conflict + with other options that + affect the file mode, like + fsGroup, and the result + can be other mode bits set.' + format: int32 + type: integer + path: + description: The relative + path of the file to map + the key to. May not be an + absolute path. May not contain + the path element '..'. May + not start with the string + '..'. + type: string + required: + - key + - path + type: object + type: array + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. + apiVersion, kind, uid?' 
+ type: string + optional: + description: Specify whether the + ConfigMap or its keys must be + defined + type: boolean + type: object + downwardAPI: + description: information about the downwardAPI + data to project + properties: + items: + description: Items is a list of + DownwardAPIVolume file + items: + description: DownwardAPIVolumeFile + represents information to create + the file containing the pod + field + properties: + fieldRef: + description: 'Required: Selects + a field of the pod: only + annotations, labels, name + and namespace are supported.' + properties: + apiVersion: + description: Version of + the schema the FieldPath + is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the + field to select in the + specified API version. + type: string + required: + - fieldPath + type: object + mode: + description: 'Optional: mode + bits used to set permissions + on this file, must be an + octal value between 0000 + and 0777 or a decimal value + between 0 and 511. YAML + accepts both octal and decimal + values, JSON requires decimal + values for mode bits. If + not specified, the volume + defaultMode will be used. + This might be in conflict + with other options that + affect the file mode, like + fsGroup, and the result + can be other mode bits set.' + format: int32 + type: integer + path: + description: 'Required: Path + is the relative path name + of the file to be created. + Must not be absolute or + contain the ''..'' path. + Must be utf-8 encoded. The + first item of the relative + path must not start with + ''..''' + type: string + resourceFieldRef: + description: 'Selects a resource + of the container: only resources + limits and requests (limits.cpu, + limits.memory, requests.cpu + and requests.memory) are + currently supported.' + properties: + containerName: + description: 'Container + name: required for volumes, + optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies + the output format of + the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: + resource to select' + type: string + required: + - resource + type: object + required: + - path + type: object + type: array + type: object + secret: + description: information about the secret + data to project + properties: + items: + description: If unspecified, each + key-value pair in the Data field + of the referenced Secret will + be projected into the volume as + a file whose name is the key and + content is the value. If specified, + the listed keys will be projected + into the specified paths, and + unlisted keys will not be present. + If a key is specified which is + not present in the Secret, the + volume setup will error unless + it is marked optional. Paths must + be relative and may not contain + the '..' path or start with '..'. + items: + description: Maps a string key + to a path within a volume. + properties: + key: + description: The key to project. + type: string + mode: + description: 'Optional: mode + bits used to set permissions + on this file. Must be an + octal value between 0000 + and 0777 or a decimal value + between 0 and 511. YAML + accepts both octal and decimal + values, JSON requires decimal + values for mode bits. If + not specified, the volume + defaultMode will be used. 
+ This might be in conflict + with other options that + affect the file mode, like + fsGroup, and the result + can be other mode bits set.' + format: int32 + type: integer + path: + description: The relative + path of the file to map + the key to. May not be an + absolute path. May not contain + the path element '..'. May + not start with the string + '..'. + type: string + required: + - key + - path + type: object + type: array + name: + description: 'Name of the referent. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. + apiVersion, kind, uid?' + type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + type: object + serviceAccountToken: + description: information about the serviceAccountToken + data to project + properties: + audience: + description: Audience is the intended + audience of the token. A recipient + of a token must identify itself + with an identifier specified in + the audience of the token, and + otherwise should reject the token. + The audience defaults to the identifier + of the apiserver. + type: string + expirationSeconds: + description: ExpirationSeconds is + the requested duration of validity + of the service account token. + As the token approaches expiration, + the kubelet volume plugin will + proactively rotate the service + account token. The kubelet will + start trying to rotate the token + if the token is older than 80 + percent of its time to live or + if the token is older than 24 + hours.Defaults to 1 hour and must + be at least 10 minutes. + format: int64 + type: integer + path: + description: Path is the path relative + to the mount point of the file + to project the token into. + type: string + required: + - path + type: object + type: object + type: array + type: object + quobyte: + description: Quobyte represents a Quobyte mount + on the host that shares a pod's lifetime + properties: + group: + description: Group to map volume access to + Default is no group + type: string + readOnly: + description: ReadOnly here will force the + Quobyte volume to be mounted with read-only + permissions. Defaults to false. + type: boolean + registry: + description: Registry represents a single + or multiple Quobyte Registry services specified + as a string as host:port pair (multiple + entries are separated with commas) which + acts as the central registry for volumes + type: string + tenant: + description: Tenant owning the given Quobyte + volume in the Backend Used with dynamically + provisioned Quobyte volumes, value is set + by the plugin + type: string + user: + description: User to map volume access to + Defaults to serivceaccount user + type: string + volume: + description: Volume is a string that references + an already created Quobyte volume by name. + type: string + required: + - registry + - volume + type: object + rbd: + description: 'RBD represents a Rados Block Device + mount on the host that shares a pod''s lifetime. + More info: https://examples.k8s.io/volumes/rbd/README.md' + properties: + fsType: + description: 'Filesystem type of the volume + that you want to mount. Tip: Ensure that + the filesystem type is supported by the + host operating system. Examples: "ext4", + "xfs", "ntfs". Implicitly inferred to be + "ext4" if unspecified. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd + TODO: how do we prevent errors in the filesystem + from compromising the machine' + type: string + image: + description: 'The rados image name. More info: + https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + type: string + keyring: + description: 'Keyring is the path to key ring + for RBDUser. Default is /etc/ceph/keyring. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + type: string + monitors: + description: 'A collection of Ceph monitors. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + items: + type: string + type: array + pool: + description: 'The rados pool name. Default + is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + type: string + readOnly: + description: 'ReadOnly here will force the + ReadOnly setting in VolumeMounts. Defaults + to false. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + type: boolean + secretRef: + description: 'SecretRef is name of the authentication + secret for RBDUser. If provided overrides + keyring. Default is nil. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + user: + description: 'The rados user name. Default + is admin. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it' + type: string + required: + - image + - monitors + type: object + scaleIO: + description: ScaleIO represents a ScaleIO persistent + volume attached and mounted on Kubernetes nodes. + properties: + fsType: + description: Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". + Default is "xfs". + type: string + gateway: + description: The host address of the ScaleIO + API Gateway. + type: string + protectionDomain: + description: The name of the ScaleIO Protection + Domain for the configured storage. + type: string + readOnly: + description: Defaults to false (read/write). + ReadOnly here will force the ReadOnly setting + in VolumeMounts. + type: boolean + secretRef: + description: SecretRef references to the secret + for ScaleIO user and other sensitive information. + If this is not provided, Login operation + will fail. + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + sslEnabled: + description: Flag to enable/disable SSL communication + with Gateway, default false + type: boolean + storageMode: + description: Indicates whether the storage + for a volume should be ThickProvisioned + or ThinProvisioned. Default is ThinProvisioned. + type: string + storagePool: + description: The ScaleIO Storage Pool associated + with the protection domain. + type: string + system: + description: The name of the storage system + as configured in ScaleIO. + type: string + volumeName: + description: The name of a volume already + created in the ScaleIO system that is associated + with this volume source. + type: string + required: + - gateway + - secretRef + - system + type: object + secret: + description: 'Secret represents a secret that + should populate this volume. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#secret' + properties: + defaultMode: + description: 'Optional: mode bits used to + set permissions on created files by default. + Must be an octal value between 0000 and + 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, + JSON requires decimal values for mode bits. + Defaults to 0644. Directories within the + path are not affected by this setting. This + might be in conflict with other options + that affect the file mode, like fsGroup, + and the result can be other mode bits set.' + format: int32 + type: integer + items: + description: If unspecified, each key-value + pair in the Data field of the referenced + Secret will be projected into the volume + as a file whose name is the key and content + is the value. If specified, the listed keys + will be projected into the specified paths, + and unlisted keys will not be present. If + a key is specified which is not present + in the Secret, the volume setup will error + unless it is marked optional. Paths must + be relative and may not contain the '..' + path or start with '..'. + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: The key to project. + type: string + mode: + description: 'Optional: mode bits used + to set permissions on this file. Must + be an octal value between 0000 and + 0777 or a decimal value between 0 + and 511. YAML accepts both octal and + decimal values, JSON requires decimal + values for mode bits. If not specified, + the volume defaultMode will be used. + This might be in conflict with other + options that affect the file mode, + like fsGroup, and the result can be + other mode bits set.' + format: int32 + type: integer + path: + description: The relative path of the + file to map the key to. May not be + an absolute path. May not contain + the path element '..'. May not start + with the string '..'. + type: string + required: + - key + - path + type: object + type: array + optional: + description: Specify whether the Secret or + its keys must be defined + type: boolean + secretName: + description: 'Name of the secret in the pod''s + namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret' + type: string + type: object + storageos: + description: StorageOS represents a StorageOS + volume attached and mounted on Kubernetes nodes. + properties: + fsType: + description: Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". + Implicitly inferred to be "ext4" if unspecified. + type: string + readOnly: + description: Defaults to false (read/write). + ReadOnly here will force the ReadOnly setting + in VolumeMounts. + type: boolean + secretRef: + description: SecretRef specifies the secret + to use for obtaining the StorageOS API credentials. If + not specified, default values will be attempted. + properties: + name: + description: 'Name of the referent. More + info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + TODO: Add other useful fields. apiVersion, + kind, uid?' + type: string + type: object + volumeName: + description: VolumeName is the human-readable + name of the StorageOS volume. Volume names + are only unique within a namespace. + type: string + volumeNamespace: + description: VolumeNamespace specifies the + scope of the volume within StorageOS. If + no namespace is specified then the Pod's + namespace will be used. 
This allows the + Kubernetes name scoping to be mirrored within + StorageOS for tighter integration. Set VolumeName + to any name to override the default behaviour. + Set to "default" if you are not using namespaces + within StorageOS. Namespaces that do not + pre-exist within StorageOS will be created. + type: string + type: object + vsphereVolume: + description: VsphereVolume represents a vSphere + volume attached and mounted on kubelets host + machine + properties: + fsType: + description: Filesystem type to mount. Must + be a filesystem type supported by the host + operating system. Ex. "ext4", "xfs", "ntfs". + Implicitly inferred to be "ext4" if unspecified. + type: string + storagePolicyID: + description: Storage Policy Based Management + (SPBM) profile ID associated with the StoragePolicyName. + type: string + storagePolicyName: + description: Storage Policy Based Management + (SPBM) profile name. + type: string + volumePath: + description: Path that identifies vSphere + volume vmdk + type: string + required: + - volumePath + type: object + required: + - name + type: object + type: array + required: + - containers + type: object + type: object + type: object + description: 'INSERT ADDITIONAL SPEC FIELDS - desired state of cluster + Important: Run "make" to regenerate code after modifying this file + Defines replica spec for replica type' + type: object + ttlSecondsAfterFinished: + default: 86400 + description: TTLSecondsAfterFinished is the TTL to clean up jobs. + It may take extra ReconcilePeriod seconds for the cleanup, since + reconcile gets called periodically. Default to 86400(one day). + format: int64 + type: integer + required: + - fedReplicaSpecs + type: object + status: + description: FedAppStatus defines the observed state of FedApp + properties: + conditions: + items: + description: FedAppCondition describes current state of a job. + properties: + lastTransitionTime: + description: Last time the condition transit from one status + to another. + format: date-time + type: string + message: + description: Human readable message indicating details about + last transition. + type: string + reason: + description: (brief) reason for the condition's last transition. + type: string + status: + description: Status of the condition, one of True, False, Unknown. + type: string + type: + description: Type of job condition. + type: string + required: + - status + - type + type: object + type: array + startTime: + format: date-time + type: string + terminatedPodsMap: + additionalProperties: + description: TerminatedPods holds name of Pods that have terminated. + properties: + failed: + description: Failed holds name of failed Pods. + items: + additionalProperties: + type: object + type: object + type: array + succeeded: + description: Succeeded holds name of succeeded Pods. + items: + additionalProperties: + type: object + type: object + type: array + type: object + description: 'Record pods name which have terminated, hack for too + fast pod GC. TODO: when pods gc collection is too fast that fedapp + controller dont have enough time to record them in TerminatedPodsMap + field, use finalizer to avoid it.' 
+ type: object + type: object + type: object + served: true + storage: true + subresources: + status: {} +status: + acceptedNames: + kind: "" + plural: "" + conditions: [] + storedVersions: [] diff --git a/operator/deploy_charts/hl-test-manager.yaml b/operator/deploy_charts/hl-test-manager.yaml new file mode 100644 index 000000000..f93be49a4 --- /dev/null +++ b/operator/deploy_charts/hl-test-manager.yaml @@ -0,0 +1,64 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: controller-manager + namespace: fedlearner + labels: + control-plane: controller-manager +spec: + selector: + matchLabels: + control-plane: controller-manager + replicas: 1 + template: + metadata: + annotations: + kubectl.kubernetes.io/default-container: manager + labels: + control-plane: controller-manager + spec: + securityContext: + runAsNonRoot: true + volumes: + - name: hl-kubeconfig + configMap: + name: hl-kubeconfig + defaultMode: 420 + containers: + - command: + - /manager + env: + - name: KUBECONFIG + value: /.kube/config + args: + - --ingress-extra-host-suffix=.fl-bytedance.com + - --namespace=fedlearner + image: artifact.bytedance.com/fedlearner/pp_fedapp_operator:0.2.2 + name: manager + securityContext: + allowPrivilegeEscalation: false + livenessProbe: + httpGet: + path: /healthz + port: 8081 + initialDelaySeconds: 15 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /readyz + port: 8081 + initialDelaySeconds: 5 + periodSeconds: 10 + # TODO(user): Configure the resources accordingly based on the project requirements. + # More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 10m + memory: 64Mi + volumeMounts: + - name: hl-kubeconfig + mountPath: /.kube/ + terminationGracePeriodSeconds: 10 \ No newline at end of file diff --git a/operator/deploy_charts/third_deployment.yaml b/operator/deploy_charts/third_deployment.yaml new file mode 100644 index 000000000..b2bd6691a --- /dev/null +++ b/operator/deploy_charts/third_deployment.yaml @@ -0,0 +1,56 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: controller-manager + namespace: default + labels: + control-plane: controller-manager +spec: + selector: + matchLabels: + control-plane: controller-manager + replicas: 1 + template: + metadata: + annotations: + kubectl.kubernetes.io/default-container: manager + labels: + control-plane: controller-manager + spec: + imagePullSecrets: + - name: regcred + serviceAccountName: fedlearner-operator + securityContext: + runAsNonRoot: true + containers: + - name: manager + command: + - /manager + args: + - --ingress-extra-host-suffix=.fl-tryit1.com + - --namespace=default + image: artifact.bytedance.com/fedlearner/pp_fedapp_operator:0.2.2 + securityContext: + allowPrivilegeEscalation: false + livenessProbe: + httpGet: + path: /healthz + port: 8081 + initialDelaySeconds: 15 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /readyz + port: 8081 + initialDelaySeconds: 5 + periodSeconds: 10 + # TODO(user): Configure the resources accordingly based on the project requirements. 
+ # More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 10m + memory: 64Mi + terminationGracePeriodSeconds: 10 \ No newline at end of file diff --git a/operator/hack/boilerplate.go.txt b/operator/hack/boilerplate.go.txt new file mode 100644 index 000000000..65b862271 --- /dev/null +++ b/operator/hack/boilerplate.go.txt @@ -0,0 +1,15 @@ +/* +Copyright 2023. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ \ No newline at end of file diff --git a/operator/main.go b/operator/main.go new file mode 100644 index 000000000..26ed6b23e --- /dev/null +++ b/operator/main.go @@ -0,0 +1,106 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package main + +import ( + "flag" + "os" + + // Import all Kubernetes client auth plugins (e.g. Azure, GCP, OIDC, etc.) + // to ensure that exec-entrypoint and run can make use of them. + _ "k8s.io/client-go/plugin/pkg/client/auth" + + "k8s.io/apimachinery/pkg/runtime" + utilruntime "k8s.io/apimachinery/pkg/util/runtime" + clientgoscheme "k8s.io/client-go/kubernetes/scheme" + ctrl "sigs.k8s.io/controller-runtime" + "sigs.k8s.io/controller-runtime/pkg/healthz" + "sigs.k8s.io/controller-runtime/pkg/log/zap" + + fedlearnerv1alpha1 "fedlearner.net/operator/api/v1alpha1" + "fedlearner.net/operator/controllers" + //+kubebuilder:scaffold:imports +) + +var ( + scheme = runtime.NewScheme() + setupLog = ctrl.Log.WithName("setup") +) + +func init() { + utilruntime.Must(clientgoscheme.AddToScheme(scheme)) + utilruntime.Must(fedlearnerv1alpha1.AddToScheme(scheme)) + //+kubebuilder:scaffold:scheme +} + +func main() { + var metricsAddr string + var enableLeaderElection bool + var probeAddr string + var namespace string + flag.StringVar(&metricsAddr, "metrics-bind-address", ":8080", "The address the metric endpoint binds to.") + flag.StringVar(&probeAddr, "health-probe-bind-address", ":8081", "The address the probe endpoint binds to.") + flag.StringVar(&namespace, "namespace", "default", "namespace") + flag.BoolVar(&enableLeaderElection, "leader-elect", false, + "Enable leader election for controller manager. 
"+ + "Enabling this will ensure there is only one active controller manager.") + flag.StringVar(&controllers.IngressExtraHostSuffix, "ingress-extra-host-suffix", ".fl-aliyun-test.com", "The extra suffix of hosts when creating ingress.") + opts := zap.Options{ + Development: true, + } + opts.BindFlags(flag.CommandLine) + flag.Parse() + + ctrl.SetLogger(zap.New(zap.UseFlagOptions(&opts))) + + mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{ + Namespace: namespace, + Scheme: scheme, + MetricsBindAddress: metricsAddr, + Port: 9443, + HealthProbeBindAddress: probeAddr, + LeaderElection: enableLeaderElection, + LeaderElectionID: "bc5e4174.k8s.io", + }) + if err != nil { + setupLog.Error(err, "unable to start manager") + os.Exit(1) + } + + if err = (&controllers.FedAppReconciler{ + Client: mgr.GetClient(), + Scheme: mgr.GetScheme(), + }).SetupWithManager(mgr); err != nil { + setupLog.Error(err, "unable to create controller", "controller", "FedApp") + os.Exit(1) + } + //+kubebuilder:scaffold:builder + + if err := mgr.AddHealthzCheck("healthz", healthz.Ping); err != nil { + setupLog.Error(err, "unable to set up health check") + os.Exit(1) + } + if err := mgr.AddReadyzCheck("readyz", healthz.Ping); err != nil { + setupLog.Error(err, "unable to set up ready check") + os.Exit(1) + } + + setupLog.Info("starting manager") + if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil { + setupLog.Error(err, "problem running manager") + os.Exit(1) + } +} diff --git a/pp_lite/BUILD.bazel b/pp_lite/BUILD.bazel new file mode 100644 index 000000000..a3923df98 --- /dev/null +++ b/pp_lite/BUILD.bazel @@ -0,0 +1,30 @@ +# gazelle:exclude spark/ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") + +package_group( + name = "pp_lite_package", + packages = ["//pp_lite/..."], +) + +py_library( + name = "pp_lite", + srcs = [ + "cli.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join/psi_ot", + "//pp_lite/data_join/psi_rsa", + "//pp_lite/proto:py_proto", + "//web_console_v2/inspection:error_code_lib", + "@common_click//:pkg", + ], +) + +py_binary( + name = "cli_bin", + srcs = ["cli.py"], + main = "cli.py", + visibility = ["//pp_lite:pp_lite_package"], + deps = [":pp_lite"], +) diff --git a/pp_lite/README.md b/pp_lite/README.md new file mode 100644 index 000000000..e05d43b69 --- /dev/null +++ b/pp_lite/README.md @@ -0,0 +1,45 @@ +# 隐私求交轻客户端 + +## 文件结构 +轻客户端的文件由以下几个部分组成, +- images/pp_lite:定义容器入口shell脚本,以及Dockerfile。 +- pp_lite:定义具体业务逻辑和客户端打包方法。 +- tools/tcp_grpc_proxy:提供了tcp转grpc的功能,主要用于OtPsi。 +``` +images +. +└── pp_lite +    ├── nginx.tmpl # nginx模版或其他脚本 +    ├── entrypoint.sh # 入口文件 +    └── Dockerfile +``` + +``` +pp_lite +. +├── cli.py # 入口文件 +├── requirements.txt +├── data_join # 求交实现 +│   ├── psi_rsa # rsa求交 +│   ├── psi_ot # ot求交 +│   └── utils +├── rpc # rpc相关代码 +├── test # 集成测试 +└── deploy # 轻客户端打包脚本 +``` +- 其中cli.py通过click实现,封装了轻客户端提供的各种功能。images/psi/scripts/entrypoint.sh将外部参数透传给cli.py。 +- test中实现了一些集成测试,一些局部的ut和被测试文件放在一起。 +- proto文件不单独存放,放在具体的使用的位置。 + +## 镜像管理方式 +整个轻客户端只打包一个镜像,其中 +- Dockerfile存储在images/pp_lite/Dockerfile +- 入口脚本为images/pp_lite/entrypoint.sh + +通过传递不同的参数指定容器不同的行为。 + +## 使用方式 +- 服务端通过平台中的数据模块进行求交。 +- 客户端的使用方式可参考pp_lite/deploy/README.md。 + +具体demo可参考test中的测试用例。 \ No newline at end of file diff --git a/pp_lite/cli.py b/pp_lite/cli.py new file mode 100644 index 000000000..4101db2a7 --- /dev/null +++ b/pp_lite/cli.py @@ -0,0 +1,91 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import click +import logging +from pp_lite.data_join.psi_rsa import psi_client as psi_rsa_client +from pp_lite.data_join.psi_rsa import psi_server as psi_rsa_server +from pp_lite.data_join.psi_ot import client as psi_ot_and_hash_client +from pp_lite.data_join.psi_ot import server as psi_ot_and_hash_server +from pp_lite.data_join.psi_ot import arguments as psi_ot_and_hash_arguments +from pp_lite.proto.common_pb2 import DataJoinType + +from web_console_v2.inspection.error_code import AreaCode, JobException, write_termination_message + + +@click.group(name='pp_lite') +def pp_lite(): + pass + + +# TODO(zhou.yi): add psi rsa options +@pp_lite.command() +@click.argument('role', type=click.Choice(['client', 'server', 'light_client'])) +def psi_rsa(role: str): + try: + if role in ['client', 'light_client']: + psi_rsa_client.run(psi_rsa_client.get_arguments()) + else: + psi_rsa_server.run(psi_rsa_server.get_arguments()) + except JobException as e: + logging.exception(e.message) + write_termination_message(AreaCode.PSI_RSA, e.error_type, e.message) + raise JobException(AreaCode.PSI_RSA, e.error_type, e.message) from e + + +# TODO(zhou.yi): add psi ot options +@pp_lite.command() +@click.argument('role', type=click.Choice(['client', 'server', 'light_client'])) +def psi_ot(role: str): + try: + args = psi_ot_and_hash_arguments.get_arguments() + args.data_join_type = DataJoinType.OT_PSI + if role == 'client': + args.partitioned = True + psi_ot_and_hash_client.run(args) + elif role == 'light_client': + args.partitioned = False + psi_ot_and_hash_client.run(args) + else: + psi_ot_and_hash_server.run(args) + except JobException as e: + logging.exception(e.message) + write_termination_message(AreaCode.PSI_OT, e.error_type, e.message) + raise JobException(AreaCode.PSI_OT, e.error_type, e.message) from e + + +# TODO(zhou.yi): add psi hash options +@pp_lite.command() +@click.argument('role', type=click.Choice(['client', 'server', 'light_client'])) +def psi_hash(role: str): + try: + args = psi_ot_and_hash_arguments.get_arguments() + args.data_join_type = DataJoinType.HASHED_DATA_JOIN + if role == 'client': + args.partitioned = True + psi_ot_and_hash_client.run(args) + elif role == 'light_client': + args.partitioned = False + psi_ot_and_hash_client.run(args) + else: + psi_ot_and_hash_server.run(args) + except JobException as e: + logging.exception(e.message) + write_termination_message(AreaCode.PSI_HASH, e.error_type, e.message) + raise JobException(AreaCode.PSI_HASH, e.error_type, e.message) from e + + +if __name__ == '__main__': + pp_lite() diff --git a/pp_lite/data_join/BUILD.bazel b/pp_lite/data_join/BUILD.bazel new file mode 100644 index 000000000..0e039a07a --- /dev/null +++ b/pp_lite/data_join/BUILD.bazel @@ -0,0 +1,7 @@ +load("@rules_python//python:defs.bzl", "py_library") + +py_library( + name = "envs", + srcs = ["envs.py"], + visibility = ["//pp_lite:pp_lite_package"], +) diff --git a/pp_lite/data_join/envs.py b/pp_lite/data_join/envs.py new file mode 
100644 index 000000000..3fd86cc7a --- /dev/null +++ b/pp_lite/data_join/envs.py @@ -0,0 +1,20 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os + +GRPC_CLIENT_TIMEOUT = int(os.environ.get('GRPC_CLIENT_TIMEOUT', 30)) +STORAGE_ROOT = os.environ.get('STORAGE_ROOT', '/app') +CLIENT_CONNECT_RETRY_INTERVAL = 10 diff --git a/pp_lite/data_join/psi_ot/BUILD.bazel b/pp_lite/data_join/psi_ot/BUILD.bazel new file mode 100644 index 000000000..e1a204975 --- /dev/null +++ b/pp_lite/data_join/psi_ot/BUILD.bazel @@ -0,0 +1,47 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "psi_ot", + srcs = [ + "arguments.py", + "client.py", + "data_join_control_servicer.py", + "data_join_manager.py", + "data_join_server.py", + "server.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join:envs", + "//pp_lite/data_join/psi_ot/joiner", + "//pp_lite/data_join/utils", + "//pp_lite/proto:py_grpc", + "//pp_lite/proto:py_proto", + "//pp_lite/rpc", + "//pp_lite/utils", + "//py_libs:metrics_lib", + "//web_console_v2/inspection:error_code_lib", + ], +) + +py_test( + name = "data_join_server_test", + size = "small", + srcs = ["data_join_server_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":psi_ot", + "//pp_lite/testing", + ], +) + +py_test( + name = "arguments_test", + size = "small", + srcs = ["arguments_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":psi_ot", + "//pp_lite/testing", + ], +) diff --git a/pp_lite/data_join/psi_ot/arguments.py b/pp_lite/data_join/psi_ot/arguments.py new file mode 100644 index 000000000..5b7fe62b8 --- /dev/null +++ b/pp_lite/data_join/psi_ot/arguments.py @@ -0,0 +1,45 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from os import getenv +import json +from pp_lite.proto.arguments_pb2 import Arguments +from web_console_v2.inspection.error_code import AreaCode, ErrorType, JobException + + +def get_arguments() -> Arguments: + for i in ['INPUT_PATH', 'OUTPUT_PATH', 'KEY_COLUMN', 'SERVER_PORT', 'JOINER_PORT']: + if getenv(i) is None: + raise JobException(AreaCode.UNKNOWN, ErrorType.INPUT_PARAMS_ERROR, f'Environment variable {i} is missing.') + args = Arguments() + args.input_path = getenv('INPUT_PATH') + args.output_path = getenv('OUTPUT_PATH') + args.key_column = getenv('KEY_COLUMN') + args.server_port = int(getenv('SERVER_PORT')) + args.joiner_port = int(getenv('JOINER_PORT')) + args.worker_rank = int(getenv('INDEX', '0')) + if getenv('NUM_WORKERS'): + args.num_workers = int(getenv('NUM_WORKERS')) + role = getenv('ROLE', '') + if getenv('CLUSTER_SPEC') and role != 'light_client': + cluster_spec = json.loads(getenv('CLUSTER_SPEC')) + + # Only accept CLUSTER_SPEC in cluster environment, + # so that CLUSTER_SPEC from .env in light client environment can be omitted. + if 'clusterSpec' in cluster_spec: + args.cluster_spec.workers.extend(cluster_spec['clusterSpec']['Worker']) + args.num_workers = len(args.cluster_spec.workers) + assert args.num_workers > 0 + return args diff --git a/pp_lite/data_join/psi_ot/arguments_test.py b/pp_lite/data_join/psi_ot/arguments_test.py new file mode 100644 index 000000000..0ad23650d --- /dev/null +++ b/pp_lite/data_join/psi_ot/arguments_test.py @@ -0,0 +1,93 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import json +import unittest + +from pp_lite.data_join.psi_ot.arguments import get_arguments +from pp_lite.proto.arguments_pb2 import Arguments, ClusterSpec + + +class ArgumentsTest(unittest.TestCase): + + def test_get_client_arguments(self): + os.environ['INPUT_PATH'] = 'input' + os.environ['OUTPUT_PATH'] = 'output' + os.environ['KEY_COLUMN'] = 'raw_id' + os.environ['INDEX'] = '0' + os.environ['NUM_WORKERS'] = '1' + os.environ['JOINER_PORT'] = '12345' + os.environ['SERVER_PORT'] = '54321' + os.environ['ROLE'] = 'client' + args = get_arguments() + self.assertEqual( + args, + Arguments(input_path='input', + output_path='output', + key_column='raw_id', + server_port=54321, + joiner_port=12345, + worker_rank=0, + num_workers=1)) + os.environ['CLUSTER_SPEC'] = json.dumps({'clusterSpec': {'Worker': ['worker-0', 'worker-1']}}) + args = get_arguments() + cluster_spec = ClusterSpec() + cluster_spec.workers.extend(['worker-0', 'worker-1']) + self.assertEqual( + args, + Arguments(input_path='input', + output_path='output', + key_column='raw_id', + server_port=54321, + joiner_port=12345, + worker_rank=0, + num_workers=2, + cluster_spec=cluster_spec)) + + def test_get_light_client_arguments(self): + os.environ['INPUT_PATH'] = 'input' + os.environ['OUTPUT_PATH'] = 'output' + os.environ['KEY_COLUMN'] = 'raw_id' + os.environ['INDEX'] = '0' + os.environ['NUM_WORKERS'] = '5' + os.environ['JOINER_PORT'] = '12345' + os.environ['SERVER_PORT'] = '54321' + os.environ['ROLE'] = 'light_client' + args = get_arguments() + self.assertEqual( + args, + Arguments(input_path='input', + output_path='output', + key_column='raw_id', + server_port=54321, + joiner_port=12345, + worker_rank=0, + num_workers=5)) + os.environ['CLUSTER_SPEC'] = json.dumps({'clusterSpec': {'Worker': ['worker-0', 'worker-1']}}) # omitted + args = get_arguments() + self.assertEqual( + args, + Arguments(input_path='input', + output_path='output', + key_column='raw_id', + server_port=54321, + joiner_port=12345, + worker_rank=0, + num_workers=5)) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/psi_ot/client.py b/pp_lite/data_join/psi_ot/client.py new file mode 100644 index 000000000..6610d02cd --- /dev/null +++ b/pp_lite/data_join/psi_ot/client.py @@ -0,0 +1,135 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import grpc
+import time
+import logging
+import logging.config
+import tempfile
+import copy
+from concurrent.futures import FIRST_EXCEPTION, ProcessPoolExecutor, wait
+
+from pp_lite.data_join import envs
+
+from pp_lite.proto.arguments_pb2 import Arguments
+from pp_lite.proto.common_pb2 import DataJoinType, FileType
+from pp_lite.rpc.data_join_control_client import DataJoinControlClient
+from pp_lite.data_join.psi_ot.data_join_manager import DataJoinManager
+from pp_lite.data_join.psi_ot.joiner.ot_psi_joiner import OtPsiJoiner
+from pp_lite.data_join.psi_ot.joiner.hashed_data_joiner import HashedDataJoiner
+from pp_lite.data_join.utils.example_id_writer import ExampleIdWriter
+from pp_lite.data_join.utils.example_id_reader import ExampleIdReader, PartitionInfo
+from pp_lite.data_join.utils.partitioner import Partitioner
+from pp_lite.utils.logging_config import logging_config, log_path
+from pp_lite.utils.tools import get_partition_ids
+
+
+def wait_for_ready(client: DataJoinControlClient):
+    while True:
+        try:
+            client.health_check()
+            break
+        except grpc.RpcError:
+            logging.info('server is not ready')
+            time.sleep(10)
+
+
+def wait_for_ready_and_verify(client: DataJoinControlClient, num_partitions: int, num_workers: int):
+    wait_for_ready(client=client)
+    logging.info('[DataJoinControlClient] start verifying parameters')
+    resp = client.verify_parameter(num_partitions=num_partitions, num_workers=num_workers)
+    logging.info(
+        f'[DataJoinControlClient] Server num_partitions: {resp.num_partitions}, num_workers: {resp.num_workers}')
+    # assert resp.succeeded, 'joiner must have the same parameters'
+    if not resp.succeeded:
+        if resp.num_partitions == 0:
+            logging.info(f'[DataJoinControlClient] Server num_partitions: {resp.num_partitions}, server quit.')
+        logging.info('[DataJoinControlClient] joiners must have the same parameters')
+        return False
+    return True
+
+
+def run_joiner(args: Arguments):
+    logging.config.dictConfig(logging_config(file_path=log_path(log_dir=envs.STORAGE_ROOT)))
+    client = DataJoinControlClient(args.server_port)
+    reader = ExampleIdReader(input_path=args.input_path, file_type=FileType.CSV, key_column=args.key_column)
+    writer = ExampleIdWriter(output_path=args.output_path, key_column=args.key_column)
+    if args.data_join_type == DataJoinType.HASHED_DATA_JOIN:
+        joiner = HashedDataJoiner(joiner_port=args.joiner_port)
+    else:
+        joiner = OtPsiJoiner(joiner_port=args.joiner_port)
+    partition_info = PartitionInfo(args.input_path)
+    partition_ids = get_partition_ids(args.worker_rank, args.num_workers, partition_info.num_partitions)
+    logging.info(f'allocated partitions {partition_ids} to worker {args.worker_rank}')
+    manager = DataJoinManager(joiner, client, reader, writer)
+    num_partitions = reader.num_partitions
+    if num_partitions == 0:
+        logging.info('[run_joiner] num_partitions of client is zero, closing gRPC client and quitting.')
+        client.finish()
+        return
+    ret = wait_for_ready_and_verify(client, num_partitions, args.num_workers)
+    if ret:
+        manager.run(partition_ids=partition_ids)
+        client.close()
+    else:
+        client.finish()  # Close _stub
+        client.close()
+
+
+def run(args: Arguments):
+    logging.config.dictConfig(logging_config(file_path=log_path(log_dir=envs.STORAGE_ROOT)))
+    if args.partitioned:
+        run_joiner(args)
+    else:
+        partitioned_path = tempfile.mkdtemp()
+        logging.info(f'[DataJoinControlClient] input not partitioned, start partitioning to {partitioned_path}...')
+        client = DataJoinControlClient(args.server_port)
+        wait_for_ready(client=client)
+        parameter_response = client.get_parameter()
+        # DataJoinControlClient includes a gRPC channel.
+        # Creating a gRPC channel on the same port again may cause errors.
+        # So the client needs to be closed after use.
+        client.close()
+        num_partitions = parameter_response.num_partitions
+        logging.info(f'[DataJoinControlClient] data will be partitioned to {num_partitions} partition(s).')
+        partitioner = Partitioner(input_path=args.input_path,
+                                  output_path=partitioned_path,
+                                  num_partitions=num_partitions,
+                                  block_size=1000000,
+                                  key_column=args.key_column,
+                                  queue_size=40,
+                                  reader_thread_num=20,
+                                  writer_thread_num=20)
+        partitioner.partition_data()
+        logging.info('[DataJoinControlClient] partition finished.')
+        futures = []
+        pool = ProcessPoolExecutor()
+        for worker in range(args.num_workers):
+            worker_args = copy.deepcopy(args)
+            worker_args.worker_rank = worker
+            worker_args.input_path = partitioned_path
+            worker_args.server_port = args.server_port + worker * 2
+            worker_args.joiner_port = args.joiner_port + worker
+            futures.append(pool.submit(run_joiner, worker_args))
+        res = wait(futures, return_when=FIRST_EXCEPTION)
+        for future in res.done:
+            if future.exception():
+                # stop all subprocesses early when an exception is caught
+                for pid, process in pool._processes.items():  # pylint: disable=protected-access
+                    process.terminate()
+                raise Exception('Joiner subprocess failed') from future.exception()
+
+        combine_writer = ExampleIdWriter(output_path=args.output_path, key_column=args.key_column)
+        combine_writer.combine(num_partitions)
diff --git a/pp_lite/data_join/psi_ot/data_join_control_servicer.py b/pp_lite/data_join/psi_ot/data_join_control_servicer.py
new file mode 100644
index 000000000..821f8fcac
--- /dev/null
+++ b/pp_lite/data_join/psi_ot/data_join_control_servicer.py
@@ -0,0 +1,87 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import grpc +import logging +from typing import Callable +from google.protobuf import empty_pb2 + +from pp_lite.rpc.server import IServicer +from pp_lite.data_join.psi_ot.data_join_server import DataJoinServer +from pp_lite.proto.common_pb2 import Pong, Ping +from pp_lite.proto import data_join_control_service_pb2 as service_pb2 +from pp_lite.proto import data_join_control_service_pb2_grpc as service_pb2_grpc +from pp_lite.proto.arguments_pb2 import ClusterSpec + + +class DataJoinControlServicer(service_pb2_grpc.DataJoinControlServiceServicer, IServicer): + + def __init__(self, data_join_server: DataJoinServer, cluster_spec: ClusterSpec): + self._stop_hook = None + self._data_join_server = data_join_server + self._cluster_spec = cluster_spec + + def HealthCheck(self, request: Ping, context): + logging.info('[DataJoinControlServicer] Receive HealthCheck Request') + return Pong(message=request.message) + + def VerifyParameter(self, request: service_pb2.VerifyParameterRequest, context): + logging.info('[DataJoinControlServicer] Receive VerifyParameter Request') + succeeded = True + num_partitions = self._data_join_server.num_partitions + num_workers = len(self._cluster_spec.workers) + if request.num_partitions != num_partitions: + logging.warning( + f'Server and client do not have the same partition num, {num_partitions} vs {request.num_partitions}') + succeeded = False + if request.num_workers != num_workers: + logging.warning( + f'Server and client do not have the same worker num, {num_workers} vs {request.num_workers}') + succeeded = False + return service_pb2.VerifyParameterResponse(succeeded=succeeded, + num_partitions=num_partitions, + num_workers=num_workers) + + def GetParameter(self, request: service_pb2.GetParameterRequest, context): + logging.info('[DataJoinControlServicer] Receive GetParameter Request') + num_partitions = self._data_join_server.num_partitions + num_workers = len(self._cluster_spec.workers) + return service_pb2.GetParameterResponse(num_partitions=num_partitions, num_workers=num_workers) + + def CreateDataJoin(self, request: service_pb2.CreateDataJoinRequest, context): + logging.info(f'[DataJoinControlServicer] Receive CreateDataJoin Request for partition {request.partition_id}') + assert request.type == self._data_join_server.data_join_type, 'joiner must have the same type' + self._data_join_server.stop() + if self._data_join_server.empty(partition_id=request.partition_id): + logging.info(f'[DataJoinControlServicer] skip joiner for partition {request.partition_id} with input 0 ids') + return service_pb2.CreateDataJoinResponse(succeeded=True, empty=True) + succeeded = self._data_join_server.start(partition_id=request.partition_id) + return service_pb2.CreateDataJoinResponse(succeeded=succeeded, empty=False) + + def GetDataJoinResult(self, request: service_pb2.GetDataJoinResultRequest, context): + logging.info( + f'[DataJoinControlServicer] Receive GetDataJoinResult Request for partition {request.partition_id}') + finished = self._data_join_server.is_finished(request.partition_id) + logging.info(f'[DataJoinControlServicer] respond result {finished} to GetDataJoinResult request') + return service_pb2.DataJoinResult(finished=finished) + + def Finish(self, request, context): + logging.info('[DataJoinControlServicer] Receive Finish Request') + self._stop_hook() + return empty_pb2.Empty() + + def register(self, server: grpc.Server, stop_hook: Callable[[], None]): + self._stop_hook = stop_hook + service_pb2_grpc.add_DataJoinControlServiceServicer_to_server(self, 
server) diff --git a/pp_lite/data_join/psi_ot/data_join_manager.py b/pp_lite/data_join/psi_ot/data_join_manager.py new file mode 100644 index 000000000..357dad391 --- /dev/null +++ b/pp_lite/data_join/psi_ot/data_join_manager.py @@ -0,0 +1,83 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import time +import logging +from typing import List + +from pp_lite.data_join.psi_ot.joiner.joiner_interface import Joiner +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader +from pp_lite.data_join.utils.example_id_writer import ExampleIdWriter +from pp_lite.rpc.data_join_control_client import DataJoinControlClient +from pp_lite.utils.decorators import retry_fn, timeout_fn +from pp_lite.utils import metric_collector + +MAX_NUMBER = 16000000 + + +class DataJoinManager: + + def __init__(self, joiner: Joiner, client: DataJoinControlClient, reader: ExampleIdReader, writer: ExampleIdWriter): + self._reader = reader + self._writer = writer + self._client = client + self._joiner = joiner + + def _wait_for_server_finished(self, partition_id: int): + for i in range(10): + resp = self._client.get_data_join_result(partition_id=partition_id) + if resp.finished: + logging.info(f'[DataJoinManager] server is finished for partition {partition_id}') + return + logging.warning(f'[DataJoinManager] server is still not finished for partition {partition_id}') + time.sleep(10) + raise Exception('server is still not finished!') + + @retry_fn(3) + @timeout_fn(1200) + def _run_task(self, joiner: Joiner, partition_id: int): + logging.info(f'[DataJoinManager] start partition {partition_id}') + # ensure input id is unique + with metric_collector.emit_timing('dataset.data_join.ot_or_hash_psi.read_data_timing', {'role': 'client'}): + ids = list(set(self._reader.read(partition_id))) + if len(ids) == 0: + logging.info(f'[DataJoinManager] skip join for partition {partition_id} with client input 0 ids') + return + response = self._client.create_data_join(partition_id=partition_id, data_join_type=joiner.type) + if response.empty: + logging.info(f'[DataJoinManager] skip join for partition {partition_id} with server input 0 ids') + return + logging.info(f'[DataJoinManager] start join for partition {partition_id} with input {len(ids)} ids') + assert len(ids) < MAX_NUMBER, f'the number of id should be less than {MAX_NUMBER}' + metric_collector.emit_counter('dataset.data_join.ot_or_hash_psi.partition_start_join', 1, {'role': 'client'}) + metric_collector.emit_counter('dataset.data_join.ot_or_hash_psi.row_num', len(ids), {'role': 'client'}) + inter_ids = joiner.client_run(ids=ids) + metric_collector.emit_counter('dataset.data_join.ot_or_hash_psi.intersection', len(inter_ids), + {'role': 'client'}) + logging.info(f'[DataJoinManager] finish join for partition {partition_id} with output {len(inter_ids)} ids') + self._writer.write(partition_id=partition_id, ids=inter_ids) + self._wait_for_server_finished(partition_id=partition_id) + 
self._writer.write_success_tag(partition_id=partition_id) + logging.info(f'[DataJoinManager] finish writing result for partition {partition_id}') + + def run(self, partition_ids: List[int]): + logging.info('[DataJoinManager] data join start!') + for partition_id in partition_ids: + if self._writer.success_tag_exists(partition_id=partition_id): + logging.warning(f'[DataJoinManager] skip partition {partition_id} since success tag exists') + continue + self._run_task(joiner=self._joiner, partition_id=partition_id) + self._client.finish() + logging.info('[DataJoinManager] data join is finished!') diff --git a/pp_lite/data_join/psi_ot/data_join_server.py b/pp_lite/data_join/psi_ot/data_join_server.py new file mode 100644 index 000000000..c78743bf0 --- /dev/null +++ b/pp_lite/data_join/psi_ot/data_join_server.py @@ -0,0 +1,117 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from threading import Lock +import time +import logging +from typing import List +from multiprocessing import get_context + +from pp_lite.proto.common_pb2 import DataJoinType +from pp_lite.data_join.psi_ot.joiner.joiner_interface import Joiner +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader +from pp_lite.data_join.utils.example_id_writer import ExampleIdWriter +from pp_lite.data_join.psi_ot.data_join_manager import MAX_NUMBER +from pp_lite.utils import metric_collector + + +def _run(joiner: Joiner, ids: List[str], writer: ExampleIdWriter, partition_id: int): + # since spawn method is used, logging config is not forked from parent process, + # so the log level should be set to INFO. + # TODO(hangweiqiang): find a better way to initialize the process + logging.getLogger().setLevel(logging.INFO) + metric_collector.emit_counter('dataset.data_join.ot_or_hash_psi.partition_start_join', 1, {'role': 'server'}) + metric_collector.emit_counter('dataset.data_join.ot_or_hash_psi.row_num', len(ids), {'role': 'server'}) + inter_ids = joiner.server_run(ids) + metric_collector.emit_counter('dataset.data_join.ot_or_hash_psi.intersection', len(inter_ids), {'role': 'server'}) + logging.info(f'[DataJoinServer] finish data join for partition {partition_id} with {len(inter_ids)} ids') + writer.write(partition_id=partition_id, ids=inter_ids) + writer.write_success_tag(partition_id=partition_id) + logging.info(f'[DataJoinServer] finish write result to partition {partition_id}') + + +class DataJoinServer: + + def __init__(self, joiner: Joiner, reader: ExampleIdReader, writer: ExampleIdWriter): + self._reader = reader + self._writer = writer + self._process = None + self._joiner = joiner + self._prepared_partition_id = None + self._ids = None + self._mutex = Lock() + # Since DataJoinServer use multiprocessing.Process to initialize a new process to + # run joiner, it may be blocked during fork due to https://github.com/grpc/grpc/issues/21471. 
+ # Setting start method to spawn will resolve this problem, the difference between fork + # and spawn can be found in https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods + self._mp_ctx = get_context('spawn') + + @property + def data_join_type(self) -> DataJoinType: + return self._joiner.type + + @property + def num_partitions(self) -> int: + return self._reader.num_partitions + + def is_finished(self, partition_id: int) -> bool: + return self._writer.success_tag_exists(partition_id=partition_id) + + def _get_ids(self, partition_id: int) -> List[str]: + with self._mutex: + if self._prepared_partition_id == partition_id: + return self._ids + # ensure input id is unique + self._ids = list(set(self._reader.read(partition_id))) + self._prepared_partition_id = partition_id + return self._ids + + def empty(self, partition_id: int) -> bool: + ids = self._get_ids(partition_id) + return len(ids) == 0 + + def start(self, partition_id: int) -> bool: + """Start non-blocking joiner""" + assert self._process is None + + with metric_collector.emit_timing('dataset.data_join.ot_or_hash_psi.read_data_timing', {'role': 'server'}): + ids = self._get_ids(partition_id) + logging.info(f'[DataJoinServer] read {len(ids)} ids from partition {partition_id}') + + if len(ids) < 1 or len(ids) > MAX_NUMBER: + logging.warning(f'[DataJoinServer] len(ids) should be positive and less than {MAX_NUMBER}') + return False + + self._process = self._mp_ctx.Process(target=_run, + kwargs={ + 'joiner': self._joiner, + 'ids': ids, + 'writer': self._writer, + 'partition_id': partition_id, + }) + logging.info(f'[DataJoinServer] start joiner for partition {partition_id}') + self._process.start() + # waiting for data join server being ready + time.sleep(10) + return True + + def stop(self): + """kill the joiner process and release the resources""" + if self._process is None: + return + self._process.terminate() + self._process.join() + self._process.close() + self._process = None diff --git a/pp_lite/data_join/psi_ot/data_join_server_test.py b/pp_lite/data_join/psi_ot/data_join_server_test.py new file mode 100644 index 000000000..e132bd588 --- /dev/null +++ b/pp_lite/data_join/psi_ot/data_join_server_test.py @@ -0,0 +1,84 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
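The spawn-versus-fork remark above is easy to check in isolation. A minimal, self-contained sketch (independent of pp_lite) of the pattern DataJoinServer uses; a spawned child starts a fresh interpreter, so it inherits neither forked gRPC state nor the parent's logging configuration, which is why `_run` resets the log level:

```python
# Minimal demonstration of the spawn start method used by DataJoinServer.
import logging
from multiprocessing import get_context


def child_task():
    # a spawned child re-imports the module: configure logging again here
    logging.getLogger().setLevel(logging.INFO)
    logging.info('running in a freshly spawned process')


if __name__ == '__main__':
    ctx = get_context('spawn')
    process = ctx.Process(target=child_task)
    process.start()
    process.join()
```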
+# + +import os +import time +import shutil +import tempfile +import unittest +from typing import List, Optional + +from pp_lite.proto.common_pb2 import FileType, DataJoinType +from pp_lite.data_join.psi_ot.data_join_server import DataJoinServer +from pp_lite.data_join.psi_ot.joiner.joiner_interface import Joiner +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader +from pp_lite.data_join.utils.example_id_writer import ExampleIdWriter +from pp_lite.testing.make_data import _make_fake_data + + +class TestJoiner(Joiner): + + def __init__(self, wait_time: Optional[float] = None): + super().__init__(12345) + self._wait_time = wait_time + + @property + def type(self) -> DataJoinType: + return DataJoinType.HASHED_DATA_JOIN + + def client_run(self, ids: List[str]) -> List[str]: + return [] + + def server_run(self, ids: List[str]) -> List[str]: + if self._wait_time: + time.sleep(self._wait_time) + return ids + + +class DataJoinServerTest(unittest.TestCase): + + def setUp(self): + super().setUp() + self._base_path = tempfile.mkdtemp() + self._input_path = os.path.join(self._base_path, 'input') + self._output_path = os.path.join(self._base_path, 'output') + os.makedirs(self._input_path) + os.makedirs(self._output_path) + _make_fake_data(self._input_path, 10, 10) + self._reader = ExampleIdReader(self._input_path, FileType.CSV, 'x_1') + self._writer = ExampleIdWriter(self._output_path, 'x_1') + + def tearDown(self): + shutil.rmtree(self._base_path) + + def test_start(self): + joiner = TestJoiner() + server = DataJoinServer(joiner, self._reader, self._writer) + server.start(partition_id=2) + time.sleep(0.1) + self.assertTrue(os.path.exists(os.path.join(self._output_path, 'partition_2'))) + + def test_stop(self): + joiner = TestJoiner(wait_time=11) + server = DataJoinServer(joiner, self._reader, self._writer) + server.start(partition_id=2) + server.stop() + time.sleep(2) + self.assertFalse(os.path.exists(os.path.join(self._output_path, 'partition_2'))) + self.assertFalse(os.path.exists(os.path.join(self._output_path, '0002._SUCCESS'))) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/psi_ot/joiner/BUILD.bazel b/pp_lite/data_join/psi_ot/joiner/BUILD.bazel new file mode 100644 index 000000000..4f54a769c --- /dev/null +++ b/pp_lite/data_join/psi_ot/joiner/BUILD.bazel @@ -0,0 +1,43 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "joiner", + srcs = [ + "hashed_data_join_servicer.py", + "hashed_data_joiner.py", + "joiner_interface.py", + "ot_psi_joiner.py", + "utils.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join:envs", + "//pp_lite/proto:py_grpc", + "//pp_lite/proto:py_proto", + "//pp_lite/rpc", + "@common_cityhash//:pkg", + "@common_fsspec//:pkg", + "@common_pyarrow//:pkg", # keep + ], +) + +py_test( + name = "ot_psi_joiner_test", + size = "small", + srcs = ["ot_psi_joiner_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":joiner", + "//pp_lite/data_join:envs", + ], +) + +py_test( + name = "utils_test", + size = "small", + srcs = ["utils_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":joiner", + ], +) diff --git a/pp_lite/data_join/psi_ot/joiner/hashed_data_join_servicer.py b/pp_lite/data_join/psi_ot/joiner/hashed_data_join_servicer.py new file mode 100644 index 000000000..39b2424a3 --- /dev/null +++ b/pp_lite/data_join/psi_ot/joiner/hashed_data_join_servicer.py @@ -0,0 +1,45 @@ +# Copyright 2023 The FedLearner Authors. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +from typing import Iterable, List, Callable + +from pp_lite.rpc.server import IServicer +from pp_lite.proto import hashed_data_join_pb2_grpc, hashed_data_join_pb2 + + +class HashedDataJoinServicer(hashed_data_join_pb2_grpc.HashedDataJoinServiceServicer, IServicer): + + def __init__(self, ids: List[str]): + self._ids = set(ids) + self._inter_ids = [] + self._finished = False + + def is_finished(self): + return self._finished + + def get_data_join_result(self) -> List[str]: + assert self.is_finished(), 'Getting result before finished' + return self._inter_ids + + def DataJoin(self, request_iterator: Iterable[hashed_data_join_pb2.DataJoinRequest], context): + for part in request_iterator: + current_inter_ids = [id for id in part.ids if id in self._ids] + self._inter_ids.extend(current_inter_ids) + yield hashed_data_join_pb2.DataJoinResponse(ids=current_inter_ids) + self._finished = True + + def register(self, server: grpc.Server, stop_hook: Callable[[], None]): + hashed_data_join_pb2_grpc.add_HashedDataJoinServiceServicer_to_server(self, server) diff --git a/pp_lite/data_join/psi_ot/joiner/hashed_data_joiner.py b/pp_lite/data_join/psi_ot/joiner/hashed_data_joiner.py new file mode 100644 index 000000000..ca3e2224d --- /dev/null +++ b/pp_lite/data_join/psi_ot/joiner/hashed_data_joiner.py @@ -0,0 +1,54 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
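The streaming intersection above can be read without the gRPC machinery. A pure-Python sketch of the batch semantics in `HashedDataJoinServicer.DataJoin`: the server holds a set of hashed ids and, for each request batch, responds with only that batch's matches while accumulating the full intersection for `get_data_join_result()`:

```python
# Pure-Python sketch (no gRPC) of DataJoin's per-batch behavior; the literal
# ids below are stand-ins for hashed ids.
server_ids = {'a3', 'f1', '9c'}
client_batches = [['a3', '00'], ['f1', '9c', 'ff']]

inter_ids = []
for batch in client_batches:
    current = [i for i in batch if i in server_ids]
    inter_ids.extend(current)
    print(current)  # what one DataJoinResponse would carry

assert inter_ids == ['a3', 'f1', '9c']
```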
+# + +import time +from typing import List +from pp_lite.data_join.psi_ot.joiner.joiner_interface import Joiner +from pp_lite.data_join.psi_ot.joiner.utils import HashValueSet +from pp_lite.proto.common_pb2 import DataJoinType +from pp_lite.rpc.server import RpcServer +from pp_lite.rpc.hashed_data_join_client import HashedDataJoinClient +from pp_lite.data_join.psi_ot.joiner.hashed_data_join_servicer import HashedDataJoinServicer + + +class HashedDataJoiner(Joiner): + + @property + def type(self) -> DataJoinType: + return DataJoinType.HASHED_DATA_JOIN + + def client_run(self, ids: List[str]) -> List[str]: + client = HashedDataJoinClient(server_port=self.joiner_port) + hash_value_set = HashValueSet() + hash_value_set.add_raw_values(ids) + response_iterator = client.data_join(hash_value_set.get_hash_value_list()) + resp_ids = [] + for part in response_iterator: + for response_hashed_id in part.ids: + resp_ids.append(hash_value_set.get_raw_value(response_hashed_id)) + return resp_ids + + def server_run(self, ids: List[str]) -> List[str]: + hash_value_set = HashValueSet() + hash_value_set.add_raw_values(ids) + servicer = HashedDataJoinServicer(ids=hash_value_set.get_hash_value_list()) + server = RpcServer(servicer=servicer, listen_port=self.joiner_port) + server.start() + for _ in range(1000): + if servicer.is_finished(): + raw_ids = [hash_value_set.get_raw_value(hash_id) for hash_id in servicer.get_data_join_result()] + return raw_ids + time.sleep(1) + raise Exception('data join is not finished') diff --git a/pp_lite/data_join/psi_ot/joiner/joiner_interface.py b/pp_lite/data_join/psi_ot/joiner/joiner_interface.py new file mode 100644 index 000000000..8cced0ffb --- /dev/null +++ b/pp_lite/data_join/psi_ot/joiner/joiner_interface.py @@ -0,0 +1,39 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import List +from abc import ABCMeta, abstractmethod +from pp_lite.proto.common_pb2 import DataJoinType + + +class Joiner(metaclass=ABCMeta): + + def __init__(self, joiner_port: int): + self.joiner_port = joiner_port + + @property + @abstractmethod + def type(self) -> DataJoinType: + raise NotImplementedError + + @abstractmethod + def client_run(self, ids: List[str]) -> List[str]: + """Run data join at client side. The id's in intersection set is returned""" + raise NotImplementedError + + @abstractmethod + def server_run(self, ids: List[str]) -> List[str]: + """Run data join at server side. The id's in intersection set is returned""" + raise NotImplementedError diff --git a/pp_lite/data_join/psi_ot/joiner/ot_psi_joiner.py b/pp_lite/data_join/psi_ot/joiner/ot_psi_joiner.py new file mode 100644 index 000000000..1093a472a --- /dev/null +++ b/pp_lite/data_join/psi_ot/joiner/ot_psi_joiner.py @@ -0,0 +1,83 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import fsspec +import logging +import datetime +from enum import Enum +from typing import List + +from pp_lite.data_join import envs +from pp_lite.proto.common_pb2 import DataJoinType +from pp_lite.data_join.psi_ot.joiner.joiner_interface import Joiner + + +def _write_ids(filename: str, ids: List[str]): + with fsspec.open(filename, 'wt') as f: + f.write('\n'.join(ids)) + + +def _read_ids(filename: str) -> List[str]: + with fsspec.open(filename, 'rt') as f: + return f.read().splitlines() + + +class _Role(Enum): + """the value is consistent with the argument of ot command""" + client = 0 # psi sender; tcp client + server = 1 # psi receiver; tcp server + + +def _timestamp() -> str: + """Return string format of time to make test easier to mock""" + return datetime.datetime.now().strftime('%Y%m%d-%H%M%S-%f') + + +class OtPsiJoiner(Joiner): + + @property + def type(self) -> DataJoinType: + return DataJoinType.OT_PSI + + def _run(self, ids: List[str], role: _Role): + timestamp = _timestamp() + input_path = f'{envs.STORAGE_ROOT}/data/{role.name}-input-{timestamp}' + output_path = f'{envs.STORAGE_ROOT}/data/{role.name}-output-{timestamp}' + _write_ids(input_path, ids) + # cmd = f'{CMD} -r {role.value} -file {input_path} -ofile {output_path} && \ + # -ip localhost:{self.joiner_port}'.split() + # logging.info(f'[OtPsiJoiner] run cmd: {cmd}') + try: + import psi_oprf # pylint: disable=import-outside-toplevel + psi_oprf.PsiRun(role.value, input_path, output_path, f'localhost:{self.joiner_port}') + logging.info('[ot_psi_joiner] PsiRun finished.') + # subprocess.run(cmd, check=True) + joined_ids = _read_ids(output_path) + except Exception as e: # pylint: disable=broad-except + logging.exception('[OtPsiJoiner] error happened during ot psi!') + raise Exception from e + finally: + # delete the input and output file of ot program to release the storage volume + fs = fsspec.get_mapper(input_path).fs + fs.delete(input_path) + if fs.exists(output_path): + fs.delete(output_path) + return joined_ids + + def client_run(self, ids: List[str]) -> List[str]: + return self._run(ids, _Role.client) + + def server_run(self, ids: List[str]) -> List[str]: + return self._run(ids, _Role.server) diff --git a/pp_lite/data_join/psi_ot/joiner/ot_psi_joiner_test.py b/pp_lite/data_join/psi_ot/joiner/ot_psi_joiner_test.py new file mode 100644 index 000000000..9617436a1 --- /dev/null +++ b/pp_lite/data_join/psi_ot/joiner/ot_psi_joiner_test.py @@ -0,0 +1,101 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
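The `_write_ids`/`_read_ids` helpers in ot_psi_joiner.py exchange ids with the OT binary through newline-delimited files. A round-trip sketch of that format (the path below is hypothetical, for the demo only); note that ids themselves must not contain newline characters:

```python
# Round-trip sketch for the fsspec id-file format used by ot_psi_joiner.py.
import fsspec

path = '/tmp/ot-psi-demo-input'  # hypothetical demo path
ids = ['1', '2', '3']

with fsspec.open(path, 'wt') as f:
    f.write('\n'.join(ids))
with fsspec.open(path, 'rt') as f:
    assert f.read().splitlines() == ids
```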
+#
+
+import unittest
+from typing import List
+from unittest.mock import patch
+from tempfile import TemporaryDirectory
+from concurrent.futures import ThreadPoolExecutor
+import importlib.util as imutil
+
+from pp_lite.data_join import envs
+from pp_lite.data_join.psi_ot.joiner.ot_psi_joiner import OtPsiJoiner
+
+
+def _write_fake_output(filename: str, ids: List[str]):
+    with open(filename, 'wt', encoding='utf-8') as f:
+        f.write('\n'.join(ids))
+
+
+def check_psi_oprf():
+    return imutil.find_spec('psi_oprf') is not None
+
+
+@unittest.skipUnless(check_psi_oprf(), 'require ot psi file')
+class OtPsiJoinerTest(unittest.TestCase):
+
+    # psi_oprf.PsiRun is patched so the tests exercise the file plumbing
+    # without running the real OT binary.
+    @patch('pp_lite.data_join.psi_ot.joiner.ot_psi_joiner._timestamp')
+    @patch('psi_oprf.PsiRun')
+    def test_client_run(self, mock_run, mock_timestamp):
+        joiner = OtPsiJoiner(joiner_port=12345)
+        timestamp = '20220310-185545'
+        mock_timestamp.return_value = timestamp
+        with TemporaryDirectory() as temp_dir:
+            envs.STORAGE_ROOT = temp_dir
+            input_path = f'{envs.STORAGE_ROOT}/data/client-input-{timestamp}'
+            output_path = f'{envs.STORAGE_ROOT}/data/client-output-{timestamp}'
+            inter_ids = ['4', '5', '6']
+
+            def _side_effect(*args, **kwargs):
+                _write_fake_output(output_path, inter_ids)
+
+            mock_run.side_effect = _side_effect
+            ids = joiner.client_run(['1', '2', '3'])
+            mock_run.assert_called_with(0, input_path, output_path, f'localhost:{joiner.joiner_port}')
+            self.assertEqual(ids, inter_ids)
+
+    @patch('pp_lite.data_join.psi_ot.joiner.ot_psi_joiner._timestamp')
+    @patch('psi_oprf.PsiRun')
+    def test_server_run(self, mock_run, mock_timestamp):
+        joiner = OtPsiJoiner(joiner_port=12345)
+        timestamp = '20220310-185545'
+        mock_timestamp.return_value = timestamp
+        with TemporaryDirectory() as temp_dir:
+            envs.STORAGE_ROOT = temp_dir
+            input_path = f'{envs.STORAGE_ROOT}/data/server-input-{timestamp}'
+            output_path = f'{envs.STORAGE_ROOT}/data/server-output-{timestamp}'
+            inter_ids = ['4', '5', '6']
+
+            def _side_effect(*args, **kwargs):
+                _write_fake_output(output_path, inter_ids)
+
+            mock_run.side_effect = _side_effect
+            ids = joiner.server_run(['1', '2', '3'])
+            mock_run.assert_called_with(1, input_path, output_path, f'localhost:{joiner.joiner_port}')
+            self.assertEqual(ids, inter_ids)
+
+
+@unittest.skipUnless(check_psi_oprf(), 'require ot psi file')
+class OtPsiJoinerInContainerTest(unittest.TestCase):
+
+    def test_joiner(self):
+        client_ids = [str(i) for i in range(10000)]
+        server_ids = [str(i) for i in range(5000, 15000)]
+        joined_ids = [str(i) for i in range(5000, 10000)]
+        joiner = OtPsiJoiner(joiner_port=1212)
+        with ThreadPoolExecutor(max_workers=2) as pool:
+            client_fut = pool.submit(joiner.client_run, client_ids)
+            server_fut = pool.submit(joiner.server_run, server_ids)
+            client_result = client_fut.result()
+            server_result = server_fut.result()
+            self.assertEqual(client_result, server_result)
+            self.assertEqual(sorted(client_result), joined_ids)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pp_lite/data_join/psi_ot/joiner/utils.py b/pp_lite/data_join/psi_ot/joiner/utils.py
new file mode 100644
index 000000000..79e0a27ad
--- /dev/null
+++ b/pp_lite/data_join/psi_ot/joiner/utils.py
@@ -0,0 +1,38 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import List
+from cityhash import CityHash64  # pylint: disable=no-name-in-module
+
+
+class HashValueSet(object):
+
+    def __init__(self) -> None:
+        self._hash_map = {}
+
+    def add_raw_values(self, values: List[str]):
+        self._hash_map.update({str(CityHash64(value)): value for value in values})
+
+    def get_raw_value(self, hashed_value: str) -> str:
+        # raises instead of returning None, so the return type is plain str
+        try:
+            return self._hash_map[hashed_value]
+        except KeyError as e:
+            raise Exception('Hashed value not found in hash map.') from e
+
+    def get_hash_value_list(self) -> List[str]:
+        return list(self._hash_map.keys())
+
+    def exists(self, hashed_value: str) -> bool:
+        return hashed_value in self._hash_map
diff --git a/pp_lite/data_join/psi_ot/joiner/utils_test.py b/pp_lite/data_join/psi_ot/joiner/utils_test.py
new file mode 100644
index 000000000..1c43ddc79
--- /dev/null
+++ b/pp_lite/data_join/psi_ot/joiner/utils_test.py
@@ -0,0 +1,48 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from pp_lite.data_join.psi_ot.joiner.utils import HashValueSet
+
+
+class HashValueSetTest(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self._hash_value_set = HashValueSet()
+        self._hash_value_set.add_raw_values(['1', '2'])
+
+    def test_add(self):
+        self.assertDictEqual(
+            self._hash_value_set._hash_map,  # pylint: disable=protected-access
+            {
+                '9304157803607034849': '1',
+                '6920640749119438759': '2'
+            })
+
+    def test_get(self):
+        self.assertEqual(self._hash_value_set.get_raw_value('9304157803607034849'), '1')
+        # a hash that was never added raises
+        self.assertRaises(Exception, self._hash_value_set.get_raw_value, '123')
+
+    def test_list(self):
+        self.assertListEqual(self._hash_value_set.get_hash_value_list(), ['9304157803607034849', '6920640749119438759'])
+
+    def test_exists(self):
+        self.assertTrue(self._hash_value_set.exists('9304157803607034849'))
+        self.assertFalse(self._hash_value_set.exists('123'))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pp_lite/data_join/psi_ot/server.py b/pp_lite/data_join/psi_ot/server.py
new file mode 100644
index 000000000..338d3c9f7
--- /dev/null
+++ b/pp_lite/data_join/psi_ot/server.py
@@ -0,0 +1,46 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
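A usage sketch for `HashValueSet`, mirroring how `HashedDataJoiner` uses it: raw ids are hashed for the wire, and the intersected hashes are mapped back to raw values afterwards (the literal hash values are the same ones the tests above assert):

```python
# HashValueSet round trip: hash for transmission, map the result back.
from pp_lite.data_join.psi_ot.joiner.utils import HashValueSet

hash_value_set = HashValueSet()
hash_value_set.add_raw_values(['1', '2'])
wire_ids = hash_value_set.get_hash_value_list()  # hashed ids sent to the peer
raw_ids = [hash_value_set.get_raw_value(h) for h in wire_ids]
assert sorted(raw_ids) == ['1', '2']
```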
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import logging.config + +from pp_lite.data_join import envs + +from pp_lite.rpc.server import RpcServer +from pp_lite.proto.arguments_pb2 import Arguments +from pp_lite.proto.common_pb2 import FileType, DataJoinType +from pp_lite.data_join.psi_ot.data_join_control_servicer import DataJoinControlServicer +from pp_lite.data_join.psi_ot.data_join_server import DataJoinServer +from pp_lite.data_join.psi_ot.joiner.ot_psi_joiner import OtPsiJoiner +from pp_lite.data_join.psi_ot.joiner.hashed_data_joiner import HashedDataJoiner +from pp_lite.data_join.utils.example_id_writer import ExampleIdWriter +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader +from pp_lite.utils.logging_config import logging_config, log_path + + +def run(args: Arguments): + logging.config.dictConfig(logging_config(log_path(log_dir=envs.STORAGE_ROOT))) + reader = ExampleIdReader(input_path=args.input_path, file_type=FileType.CSV, key_column=args.key_column) + writer = ExampleIdWriter(output_path=args.output_path, key_column=args.key_column) + if args.data_join_type == DataJoinType.HASHED_DATA_JOIN: + joiner = HashedDataJoiner(joiner_port=args.joiner_port) + else: + joiner = OtPsiJoiner(joiner_port=args.joiner_port) + data_join_server = DataJoinServer(joiner, reader=reader, writer=writer) + servicer = DataJoinControlServicer(data_join_server=data_join_server, cluster_spec=args.cluster_spec) + server = RpcServer(servicer, listen_port=args.server_port) + server.start() + server.wait() + logging.info('server is finished!') diff --git a/pp_lite/data_join/psi_rsa/BUILD.bazel b/pp_lite/data_join/psi_rsa/BUILD.bazel new file mode 100644 index 000000000..4b5288aec --- /dev/null +++ b/pp_lite/data_join/psi_rsa/BUILD.bazel @@ -0,0 +1,35 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") + +py_library( + name = "psi_rsa", + srcs = [ + "psi_client.py", + "psi_server.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join:envs", + "//pp_lite/data_join/psi_rsa/client", + "//pp_lite/data_join/psi_rsa/server", + "//pp_lite/data_join/utils", + "//pp_lite/rpc", + "//pp_lite/utils", + "//py_libs:metrics_lib", + ], +) + +py_binary( + name = "server_bin", + srcs = ["psi_server.py"], + main = "psi_server.py", + visibility = ["//pp_lite:pp_lite_package"], + deps = [":psi_rsa"], +) + +py_binary( + name = "client_bin", + srcs = ["psi_client.py"], + main = "psi_client.py", + visibility = ["//pp_lite:pp_lite_package"], + deps = [":psi_rsa"], +) diff --git a/pp_lite/data_join/psi_rsa/client/BUILD.bazel b/pp_lite/data_join/psi_rsa/client/BUILD.bazel new file mode 100644 index 000000000..b5837de16 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/client/BUILD.bazel @@ -0,0 +1,29 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "client", + srcs = [ + "data_joiner.py", + "signer.py", + "syncronizer.py", + "task_producer.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join/utils", + "//pp_lite/rpc", + "//pp_lite/utils", + "@common_cityhash//:pkg", + "@common_gmpy2//:pkg", + 
"@common_pandas//:pkg", + "@common_rsa//:pkg", + ], +) + +py_test( + name = "signer_test", + size = "small", + srcs = ["signer_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = ["client"], +) diff --git a/pp_lite/data_join/psi_rsa/client/data_joiner.py b/pp_lite/data_join/psi_rsa/client/data_joiner.py new file mode 100644 index 000000000..dd9364c3c --- /dev/null +++ b/pp_lite/data_join/psi_rsa/client/data_joiner.py @@ -0,0 +1,53 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import List, Optional, Iterable + +from pp_lite.rpc.client import DataJoinClient +from pp_lite.utils.decorators import time_log + + +class DataJoiner: + + def __init__(self, client: DataJoinClient, partition_batch_size: int = 10): + self._client = client + self._partition_batch_size = partition_batch_size + + def _get_partition(self, partition_id: int) -> Iterable[Optional[List[str]]]: + partition_list = [partition_id] + if partition_id == -1: + part_num = self._client.get_partition_number().partition_num + partition_list = list(range(part_num)) + + for i in range(0, len(partition_list), self._partition_batch_size): + batch = partition_list[i:i + self._partition_batch_size] + for resp in self._client.get_signed_ids(partition_ids=batch): + yield resp.ids + + @time_log('Joiner') + def join(self, signed_ids: List[str], partition_id: int = -1): + hash_table = set(signed_ids) + intersection_ids = [] + # TODO: implement multiprocess consumer + for ids in self._get_partition(partition_id): + if ids is None: + continue + logging.info(f'[Joiner] {len(ids)} ids received from server') + inter = [i for i in ids if i in hash_table] + intersection_ids.extend(inter) + logging.info(f'[Joiner] {len(intersection_ids)} ids joined') + # remove duplicate elements + return list(set(intersection_ids)) diff --git a/pp_lite/data_join/psi_rsa/client/signer.py b/pp_lite/data_join/psi_rsa/client/signer.py new file mode 100644 index 000000000..a91c7ac71 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/client/signer.py @@ -0,0 +1,110 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import rsa +import random +import logging +from typing import List, Tuple, Iterable, Optional +from concurrent.futures import ThreadPoolExecutor + +from cityhash import CityHash64 # pylint: disable=no-name-in-module +from gmpy2 import powmod, divm, mpz # pylint: disable=no-name-in-module + +from pp_lite.rpc.client import DataJoinClient +from pp_lite.utils.decorators import time_log +from pp_lite.data_join.utils.generators import make_ids_iterator_from_list + + +class Signer: + + def __init__(self, client: DataJoinClient, num_workers: Optional[int] = None): + self._client = client + self._public_key = self._get_public_key() + self._pool = None + if num_workers is not None: + self._pool = ThreadPoolExecutor(max_workers=num_workers) + + def _get_public_key(self) -> rsa.PublicKey: + resp = self._client.get_public_key() + return rsa.PublicKey(int(resp.n), int(resp.e)) + + @staticmethod + def _blind(ids: List[str], public_key: rsa.PublicKey) -> Tuple[List[int], List[int]]: + """Blind raw id by random number + blind id by id * r^e % n, where r is the blind number, randomly sampled from (0, 2^256), + (e, n) is the rsa public key. + Args: + ids: list of raw id + public_key: rsa public key + Returns: + blinded id + """ + blind_numbers = [random.SystemRandom().getrandbits(256) for i in ids] + hashed_ids = [CityHash64(i) for i in ids] + e = public_key.e + n = public_key.n + blinded_ids = [(powmod(r, e, n) * x) % n for r, x in zip(blind_numbers, hashed_ids)] + return blinded_ids, blind_numbers + + @staticmethod + def _deblind(blind_signed_ids: List[int], blind_numbers: List[int], public_key: rsa.PublicKey) -> List[mpz]: + n = public_key.n + signed_ids = [divm(x, r, n) for x, r in zip(blind_signed_ids, blind_numbers)] + return signed_ids + + @staticmethod + def _one_way_hash(ids: List[int]): + hashed_ids = [hex(CityHash64(str(i)))[2:] for i in ids] + return hashed_ids + + def _remote_sign(self, blinded_ids: List[int]): + blinded_ids = [str(i) for i in blinded_ids] + resp = self._client.sign(blinded_ids) + return [int(i) for i in resp.signed_ids] + + def sign_batch(self, ids: List[str]) -> List[str]: + """Sign raw id by calling service from remote server + sign id by + 1. generate blind number r; + 2. blind raw id by r: id * r^e % n; + 3. calling blind sign service: id^d * r^d^e % n = id^d * r % n + 4. deblind blinded signed id by r: id^d % n + 5. hash signed id: hash(id^d%n) + Args: + ids: raw id + Returns: + signed ids + """ + blinded_ids, blind_numbers = self._blind(ids, self._public_key) + blinded_signed_ids = self._remote_sign(blinded_ids) + signed_ids = self._deblind(blinded_signed_ids, blind_numbers, self._public_key) + hashed_ids = self._one_way_hash(signed_ids) + return hashed_ids + + def sign_iterator(self, ids_iterator: Iterable[List[str]]): + if self._pool: + yield from self._pool.map(self.sign_batch, ids_iterator) + else: + for ids in ids_iterator: + yield self.sign_batch(ids) + + @time_log('Signer') + def sign_list(self, ids: List[str], batch_size=4096): + ids_iterator = make_ids_iterator_from_list(ids, batch_size) + signed_ids = [] + for sids in self.sign_iterator(ids_iterator=ids_iterator): + signed_ids.extend(sids) + logging.info(f'[Signer] {len(signed_ids)} ids signed') + return signed_ids diff --git a/pp_lite/data_join/psi_rsa/client/signer_test.py b/pp_lite/data_join/psi_rsa/client/signer_test.py new file mode 100644 index 000000000..06783a51d --- /dev/null +++ b/pp_lite/data_join/psi_rsa/client/signer_test.py @@ -0,0 +1,45 @@ +# Copyright 2023 The FedLearner Authors. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import MagicMock, patch +from pp_lite.proto import data_join_service_pb2 +from pp_lite.rpc.client import DataJoinClient +from pp_lite.data_join.psi_rsa.client.signer import Signer +import rsa +from gmpy2 import powmod # pylint: disable=no-name-in-module + + +class SignerTest(unittest.TestCase): + + @patch('pp_lite.rpc.client.DataJoinClient.get_public_key') + def setUp(self, get_public_key) -> None: + get_public_key.return_value = data_join_service_pb2.PublicKeyResponse(n=str(9376987687101647609), e=str(65537)) + self.private_key = rsa.PrivateKey(9376987687101647609, 65537, 332945516441048573, 15236990059, 615409451) + self.client = DataJoinClient() + self.signer = Signer(client=self.client) + + def test_sign(self): + self.signer._client.sign = MagicMock( # pylint: disable=protected-access + side_effect=lambda x: data_join_service_pb2.SignResponse( + signed_ids=[str(powmod(int(i), self.private_key.d, self.private_key.n)) for i in x])) + + signed_ids = self.signer.sign_batch(['1', '2', '3']) + correct_signed_ids = ['288f534080870918', '19ade65d522c7915', 'ab2fa2127da06b98'] + self.assertEqual(signed_ids, correct_signed_ids) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/psi_rsa/client/syncronizer.py b/pp_lite/data_join/psi_rsa/client/syncronizer.py new file mode 100644 index 000000000..7a42be9bc --- /dev/null +++ b/pp_lite/data_join/psi_rsa/client/syncronizer.py @@ -0,0 +1,36 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
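The five-step protocol documented in `Signer.sign_batch` can be checked end to end with small numbers. A worked sketch of the algebra, using the RSA test key from signer_test.py above (the id and blind factor are stand-ins; the identity holds because blinding by r^e commutes with signing, i.e. (x·r^e)^d = x^d·r mod n):

```python
# Worked check of the blind-signature algebra in Signer.sign_batch.
from gmpy2 import powmod, divm  # pylint: disable=no-name-in-module

n, e, d = 9376987687101647609, 65537, 332945516441048573
x, r = 123456789, 987654321  # a hashed id and a blind factor (stand-ins)

blinded = (powmod(r, e, n) * x) % n   # step 2: blind, x * r^e mod n
blind_signed = powmod(blinded, d, n)  # step 3: server signs, x^d * r mod n
signed = divm(blind_signed, r, n)     # step 4: deblind, divide out r
assert signed == powmod(x, d, n)      # identical to signing x directly
```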
+# + +from typing import List, Iterable + +from pp_lite.rpc.client import DataJoinClient +from pp_lite.utils.decorators import retry_fn, time_log +from pp_lite.data_join.utils.generators import make_ids_iterator_from_list + + +class ResultSynchronizer: + + def __init__(self, client: DataJoinClient, batch_size: int = 4096): + self._client = client + self._batch_size = batch_size + + def sync_from_iterator(self, ids_iterator: Iterable[List[str]], partition_id: int = -1): + self._client.sync_data_join_result(ids_iterator, partition_id) + + @time_log('Synchronizer') + @retry_fn(retry_times=3) + def sync(self, ids: List[str], partition_id: int = -1): + ids_iterator = make_ids_iterator_from_list(ids, self._batch_size) + self.sync_from_iterator(ids_iterator, partition_id) diff --git a/pp_lite/data_join/psi_rsa/client/task_producer.py b/pp_lite/data_join/psi_rsa/client/task_producer.py new file mode 100644 index 000000000..6dd204b77 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/client/task_producer.py @@ -0,0 +1,96 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import pandas +import logging +from typing import List +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader + +from pp_lite.rpc.client import DataJoinClient +from pp_lite.data_join.psi_rsa.client.data_joiner import DataJoiner +from pp_lite.data_join.psi_rsa.client.signer import Signer +from pp_lite.data_join.psi_rsa.client.syncronizer import ResultSynchronizer +from pp_lite.utils.metrics import emit_counter + + +class TaskProducer: + + def __init__(self, + client: DataJoinClient, + reader: ExampleIdReader, + output_dir: str, + key_column: str, + batch_size: int, + num_sign_parallel: int = 5): + self._client = client + self._reader = reader + self._output_dir = output_dir + self._key_column = key_column + self._batch_size = batch_size + self._signer = Signer(client=self._client, num_workers=num_sign_parallel) + self._joiner = DataJoiner(client=self._client) + self._synchronizer = ResultSynchronizer(client=self._client) + self._create_dirs() + + def run(self, partition_id: int): + logging.info(f'[TaskProducer] dealing with partition{partition_id} ......') + ids = self._read_ids(partition_id) + if len(ids) == 0: + logging.error('[DataReader] Input data is empty, so exit now.') + raise ValueError('[DataReader] Input data is empty, please confirm input path') + logging.info(f'[DataReader] the input data count is {len(ids)}') + # sign + signed_ids = self._signer.sign_list(ids, self._batch_size) + signed_df = pandas.DataFrame({self._key_column: ids, 'sign': signed_ids}) + signed_df.to_csv(self._get_signed_path(partition_id), index=False) + # join + joined_signed_ids = self._joiner.join(signed_ids=signed_ids, partition_id=partition_id) + singed2id = dict(zip(signed_ids, ids)) + joined_ids = [singed2id[i] for i in joined_signed_ids] + # TODO (zhou.yi) use ExampleIdWriter to write file + joined_df = pandas.DataFrame({self._key_column: joined_ids}) + 
joined_df.to_csv(self._get_joined_path(partition_id), index=False) + # synchronize + self._synchronizer.sync(ids=joined_ids, partition_id=partition_id) + # update audit info + emit_counter('Input data count', len(ids)) + emit_counter('Joined data count', len(joined_ids)) + + def _read_ids(self, partition_id: int) -> List[str]: + + if partition_id < 0: + # partition_id < 0 means the client and the server did not use the same logic to partition, + # all client data intersect with all server data. + return self._reader.read_all() + return self._reader.read(partition_id) + + def _create_dirs(self): + os.makedirs(os.path.join(self._output_dir, 'signed'), exist_ok=True) + os.makedirs(os.path.join(self._output_dir, 'joined'), exist_ok=True) + + def _get_signed_path(self, partition_id: int) -> str: + if partition_id < 0: + file_path = 'signed.csv' + else: + file_path = f'part-{str(partition_id).zfill(5)}-signed.csv' + return os.path.join(self._output_dir, 'signed', file_path) + + def _get_joined_path(self, partition_id: int) -> str: + if partition_id < 0: + file_path = 'joined.csv' + else: + file_path = f'part-{str(partition_id).zfill(5)}-joined.csv' + return os.path.join(self._output_dir, 'joined', file_path) diff --git a/pp_lite/data_join/psi_rsa/psi_client.py b/pp_lite/data_join/psi_rsa/psi_client.py new file mode 100644 index 000000000..3b1a70e87 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/psi_client.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
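A sketch of the signed-to-raw mapping step in `TaskProducer.run` above (the diff spells the lookup table `singed2id`; `signed2id` is meant). The values below are stand-ins for real ids and signatures:

```python
# Map the joined signed ids back to raw ids via a signed->raw lookup.
ids = ['u1', 'u2', 'u3']         # raw ids read from the partition
signed_ids = ['s1', 's2', 's3']  # their signatures, same order
joined_signed_ids = ['s3', 's1']  # intersection returned by the joiner

signed2id = dict(zip(signed_ids, ids))
joined_ids = [signed2id[s] for s in joined_signed_ids]
assert joined_ids == ['u3', 'u1']
```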
+# + +import argparse +import json +import logging.config +import os +from os import getenv +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader +from pp_lite.proto.common_pb2 import FileType + +from pp_lite.rpc.client import DataJoinClient +from pp_lite.data_join.psi_rsa.client.task_producer import TaskProducer +from pp_lite.utils.logging_config import logging_config, log_path +from pp_lite.utils.metrics import get_audit_value, show_audit_info +from pp_lite.utils.tools import get_partition_ids, print_named_dict + + +def str_as_bool(v): + if isinstance(v, bool): + return v + if v.lower() in ('yes', 'true', 't', 'y', '1'): + return True + if v.lower() in ('no', 'false', 'f', 'n', '0'): + return False + raise argparse.ArgumentTypeError('Boolean value expected.') + + +# TODO(zhou.yi): change to CLI arguments +def get_arguments(): + arguments = { + 'input_dir': getenv('INPUT_DIR', '/app/workdir/input'), + 'output_dir': getenv('OUTPUT_DIR', '/app/workdir/output'), + 'key_column': getenv('KEY_COLUMN', 'raw_id'), + 'server_port': int(getenv('SERVER_PORT', '50051')), + 'batch_size': int(getenv('BATCH_SIZE', '4096')), + 'num_sign_parallel': int(getenv('NUM_SIGN_PARALLEL', '20')), + 'partitioned': str_as_bool(getenv('PARTITIONED', 'false')), + 'log_dir': getenv('LOG_DIR', '/app/workdir/log/'), + } + arguments['worker_rank'] = int(getenv('INDEX', '0')) + if getenv('NUM_WORKERS'): + arguments['num_workers'] = int(getenv('NUM_WORKERS')) + role = getenv('ROLE', '') + if getenv('CLUSTER_SPEC') and role != 'light_client': + cluster_spec = json.loads(getenv('CLUSTER_SPEC')) + + # Only accept CLUSTER_SPEC in cluster environment, + # so that CLUSTER_SPEC from .env in light client environment can be omitted. + if 'clusterSpec' in cluster_spec: + arguments['num_workers'] = len(cluster_spec['clusterSpec']['Worker']) + return arguments + + +def _show_client_audit(): + show_audit_info() + intersection_rate = format(get_audit_value('Joined data count') / get_audit_value('Input data count') * 100, '.2f') + logging.info('====================Result====================') + logging.info(f'Intersection rate {intersection_rate} %') + logging.info('Running log locate at workdir/log') + logging.info('Data join result locate at workdir/output/joined') + logging.info('==============================================') + + +def run(args: dict): + if args.get('log_dir') is not None: + if not os.path.exists(args['log_dir']): + os.makedirs(args['log_dir'], exist_ok=True) + logging.config.dictConfig(logging_config(file_path=log_path(args['log_dir']))) + print_named_dict(name='Client Arguments', target_dict=args) + client = DataJoinClient(args['server_port']) + client.check_server_ready(timeout_seconds=60) + reader = ExampleIdReader(input_path=args['input_dir'], file_type=FileType.CSV, key_column=args['key_column']) + partition_list = [-1] + # If the client and the server use the same logic to partition, the partition can be intersected one by one. + # Otherwise, all client data intersect with all server data. 
+ if args['partitioned']: + num_partitions = reader.num_partitions + partition_list = get_partition_ids(worker_rank=args['worker_rank'], + num_workers=args['num_workers'], + num_partitions=num_partitions) + + task_producer = TaskProducer(client=client, + reader=reader, + output_dir=args['output_dir'], + key_column=args['key_column'], + batch_size=args['batch_size'], + num_sign_parallel=args['num_sign_parallel']) + for partition_id in partition_list: + task_producer.run(partition_id=partition_id) + _show_client_audit() + client.finish() diff --git a/pp_lite/data_join/psi_rsa/psi_server.py b/pp_lite/data_join/psi_rsa/psi_server.py new file mode 100644 index 000000000..66afc7363 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/psi_server.py @@ -0,0 +1,55 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging.config +from os import getenv + +from pp_lite.data_join import envs + +from pp_lite.rpc.server import RpcServer +from pp_lite.data_join.psi_rsa.server.data_join_servicer import DataJoinServiceServicer +from pp_lite.data_join.psi_rsa.server.utils import load_private_rsa_key +from pp_lite.utils.logging_config import logging_config, log_path +from pp_lite.utils.tools import print_named_dict + + +def get_arguments(): + arguments = { + 'rsa_private_key_path': getenv('PRIVATE_KEY_PATH'), + 'input_dir': getenv('INPUT_DIR'), + 'output_dir': getenv('OUTPUT_DIR'), + 'signed_column': getenv('SIGNED_COLUMN', 'signed'), + 'key_column': getenv('KEY_COLUMN', 'raw_id'), + 'server_port': int(getenv('SERVER_PORT', '50051')), + 'batch_size': int(getenv('BATCH_SIZE', '4096')), + 'num_sign_parallel': int(getenv('NUM_SIGN_PARALLEL', '30')) + } + return arguments + + +def run(args: dict): + logging.config.dictConfig(logging_config(file_path=log_path(log_dir=envs.STORAGE_ROOT))) + print_named_dict(name='Server Arguments', target_dict=args) + private_key = load_private_rsa_key(args['rsa_private_key_path']) + servicer = DataJoinServiceServicer(private_key=private_key, + input_dir=args['input_dir'], + output_dir=args['output_dir'], + signed_column=args['signed_column'], + key_column=args['key_column'], + batch_size=args['batch_size'], + num_sign_parallel=args['num_sign_parallel']) + server = RpcServer(servicer=servicer, listen_port=args['server_port']) + server.start() + server.wait() diff --git a/pp_lite/data_join/psi_rsa/server/BUILD.bazel b/pp_lite/data_join/psi_rsa/server/BUILD.bazel new file mode 100644 index 000000000..cf64cd8fc --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/BUILD.bazel @@ -0,0 +1,49 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "server", + srcs = [ + "data_join_servicer.py", + "partition_reader.py", + "partition_writer.py", + "signer.py", + "utils.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join/utils", + "//pp_lite/proto:py_grpc", + "//pp_lite/proto:py_proto", + "//pp_lite/rpc", + "//pp_lite/utils", + "@common_fsspec//:pkg", + 
"@common_gmpy2//:pkg", + "@common_pandas//:pkg", + "@common_pyarrow//:pkg", # keep + "@common_rsa//:pkg", + ], +) + +py_test( + name = "partition_writer_test", + size = "small", + srcs = ["partition_writer_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [":server"], +) + +py_test( + name = "partition_reader_test", + size = "small", + srcs = ["partition_reader_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [":server"], +) + +py_test( + name = "signer_test", + size = "small", + srcs = ["signer_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [":server"], +) diff --git a/pp_lite/data_join/psi_rsa/server/data_join_servicer.py b/pp_lite/data_join/psi_rsa/server/data_join_servicer.py new file mode 100644 index 000000000..57fecdea3 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/data_join_servicer.py @@ -0,0 +1,115 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +import logging +from typing import Callable +from google.protobuf import empty_pb2 +import rsa + +from pp_lite.rpc.server import IServicer +from pp_lite.proto.common_pb2 import Pong, Ping +from pp_lite.proto import data_join_service_pb2, data_join_service_pb2_grpc +from pp_lite.data_join.psi_rsa.server.partition_writer import RsaServerPartitionWriter +from pp_lite.data_join.psi_rsa.server.signer import RsaDataJoinSigner +from pp_lite.data_join.psi_rsa.server.partition_reader import RsaServerPartitionReader +from pp_lite.utils.metrics import show_audit_info, emit_counter +from pp_lite.utils import metric_collector + + +class DataJoinServiceServicer(data_join_service_pb2_grpc.DataJoinServiceServicer, IServicer): + + def __init__(self, + private_key: rsa.PrivateKey, + input_dir: str, + output_dir: str, + signed_column: str, + key_column: str, + batch_size: int = 4096, + num_sign_parallel: int = 1): + self._writer = RsaServerPartitionWriter(output_dir=output_dir, key_column=key_column) + self._stop_hook = None + self._signer = RsaDataJoinSigner(private_key=private_key, num_workers=num_sign_parallel) + self._partition_reader = RsaServerPartitionReader(input_dir=input_dir, + signed_column=signed_column, + batch_size=batch_size) + + def GetPartitionNumber(self, request, context): + emit_counter('get_partition_num', 1) + metric_collector.emit_counter('dataset.data_join.rsa_psi.get_partition_num', 1, {'role': 'server'}) + partition_num = self._partition_reader.get_partition_num() + logging.info(f'Receive request \'GetPartitionNum\' from client, partition num is {partition_num}') + return data_join_service_pb2.GetPartitionNumberResponse(partition_num=partition_num) + + def GetSignedIds(self, request: data_join_service_pb2.GetSignedIdsRequest, context): + emit_counter('get_partition', 1) + metric_collector.emit_counter('dataset.data_join.rsa_psi.get_partition', 1, {'role': 'server'}) + partition_ids = request.partition_ids + tip = 'without partition' if not partition_ids else f'partition {partition_ids[0]} ~ 
{partition_ids[-1]}' + logging.info(f'Receive request \'GetPartition\' from client, {tip}') + total_num = 0 + ids_generator = self._partition_reader.get_ids_generator(partition_ids) + for ids in ids_generator: + emit_counter('send_ids', len(ids)) + metric_collector.emit_counter('dataset.data_join.rsa_psi.send_ids', len(ids), {'role': 'server'}) + total_num = total_num + len(ids) + logging.info(f'Sending data {tip}, sent {total_num} ids now') + yield data_join_service_pb2.GetSignedIdsResponse(ids=ids) + + def GetPublicKey(self, request, context): + emit_counter('get_public_key', 1) + metric_collector.emit_counter('dataset.data_join.rsa_psi.get_public_key', 1, {'role': 'server'}) + logging.info('Receive request \'GetPublicKey\' from client') + public_key = self._signer.public_key + return data_join_service_pb2.PublicKeyResponse(e=str(public_key.e), n=str(public_key.n)) + + def Sign(self, request: data_join_service_pb2.SignRequest, context): + ids = [int(i) for i in request.ids] + emit_counter('sign_time', 1) + metric_collector.emit_counter('dataset.data_join.rsa_psi.sign_time', 1, {'role': 'server'}) + emit_counter('sign_ids', len(ids)) + metric_collector.emit_counter('dataset.data_join.rsa_psi.sign_ids', len(ids), {'role': 'server'}) + logging.info(f'Receive request \'Sign\' from client, the number of signed ids is {len(ids)}.') + signed_ids = self._signer.sign_ids(ids) + signed_ids = [str(i) for i in signed_ids] + return data_join_service_pb2.SignResponse(signed_ids=signed_ids) + + def SyncDataJoinResult(self, request_iterator, context): + emit_counter('sync_time', 1) + metric_collector.emit_counter('dataset.data_join.rsa_psi.sync_time', 1, {'role': 'server'}) + logging.info('Receive request \'Synchronize\' from client') + + def data_generator(): + for request in request_iterator: + yield request.partition_id, request.ids + + total_num = self._writer.write_data_join_result(data_generator()) + emit_counter('sync_ids', total_num) + metric_collector.emit_counter('dataset.data_join.rsa_psi.sync_ids', total_num, {'role': 'server'}) + return data_join_service_pb2.SyncDataJoinResultResponse(succeeded=True) + + def Finish(self, request, context): + self._signer.stop() + show_audit_info() + self._stop_hook() + return empty_pb2.Empty() + + def HealthCheck(self, request: Ping, context): + logging.info('Receive request \'HealthCheck\' from client') + return Pong(message=request.message) + + def register(self, server: grpc.Server, stop_hook: Callable[[], None]): + self._stop_hook = stop_hook + data_join_service_pb2_grpc.add_DataJoinServiceServicer_to_server(self, server) diff --git a/pp_lite/data_join/psi_rsa/server/partition_reader.py b/pp_lite/data_join/psi_rsa/server/partition_reader.py new file mode 100644 index 000000000..3fd4b76f6 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/partition_reader.py @@ -0,0 +1,157 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
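The `Sign` RPC above applies the RSA private exponent to each id it receives, i.e. it returns `id ** d mod n`. In an RSA-PSI flow the ids arriving here are typically already blinded by the client; that client is not part of this diff, so the sketch below only illustrates the blinding arithmetic, with `sign_fn` standing in for the `Sign` RPC:

```python
import rsa
from random import SystemRandom

def blind_sign_unblind(x: int, pub: rsa.PublicKey, sign_fn) -> int:
    """Illustrative blind-signature round trip; sign_fn maps v -> v^d mod n."""
    r = SystemRandom().randrange(2, pub.n)            # blinding factor; gcd(r, n) == 1 with overwhelming probability
    blinded = (x * pow(r, pub.e, pub.n)) % pub.n      # x * r^e mod n
    signed = sign_fn(blinded)                         # (x * r^e)^d = x^d * r mod n
    return (signed * pow(r, -1, pub.n)) % pub.n       # strip r to recover x^d mod n; pow(r, -1, n) needs Python 3.8+
```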
+#
+
+import logging
+import os
+import threading
+import queue
+from time import sleep
+from typing import Iterator, Optional, Generator, List
+from pathlib import Path
+
+import pandas
+import fsspec
+
+from pp_lite.utils.decorators import retry_fn
+from pp_lite.data_join.utils.generators import make_ids_iterator_from_list
+
+
+def _get_part_id(filename: str) -> Optional[int]:
+    """Extract the partition id from a filename, e.g. 'part-3' -> 3."""
+    comp = filename.split('-')
+    for c in comp:
+        if c.isdecimal():
+            return int(c)
+    return None
+
+
+def _filter_files(files: Iterator[str], partition_ids: List[int]) -> List[str]:
+    """
+    Args:
+        files(Iterator): iterator over filenames under the input dir
+        partition_ids(List[int]): target partition ids
+    Returns:
+        the files whose partition id is in partition_ids
+    """
+    file_list = []
+    for file in files:
+        if file.startswith('part-'):
+            if _get_part_id(file) in partition_ids:
+                file_list.append(file)
+    return file_list
+
+
+# TODO (zhou.yi): use ExampleIdReader to read
+class RsaServerPartitionReader:
+
+    def __init__(self, input_dir: str, signed_column: str, batch_size: int):
+        self._input_dir = input_dir
+        self._signed_column = signed_column
+        self._batch_size = batch_size
+        self._file_system: fsspec.AbstractFileSystem = fsspec.get_mapper(self._input_dir).fs
+        self._partition_num = self._set_partition_num()
+
+    def _get_files_under(self, path: str) -> List[str]:
+        return [
+            Path(file.get('name')).name
+            for file in self._file_system.ls(path, detail=True)
+            if file.get('type') == 'file'
+        ]
+
+    @staticmethod
+    def _is_valid_file(filename: str) -> bool:
+        return filename.startswith('part-')
+
+    def _set_partition_num(self) -> int:
+        files = self._get_files_under(self._input_dir)
+        return len(list(filter(self._is_valid_file, files)))
+
+    def get_partition_num(self) -> int:
+        return self._partition_num
+
+    def _get_files_by_partition_id(self, partition_ids: List[int]) -> List[str]:
+        files = self._get_files_under(self._input_dir)
+        if not partition_ids:
+            files = filter(self._is_valid_file, files)
+        else:
+            files = _filter_files(files=files, partition_ids=partition_ids)
+        files = [os.path.join(self._input_dir, file) for file in files]
+        return files
+
+    def get_ids_generator(self, partition_ids: List[int]) -> Generator:
+        files = self._get_files_by_partition_id(partition_ids)
+        with _BufferedReader(files, self._signed_column) as reader:
+            for keys in reader.read_keys():
+                for ids in make_ids_iterator_from_list(keys, self._batch_size):
+                    yield ids
+
+
+class _BufferedReader(object):
+    """A reader to get keys from files.
+
+    It uses a producer-consumer pattern to speed up reading.
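+
+    Example (sketch)::
+
+        with _BufferedReader(files, 'signed_id') as reader:
+            for keys in reader.read_keys():
+                ...  # each `keys` is a pandas Series of key strings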
+ """ + _FILE_CAPACITY = 5 + _WAIT_TIME_SECONDS = 1 + + def __init__(self, file_list: List[str], key_column: str): + self._read_thread = threading.Thread(target=self._get_keys_from_files, name='Buffered Reader', daemon=True) + self._data_queue = queue.Queue(maxsize=self._FILE_CAPACITY) + self._file_list = file_list + self._key_column = key_column + self._exception = None + self._finish = False + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if not exc_type: + self._read_thread.join() + + def read_keys(self): + self._read_thread.start() + while True: + if self._exception: + logging.exception(f'read keys with exception: {str(self._exception)}') + raise self._exception + # NOTE: this is not thread-safe + if self._data_queue.empty(): + if self._finish: + break + sleep(self._WAIT_TIME_SECONDS) + else: + yield self._data_queue.get() + + def _get_keys_from_files(self): + try: + # Reads keys per file + for file in self._file_list: + df = read_csv(file) + keys = df[self._key_column].astype('str') + self._data_queue.put(keys) + self._finish = True + # pylint: disable=broad-except + except Exception as e: + self._exception = e + + +# reading from hdfs may fail and exit, so add retry +@retry_fn(retry_times=3) +def read_csv(file_path: str) -> pandas.DataFrame: + with fsspec.open(file_path, mode='r') as f: + logging.debug(f'Read file {file_path}...') + return pandas.read_csv(f) diff --git a/pp_lite/data_join/psi_rsa/server/partition_reader_test.py b/pp_lite/data_join/psi_rsa/server/partition_reader_test.py new file mode 100644 index 000000000..312276824 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/partition_reader_test.py @@ -0,0 +1,57 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# pylint: disable=protected-access +import unittest +import tempfile +from shutil import rmtree + +from pp_lite.data_join.psi_rsa.server.partition_reader import RsaServerPartitionReader + + +class RsaServerPartitionReaderTest(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.input_dir: str = tempfile.mkdtemp() + cls.parts = [] + for _ in range(5): + _, path = tempfile.mkstemp(prefix='part-', dir=cls.input_dir) + with open(path, mode='w', encoding='utf-8') as f: + f.write('signed_id\n1') + cls.helper = RsaServerPartitionReader(input_dir=cls.input_dir, signed_column='signed_id', batch_size=32) + + @classmethod + def tearDownClass(cls) -> None: + rmtree(cls.input_dir) + + def test_get_files_under(self): + files = self.helper._get_files_under(self.input_dir) + self.assertEqual(5, len(files)) + + def test_get_partition_num(self): + num = self.helper.get_partition_num() + self.assertEqual(5, num) + + def test_get_files_by_partition_id(self): + ids = [id.partition('part-')[-1] for id in self.parts] + self.assertEqual(5, len(self.helper._get_files_by_partition_id(ids))) + self.assertEqual(5, len(self.helper._get_files_by_partition_id(None))) + self.assertEqual(3, len(self.helper._get_files_by_partition_id(ids)[:3])) + self.assertEqual(5, len(self.helper._get_files_by_partition_id([]))) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/psi_rsa/server/partition_writer.py b/pp_lite/data_join/psi_rsa/server/partition_writer.py new file mode 100644 index 000000000..5df90da81 --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/partition_writer.py @@ -0,0 +1,58 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
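The test above is declared as a `py_test` target in the `BUILD.bazel` earlier in this diff, so it should be runnable on its own, e.g.:

```shell
bazelisk test //pp_lite/data_join/psi_rsa/server:partition_reader_test
```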
+#
+
+import logging
+import os
+from typing import Iterator, Tuple, List
+
+import fsspec
+
+
+class RsaServerPartitionWriter:
+
+    def __init__(self, output_dir: str, key_column: str):
+        self._output_dir = output_dir
+        self._key_column = key_column
+        self._file_system: fsspec.AbstractFileSystem = fsspec.get_mapper(self._output_dir).fs
+
+    def _get_output_filename(self, partition_id: int = -1) -> str:
+        if partition_id is None or partition_id < 0:
+            return os.path.join(self._output_dir, 'joined', 'output.csv')
+        return os.path.join(self._output_dir, 'joined', f'output_{partition_id}.csv')
+
+    # TODO(zhou.yi): refactor this function by ExampleIdWriter
+    def write_data_join_result(self, data_iterator: Iterator[Tuple[int, List[str]]]):
+        total_num = 0
+
+        partition_id, ids = next(data_iterator, (None, None))
+        if ids is None:
+            logging.warning('no joined ids received from client!')
+            ids = []
+        filename = self._get_output_filename(partition_id)
+        if self._file_system.exists(filename):
+            self._file_system.rm(filename)
+        if not self._file_system.exists(os.path.dirname(filename)):
+            self._file_system.makedirs(os.path.dirname(filename))
+        # keep the log tip consistent with the filename convention above
+        tip = 'without partition' if partition_id is None or partition_id < 0 else f'partition {partition_id}'
+        with fsspec.open(filename, mode='w', encoding='utf-8') as f:
+            f.write(self._key_column + '\n')
+            if ids:
+                f.write('\n'.join(ids) + '\n')
+            total_num = total_num + len(ids)
+            logging.info(f'Received data {tip}, synchronized {total_num} ids so far')
+            for partition_id, ids in data_iterator:
+                tip = 'without partition' if partition_id is None or partition_id < 0 else f'partition {partition_id}'
+                f.write('\n'.join(ids) + '\n')
+                total_num = total_num + len(ids)
+                logging.info(f'Received data {tip}, synchronized {total_num} ids so far')
+        return total_num
diff --git a/pp_lite/data_join/psi_rsa/server/partition_writer_test.py b/pp_lite/data_join/psi_rsa/server/partition_writer_test.py
new file mode 100644
index 000000000..7585de9b1
--- /dev/null
+++ b/pp_lite/data_join/psi_rsa/server/partition_writer_test.py
@@ -0,0 +1,46 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
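As a reading aid for `write_data_join_result` above: every batch of one stream is appended under a single header line, and unpartitioned data (partition id `-1`) lands in `joined/output.csv`. A sketch mirroring the unit test that follows (the output directory is a placeholder):

```python
writer = RsaServerPartitionWriter(output_dir='/tmp/psi_out', key_column='raw_id')
total = writer.write_data_join_result(iter([(-1, ['1', '2']), (-1, ['3'])]))
assert total == 3  # /tmp/psi_out/joined/output.csv now holds raw_id, 1, 2, 3 on separate lines
```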
+# + +import unittest +import tempfile +from shutil import rmtree + +from pp_lite.data_join.psi_rsa.server.partition_writer import RsaServerPartitionWriter + + +class RsaServerPartitionWriterTest(unittest.TestCase): + + def setUp(self) -> None: + self.input_dir: str = tempfile.mkdtemp() + self.output_dir: str = tempfile.mkdtemp() + self.parts = [] + for _ in range(5): + _, path = tempfile.mkstemp(prefix='part-', dir=self.input_dir) + with open(path, mode='w', encoding='utf-8') as f: + f.write('raw_id\n1') + self.writer = RsaServerPartitionWriter(output_dir=self.output_dir, key_column='raw_id') + + def tearDown(self) -> None: + rmtree(self.input_dir) + rmtree(self.output_dir) + + def test_write_data_join_result(self): + self.writer.write_data_join_result(iter([(-1, ['1', '2', '3']), (-1, ['1', '2', '3'])])) + with open(f'{self.output_dir}/joined/output.csv', mode='r', encoding='utf-8') as f: + self.assertEqual('raw_id\n1\n2\n3\n1\n2\n3\n', f.read()) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/psi_rsa/server/signer.py b/pp_lite/data_join/psi_rsa/server/signer.py new file mode 100644 index 000000000..9877d5ffa --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/signer.py @@ -0,0 +1,58 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import multiprocessing +from concurrent.futures import ProcessPoolExecutor +from typing import List + +import rsa +from gmpy2 import powmod # pylint: disable=no-name-in-module + + +class RsaDataJoinSigner(): + + def __init__(self, private_key: rsa.PrivateKey, num_workers: int = 1): + self._private_key = private_key + self._public_key = rsa.PublicKey(self._private_key.n, self._private_key.e) + mp_context = multiprocessing.get_context('spawn') + self._pool = ProcessPoolExecutor(max_workers=num_workers, mp_context=mp_context) + + @property + def private_key(self) -> rsa.PrivateKey: + return self._private_key + + @property + def public_key(self) -> rsa.PublicKey: + return self._public_key + + @staticmethod + def _sign_ids(ids: List[int], private_key: rsa.PrivateKey) -> List[int]: + return [powmod(i, private_key.d, private_key.n) for i in ids] + + def sign_ids(self, ids: List[int]) -> List[int]: + + future = self._pool.submit(self._sign_ids, ids, self._private_key) + return future.result() + + def stop(self): + # Processes in the process pool that have not yet exited will block the server process from exiting, + # so killing each subprocess is needed. 
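+        # `_processes` is a private member of ProcessPoolExecutor (hence the pylint
+        # suppression below); sending SIGTERM to each worker before shutdown()
+        # keeps an idle pool from blocking interpreter exit.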
+ for pid, process in self._pool._processes.items(): # pylint:disable=protected-access + process.terminate() + logging.info(f'send SIGTERM to process {pid}!') + self._pool.shutdown(wait=True) + self._pool = None + logging.info('data join signer stopped') diff --git a/pp_lite/data_join/psi_rsa/server/signer_test.py b/pp_lite/data_join/psi_rsa/server/signer_test.py new file mode 100644 index 000000000..4caad436b --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/signer_test.py @@ -0,0 +1,34 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +import rsa + +from pp_lite.data_join.psi_rsa.server.signer import RsaDataJoinSigner + + +class RsaDataJoinSignerTest(unittest.TestCase): + + def setUp(self): + self._signer = RsaDataJoinSigner( + rsa.PrivateKey(9376987687101647609, 65537, 332945516441048573, 15236990059, 615409451)) + + def test_sign(self): + self.assertListEqual(self._signer.sign_ids([2, 3, 4]), + [5558008899394433345, 4817922342110581069, 672854883936409540]) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/psi_rsa/server/utils.py b/pp_lite/data_join/psi_rsa/server/utils.py new file mode 100644 index 000000000..73c86750d --- /dev/null +++ b/pp_lite/data_join/psi_rsa/server/utils.py @@ -0,0 +1,23 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
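`load_private_rsa_key` below expects a PKCS#1 PEM readable through fsspec. A compatible key can be produced with the same `rsa` package; a minimal sketch (the 2048-bit size and file name are assumptions, not requirements of this code):

```python
import rsa

_, private_key = rsa.newkeys(2048)        # generate an RSA key pair
with open('private.key', 'wb') as f:
    f.write(private_key.save_pkcs1())     # PKCS#1 PEM, as load_pkcs1 expects
```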
+# + +import rsa +import fsspec + + +def load_private_rsa_key(private_key_path: str) -> rsa.PrivateKey: + with fsspec.open(private_key_path, mode='rb') as f: + private_key = rsa.PrivateKey.load_pkcs1(f.read()) + return private_key diff --git a/pp_lite/data_join/utils/BUILD.bazel b/pp_lite/data_join/utils/BUILD.bazel new file mode 100644 index 000000000..dcc461742 --- /dev/null +++ b/pp_lite/data_join/utils/BUILD.bazel @@ -0,0 +1,52 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "utils", + srcs = [ + "example_id_reader.py", + "example_id_writer.py", + "generators.py", + "partitioner.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/proto:py_proto", + "@common_cityhash//:pkg", + "@common_fsspec//:pkg", + "@common_pandas//:pkg", + "@common_pyarrow//:pkg", # keep + ], +) + +py_test( + name = "example_id_reader_test", + size = "small", + srcs = ["example_id_reader_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":utils", + "//pp_lite/testing", + ], +) + +py_test( + name = "example_id_writer_test", + size = "small", + srcs = ["example_id_writer_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":utils", + "//pp_lite/testing", + ], +) + +py_test( + name = "partitioner_test", + size = "medium", + srcs = ["partitioner_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":utils", + "//pp_lite/data_join/psi_ot", + ], +) diff --git a/pp_lite/data_join/utils/example_id_reader.py b/pp_lite/data_join/utils/example_id_reader.py new file mode 100644 index 000000000..ca2bee7ee --- /dev/null +++ b/pp_lite/data_join/utils/example_id_reader.py @@ -0,0 +1,112 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
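`ExampleIdReader` below pulls one key column out of CSV partitions via pandas. Note that pandas interprets `chunksize` as a row count rather than a byte count, so the 16 GiB-looking `CHUNK_SIZE` constant effectively means very large row chunks. A usage sketch against the API as defined below (the input path is illustrative):

```python
from pp_lite.proto.common_pb2 import FileType
from pp_lite.data_join.utils.example_id_reader import ExampleIdReader

reader = ExampleIdReader('/data/input', FileType.CSV, key_column='raw_id')
print(reader.num_partitions)        # number of part-* files under the path
ids = reader.read(partition_id=0)   # keys of one partition, as strings
all_ids = reader.read_all()         # keys from every file under the path
```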
+# + +import logging +import os +import fsspec +import pandas as pd +from typing import List, Iterator +from pp_lite.proto.common_pb2 import FileType + +CHUNK_SIZE = 16 * 1024 * 1024 * 1024 + + +class PartitionInfo: + + def __init__(self, input_path: str): + self._input_path = input_path + self._num_partitions = None + self._files = None + self._fs = fsspec.get_mapper(input_path).fs + + @staticmethod + def _is_valid_file(filename: str) -> bool: + return os.path.split(filename)[1].startswith('part-') + + def _list_files(self): + if self._files is None: + files = [file['name'] for file in self._fs.listdir(self._input_path)] + self._files = list(filter(self._is_valid_file, files)) + return self._files + + @property + def num_partitions(self) -> int: + if self._num_partitions is None: + self._num_partitions = len(self._list_files()) + return self._num_partitions + + def get_all_files(self) -> List[str]: + if self._fs.isfile(self._input_path): + return [self._input_path] + return [file['name'] for file in self._fs.listdir(self._input_path)] + + def get_files(self, partition_id: int) -> List[str]: + """return file given partition id""" + files = [] + try: + for file in self._list_files(): + comp_list = os.path.split(file)[1].split('-') + assert len(comp_list) > 1, f'split file err, file is {file}' + comp = comp_list[1] # as: part-04988-24de412a-0741-4157-bf0f-2e5dc4ebe2d5-c000.csv + if comp.isdigit(): + if int(comp) == partition_id: + files.append(file) + except RuntimeError: + logging.warning(f'[example_id_reader] get_files from {partition_id} err.') + return files + + +class ExampleIdReader: + + def __init__(self, input_path: str, file_type: FileType, key_column: str): + self._file_type = file_type + self._input_path = input_path + self._key_column = key_column + self._partition_info = PartitionInfo(input_path) + self._fs = fsspec.get_mapper(input_path).fs + + @property + def num_partitions(self) -> int: + return self._partition_info.num_partitions + + def _iter_data(self, filename) -> Iterator[pd.DataFrame]: + if self._file_type == FileType.CSV: + with self._fs.open(filename, 'r') as fin: + df = pd.read_csv(fin, chunksize=CHUNK_SIZE) + for chunk in df: + yield chunk + else: + raise NotImplementedError('tfrecord is not supported') + + def _data_iterator(self, partition_id: int) -> Iterator[pd.DataFrame]: + assert partition_id < self.num_partitions + files = self._partition_info.get_files(partition_id) + for file in files: + iterator = self._iter_data(filename=file) + for data in iterator: + yield data + + def read(self, partition_id: int) -> List: + values = [] + for data in self._data_iterator(partition_id): + values.extend(data[self._key_column].astype('str').to_list()) + return values + + def read_all(self) -> List[str]: + ids = [] + for filename in self._partition_info.get_all_files(): + for part in self._iter_data(filename): + ids.extend(part[self._key_column].astype('str').to_list()) + return ids diff --git a/pp_lite/data_join/utils/example_id_reader_test.py b/pp_lite/data_join/utils/example_id_reader_test.py new file mode 100644 index 000000000..9d2f820c9 --- /dev/null +++ b/pp_lite/data_join/utils/example_id_reader_test.py @@ -0,0 +1,73 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import unittest +import tempfile +from pp_lite.proto.common_pb2 import FileType +from pp_lite.data_join.utils.example_id_reader import ExampleIdReader, PartitionInfo +from pp_lite.testing.make_data import _make_fake_data + + +class PartitionInfoTest(unittest.TestCase): + + def test_num_partitions(self): + with tempfile.TemporaryDirectory() as input_dir: + _make_fake_data(input_dir, num_partitions=10, line_num=10) + partition_reader = PartitionInfo(input_dir) + self.assertEqual(partition_reader.num_partitions, 10) + + def test_get_file(self): + with tempfile.TemporaryDirectory() as input_dir: + _make_fake_data(input_dir, num_partitions=10, line_num=10, partitioned=True, spark=False) + partition_reader = PartitionInfo(input_dir) + files = partition_reader.get_files(partition_id=1) + self.assertEqual(files, [os.path.join(input_dir, 'part-1')]) + + def test_get_file_spark(self): + with tempfile.TemporaryDirectory() as input_dir: + _make_fake_data(input_dir, num_partitions=10, line_num=10, partitioned=True, spark=True) + partition_reader = PartitionInfo(input_dir) + files = partition_reader.get_files(partition_id=1) + files = [files[0].split('-')[0] + '-' + files[0].split('-')[1]] + self.assertEqual(files, [os.path.join(input_dir, 'part-1')]) + + def test_get_all_files(self): + with tempfile.TemporaryDirectory() as input_dir: + _make_fake_data(input_dir, num_partitions=10, line_num=10, partitioned=False) + partition_reader = PartitionInfo(input_dir) + files = partition_reader.get_all_files() + self.assertEqual(sorted(files), [f'{input_dir}/abcd-{str(i)}' for i in range(10)]) + + +class ExampleIdReaderTest(unittest.TestCase): + + def test_read(self): + with tempfile.TemporaryDirectory() as input_dir: + _make_fake_data(input_dir, num_partitions=10, line_num=10) + reader = ExampleIdReader(input_dir, FileType.CSV, key_column='part_id') + values = reader.read(partition_id=1) + self.assertEqual(values, ['1'] * 10) + + def test_read_all(self): + with tempfile.TemporaryDirectory() as input_dir: + _make_fake_data(input_dir, num_partitions=10, line_num=10, partitioned=False) + reader = ExampleIdReader(input_dir, FileType.CSV, key_column='part_id') + values = reader.read_all() + self.assertEqual(len(values), 100) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/utils/example_id_writer.py b/pp_lite/data_join/utils/example_id_writer.py new file mode 100644 index 000000000..6b0507242 --- /dev/null +++ b/pp_lite/data_join/utils/example_id_writer.py @@ -0,0 +1,59 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import fsspec
+import logging
+import pandas as pd
+from typing import List
+
+
+class ExampleIdWriter:
+
+    def __init__(self, output_path: str, key_column: str):
+        self._output_path = output_path
+        self._key_column = key_column
+        self._fs = fsspec.get_mapper(output_path).fs
+
+    def write(self, partition_id: int, ids: List[str]):
+        if not self._fs.exists(self._output_path):
+            self._fs.makedirs(self._output_path, exist_ok=True)
+        filename = os.path.join(self._output_path, f'partition_{partition_id}')
+        logging.debug(f'[ExampleIdWriter] start writing {len(ids)} ids for partition {partition_id} to {filename}')
+        with self._fs.open(filename, 'w') as f:
+            df = pd.DataFrame(data={self._key_column: ids})
+            df.to_csv(f, index=False)
+        logging.debug(f'[ExampleIdWriter] finish writing for partition {partition_id}')
+
+    def combine(self, num_partitions: int):
+        # check through the fsspec filesystem, so remote outputs (e.g. HDFS) can be combined too
+        if not self._fs.isfile(os.path.join(self._output_path, 'partition_0')):
+            logging.warning('[ExampleIdWriter] combine failed, as there is no partition file')
+            return
+        self._fs.copy(os.path.join(self._output_path, 'partition_0'), os.path.join(self._output_path, 'output.csv'))
+        with self._fs.open(os.path.join(self._output_path, 'output.csv'), 'ab') as o:
+            for partition_id in range(1, num_partitions):
+                with self._fs.open(os.path.join(self._output_path, f'partition_{partition_id}')) as part_file:
+                    part_file.readline()  # skip the header line; it is already in output.csv
+                    o.write(part_file.read())
+
+    def _success_tag(self, partition_id: int) -> str:
+        return os.path.join(self._output_path, f'{partition_id:04}._SUCCESS')
+
+    def write_success_tag(self, partition_id: int):
+        self._fs.touch(self._success_tag(partition_id))
+        logging.debug(f'[ExampleIdWriter] write success tag for partition {partition_id}')
+
+    def success_tag_exists(self, partition_id: int) -> bool:
+        return self._fs.exists(self._success_tag(partition_id))
diff --git a/pp_lite/data_join/utils/example_id_writer_test.py b/pp_lite/data_join/utils/example_id_writer_test.py
new file mode 100644
index 000000000..a0df306e4
--- /dev/null
+++ b/pp_lite/data_join/utils/example_id_writer_test.py
@@ -0,0 +1,58 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
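Putting `ExampleIdWriter` together: a worker would typically write its partition, drop a success tag so the partition is not redone, and finally stitch everything into one CSV. A sketch using only the methods above (paths and data are placeholders):

```python
writer = ExampleIdWriter('/data/output', key_column='raw_id')
for pid, ids in [(0, ['1', '2']), (1, ['3', '4'])]:
    if not writer.success_tag_exists(pid):  # skip partitions that already finished
        writer.write(pid, ids)
        writer.write_success_tag(pid)
writer.combine(num_partitions=2)            # -> /data/output/output.csv with one header
```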
+# + +import os +import unittest +import tempfile +from pathlib import Path +from pp_lite.data_join.utils.example_id_writer import ExampleIdWriter + + +class ExampleIdWriterTest(unittest.TestCase): + + def test_write(self): + with tempfile.TemporaryDirectory() as temp_dir: + writer = ExampleIdWriter(temp_dir, key_column='raw_id') + writer.write(partition_id=0, ids=['a', 'b']) + with open(os.path.join(temp_dir, 'partition_0'), encoding='utf-8') as f: + content = f.read() + self.assertEqual(content, 'raw_id\na\nb\n') + + def test_write_success_tag(self): + with tempfile.TemporaryDirectory() as temp_dir: + writer = ExampleIdWriter(temp_dir, key_column='raw_id') + writer.write_success_tag(partition_id=1) + self.assertTrue(os.path.exists(os.path.join(temp_dir, '0001._SUCCESS'))) + + def test_success_tag_exists(self): + with tempfile.TemporaryDirectory() as temp_dir: + writer = ExampleIdWriter(temp_dir, key_column='raw_id') + self.assertFalse(writer.success_tag_exists(1)) + Path(os.path.join(temp_dir, '0001._SUCCESS')).touch() + self.assertTrue(writer.success_tag_exists(1)) + + def test_combine(self): + with tempfile.TemporaryDirectory() as temp_dir: + writer = ExampleIdWriter(temp_dir, key_column='raw_id') + writer.write(partition_id=0, ids=[1, 2]) + writer.write(partition_id=1, ids=[3, 4]) + writer.write(partition_id=2, ids=[5, 6]) + writer.combine(3) + with open(os.path.join(temp_dir, 'output.csv'), 'r', encoding='utf-8') as f: + self.assertEqual(f.read(), 'raw_id\n1\n2\n3\n4\n5\n6\n') + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/data_join/utils/generators.py b/pp_lite/data_join/utils/generators.py new file mode 100644 index 000000000..572a9ea72 --- /dev/null +++ b/pp_lite/data_join/utils/generators.py @@ -0,0 +1,23 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import List + + +def make_ids_iterator_from_list(ids: List[str], batch_size=4096): + num_parts = (len(ids) + batch_size - 1) // batch_size + for part_id in range(num_parts): + id_part = ids[part_id * batch_size:(part_id + 1) * batch_size] + yield id_part diff --git a/pp_lite/data_join/utils/partitioner.py b/pp_lite/data_join/utils/partitioner.py new file mode 100644 index 000000000..d838949ed --- /dev/null +++ b/pp_lite/data_join/utils/partitioner.py @@ -0,0 +1,137 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
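The partitioner that follows buckets every row by `CityHash64(key) % num_partitions`; stripped to its core, the assignment is just:

```python
from cityhash import CityHash64  # pylint: disable=no-name-in-module

def partition_of(key: str, num_partitions: int) -> int:
    """Mirrors the bucket assignment used by read_ids below."""
    return CityHash64(key) % num_partitions
```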
+# + +import os +import csv +import fcntl +import shutil +import threading +import logging +import pyarrow as pa +import pyarrow.csv as _csv +from cityhash import CityHash64 # pylint: disable=no-name-in-module +from queue import Queue, Empty +from concurrent.futures import ThreadPoolExecutor + + +def get_partition_path(output_path: str, partition_id: int): + return os.path.join(output_path, f'part-{partition_id}') + + +def read_ids(input_path: str, key_column: str, block_size: int, num_partitions: int, queue: Queue): + t = threading.current_thread() + read_options = _csv.ReadOptions(block_size=block_size) + with _csv.open_csv(input_path, read_options=read_options) as reader: + for chunk in reader: + if chunk is None: + break + raw_df = chunk.to_pandas() + raw_df[key_column] = raw_df[key_column].astype('str') + raw_df['partition_id'] = [CityHash64(i) % num_partitions for i in raw_df[key_column]] + groups = raw_df.groupby('partition_id') + for group in groups: + partition_id, data = group + data.drop(columns=['partition_id'], inplace=True) + table = pa.Table.from_pandas(data, preserve_index=False) + group = (partition_id, table) + queue.put(group) + logging.info(f'[Reader]: Put {table.num_rows} ids with partition id {partition_id} into queue of size ' + f'{queue.qsize()} ------ Thread_id: {t.ident}') + + +def write_partitioned_ids(output_path: str, queue: Queue): + try: + while True: + t = threading.current_thread() + partition_id, table = queue.get(timeout=30) + logging.info(f'[Writer]: Get {table.num_rows} ids with partition id {partition_id} from queue of size ' + f'{queue.qsize()} ------ Thread_id: {t.ident}') + path = get_partition_path(output_path, partition_id) + with open(path, 'ab') as f: + fcntl.flock(f.fileno(), fcntl.LOCK_EX) + option = _csv.WriteOptions(include_header=False) + _csv.write_csv(table, f, option) + except Empty as e: + logging.info('writer exits due to getting no data from queue') + + +class PartReader: + + def __init__(self, input_path: str, num_partitions: int, block_size: int, key_column: str, reader_thread_num: int): + self._input_path = input_path + self._num_partitions = num_partitions + self._block_size = block_size + self._key_column = key_column + self._pool = ThreadPoolExecutor(max_workers=reader_thread_num) + + def __del__(self): + self._pool.shutdown(wait=True) + logging.info('[Reader] ThreadPoolExecutor has shutdown.') + + def read(self, queue: Queue): + for filename in os.listdir(self._input_path): + self._pool.submit(read_ids, os.path.join(self._input_path, filename), self._key_column, self._block_size, + self._num_partitions, queue) + + +class PartWriter: + + def __init__(self, output_path: str, num_partitions: int, writer_thread_num: int): + self._output_path = output_path + self._num_partitions = num_partitions + self._pool = ThreadPoolExecutor(max_workers=writer_thread_num) + + def __del__(self): + self._pool.shutdown(wait=True) + logging.info('[Writer] ThreadPoolExecutor has shutdown.') + + def write(self, queue: Queue): + for _ in range(20): + self._pool.submit(write_partitioned_ids, self._output_path, queue) + + +class Partitioner: + + def __init__(self, input_path: str, output_path: str, num_partitions: int, block_size: int, key_column: str, + queue_size: int, reader_thread_num: int, writer_thread_num: int): + self._input_path = input_path + self._output_path = output_path + self._num_partitions = num_partitions + self._block_size = block_size + self._key_column = key_column + self._queue = Queue(queue_size) + self._reader_thread_num = 
reader_thread_num + self._writer_thread_num = writer_thread_num + shutil.rmtree(self._output_path, ignore_errors=True) + os.makedirs(self._output_path, exist_ok=True) + + def partition_data(self) -> None: + header = [self._key_column] + for filename in os.listdir(self._input_path): + input_path = os.path.join(self._input_path, filename) + with open(input_path, 'r', encoding='utf-8') as f: + reader = csv.DictReader(f) + header = reader.fieldnames + break + + for i in range(self._num_partitions): + with open(get_partition_path(self._output_path, i), 'w', encoding='utf-8') as f: + writer = csv.DictWriter(f, header) + writer.writeheader() + reader = PartReader(self._input_path, self._num_partitions, self._block_size, self._key_column, + self._reader_thread_num) + writer = PartWriter(self._output_path, self._num_partitions, self._writer_thread_num) + reader.read(self._queue) + writer.write(self._queue) diff --git a/pp_lite/data_join/utils/partitioner_test.py b/pp_lite/data_join/utils/partitioner_test.py new file mode 100644 index 000000000..c96f6d4fa --- /dev/null +++ b/pp_lite/data_join/utils/partitioner_test.py @@ -0,0 +1,137 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import csv +import os +import logging +import pandas +import shutil +import tempfile +import time +import unittest +from queue import Queue +from pp_lite.data_join.utils.partitioner import read_ids, write_partitioned_ids, Partitioner + + +def make_data(num: int, path: str, num_line: int) -> None: + shutil.rmtree(path, ignore_errors=True) + os.makedirs(path, exist_ok=True) + + for i in range(num): + data = range(i * num_line, (i + 1) * num_line) + ids = [{'oaid': oaid, 'x1': oaid} for oaid in data] + with open(os.path.join(path, f'part-{i}'), 'wt', encoding='utf-8') as f: + writer = csv.DictWriter(f, ['oaid', 'x1']) + writer.writeheader() + writer.writerows(ids) + + +class PartitionTest(unittest.TestCase): + + def test_read_ids(self): + with tempfile.TemporaryDirectory() as input_path: + make_data(1, input_path, 10) + filename = f'{input_path}/part-0' + queue = Queue(20) + read_ids(filename, 'oaid', 1000, 2, queue) + id1, table1 = queue.get() + self.assertEqual(id1, 0) + self.assertEqual(table1.to_pandas().values.tolist(), [['7', 7]]) + id2, table2 = queue.get() + self.assertEqual(id2, 1) + self.assertEqual(table2.to_pandas().values.tolist(), + [['0', 0], ['1', 1], ['2', 2], ['3', 3], ['4', 4], ['5', 5], ['6', 6], ['8', 8], ['9', 9]]) + + # 1 partition + filename = f'{input_path}/part-0' + queue = Queue(20) + read_ids(filename, 'oaid', 1000, 1, queue) + id1, table1 = queue.get() + self.assertEqual(id1, 0) + self.assertEqual( + table1.to_pandas().values.tolist(), + [['0', 0], ['1', 1], ['2', 2], ['3', 3], ['4', 4], ['5', 5], ['6', 6], ['7', 7], ['8', 8], ['9', 9]]) + + def test_write_ids(self): + with tempfile.TemporaryDirectory() as input_path: + make_data(1, input_path, 10) + filename = f'{input_path}/part-0' + queue = Queue(20) + num_partitions = 2 + 
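+            # with num_partitions=2, CityHash64 routes oaid "7" to partition 0 and the
+            # other nine ids to partition 1, matching the expectations in test_read_ids above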
block_size = 1000 + read_ids(filename, 'oaid', block_size, num_partitions, queue) + with tempfile.TemporaryDirectory() as output_path: + write_partitioned_ids(output_path, queue) + self.assertEqual(len(os.listdir(output_path)), 2) + self.assertTrue(os.path.exists(os.path.join(output_path, 'part-0'))) + self.assertTrue(os.path.exists(os.path.join(output_path, 'part-1'))) + with open(os.path.join(output_path, 'part-0'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), '"7",7\n') + with open(os.path.join(output_path, 'part-1'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), '"0",0\n"1",1\n"2",2\n"3",3\n"4",4\n"5",5\n"6",6\n"8",8\n"9",9\n') + + # 1 partition + read_ids(filename, 'oaid', block_size, 1, queue) + with tempfile.TemporaryDirectory() as output_path: + write_partitioned_ids(output_path, queue) + self.assertEqual(len(os.listdir(output_path)), 1) + self.assertTrue(os.path.exists(os.path.join(output_path, 'part-0'))) + with open(os.path.join(output_path, 'part-0'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), + '"0",0\n"1",1\n"2",2\n"3",3\n"4",4\n"5",5\n"6",6\n"7",7\n"8",8\n"9",9\n') + + def test_partitioner(self): + with tempfile.TemporaryDirectory() as input_path: + make_data(20, input_path, 10) + with tempfile.TemporaryDirectory() as output_path: + timeout = 30 + partitioner = Partitioner(input_path=input_path, + output_path=output_path, + num_partitions=20, + block_size=10000000, + key_column='oaid', + queue_size=40, + reader_thread_num=20, + writer_thread_num=20) + start = time.time() + partitioner.partition_data() + logging.info(f'Partitioner use time {time.time() - start - timeout}s') + self.assertEqual(len(os.listdir(output_path)), 20) + df = pandas.read_csv(os.path.join(output_path, 'part-0')) + self.assertEqual(sorted(df.values.tolist()), + [[22, 22], [71, 71], [94, 94], [120, 120], [127, 127], [136, 136], [173, 173]]) + + # 1 partition + with tempfile.TemporaryDirectory() as output_path: + timeout = 30 + partitioner = Partitioner(input_path=input_path, + output_path=output_path, + num_partitions=1, + block_size=10000000, + key_column='oaid', + queue_size=40, + reader_thread_num=20, + writer_thread_num=20) + start = time.time() + partitioner.partition_data() + logging.info(f'Partitioner use time {time.time() - start - timeout}s') + self.assertEqual(len(os.listdir(output_path)), 1) + df = pandas.read_csv(os.path.join(output_path, 'part-0')) + self.assertEqual(len(df.values.tolist()), 200) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s') + unittest.main() diff --git a/pp_lite/deploy/BUILD.bazel b/pp_lite/deploy/BUILD.bazel new file mode 100644 index 000000000..0ffbafac5 --- /dev/null +++ b/pp_lite/deploy/BUILD.bazel @@ -0,0 +1,28 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") + +py_binary( + name = "archiver", + srcs = ["archiver.py"], + data = ["//pp_lite/deploy/static"], + main = "archiver.py", + visibility = ["//pp_lite:pp_lite_package"], + deps = [":deploy"], +) + +py_library( + name = "deploy", + srcs = [ + "archiver.py", + "certificate.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//deploy/auto_cert", + "//deploy/auto_cert:authenticator_lib", + "//deploy/auto_cert:auto_cert_py_proto", + "//deploy/container:containers", + "//pp_lite/deploy/configs:deploy_config", + "//pp_lite/deploy/configs:logging_config", + "@common_click//:pkg", + ], +) diff --git a/pp_lite/deploy/README.md b/pp_lite/deploy/README.md new file mode 100644 
index 000000000..9ed40a42b --- /dev/null +++ b/pp_lite/deploy/README.md @@ -0,0 +1,37 @@ +# PP Lite Client Archiver + +This is a packaging tool for PP Lite - Client. Whenever you need to send your client a copy of our inspiring PP Lite, use me. + +## How to Make an Archive? + +### 1. Configuration + +In order to make a zip file for your client, you have to prepare a configuration file in YAML. + +Save it somewhere that you know. + +### 2. Make Zip File with Bazel + +Using Bazel, you can make your zip file blazing fast: + +```bash +# --run_under option makes relative path usable +bazelisk run --run_under="cd $PWD && " //pp_lite/deploy:archiver -- -c -o [-f ] +``` + +## How to Use the Archive? + +Using the archive is as simple as eating an apple: + +```bash +# Choose according to your format choice +unzip pp_lite_client.zip +# OR +tar xf pp_lite_client.tar + +cd pp_lite +# You may find UUID in [LIGHT_CLIENT_PSI]-[more information]-[Click and check the workflow] +bash start.sh +``` + +You can modify this bootstrap script for sure, and so is the `.env` file, but **make sure you know what you are doing**. diff --git a/pp_lite/deploy/archiver.py b/pp_lite/deploy/archiver.py new file mode 100644 index 000000000..4320eba17 --- /dev/null +++ b/pp_lite/deploy/archiver.py @@ -0,0 +1,91 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import logging.config +from pathlib import Path +from tempfile import TemporaryDirectory +from shutil import make_archive, copy + +from click import command, option, Path as PathType, Choice + +from deploy.auto_cert.authenticator import ApiKeyAuthenticator +from deploy.container.containers import pull_image_as_docker_archive +from pp_lite.deploy.configs.logging_config import LOGGING_CONFIG +from pp_lite.deploy.certificate import get_certificate, write_certificate +from pp_lite.deploy.configs.controllers import get_deploy_config_from_yaml + + +def _pull_image(image_uri: str, pp_lite_path: Path): + logging.info('include_image is set to true; pulling pp_lite_client...') + pull_image_as_docker_archive(image_uri, pp_lite_path / 'client_image.tar', 'privacy_computing_platform') + logging.info('include_image is set to true; pulling pp_lite_client... 
[DONE]') + + +def _make_dirs(pp_lite_path: Path): + pp_lite_path.mkdir() + (pp_lite_path / 'input').mkdir() + (pp_lite_path / 'output').mkdir() + (pp_lite_path / 'log').mkdir() + (pp_lite_path / 'cert').mkdir() + + +def _copy_static_files(pp_lite_path: Path): + path_of_this_file = Path(__file__).parent + copy(path_of_this_file / 'static' / '.env', Path(pp_lite_path / '.env')) + copy(path_of_this_file / 'static' / 'start.sh', Path(pp_lite_path / 'start.sh')) + + +@command(name='PP Lite Client Archiver', help='I make archives for PP Lite Client for your clients.') +@option('--yaml_config_path', + '-c', + help='How should I perform?', + type=PathType(exists=True, file_okay=True, dir_okay=False), + required=True) +@option('--output_path', + '-o', + help='Where should I put the output archive?', + type=PathType(exists=True, file_okay=False, dir_okay=True), + required=True) +@option('--output_format', + '-f', + help='In what format do you want your archive to be?', + type=Choice(['tar', 'zip']), + default='zip') +def archive(yaml_config_path: str, output_path: str, output_format: str): + with TemporaryDirectory() as temp_dir: + config = get_deploy_config_from_yaml(Path(yaml_config_path).absolute()) + cert = get_certificate(config.pure_domain_name, ApiKeyAuthenticator(config.auto_cert_api_key)) + + pp_lite_path = Path(temp_dir) / 'pp_lite' + _make_dirs(pp_lite_path) + _copy_static_files(pp_lite_path) + write_certificate(pp_lite_path / 'cert', cert) + + if config.include_image: + _pull_image(config.image_uri, pp_lite_path) + + logging.info('Making zip archive...') + make_archive(Path(output_path) / 'pp_lite_client', output_format, Path(temp_dir).absolute()) + logging.info('Making zip archive... [DONE]') + + +if __name__ == '__main__': + logging.config.dictConfig(LOGGING_CONFIG) + try: + archive() # pylint: disable=no-value-for-parameter + except Exception as e: # pylint: disable=broad-except + logging.error(e) + raise e diff --git a/pp_lite/deploy/certificate.py b/pp_lite/deploy/certificate.py new file mode 100644 index 000000000..a9b223523 --- /dev/null +++ b/pp_lite/deploy/certificate.py @@ -0,0 +1,43 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
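The click options above map `-c`, `-o` and `-f` to the YAML config path, the output directory and the archive format. An invocation consistent with the README earlier in this diff (file and directory names are placeholders):

```shell
bazelisk run --run_under="cd $PWD && " //pp_lite/deploy:archiver -- \
    -c deploy_config.yaml -o ./dist -f zip
```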
+# + +import logging +from pathlib import Path + +from deploy.auto_cert.certificate_model_pb2 import CertificateFile +from deploy.auto_cert.certificate_service import CertificateService +from deploy.auto_cert.authenticator import ApiKeyAuthenticator +from deploy.auto_cert.consts import BOE_NEXUS_CONFIG + + +def get_certificate(company_name: str, authenticator: ApiKeyAuthenticator) -> CertificateFile: + service = CertificateService(authenticator, BOE_NEXUS_CONFIG) + common_name = f'{company_name}.fedlearner.net' + certs = service.get_certificates_by_name(common_name) + if len(certs) == 0: + logging.info(f'Certificate with company_name={company_name} not found; issuing...') + cert = service.issue_certificate(common_name, 365) + else: + cert = list(certs.values())[0] + return cert + + +def write_certificate(cert_path: Path, cert: CertificateFile): + with open(cert_path / 'public.pem', mode='w', encoding='utf-8') as f: + f.write(cert.certificate) + with open(cert_path / 'intermediate.pem', mode='w', encoding='utf-8') as f: + f.write('\n'.join(cert.certificate_chain)) + with open(cert_path / 'private.key', mode='w', encoding='utf-8') as f: + f.write(cert.private_key) diff --git a/pp_lite/deploy/configs/BUILD.bazel b/pp_lite/deploy/configs/BUILD.bazel new file mode 100644 index 000000000..55496e223 --- /dev/null +++ b/pp_lite/deploy/configs/BUILD.bazel @@ -0,0 +1,44 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_proto_library") + +py_library( + name = "deploy_config", + srcs = [ + "controllers.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":models_py_proto", + "@common_pyyaml//:pkg", + ], +) + +py_library( + name = "logging_config", + srcs = [ + "logging_config.py", + ], + visibility = ["//pp_lite:pp_lite_package"], +) + +proto_library( + name = "models_proto", + srcs = ["models.proto"], + visibility = ["//pp_lite:pp_lite_package"], +) + +py_proto_library( + name = "models_py_proto", + visibility = ["//pp_lite:pp_lite_package"], + deps = [":models_proto"], +) + +py_test( + name = "controllers_test", + size = "small", + srcs = ["controllers_test.py"], + data = ["//pp_lite/deploy/test_data"], + visibility = ["//pp_lite:pp_lite_package"], + deps = ["//pp_lite/deploy/configs:deploy_config"], +) diff --git a/pp_lite/deploy/configs/controllers.py b/pp_lite/deploy/configs/controllers.py new file mode 100644 index 000000000..21bb7d69f --- /dev/null +++ b/pp_lite/deploy/configs/controllers.py @@ -0,0 +1,32 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
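For orientation, the two helpers above compose the way `archiver.py` uses them; the domain name and API key below are placeholders:

```python
from pathlib import Path

from deploy.auto_cert.authenticator import ApiKeyAuthenticator
from pp_lite.deploy.certificate import get_certificate, write_certificate

cert = get_certificate('some_company', ApiKeyAuthenticator('some_key'))
write_certificate(Path('cert'), cert)  # writes public.pem, intermediate.pem and private.key
```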
+# + +import logging +from pathlib import Path + +from yaml import load, Loader + +from pp_lite.deploy.configs.models_pb2 import Config + + +def get_deploy_config_from_yaml(path: Path) -> Config: + logging.info(f'Getting config from YAML file with path={path}...') + with open(path, mode='r', encoding='utf-8') as f: + content = load(f, Loader=Loader) + logging.info(f'Getting config from YAML file with path={path}... [DONE]') + return Config(pure_domain_name=content['pure_domain_name'], + image_uri=content['image_uri'], + include_image=content['include_image'], + auto_cert_api_key=content['auto_cert_api_key']) diff --git a/pp_lite/deploy/configs/controllers_test.py b/pp_lite/deploy/configs/controllers_test.py new file mode 100644 index 000000000..b6172c9cd --- /dev/null +++ b/pp_lite/deploy/configs/controllers_test.py @@ -0,0 +1,33 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from unittest import main, TestCase +from pathlib import Path + +from pp_lite.deploy.configs.controllers import get_deploy_config_from_yaml + + +class ConfigTest(TestCase): + + def test_get_config_from_yaml(self): + config = get_deploy_config_from_yaml(Path(__file__).parent.parent / 'test_data' / 'deploy_config.yaml') + self.assertEqual('some_company', config.pure_domain_name) + self.assertEqual('artifact.bytedance.com/fedlearner/pp_lite:2.3.25.4', config.image_uri) + self.assertEqual(False, config.include_image) + self.assertEqual('some_key', config.auto_cert_api_key) + + +if __name__ == '__main__': + main() diff --git a/pp_lite/deploy/configs/logging_config.py b/pp_lite/deploy/configs/logging_config.py new file mode 100644 index 000000000..154036407 --- /dev/null +++ b/pp_lite/deploy/configs/logging_config.py @@ -0,0 +1,37 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +LOGGING_CONFIG = { + 'version': 1, + 'disable_existing_loggers': False, + 'root': { + 'handlers': ['console'], + 'level': 'DEBUG' + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'formatter': 'generic', + 'level': 'INFO' + }, + }, + 'formatters': { + 'generic': { + 'format': '%(asctime)s [%(process)d] [%(levelname)s] [PP Lite Client Archiver] %(message)s', + 'datefmt': '%Y-%m-%d %H:%M:%S', + 'class': 'logging.Formatter' + } + } +} diff --git a/pp_lite/deploy/configs/models.proto b/pp_lite/deploy/configs/models.proto new file mode 100644 index 000000000..88f106a9f --- /dev/null +++ b/pp_lite/deploy/configs/models.proto @@ -0,0 +1,27 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package pp_lite.deploy.configs; + +message Config { + // Prefix of the certificate: .fedlearner.net + string pure_domain_name = 1; + string image_uri = 2; + // Whether to include pp_lite_client image in the produced zip file + bool include_image = 3; + string auto_cert_api_key = 4; +} diff --git a/pp_lite/deploy/static/BUILD.bazel b/pp_lite/deploy/static/BUILD.bazel new file mode 100644 index 000000000..6143e68fa --- /dev/null +++ b/pp_lite/deploy/static/BUILD.bazel @@ -0,0 +1,8 @@ +filegroup( + name = "static", + srcs = [ + ".env", + "start.sh", + ], + visibility = ["//pp_lite:pp_lite_package"], +) diff --git a/pp_lite/deploy/static/start.sh b/pp_lite/deploy/static/start.sh new file mode 100755 index 000000000..6696ebb10 --- /dev/null +++ b/pp_lite/deploy/static/start.sh @@ -0,0 +1,40 @@ +#!/bin/bash +if grep -q "psi-ot" .env +then + if [[ $1 == '' || $2 == '' || $3 == '' ]] + then + echo "[Usage]: bash $0 [param1: UUID] [param2: INPUT_DIR] [param3: NUM_WORKERS]" + exit 1 + fi + NUM_WORKERS=$3 + sed -i".bak" -e "s/NUM_WORKERS.*/NUM_WORKERS=${NUM_WORKERS}/" .env +else + if [[ $1 == '' || $2 == '' ]] + then + echo "[Usage]: bash $0 [param1: UUID] [param2: INPUT_DIR]" + exit 1 + fi +fi +UUID=$1 +INPUT_DIR=$2 + +set -e +sed -i".bak" -e "s/SERVICE_ID.*/SERVICE_ID=${UUID}-lc-start-server-worker-0/" .env + +if test -z "$(docker images | grep pp_lite)" +then + echo "Loading image..." + docker load -i client_image.tar +else + echo "Image already satisfied." +fi + +IMAGE_URI="$(docker images --format '{{ .Repository }}:{{ .Tag }}' | grep pp_lite | head -n 1)" +echo "Using $IMAGE_URI to proceed." + +echo "Start Client..." +docker run -it --rm --env-file .env \ + -v "$PWD":/app/workdir \ + -v "${INPUT_DIR}":/app/workdir/input \ + "${IMAGE_URI}" +echo "Start Client... 
[DONE]" diff --git a/pp_lite/deploy/test_data/BUILD.bazel b/pp_lite/deploy/test_data/BUILD.bazel new file mode 100644 index 000000000..57b02c214 --- /dev/null +++ b/pp_lite/deploy/test_data/BUILD.bazel @@ -0,0 +1,6 @@ +filegroup( + name = "test_data", + testonly = True, + srcs = ["deploy_config.yaml"], + visibility = ["//pp_lite:pp_lite_package"], +) diff --git a/pp_lite/deploy/test_data/deploy_config.yaml b/pp_lite/deploy/test_data/deploy_config.yaml new file mode 100644 index 000000000..f3c38b5dd --- /dev/null +++ b/pp_lite/deploy/test_data/deploy_config.yaml @@ -0,0 +1,4 @@ +pure_domain_name: some_company +image_uri: artifact.bytedance.com/fedlearner/pp_lite:2.3.25.4 +include_image: false +auto_cert_api_key: some_key diff --git a/pp_lite/proto/BUILD.bazel b/pp_lite/proto/BUILD.bazel new file mode 100644 index 000000000..8d9d5c188 --- /dev/null +++ b/pp_lite/proto/BUILD.bazel @@ -0,0 +1,31 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_grpc_library", "py_proto_library") + +# gazelle:ignore otherwise gazellel will generate self in deps +proto_library( + name = "proto", + srcs = [ + "arguments.proto", + "common.proto", + "data_join_control_service.proto", + "data_join_service.proto", + "hashed_data_join.proto", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "@com_google_protobuf//:empty_proto", + ], +) + +py_proto_library( + name = "py_proto", + visibility = ["//pp_lite:pp_lite_package"], + deps = [":proto"], +) + +py_grpc_library( + name = "py_grpc", + srcs = [":proto"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [":py_proto"], +) diff --git a/pp_lite/proto/arguments.proto b/pp_lite/proto/arguments.proto new file mode 100644 index 000000000..bc8d34530 --- /dev/null +++ b/pp_lite/proto/arguments.proto @@ -0,0 +1,76 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +package pp_lite.proto; +import "pp_lite/proto/common.proto"; + +message ClusterSpec { + // service name and port of workers + repeated string workers = 1; +} + +message Arguments { + string input_path = 1; + string output_path = 2; + string key_column = 3; + DataJoinType data_join_type = 4; + int32 server_port = 5; + // 0-based worker rank + int32 worker_rank = 6; + ClusterSpec cluster_spec = 7; + int32 num_workers = 8; + int32 joiner_port = 9; + bool partitioned = 10; +} + +message TrainerArguments { + string input_path = 1 [deprecated=true]; + string output_path = 2 [deprecated=true]; + // listen port for server-side gRpc server. + int32 server_port = 3; + // cluster_spec for TensorFlow gRpc server. Default: '{"clusterSpec": {"server": ["localhost:51001"], \ + // "master": ["localhost:50101"], "ps": ["localhost:50102"], "worker": ["localhost:50103"]}}'. + string cluster_spec = 4; + // which model version to load if the pre-trained model exists. Default: 0. 
+  int32 model_version = 5;
+  // the maximum number of model checkpoints to save. Default: 5.
+  int32 model_max_to_keep = 6;
+  // the number of versions a client model may lag behind the current version and still be
+  // tolerated for model aggregation on the server. Default: 0.
+  int32 tolerated_version_gap = 7;
+  // the number of local steps for each client epoch. Default: 100.
+  int32 local_steps = 8;
+  // local training batch size. Default: 10.
+  int32 batch_size = 9;
+  // address for the client-side master gRPC server.
+  string master_addr = 10;
+  int32 worker_rank = 11;
+  int32 ps_rank = 12;
+  // task mode for the client. Default: 'local'. Range: ['local', 'master', 'ps', 'worker'].
+  string task_mode = 13;
+  // listen port for the server-side TensorFlow gRPC server.
+  int32 tf_port = 14;
+  // total number of clients.
+  int32 num_clients = 15;
+  // export the server model every save_version_gap versions.
+  int32 save_version_gap = 16;
+  // initial weight for aggregating the client model.
+  float client_model_weight = 17;
+  string data_path = 18;
+  string export_path = 19;
+  // wildcard used to select the TFRecord files to read
+  string file_wildcard = 20;
+}
\ No newline at end of file
diff --git a/pp_lite/proto/common.proto b/pp_lite/proto/common.proto
new file mode 100644
index 000000000..e8f4e0a39
--- /dev/null
+++ b/pp_lite/proto/common.proto
@@ -0,0 +1,35 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+package pp_lite.proto;
+
+enum DataJoinType {
+  HASHED_DATA_JOIN = 0;
+  OT_PSI = 1;
+}
+
+enum FileType {
+  CSV = 0;
+  TFRECORD = 1;
+}
+
+message Ping {
+  string message = 1;
+}
+
+message Pong {
+  string message = 1;
+}
diff --git a/pp_lite/proto/data_join_control_service.proto b/pp_lite/proto/data_join_control_service.proto
new file mode 100644
index 000000000..ed028f47c
--- /dev/null
+++ b/pp_lite/proto/data_join_control_service.proto
@@ -0,0 +1,67 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +syntax = "proto3"; +package pp_lite.proto; +import "pp_lite/proto/common.proto"; +import "google/protobuf/empty.proto"; + +message CreateDataJoinRequest { + DataJoinType type = 1; + int64 partition_id = 2; +} + +message CreateDataJoinResponse { + bool succeeded = 1; + bool empty = 2; +} + +message VerifyParameterRequest { + int64 num_partitions = 1; + int64 num_workers = 2; +} + +message VerifyParameterResponse { + bool succeeded = 1; + int64 num_partitions = 2; + int64 num_workers = 3; +} + +message GetParameterRequest { + string message = 1; +} + +message GetParameterResponse { + int64 num_partitions = 1; + int64 num_workers = 2; +} + +message GetDataJoinResultRequest { + int64 partition_id = 1; +} + +message DataJoinResult { + int64 num_joined = 1; + bool finished = 2; +} + +service DataJoinControlService { + rpc HealthCheck(Ping) returns (Pong) {} + rpc VerifyParameter(VerifyParameterRequest) returns (VerifyParameterResponse) {} + rpc GetParameter(GetParameterRequest) returns (GetParameterResponse) {} + rpc CreateDataJoin(CreateDataJoinRequest) returns (CreateDataJoinResponse) {} + rpc GetDataJoinResult(GetDataJoinResultRequest) returns (DataJoinResult) {} + rpc Finish(google.protobuf.Empty) returns (google.protobuf.Empty) {} +} \ No newline at end of file diff --git a/pp_lite/proto/data_join_service.proto b/pp_lite/proto/data_join_service.proto new file mode 100644 index 000000000..7a10581d0 --- /dev/null +++ b/pp_lite/proto/data_join_service.proto @@ -0,0 +1,63 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; +package pp_lite.proto; +import "pp_lite/proto/common.proto"; +import "google/protobuf/empty.proto"; + +message GetPartitionNumberResponse { + int64 partition_num = 1; +} + +message GetSignedIdsRequest { + repeated int64 partition_ids = 1; +} + +message GetSignedIdsResponse { + repeated string ids = 1; +} + +message PublicKeyResponse { + string e = 1; + string n = 2; +} + +message SignRequest { + repeated string ids = 1; +} + +message SignResponse { + repeated string signed_ids = 1; +} + +message SyncDataJoinResultRequest { + int64 partition_id = 1; + repeated string ids = 2; +} + +message SyncDataJoinResultResponse { + bool succeeded = 1; +} + +service DataJoinService { + rpc Sign(SignRequest) returns (SignResponse) {} + rpc Finish(google.protobuf.Empty) returns (google.protobuf.Empty) {} + rpc GetPublicKey(google.protobuf.Empty) returns (PublicKeyResponse) {} + rpc GetSignedIds(GetSignedIdsRequest) returns (stream GetSignedIdsResponse) {} + rpc GetPartitionNumber(google.protobuf.Empty) returns (GetPartitionNumberResponse) {} + rpc SyncDataJoinResult(stream SyncDataJoinResultRequest) returns (SyncDataJoinResultResponse) {} + rpc HealthCheck(Ping) returns (Pong) {} +} \ No newline at end of file diff --git a/pp_lite/proto/hashed_data_join.proto b/pp_lite/proto/hashed_data_join.proto new file mode 100644 index 000000000..8aff081f0 --- /dev/null +++ b/pp_lite/proto/hashed_data_join.proto @@ -0,0 +1,29 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +package pp_lite.proto; + +message DataJoinRequest { + repeated string ids = 1; +} + +message DataJoinResponse { + repeated string ids = 1; +} + +service HashedDataJoinService { + rpc DataJoin(stream DataJoinRequest) returns (stream DataJoinResponse) {} +} diff --git a/pp_lite/rpc/BUILD.bazel b/pp_lite/rpc/BUILD.bazel new file mode 100644 index 000000000..886499438 --- /dev/null +++ b/pp_lite/rpc/BUILD.bazel @@ -0,0 +1,19 @@ +load("@rules_python//python:defs.bzl", "py_library") + +py_library( + name = "rpc", + srcs = [ + "client.py", + "data_join_control_client.py", + "hashed_data_join_client.py", + "server.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join:envs", + "//pp_lite/data_join/utils", + "//pp_lite/proto:py_grpc", + "//pp_lite/proto:py_proto", + "//pp_lite/utils", + ], +) diff --git a/pp_lite/rpc/client.py b/pp_lite/rpc/client.py new file mode 100644 index 000000000..c4474613b --- /dev/null +++ b/pp_lite/rpc/client.py @@ -0,0 +1,78 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import List, Optional, Iterable + +import grpc +from google.protobuf import empty_pb2 + +from pp_lite.proto.common_pb2 import Ping, Pong +from pp_lite.proto import data_join_service_pb2, data_join_service_pb2_grpc +from pp_lite.data_join.envs import GRPC_CLIENT_TIMEOUT +from pp_lite.utils.decorators import retry_fn + + +class DataJoinClient: + """Rsa psi rpc client""" + + def __init__(self, server_port: int = 50052, batch_size: int = 4096): + logging.info(f'RpcClient started: server_port:{server_port}') + self._host = 'localhost' + self._server_port = server_port + self._channel = grpc.insecure_channel(f'{self._host}:{self._server_port}') + self._stub = data_join_service_pb2_grpc.DataJoinServiceStub(self._channel) + self._batch_size = batch_size + + @retry_fn(retry_times=30) + def check_server_ready(self, timeout_seconds=5): + # Check server ready via channel ready future instead of for-loop `HealthCheck` call. + # Ref: https://grpc.github.io/grpc/python/grpc.html#grpc.channel_ready_future + grpc.channel_ready_future(self._channel).result(timeout=timeout_seconds) + + @retry_fn(retry_times=3) + def get_public_key(self) -> data_join_service_pb2.PublicKeyResponse: + return self._stub.GetPublicKey(empty_pb2.Empty(), timeout=GRPC_CLIENT_TIMEOUT) + + def health_check(self) -> Pong: + return self._stub.HealthCheck(Ping(), timeout=GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3) + def sign(self, ids: List[str]) -> data_join_service_pb2.SignResponse: + request = data_join_service_pb2.SignRequest(ids=ids) + return self._stub.Sign(request, timeout=GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3) + def get_partition_number(self) -> data_join_service_pb2.GetPartitionNumberResponse: + return self._stub.GetPartitionNumber(empty_pb2.Empty()) + + @retry_fn(retry_times=3) + def get_signed_ids(self, partition_ids: List[int]) -> Iterable[data_join_service_pb2.GetSignedIdsResponse]: + request = data_join_service_pb2.GetSignedIdsRequest(partition_ids=partition_ids) + return self._stub.GetSignedIds(request) + + def sync_data_join_result(self, ids_iterator: Iterable[List[str]], partition_id: Optional[int] = None) \ + -> data_join_service_pb2.SyncDataJoinResultResponse: + + def request_iterator(): + for ids in ids_iterator: + yield data_join_service_pb2.SyncDataJoinResultRequest(ids=ids, partition_id=partition_id) + + return self._stub.SyncDataJoinResult(request_iterator()) + + def finish(self): + logging.info('RpcClient stopped ! ! !') + request = empty_pb2.Empty() + self._stub.Finish(request) diff --git a/pp_lite/rpc/data_join_control_client.py b/pp_lite/rpc/data_join_control_client.py new file mode 100644 index 000000000..695da408c --- /dev/null +++ b/pp_lite/rpc/data_join_control_client.py @@ -0,0 +1,63 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +import logging +from google.protobuf import empty_pb2 + +from pp_lite.data_join.envs import GRPC_CLIENT_TIMEOUT + +from pp_lite.proto.common_pb2 import DataJoinType, Ping, Pong +from pp_lite.proto import data_join_control_service_pb2 as service_pb2 +from pp_lite.proto import data_join_control_service_pb2_grpc as service_pb2_grpc + + +class DataJoinControlClient: + """Ot psi rpc client""" + + def __init__(self, server_port: int): + logging.info(f'RpcClient started: server_port:{server_port}') + self._host = 'localhost' + self._server_port = server_port + self._channel = grpc.insecure_channel(f'{self._host}:{self._server_port}') + self._stub = service_pb2_grpc.DataJoinControlServiceStub(self._channel) + + def health_check(self, message: str = '') -> Pong: + request = Ping(message=message) + return self._stub.HealthCheck(request, timeout=GRPC_CLIENT_TIMEOUT) + + def verify_parameter(self, num_partitions: int, num_workers: int) -> service_pb2.VerifyParameterResponse: + request = service_pb2.VerifyParameterRequest(num_partitions=num_partitions, num_workers=num_workers) + return self._stub.VerifyParameter(request, timeout=GRPC_CLIENT_TIMEOUT) + + def get_parameter(self, message: str = '') -> service_pb2.GetParameterResponse: + request = service_pb2.GetParameterRequest(message=message) + return self._stub.GetParameter(request, timeout=GRPC_CLIENT_TIMEOUT) + + def create_data_join(self, partition_id: int, data_join_type: DataJoinType) -> service_pb2.CreateDataJoinResponse: + request = service_pb2.CreateDataJoinRequest(partition_id=partition_id, type=data_join_type) + # timeout is not set since server may load data from slow hdfs + return self._stub.CreateDataJoin(request) + + def get_data_join_result(self, partition_id: int) -> service_pb2.DataJoinResult: + request = service_pb2.GetDataJoinResultRequest(partition_id=partition_id) + return self._stub.GetDataJoinResult(request, timeout=GRPC_CLIENT_TIMEOUT) + + def finish(self): + logging.info('RpcClient stopped ! ! !') + self._stub.Finish(empty_pb2.Empty()) + + def close(self): + self._channel.close() diff --git a/pp_lite/rpc/hashed_data_join_client.py b/pp_lite/rpc/hashed_data_join_client.py new file mode 100644 index 000000000..e1fc62355 --- /dev/null +++ b/pp_lite/rpc/hashed_data_join_client.py @@ -0,0 +1,40 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import grpc
+from typing import Iterator, List
+from pp_lite.proto import hashed_data_join_pb2 as service_pb2
+from pp_lite.proto import hashed_data_join_pb2_grpc as service_pb2_grpc
+from pp_lite.data_join.utils.generators import make_ids_iterator_from_list
+
+
+class HashedDataJoinClient:
+    """Hashed data join client for integration tests"""
+
+    def __init__(self, server_port: int):
+        self._host = 'localhost'
+        self._server_port = server_port
+        self._channel = grpc.insecure_channel(f'{self._host}:{self._server_port}')
+        self._stub = service_pb2_grpc.HashedDataJoinServiceStub(self._channel)
+
+    def data_join(self, ids: List[str], batch_size: int = 4096) -> Iterator[service_pb2.DataJoinResponse]:
+
+        def request_iterator():
+            for part_ids in make_ids_iterator_from_list(ids, batch_size):
+                yield service_pb2.DataJoinRequest(ids=part_ids)
+
+        response_iterator = self._stub.DataJoin(request_iterator())
+
+        return response_iterator
diff --git a/pp_lite/rpc/server.py b/pp_lite/rpc/server.py
new file mode 100644
index 000000000..de4235858
--- /dev/null
+++ b/pp_lite/rpc/server.py
@@ -0,0 +1,69 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import grpc
+import logging
+import threading
+from typing import Callable
+from concurrent import futures
+from abc import ABCMeta, abstractmethod
+
+
+class IServicer(metaclass=ABCMeta):
+
+    @abstractmethod
+    def register(self, server: grpc.Server, stop_hook: Callable[[], None]):
+        raise NotImplementedError()
+
+
+class RpcServer:
+
+    def __init__(self, servicer: IServicer, listen_port: int):
+        self._lock = threading.Lock()
+        self._started = False
+        self._server = None
+        self._servicer = servicer
+        self._listen_port = listen_port
+
+    @property
+    def server(self):
+        return self._server
+
+    def start(self):
+        assert not self._started, 'already started'
+        with self._lock:
+            self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=20))
+            self._servicer.register(self._server, self.stop)
+            self._server.add_insecure_port(f'[::]:{self._listen_port}')
+            self._server.start()
+            self._started = True
+            logging.info(f'RpcServer started: listen_port:{self._listen_port}')
+
+    def wait(self, timeout=None):
+        self._server.wait_for_termination(timeout)
+
+    def stop(self):
+        if not self._started:
+            return
+        with self._lock:
+            # cannot stop immediately because the Finish response still needs to be returned
+            self._server.stop(grace=5)
+            del self._server
+            self._started = False
+            logging.info('RpcServer stopped ! ! 
!') + + def is_alive(self): + with self._lock: + return hasattr(self, '_server') diff --git a/pp_lite/test/BUILD.bazel b/pp_lite/test/BUILD.bazel new file mode 100644 index 000000000..5fc9f25c6 --- /dev/null +++ b/pp_lite/test/BUILD.bazel @@ -0,0 +1,66 @@ +load("@rules_python//python:defs.bzl", "py_test") + +py_test( + name = "psi_ot_test", + size = "medium", + srcs = ["psi_ot_test.py"], + tags = ["exclusive"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join/psi_ot", + "//pp_lite/data_join/psi_ot/joiner", + "//pp_lite/proto:py_proto", + "//pp_lite/testing", + ], +) + +py_test( + name = "psi_rsa_test", + size = "small", + srcs = ["psi_rsa_test.py"], + tags = ["exclusive"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join/psi_rsa", + "//pp_lite/testing", + ], +) + +py_test( + name = "psi_rsa_partition_test", + size = "small", + srcs = ["psi_rsa_partition_test.py"], + tags = ["exclusive"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/data_join/psi_rsa", + "//pp_lite/testing", + ], +) + +py_test( + name = "trainer_test", + size = "medium", + srcs = ["trainer_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite/proto:py_proto", + "//pp_lite/testing", + "//pp_lite/trainer", + ], +) + +py_test( + name = "cli_test", + size = "small", + srcs = ["cli_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//pp_lite", + "//pp_lite/data_join/psi_ot", + "//pp_lite/data_join/psi_ot/joiner", + "//pp_lite/data_join/psi_rsa", + "//pp_lite/testing", + "@common_click//:pkg", + ], +) diff --git a/pp_lite/test/cli_test.py b/pp_lite/test/cli_test.py new file mode 100644 index 000000000..33c8bd4d9 --- /dev/null +++ b/pp_lite/test/cli_test.py @@ -0,0 +1,106 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import os
+import unittest
+from unittest.mock import Mock, patch
+
+from click.testing import CliRunner
+
+from pp_lite import cli
+from web_console_v2.inspection.error_code import AreaCode, ErrorType
+
+
+class CliTest(unittest.TestCase):
+
+    # TODO(zhou.yi): create Env class to process environment variables
+    @patch('pp_lite.cli.write_termination_message')
+    def test_ot_missing_argument(self, mock_write_termination_message: Mock):
+        if 'INPUT_PATH' in os.environ:
+            del os.environ['INPUT_PATH']
+
+        runner = CliRunner()
+        with self.assertLogs(level='ERROR') as cm:
+            result = runner.invoke(cli=cli.pp_lite, args='psi-ot client')
+        # check logging
+        self.assertIn('Environment variable INPUT_PATH is missing.', cm.output[0])
+
+        # check termination log
+        mock_write_termination_message.assert_called_once_with(AreaCode.PSI_OT, ErrorType.INPUT_PARAMS_ERROR,
+                                                               'Environment variable INPUT_PATH is missing.')
+
+        # check the exception that is raised again
+        self.assertEqual(str(result.exception), '00071005-Environment variable INPUT_PATH is missing.')
+
+    @patch('pp_lite.cli.write_termination_message')
+    def test_hash_missing_argument(self, mock_write_termination_message: Mock):
+        if 'INPUT_PATH' in os.environ:
+            del os.environ['INPUT_PATH']
+
+        runner = CliRunner()
+        with self.assertLogs(level='ERROR') as cm:
+            result = runner.invoke(cli=cli.pp_lite, args='psi-hash client')
+
+        # check logging
+        self.assertIn('Environment variable INPUT_PATH is missing.', cm.output[0])
+
+        # check termination log
+        mock_write_termination_message.assert_called_once_with(AreaCode.PSI_HASH, ErrorType.INPUT_PARAMS_ERROR,
+                                                               'Environment variable INPUT_PATH is missing.')
+
+        # check the exception that is raised again
+        self.assertEqual(str(result.exception), '00091005-Environment variable INPUT_PATH is missing.')
+
+    @patch('pp_lite.cli.write_termination_message')
+    def test_trainer_client_missing_argument(self, mock_write_termination_message: Mock):
+        if 'TF_PORT' in os.environ:
+            del os.environ['TF_PORT']
+
+        runner = CliRunner()
+        with self.assertLogs(level='ERROR') as cm:
+            result = runner.invoke(cli=cli.pp_lite, args='trainer client')
+
+        # check logging
+        self.assertIn('Environment variable TF_PORT is missing.', cm.output[0])
+
+        # check termination log
+        mock_write_termination_message.assert_called_once_with(AreaCode.TRAINER, ErrorType.INPUT_PARAMS_ERROR,
+                                                               'Environment variable TF_PORT is missing.')
+
+        # check the exception that is raised again
+        self.assertEqual(str(result.exception), '00101005-Environment variable TF_PORT is missing.')
+
+    @patch('pp_lite.cli.write_termination_message')
+    def test_trainer_server_missing_argument(self, mock_write_termination_message: Mock):
+        if 'EXPORT_PATH' in os.environ:
+            del os.environ['EXPORT_PATH']
+
+        runner = CliRunner()
+        with self.assertLogs(level='ERROR') as cm:
+            result = runner.invoke(cli=cli.pp_lite, args='trainer server')
+
+        # check logging
+        self.assertIn('Environment variable EXPORT_PATH is missing.', cm.output[0])
+
+        # check termination log
+        mock_write_termination_message.assert_called_once_with(AreaCode.TRAINER, ErrorType.INPUT_PARAMS_ERROR,
+                                                               'Environment variable EXPORT_PATH is missing.')
+
+        # check the exception that is raised again
+        self.assertEqual(str(result.exception), '00101005-Environment variable EXPORT_PATH is missing.')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pp_lite/test/psi_ot_test.py b/pp_lite/test/psi_ot_test.py
new file mode 100644
index 000000000..2716522ef
--- /dev/null
+++ b/pp_lite/test/psi_ot_test.py
@@ -0,0 +1,98 @@
+# Copyright 2023 
The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import shutil +import unittest +import tempfile +from concurrent.futures import ThreadPoolExecutor +import importlib.util as imutil + +from pp_lite.data_join import envs +from pp_lite.proto.arguments_pb2 import Arguments +from pp_lite.proto.common_pb2 import DataJoinType +from pp_lite.data_join.psi_ot.client import run as client_run +from pp_lite.data_join.psi_ot.server import run as server_run +from pp_lite.testing.make_data import make_data + + +def check_psi_oprf(): + spec = imutil.find_spec('psi_oprf') + if spec is None: + psi_oprf_existed = False + else: + psi_oprf_existed = True + return psi_oprf_existed + + +class IntegratedTest(unittest.TestCase): + + _PART_NUM = 2 + + def setUp(self) -> None: + self._temp_dir = tempfile.mkdtemp() + envs.STORAGE_ROOT = self._temp_dir + self._client_input_path = os.path.join(self._temp_dir, 'client') + self._server_input_path = os.path.join(self._temp_dir, 'server') + self._client_output_path = os.path.join(self._temp_dir, 'client_output') + self._server_output_path = os.path.join(self._temp_dir, 'server_output') + make_data(self._PART_NUM, self._client_input_path, self._server_input_path) + + def tearDown(self) -> None: + shutil.rmtree(self._temp_dir, ignore_errors=True) + + def _run_client(self, data_join_type: DataJoinType): + args = Arguments(input_path=self._client_input_path, + output_path=self._client_output_path, + key_column='raw_id', + data_join_type=data_join_type, + server_port=50051, + joiner_port=50053, + worker_rank=0, + num_workers=1, + partitioned=True) + args.cluster_spec.workers.extend(['worker-0']) + client_run(args) + + def _run_server(self, data_join_type: DataJoinType): + args = Arguments(input_path=self._server_input_path, + output_path=self._server_output_path, + key_column='raw_id', + data_join_type=data_join_type, + server_port=50051, + joiner_port=50053, + worker_rank=0) + args.cluster_spec.workers.extend(['worker-0']) + server_run(args) + + def _run(self, data_join_type: DataJoinType): + pool = ThreadPoolExecutor(max_workers=2) + pool.submit(self._run_server, data_join_type) + self._run_client(data_join_type) + pool.shutdown() + # success tags are included + self.assertEqual(len(os.listdir(self._client_output_path)), self._PART_NUM * 2) + self.assertEqual(len(os.listdir(self._server_output_path)), self._PART_NUM * 2) + + def test_run_hashed_data_join(self): + self._run(DataJoinType.HASHED_DATA_JOIN) + + @unittest.skipUnless(check_psi_oprf(), 'require ot psi file') + def test_ot_psi(self): + self._run(DataJoinType.OT_PSI) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/test/psi_rsa_partition_test.py b/pp_lite/test/psi_rsa_partition_test.py new file mode 100644 index 000000000..31c610d4f --- /dev/null +++ b/pp_lite/test/psi_rsa_partition_test.py @@ -0,0 +1,129 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import csv +import unittest +import tempfile +import shutil + +import rsa +from gmpy2 import powmod # pylint: disable=no-name-in-module +from cityhash import CityHash64 # pylint: disable=no-name-in-module +from concurrent.futures import ThreadPoolExecutor, as_completed + +from pp_lite.data_join import envs +from pp_lite.data_join.psi_rsa.psi_client import run as client_run +from pp_lite.data_join.psi_rsa.psi_server import run as server_run + + +def sign(raw_id: str, private_key: rsa.PrivateKey) -> str: + + def _sign(i: int): + return powmod(i, private_key.d, private_key.n).digits() + + return hex(CityHash64(_sign(CityHash64(raw_id))))[2:] + + +def _make_data(client_input: str, server_input: str, private_key: rsa.PrivateKey, part_num: int, part_size: int, + ex_size: int): + if not os.path.exists(client_input): + os.makedirs(client_input) + if not os.path.exists(server_input): + os.makedirs(server_input) + for part_id in range(part_num): + client_filename = os.path.join(client_input, f'part-{part_id}') + server_filename = os.path.join(server_input, f'part-{part_id}') + client_ids = range(part_id * (part_size + ex_size), part_id * (part_size + ex_size) + part_size) + server_ids = range(part_id * (part_size + ex_size) + ex_size, (part_id + 1) * (part_size + ex_size)) + server_signed_ids = [sign(str(i), private_key) for i in server_ids] + with open(client_filename, 'wt', encoding='utf-8') as f: + writer = csv.DictWriter(f, fieldnames=['example_id']) + writer.writeheader() + writer.writerows([{'example_id': str(i)} for i in client_ids]) + with open(server_filename, 'wt', encoding='utf-8') as f: + writer = csv.DictWriter(f, fieldnames=['signed_id']) + writer.writeheader() + writer.writerows([{'signed_id': str(i)} for i in server_signed_ids]) + + +class IntegratedTest(unittest.TestCase): + + def setUp(self): + self._temp_dir = tempfile.mkdtemp() + envs.STORAGE_ROOT = self._temp_dir + envs.CLIENT_CONNECT_RETRY_INTERVAL = 1 + self.client_input = os.path.join(self._temp_dir, 'client_input') + self.server_input = os.path.join(self._temp_dir, 'server_input') + self.client_output = os.path.join(self._temp_dir, 'client_output') + self.server_output = os.path.join(self._temp_dir, 'server_output') + _, private_key = rsa.newkeys(1024) + _make_data(self.client_input, self.server_input, private_key, 2, 1000, 200) + self.private_key_path = os.path.join(self.server_input, 'private.key') + with open(self.private_key_path, 'wb') as f: + f.write(private_key.save_pkcs1()) + + def tearDown(self) -> None: + shutil.rmtree(self._temp_dir, ignore_errors=True) + + @staticmethod + def _run_client(input_path, output_path): + args = { + 'input_dir': input_path, + 'output_dir': output_path, + 'key_column': 'example_id', + 'server_port': 50058, + 'batch_size': 4096, + 'worker_rank': 1, + 'num_workers': 5, + 'num_sign_parallel': 2, + 'partitioned': True, + 'partition_list': [], + } + client_run(args=args) + + @staticmethod + def _run_server(input_path: str, 
output_path: str, private_key_path: str):
+        args = {
+            'rsa_private_key_path': private_key_path,
+            'input_dir': input_path,
+            'output_dir': output_path,
+            'signed_column': 'signed_id',
+            'key_column': 'example_id',
+            'server_port': 50058,
+            'batch_size': 4096,
+            'num_sign_parallel': 5
+        }
+        server_run(args=args)
+
+    def test(self):
+        futures = []
+        with ThreadPoolExecutor(max_workers=2) as pool:
+            futures.append(pool.submit(self._run_server, self.server_input, self.server_output, self.private_key_path))
+            futures.append(pool.submit(self._run_client, self.client_input, self.client_output))
+            for _ in as_completed(futures):
+                pass
+        with open(os.path.join(self.client_output, 'joined', 'part-00001-joined.csv'), 'rt', encoding='utf-8') as f:
+            reader = csv.DictReader(f)
+            ids = sorted([line['example_id'] for line in reader])
+            self.assertListEqual(ids, [str(id) for id in range(1400, 2200)])
+        with open(os.path.join(self.server_output, 'joined', 'output_1.csv'), 'rt', encoding='utf-8') as f:
+            reader = csv.DictReader(f)
+            ids = sorted([line['example_id'] for line in reader])
+            self.assertListEqual(ids, [str(id) for id in range(1400, 2200)])
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pp_lite/test/psi_rsa_test.py b/pp_lite/test/psi_rsa_test.py
new file mode 100644
index 000000000..791b4eb8d
--- /dev/null
+++ b/pp_lite/test/psi_rsa_test.py
@@ -0,0 +1,151 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import csv
+import unittest
+import tempfile
+import shutil
+
+import rsa
+from gmpy2 import powmod  # pylint: disable=no-name-in-module
+from cityhash import CityHash64  # pylint: disable=no-name-in-module
+# Use ProcessPool to isolate logging config conflicts.
+from concurrent.futures import ProcessPoolExecutor, as_completed +import multiprocessing + +from pp_lite.data_join import envs +from pp_lite.data_join.psi_rsa.psi_client import run as client_run +from pp_lite.data_join.psi_rsa.psi_server import run as server_run + + +def sign(raw_id: str, private_key: rsa.PrivateKey) -> str: + + def _sign(i: int): + return powmod(i, private_key.d, private_key.n).digits() + + return hex(CityHash64(_sign(CityHash64(raw_id))))[2:] + + +def _make_data(client_input: str, server_input: str, private_key: rsa.PrivateKey, part_num: int, part_size: int, + ex_size: int): + if not os.path.exists(client_input): + os.makedirs(client_input) + if not os.path.exists(server_input): + os.makedirs(server_input) + for part_id in range(part_num): + client_filename = os.path.join(client_input, f'part-{part_id}') + server_filename = os.path.join(server_input, f'part-{part_id}') + client_ids = range(part_id * (part_size + ex_size), part_id * (part_size + ex_size) + part_size) + server_ids = range(part_id * (part_size + ex_size) + ex_size, (part_id + 1) * (part_size + ex_size)) + server_signed_ids = [sign(str(i), private_key) for i in server_ids] + with open(client_filename, 'wt', encoding='utf-8') as f: + writer = csv.DictWriter(f, fieldnames=['raw_id']) + writer.writeheader() + writer.writerows([{'raw_id': str(i)} for i in client_ids]) + with open(server_filename, 'wt', encoding='utf-8') as f: + writer = csv.DictWriter(f, fieldnames=['signed_id']) + writer.writeheader() + writer.writerows([{'signed_id': str(i)} for i in server_signed_ids]) + + +class IntegratedTest(unittest.TestCase): + + def setUp(self): + self._temp_dir = tempfile.mkdtemp() + envs.STORAGE_ROOT = self._temp_dir + envs.CLIENT_CONNECT_RETRY_INTERVAL = 1 + self.client_input = os.path.join(self._temp_dir, 'client_input') + self.server_input = os.path.join(self._temp_dir, 'server_input') + self.client_output = os.path.join(self._temp_dir, 'client_output') + self.server_output = os.path.join(self._temp_dir, 'server_output') + _, private_key = rsa.newkeys(1024) + _make_data(self.client_input, self.server_input, private_key, 2, 1000, 200) + self.private_key_path = os.path.join(self.server_input, 'private.key') + with open(self.private_key_path, 'wb') as f: + f.write(private_key.save_pkcs1()) + + def tearDown(self) -> None: + shutil.rmtree(self._temp_dir, ignore_errors=True) + + @staticmethod + def _run_client(input_path, output_path, storage_root: str): + envs.STORAGE_ROOT = storage_root + args = { + 'input_dir': input_path, + 'output_dir': output_path, + 'key_column': 'raw_id', + 'server_port': 50058, + 'batch_size': 4096, + 'num_workers': 5, + 'num_sign_parallel': 2, + 'partitioned': False, + 'partition_list': [], + } + client_run(args) + + @staticmethod + def _run_server(input_path: str, output_path: str, private_key_path: str, storage_root: str): + envs.STORAGE_ROOT = storage_root + args = { + 'rsa_private_key_path': private_key_path, + 'input_dir': input_path, + 'output_dir': output_path, + 'signed_column': 'signed_id', + 'key_column': 'raw_id', + 'server_port': 50058, + 'batch_size': 4096, + 'num_sign_parallel': 5 + } + server_run(args=args) + + def test(self): + futures = [] + with ProcessPoolExecutor(max_workers=2) as pool: + futures.append( + pool.submit(self._run_server, self.server_input, self.server_output, self.private_key_path, + envs.STORAGE_ROOT)) + futures.append(pool.submit(self._run_client, self.client_input, self.client_output, envs.STORAGE_ROOT)) + for _ in as_completed(futures): + pass + with 
open(os.path.join(self.client_output, 'joined', 'joined.csv'), 'rt', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual(len([line['raw_id'] for line in reader]), 1600) + with open(os.path.join(self.server_output, 'joined', 'output.csv'), 'rt', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual(len([line['raw_id'] for line in reader]), 1600) + + def test_client_input_file(self): + futures = [] + with ProcessPoolExecutor(max_workers=2) as pool: + futures.append( + pool.submit(self._run_server, self.server_input, self.server_output, self.private_key_path, + envs.STORAGE_ROOT)) + futures.append( + pool.submit(self._run_client, os.path.join(self.client_input, 'part-0'), self.client_output, + envs.STORAGE_ROOT)) + for _ in as_completed(futures): + pass + with open(os.path.join(self.client_output, 'joined', 'joined.csv'), 'rt', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual(len([line['raw_id'] for line in reader]), 800) + with open(os.path.join(self.server_output, 'joined', 'output.csv'), 'rt', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual(len([line['raw_id'] for line in reader]), 800) + + +if __name__ == '__main__': + multiprocessing.set_start_method('spawn') + unittest.main() diff --git a/pp_lite/test/trainer_test.py b/pp_lite/test/trainer_test.py new file mode 100755 index 000000000..ddb62de40 --- /dev/null +++ b/pp_lite/test/trainer_test.py @@ -0,0 +1,141 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import logging
+import numpy as np
+import time
+import unittest
+import json
+
+import tensorflow.compat.v1 as tf
+import shutil
+from multiprocessing import get_context
+
+from pp_lite.trainer.client.client import main as client_main
+from pp_lite.trainer.server.server import main as server_main
+from pp_lite.proto.arguments_pb2 import TrainerArguments
+
+
+class IntegratedTest(unittest.TestCase):
+
+    def test_e2e(self):
+        logging.basicConfig(level=logging.INFO)
+        cluster_spec_str = json.dumps({
+            'master': ['localhost:50101'],
+            'ps': ['localhost:50102', 'localhost:50104'],
+            'worker': ['localhost:50103', 'localhost:50105']
+        })
+        cluster_spec_dict = json.loads(cluster_spec_str)
+        if isinstance(cluster_spec_dict, dict):
+            for name, addrs in cluster_spec_dict.items():
+                if name in ['master', 'ps', 'worker'] and isinstance(addrs, list):
+                    for addr in addrs:
+                        if not isinstance(addr, str):
+                            raise TypeError('Input cluster_spec type error')
+                else:
+                    raise TypeError('Input cluster_spec type error')
+        else:
+            raise TypeError('Input cluster_spec type error')
+
+        args = TrainerArguments(data_path='pp_lite/trainer/data/',
+                                file_wildcard='**/*',
+                                export_path='pp_lite/trainer/model/',
+                                server_port=55550,
+                                tf_port=51001,
+                                num_clients=1,
+                                cluster_spec=cluster_spec_str,
+                                model_version=0,
+                                model_max_to_keep=5,
+                                tolerated_version_gap=1,
+                                task_mode='local',
+                                local_steps=100,
+                                batch_size=10,
+                                master_addr='localhost:55555',
+                                ps_rank=0,
+                                worker_rank=0,
+                                save_version_gap=10)
+
+        # generate TFRecord
+        if not tf.io.gfile.exists(args.data_path):
+            logging.info('Generating data ...')
+            tf.io.gfile.makedirs(args.data_path)
+
+            (x, y), _ = tf.keras.datasets.mnist.load_data()
+            x = x.reshape((x.shape[0], -1)) / 255
+            n = 1000
+            num = x.shape[0] // n
+            for idx in range(num):
+                np_to_tfrecords(x[idx * n:idx * n + n], y[idx * n:idx * n + n], f'{args.data_path}{idx}')
+
+        context = get_context('spawn')
+
+        process_server = context.Process(target=server_main, args=(args,), daemon=True)
+        process_server.start()
+        time.sleep(1)
+
+        cluster_spec_dict['master'] = ['localhost:50201']
+        args.cluster_spec = json.dumps(cluster_spec_dict)
+        client_main(args=args)
+        process_server.join()
+
+        shutil.rmtree(args.data_path, ignore_errors=False, onerror=None)
+        shutil.rmtree(args.export_path, ignore_errors=False, onerror=None)
+
+
+def _bytes_feature(value):
+    if isinstance(value, type(tf.constant(0))):  # if value is a tensor
+        value = value.numpy()  # get value of tensor
+    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
+
+
+def _float_feature(value):
+    return tf.train.Feature(float_list=tf.train.FloatList(value=value))
+
+
+def _int64_feature(value):
+    return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
+
+
+def serialize_array(array):
+    array = tf.io.serialize_tensor(array)
+    return array
+
+
+def np_to_tfrecords(X, Y, file_path_prefix):
+    # Generate tfrecord writer
+    result_tf_file = file_path_prefix + '.tfrecords'
+    writer = tf.python_io.TFRecordWriter(result_tf_file)
+
+    # iterate over each sample,
+    # and serialize it as ProtoBuf.
+ # temporarily enable eager execution so _bytes_feature can call Tensor.numpy() + tf.enable_eager_execution() + for idx in range(X.shape[0]): + x = X[idx] + y = Y[idx] + data = { + 'X': _bytes_feature(tf.serialize_tensor(x.astype(np.float32))), + 'X_size': _int64_feature(x.shape[0]), + 'Y': _int64_feature(y) + } + features = tf.train.Features(feature=data) + example = tf.train.Example(features=features) + serialized = example.SerializeToString() + writer.write(serialized) + tf.disable_eager_execution() + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/testing/BUILD.bazel b/pp_lite/testing/BUILD.bazel new file mode 100644 index 000000000..eca1ef331 --- /dev/null +++ b/pp_lite/testing/BUILD.bazel @@ -0,0 +1,8 @@ +load("@rules_python//python:defs.bzl", "py_library") + +py_library( + name = "testing", + testonly = True, + srcs = ["make_data.py"], + visibility = ["//pp_lite:pp_lite_package"], +) diff --git a/pp_lite/testing/make_data.py b/pp_lite/testing/make_data.py new file mode 100644 index 000000000..c8110fc23 --- /dev/null +++ b/pp_lite/testing/make_data.py @@ -0,0 +1,72 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import csv +import os +import shutil +from typing import List +import uuid + + +def _make_fake_data(input_dir: str, + num_partitions: int, + line_num: int, + partitioned: bool = True, + spark: bool = False) -> List[str]: + header = sorted([f'x_{str(i)}' for i in range(20)]) + header.append('part_id') + header = sorted(header) + for pid in range(num_partitions): + filename_prefix = 'part' if partitioned else 'abcd' + filename = os.path.join(input_dir, f'{filename_prefix}-{pid}') + if spark: + filename = filename + '-' + str(uuid.uuid4()) + with open(filename, 'w', encoding='utf-8') as file: + writer = csv.DictWriter(file, header) + writer.writeheader() + for i in range(line_num): + data = {h: pid + i + j for j, h in enumerate(header)} + data['part_id'] = pid + writer.writerow(data) + if partitioned: + with open(os.path.join(input_dir, '_SUCCESS'), 'w', encoding='utf-8') as f: + f.write('') + return header + + +def make_data(num_partition, client_path: str, server_path: str): + shutil.rmtree(client_path, ignore_errors=True) + shutil.rmtree(server_path, ignore_errors=True) + os.makedirs(client_path, exist_ok=True) + os.makedirs(server_path, exist_ok=True) + num_lines = 1000 + ex_lines = 200 + for part_id in range(num_partition): + client_range = range(part_id * num_lines * 10, part_id * num_lines * 10 + num_lines) + server_range = range(part_id * num_lines * 10 - ex_lines, part_id * num_lines * 10 + num_lines - ex_lines) + client_ids = [{'raw_id': i} for i in client_range] + server_ids = [{'raw_id': i} for i in server_range] + with open(os.path.join(client_path, f'part-{part_id}'), 'wt', encoding='utf-8') as f: + writer = csv.DictWriter(f, ['raw_id']) + writer.writeheader() + writer.writerows(client_ids) + with open(os.path.join(server_path, f'part-{part_id}'), 'wt', encoding='utf-8') as f: 
+ writer = csv.DictWriter(f, ['raw_id']) + writer.writeheader() + writer.writerows(server_ids) + + +if __name__ == '__main__': + make_data(2, 'client_input', 'server_input') diff --git a/pp_lite/utils/BUILD.bazel b/pp_lite/utils/BUILD.bazel new file mode 100644 index 000000000..10630b626 --- /dev/null +++ b/pp_lite/utils/BUILD.bazel @@ -0,0 +1,38 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "utils", + srcs = [ + "decorators.py", + "logging_config.py", + "metric_collector.py", + "metrics.py", + "tools.py", + ], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + "//py_libs:metrics_lib", + ], +) + +py_test( + name = "decorators_test", + size = "small", + srcs = ["decorators_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":utils", + "//pp_lite/testing", + ], +) + +py_test( + name = "tools_test", + size = "small", + srcs = ["tools_test.py"], + visibility = ["//pp_lite:pp_lite_package"], + deps = [ + ":utils", + "//pp_lite/testing", + ], +) diff --git a/pp_lite/utils/decorators.py b/pp_lite/utils/decorators.py new file mode 100644 index 000000000..bb0487b5e --- /dev/null +++ b/pp_lite/utils/decorators.py @@ -0,0 +1,82 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import time +import signal +from functools import wraps + + +def raise_timeout(signum, frame): + raise TimeoutError + + +def timeout_fn(time_in_second: int = 60): + """Raise TimeoutError after time_in_second. + Note that this decorator should be used on main thread. Found more info in + ref: https://stackoverflow.com/questions/54749342/valueerror-signal-only-works-in-main-thread + """ + + def decorator_fn(f): + + @wraps(f) + def wrapper(*args, **kwargs): + signal.signal(signal.SIGALRM, raise_timeout) + signal.alarm(time_in_second) + try: + return f(*args, **kwargs) + finally: + signal.signal(signal.SIGALRM, signal.SIG_IGN) + + return wrapper + + return decorator_fn + + +def retry_fn(retry_times: int = 3): + + def decorator_fn(f): + + @wraps(f) + def wrapper(*args, **kwargs): + for i in range(retry_times - 1): + try: + return f(*args, **kwargs) + # pylint: disable=broad-except + except Exception as e: + logging.exception(f'Call function {f.__name__} failed, retrying times...{i + 1}') + continue + return f(*args, **kwargs) + + return wrapper + + return decorator_fn + + +def time_log(log_type: str = 'Function'): + + def decorator_fn(f): + + @wraps(f) + def wrapper(*args, **kwargs): + logging.info(f'[{log_type}] start ! ! !') + start_time = time.time() + res = f(*args, **kwargs) + logging.info(f'[{log_type}] used time: {time.time() - start_time} s') + return res + + return wrapper + + return decorator_fn diff --git a/pp_lite/utils/decorators_test.py b/pp_lite/utils/decorators_test.py new file mode 100644 index 000000000..5ec0c98be --- /dev/null +++ b/pp_lite/utils/decorators_test.py @@ -0,0 +1,61 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import time +import unittest +from pp_lite.utils.decorators import retry_fn, timeout_fn + + +class DecoratorTest(unittest.TestCase): + + def setUp(self) -> None: + self._res = 0 + + def test_timeout_fn(self): + + @timeout_fn(2) + def func() -> int: + time.sleep(1) + return 1 + + @timeout_fn(1) + def some_unstable_func() -> int: + time.sleep(2) + return 1 + + self.assertEqual(func(), 1) + with self.assertRaises(TimeoutError): + some_unstable_func() + + def test_retry_fn(self): + + @retry_fn(4) + def func(): + self._res = self._res + 2 + + @retry_fn(4) + def some_unstable_func(): + self._res = self._res + 2 + raise TimeoutError + + func() + self.assertEqual(self._res, 2) + with self.assertRaises(TimeoutError): + some_unstable_func() + self.assertEqual(self._res, 10) + + +if __name__ == '__main__': + unittest.main() diff --git a/pp_lite/utils/logging_config.py b/pp_lite/utils/logging_config.py new file mode 100644 index 000000000..bec8d3eef --- /dev/null +++ b/pp_lite/utils/logging_config.py @@ -0,0 +1,54 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +from typing import Dict +from datetime import datetime + + +def log_path(log_dir: str) -> str: + return os.path.join(log_dir, f'{datetime.now().strftime("%Y%m%d_%H%M%S_%f")}.log') + + +def logging_config(file_path: str) -> Dict: + return { + 'version': 1, + 'disable_existing_loggers': False, + 'root': { + 'handlers': ['console', 'file'], + 'level': 'DEBUG' + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'formatter': 'generic', + 'level': 'INFO' + }, + 'file': { + 'class': 'logging.FileHandler', + 'formatter': 'generic', + 'filename': file_path, + 'encoding': 'utf-8', + 'level': 'DEBUG' + } + }, + 'formatters': { + 'generic': { + 'format': '%(asctime)s [%(process)d] [%(levelname)s] %(message)s', + 'datefmt': '%Y-%m-%d %H:%M:%S', + 'class': 'logging.Formatter' + } + } + } diff --git a/pp_lite/utils/metric_collector.py b/pp_lite/utils/metric_collector.py new file mode 100755 index 000000000..0e276db63 --- /dev/null +++ b/pp_lite/utils/metric_collector.py @@ -0,0 +1,35 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from py_libs import metrics +from os import environ +from typing import ContextManager, Union, Dict + +service_name = environ.get('METRIC_COLLECTOR_SERVICE_NAME', 'default_metric_service') +endpoint = environ.get('METRIC_COLLECTOR_EXPORT_ENDPOINT') + +cluster_name = environ.get('CLUSTER', 'default_cluster') +k8s_job_name = environ.get('APPLICATION_ID', 'default_k8s_job_name') +global_service_label = {'k8s_job_name': k8s_job_name} +if endpoint is not None: + metrics.add_handler(metrics.OpenTelemetryMetricsHandler.new_handler(cluster_name, endpoint, service_name)) + + +def emit_counter(name: str, value: Union[int, float], tags: Dict[str, str] = None): + metrics.emit_counter(name, value, global_service_label if tags is None else {**tags, **global_service_label}) + + +def emit_timing(name: str, tags: Dict[str, str] = None) -> ContextManager[None]: + return metrics.emit_timing(name, global_service_label if tags is None else {**tags, **global_service_label}) diff --git a/pp_lite/utils/metrics.py b/pp_lite/utils/metrics.py new file mode 100644 index 000000000..dae3bbde0 --- /dev/null +++ b/pp_lite/utils/metrics.py @@ -0,0 +1,56 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from pp_lite.utils.tools import print_named_dict + + +class Handler(object): + + def emit_counter(self, key: str, value: int): + """ + Different handler has different count strategy. + This version is intended to be implemented by subclasses so raises a NotImplementedError. + """ + raise NotImplementedError('Emit must be implemented by Handler subclasses') + + +class AuditHandler(Handler): + + def __init__(self, interval: int = 50): + super().__init__() + self._audit_metrics = {} + self._step = 0 + self._INTERVAL = interval + + def emit_counter(self, key: str, value: int): + self._audit_metrics[key] = self._audit_metrics.get(key, 0) + value + self._step += 1 + if self._step % self._INTERVAL == 0: + self.show_audit_info() + + def get_value(self, key: str) -> int: + return self._audit_metrics.get(key, 0) + + def show_audit_info(self): + if not self._audit_metrics: + return + print_named_dict(name='Audit', target_dict=self._audit_metrics) + + +_audit_client = AuditHandler() + +emit_counter = _audit_client.emit_counter +get_audit_value = _audit_client.get_value +show_audit_info = _audit_client.show_audit_info diff --git a/pp_lite/utils/tools.py b/pp_lite/utils/tools.py new file mode 100644 index 000000000..e8edacbd7 --- /dev/null +++ b/pp_lite/utils/tools.py @@ -0,0 +1,28 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Dict, List + + +def print_named_dict(name: str, target_dict: Dict): + logging.info(f'===================={name}====================') + for key, value in target_dict.items(): + logging.info(f'{key}: {value}') + logging.info(f'===================={"=" * len(name)}====================') + + +def get_partition_ids(worker_rank: int, num_workers: int, num_partitions: int) -> List[int]: + return [i for i in range(num_partitions) if i % num_workers == worker_rank] diff --git a/pp_lite/utils/tools_test.py b/pp_lite/utils/tools_test.py new file mode 100644 index 000000000..1334cd8e7 --- /dev/null +++ b/pp_lite/utils/tools_test.py @@ -0,0 +1,30 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from pp_lite.utils.tools import get_partition_ids + + +class ToolsTest(unittest.TestCase): + + def test_get_partition_ids(self): + self.assertListEqual(get_partition_ids(1, 5, 12), [1, 6, 11]) + self.assertListEqual(get_partition_ids(3, 5, 2), []) + self.assertListEqual(get_partition_ids(4, 5, 10), [4, 9]) + self.assertListEqual(get_partition_ids(5, 5, 10), []) + + +if __name__ == '__main__': + unittest.main() diff --git a/py_libs/BUILD.bazel b/py_libs/BUILD.bazel new file mode 100644 index 000000000..587d28421 --- /dev/null +++ b/py_libs/BUILD.bazel @@ -0,0 +1,49 @@ +package(default_visibility = ["//visibility:public"]) + +py_library( + name = "metrics_lib", + srcs = [ + "metrics.py", + ], + imports = [".."], + deps = [ + "@common_opentelemetry_exporter_otlp//:pkg", + "@common_opentelemetry_sdk//:pkg", + ], +) + +py_test( + name = "metrics_lib_test", + size = "small", + srcs = [ + "metrics_test.py", + ], + imports = [".."], + main = "metrics_test.py", + deps = [ + ":metrics_lib", + ], +) + +py_library( + name = "sdk", + srcs = [ + "sdk.py", + ], + imports = [ + "..", + ], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "@common_requests//:pkg", + ], +) + +py_library( + name = "logging_config", + srcs = [ + "logging_config.py", + ], + imports = [".."], +) diff --git a/py_libs/logging_config.py b/py_libs/logging_config.py new file mode 100644 index 000000000..4ebaa6b7d --- /dev/null +++ b/py_libs/logging_config.py @@ -0,0 +1,45 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from logging import LogRecord +from typing import Optional +import os + +LOGGING_LEVEL = os.environ.get('LOGGING_LEVEL', 'INFO') + + +class LevelFilter(logging.Filter): + + def filter(self, record: LogRecord): + if record.levelno <= logging.WARNING: + return False + return True + + +def logging_config(role: str, log_file: Optional[str] = None): + # Remove all handlers associated with the root logger object. + for handler in logging.root.handlers: + logging.root.removeHandler(handler) + + logging_format = f'%(asctime)s %(levelname)-7s [{role}] %(message)s' + logging.basicConfig(level=LOGGING_LEVEL, format=logging_format) + logging.getLogger('urllib3.connectionpool').addFilter(LevelFilter()) + if log_file is not None: + os.makedirs(os.path.dirname(log_file), exist_ok=True) + file_handler = logging.FileHandler(log_file) + file_handler.setLevel(LOGGING_LEVEL) + file_handler.setFormatter(logging.Formatter(logging_format)) + logging.getLogger().addHandler(file_handler) diff --git a/py_libs/metrics.py b/py_libs/metrics.py new file mode 100644 index 000000000..bcdcbc8c7 --- /dev/null +++ b/py_libs/metrics.py @@ -0,0 +1,259 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from contextlib import contextmanager
+from datetime import datetime
+import logging
+from abc import ABCMeta, abstractmethod
+import sys
+from typing import ContextManager, Dict, Optional, Union
+from threading import Lock
+
+from opentelemetry import trace, _metrics as metrics
+from opentelemetry._metrics.instrument import UpDownCounter
+from opentelemetry._metrics.measurement import Measurement
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk._metrics import MeterProvider
+from opentelemetry.sdk._metrics.export import (PeriodicExportingMetricReader, ConsoleMetricExporter, MetricExporter,
+                                               MetricExportResult, Metric, Sequence)
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.exporter.otlp.proto.grpc._metric_exporter import OTLPMetricExporter
+from opentelemetry.sdk.trace.export import (BatchSpanProcessor, ConsoleSpanExporter, SpanExportResult, SpanExporter,
+                                            ReadableSpan)
+
+
+def _validate_tags(tags: Dict[str, str]):
+    if tags is None:
+        return
+    for k, v in tags.items():
+        if not isinstance(k, str) or not isinstance(v, str):
+            raise TypeError(f'Expected str, actually {type(k)}: {type(v)}')
+
+
+class DevNullSpanExporter(SpanExporter):
+
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        return SpanExportResult.SUCCESS
+
+    def shutdown(self):
+        pass
+
+
+class DevNullMetricExporter(MetricExporter):
+
+    def export(self, metrics: Sequence[Metric]) -> MetricExportResult:  # pylint: disable=redefined-outer-name
+        return MetricExportResult.SUCCESS
+
+    def shutdown(self):
+        pass
+
+
+class MetricsHandler(metaclass=ABCMeta):
+
+    @abstractmethod
+    def emit_counter(self, name: str, value: Union[int, float], tags: Dict[str, str] = None):
+        """Emits counter metrics which will be accumulated.
+
+        Args:
+            name: name of the metrics, e.g. foo.bar
+            value: value of the metrics as an int or float, e.g. 43
+            tags: extra tags of the counter, e.g. {"is_test": "true"}
+        """
+
+    @abstractmethod
+    def emit_store(self, name: str, value: Union[int, float], tags: Dict[str, str] = None):
+        """Emits store metrics.
+
+        Args:
+            name: name of the metrics, e.g. foo.bar
+            value: value of the metrics as an int or float, e.g. 43
+            tags: extra tags of the store, e.g. {"is_test": "true"}
+        """
+
+    @abstractmethod
+    def emit_timing(self, name: str, tags: Dict[str, str] = None) -> ContextManager[None]:
+        """Emits a timing scope.
+
+        Args:
+            name: name of the metrics, e.g. foo.bar
+            tags: extra tags of the timing, e.g. {"is_test": "true"}
+
+        Returns:
+            A context manager that measures the wrapped block.
+        """
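+
+
+# Implementation note: the module-level `_Client` defined below validates tags
+# with `_validate_tags` before fanning calls out, so concrete handlers may
+# assume `tags` is either None or a Dict[str, str]. `_DefaultMetricsHandler`
+# is the reference implementation of this interface.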
+ """ + + +class _DefaultMetricsHandler(MetricsHandler): + + def emit_counter(self, name, value: Union[int, float], tags: Dict[str, str] = None): + tags = tags or {} + logging.info(f'[Metric][Counter] {name}: {value}, tags={tags}') + + def emit_store(self, name, value: Union[int, float], tags: Dict[str, str] = None): + tags = tags or {} + logging.info(f'[Metric][Store] {name}: {value}, tags={tags}') + + @contextmanager + def emit_timing(self, name: str, tags: Dict[str, str] = None) -> ContextManager[None]: + tags = tags or {} + logging.info(f'[Meitrcs][Timing] {name} started, tags={tags}') + started = datetime.timestamp(datetime.now()) + yield None + ended = datetime.timestamp(datetime.now()) + logging.info(f'[Meitrcs][Timing] {name}: {(ended - started):.2f}s ended, tags={tags}') + + +class OpenTelemetryMetricsHandler(MetricsHandler): + + class Callback: + + def __init__(self) -> None: + self._measurement_list = [] + + def add(self, value: Union[int, float], tags: Dict[str, str]): + self._measurement_list.append(Measurement(value=value, attributes=tags)) + + def __iter__(self): + return self + + def __next__(self): + if len(self._measurement_list) == 0: + raise StopIteration + return self._measurement_list.pop(0) + + def __call__(self): + return iter(self) + + @classmethod + def new_handler(cls, + cluster: str, + apm_server_endpoint: str, + instrument_module_name: Optional[str] = None) -> 'OpenTelemetryMetricsHandler': + instrument_module_name = instrument_module_name or 'fedlearner_webconsole' + resource = Resource.create(attributes={ + 'service.name': instrument_module_name, + 'deployment.environment': cluster, + }) + # initiailized trace stuff + if apm_server_endpoint == 'stdout': + span_exporter = ConsoleSpanExporter(out=sys.stdout) + elif apm_server_endpoint == '/dev/null': + span_exporter = DevNullSpanExporter() + else: + span_exporter = OTLPSpanExporter(endpoint=apm_server_endpoint) + tracer_provider = TracerProvider(resource=resource) + tracer_provider.add_span_processor(BatchSpanProcessor(span_exporter)) + trace.set_tracer_provider(tracer_provider) + + # initiailized meter stuff + if apm_server_endpoint == 'stdout': + metric_exporter = ConsoleMetricExporter(out=sys.stdout) + elif apm_server_endpoint == '/dev/null': + metric_exporter = DevNullMetricExporter() + else: + metric_exporter = OTLPMetricExporter(endpoint=apm_server_endpoint) + reader = PeriodicExportingMetricReader(metric_exporter, export_interval_millis=60000) + meter_provider = MeterProvider(metric_readers=[reader], resource=resource) + metrics.set_meter_provider(meter_provider=meter_provider) + + return cls(tracer=tracer_provider.get_tracer(instrument_module_name), + meter=meter_provider.get_meter(instrument_module_name)) + + def __init__(self, tracer: trace.Tracer, meter: metrics.Meter): + self._tracer = tracer + self._meter = meter + + self._lock = Lock() + self._cache: Dict[str, Union[UpDownCounter, OpenTelemetryMetricsHandler.Callback]] = {} + + def emit_counter(self, name: str, value: Union[int, float], tags: Dict[str, str] = None): + # Note that the `values.` prefix is used for Elastic Index Dynamic Inference. + # Optimize by decreasing lock. + if name not in self._cache: + with self._lock: + # Double check `self._cache` content. 
+
+    def __init__(self, tracer: trace.Tracer, meter: metrics.Meter):
+        self._tracer = tracer
+        self._meter = meter
+
+        self._lock = Lock()
+        self._cache: Dict[str, Union[UpDownCounter, OpenTelemetryMetricsHandler.Callback]] = {}
+
+    def emit_counter(self, name: str, value: Union[int, float], tags: Dict[str, str] = None):
+        # Note that the `values.` prefix is used for Elastic Index Dynamic Inference.
+        # Optimize by decreasing lock.
+        if name not in self._cache:
+            with self._lock:
+                # Double check `self._cache` content.
+                if name not in self._cache:
+                    counter = self._meter.create_up_down_counter(name=f'values.{name}')
+                    self._cache[name] = counter
+        assert isinstance(self._cache[name], UpDownCounter)
+        self._cache[name].add(value, attributes=tags)
+
+    def emit_store(self, name: str, value: Union[int, float], tags: Dict[str, str] = None):
+        # Note that the `values.` prefix is used for Elastic Index Dynamic Inference.
+        # Optimize by decreasing lock.
+        if name not in self._cache:
+            with self._lock:
+                # Double check `self._cache` content.
+                if name not in self._cache:
+                    cb = OpenTelemetryMetricsHandler.Callback()
+                    self._meter.create_observable_gauge(name=f'values.{name}', callback=cb)
+                    self._cache[name] = cb
+        assert isinstance(self._cache[name], OpenTelemetryMetricsHandler.Callback)
+        self._cache[name].add(value=value, tags=tags)
+
+    def emit_timing(self, name: str, tags: Dict[str, str] = None) -> ContextManager[None]:
+        return self._tracer.start_as_current_span(name=name, attributes=tags)
+
+
+class _Client(MetricsHandler):
+    """A wrapper for all handlers.
+
+    Inspired by the logging module: routing everything through one client
+    instance avoids global statements and keeps the code thread-safe."""
+    _handlers = []
+
+    def __init__(self):
+        self._handlers.append(_DefaultMetricsHandler())
+
+    def emit_counter(self, name, value: Union[int, float], tags: Dict[str, str] = None):
+        _validate_tags(tags)
+        for handler in self._handlers:
+            handler.emit_counter(name, value, tags)
+
+    def emit_store(self, name, value: Union[int, float], tags: Dict[str, str] = None):
+        _validate_tags(tags)
+        for handler in self._handlers:
+            handler.emit_store(name, value, tags)
+
+    @contextmanager
+    def emit_timing(self, name: str, tags: Dict[str, str] = None) -> ContextManager[None]:
+        _validate_tags(tags)
+        emit_timings = []
+        for handler in self._handlers:
+            emit_timings.append(handler.emit_timing(name, tags))
+        for e in emit_timings:
+            e.__enter__()
+        yield None
+        emit_timings.reverse()
+        for e in emit_timings:
+            e.__exit__(None, None, None)
+
+    def add_handler(self, handler):
+        self._handlers.append(handler)
+
+    def reset_handlers(self):
+        # Only keep the first one
+        del self._handlers[1:]
+
+
+# Exports all to module level
+_client = _Client()
+emit_counter = _client.emit_counter
+emit_store = _client.emit_store
+emit_timing = _client.emit_timing
+add_handler = _client.add_handler
+reset_handlers = _client.reset_handlers
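+
+# Illustrative usage of the module-level API (a sketch; `do_work` stands in
+# for an arbitrary workload and is not part of this change):
+#
+#   from py_libs import metrics
+#   metrics.emit_counter('dataset.batch', 1, tags={'module': 'dataset'})
+#   with metrics.emit_timing('dataset.process'):
+#       do_work()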
diff --git a/py_libs/metrics_test.py b/py_libs/metrics_test.py
new file mode 100644
index 000000000..63c0c5774
--- /dev/null
+++ b/py_libs/metrics_test.py
@@ -0,0 +1,295 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import contextlib
+import io
+import json
+import logging
+import multiprocessing
+from multiprocessing import Process, Queue
+import time
+import unittest
+from io import StringIO
+from unittest.mock import patch
+from os import linesep
+from typing import ContextManager, Dict
+from contextlib import contextmanager
+
+from opentelemetry import trace as otel_trace, _metrics as otel_metrics
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+from opentelemetry.sdk._metrics import MeterProvider
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace.export import ConsoleSpanExporter
+from opentelemetry.sdk._metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader
+
+from py_libs import metrics
+from py_libs.metrics import _DefaultMetricsHandler, MetricsHandler, OpenTelemetryMetricsHandler
+
+
+class _FakeMetricsHandler(MetricsHandler):
+
+    def emit_counter(self, name, value: int, tags: Dict[str, str] = None):
+        logging.info(f'[Test][Counter] {name} - {value}')
+
+    def emit_store(self, name, value: int, tags: Dict[str, str] = None):
+        logging.info(f'[Test][Store] {name} - {value}')
+
+    @contextmanager
+    def emit_timing(self, name: str, tags: Dict[str, str] = None) -> ContextManager[None]:
+        logging.info(f'[Test][Timing] {name} started')
+        yield None
+        logging.info(f'[Test][Timing] {name} ended')
+
+
+class DefaultMetricsHandlerTest(unittest.TestCase):
+
+    def setUp(self):
+        self._handler = _DefaultMetricsHandler()
+
+    def test_emit_counter(self):
+        with self.assertLogs() as cm:
+            self._handler.emit_counter('test', 1)
+            self._handler.emit_counter('test2', 2)
+        logs = [r.msg for r in cm.records]
+        self.assertEqual(logs, ['[Metric][Counter] test: 1, tags={}', '[Metric][Counter] test2: 2, tags={}'])
+
+    def test_emit_store(self):
+        with self.assertLogs() as cm:
+            self._handler.emit_store('test', 199)
+            self._handler.emit_store('test2', 299)
+        logs = [r.msg for r in cm.records]
+        self.assertEqual(logs, ['[Metric][Store] test: 199, tags={}', '[Metric][Store] test2: 299, tags={}'])
+
+    def test_emit_timing(self):
+        with self.assertLogs() as cm:
+            with self._handler.emit_timing('test'):
+                time.sleep(0.01)
+        logs = [r.msg for r in cm.records]
+        self.assertEqual(
+            logs, ['[Metric][Timing] test started, tags={}', '[Metric][Timing] test: 0.01s ended, tags={}'])
+
+
+class ClientTest(unittest.TestCase):
+
+    def setUp(self):
+        metrics.add_handler(_FakeMetricsHandler())
+
+    def tearDown(self):
+        metrics.reset_handlers()
+
+    def test_emit_counter(self):
+        with self.assertRaises(TypeError):
+            metrics.emit_counter('test', 1, tags={'name': 1})
+
+        with self.assertLogs() as cm:
+            metrics.emit_counter('test', 1)
+        logs = [r.msg for r in cm.records]
+        self.assertEqual(logs, ['[Metric][Counter] test: 1, tags={}', '[Test][Counter] test - 1'])
+
+    def test_emit_store(self):
+        with self.assertRaises(TypeError):
+            metrics.emit_store('test', 1, tags={'name': 1})
+
+        with self.assertLogs() as cm:
+            metrics.emit_store('test', 199)
+        logs = [r.msg for r in cm.records]
+        self.assertEqual(logs, ['[Metric][Store] test: 199, tags={}', '[Test][Store] test - 199'])
+
+    def test_emit_timing(self):
+        with self.assertRaises(TypeError):
+            # Tags are validated when the context manager is entered.
+            with metrics.emit_timing('test', tags={'name': 1}):
+                pass
+
+        with self.assertLogs() as cm:
+            with metrics.emit_timing('test'):
+                time.sleep(0.01)
+        logs = [r.msg for r in cm.records]
+        self.assertEqual(logs, [
+            '[Metric][Timing] test started, tags={}', '[Test][Timing] test started', '[Test][Timing] test ended',
+            '[Metric][Timing] test: 0.01s ended, tags={}'
+        ])
+
+
+class OpenTelemetryMetricsHandlerClassMethodTest(unittest.TestCase):
+
+    def setUp(self):
+        self._span_out = StringIO()
+        self._span_exporter_patcher = patch('py_libs.metrics.OTLPSpanExporter',
+                                            lambda **kwargs: ConsoleSpanExporter(out=self._span_out))
+        self._metric_out = StringIO()
+        self._metric_exporter_patcher = patch('py_libs.metrics.OTLPMetricExporter',
+                                              lambda **kwargs: ConsoleMetricExporter(out=self._metric_out))
+        self._span_exporter_patcher.start()
+        self._metric_exporter_patcher.start()
+
+    def tearDown(self):
+        self._metric_exporter_patcher.stop()
+        self._span_exporter_patcher.stop()
+
+    def test_new_handler(self):
+        OpenTelemetryMetricsHandler.new_handler(cluster='default', apm_server_endpoint='stdout')
+        self.assertEqual(
+            otel_trace.get_tracer_provider().resource,
+            Resource(
+                attributes={
+                    'telemetry.sdk.language': 'python',
+                    'telemetry.sdk.name': 'opentelemetry',
+                    'telemetry.sdk.version': '1.10.0',
+                    'service.name': 'fedlearner_webconsole',
+                    'deployment.environment': 'default',
+                }))
+        self.assertEqual(
+            otel_metrics.get_meter_provider()._sdk_config.resource,  # pylint: disable=protected-access
+            Resource(
+                attributes={
+                    'telemetry.sdk.language': 'python',
+                    'telemetry.sdk.name': 'opentelemetry',
+                    'telemetry.sdk.version': '1.10.0',
+                    'service.name': 'fedlearner_webconsole',
+                    'deployment.environment': 'default',
+                }))
+
+
+class OpenTelemetryMetricsHandlerTest(unittest.TestCase):
+
+    def setUp(self):
+        self._span_out = StringIO()
+        self._metric_out = StringIO()
+        tracer_provider = TracerProvider()
+        # We use a custom formatter so that each span is emitted as a single
+        # JSON object per line, which makes the stream easy to split.
+        tracer_provider.add_span_processor(
+            BatchSpanProcessor(
+                ConsoleSpanExporter(
+                    out=self._span_out,
+                    formatter=lambda span: span.to_json(indent=None) + linesep,
+                )))
+        reader = PeriodicExportingMetricReader(ConsoleMetricExporter(out=self._metric_out),
+                                               export_interval_millis=60000)
+        meter_provider = MeterProvider(metric_readers=[reader])
+        self._tracer_provider = tracer_provider
+        self._meter_provider = meter_provider
+        self._handler = OpenTelemetryMetricsHandler(tracer=tracer_provider.get_tracer(__file__),
+                                                    meter=meter_provider.get_meter(__file__))
+
+    def _force_flush(self):
+        self._meter_provider.force_flush()
+        self._metric_out.flush()
+        self._tracer_provider.force_flush()
+        self._span_out.flush()
+
+    def test_emit_store(self):
+        # Note that same instrument with different tags won't be aggregated.
+        # Aggregation rule for `emit_store` is delivering the last value of this interval.
+        # If no value at this interval, no `Metric` will be sent.
+        self._handler.emit_store(name='test_store', value=1, tags={'module': 'dataset', 'uuid': 'tag1'})
+        self._handler.emit_store(name='test_store', value=5, tags={'module': 'dataset', 'uuid': 'tag2'})
+        self._handler.emit_store(name='test_store', value=2, tags={'module': 'dataset', 'uuid': 'tag1'})
+        self._force_flush()
+        self._force_flush()
+        self._force_flush()
+        self._handler.emit_store(name='test_store', value=0, tags={'module': 'dataset', 'uuid': 'tag1'})
+        self._force_flush()
+        self.assertEqual(self._span_out.getvalue(), '')
+        self._metric_out.seek(0)
+        lines = self._metric_out.readlines()
+        measurements = []
+        for l in lines:
+            measurement = json.loads(l)
+            measurements.append(measurement)
+        self.assertEqual(len(measurements), 3)
+        self.assertEqual(measurements[0]['attributes'], {'uuid': 'tag1', 'module': 'dataset'})
+        self.assertEqual(measurements[1]['attributes'], {'uuid': 'tag2', 'module': 'dataset'})
+        self.assertEqual(measurements[0]['name'], 'values.test_store')
+        self.assertEqual([m['point']['value'] for m in measurements], [2, 5, 0])
+
+    def test_emit_counter(self):
+        # Note that same instrument with different tags won't be aggregated.
+        # Aggregation rule for `emit_counter` is delivering the accumulated value with the same tags during this interval. # pylint: disable=line-too-long
+        # If no value at this interval, a `Metric` with value of last interval will be sent.
+        self._handler.emit_counter(name='test_counter', value=1, tags={'module': 'dataset', 'uuid': 'tag1'})
+        self._handler.emit_counter(name='test_counter', value=5, tags={'module': 'dataset', 'uuid': 'tag2'})
+        self._handler.emit_counter(name='test_counter', value=2, tags={'module': 'dataset', 'uuid': 'tag1'})
+        self._force_flush()
+        self._force_flush()
+        self._handler.emit_counter(name='test_counter', value=-1, tags={'module': 'dataset', 'uuid': 'tag1'})
+        self._force_flush()
+        self.assertEqual(self._span_out.getvalue(), '')
+        self._metric_out.seek(0)
+        lines = self._metric_out.readlines()
+        measurements = []
+        for l in lines:
+            measurement = json.loads(l)
+            measurements.append(measurement)
+        self.assertEqual(len(measurements), 6)
+        self.assertEqual(measurements[0]['attributes'], {'uuid': 'tag1', 'module': 'dataset'})
+        self.assertEqual(measurements[1]['attributes'], {'uuid': 'tag2', 'module': 'dataset'})
+        self.assertEqual(measurements[0]['name'], 'values.test_counter')
+        self.assertEqual([m['point']['value'] for m in measurements], [3, 5, 3, 5, 2, 5])
+
+    def test_emit_timing(self):
+        with self._handler.emit_timing('test', {}):
+            time.sleep(0.1)
+        with self._handler.emit_timing('test', {}):
+            time.sleep(0.2)
+        with self._handler.emit_timing('test2', {}):
+            time.sleep(0.1)
+        self._force_flush()
+        self._span_out.seek(0)
+        lines = self._span_out.readlines()
+        measurements = []
+        for l in lines:
+            measurement = json.loads(l)
+            measurements.append(measurement)
+
+        self.assertEqual(len(measurements), 3)
+        self.assertEqual([m['name'] for m in measurements], ['test', 'test', 'test2'])
+
+
+class OpenTelemetryMetricsHandlerOutputTest(unittest.TestCase):
+
+    @staticmethod
+    def suite_test(q: Queue, test_case: str):
+        # `OpenTelemetryMetricsHandler.new_handler` sets some global variables, which makes repeated test cases non-idempotent. # pylint: disable=line-too-long
+        # So we run each case in a child process instead.
+ f = io.StringIO() + with contextlib.redirect_stdout(f): + handler = OpenTelemetryMetricsHandler.new_handler(cluster='test_cluster', apm_server_endpoint=test_case) + handler.emit_store('test', 199) + handler.emit_counter('test2', 299) + otel_metrics.get_meter_provider().force_flush() + otel_trace.get_tracer_provider().force_flush() + q.put(f.getvalue()) + + def test_dev_null(self): + + queue = multiprocessing.SimpleQueue() + test_process = Process(target=self.suite_test, args=(queue, '/dev/null')) + test_process.start() + test_process.join() + self.assertEqual(queue.get(), '') + + def test_stdout(self): + + queue = multiprocessing.SimpleQueue() + test_process = Process(target=self.suite_test, args=(queue, 'stdout')) + test_process.start() + test_process.join() + self.assertIn('test', queue.get()) + + +if __name__ == '__main__': + multiprocessing.set_start_method('spawn') + logging.basicConfig(level=logging.DEBUG) + unittest.main() diff --git a/py_libs/sdk.py b/py_libs/sdk.py new file mode 100644 index 000000000..afb2c0669 --- /dev/null +++ b/py_libs/sdk.py @@ -0,0 +1,355 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import logging +from time import sleep +import time +import urllib +import requests +from http import HTTPStatus +from typing import Dict, Tuple, Optional, List +from fedlearner_webconsole.mmgr.models import ModelJobType +from fedlearner_webconsole.dataset.models import DatasetJobKind, DatasetKindV2 as DatasetKind + + +def _get_response_data(resp: requests.Response) -> Tuple[int, Dict]: + return resp.status_code, json.loads(resp.content) + + +class WebconsoleClient: + + def __init__(self, domain_name: str): + self._domain_name = domain_name + self._session = None + self.sign_in() + + def sign_in(self): + self._session = requests.Session() + payload = {'username': 'robot', 'password': 'ZmxAMTIzNDUu'} + resp = self._session.post(f'{self._domain_name}/api/v2/auth/signin', json=payload) + content = json.loads(resp.content) + access_token = content['data']['access_token'] + self._session.headers.update({'Authorization': f'Bearer {access_token}'}) + + def get_system_info(self): + url = f'{self._domain_name}/api/v2/settings/system_info' + return _get_response_data(self._session.get(url)) + + def get_templates(self): + url = f'{self._domain_name}/api/v2/workflow_templates' + return _get_response_data(self._session.get(url)) + + def get_template(self, template_id): + url = f'{self._domain_name}/api/v2/workflow_templates/{template_id}' + return _get_response_data(self._session.get(url)) + + def get_projects(self): + url = f'{self._domain_name}/api/v2/projects' + return _get_response_data(self._session.get(url)) + + def get_project_by_id(self, project_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}' + return _get_response_data(self._session.get(url)) + + def get_data_sources(self, project_id: int): + url = f'{self._domain_name}/api/v2/data_sources?project_id={project_id}' + return 
_get_response_data(self._session.get(url)) + + def get_datasets(self, project_id: int, keyword: Optional[str] = None): + url = f'{self._domain_name}/api/v2/datasets' + filter_expression = urllib.parse.quote(f'(and(project_id={project_id})(name~="{keyword}"))') + return _get_response_data(self._session.get(f'{url}?filter={filter_expression}')) + + def post_data_source(self, project_id: int, input_data_path: str, data_source_name: str, store_format: str): + url = f'{self._domain_name}/api/v2/data_sources' + payload = { + 'project_id': project_id, + 'data_source': { + 'data_source_url': input_data_path, + 'name': data_source_name, + 'store_format': store_format, + 'dataset_format': 'TABULAR', + } + } + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def post_raw_dataset(self, project_id: int, name: str): + url = f'{self._domain_name}/api/v2/datasets' + payload = { + 'dataset_format': 'TABULAR', + 'dataset_type': 'PSI', + 'import_type': 'COPY', + 'store_format': 'TFRECORDS', + 'name': name, + 'kind': DatasetKind.RAW.value, + 'need_publish': True, + 'project_id': project_id + } + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def post_intersection_dataset(self, project_id: int, name: str): + url = f'{self._domain_name}/api/v2/datasets' + payload = { + 'dataset_format': 'TABULAR', + 'dataset_type': 'PSI', + 'import_type': 'COPY', + 'store_format': 'TFRECORDS', + 'name': name, + 'kind': DatasetKind.PROCESSED.value, + 'is_published': True, + 'project_id': project_id + } + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def post_data_batches(self, dataset_id: int, data_source_id: int): + url = f'{self._domain_name}/api/v2/datasets/{str(dataset_id)}/batches' + payload = {'data_source_id': data_source_id} + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def get_participant_datasets(self, project_id: int, kind: DatasetKind): + url = f'{self._domain_name}/api/v2/project/{project_id}/participant_datasets?kind={kind.value}' + return _get_response_data(self._session.get(url)) + + def get_dataset_job_variables(self, dataset_job_kind: DatasetJobKind): + url = f'{self._domain_name}/api/v2/dataset_job_definitions/{dataset_job_kind.value}' + return _get_response_data(self._session.get(url)) + + def post_dataset_job(self, project_id: int, output_dataset_id: int, dataset_job_parameter: Dict): + url = f'{self._domain_name}/api/v2/projects/{project_id}/dataset_jobs' + payload = {'dataset_job_parameter': dataset_job_parameter, 'output_dataset_id': output_dataset_id} + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def get_model_job_groups(self, project_id: int, keyword: Optional[str] = None): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups' + resp = self._session.get(url, json={'keyword': keyword}) + return _get_response_data(resp) + + def get_model_job_group(self, project_id: int, group_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups/{group_id}' + resp = self._session.get(url) + return _get_response_data(resp) + + def post_model_job_groups(self, project_id: int, name: str, dataset_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups' + payload = {'name': name, 'dataset_id': dataset_id, 'algorithm_type': 'NN_VERTICAL'} + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def put_model_job_group(self, project_id: 
int, group_id: int, algorithm_id: int, config: Dict): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups/{group_id}' + payload = {'authorized': True, 'algorithm_id': algorithm_id, 'config': config} + resp = self._session.put(url, json=payload) + return _get_response_data(resp) + + def launch_model_job(self, project_id: int, group_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups/{group_id}:launch' + resp = self._session.post(url, json={'comment': 'created by automated scheduler'}) + return _get_response_data(resp) + + def get_model_jobs(self, project_id: int, keyword: Optional[str] = None): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_jobs' + resp = self._session.get(url, json={'keyword': keyword}) + return _get_response_data(resp) + + def get_model_job(self, project_id: int, model_job_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_jobs/{model_job_id}' + resp = self._session.get(url) + return _get_response_data(resp) + + def post_model_jobs(self, project_id: int, name: str, model_job_type: ModelJobType, dataset_id: int, + algorithm_id: int, model_id: int, config: Dict): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_jobs' + payload = { + 'name': name, + 'model_job_type': model_job_type.name, + 'dataset_id': dataset_id, + 'algorithm_type': 'NN_VERTICAL', + 'algorithm_id': algorithm_id, + 'model_id': model_id, + 'config': config + } + resp = self._session.post(url, json=payload) + return _get_response_data(resp) + + def put_model_job(self, project_id: int, model_job_id: int, algorithm_id: int, config: Dict): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_jobs/{model_job_id}' + payload = {'algorithm_id': algorithm_id, 'config': config} + resp = self._session.put(url, json=payload) + return _get_response_data(resp) + + def get_peer_model_job_group(self, project_id: int, group_id: int, participant_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups/{group_id}/peers/{participant_id}' + return _get_response_data(self._session.get(url)) + + def patch_peer_model_job_group(self, project_id: int, group_id: int, participant_id: int, config: Dict): + url = f'{self._domain_name}/api/v2/projects/{project_id}/model_job_groups/{group_id}/peers/{participant_id}' + return _get_response_data(self._session.patch(url, json={'config': config})) + + def get_models(self, project_id: int, keyword: str): + url = f'{self._domain_name}/api/v2/projects/{project_id}/models?keyword={keyword}' + return _get_response_data(self._session.get(url)) + + def get_algorithms(self, project_id: int): + url = f'{self._domain_name}/api/v2/projects/{project_id}/algorithms' + return _get_response_data(self._session.get(url)) + + +class WebconsoleService: + + def __init__(self, client: WebconsoleClient): + self.client = client + + def get_project_by_name(self, name: str) -> Optional[Dict]: + code, content = self.client.get_projects() + assert code == HTTPStatus.OK + for project in content['data']: + if project['name'] == name: + return project + return None + + def get_project_id_by_name(self, name: str) -> int: + project = self.get_project_by_name(name=name) + assert project is not None + return project['id'] + + def get_template_by_name(self, name: str) -> Optional[Dict]: + code, content = self.client.get_templates() + assert code == HTTPStatus.OK + for template in content['data']: + if template['name'] == name: + code, content = 
self.client.get_template(template['id']) + assert code == HTTPStatus.OK + return content['data'] + return None + + def get_model_job_group_by_name(self, project_id: int, name: str) -> Optional[Dict]: + code, content = self.client.get_model_job_groups(project_id=project_id) + assert code == HTTPStatus.OK + for group in content['data']: + if group['name'] == name: + group_id = group['id'] + code, content = self.client.get_model_job_group(project_id=project_id, group_id=group_id) + if code == HTTPStatus.OK: + return content['data'] + return None + + def get_model_job_by_name(self, project_id: int, name: str) -> Optional[Dict]: + code, content = self.client.get_model_jobs(project_id=project_id, keyword=name) + assert code == HTTPStatus.OK + for job in content['data']: + if job['name'] == name: + return job + return None + + def get_latest_model_job(self, project_id: int, group_id: int) -> Optional[Dict]: + code, content = self.client.get_model_job_group(project_id=project_id, group_id=group_id) + assert code == HTTPStatus.OK + if len(content['data']['model_jobs']) == 0: + return None + model_job_id = content['data']['model_jobs'][0]['id'] + code, content = self.client.get_model_job(project_id=project_id, model_job_id=model_job_id) + if code != HTTPStatus.OK: + raise Exception(f'get job {model_job_id} failed with details {content}') + return content['data'] + + def get_model_by_name(self, project_id: int, name: str) -> Optional[Dict]: + code, content = self.client.get_models(project_id=project_id, keyword=name) + assert code == HTTPStatus.OK + for model in content['data']: + if model['name'] == name: + return model + return None + + def get_data_source_by_name(self, name: str, project_id: int) -> Optional[Dict]: + code, content = self.client.get_data_sources(project_id=project_id) + assert code == HTTPStatus.OK + for data_source in content['data']: + if data_source['name'] == name: + return data_source + return None + + def get_dataset_by_name(self, name: str, project_id: int) -> Optional[Dict]: + code, content = self.client.get_datasets(project_id=project_id, keyword=name) + assert code == HTTPStatus.OK + for dataset in content['data']: + if dataset['name'] == name: + return dataset + return None + + def get_domain_name(self) -> str: + code, content = self.client.get_system_info() + assert code == HTTPStatus.OK + return content['data']['domain_name'] + + def get_participant_domain_name(self, name) -> str: + project = self.get_project_by_name(name=name) + assert project is not None + return project['participants'][0]['domain_name'] + + def get_participant_dataset_by_name(self, name: str, project_id: int, kind: DatasetKind) -> Optional[Dict]: + code, content = self.client.get_participant_datasets(project_id=project_id, kind=kind) + assert code == HTTPStatus.OK + for participant_dataset in content['data']: + if participant_dataset['name'] == name: + return participant_dataset + return None + + def check_dataset_ready(self, name: str, project_id: int, log_interval: int = 50) -> Dict: + last_log_time = 0 + while True: + dataset = self.get_dataset_by_name(name=name, project_id=project_id) + if dataset is not None and dataset['state_frontend'] == 'SUCCEEDED' and dataset['is_published']: + return dataset + current_time = time.time() + if current_time - last_log_time > log_interval: + logging.info(f'[check_dataset_ready]: still waiting for dataset {name} ready') + last_log_time = current_time + sleep(60) + + def check_participant_dataset_ready(self, + name: str, + project_id: int, + kind: DatasetKind, 
+                                            log_interval: int = 50) -> Dict:
+        last_log_time = 0
+        while True:
+            participant_dataset = self.get_participant_dataset_by_name(name=name, project_id=project_id, kind=kind)
+            if participant_dataset is not None:
+                return participant_dataset
+            current_time = time.time()
+            if current_time - last_log_time > log_interval:
+                logging.info(f'[check_participant_dataset_ready]: still waiting for participant dataset {name} ready')
+                last_log_time = current_time
+            sleep(60)
+
+    def get_algorithm_by_path(self, project_id: int, path: str):
+        code, content = self.client.get_algorithms(project_id=project_id)
+        assert code == HTTPStatus.OK
+        for algorithm in content['data']:
+            if algorithm['path'] == path:
+                return algorithm
+        return None
+
+    def get_groups_by_prefix(self, project_name: str, prefix: str) -> List[Dict]:
+        project_id = self.get_project_id_by_name(name=project_name)
+        code, content = self.client.get_model_job_groups(project_id=project_id)
+        assert code == HTTPStatus.OK
+        return [group for group in content['data'] if group['name'].startswith(prefix)]
diff --git a/sgx_network_simulation/Dockerfile b/sgx_network_simulation/Dockerfile
new file mode 100644
index 000000000..224d5f11e
--- /dev/null
+++ b/sgx_network_simulation/Dockerfile
@@ -0,0 +1,30 @@
+FROM golang:1.16 AS go
+
+RUN apt-get update && \
+    apt-get install -y make g++ libgmp-dev libglib2.0-dev libssl-dev && \
+    apt-get install -y protobuf-compiler && \
+    apt-get clean
+
+WORKDIR /app
+COPY tools/tcp_grpc_proxy ./
+RUN make build
+
+FROM python:3.6.8
+
+RUN echo "deb http://archive.debian.org/debian stretch main contrib non-free" > /etc/apt/sources.list
+
+RUN apt-get update && \
+    apt-get install -y curl vim make nginx && \
+    apt-get clean
+
+# upgrade nginx; the deb and deb-src entries must be on separate lines
+RUN printf "deb http://nginx.org/packages/mainline/debian/ stretch nginx\ndeb-src http://nginx.org/packages/mainline/debian/ stretch nginx\n" > /etc/apt/sources.list.d/nginx.list
+RUN wget -qO - https://nginx.org/keys/nginx_signing.key | apt-key add -
+RUN apt update && \
+    apt remove nginx-common -y && \
+    apt install -y nginx
+
+COPY sgx_network_simulation/ /app/
+WORKDIR /app
+COPY --from=go /app/tcp2grpc ./
+COPY --from=go /app/grpc2tcp ./
diff --git a/sgx_network_simulation/nginx/sidecar.conf b/sgx_network_simulation/nginx/sidecar.conf
new file mode 100644
index 000000000..2586392d2
--- /dev/null
+++ b/sgx_network_simulation/nginx/sidecar.conf
@@ -0,0 +1,22 @@
+# Forwards all traffic to nginx controller
+server {
+    listen 32102 http2;
+
+    # No limits
+    client_max_body_size 0;
+    grpc_read_timeout 3600s;
+    grpc_send_timeout 3600s;
+    client_body_timeout 3600s;
+    # grpc_socket_keepalive is recommended but not required
+    # grpc_socket_keepalive is supported after nginx 1.15.6
+    grpc_socket_keepalive on;
+
+    grpc_set_header Authority fl-bytedance-client-auth.com;
+    grpc_set_header Host fl-bytedance-client-auth.com;
+    grpc_set_header X-Host sgx-test.fl-cmcc.com;
+
+    location / {
+        # Redirects to nginx controller
+        grpc_pass grpc://fedlearner-stack-ingress-nginx-controller.default.svc:80;
+    }
+}
diff --git a/sgx_network_simulation/sidecar.sh b/sgx_network_simulation/sidecar.sh
new file mode 100644
index 000000000..5933fc361
--- /dev/null
+++ b/sgx_network_simulation/sidecar.sh
@@ -0,0 +1,74 @@
+#!/bin/bash
+set -ex
+
+LISTEN_PORT_PATH="/pod-data/listen_port"
+while [ ! -s "$LISTEN_PORT_PATH" ]; do
+  echo "wait for $LISTEN_PORT_PATH ..."
+  sleep 1
+done
+WORKER_LISTEN_PORT=$(cat "$LISTEN_PORT_PATH")
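+
+# How the pieces below fit together (descriptive note):
+#   inbound:  peer gRPC -> grpc2tcp (listens on $GRPC_SERVER_PORT)
+#             -> worker TCP port ($WORKER_LISTEN_PORT)
+#   outbound: worker TCP -> tcp2grpc (listens on $PROXY_LOCAL_PORT)
+#             -> local nginx ($TARGET_GRPC_PORT) -> ingress controller -> peer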
-s "$PROXY_LOCAL_PORT_PATH" ]; do + echo "wait for $PROXY_LOCAL_PORT_PATH ..." + sleep 1 +done +PROXY_LOCAL_PORT=$(cat "$PROXY_LOCAL_PORT_PATH") + +GRPC_SERVER_PORT=32001 +if [ -n "$PORT0" ]; then + GRPC_SERVER_PORT=$PORT0 +fi + +TARGET_GRPC_PORT=32102 +if [ -n "$PORT1" ]; then + TARGET_GRPC_PORT=$PORT1 +fi + +echo "# Forwards all traffic to nginx controller +server { + listen ${TARGET_GRPC_PORT} http2; + + # No limits + client_max_body_size 0; + grpc_read_timeout 3600s; + grpc_send_timeout 3600s; + client_body_timeout 3600s; + # grpc_socket_keepalive is recommended but not required + # grpc_socket_keepalive is supported after nginx 1.15.6 + grpc_socket_keepalive on; + + grpc_set_header Authority ${EGRESS_HOST}; + grpc_set_header Host ${EGRESS_HOST}; + grpc_set_header X-Host ${SERVICE_ID}.${EGRESS_DOMAIN}; + + location / { + # Redirects to nginx controller + grpc_pass grpc://fedlearner-stack-ingress-nginx-controller.default.svc:80; + } +} +" > nginx/sidecar.conf + +rm -rf /etc/nginx/conf.d/* +cp nginx/sidecar.conf /etc/nginx/conf.d/ +service nginx restart + +# Server sidecar: grpc to tcp, 5001 is the server port of main container +echo "Starting server sidecar" +./grpc2tcp --grpc_server_port=$GRPC_SERVER_PORT \ + --target_tcp_address="localhost:$WORKER_LISTEN_PORT" & + +echo "Starting client sidecar" +./tcp2grpc --tcp_server_port="$PROXY_LOCAL_PORT" \ + --target_grpc_address="localhost:$TARGET_GRPC_PORT" & + +echo "===========Sidecar started!!=============" + +while true +do + if [[ -f "/pod-data/main-terminated" ]] + then + exit 0 + fi + sleep 5 +done diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel new file mode 100644 index 000000000..66c16f2d6 --- /dev/null +++ b/tools/BUILD.bazel @@ -0,0 +1,4 @@ +package_group( + name = "tools_package", + packages = ["//tools/..."], +) diff --git a/tools/tcp_grpc_proxy/Dockerfile b/tools/tcp_grpc_proxy/Dockerfile new file mode 100644 index 000000000..5e95dab47 --- /dev/null +++ b/tools/tcp_grpc_proxy/Dockerfile @@ -0,0 +1,26 @@ +FROM golang:1.16 + +RUN apt-get update && \ + apt install -y curl git vim && \ + apt-get install -y make nginx g++ libgmp-dev libglib2.0-dev libssl-dev && \ + apt-get install -y protobuf-compiler && \ + apt-get clean + +WORKDIR /app +COPY . /app/tcp_grpc_proxy + +# Copies PSI lib +RUN git clone --recursive git://github.com/encryptogroup/PSI + +WORKDIR /app/PSI +RUN make + +WORKDIR /app/tcp_grpc_proxy +RUN make build + +# upgrade nginx +RUN echo "deb http://nginx.org/packages/mainline/debian/ stretch nginx deb-src http://nginx.org/packages/mainline/debian/ stretch nginx" > /etc/apt/sources.list.d/nginx.list +RUN wget -qO - https://nginx.org/keys/nginx_signing.key | apt-key add - +RUN apt update && \ + apt remove nginx-common -y && \ + apt install nginx diff --git a/tools/tcp_grpc_proxy/Makefile b/tools/tcp_grpc_proxy/Makefile new file mode 100644 index 000000000..67e1889f9 --- /dev/null +++ b/tools/tcp_grpc_proxy/Makefile @@ -0,0 +1,13 @@ +install: + go get tcp_grpc_proxy + go mod download + +protobuf: install + go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26 + go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1 + PATH="${PATH}:$(shell go env GOPATH)/bin" \ + protoc -I=proto --go_out=. --go-grpc_out=. 
proto/*.proto + +build: protobuf + go build -o tcp2grpc cmd/tcp2grpc/main.go + go build -o grpc2tcp cmd/grpc2tcp/main.go diff --git a/tools/tcp_grpc_proxy/cmd/grpc2tcp/BUILD.bazel b/tools/tcp_grpc_proxy/cmd/grpc2tcp/BUILD.bazel new file mode 100644 index 000000000..06fc581c5 --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/grpc2tcp/BUILD.bazel @@ -0,0 +1,15 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "grpc2tcp_lib", + srcs = ["main.go"], + importpath = "fedlearner.net/tools/tcp_grpc_proxy/cmd/grpc2tcp", + visibility = ["//tools:tools_package"], + deps = ["//tools/tcp_grpc_proxy/pkg/proxy"], +) + +go_binary( + name = "grpc2tcp", + embed = [":grpc2tcp_lib"], + visibility = ["//tools:tools_package"], +) diff --git a/tools/tcp_grpc_proxy/cmd/grpc2tcp/main.go b/tools/tcp_grpc_proxy/cmd/grpc2tcp/main.go new file mode 100644 index 000000000..2b04343bb --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/grpc2tcp/main.go @@ -0,0 +1,19 @@ +package main + +import ( + "flag" + "fmt" + "tcp_grpc_proxy/proxy" +) + +func main() { + var grpcServerPort int + var targetTCPAddress string + flag.IntVar(&grpcServerPort, "grpc_server_port", 7766, "gRPC server port") + flag.StringVar(&targetTCPAddress, "target_tcp_address", "127.0.0.1:17766", "The target TCP server") + flag.Parse() + grpcServerAddress := fmt.Sprintf("0.0.0.0:%d", grpcServerPort) + + grpc2tcpServer := proxy.NewGrpc2TCPServer(grpcServerAddress, targetTCPAddress) + grpc2tcpServer.Run() +} diff --git a/tools/tcp_grpc_proxy/cmd/grpcclient/main.go b/tools/tcp_grpc_proxy/cmd/grpcclient/main.go new file mode 100644 index 000000000..670a89f02 --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/grpcclient/main.go @@ -0,0 +1,51 @@ +package main + +import ( + "bytes" + "context" + "os" + "time" + + "tcp_grpc_proxy/proto" + + "github.com/sirupsen/logrus" + "google.golang.org/grpc" +) + +func main() { + // Set up a connection to the server. 
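+	// Descriptive note: the tunnel is proto.TunnelService, a bidirectional
+	// stream of proto.Chunk messages whose Data field carries raw TCP bytes;
+	// 7766 matches grpc2tcp's default --grpc_server_port.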
+ grpcServer := "127.0.0.1:7766" + conn, err := grpc.Dial(grpcServer, grpc.WithInsecure()) + if err != nil { + logrus.Fatalf("did not connect: %v", err) + } + defer conn.Close() + tsc := proto.NewTunnelServiceClient(conn) + + tc, err := tsc.Tunnel(context.Background()) + if err != nil { + logrus.Fatalln(err) + } + + sendPacket := func(data []byte) error { + return tc.Send(&proto.Chunk{Data: data}) + } + + go func() { + for { + chunk, err := tc.Recv() + if err != nil { + logrus.Println("Recv terminated:", err) + os.Exit(0) + } + logrus.Println(string(chunk.Data)) + } + + }() + + for { + time.Sleep(time.Duration(2) * time.Second) + buf := bytes.NewBufferString("************Hello World**********").Bytes() + sendPacket(buf) + } +} diff --git a/tools/tcp_grpc_proxy/cmd/grpcserver/main.go b/tools/tcp_grpc_proxy/cmd/grpcserver/main.go new file mode 100644 index 000000000..b17e4432f --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/grpcserver/main.go @@ -0,0 +1,11 @@ +package main + +import ( + "tcp_grpc_proxy/grpc2tcp" +) + +func main() { + grpcServerAddress := "0.0.0.0:7766" + targetTCPAddress := "127.0.0.1:17766" + grpc2tcp.RunServer(grpcServerAddress, targetTCPAddress) +} diff --git a/tools/tcp_grpc_proxy/cmd/tcp2grpc/BUILD.bazel b/tools/tcp_grpc_proxy/cmd/tcp2grpc/BUILD.bazel new file mode 100644 index 000000000..130eb9169 --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/tcp2grpc/BUILD.bazel @@ -0,0 +1,15 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "tcp2grpc_lib", + srcs = ["main.go"], + importpath = "fedlearner.net/tools/tcp_grpc_proxy/cmd/tcp2grpc", + visibility = ["//tools:tools_package"], + deps = ["//tools/tcp_grpc_proxy/pkg/proxy"], +) + +go_binary( + name = "tcp2grpc", + embed = [":tcp2grpc_lib"], + visibility = ["//tools:tools_package"], +) diff --git a/tools/tcp_grpc_proxy/cmd/tcp2grpc/main.go b/tools/tcp_grpc_proxy/cmd/tcp2grpc/main.go new file mode 100644 index 000000000..fee88a884 --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/tcp2grpc/main.go @@ -0,0 +1,57 @@ +package main + +import ( + "flag" + "fmt" + "io" + "net" + "os" + "tcp_grpc_proxy/proxy" +) + +func test() { + client, err := net.Dial("tcp", "127.0.0.1:17767") + if err != nil { + fmt.Println("err:", err) + return + } + defer client.Close() + + go func() { + input := make([]byte, 1024) + for { + n, err := os.Stdin.Read(input) + if err != nil { + fmt.Println("input err:", err) + continue + } + client.Write([]byte(input[:n])) + } + }() + + buf := make([]byte, 1024) + for { + n, err := client.Read(buf) + if err != nil { + if err == io.EOF { + return + } + fmt.Println("read err:", err) + continue + } + fmt.Println(string(buf[:n])) + + } +} + +func main() { + var tcpServerPort int + var targetGrpcAddress string + flag.IntVar(&tcpServerPort, "tcp_server_port", 17767, "TCP server port") + flag.StringVar(&targetGrpcAddress, "target_grpc_address", "127.0.0.1:7766", "The target gRPC server") + flag.Parse() + tcpServerAddress := fmt.Sprintf("0.0.0.0:%d", tcpServerPort) + + tcp2grpcServer := proxy.NewTCP2GrpcServer(tcpServerAddress, targetGrpcAddress) + tcp2grpcServer.Run() +} diff --git a/tools/tcp_grpc_proxy/cmd/tcpclient/main.go b/tools/tcp_grpc_proxy/cmd/tcpclient/main.go new file mode 100644 index 000000000..7e0c97467 --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/tcpclient/main.go @@ -0,0 +1,38 @@ +package main + +import ( + "flag" + "net" + "time" + + "github.com/sirupsen/logrus" +) + +func main() { + var tcpServerAddress string + flag.StringVar(&tcpServerAddress, "tcp_server_address", 
"127.0.0.1:17767", + "TCP server address which the client connects to.") + + conn, err := net.Dial("tcp", tcpServerAddress) + if err != nil { + logrus.Fatalf("Dail to tcp target %s error: %v", tcpServerAddress, err) + } + logrus.Infoln("Connected to", tcpServerAddress) + // Makes sure the connection gets closed + defer conn.Close() + defer logrus.Infoln("Connection closed to ", tcpServerAddress) + + for { + conn.Write([]byte("hello world")) + logrus.Infof("Sent 'hello world' to server %s", tcpServerAddress) + + tcpData := make([]byte, 64*1024) + _, err := conn.Read(tcpData) + if err != nil { + logrus.Fatalln("Read from tcp error: ", err) + } + logrus.Infof("Received '%s' from server", string(tcpData)) + + time.Sleep(time.Duration(5) * time.Second) + } +} diff --git a/tools/tcp_grpc_proxy/cmd/tcpserver/main.go b/tools/tcp_grpc_proxy/cmd/tcpserver/main.go new file mode 100644 index 000000000..592c7b6bd --- /dev/null +++ b/tools/tcp_grpc_proxy/cmd/tcpserver/main.go @@ -0,0 +1,46 @@ +package main + +import ( + "flag" + "fmt" + "net" + + "github.com/sirupsen/logrus" +) + +func handleTCPConn(conn net.Conn) { + for { + tcpData := make([]byte, 64*1024) + bytesRead, err := conn.Read(tcpData) + if err != nil { + logrus.Fatalln("Read from tcp error: ", err) + } + logrus.Infof("TCP server got %d bytes", bytesRead) + conn.Write([]byte("This is a string from TCP server")) + } +} + +func main() { + var tcpServerPort int + flag.IntVar(&tcpServerPort, "tcp_server_port", 17766, "TCP server port") + flag.Parse() + tcpServerAddress := fmt.Sprintf("0.0.0.0:%d", tcpServerPort) + + listener, err := net.Listen("tcp", tcpServerAddress) + if err != nil { + logrus.Fatalln("Listen TCP error: ", err) + } + defer listener.Close() + logrus.Infoln("Run TCPServer at ", tcpServerAddress) + + for { + conn, err := listener.Accept() + if err != nil { + logrus.Errorln("TCP listener error:", err) + continue + } + + logrus.Infoln("Got tcp connection") + go handleTCPConn(conn) + } +} diff --git a/tools/tcp_grpc_proxy/go.mod b/tools/tcp_grpc_proxy/go.mod new file mode 100644 index 000000000..7507c284b --- /dev/null +++ b/tools/tcp_grpc_proxy/go.mod @@ -0,0 +1,12 @@ +module tcp_grpc_proxy + +go 1.16 + +require ( + github.com/golang/protobuf v1.5.2 // indirect + github.com/sirupsen/logrus v1.8.1 + golang.org/x/net v0.0.0-20210525063256-abc453219eb5 // indirect + google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98 // indirect + google.golang.org/grpc v1.38.0 + google.golang.org/protobuf v1.26.0 +) diff --git a/tools/tcp_grpc_proxy/go.sum b/tools/tcp_grpc_proxy/go.sum new file mode 100644 index 000000000..a372202d1 --- /dev/null +++ b/tools/tcp_grpc_proxy/go.sum @@ -0,0 +1,106 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= 
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod 
h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5 h1:wjuX4b5yYQnEQHzd+CBcrcC6OVR2J1CN6mUy0oSxIPo= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da h1:b3NXsE2LusjYGGjL5bxEVZZORm/YEFFrWFjR8eFrw/c= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto 
v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98 h1:LCO0fg4kb6WwkXQXRQQgUYsFeFb5taTX5WAx5O/Vt28= +google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.38.0 h1:/9BgsAsa5nWe26HqOlvlgJnqBuktYOLCgjCPqsa56W0= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/tools/tcp_grpc_proxy/pkg/proto/BUILD.bazel b/tools/tcp_grpc_proxy/pkg/proto/BUILD.bazel new file mode 100644 index 000000000..727149066 --- /dev/null +++ b/tools/tcp_grpc_proxy/pkg/proto/BUILD.bazel @@ -0,0 +1,28 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +# gazelle:go_generate_proto true + +proto_library( + name = "proto_proto", + srcs = ["tunnel.proto"], + visibility = ["//tools:tools_package"], +) + +# keep +go_proto_library( + name = "proto_go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "fedlearner.net/tools/tcp_grpc_proxy/pkg/proto", + proto = ":proto_proto", + visibility = ["//tools:tools_package"], +) + +go_library( + name = "proto", + srcs = ["proto.go"], + embed = [":proto_go_proto"], # keep + importpath = 
"fedlearner.net/tools/tcp_grpc_proxy/pkg/proto", + visibility = ["//tools:tools_package"], +) diff --git a/tools/tcp_grpc_proxy/pkg/proto/proto.go b/tools/tcp_grpc_proxy/pkg/proto/proto.go new file mode 100644 index 000000000..92256db4b --- /dev/null +++ b/tools/tcp_grpc_proxy/pkg/proto/proto.go @@ -0,0 +1 @@ +package proto diff --git a/tools/tcp_grpc_proxy/pkg/proto/tunnel.proto b/tools/tcp_grpc_proxy/pkg/proto/tunnel.proto new file mode 100644 index 000000000..ce5987254 --- /dev/null +++ b/tools/tcp_grpc_proxy/pkg/proto/tunnel.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package proto; +option go_package = "fedlearner.net/tools/tcp_grpc_proxy/pkg/proto"; + +service TunnelService { + rpc Tunnel (stream Chunk) returns (stream Chunk); +} + +message Chunk { + bytes data = 1; +} diff --git a/tools/tcp_grpc_proxy/pkg/proxy/BUILD.bazel b/tools/tcp_grpc_proxy/pkg/proxy/BUILD.bazel new file mode 100644 index 000000000..7af01f33e --- /dev/null +++ b/tools/tcp_grpc_proxy/pkg/proxy/BUILD.bazel @@ -0,0 +1,32 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "proxy", + srcs = [ + "grpc2tcp.go", + "tcp2grpc.go", + ], + importpath = "fedlearner.net/tools/tcp_grpc_proxy/pkg/proxy", + visibility = ["//tools:tools_package"], + deps = [ + "//tools/tcp_grpc_proxy/pkg/proto", + "@com_github_sirupsen_logrus//:logrus", + "@org_golang_google_grpc//:go_default_library", + ], +) + +go_test( + name = "proxy_test", + srcs = [ + "grpc2tcp_test.go", + "tcp2grpc_test.go", + ], + embed = [":proxy"], + visibility = ["//tools:tools_package"], + deps = [ + "//tools/tcp_grpc_proxy/pkg/proto", + "@com_github_sirupsen_logrus//:logrus", + "@com_github_stretchr_testify//assert", + "@org_golang_google_grpc//:go_default_library", + ], +) diff --git a/tools/tcp_grpc_proxy/pkg/proxy/grpc2tcp.go b/tools/tcp_grpc_proxy/pkg/proxy/grpc2tcp.go new file mode 100644 index 000000000..b9cb5c452 --- /dev/null +++ b/tools/tcp_grpc_proxy/pkg/proxy/grpc2tcp.go @@ -0,0 +1,134 @@ +package proxy + +import ( + "fmt" + "io" + "net" + + "fedlearner.net/tools/tcp_grpc_proxy/pkg/proto" + "github.com/sirupsen/logrus" + "google.golang.org/grpc" +) + +// Grpc2TcpServer A server to proxy grpc traffic to TCP +type Grpc2TcpServer struct { + proto.UnimplementedTunnelServiceServer + grpcServerAddress string + targetTcpAddress string +} + +// Tunnel the implementation of gRPC Tunnel service +func (s *Grpc2TcpServer) Tunnel(stream proto.TunnelService_TunnelServer) error { + tcpConnection, err := net.Dial("tcp", s.targetTcpAddress) + if err != nil { + logrus.Errorf("[GRPC2TCP] Dail to tcp target %s error: %v", s.targetTcpAddress, err) + return err + } + contextLogger := logrus.WithFields(logrus.Fields{ + "prefix": "[GRPC2TCP]", + "tcp_client_addr": tcpConnection.LocalAddr().String(), + }) + contextLogger.Infoln("Connected to", s.targetTcpAddress) + // Makes sure the connection gets closed + defer tcpConnection.Close() + defer contextLogger.Infoln("Connection closed to", s.targetTcpAddress) + + errChan := make(chan error) + + // Gets data from gRPC client and proxy to remote TCP server + go func() { + tcpSentBytes := 0 + grpcReceivedBytes := 0 + defer func() { + contextLogger.Infof("gRPC received %d bytes, TCP sent %d byte", grpcReceivedBytes, tcpSentBytes) + }() + + for { + chunk, err := stream.Recv() + if err == io.EOF { + contextLogger.Infoln("gRpc client EOF") + return + } + if err != nil { + errChan <- fmt.Errorf("error while receiving gRPC data: %v", err) + return + } + data := chunk.Data + grpcReceivedBytes += 
len(data) + + contextLogger.Debugln("Sending %d bytes to tcp server", len(data)) + _, err = tcpConnection.Write(data) + if err != nil { + errChan <- fmt.Errorf("error while sending TCP data: %v", err) + return + } else { + tcpSentBytes += len(data) + } + } + }() + + // Gets data from remote TCP server and proxy to gRPC client + go func() { + tcpReceivedBytes := 0 + grpcSentBytes := 0 + defer func() { + contextLogger.Infof("Tcp received %d bytes, gRPC sent %d bytes", tcpReceivedBytes, grpcSentBytes) + } () + + buff := make([]byte, 64*1024) + for { + bytesRead, err := tcpConnection.Read(buff) + if err == io.EOF { + contextLogger.Infoln("Remote TCP connection closed") + errChan <- nil + return + } + if err != nil { + errChan <- fmt.Errorf("error while receiving TCP data: %v", err) + return + } + tcpReceivedBytes += bytesRead + + contextLogger.Debugf("Sending %d bytes to gRPC client\n", bytesRead) + err = stream.Send(&proto.Chunk{Data: buff[0:bytesRead]}) + if err != nil { + errChan <- fmt.Errorf("error while sending gRPC data: %v", err) + return + } else { + grpcSentBytes += bytesRead + } + } + }() + + // Blocking read + returnedError := <-errChan + if returnedError != nil { + contextLogger.Errorln(returnedError) + } + return returnedError +} + +// NewGrpc2TcpServer constructs a Grpc2TCP server +func NewGrpc2TcpServer(grpcServerAddress, targetTcpAddress string) *Grpc2TcpServer { + return &Grpc2TcpServer{ + grpcServerAddress: grpcServerAddress, + targetTcpAddress: targetTcpAddress, + } +} + +// Run starts the Grpc2TCP server +func (s *Grpc2TcpServer) Run() { + listener, err := net.Listen("tcp", s.grpcServerAddress) + if err != nil { + logrus.Fatalln("Failed to listen: ", err) + } + defer listener.Close() + + // Starts a gRPC server and register services + grpcServer := grpc.NewServer() + proto.RegisterTunnelServiceServer(grpcServer, s) + logrus.Infof("Starting gRPC server at: %s, target to %s", s.grpcServerAddress, s.targetTcpAddress) + if err := grpcServer.Serve(listener); err != nil { + logrus.Fatalln("Unable to start gRPC serve:", err) + } +} diff --git a/tools/tcp_grpc_proxy/pkg/proxy/grpc2tcp_test.go b/tools/tcp_grpc_proxy/pkg/proxy/grpc2tcp_test.go new file mode 100644 index 000000000..6166916ee --- /dev/null +++ b/tools/tcp_grpc_proxy/pkg/proxy/grpc2tcp_test.go @@ -0,0 +1,84 @@ +package proxy + +import ( + "context" + "fmt" + "io" + "net" + "testing" + "time" + + "fedlearner.net/tools/tcp_grpc_proxy/pkg/proto" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc" +) + +func runFakeTcpServer(listener net.Listener) { + for { + conn, err := listener.Accept() + if err != nil { + logrus.Infoln("Intended TCP listener error:", err) + return + } + + go func(conn net.Conn) { + defer conn.Close() + for { + request := make([]byte, 64*1024) + bytesRead, err := conn.Read(request) + if err == io.EOF { + logrus.Infoln("[TCP server] Connection finished") + return + } + if err != nil { + logrus.Errorln("[TCP seerver] Error:", err) + return + } + response := fmt.Sprintf("[Proxy] %s", string(request[0:bytesRead])) + conn.Write([]byte(response)) + } + }(conn) + } +} + +func TestGrpc2Tcp(t *testing.T) { + grpcServerAddress := "localhost:13001" + targetTcpAddress := "localhost:13002" + + // Sets up a fake TCP server + listener, err := net.Listen("tcp", targetTcpAddress) + if err != nil { + assert.Fail(t, "Failed to listen") + } + go runFakeTcpServer(listener) + + // Starts the proxy + tcp2grpcServer := NewGrpc2TcpServer(grpcServerAddress, targetTcpAddress) + go 
+	time.Sleep(1 * time.Second)
+
+	// Sends data over gRPC connections and collects responses on a shared channel
+	responseChan := make(chan string)
+	for i := 0; i < 3; i++ {
+		go func(message string) {
+			grpcConn, _ := grpc.Dial(grpcServerAddress, grpc.WithInsecure())
+			grpcClient := proto.NewTunnelServiceClient(grpcConn)
+			stream, _ := grpcClient.Tunnel(context.Background())
+
+			stream.Send(&proto.Chunk{Data: []byte(message)})
+			stream.CloseSend()
+			chunk, _ := stream.Recv()
+			responseChan <- string(chunk.Data)
+			grpcConn.Close()
+		}(fmt.Sprintf("hello %d", i))
+	}
+
+	responses := make([]string, 0)
+	for i := 0; i < 3; i++ {
+		r := <-responseChan
+		responses = append(responses, r)
+	}
+	assert.ElementsMatch(t, responses,
+		[]string{"[Proxy] hello 0", "[Proxy] hello 1", "[Proxy] hello 2"})
+}
diff --git a/tools/tcp_grpc_proxy/pkg/proxy/tcp2grpc.go b/tools/tcp_grpc_proxy/pkg/proxy/tcp2grpc.go
new file mode 100644
index 000000000..ae8baf8c3
--- /dev/null
+++ b/tools/tcp_grpc_proxy/pkg/proxy/tcp2grpc.go
@@ -0,0 +1,146 @@
+package proxy
+
+import (
+	"context"
+	"io"
+	"net"
+	"sync"
+
+	"fedlearner.net/tools/tcp_grpc_proxy/pkg/proto"
+	"github.com/sirupsen/logrus"
+	"google.golang.org/grpc"
+)
+
+// Tcp2GrpcServer is a server that proxies TCP traffic to a gRPC server.
+type Tcp2GrpcServer struct {
+	tcpServerAddress  string
+	targetGrpcAddress string
+}
+
+// NewTcp2GrpcServer constructs a Tcp2GrpcServer.
+func NewTcp2GrpcServer(tcpServerAddress, targetGrpcAddress string) *Tcp2GrpcServer {
+	return &Tcp2GrpcServer{
+		tcpServerAddress:  tcpServerAddress,
+		targetGrpcAddress: targetGrpcAddress,
+	}
+}
+
+func handleTcpConnection(tcpConn net.Conn, targetGrpcAddress string) {
+	contextLogger := logrus.WithFields(logrus.Fields{
+		"prefix":          "[TCP2GRPC]",
+		"tcp_client_addr": tcpConn.RemoteAddr().String(),
+	})
+
+	contextLogger.Infoln("Handle tcp connection, target to:", targetGrpcAddress)
+	defer tcpConn.Close()
+
+	grpcConn, err := grpc.Dial(targetGrpcAddress, grpc.WithInsecure())
+	if err != nil {
+		contextLogger.Errorf("Failed to connect to grpc %s: %v\n", targetGrpcAddress, err)
+		return
+	}
+	defer grpcConn.Close()
+
+	grpcClient := proto.NewTunnelServiceClient(grpcConn)
+	stream, err := grpcClient.Tunnel(context.Background())
+	if err != nil {
+		contextLogger.Errorln("Error of tunnel service:", err)
+		return
+	}
+
+	var wg sync.WaitGroup
+
+	// Gets data from the remote gRPC server and proxies it to the TCP client
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+
+		tcpSentBytes := 0
+		grpcReceivedBytes := 0
+		defer func() {
+			contextLogger.Infof("gRPC received %d bytes, TCP sent %d bytes", grpcReceivedBytes, tcpSentBytes)
+		}()
+
+		for {
+			chunk, err := stream.Recv()
+			if err == io.EOF {
+				contextLogger.Infoln("gRPC server EOF")
+				tcpConn.Close()
+				return
+			}
+			if err != nil {
+				contextLogger.Errorf("Recv from grpc target %s terminated: %v", targetGrpcAddress, err)
+				tcpConn.Close()
+				return
+			}
+			grpcReceivedBytes += len(chunk.Data)
+
+			contextLogger.Debugf("Sending %d bytes to TCP client", len(chunk.Data))
+			_, err = tcpConn.Write(chunk.Data)
+			if err != nil {
+				contextLogger.Errorln("Failed to send data to TCP client:", err)
+				return
+			}
+			tcpSentBytes += len(chunk.Data)
+		}
+	}()
+
+	// Gets data from the TCP client and proxies it to the remote gRPC server
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+
+		tcpReceivedBytes := 0
+		grpcSentBytes := 0
+		defer func() {
+			contextLogger.Infof("TCP received %d bytes, gRPC sent %d bytes", tcpReceivedBytes, grpcSentBytes)
+		}()
+
+		tcpData := make([]byte, 64*1024)
+		for {
+			bytesRead, err := tcpConn.Read(tcpData)
+			if err == io.EOF {
+				contextLogger.Infoln("Connection finished")
+				stream.CloseSend()
+				return
+			}
+			if err != nil {
+				contextLogger.Errorln("Read from tcp error:", err)
+				stream.CloseSend()
+				return
+			}
+			tcpReceivedBytes += bytesRead
+
+			contextLogger.Debugf("Sending %d bytes to gRPC server", bytesRead)
+			err = stream.Send(&proto.Chunk{Data: tcpData[0:bytesRead]})
+			if err != nil {
+				contextLogger.Errorln("Failed to send gRPC data:", err)
+				return
+			}
+			grpcSentBytes += bytesRead
+		}
+	}()
+
+	wg.Wait()
+}
+
+// Run starts the server.
+func (s *Tcp2GrpcServer) Run() {
+	listener, err := net.Listen("tcp", s.tcpServerAddress)
+	if err != nil {
+		logrus.Fatalln("Listen TCP error: ", err)
+	}
+	defer listener.Close()
+	logrus.Infoln("Run TCPServer at ", s.tcpServerAddress)
+
+	for {
+		conn, err := listener.Accept()
+		if err != nil {
+			logrus.Errorln("TCP listener error:", err)
+			continue
+		}
+
+		logrus.Infoln("Got tcp connection")
+		go handleTcpConnection(conn, s.targetGrpcAddress)
+	}
+}
diff --git a/tools/tcp_grpc_proxy/pkg/proxy/tcp2grpc_test.go b/tools/tcp_grpc_proxy/pkg/proxy/tcp2grpc_test.go
new file mode 100644
index 000000000..3eb98bd4c
--- /dev/null
+++ b/tools/tcp_grpc_proxy/pkg/proxy/tcp2grpc_test.go
@@ -0,0 +1,85 @@
+package proxy
+
+import (
+	"fmt"
+	"io"
+	"net"
+	"testing"
+	"time"
+
+	"fedlearner.net/tools/tcp_grpc_proxy/pkg/proto"
+	"github.com/sirupsen/logrus"
+	"github.com/stretchr/testify/assert"
+	"google.golang.org/grpc"
+)
+
+// mockTunnelServer mocks the TunnelService server.
+type mockTunnelServer struct {
+	proto.UnimplementedTunnelServiceServer
+}
+
+func (s *mockTunnelServer) Tunnel(stream proto.TunnelService_TunnelServer) error {
+	for {
+		chunk, err := stream.Recv()
+		if err == io.EOF {
+			logrus.Infoln("[gRPC server] Stream EOF")
+			return nil
+		}
+		if err != nil {
+			logrus.Errorln("[gRPC server] error:", err)
+			return err
+		}
+		response := fmt.Sprintf("[Proxy] %s", string(chunk.Data))
+		if err = stream.Send(&proto.Chunk{Data: []byte(response)}); err != nil {
+			return err
+		}
+	}
+}
+
+func runFakeGrpcServer(listener net.Listener) {
+	// Starts a gRPC server and registers services
+	grpcServer := grpc.NewServer()
+	proto.RegisterTunnelServiceServer(grpcServer, &mockTunnelServer{})
+	if err := grpcServer.Serve(listener); err != nil {
+		logrus.Fatalln("Unable to start gRPC server:", err)
+	}
+}
+
+func TestTcp2Grpc(t *testing.T) {
+	tcpServerAddress := "localhost:12001"
+	targetGrpcAddress := "localhost:12002"
+
+	// Sets up a fake gRPC server
+	listener, err := net.Listen("tcp", targetGrpcAddress)
+	if err != nil {
+		assert.Fail(t, "Failed to listen")
+	}
+	go runFakeGrpcServer(listener)
+
+	// Starts the proxy
+	tcp2grpcServer := NewTcp2GrpcServer(tcpServerAddress, targetGrpcAddress)
+	go tcp2grpcServer.Run()
+	time.Sleep(1 * time.Second)
+
+	// Sends data over TCP connections and collects responses on a shared channel
+	responseChan := make(chan string)
+	for i := 0; i < 3; i++ {
+		go func(message string) {
+			tcpConnection, _ := net.Dial("tcp", tcpServerAddress)
+			tcpConnection.Write([]byte(message))
+			response := make([]byte, 64*1024)
+			bytesRead, _ := tcpConnection.Read(response)
+			responseChan <- string(response[0:bytesRead])
+			tcpConnection.Close()
+		}(fmt.Sprintf("hello %d", i))
+	}
+
+	responses := make([]string, 0)
+	for i := 0; i < 3; i++ {
+		r := <-responseChan
+		responses = append(responses, r)
+	}
+
+	assert.ElementsMatch(t, responses,
+		[]string{"[Proxy] hello 0", "[Proxy] hello 1", "[Proxy] hello 2"})
+}
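The two tests above exercise each proxy against a fake peer in isolation. As a sketch of a natural extension (not part of this change), the same helpers can be chained to check the full TCP -> gRPC -> TCP path end to end; it would live in the same `proxy` test package, `runFakeTcpServer` comes from grpc2tcp_test.go above, and the port numbers are arbitrary:

```go
func TestTcp2Grpc2Tcp(t *testing.T) {
	tcpProxyAddress := "localhost:14001"  // Tcp2GrpcServer listens here
	grpcProxyAddress := "localhost:14002" // Grpc2TcpServer listens here
	tcpEchoAddress := "localhost:14003"   // fake TCP server that prefixes "[Proxy] "

	// Sets up the fake TCP server at the far end of the tunnel
	listener, err := net.Listen("tcp", tcpEchoAddress)
	if err != nil {
		assert.Fail(t, "Failed to listen")
	}
	go runFakeTcpServer(listener)

	// Chains both proxies: TCP client -> Tcp2Grpc -> Grpc2Tcp -> fake TCP server
	go NewGrpc2TcpServer(grpcProxyAddress, tcpEchoAddress).Run()
	go NewTcp2GrpcServer(tcpProxyAddress, grpcProxyAddress).Run()
	time.Sleep(1 * time.Second)

	// Sends one message through the whole chain and reads the echoed response
	conn, err := net.Dial("tcp", tcpProxyAddress)
	if err != nil {
		assert.Fail(t, "Failed to dial")
	}
	defer conn.Close()
	conn.Write([]byte("hello tunnel"))

	response := make([]byte, 64*1024)
	bytesRead, _ := conn.Read(response)
	assert.Equal(t, "[Proxy] hello tunnel", string(response[0:bytesRead]))
}
```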
diff --git a/tools/tcp_grpc_proxy/proto/tunnel.proto b/tools/tcp_grpc_proxy/proto/tunnel.proto
new file mode 100644
index 000000000..22ce1080b
--- /dev/null
+++ b/tools/tcp_grpc_proxy/proto/tunnel.proto
@@ -0,0 +1,12 @@
+syntax = "proto3";
+
+package proto;
+option go_package = "proxy/proto";
+
+service TunnelService {
+  rpc Tunnel (stream Chunk) returns (stream Chunk);
+}
+
+message Chunk {
+  bytes data = 1;
+}
diff --git a/tools/tcp_grpc_proxy/proxy/grpc2tcp.go b/tools/tcp_grpc_proxy/proxy/grpc2tcp.go
new file mode 100644
index 000000000..a9c5f598d
--- /dev/null
+++ b/tools/tcp_grpc_proxy/proxy/grpc2tcp.go
@@ -0,0 +1,106 @@
+package proxy
+
+import (
+	"fmt"
+	"io"
+	"net"
+
+	"tcp_grpc_proxy/proxy/proto"
+
+	"github.com/sirupsen/logrus"
+	"google.golang.org/grpc"
+)
+
+// Grpc2TCPServer is a server that proxies gRPC traffic to a TCP server.
+type Grpc2TCPServer struct {
+	proto.UnimplementedTunnelServiceServer
+	grpcServerAddress string
+	targetTCPAddress  string
+}
+
+// Tunnel implements the gRPC Tunnel service.
+func (s *Grpc2TCPServer) Tunnel(stream proto.TunnelService_TunnelServer) error {
+	tcpConnection, err := net.Dial("tcp", s.targetTCPAddress)
+	if err != nil {
+		logrus.Errorf("Dial to tcp target %s error: %v", s.targetTCPAddress, err)
+		return err
+	}
+	logrus.Infoln("Connected to", s.targetTCPAddress)
+	// Makes sure the connection gets closed
+	defer tcpConnection.Close()
+	defer logrus.Infoln("Connection closed to", s.targetTCPAddress)
+
+	errChan := make(chan error)
+
+	// Gets data from the gRPC client and proxies it to the remote TCP server
+	go func() {
+		for {
+			chunk, err := stream.Recv()
+			if err == io.EOF {
+				return
+			}
+			if err != nil {
+				errChan <- fmt.Errorf("error while receiving gRPC data: %v", err)
+				return
+			}
+
+			data := chunk.Data
+			logrus.Infof("Sending %d bytes to tcp server", len(data))
+			_, err = tcpConnection.Write(data)
+			if err != nil {
+				errChan <- fmt.Errorf("error while sending TCP data: %v", err)
+				return
+			}
+		}
+	}()
+
+	// Gets data from the remote TCP server and proxies it to the gRPC client
+	go func() {
+		buff := make([]byte, 64*1024)
+		for {
+			bytesRead, err := tcpConnection.Read(buff)
+			if err == io.EOF {
+				logrus.Infoln("Remote TCP connection closed")
+				return
+			}
+			if err != nil {
+				errChan <- fmt.Errorf("error while receiving TCP data: %v", err)
+				return
+			}
+
+			logrus.Infof("Sending %d bytes to gRPC client", bytesRead)
+			if err = stream.Send(&proto.Chunk{Data: buff[0:bytesRead]}); err != nil {
+				errChan <- fmt.Errorf("error while sending gRPC data: %v", err)
+				return
+			}
+		}
+	}()
+
+	// Blocking read
+	returnedError := <-errChan
+	return returnedError
+}
+
+// NewGrpc2TCPServer constructs a Grpc2TCPServer.
+func NewGrpc2TCPServer(grpcServerAddress, targetTCPAddress string) *Grpc2TCPServer {
+	return &Grpc2TCPServer{
+		grpcServerAddress: grpcServerAddress,
+		targetTCPAddress:  targetTCPAddress,
+	}
+}
+
+// Run starts the Grpc2TCPServer.
+func (s *Grpc2TCPServer) Run() {
+	listener, err := net.Listen("tcp", s.grpcServerAddress)
+	if err != nil {
+		logrus.Fatalln("Failed to listen:", err)
+	}
+
+	// Starts a gRPC server and registers services
+	grpcServer := grpc.NewServer()
+	proto.RegisterTunnelServiceServer(grpcServer, s)
+	logrus.Infof("Starting gRPC server at: %s, target to %s", s.grpcServerAddress, s.targetTCPAddress)
+	if err := grpcServer.Serve(listener); err != nil {
+		logrus.Errorln("Unable to start gRPC server:", err)
+	}
+}
diff --git a/tools/tcp_grpc_proxy/proxy/proto/tunnel.pb.go b/tools/tcp_grpc_proxy/proxy/proto/tunnel.pb.go
new file mode 100644
index
000000000..79602bc44 --- /dev/null +++ b/tools/tcp_grpc_proxy/proxy/proto/tunnel.pb.go @@ -0,0 +1,147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v3.17.3 +// source: tunnel.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Chunk struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *Chunk) Reset() { + *x = Chunk{} + if protoimpl.UnsafeEnabled { + mi := &file_tunnel_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Chunk) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Chunk) ProtoMessage() {} + +func (x *Chunk) ProtoReflect() protoreflect.Message { + mi := &file_tunnel_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Chunk.ProtoReflect.Descriptor instead. +func (*Chunk) Descriptor() ([]byte, []int) { + return file_tunnel_proto_rawDescGZIP(), []int{0} +} + +func (x *Chunk) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +var File_tunnel_proto protoreflect.FileDescriptor + +var file_tunnel_proto_rawDesc = []byte{ + 0x0a, 0x0c, 0x74, 0x75, 0x6e, 0x6e, 0x65, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x1b, 0x0a, 0x05, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x12, 0x12, + 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, + 0x74, 0x61, 0x32, 0x39, 0x0a, 0x0d, 0x54, 0x75, 0x6e, 0x6e, 0x65, 0x6c, 0x53, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x12, 0x28, 0x0a, 0x06, 0x54, 0x75, 0x6e, 0x6e, 0x65, 0x6c, 0x12, 0x0c, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x1a, 0x0c, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x28, 0x01, 0x30, 0x01, 0x42, 0x0d, 0x5a, + 0x0b, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tunnel_proto_rawDescOnce sync.Once + file_tunnel_proto_rawDescData = file_tunnel_proto_rawDesc +) + +func file_tunnel_proto_rawDescGZIP() []byte { + file_tunnel_proto_rawDescOnce.Do(func() { + file_tunnel_proto_rawDescData = protoimpl.X.CompressGZIP(file_tunnel_proto_rawDescData) + }) + return file_tunnel_proto_rawDescData +} + +var file_tunnel_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_tunnel_proto_goTypes = []interface{}{ + (*Chunk)(nil), // 0: proto.Chunk +} +var file_tunnel_proto_depIdxs = []int32{ + 0, // 0: proto.TunnelService.Tunnel:input_type -> proto.Chunk + 0, // 1: proto.TunnelService.Tunnel:output_type -> proto.Chunk + 1, // [1:2] is the sub-list for method output_type + 0, // [0:1] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension 
extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_tunnel_proto_init() } +func file_tunnel_proto_init() { + if File_tunnel_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_tunnel_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Chunk); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_tunnel_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_tunnel_proto_goTypes, + DependencyIndexes: file_tunnel_proto_depIdxs, + MessageInfos: file_tunnel_proto_msgTypes, + }.Build() + File_tunnel_proto = out.File + file_tunnel_proto_rawDesc = nil + file_tunnel_proto_goTypes = nil + file_tunnel_proto_depIdxs = nil +} diff --git a/tools/tcp_grpc_proxy/proxy/proto/tunnel_grpc.pb.go b/tools/tcp_grpc_proxy/proxy/proto/tunnel_grpc.pb.go new file mode 100644 index 000000000..f60817673 --- /dev/null +++ b/tools/tcp_grpc_proxy/proxy/proto/tunnel_grpc.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package proto + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// TunnelServiceClient is the client API for TunnelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type TunnelServiceClient interface { + Tunnel(ctx context.Context, opts ...grpc.CallOption) (TunnelService_TunnelClient, error) +} + +type tunnelServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewTunnelServiceClient(cc grpc.ClientConnInterface) TunnelServiceClient { + return &tunnelServiceClient{cc} +} + +func (c *tunnelServiceClient) Tunnel(ctx context.Context, opts ...grpc.CallOption) (TunnelService_TunnelClient, error) { + stream, err := c.cc.NewStream(ctx, &TunnelService_ServiceDesc.Streams[0], "/proto.TunnelService/Tunnel", opts...) + if err != nil { + return nil, err + } + x := &tunnelServiceTunnelClient{stream} + return x, nil +} + +type TunnelService_TunnelClient interface { + Send(*Chunk) error + Recv() (*Chunk, error) + grpc.ClientStream +} + +type tunnelServiceTunnelClient struct { + grpc.ClientStream +} + +func (x *tunnelServiceTunnelClient) Send(m *Chunk) error { + return x.ClientStream.SendMsg(m) +} + +func (x *tunnelServiceTunnelClient) Recv() (*Chunk, error) { + m := new(Chunk) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TunnelServiceServer is the server API for TunnelService service. +// All implementations must embed UnimplementedTunnelServiceServer +// for forward compatibility +type TunnelServiceServer interface { + Tunnel(TunnelService_TunnelServer) error + mustEmbedUnimplementedTunnelServiceServer() +} + +// UnimplementedTunnelServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedTunnelServiceServer struct { +} + +func (UnimplementedTunnelServiceServer) Tunnel(TunnelService_TunnelServer) error { + return status.Errorf(codes.Unimplemented, "method Tunnel not implemented") +} +func (UnimplementedTunnelServiceServer) mustEmbedUnimplementedTunnelServiceServer() {} + +// UnsafeTunnelServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to TunnelServiceServer will +// result in compilation errors. +type UnsafeTunnelServiceServer interface { + mustEmbedUnimplementedTunnelServiceServer() +} + +func RegisterTunnelServiceServer(s grpc.ServiceRegistrar, srv TunnelServiceServer) { + s.RegisterService(&TunnelService_ServiceDesc, srv) +} + +func _TunnelService_Tunnel_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TunnelServiceServer).Tunnel(&tunnelServiceTunnelServer{stream}) +} + +type TunnelService_TunnelServer interface { + Send(*Chunk) error + Recv() (*Chunk, error) + grpc.ServerStream +} + +type tunnelServiceTunnelServer struct { + grpc.ServerStream +} + +func (x *tunnelServiceTunnelServer) Send(m *Chunk) error { + return x.ServerStream.SendMsg(m) +} + +func (x *tunnelServiceTunnelServer) Recv() (*Chunk, error) { + m := new(Chunk) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TunnelService_ServiceDesc is the grpc.ServiceDesc for TunnelService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var TunnelService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "proto.TunnelService", + HandlerType: (*TunnelServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Tunnel", + Handler: _TunnelService_Tunnel_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "tunnel.proto", +} diff --git a/tools/tcp_grpc_proxy/proxy/tcp2grpc.go b/tools/tcp_grpc_proxy/proxy/tcp2grpc.go new file mode 100644 index 000000000..63b5586b8 --- /dev/null +++ b/tools/tcp_grpc_proxy/proxy/tcp2grpc.go @@ -0,0 +1,104 @@ +package proxy + +import ( + "context" + "io" + "net" + "tcp_grpc_proxy/proxy/proto" + + "github.com/sirupsen/logrus" + "google.golang.org/grpc" +) + +// TCP2GrpcServer to proxy TCP traffic to gRPC +type TCP2GrpcServer struct { + tcpServerAddress string + targetGrpcAddress string +} + +// NewTCP2GrpcServer constructs a TCP2GrpcServer +func NewTCP2GrpcServer(tcpServerAddress, targetGrpcAddress string) *TCP2GrpcServer { + return &TCP2GrpcServer{ + tcpServerAddress: tcpServerAddress, + targetGrpcAddress: targetGrpcAddress, + } +} + +func handleTCPConn(tcpConn net.Conn, targetGrpcAddress string) { + logrus.Infoln("Handle tcp connection, target to:", targetGrpcAddress) + defer tcpConn.Close() + + grpcConn, err := grpc.Dial(targetGrpcAddress, grpc.WithInsecure()) + if err != nil { + logrus.Errorf("Error during connect to grpc %s: %v", targetGrpcAddress, err) + return + } + defer grpcConn.Close() + + grpcClient := proto.NewTunnelServiceClient(grpcConn) + stream, err := grpcClient.Tunnel(context.Background()) + if err != nil { + logrus.Errorf("Error of tunnel service: %v", err) + return + } + + // Gets data from remote gRPC server and proxy to TCP client + go func() { + for { + chunk, err := stream.Recv() + if err != nil { + logrus.Errorf("Recv from grpc target %s terminated: %v", targetGrpcAddress, err) + return + } + logrus.Infof("Sending %d bytes to TCP client", 
len(chunk.Data)) + tcpConn.Write(chunk.Data) + } + }() + + // Gets data from TCP client and proxy to remote gRPC server + func() { + for { + tcpData := make([]byte, 64*1024) + bytesRead, err := tcpConn.Read(tcpData) + + if err == io.EOF { + logrus.Infoln("Connection finished") + return + } + if err != nil { + logrus.Errorf("Read from tcp error: %v", err) + return + } + logrus.Infof("Sending %d bytes to gRPC server", bytesRead) + if err := stream.Send(&proto.Chunk{Data: tcpData[0:bytesRead]}); err != nil { + logrus.Errorf("Failed to send gRPC data: %v", err) + return + } + } + }() + + // If tcp connection gets closed, then we close the gRPC connection. + stream.CloseSend() + return +} + +// Run Starts the server +func (s *TCP2GrpcServer) Run() { + listener, err := net.Listen("tcp", s.tcpServerAddress) + if err != nil { + logrus.Fatalln("Listen TCP error: ", err) + } + defer listener.Close() + logrus.Infoln("Run TCPServer at ", s.tcpServerAddress) + + for { + conn, err := listener.Accept() + if err != nil { + logrus.Errorln("TCP listener error:", err) + continue + } + + logrus.Infoln("Got tcp connection") + go handleTCPConn(conn, s.targetGrpcAddress) + } +} diff --git a/tools/tcp_grpc_proxy/start_proxy.sh b/tools/tcp_grpc_proxy/start_proxy.sh new file mode 100644 index 000000000..6fb20a114 --- /dev/null +++ b/tools/tcp_grpc_proxy/start_proxy.sh @@ -0,0 +1,60 @@ +#! /bin/bash +set -ex + +# The chain of the traffic: +# TCP client -> out TCP server -> out gRPC server -> Nginx -> +# network -> remote grpc server (Nginx) -> in gRPC server -> in TCP server +OUT_TCP_SERVER_PORT=17767 +OUT_GRPC_SERVER_PORT=17768 +IN_GRPC_SERVER_PORT=17769 +IN_TCP_SERVER_PORT=7766 + +REMOTE_GRPC_SERVER_HOST=1.1.1.1 +REMOTE_GRPC_SERVER_PORT=17771 + +echo " +upstream remote_grpc_server { + server ${REMOTE_GRPC_SERVER_HOST}:${REMOTE_GRPC_SERVER_PORT}; +} + +# Proxies to remote grpc server +server { + listen ${OUT_GRPC_SERVER_PORT} http2; + + # No limits + client_max_body_size 0; + grpc_read_timeout 3600s; + grpc_send_timeout 3600s; + client_body_timeout 3600s; + # grpc_socket_keepalive is recommended but not required + # grpc_socket_keepalive is supported after nginx 1.15.6 + grpc_socket_keepalive on; + location / { + # change grpc to grpcs if ssl is used + grpc_pass grpc://remote_grpc_server; + } +} + +# Listens grpc traffic, this port should be public +server { + listen ${REMOTE_GRPC_SERVER_PORT} http2; + + # No limits + client_max_body_size 0; + grpc_read_timeout 3600s; + grpc_send_timeout 3600s; + client_body_timeout 3600s; + grpc_socket_keepalive on; + location / { + grpc_pass grpc://localhost:${IN_GRPC_SERVER_PORT}; + } +} +" > nginx.conf +cp nginx.conf /etc/nginx/conf.d/nginx.conf +service nginx restart + +./tcp2grpc --tcp_server_port="$OUT_TCP_SERVER_PORT" \ + --target_grpc_address="localhost:$OUT_GRPC_SERVER_PORT" & + +./grpc2tcp --grpc_server_port="$IN_GRPC_SERVER_PORT" \ + --target_tcp_address="localhost:$IN_TCP_SERVER_PORT" & diff --git a/web_console_v2/.dockerignore b/web_console_v2/.dockerignore index f6edae1e9..e74466cc6 100644 --- a/web_console_v2/.dockerignore +++ b/web_console_v2/.dockerignore @@ -4,5 +4,5 @@ Dockerfile # Tests client/tests -api/test +api/tests api/testing diff --git a/web_console_v2/BUILD.bazel b/web_console_v2/BUILD.bazel new file mode 100644 index 000000000..e4c69406a --- /dev/null +++ b/web_console_v2/BUILD.bazel @@ -0,0 +1,13 @@ +filegroup( + name = "srcs", + srcs = [ + ".dockerignore", + ".gitignore", + "Dockerfile", + "README.md", + "nginx.conf", + "run_dev.sh", + "run_prod.sh", 
+ ], + visibility = ["//visibility:public"], +) diff --git a/web_console_v2/Dockerfile b/web_console_v2/Dockerfile index 0254897a4..6d856b5b4 100644 --- a/web_console_v2/Dockerfile +++ b/web_console_v2/Dockerfile @@ -1,11 +1,13 @@ -FROM python:3.7 +FROM python:3.6.8 RUN apt-get update && \ apt install -y curl && \ # For nodejs PA curl -sL https://deb.nodesource.com/setup_14.x | bash && \ + # For krb5-user installation + export DEBIAN_FRONTEND=noninteractive && \ # Install dependencies - apt-get install -y make nodejs nginx && \ + apt-get install -y make nodejs nginx krb5-user cron && \ apt-get clean WORKDIR /app @@ -14,14 +16,15 @@ COPY . . # Builds frontend WORKDIR /app/client -RUN npx pnpm install && npx pnpm build && rm -rf node_modules +RUN npx pnpm@6.4.0 install && npx pnpm@6.4.0 build && rm -rf node_modules # Builds backend WORKDIR /app/api RUN pip3 install --no-cache-dir -r requirements.txt && make protobuf +WORKDIR /app # Nginx configuration -COPY nginx.conf /etc/nginx/conf.d/nginx.conf +RUN cp nginx.conf /etc/nginx/conf.d/nginx.conf # Port for webconsole http server EXPOSE 1989 @@ -29,19 +32,7 @@ EXPOSE 1989 # This should not be exposed in PROD EXPOSE 1990 -# Install vscode -RUN curl -fOL https://github.com/cdr/code-server/releases/download/v3.8.0/code-server_3.8.0_amd64.deb && \ - dpkg -i code-server_3.8.0_amd64.deb && \ - rm code-server_3.8.0_amd64.deb && \ - mkdir -p ~/.config/code-server/ && \ - echo 'bind-addr: 0.0.0.0:1992\n\ -auth: password\n\ -password: fedlearner\n\ -cert: false\n' >> ~/.config/code-server/config.yaml - -# Port for VScode -EXPOSE 1992 ENV TZ="Asia/Shanghai" WORKDIR /app -CMD sh run_prod.sh +CMD bash run_prod.sh diff --git a/web_console_v2/Makefile b/web_console_v2/Makefile deleted file mode 100644 index 9fb779f40..000000000 --- a/web_console_v2/Makefile +++ /dev/null @@ -1,8 +0,0 @@ -api-test: - cd api && \ - make protobuf && \ - make lint && \ - make test - -docker-spark: - cd ./docker/spark && docker build . 
-t spark-tfrecord:latest \ No newline at end of file diff --git a/web_console_v2/api/.gitignore b/web_console_v2/api/.gitignore index b42ddd478..9f55327c4 100644 --- a/web_console_v2/api/.gitignore +++ b/web_console_v2/api/.gitignore @@ -4,9 +4,10 @@ # Generated proto python code fedlearner_webconsole/proto/*.py fedlearner_webconsole/proto/*.pyi +fedlearner_webconsole/proto/testing/ # Coverage generated .coverage_html_report/ .coverage* -root.log.* \ No newline at end of file +root.log.* diff --git a/web_console_v2/api/.style.yapf b/web_console_v2/api/.style.yapf new file mode 100644 index 000000000..b3d849f2d --- /dev/null +++ b/web_console_v2/api/.style.yapf @@ -0,0 +1,3 @@ +[style] +based_on_style = google +column_limit = 120 diff --git a/web_console_v2/api/.yapfignore b/web_console_v2/api/.yapfignore new file mode 100644 index 000000000..eefeb4275 --- /dev/null +++ b/web_console_v2/api/.yapfignore @@ -0,0 +1,2 @@ +migrations/ +fedlearner_webconsole/proto/ diff --git a/web_console_v2/api/BUILD.bazel b/web_console_v2/api/BUILD.bazel new file mode 100644 index 000000000..f20b92936 --- /dev/null +++ b/web_console_v2/api/BUILD.bazel @@ -0,0 +1,150 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") + +package(default_visibility = [":console_api_package"]) + +package_group( + name = "console_api_package", + packages = ["//web_console_v2/api/..."], +) + +filegroup( + name = "srcs", + srcs = [ + "Makefile", + "README.md", + ], +) + +py_library( + name = "checks_lib", + srcs = ["checks.py"], + imports = ["."], + deps = [":envs_lib"], +) + +py_library( + name = "command_lib", + srcs = [ + "command.py", + "es_configuration.py", + ], + imports = ["."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:app_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:es_lib", + "//web_console_v2/api/tools:lib", + "@common_click//:pkg", + "@common_elasticsearch//:pkg", + "@common_flask_migrate//:pkg", + "@common_requests//:pkg", + ], +) + +py_library( + name = "config_lib", + srcs = ["config.py"], + imports = ["."], + deps = [ + ":envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + ], +) + +py_library( + name = "envs_lib", + srcs = ["envs.py"], + imports = ["."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_pytz//:pkg", + ], +) + +py_test( + name = "envs_lib_test", + size = "small", + srcs = [ + "envs_test.py", + ], + imports = [".."], + main = "envs_test.py", + deps = [ + ":envs_lib", + ], +) + +py_library( + name = "logging_config_lib", + srcs = ["logging_config.py"], + imports = ["."], + deps = [ + ":envs_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:log_filter_lib", + ], +) + +py_binary( + name = "rpc_server_bin", + srcs = ["rpc_server.py"], + imports = ["."], + main = "rpc_server.py", + deps = [ + ":envs_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:server_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:hooks_lib", + ], +) + +py_binary( + name = "composer_bin", + srcs = ["composer.py"], + imports = ["."], + main = "composer.py", + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:hooks_lib", + ], +) + +py_library( + name = "server_lib", + srcs = ["server.py"], + imports = ["."], + deps = [ + ":checks_lib", + 
":config_lib", + ":envs_lib", + "//web_console_v2/api/fedlearner_webconsole:app_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:middlewares_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:hooks_lib", + "@common_flask//:pkg", + "@common_gunicorn//:pkg", + ], +) + +filegroup( + name = "gunicorn_config", + srcs = [ + "gunicorn_config.py", + ], +) + +py_binary( + name = "entrypoint_bin", + srcs = ["entrypoint.py"], + imports = ["."], + main = "entrypoint.py", + deps = [ + ":envs_lib", + ":server_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:server_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:hooks_lib", + "@common_flask//:pkg", + "@common_gunicorn//:pkg", + ], +) diff --git a/web_console_v2/api/Makefile b/web_console_v2/api/Makefile index 997047b5f..a306d70e7 100644 --- a/web_console_v2/api/Makefile +++ b/web_console_v2/api/Makefile @@ -1,36 +1,16 @@ export PYTHONPATH:=${PWD}:$(PYTHONPATH) -.PHONY: test unit-test-all unit-test protobuf - -lint: - pylint --rcfile ./ci/pylintrc --load-plugins pylint_quotes fedlearner_webconsole +clean: + rm -f err.out && \ + find ./ -type f \( -name "*.db" -o -name "*.log" \) -exec rm -f {} \; protobuf: + PATH=${PATH}:${PWD}/bin/$(shell uname) \ python -m grpc_tools.protoc -I protocols \ --python_out=. \ --grpc_python_out=. \ --mypy_out=. \ - protocols/fedlearner_webconsole/proto/*.proto - -UNIT_TEST_SCRIPTS := $(shell find test/ -type f -name "*_test.py") -UNIT_TEST_SCRIPTS_REGEX := $(shell find test/$(FOLDER) -type f -name "$(REG)*.py") -UNIT_TESTS := $(UNIT_TEST_SCRIPTS:%.py=%.phony) -UNIT_TESTS_REGEX := $(UNIT_TEST_SCRIPTS_REGEX:%.py=%.phony) - -test/%.phony: test/%.py - python $^ - -unit-test-all: protobuf $(UNIT_TESTS) - -# run unit test with optional $FOLDER and $REG parameter to limit the number of -# running tests. -# Sample: make unit-test FOLDER="/fedlearner_webconsole/utils" REG="file*" -unit-test: protobuf $(UNIT_TESTS_REGEX) - -cli-test: - FLASK_APP=command:app flask routes - -test: unit-test-all cli-test - -clean: - find ./ -type f \( -name "*.db" -o -name "*.log" \) -exec rm -f {} \; + --jsonschema_out=prefix_schema_files_with_package,disallow_additional_properties:fedlearner_webconsole/proto/jsonschemas \ + protocols/fedlearner_webconsole/proto/*.proto \ + protocols/fedlearner_webconsole/proto/**/*.proto \ + protocols/fedlearner_webconsole/proto/rpc/v2/*.proto diff --git a/web_console_v2/api/README.md b/web_console_v2/api/README.md index c46e853dd..30a737fec 100644 --- a/web_console_v2/api/README.md +++ b/web_console_v2/api/README.md @@ -2,73 +2,126 @@ ## Prerequisites -* GNU Make -* Python3 +* Bazel * MySQL 8.0 +* Docker ## Get started -``` -python3 -m venv -source /bin/activate -pip3 install -r requirements.txt +Starting development by using fake k8s (no actual data). 
+
+Start all the processes:
 
-# Generates python code for proto
-make protobuf
+```bash
+bazelisk run //web_console_v2/api/cmds:run_dev
+```
 
-# Use MySQL, please create database in advance, then set
-# SQLALCHEMY_DATABASE_URI, for example as follows
-export SQLALCHEMY_DATABASE_URI=mysql+pymysql://root:root@localhost:33600/fedlearner
+Optionally, to stop or restart one of the processes:
 
-# Creates schemas for DB
-FLASK_APP=command:app flask db upgrade
+```bash
+bazelisk run //web_console_v2/api/cmds:supervisorctl_cli_bin -- -s unix:///tmp/supervisor.sock
+```
 
-# Creates initial user
-FLASK_APP=command:app flask create-initial-data
+## Develop with remote k8s cluster
 
-# Starts the server
-export FLASK_ENV=development
-flask run
+```bash
+# Changes configs in tools/local_runner/app_a.py or app_b.py
+bash tools/local_runner/run_a.sh
+bash tools/local_runner/run_b.sh
```
 
 ## Tests
 
 ### Unit tests
 
-```
-cd 
-make unit-test
+```bash
+bazelisk test //web_console_v2/api/... --config lint
 ```
 
 ## Helpers
 
 ### Gets all routes
-```
-FLASK_APP=command:app flask routes
+
+```bash
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- routes
 ```
 
 ### Add migration files
 
-```
-FLASK_APP=command:app flask db migrate -m "Whats' changed"
+```bash
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- db migrate -m "What's changed" -d web_console_v2/api/migrations
+
 # like dry-run mode, preview auto-generated SQL
-FLASK_APP=command:app flask db upgrade --sql
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- db upgrade --sql -d web_console_v2/api/migrations
+
 # update database actually
-FLASK_APP=command:app flask db upgrade
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- db upgrade -d web_console_v2/api/migrations
 ```
 
 ### Reset migration files
 
 Delete migrations folder first.
+
+```bash
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- db init -d web_console_v2/api/migrations
+
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- db migrate -m "Initial migration." -d web_console_v2/api/migrations
+```
+
+### Cleanup project
+
+```bash
+FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  bazelisk run //web_console_v2/api/cmds:flask_cli_bin -- cleanup-project
 ```
-FLASK_APP=command:app flask db init
-FLASK_APP=command:app flask db migrate -m "Initial migration."
+
+## Conventions & style
+
+### [Style guide](docs/style_guide.md)
+
+### Code formatter
+
+We use [yapf](https://github.com/google/yapf) to format our code; the style is defined in `.style.yapf`.
+
+To check the format, please run:
+
+```bash
+bazelisk test --config lint
 ```
-## [Style guide](docs/style_guide.md)
-## [Best practices](docs/best_practices.md)
+To fix the errors, please run:
+
+```bash
+bazelisk test --config fix
+```
+
+### [gRPC](docs/grpc.md)
+
+## Best practices
+
+### [Database best practices](docs/best_practices/db.md)
+
+### [API-layer best practices](docs/best_practices.md)
+
+### [Client-server model best practices](docs/best_practices/client_server.md)
+
+### [Multi-process best practices](docs/best_practices/multiprocess.md)
 
 ## References
 
 ### Default date time in sqlalchemy
+
 https://stackoverflow.com/questions/13370317/sqlalchemy-default-datetime/33532154#33532154
diff --git a/web_console_v2/api/bin/Darwin/protoc-gen-jsonschema b/web_console_v2/api/bin/Darwin/protoc-gen-jsonschema
new file mode 100755
index 000000000..7e38eee5a
Binary files /dev/null and b/web_console_v2/api/bin/Darwin/protoc-gen-jsonschema differ
diff --git a/web_console_v2/api/bin/Linux/protoc-gen-jsonschema b/web_console_v2/api/bin/Linux/protoc-gen-jsonschema
new file mode 100755
index 000000000..b8935f7fb
Binary files /dev/null and b/web_console_v2/api/bin/Linux/protoc-gen-jsonschema differ
diff --git a/web_console_v2/api/checks.py b/web_console_v2/api/checks.py
new file mode 100644
index 000000000..ae90cac86
--- /dev/null
+++ b/web_console_v2/api/checks.py
@@ -0,0 +1,24 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+from envs import Envs
+
+
+def validity_check():
+    error_msg = Envs.check()
+    if error_msg:
+        print(f'Validity check failed: {error_msg}')
+        sys.exit(1)
diff --git a/web_console_v2/api/ci/pylintrc b/web_console_v2/api/ci/pylintrc
deleted file mode 100644
index 13af12998..000000000
--- a/web_console_v2/api/ci/pylintrc
+++ /dev/null
@@ -1,434 +0,0 @@
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=CVS
-
-# Add files or directories matching the regex patterns to the blacklist. The
-# regex matches against base names, not paths.
-ignore-patterns=.*pb2.*
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-# Use multiple processes to speed up Pylint.
-jobs=1
-
-# Allow loading of arbitrary C extensions. Extensions are imported into the
-# active Python interpreter and may run arbitrary code.
-unsafe-load-any-extension=no
-
-# A comma-separated list of package or module names from where C extensions may
-# be loaded. Extensions are loading into the active Python interpreter and may
-# run arbitrary code
-extension-pkg-whitelist=
-
-
-[MESSAGES CONTROL]
-
-# Only show warnings with the listed confidence levels. Leave empty to show
-# all.
Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. See also the "--disable" option for examples. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -# -# ----------------------------------------------------------------------- -# 2015-01-12 - What follows is the list of all disabled items necessary -# to get a clean run of lint across CourseBuilder. These are separated -# into three tiers: -# -# - Fix-worthy. This includes: -# - Probable bugs -# - Easily-addressed hygiene issues, -# - Real warnings which we may mark as suppressed on a case-by-case basis. -# - Items that are questionable practice, but not necessarily economical to fix. -# - Items that we intend to ignore, as we do not consider them bad practice. -# -# Warning messages are documented at http://docs.pylint.org/features.html -# -# ---------------------------------------------------------------------- -# Fix-worthy: -# -# ---- Possible bugs: -# disable=super-on-old-class -# disable=arguments-differ (# of arguments to overriding/overridden method) -# disable=signature-differs -# disable=method-hidden -# disable=abstract-method (Abstract method not overridden in derived class) -# disable=no-member (self.foo used when foo not declared in class) -# -# ---- Easy-to-fix and improves readability, cleanliness: -# disable=relative-import -# -# ---- Probably legitimate, but needs markup to indicate intentionality -# disable=no-init (Class does not have __init__, nor do ancestor classes) -# disable=import-error -# disable=attribute-defined-outside-init -# -# ---------------------------------------------------------------------- -# Fix when economical: -# -# ---- Minor code cleanliness problems; fix when encountered. 
-# disable=unused-argument -# disable=unused-variable -# disable=invalid-name (Variable name does not meet coding standard) -# disable=duplicate-code -# -# ---- Laundry list of tunable parameters for when things are too big/small -# disable=abstract-class-little-used -# disable=too-few-public-methods -# disable=too-many-instance-attributes -# disable=too-many-ancestors -# disable=too-many-return-statements -# disable=too-many-lines -# disable=too-many-locals -# disable=too-many-function-args -# disable=too-many-public-methods -# disable=too-many-arguments -# -# ---------------------------------------------------------------------- -# Ignored; OK by our coding standard: -# -# disable=bad-continuation (Bad whitespace on following line) -# disable=no-self-use (Member function never uses 'self' parameter) -# disable=missing-docstring -# disable=fixme -# disable=star-args -# disable=locally-disabled (Notes local suppression of warning) -# disable=locally-enabled (Notes re-enable of suppressed warning) -# disable=bad-option-value (Notes suppression of unknown warning) -# disable=abstract-class-not-used (Warns when not used in same file) -# -# Unfortunately, since the options parsing does not support multi-line entries -# nor line continuation, all of the above items are redundantly specified here -# in a way that pylint is willing to parse. -disable=super-on-old-class,arguments-differ,signature-differs,method-hidden,abstract-method,no-member,relative-import,no-init,import-error,attribute-defined-outside-init,abstract-class-not-used,unused-argument,unused-variable,invalid-name,duplicate-code,abstract-class-little-used,too-few-public-methods,too-many-instance-attributes,too-many-ancestors,too-many-return-statements,too-many-lines,too-many-locals,too-many-function-args,too-many-public-methods,too-many-arguments,bad-continuation,no-self-use,missing-docstring,fixme,star-args,locally-disabled,locally-enabled,bad-option-value,useless-object-inheritance,logging-format-interpolation - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html. You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=no - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - - -[SPELLING] - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. 
-spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_$|dummy - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis -ignored-modules= - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). -ignored-classes=SQLObject - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E0201 when accessed. Python regular -# expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent - - -[BASIC] - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,input - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. 
-name-group= - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Regular expression matching correct function names -function-rgx=[a-z_][a-z0-9_]{2,50}$ - -# Naming hint for function names -function-name-hint=[a-z_][a-z0-9_]{2,50}$ - -# Regular expression matching correct variable names -variable-rgx=[a-z_][a-z0-9_]{1,30}$ - -# Naming hint for variable names -variable-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression matching correct attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for attribute names -attr-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for argument names -argument-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression matching correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for method names -method-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=__.*__ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=80 - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - -# List of optional constructs for which whitespace checking is disabled -no-space-check=trailing-comma,dict-separator - -# Maximum number of lines in a module -max-module-lines=2000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. 
internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=12 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.* - -# Maximum number of locals for function / method body -max-locals=25 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branches=40 - -# Maximum number of statements in function / method body -max-statements=105 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=50 - -# Set the linting for string quotes -string-quote=single -triple-quote=double -docstring-quote=double - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/web_console_v2/api/cmds/BUILD.bazel b/web_console_v2/api/cmds/BUILD.bazel new file mode 100644 index 000000000..3fae43d8e --- /dev/null +++ b/web_console_v2/api/cmds/BUILD.bazel @@ -0,0 +1,100 @@ +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_binary( + name = "flask_cli_bin", + srcs = [ + "flask_cli.py", + ], + data = [ + "//web_console_v2/api/migrations", + ], + main = "flask_cli.py", + deps = [ + # THIS IS A HACK!!! + # Although `//web_console_v2/api:command_lib` is not directly used in `flask_cli.py`, we have to `deps` it for discovering python dependencies at runtime. + "//web_console_v2/api:command_lib", + "@common_flask//:pkg", + ], +) + +py_binary( + name = "gunicorn_cli_bin", + srcs = [ + "gunicorn_cli.py", + ], + data = [ + "//web_console_v2/api:gunicorn_config", + ], + main = "gunicorn_cli.py", + deps = [ + # THIS IS A HACK!!! + # Although `//web_console_v2/api:server_lib"` is not directly used in `gunicorn_cli.py`, we have to `deps` it for discovering python dependencies at runtime. 
+ "//web_console_v2/api:server_lib", + "@common_gunicorn//:pkg", + ], +) + +py_binary( + name = "supervisorctl_cli_bin", + srcs = [ + "supervisorctl_cli.py", + ], + data = [ + "supervisord.conf", + ], + main = "supervisorctl_cli.py", + deps = [ + "@common_supervisor//:pkg", + ], +) + +py_binary( + name = "supervisord_cli_bin", + srcs = [ + "supervisord_cli.py", + ], + data = [ + "supervisord.conf", + ], + main = "supervisord_cli.py", + deps = [ + "@common_supervisor//:pkg", + ], +) + +filegroup( + name = "runtime_env", + srcs = [ + "runtime_env.sh", + ], +) + +sh_binary( + name = "run_prod", + srcs = [ + "run_prod.sh", + ], + data = [ + ":flask_cli_bin", + ":gunicorn_cli_bin", + ":runtime_env", + ":supervisorctl_cli_bin", + ":supervisord_cli_bin", + "//web_console_v2/api:entrypoint_bin", + ], + visibility = ["//visibility:public"], +) + +sh_binary( + name = "run_dev", + srcs = [ + "run_dev.sh", + ], + data = [ + "supervisord_dev.conf", + ":runtime_env", + "//web_console_v2/api:entrypoint_bin", + "//web_console_v2/api/cmds:flask_cli_bin", + "//web_console_v2/api/cmds:supervisord_cli_bin", + ], +) diff --git a/web_console_v2/api/cmds/flask_cli.py b/web_console_v2/api/cmds/flask_cli.py new file mode 100644 index 000000000..b60819485 --- /dev/null +++ b/web_console_v2/api/cmds/flask_cli.py @@ -0,0 +1,20 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import sys +from flask.cli import main + +if __name__ == '__main__': + sys.exit(main()) diff --git a/web_console_v2/api/cmds/gunicorn_cli.py b/web_console_v2/api/cmds/gunicorn_cli.py new file mode 100644 index 000000000..6c67f60d9 --- /dev/null +++ b/web_console_v2/api/cmds/gunicorn_cli.py @@ -0,0 +1,20 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import sys +from gunicorn.app.wsgiapp import run + +if __name__ == '__main__': + sys.exit(run()) diff --git a/web_console_v2/api/cmds/run_dev.sh b/web_console_v2/api/cmds/run_dev.sh new file mode 100755 index 000000000..c7c6cde27 --- /dev/null +++ b/web_console_v2/api/cmds/run_dev.sh @@ -0,0 +1,43 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#!/bin/bash
+set -e
+
+# This script is designed to be triggered by `bazel run`,
+# so it can only be executed from the root of our workspace.
+[[ ! ${PWD} =~ .*privacy_computing_platform ]] && { echo "this script should be executed in the root of the workspace"; exit 1; }
+
+function flask_command {
+  FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  web_console_v2/api/cmds/flask_cli_bin \
+  $*
+}
+
+export SYSTEM_INFO="{\"domain_name\": \"dev.fedlearner.net\", \"name\": \"Dev\"}"
+export APM_SERVER_ENDPOINT=stdout
+export FLASK_ENV=development
+
+# set runtime env
+source web_console_v2/api/cmds/runtime_env.sh
+
+# Migrates DB schemas
+flask_command create-db
+# Loads initial data
+flask_command create-initial-data
+# Runs Api Composer and gRPC service
+web_console_v2/api/cmds/supervisord_cli_bin \
+    -c web_console_v2/api/cmds/supervisord_dev.conf --nodaemon
diff --git a/web_console_v2/api/cmds/run_prod.sh b/web_console_v2/api/cmds/run_prod.sh
new file mode 100755
index 000000000..cdd6fcf92
--- /dev/null
+++ b/web_console_v2/api/cmds/run_prod.sh
@@ -0,0 +1,72 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#!/bin/bash
+set -e
+
+# Add hook into pythonpath
+export PYTHONPATH=$PYTHONPATH:$PWD
+# Check whether $0.runfiles exists, which means the runtime files exist.
+[[ -d "$0.runfiles" ]] && cd $0.runfiles/privacy_computing_platform
+
+# link all deps inside runfiles to $workspace/external
+if [[ ! -d "external" ]]
+then
+    echo "linking symbolic into external..."
+    mkdir external
+    ls ../ | grep -v privacy_computing_platform | xargs -IX ln -s $PWD/../X $PWD/external/X
+fi
+
+function flask_command {
+  FLASK_APP=web_console_v2/api/command:app \
+  APM_SERVER_ENDPOINT=/dev/null \
+  web_console_v2/api/cmds/flask_cli_bin \
+  $*
+}
+
+# set runtime env
+source web_console_v2/api/cmds/runtime_env.sh
+
+# Configure Elasticsearch ILM information
+flask_command es-configuration
+
+# Iterates arguments
+while test $# -gt 0
+do
+    case "$1" in
+    --migrate)
+        echo "Migrating DB"
+        # Migrates DB schemas
+        flask_command db upgrade \
+            --directory web_console_v2/api/migrations
+        ;;
+    esac
+    shift
+done
+
+flask_command create-initial-data
+
+export FEDLEARNER_WEBCONSOLE_LOG_DIR=/var/log/fedlearner_webconsole/
+mkdir -p $FEDLEARNER_WEBCONSOLE_LOG_DIR
+
+# This starts the supervisor daemon, which will start all processes defined in
+# supervisord.conf. The daemon starts in the background by default.
+web_console_v2/api/cmds/supervisord_cli_bin \
+    -c web_console_v2/api/cmds/supervisord.conf
+# This tails logs from all processes defined in supervisord.conf.
+# The purpose of this is to keep supervisor in the foreground so that the
+# pod/container will not be terminated.
+web_console_v2/api/cmds/supervisorctl_cli_bin \
+    -c web_console_v2/api/cmds/supervisord.conf maintail -f
diff --git a/web_console_v2/api/cmds/runtime_env.sh b/web_console_v2/api/cmds/runtime_env.sh
new file mode 100644
index 000000000..8d3f616af
--- /dev/null
+++ b/web_console_v2/api/cmds/runtime_env.sh
@@ -0,0 +1,42 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#!/bin/bash
+set -e
+
+# Adds root directory to python path to make the modules findable.
+export PYTHONPATH=$PYTHONPATH:"web_console_v2/api/"
+
+# Disable pymalloc to avoid high memory usage when many small objects are allocated in parallel.
+# Ref: https://docs.python.org/3/c-api/memory.html#the-pymalloc-allocator
+export PYTHONMALLOC=malloc
+export PYTHONUNBUFFERED=1
+
+# When HADOOP_HOME is recognized, also export some useful environment variables for GFile.
+if [ ! -z $HADOOP_HOME ]
+then
+    echo "set hadoop env"
+    # This is super important for compatibility with hadoop and hadoop_current
+    if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]
+    then
+        export HADOOP_CONF_DIR=$HADOOP_HOME/conf
+        source "$HADOOP_HOME/conf/hadoop-env.sh" &> /dev/null
+    else
+        export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
+        source "$HADOOP_HOME/etc/hadoop/hadoop-env.sh" &> /dev/null
+    fi
+    export LD_LIBRARY_PATH=${HADOOP_HOME}/lib/native:${HADOOP_HOME}/lib/native/nfs:${JAVA_HOME}/jre/lib/amd64/server:${LD_LIBRARY_PATH}
+    export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob)
+fi
diff --git a/web_console_v2/api/cmds/supervisorctl_cli.py b/web_console_v2/api/cmds/supervisorctl_cli.py
new file mode 100644
index 000000000..622963683
--- /dev/null
+++ b/web_console_v2/api/cmds/supervisorctl_cli.py
@@ -0,0 +1,20 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+from supervisor.supervisorctl import main
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/web_console_v2/api/cmds/supervisord.conf b/web_console_v2/api/cmds/supervisord.conf
new file mode 100644
index 000000000..2101d641c
--- /dev/null
+++ b/web_console_v2/api/cmds/supervisord.conf
@@ -0,0 +1,27 @@
+[supervisord]
+pidfile=/run/supervisord.pid
+loglevel=debug
+
+; This section sets up an HTTP server that listens on "file", which can be used
+; to control the daemon
+[unix_http_server]
+file=/var/run/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory=supervisor.rpcinterface:make_main_rpcinterface
+
+; This section lets supervisorctl connect to the HTTP server above to control the daemon
+[supervisorctl]
+serverurl=unix:///var/run/supervisor.sock
+
+[program:restful_api]
+command=web_console_v2/api/cmds/gunicorn_cli_bin --conf web_console_v2/api/gunicorn_config.py server:app
+redirect_stderr=true
+
+[program:rpc]
+command=web_console_v2/api/entrypoint_bin start-rpc
+redirect_stderr=true
+
+[program:composer]
+command=web_console_v2/api/entrypoint_bin start-composer
+redirect_stderr=true
diff --git a/web_console_v2/api/cmds/supervisord_cli.py b/web_console_v2/api/cmds/supervisord_cli.py
new file mode 100644
index 000000000..0725d2bad
--- /dev/null
+++ b/web_console_v2/api/cmds/supervisord_cli.py
@@ -0,0 +1,20 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+from supervisor.supervisord import main
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/web_console_v2/api/cmds/supervisord_dev.conf b/web_console_v2/api/cmds/supervisord_dev.conf
new file mode 100644
index 000000000..373a93b99
--- /dev/null
+++ b/web_console_v2/api/cmds/supervisord_dev.conf
@@ -0,0 +1,28 @@
+[supervisord]
+pidfile=/tmp/supervisord.pid
+loglevel=debug
+
+; This section sets up an HTTP server that listens on "file", which can be used
+; to control the daemon
+[unix_http_server]
+file=/tmp/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory=supervisor.rpcinterface:make_main_rpcinterface
+
+; This section lets supervisorctl connect to the HTTP server above to control the daemon
+[supervisorctl]
+serverurl=unix:///tmp/supervisor.sock
+
+[program:restful_api]
+command=./web_console_v2/api/cmds/flask_cli_bin run --eager-loading --port=1991 --host=0.0.0.0
+environment=FLASK_APP=web_console_v2.api.server:app
+redirect_stderr=true
+
+[program:rpc]
+command=./web_console_v2/api/entrypoint_bin -- start-rpc
+redirect_stderr=true
+
+[program:composer]
+command=./web_console_v2/api/entrypoint_bin -- start-composer
+redirect_stderr=true
diff --git a/web_console_v2/api/command.py b/web_console_v2/api/command.py
index ca3fdc337..776408412 100644
--- a/web_console_v2/api/command.py
+++ b/web_console_v2/api/command.py
@@ -1,4 +1,4 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,19 +13,24 @@ # limitations under the License. # coding: utf-8 +import click + from config import Config +from flask_migrate import Migrate +from es_configuration import es_config from fedlearner_webconsole.app import create_app -from fedlearner_webconsole.db import db_handler as db +from fedlearner_webconsole.db import db from fedlearner_webconsole.initial_db import initial_db -from flask_migrate import Migrate - from fedlearner_webconsole.utils.hooks import pre_start_hook +from tools.project_cleanup import delete_project +from tools.workflow_migration.workflow_completed_failed import migrate_workflow_completed_failed_state +from tools.dataset_migration.dataset_job_name_migration.dataset_job_name_migration import migrate_dataset_job_name +from tools.variable_finder import find class CliConfig(Config): - START_GRPC_SERVER = False START_SCHEDULER = False - START_COMPOSER = False + START_K8S_WATCHER = False pre_start_hook() @@ -42,3 +47,35 @@ def create_initial_data(): @app.cli.command('create-db') def create_db(): db.create_all() + + +@app.cli.command('cleanup-project') +@click.argument('project_id') +def cleanup_project(project_id): + delete_project(int(project_id)) + + +@app.cli.command('migrate-workflow-completed-failed-state') +def remove_intersection_dataset(): + migrate_workflow_completed_failed_state() + + +@app.cli.command('migrate-dataset-job-name') +def add_dataset_job_name(): + migrate_dataset_job_name() + + +@app.cli.command('migrate-connect-to-test') +def migrate_connect_to_test(): + migrate_connect_to_test() + + +@app.cli.command('find-variable') +@click.argument('name') +def find_variable(name: str): + find(name) + + +@app.cli.command('es-configuration') +def es_configuration(): + es_config() diff --git a/web_console_v2/api/composer.py b/web_console_v2/api/composer.py new file mode 100644 index 000000000..d4d4248fa --- /dev/null +++ b/web_console_v2/api/composer.py @@ -0,0 +1,28 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.composer import composer +from fedlearner_webconsole.utils.hooks import pre_start_hook + +if __name__ == '__main__': + # TODO(wangsen.0914): refactor logging_config + # There's a race condition when multiple process logging to same file. + logging.basicConfig(level=logging.DEBUG) + pre_start_hook() + logging.info('Starting composer...') + composer.run(db.engine) + composer.wait_for_termination() diff --git a/web_console_v2/api/config.py b/web_console_v2/api/config.py index e58492b6e..f3d27f82d 100644 --- a/web_console_v2/api/config.py +++ b/web_console_v2/api/config.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,9 +14,6 @@
# coding: utf-8

-import os
-import secrets
-
from fedlearner_webconsole.db import get_database_uri
from envs import Envs
@@ -28,12 +25,11 @@ class Config(object):
    # For unicode strings
    # Ref: https://stackoverflow.com/questions/14853694/python-jsonify-dictionary-in-utf-8
    JSON_AS_ASCII = False
-    JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(64))
+    JWT_SECRET_KEY = Envs.JWT_SECRET_KEY
    PROPAGATE_EXCEPTIONS = True
-    GRPC_LISTEN_PORT = 1990
+    GRPC_LISTEN_PORT = Envs.GRPC_LISTEN_PORT
    JWT_ACCESS_TOKEN_EXPIRES = 86400
    STORAGE_ROOT = Envs.STORAGE_ROOT
-    START_GRPC_SERVER = True
    START_SCHEDULER = True
-    START_COMPOSER = os.getenv('START_COMPOSER', True)
+    START_K8S_WATCHER = True
diff --git a/web_console_v2/api/docs/BUILD.bazel b/web_console_v2/api/docs/BUILD.bazel
new file mode 100644
index 000000000..821338eb2
--- /dev/null
+++ b/web_console_v2/api/docs/BUILD.bazel
@@ -0,0 +1,5 @@
+filegroup(
+    name = "srcs",
+    srcs = glob(["**/*"]),
+    visibility = ["//visibility:public"],
+)
diff --git a/web_console_v2/api/docs/best_practices.md b/web_console_v2/api/docs/best_practices.md
index 9c5964b50..88aaaddf6 100644
--- a/web_console_v2/api/docs/best_practices.md
+++ b/web_console_v2/api/docs/best_practices.md
@@ -6,6 +6,8 @@
flask-migrate, which needs us to upgrade the migration files once schema gets
updated (inefficiently). Integers/strings makes us easy to extend the enums,
the disadvantage is we should take care of data migrations if enum is deleted.
+Natively, SQLAlchemy supports an Enum type for columns. [Ref](https://docs.sqlalchemy.org/en/14/core/type_basics.html#sqlalchemy.types.Enum)
+
### Index in DB
Index is not necessary if the value of column is very limited, such as enum
or boolean. Reference: https://tech.meituan.com/2014/06/30/mysql-index.html
@@ -53,3 +55,58 @@ See details [here](https://en.wikipedia.org/wiki/Representational_state_transfer
                          primaryjoin='Project.id == '
                                      'foreign(Job.project_id)')
 ```
+
+### sqlalchemy session
+* Please keep the session/transaction scope as small as possible; otherwise it may not work as expected.
+[Ref](https://docs.sqlalchemy.org/en/14/orm/session_basics.html#when-do-i-construct-a-session-when-do-i-commit-it-and-when-do-i-close-it)
+```python
+# BAD: the transaction will include the runner query, so the data may become stale.
+with db.session_scope() as session:
+    init_runners = session.query(SchedulerRunner).filter_by(
+        status=RunnerStatus.INIT.value).all()
+    for runner in init_runners:
+        # Do something with the runner
+        session.commit()
+
+# GOOD: make the transaction scope clear.
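+# (Fetch only the runner ids in one short transaction, then handle each runner in its own short transaction below.)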
+with db.session_scope() as session:
+    running_runner_ids = session.query(SchedulerRunner.id).filter_by(
+        status=RunnerStatus.RUNNING.value).all()
+for runner_id, *_ in running_runner_ids:
+    with db.session_scope() as session:
+        runner = session.query(SchedulerRunner).get(runner_id)
+        # Do something with the runner
+        session.commit()
+```
+
+### Pagination
+- Use `utils/paginate.py`, and **read the test case** as a quickstart
+- All resources are **un-paginated** by default
+- Append page metadata in your returned body in the following format:
+```json
+// your POV
+{
+  "data": pagination.get_items(),
+  "page_meta": pagination.get_metadata()
+}
+
+// frontend POV
+{
+  "data": {...},
+  "page_meta": {
+    "current_page": 1,
+    "page_size": 5,
+    "total_pages": 2,
+    "total_items": 7
+  }
+}
+```
+- **ALWAYS** return `page_meta`
+  - If your API is called with `page=...`, then paginate for the caller; return the pagination metadata as shown above
+  - If your API is called without `page=...`, then return the un-paginated data with an **empty** `page_meta` body like so:
+  ```json
+  {
+    "data": {...},
+    "page_meta": {}
+  }
+  ```
diff --git a/web_console_v2/api/docs/how_to_install_python362.md b/web_console_v2/api/docs/how_to_install_python362.md
new file mode 100644
index 000000000..98b137414
--- /dev/null
+++ b/web_console_v2/api/docs/how_to_install_python362.md
@@ -0,0 +1,78 @@
+# Background
+
+When we depend on outdated software, the lack of forward compatibility can prevent us from using newer versions of Python. In that case, we need to switch between, and isolate, Python versions.
+
+In our project environment, we need TensorFlow v1.15.0, which requires Python 3.6.2.
+
+
+
+# How to install & use
+
+## 1. Get Python 3.6.2 using pyenv
+
+First, use *brew* to install *pyenv*:
+
+```shell
+brew install pyenv
+pyenv -v # check pyenv version
+```
+
+Then, check the Python 3.6 versions provided by pyenv and install the one we need:
+
+```shell
+pyenv install --list | grep 3.6
+pyenv install 3.6.2
+```
+
+
+
+###### If you see an error like this:
+
+```shell
+Last 10 log lines:
+./Modules/posixmodule.c:8210:15: error: implicit declaration of function 'sendfile' is invalid in C99 [-Werror,-Wimplicit-function-declaration]
+ret = sendfile(in, out, offset, &sbytes, &sf, flags);
+^
+./Modules/posixmodule.c:10432:5: warning: code will never be executed [-Wunreachable-code]
+Py_FatalError("abort() called from Python code didn't abort!");
+^~~~~~~~~~~~~
+1 warning and 1 error generated.
+make: *** [Modules/posixmodule.o] Error 1
+make: *** Waiting for unfinished jobs....
+1 warning generated
+```
+
+###### you can try installing it with:
+
+```shell
+CFLAGS="-I$(brew --prefix openssl)/include -I$(brew --prefix bzip2)/include -I$(brew --prefix readline)/include -I$(xcrun --show-sdk-path)/usr/include" LDFLAGS="-L$(brew --prefix openssl)/lib -L$(brew --prefix readline)/lib -L$(brew --prefix zlib)/lib -L$(brew --prefix bzip2)/lib" pyenv install --patch 3.6.2 < <(curl -sSL https://github.com/python/cpython/commit/8ea6353.patch\?full_index\=1)
+```
+
+
+
+Check all the Python versions available in pyenv to make sure Python 3.6.2 was installed successfully:
+
+```shell
+pyenv versions
+```
+
+
+
+## 2. Create your own Python 3.6.2 virtualenv
+
+When using different Python versions, I strongly recommend creating a Python virtual environment to isolate the packages of each version; see the sketch below.
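+
+For example, a minimal sketch using the built-in *venv* module (assuming pyenv installed Python 3.6.2 to its default path, which is spelled out below):
+
+```shell
+# Create an isolated environment backed by the pyenv-built interpreter
+~/.pyenv/versions/3.6.2/bin/python3.6 -m venv ~/venvs/py362
+source ~/venvs/py362/bin/activate
+python --version  # should print: Python 3.6.2
+```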
+
+The Python 3.6.2 binary installed by *pyenv* is at ~/.pyenv/versions/3.6.2/bin/python3.6.
+
+Create/manage/use your Python 3.6.2 virtualenv with *virtualenvwrapper*:
+
+```shell
+mkvirtualenv --python=/users/bytedance/.pyenv/versions/3.6.2/bin/python3.6 $ENV_NAME
+workon $ENV_NAME
+```
+
+Or just use *venv*, or the virtualenv created by *PyCharm*.
+
+
+
+***Note***: please **DO NOT** install packages directly into your real Python 3.6.2 environment.
\ No newline at end of file
diff --git a/web_console_v2/api/entrypoint.py b/web_console_v2/api/entrypoint.py
new file mode 100644
index 000000000..e4dd2facf
--- /dev/null
+++ b/web_console_v2/api/entrypoint.py
@@ -0,0 +1,55 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from click import group
+
+from envs import Envs
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.utils.hooks import pre_start_hook
+from fedlearner_webconsole.composer.composer import composer
+from fedlearner_webconsole.rpc.server import rpc_server
+from fedlearner_webconsole.k8s.k8s_watcher import k8s_watcher
+
+
+@group('entrypoint')
+def entrypoint():
+    pass
+
+
+@entrypoint.command('start-rpc')
+def start_rpc():
+    logging.info('Starting Rpc...')
+    # Start k8s watcher in rpc server process for now.
+    k8s_watcher.start()
+    rpc_server.stop()
+    rpc_server.start(Envs.GRPC_LISTEN_PORT)
+    rpc_server.wait_for_termination()
+
+
+@entrypoint.command('start-composer')
+def start_composer():
+    # TODO(wangsen.0914): refactor logging_config
+    # There's a race condition when multiple process logging to same file.
+    logging.basicConfig(level=logging.DEBUG)
+    logging.info('Starting composer...')
+    composer.run(db.engine)
+    composer.wait_for_termination()
+
+
+if __name__ == '__main__':
+    pre_start_hook()
+    entrypoint()
diff --git a/web_console_v2/api/envs.py b/web_console_v2/api/envs.py
index 6f6f5f39d..b4d7a4900 100644
--- a/web_console_v2/api/envs.py
+++ b/web_console_v2/api/envs.py
@@ -1,56 +1,172 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 import os
-import json
+import re
+import secrets
+from typing import Optional
+from urllib.parse import unquote
+from google.protobuf.json_format import Parse, ParseError
 import pytz
+from fedlearner_webconsole.proto import setting_pb2
+from fedlearner_webconsole.utils.const import API_VERSION
+
+# SQLALCHEMY_DATABASE_URI pattern dialect+driver://username:password@host:port/database
+_SQLALCHEMY_DATABASE_URI_PATTERN = re.compile(
+    r'^(?P<dialect>[^+:]+)(\+(?P<driver>[^:]+))?://'
+    r'((?P<username>[^:@]+)?:(?P<password>[^@]+)?@((?P<host>[^:/]+)(:(?P<port>[0-9]+))?)?)?'
+    r'/(?P<database>[^?]+)?')
+
+# Limit one thread used by OpenBLAS to avoid many threads that hang.
+# ref: https://stackoverflow.com/questions/30791550/limit-number-of-threads-in-numpy
+os.environ['OMP_NUM_THREADS'] = '1'
+
+
 class Envs(object):
+    SERVER_HOST = os.environ.get('SERVER_HOST', 'http://localhost:666/')
     TZ = pytz.timezone(os.environ.get('TZ', 'UTC'))
-    ES_HOST = os.environ.get('ES_HOST',
-                             'fedlearner-stack-elasticsearch-client')
-    ES_READ_HOST = os.environ.get('ES_READ_HOST', ES_HOST)
+    ES_HOST = os.environ.get('ES_HOST', 'fedlearner-stack-elasticsearch-client')
     ES_PORT = os.environ.get('ES_PORT', 9200)
     ES_USERNAME = os.environ.get('ES_USERNAME', 'elastic')
     ES_PASSWORD = os.environ.get('ES_PASSWORD', 'Fedlearner123')
+    # apm-server service address which is used to collect trace and custom metrics
+    APM_SERVER_ENDPOINT = os.environ.get('APM_SERVER_ENDPOINT', 'http://fedlearner-stack-apm-server:8200')
     # addr to Kibana in pod/cluster
-    KIBANA_SERVICE_ADDRESS = os.environ.get(
-        'KIBANA_SERVICE_ADDRESS', 'http://fedlearner-stack-kibana:443')
+    KIBANA_SERVICE_ADDRESS = os.environ.get('KIBANA_SERVICE_ADDRESS', 'http://fedlearner-stack-kibana:443')
     # addr to Kibana outside cluster, typically comply with port-forward
     KIBANA_ADDRESS = os.environ.get('KIBANA_ADDRESS', 'localhost:1993')
     # What fields are allowed in peer query.
- KIBANA_ALLOWED_FIELDS = set( - f for f in os.environ.get('KIBANA_ALLOWED_FIELDS', '*').split(',') - if f) - OPERATOR_LOG_MATCH_PHRASE = os.environ.get('OPERATOR_LOG_MATCH_PHRASE', - None) - # Whether to use the real jwt_required decorator or fake one + KIBANA_ALLOWED_FIELDS = set(f for f in os.environ.get('KIBANA_ALLOWED_FIELDS', '*').split(',') if f) + # Kibana dashboard list of dashboard information consist of [`name`, `uuid`] in json format + KIBANA_DASHBOARD_LIST = os.environ.get('KIBANA_DASHBOARD_LIST', '[]') + OPERATOR_LOG_MATCH_PHRASE = os.environ.get('OPERATOR_LOG_MATCH_PHRASE', None) + # Whether to use the real credentials_required decorator or fake one DEBUG = os.environ.get('DEBUG', False) + SWAGGER_URL_PREFIX = os.environ.get('SWAGGER_URL_PREFIX', API_VERSION) + # grpc client can use this GRPC_SERVER_URL when DEBUG is True + GRPC_SERVER_URL = os.environ.get('GRPC_SERVER_URL', None) + GRPC_LISTEN_PORT = int(os.environ.get('GRPC_LISTEN_PORT', 1990)) + RESTFUL_LISTEN_PORT = int(os.environ.get('RESTFUL_LISTEN_PORT', 1991)) + # composer server listen port for health checking service + COMPOSER_LISTEN_PORT = int(os.environ.get('COMPOSER_LISTEN_PORT', 1992)) ES_INDEX = os.environ.get('ES_INDEX', 'filebeat-*') # Indicates which k8s namespace fedlearner pods belong to K8S_NAMESPACE = os.environ.get('K8S_NAMESPACE', 'default') K8S_CONFIG_PATH = os.environ.get('K8S_CONFIG_PATH', None) - # additional info for k8s.metadata.labels - K8S_LABEL_INFO = json.loads(os.environ.get('K8S_LABEL_INFO', '{}')) - FEDLEARNER_WEBCONSOLE_LOG_DIR = os.environ.get( - 'FEDLEARNER_WEBCONSOLE_LOG_DIR', '.') + K8S_HOOK_MODULE_PATH = os.environ.get('K8S_HOOK_MODULE_PATH', None) + FEDLEARNER_WEBCONSOLE_LOG_DIR = os.environ.get('FEDLEARNER_WEBCONSOLE_LOG_DIR', '.') + LOG_LEVEL = os.environ.get('LOGLEVEL', 'INFO').upper() FLASK_ENV = os.environ.get('FLASK_ENV', 'development') + CLUSTER = os.environ.get('CLUSTER', 'default') + JWT_SECRET_KEY = os.environ.get('JWT_SECRET_KEY', secrets.token_urlsafe(64)) # In seconds - GRPC_CLIENT_TIMEOUT = os.environ.get('GRPC_CLIENT_TIMEOUT', 5) + GRPC_CLIENT_TIMEOUT = int(os.environ.get('GRPC_CLIENT_TIMEOUT', 5)) + # In seconds + GRPC_STREAM_CLIENT_TIMEOUT = int(os.environ.get('GRPC_STREAM_CLIENT_TIMEOUT', 10)) # storage filesystem STORAGE_ROOT = os.getenv('STORAGE_ROOT', '/data') # BASE_DIR BASE_DIR = os.path.abspath(os.path.dirname(__file__)) - # spark on k8s image url - SPARKAPP_IMAGE_URL = os.getenv('SPARKAPP_IMAGE_URL', None) - SPARKAPP_FILES_PATH = os.getenv('SPARKAPP_FILES_PATH', None) - SPARKAPP_VOLUMES = os.getenv('SPARKAPP_VOLUMES', None) - SPARKAPP_VOLUME_MOUNTS = os.getenv('SPARKAPP_VOLUME_MOUNTS', None) # Hooks PRE_START_HOOK = os.environ.get('PRE_START_HOOK', None) + # Flags + FLAGS = os.environ.get('FLAGS', '{}') + + # Third party SSO, see the example in test_sso.json + SSO_INFOS = os.environ.get('SSO_INFOS', '[]') + + # Audit module storage setting + AUDIT_STORAGE = os.environ.get('AUDIT_STORAGE', 'db') + + # system info, include name, domain name, ip + SYSTEM_INFO = os.environ.get('SYSTEM_INFO', '{}') + + CUSTOMIZED_FILE_MANAGER = os.environ.get('CUSTOMIZED_FILE_MANAGER') + SCHEDULER_POLLING_INTERVAL = os.environ.get('FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL', 60) + + # DB related + SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI') + DB_HOST = os.environ.get('DB_HOST') + DB_PORT = os.environ.get('DB_PORT') + DB_DATABASE = os.environ.get('DB_DATABASE') + DB_USERNAME = os.environ.get('DB_USERNAME') + DB_PASSWORD = os.environ.get('DB_PASSWORD') + + # 
Fedlearner related
+    KVSTORE_TYPE = os.environ.get('KVSTORE_TYPE')
+    ETCD_NAME = os.environ.get('ETCD_NAME')
+    ETCD_ADDR = os.environ.get('ETCD_ADDR')
+    ETCD_BASE_DIR = os.environ.get('ETCD_BASE_DIR')
+    ROBOT_USERNAME = os.environ.get('ROBOT_USERNAME')
+    ROBOT_PWD = os.environ.get('ROBOT_PWD')
+    WEB_CONSOLE_V2_ENDPOINT = os.environ.get('WEB_CONSOLE_V2_ENDPOINT')
+    HADOOP_HOME = os.environ.get('HADOOP_HOME')
+    JAVA_HOME = os.environ.get('JAVA_HOME')
+
+    @staticmethod
+    def _decode_url_codec(codec: str) -> str:
+        if not codec:
+            return codec
+        return unquote(codec)
+
+    @classmethod
+    def _check_db_envs(cls) -> Optional[str]:
+        # Checks if DB related envs are matched
+        if cls.SQLALCHEMY_DATABASE_URI:
+            matches = _SQLALCHEMY_DATABASE_URI_PATTERN.match(cls.SQLALCHEMY_DATABASE_URI)
+            if not matches:
+                return 'Invalid SQLALCHEMY_DATABASE_URI'
+            if cls.DB_HOST:
+                # Other DB_* envs should be set together
+                db_host = cls._decode_url_codec(matches.group('host'))
+                if cls.DB_HOST != db_host:
+                    return 'DB_HOST does not match'
+                db_port = cls._decode_url_codec(matches.group('port'))
+                if cls.DB_PORT != db_port:
+                    return 'DB_PORT does not match'
+                db_database = cls._decode_url_codec(matches.group('database'))
+                if cls.DB_DATABASE != db_database:
+                    return 'DB_DATABASE does not match'
+                db_username = cls._decode_url_codec(matches.group('username'))
+                if cls.DB_USERNAME != db_username:
+                    return 'DB_USERNAME does not match'
+                db_password = cls._decode_url_codec(matches.group('password'))
+                if cls.DB_PASSWORD != db_password:
+                    return 'DB_PASSWORD does not match'
+        return None
+
+    @classmethod
+    def _check_system_info_envs(cls) -> Optional[str]:
+        try:
+            system_info = Parse(Envs.SYSTEM_INFO, setting_pb2.SystemInfo())
+        except ParseError as err:
+            return f'failed to parse SYSTEM_INFO {err}'
+        if system_info.domain_name == '' or system_info.name == '':
+            return 'domain_name or name is not set into SYSTEM_INFO'
+        return None

-class Features(object):
-    FEATURE_MODEL_K8S_HOOK = os.getenv('FEATURE_MODEL_K8S_HOOK')
-    FEATURE_MODEL_WORKFLOW_HOOK = os.getenv('FEATURE_MODEL_WORKFLOW_HOOK')
-    DATA_MODULE_BETA = os.getenv('DATA_MODULE_BETA', None)
+    @classmethod
+    def check(cls) -> Optional[str]:
+        db_envs_error = cls._check_db_envs()
+        if db_envs_error:
+            return db_envs_error
+        system_info_envs_error = cls._check_system_info_envs()
+        if system_info_envs_error:
+            return system_info_envs_error
+        return None
diff --git a/web_console_v2/api/envs_test.py b/web_console_v2/api/envs_test.py
new file mode 100644
index 000000000..5dd724374
--- /dev/null
+++ b/web_console_v2/api/envs_test.py
@@ -0,0 +1,108 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from unittest.mock import patch + +from envs import _SQLALCHEMY_DATABASE_URI_PATTERN, Envs + + +class EnvsTest(unittest.TestCase): + + def test_sqlalchemy_database_uri_pattern(self): + # Sqlite + matches = _SQLALCHEMY_DATABASE_URI_PATTERN.match('sqlite:///app.db') + self.assertIsNotNone(matches) + self.assertEqual(matches.group('dialect'), 'sqlite') + self.assertIsNone(matches.group('driver')) + self.assertIsNone(matches.group('username')) + self.assertIsNone(matches.group('password')) + self.assertIsNone(matches.group('host')) + self.assertIsNone(matches.group('port')) + self.assertEqual(matches.group('database'), 'app.db') + # MySQL + matches = _SQLALCHEMY_DATABASE_URI_PATTERN.match('mysql+pymysql://root:root@localhost:33600/fedlearner') + self.assertIsNotNone(matches) + self.assertEqual(matches.group('dialect'), 'mysql') + self.assertEqual(matches.group('driver'), 'pymysql') + self.assertEqual(matches.group('username'), 'root') + self.assertEqual(matches.group('password'), 'root') + self.assertEqual(matches.group('host'), 'localhost') + self.assertEqual(matches.group('port'), '33600') + self.assertEqual(matches.group('database'), 'fedlearner') + # MySQL with socket + matches = _SQLALCHEMY_DATABASE_URI_PATTERN.match('mysql+pymysql://:@/?charset=utf8mb4db_psm=mysql.fedlearner') + self.assertIsNotNone(matches) + self.assertEqual(matches.group('dialect'), 'mysql') + self.assertEqual(matches.group('driver'), 'pymysql') + self.assertIsNone(matches.group('username')) + self.assertIsNone(matches.group('password')) + self.assertIsNone(matches.group('host')) + self.assertIsNone(matches.group('port')) + self.assertIsNone(matches.group('database')) + # Invalid ones + matches = _SQLALCHEMY_DATABASE_URI_PATTERN.match('mysql+pymysql://root_33600/fedlearner') + self.assertIsNone(matches) + matches = _SQLALCHEMY_DATABASE_URI_PATTERN.match('sqlite://hello') + self.assertIsNone(matches) + + def test_check_db_envs_valid(self): + with patch('envs.Envs.SQLALCHEMY_DATABASE_URI', 'mysql+pymysql://root:proot@localhost:33600/fedlearner'), \ + patch('envs.Envs.DB_HOST', 'localhost'), \ + patch('envs.Envs.DB_PORT', '33600'), \ + patch('envs.Envs.DB_DATABASE', 'fedlearner'), \ + patch('envs.Envs.DB_USERNAME', 'root'), \ + patch('envs.Envs.DB_PASSWORD', 'proot'): + self.assertIsNone(Envs._check_db_envs()) # pylint: disable=protected-access + # DB_HOST is not set + with patch('envs.Envs.SQLALCHEMY_DATABASE_URI', 'mysql+pymysql://:@/?charset=utf8mb4db_psm=mysql.fedlearner'): + self.assertIsNone(Envs._check_db_envs()) # pylint: disable=protected-access + # DB_PASSWORD with some encodings + with patch('envs.Envs.SQLALCHEMY_DATABASE_URI', 'mysql+pymysql://root:fl%4012345@localhost:33600/fedlearner'), \ + patch('envs.Envs.DB_HOST', 'localhost'), \ + patch('envs.Envs.DB_PORT', '33600'), \ + patch('envs.Envs.DB_DATABASE', 'fedlearner'), \ + patch('envs.Envs.DB_USERNAME', 'root'), \ + patch('envs.Envs.DB_PASSWORD', 'fl@12345'): + self.assertIsNone(Envs._check_db_envs()) # pylint: disable=protected-access + + def test_check_db_envs_invalid(self): + with patch('envs.Envs.SQLALCHEMY_DATABASE_URI', 'mysql+pymysql://root:proot@localhost:33600/fedlearner'), \ + patch('envs.Envs.DB_HOST', 'localhost'), \ + patch('envs.Envs.DB_PORT', '336'): + self.assertEqual(Envs._check_db_envs(), 'DB_PORT does not match') # pylint: disable=protected-access + with patch('envs.Envs.SQLALCHEMY_DATABASE_URI', 'mysql+pymysql://:@/?charset=utf8mb4db_psm=mysql.fedlearner'), \ + patch('envs.Envs.DB_HOST', 'localhost'): + 
self.assertEqual(Envs._check_db_envs(), 'DB_HOST does not match') # pylint: disable=protected-access + + def test_decode_url_codec(self): + self.assertIsNone(Envs._decode_url_codec(None)) # pylint: disable=protected-access + self.assertEqual(Envs._decode_url_codec('hahaha'), 'hahaha') # pylint: disable=protected-access + self.assertEqual(Envs._decode_url_codec('%20%40'), ' @') # pylint: disable=protected-access + + def test_system_info_valid(self): + with patch('envs.Envs.SYSTEM_INFO', '{"domain_name": "aaa.fedlearner.net", "name": "aaa.Inc"}'): + self.assertIsNone(Envs._check_system_info_envs()) # pylint: disable=protected-access + + def test_system_info_invalid(self): + with patch('envs.Envs.SYSTEM_INFO', '{"domain_name": "aaa.fedlearner.net"}'): + self.assertEqual('domain_name or name is not set into SYSTEM_INFO', Envs._check_system_info_envs()) # pylint: disable=protected-access + + with patch('envs.Envs.SYSTEM_INFO', '{"domain_name": "aaa.fedlearner.net"'): + self.assertIn('failed to parse SYSTEM_INFO', Envs._check_system_info_envs()) # pylint: disable=protected-access + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/es_configuration.py b/web_console_v2/api/es_configuration.py index a77694eec..96081f7f2 100644 --- a/web_console_v2/api/es_configuration.py +++ b/web_console_v2/api/es_configuration.py @@ -1,3 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
 import requests
 from elasticsearch import Elasticsearch, exceptions
@@ -14,42 +29,46 @@ def _configure_index_alias(es, alias_name):
     es.indices.create(
         # resolves to alias_name-yyyy.mm.dd-000001 in ES
         f'<{alias_name}-{{now/d}}-000001>',
-        body={"aliases": {alias_name: {"is_write_index": True}}}
-    )
+        body={'aliases': {
+            alias_name: {
+                'is_write_index': True
+            }
+        }})
 
 
 def _configure_kibana_index_patterns(kibana_addr, index_type):
     if not kibana_addr:
+        return
-        requests.post(
-            url='{}/api/saved_objects/index-pattern/{}'
-            .format(kibana_addr, ALIAS_NAME[index_type]),
-            json={'attributes': {
-                'title': ALIAS_NAME[index_type] + '*',
-                'timeFieldName': 'tags.process_time'
-                if index_type == 'metrics' else 'tags.event_time'}},
-            headers={'kbn-xsrf': 'true',
-                     'Content-Type': 'application/json'},
-            params={'overwrite': True}
-        )
+    requests.post(url=f'{kibana_addr}/api/saved_objects/index-pattern/{ALIAS_NAME[index_type]}',
+                  json={
+                      'attributes': {
+                          'title': ALIAS_NAME[index_type] + '*',
+                          'timeFieldName': 'tags.process_time' if index_type == 'metrics' else 'tags.event_time'
+                      }
+                  },
+                  headers={
+                      'kbn-xsrf': 'true',
+                      'Content-Type': 'application/json'
+                  },
+                  params={'overwrite': True})
 
 
 def put_ilm(es, ilm_name, hot_size='50gb', hot_age='10d', delete_age='30d'):
     ilm_body = {
-        "policy": {
-            "phases": {
-                "hot": {
-                    "min_age": "0ms",
-                    "actions": {
-                        "rollover": {
-                            "max_size": hot_size,
-                            "max_age": hot_age
+        'policy': {
+            'phases': {
+                'hot': {
+                    'min_age': '0ms',
+                    'actions': {
+                        'rollover': {
+                            'max_size': hot_size,
+                            'max_age': hot_age
                         }
                     }
                 },
-                "delete": {
-                    "min_age": delete_age,
-                    "actions": {
-                        "delete": {}
+                'delete': {
+                    'min_age': delete_age,
+                    'actions': {
+                        'delete': {}
                     }
                 }
             }
@@ -64,28 +83,25 @@ def _put_index_template(es, index_type, shards):
     es.indices.put_template(template_name, template_body)
 
 
-if __name__ == '__main__':
-    es = Elasticsearch([{'host': Envs.ES_HOST, 'port': Envs.ES_PORT}],
-                       http_auth=(Envs.ES_USERNAME, Envs.ES_PASSWORD))
+def es_config():
+    es = Elasticsearch([{'host': Envs.ES_HOST, 'port': Envs.ES_PORT}], http_auth=(Envs.ES_USERNAME, Envs.ES_PASSWORD))
     if int(es.info()['version']['number'].split('.')[0]) == 7:
         es.ilm.start()
     for index_type, alias_name in ALIAS_NAME.items():
-        put_ilm(es, 'fedlearner_{}_ilm'.format(index_type))
+        put_ilm(es, f'fedlearner_{index_type}_ilm')
         _put_index_template(es, index_type, shards=1)
         _configure_index_alias(es, alias_name)
         # Kibana index-patterns initialization
-        _configure_kibana_index_patterns(
-            Envs.KIBANA_SERVICE_ADDRESS, index_type
-        )
+        _configure_kibana_index_patterns(Envs.KIBANA_SERVICE_ADDRESS, index_type)
     # Filebeat's built-in ilm does not contain delete phase. Below will
     # add a delete phase to the existing policy.
     # NOTE: Due to compatibility, should put policy only when policy exists,
     # but no method to check existence. So use try-except to do the trick.
-    for filebeat_name in ('filebeat-7.7.1', 'filebeat-7.0.1'):
-        try:
-            es.ilm.get_lifecycle(policy=filebeat_name)
-        except exceptions.NotFoundError:
-            pass
-        else:
-            put_ilm(es, filebeat_name, hot_age='1d')
+    filebeat_name = 'filebeat'
+    try:
+        es.ilm.get_lifecycle(policy=filebeat_name)
+    except exceptions.NotFoundError:
+        pass
+    else:
+        put_ilm(es, filebeat_name, hot_age='1d')
     # Filebeat template and indices should be deployed during deployment.
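(Editor's note on the file above: the rollover scheme it configures pairs a dated write index behind a stable alias with an ILM policy that rolls the index over and eventually deletes old generations. Below is a minimal standalone sketch of the same pattern, assuming an Elasticsearch 7.x cluster and the elasticsearch-py 7.x client; the host, policy name, and alias are illustrative only, not taken from the code above.)

from elasticsearch import Elasticsearch

es = Elasticsearch([{'host': 'localhost', 'port': 9200}])

# ILM policy: roll the hot index over at 50gb or 10 days; delete indices 30 days later.
es.ilm.put_lifecycle(policy='demo_ilm',
                     body={
                         'policy': {
                             'phases': {
                                 'hot': {
                                     'actions': {
                                         'rollover': {
                                             'max_size': '50gb',
                                             'max_age': '10d'
                                         }
                                     }
                                 },
                                 'delete': {
                                     'min_age': '30d',
                                     'actions': {
                                         'delete': {}
                                     }
                                 }
                             }
                         }
                     })

# First dated index behind a write alias; each rollover creates demo-yyyy.mm.dd-000002,
# -000003, ... while readers keep querying the stable alias 'demo'.
es.indices.create('<demo-{now/d}-000001>', body={'aliases': {'demo': {'is_write_index': True}}})

(In the module above, new indices pick up the policy through the index template that _put_index_template() installs, typically via the index.lifecycle.name setting.)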
diff --git a/web_console_v2/api/fedlearner_webconsole/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/BUILD.bazel new file mode 100644 index 000000000..66220d6a0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/BUILD.bazel @@ -0,0 +1,157 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "db_lib", + srcs = ["db.py"], + imports = [".."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "@common_pymysql//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "db_lib_test", + size = "small", + srcs = [ + "db_test.py", + ], + imports = [".."], + main = "db_test.py", + deps = [ + ":db_lib", + ], +) + +py_library( + name = "initial_db_lib", + srcs = [ + "initial_db.py", + ], + data = [ + "//web_console_v2/api/fedlearner_webconsole/sys_preset_templates", + ], + imports = [".."], + deps = [ + ":db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:models_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "initial_db_lib_test", + size = "small", + srcs = [ + "initial_db_test.py", + ], + data = [ + "//web_console_v2/api/fedlearner_webconsole/sys_preset_templates", + ], + imports = [".."], + main = "initial_db_test.py", + deps = [ + ":db_lib", + ":initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "exceptions_lib", + srcs = ["exceptions.py"], + imports = [".."], + deps = ["@common_flask//:pkg"], +) + +py_test( + name = "exceptions_lib_test", + size = "small", + srcs = [ + "exceptions_test.py", + ], + imports = [".."], + main = "exceptions_test.py", + deps = [ + ":exceptions_lib", + ], +) + +py_library( + name = "app_lib", + srcs = [ + "app.py", + ], + imports = [".."], + deps = [ + ":db_lib", + ":exceptions_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api:logging_config_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/cleanup:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/debug:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/e2e:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/file:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:client_lib", + "//web_console_v2/api/fedlearner_webconsole/job:apis_lib", + 
"//web_console_v2/api/fedlearner_webconsole/k8s:k8s_watcher_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:middlewares_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/project:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/scheduler:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/serving:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/sparkapp:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:swagger_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:apis_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:apis_lib", + "@common_apispec_webframeworks//:pkg", + "@common_flasgger//:pkg", + "@common_flask//:pkg", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + "@common_webargs//:pkg", + "@common_werkzeug//:pkg", + ], +) + +py_test( + name = "app_test", + size = "medium", + srcs = ["app_test.py"], + imports = ["../.."], + main = "app_test.py", + deps = [ + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/testing:common_lib", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/__init__.py b/web_console_v2/api/fedlearner_webconsole/__init__.py deleted file mode 100644 index cd7504799..000000000 --- a/web_console_v2/api/fedlearner_webconsole/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 - -from fedlearner_webconsole import auth diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/algorithm/BUILD.bazel new file mode 100644 index 000000000..ecd438982 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/BUILD.bazel @@ -0,0 +1,195 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + # TODO(gezhengqiang): tunes the perf + size = "medium", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "service_lib_test", + srcs = [ + "service_test.py", + ], + imports = ["../.."], + main = "service_test.py", + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "fetcher_lib", + srcs = ["fetcher.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm/transmit", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:resource_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "fetcher_lib_test", + srcs = [ + "fetcher_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data/algorithm", + ], + imports = ["../.."], + main = "fetcher_test.py", + deps = [ + ":fetcher_lib", + ":models_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + 
"//web_console_v2/api/fedlearner_webconsole/algorithm/transmit", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "@common_python_slugify//:pkg", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":fetcher_lib", + ":models_lib", + ":service_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms:preset_algorithm_service_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:resource_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:sorting_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_flask//:pkg", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + "@common_werkzeug//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "large", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + ":models_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_google_protobuf//:protobuf_python", 
+ ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/proto/__init__.py b/web_console_v2/api/fedlearner_webconsole/algorithm/__init__.py similarity index 100% rename from web_console_v2/api/fedlearner_webconsole/proto/__init__.py rename to web_console_v2/api/fedlearner_webconsole/algorithm/__init__.py diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/apis.py b/web_console_v2/api/fedlearner_webconsole/algorithm/apis.py new file mode 100644 index 000000000..cd7a5b869 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/apis.py @@ -0,0 +1,1720 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import os +import json +import tempfile +import grpc +from flask import request, send_file +from typing import Optional +from http import HTTPStatus +from envs import Envs +from sqlalchemy import Column +from sqlalchemy.orm import Session +from sqlalchemy.sql.elements import ColumnElement +from werkzeug.utils import secure_filename +from flask_restful import Resource +from google.protobuf.json_format import ParseDict +from marshmallow import Schema, post_load, fields, validate +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.utils.decorators.pp_flask import admin_required, input_validator +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.file_tree import FileTreeBuilder +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder, SimpleExpression +from fedlearner_webconsole.utils.sorting import SorterBuilder, SortExpression, parse_expression +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmParameter +from fedlearner_webconsole.proto.filtering_pb2 import FilterOp +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.algorithm.utils import algorithm_project_path, algorithm_path, check_algorithm_file +from fedlearner_webconsole.algorithm.preset_algorithms.preset_algorithm_service \ + import create_algorithm_if_not_exists +from fedlearner_webconsole.algorithm.service import AlgorithmProjectService, PendingAlgorithmService, AlgorithmService +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.decorators.pp_flask import use_args, use_kwargs +from fedlearner_webconsole.utils.flask_utils import make_flask_response, get_current_user, FilterExpField +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, ReleaseStatus, Source, AlgorithmType, \ + PendingAlgorithm, normalize_path +from 
fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.exceptions import NoAccessException, NotFoundException, InvalidArgumentException, \ + UnauthorizedException, ResourceConflictException +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.rpc.v2.resource_service_client import ResourceServiceClient + +file_manager = FileManager() +file_operator = FileOperator() + + +class UploadAlgorithmFile(Schema): + path = fields.Str(required=True) + filename = fields.Str(required=True) + is_directory = fields.Boolean(required=False, load_default=False) + file = fields.Raw(required=False, type='file', load_default=None) + + @post_load() + def make(self, data, **kwargs): + return data + + +def _validate_parameter(parameter): + try: + ParseDict(json.loads(parameter), AlgorithmParameter()) + except: # pylint: disable=bare-except + return False + return True + + +class CreateAlgorithmProjectParams(Schema): + name = fields.Str(required=True) + type = fields.Str(required=True, validate=validate.OneOf([a.name for a in AlgorithmType])) + parameter = fields.Str(required=False, load_default='{}', validate=_validate_parameter) + comment = fields.Str(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['parameter'] = ParseDict(json.loads(data['parameter']), AlgorithmParameter()) + return data + + +class GetAlgorithmProjectParams(Schema): + name = fields.Str(required=False, load_default=None) + sources = fields.List(fields.Str(required=False, + load_default=None, + validate=validate.OneOf( + [Source.PRESET.name, Source.USER.name, Source.THIRD_PARTY.name])), + load_default=None) + type = fields.Str(required=False, load_default=None, validate=validate.OneOf([a.name for a in AlgorithmType])) + keyword = fields.Str(required=False, load_default=None) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) + filter_exp = FilterExpField(data_key='filter', required=False, load_default=None) + sorter_exp = fields.String(data_key='order_by', required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + return data + + +class PatchAlgorithmProjectParams(Schema): + parameter = fields.Dict(required=False, load_default=None) + comment = fields.Str(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + return data + + +def _get_project(project_id: int, session: Session) -> Project: + project = session.query(Project).get(project_id) + if project is None: + raise NotFoundException(f'project {project_id} is not found') + return project + + +def _get_participant(participant_id: int, session: Session) -> Participant: + participant = session.query(Participant).get(participant_id) + if participant is None: + raise NotFoundException(f'participant {participant_id} is not found') + return participant + + +def _get_algorithm(algo_id: int, session: Session, project_id: Optional[int] = None) -> Algorithm: + if project_id: + algo = session.query(Algorithm).filter_by(id=algo_id, project_id=project_id).first() + else: + algo = session.query(Algorithm).get(algo_id) + if algo is None: + raise NotFoundException(f'algorithm {algo_id} is not found') + return algo + + +def _get_algorithm_project(algo_project_id: int, + session: Session, + project_id: Optional[int] = None) -> AlgorithmProject: + if project_id: + algo_project = 
session.query(AlgorithmProject).filter_by(id=algo_project_id, project_id=project_id).first() + else: + algo_project = session.query(AlgorithmProject).get(algo_project_id) + if algo_project is None: + raise NotFoundException(f'algorithm project {algo_project_id} is not found') + return algo_project + + +def _get_pending_algorithm(pending_algorithm_id: int, session: Session) -> PendingAlgorithm: + pending_algo = session.query(PendingAlgorithm).get(pending_algorithm_id) + if pending_algo is None: + raise NotFoundException(f'pending algorithm {pending_algorithm_id} is not found') + return pending_algo + + +class AlgorithmApi(Resource): + + @credentials_required + @use_kwargs({'download': fields.Bool(required=False, load_default=False)}, location='query') + def get(self, download: Optional[bool], algo_id: int): + """Get the algorithm by id + --- + tags: + - algorithm + description: get the algorithm by id + parameters: + - in: path + name: algo_id + schema: + type: integer + - in: query + name: download + schema: + type: boolean + responses: + 200: + description: detail of the algorithm + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb' + """ + with db.session_scope() as session: + algo = _get_algorithm(algo_id, session) + # TODO(gezhengqiang): split download out for swagger + if not download: + return make_flask_response(algo.to_proto()) + files = file_manager.ls(algo.path, include_directory=True) + if len(files) == 0: + return make_flask_response(status=HTTPStatus.NO_CONTENT) + with tempfile.NamedTemporaryFile(suffix='.tar') as temp_file: + file_operator.archive_to([file.path for file in files], temp_file.name) + target_file_name = os.path.join(os.path.dirname(temp_file.name), f'{algo.name}.tar') + file_manager.copy(temp_file.name, target_file_name) + return send_file(filename_or_fp=target_file_name, mimetype='application/x-tar', as_attachment=True) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.ALGORITHM, op_type=Event.OperationType.DELETE) + def delete(self, algo_id: int): + """Delete the model + --- + tags: + - algorithm + description: delete the model + parameters: + - in: path + name: algo_id + schema: + type: integer + responses: + 204: + description: delete the model successfully + """ + with db.session_scope() as session: + algo = _get_algorithm(algo_id, session) + AlgorithmService(session).delete(algo) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.ALGORITHM, op_type=Event.OperationType.UPDATE) + @use_kwargs({'comment': fields.Str(required=False, load_default=None)}, location='json') + def patch(self, comment: Optional[str], algo_id: int): + """Update an algorithm + --- + tags: + - algorithm + description: update an algorithm + parameters: + - in: path + name: algo_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + comment: + type: string + responses: + 200: + description: detail of the algorithm + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb' + """ + with db.session_scope() as session: + algo = _get_algorithm(algo_id, session) + if comment: + algo.comment = comment + session.commit() + return make_flask_response(algo.to_proto()) + + +class AlgorithmsApi(Resource): + + @credentials_required + @use_kwargs({'algo_project_id': 
fields.Integer(required=False, load_default=None)}, location='query') + def get(self, project_id: int, algo_project_id: Optional[int]): + """Get the algorithms by algo_project_id + --- + tags: + - algorithm + description: get the algorithms by algo_project_id + parameters: + - in: path + name: project_id + schema: + type: integer + - in: query + name: algo_project_id + schema: + type: integer + responses: + 200: + description: detail of the algorithms + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb' + """ + with db.session_scope() as session: + query = session.query(Algorithm) + if project_id: # It means not to filter projects when project_id is 0 + query = query.filter_by(project_id=project_id) + if algo_project_id: + query = query.filter_by(algorithm_project_id=algo_project_id) + query = query.order_by(Algorithm.created_at.desc()) + algos = query.all() + results = [algo.to_proto() for algo in algos] + return make_flask_response(results) + + +class AlgorithmTreeApi(Resource): + + @credentials_required + def get(self, algo_id: int): + """Get the algorithm tree + --- + tags: + - algorithm + description: get the algorithm tree + parameters: + - in: path + name: algo_id + schema: + type: integer + responses: + 200: + description: the file tree of the algorithm + content: + application/json: + schema: + type: array + items: + name: FileTreeNode + type: object + properties: + filename: + type: string + path: + type: string + size: + type: integer + mtime: + type: integer + is_directory: + type: boolean + files: + type: array + items: + type: object + description: FileTreeNode + """ + with db.session_scope() as session: + algo = _get_algorithm(algo_id, session) + # relative path is used in returned file tree + file_trees = FileTreeBuilder(algo.path, relpath=True).build() + return make_flask_response(file_trees) + + +class AlgorithmFilesApi(Resource): + + @credentials_required + @use_kwargs({'path': fields.Str(required=True)}, location='query') + def get(self, path: str, algo_id: int): + """Get the algorithm file + --- + tags: + - algorithm + description: get the algorithm file + parameters: + - in: path + name: algo_id + schema: + type: integer + - in: query + name: path + schema: + type: string + responses: + 200: + description: content and path of the algorithm file + content: + application/json: + schema: + type: object + properties: + content: + type: string + path: + type: string + 400: + description: error exists when reading the file + 401: + description: unauthorized path under the algorithm + """ + with db.session_scope() as session: + algo = _get_algorithm(algo_id, session) + path = normalize_path(os.path.join(algo.path, path)) + if not algo.is_path_accessible(path): + raise UnauthorizedException(f'Unauthorized path {path} under algorithm {algo_id}') + try: + text = file_manager.read(path) + except Exception as e: + raise InvalidArgumentException(details=str(e)) from e + relpath = os.path.relpath(path, algo.path) + return make_flask_response({'content': text, 'path': relpath}) + + +def _build_release_status_query(exp: SimpleExpression) -> ColumnElement: + col: Column = getattr(AlgorithmProject, '_release_status') + return col.in_(exp.list_value.string_list) + + +class AlgorithmProjectsApi(Resource): + + FILTER_FIELDS = { + 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'release_status': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: _build_release_status_query}), + 
+        'type': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}),
+    }
+
+    SORTER_FIELDS = ['created_at', 'updated_at']
+
+    def __init__(self):
+        self._filter_builder = FilterBuilder(model_class=AlgorithmProject, supported_fields=self.FILTER_FIELDS)
+        self._sorter_builder = SorterBuilder(model_class=AlgorithmProject, supported_fields=self.SORTER_FIELDS)
+
+    @credentials_required
+    @use_args(GetAlgorithmProjectParams(), location='query')
+    def get(self, params: dict, project_id: int):
+        """Get the list of the algorithm project
+        ---
+        tags:
+        - algorithm
+        description: get the list of the algorithm project
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        - in: query
+          name: name
+          schema:
+            type: string
+        - in: query
+          name: sources
+          schema:
+            type: array
+            items:
+              type: string
+        - in: query
+          name: type
+          schema:
+            type: string
+        - in: query
+          name: keyword
+          schema:
+            type: string
+        - in: query
+          name: page
+          schema:
+            type: integer
+        - in: query
+          name: page_size
+          schema:
+            type: integer
+        - in: query
+          name: filter
+          schema:
+            type: string
+        - in: query
+          name: order_by
+          schema:
+            type: string
+        responses:
+          200:
+            description: list of the algorithm projects
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+        """
+        with db.session_scope() as session:
+            query = session.query(AlgorithmProject)
+            if params['name']:
+                query = query.filter_by(name=params['name'])
+            if project_id:
+                query = query.filter_by(project_id=project_id)
+            if params['type']:
+                query = query.filter_by(type=AlgorithmType[params['type']])
+            if params['sources']:
+                sources = [Source[n] for n in params['sources']]
+                query = query.filter(AlgorithmProject.source.in_(sources))
+            if params['keyword']:
+                query = query.filter(AlgorithmProject.name.like(f'%{params["keyword"]}%'))
+            if params['filter_exp']:
+                try:
+                    query = self._filter_builder.build_query(query, params['filter_exp'])
+                except ValueError as e:
+                    raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e
+            try:
+                if params['sorter_exp'] is not None:
+                    sorter_exp = parse_expression(params['sorter_exp'])
+                else:
+                    sorter_exp = SortExpression(field='created_at', is_asc=False)
+                query = self._sorter_builder.build_query(query, sorter_exp)
+            except ValueError as e:
+                raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e
+            pagination = paginate(query, params['page'], params['page_size'])
+            data = [d.to_proto() for d in pagination.get_items()]
+            return make_flask_response(data=data, page_meta=pagination.get_metadata())
+
+    @input_validator
+    @credentials_required
+    @use_args(CreateAlgorithmProjectParams(), location='form')
+    @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.CREATE)
+    def post(self, param: dict, project_id: int):
+        """Create an algorithm project
+        ---
+        tags:
+        - algorithm
+        description: create an algorithm project
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/CreateAlgorithmProjectParams'
+        responses:
+          201:
+            description: detail of the algorithm project
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+          400:
+            description: the project does not exist
+          403:
+            description: the algorithm project is forbidden to create
+          409:
+            description: the algorithm project already exists
+        """
+        # TODO(hangweiqiang): clear the file if error in subsequent operation
+        if not Flag.TRUSTED_COMPUTING_ENABLED.value and param['type'] == AlgorithmType.TRUSTED_COMPUTING.name:
+            raise NoAccessException(message='trusted computing is not enabled')
+        file = None
+        if 'file' in request.files:
+            file = request.files['file']
+        user = get_current_user()
+        path = algorithm_project_path(Envs.STORAGE_ROOT, param['name'])
+        with db.session_scope() as session, check_algorithm_file(path):
+            project = session.query(Project).get(project_id)
+            if project is None:
+                raise InvalidArgumentException(details=f'project {project_id} does not exist')
+            algo_project = session.query(AlgorithmProject).filter_by(name=param['name'],
+                                                                     source=Source.USER,
+                                                                     project_id=project_id).first()
+            if algo_project is not None:
+                raise ResourceConflictException(message=f'algorithm project {param["name"]} already exists')
+            file_manager.mkdir(path)
+            algo_project = AlgorithmProjectService(session).create_algorithm_project(
+                name=param['name'],
+                project_id=project_id,
+                algorithm_type=AlgorithmType[param['type']],
+                username=user.username,
+                parameter=param['parameter'],
+                comment=param['comment'],
+                file=file,
+                path=path)
+            session.commit()
+        return make_flask_response(algo_project.to_proto(), status=HTTPStatus.CREATED)
+
+
+class AlgorithmProjectApi(Resource):
+
+    @credentials_required
+    def get(self, algo_project_id: int):
+        """Get the algorithm project by id
+        ---
+        tags:
+        - algorithm
+        description: get the algorithm project by id
+        parameters:
+        - in: path
+          name: algo_project_id
+          schema:
+            type: integer
+        responses:
+          200:
+            description: detail of the algorithm project
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+        """
+        with db.session_scope() as session:
+            algo_project = _get_algorithm_project(algo_project_id, session)
+            result = algo_project.to_proto()
+        return make_flask_response(result)
+
+    @input_validator
+    @credentials_required
+    @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.UPDATE)
+    @use_args(PatchAlgorithmProjectParams(), location='json')
+    def patch(self, params: dict, algo_project_id: int):
+        """Update the algorithm project
+        ---
+        tags:
+        - algorithm
+        description: update the algorithm project
+        parameters:
+        - in: path
+          name: algo_project_id
+          schema:
+            type: integer
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/PatchAlgorithmProjectParams'
+        responses:
+          200:
+            description: detail of the algorithm project
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+        """
+        with db.session_scope() as session:
+            algo_project = _get_algorithm_project(algo_project_id, session)
+            if algo_project.source == Source.THIRD_PARTY:
+                raise NoAccessException(message='algo_project from THIRD_PARTY can not be edited')
+            if params['comment']:
+                algo_project.comment = params['comment']
+            if params['parameter']:
+                parameter = ParseDict(params['parameter'], AlgorithmParameter())
+                algo_project.set_parameter(parameter)
+                algo_project.release_status = ReleaseStatus.UNRELEASED
+            session.commit()
+        return make_flask_response(algo_project.to_proto())
+
+    @credentials_required
+    @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.DELETE)
+    def delete(self, algo_project_id: int):
+        """Delete the algorithm project
+        ---
+        tags:
+        - algorithm
+        description: delete the algorithm
project + parameters: + - in: path + name: algo_project_id + schema: + type: integer + responses: + 204: + description: delete the algorithm project successfully + """ + with db.session_scope() as session: + algo_project = _get_algorithm_project(algo_project_id, session) + AlgorithmProjectService(session).delete(algo_project) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class AlgorithmProjectTreeApi(Resource): + + @credentials_required + def get(self, algo_project_id: int): + """Get the algorithm project file tree + --- + tags: + - algorithm + description: get the algorithm project file tree + parameters: + - in: path + name: algo_project_id + schema: + type: integer + responses: + 200: + description: the file tree of the algorithm project + content: + application/json: + schema: + type: array + items: + name: FileTreeNode + type: object + properties: + filename: + type: string + path: + type: string + size: + type: integer + mtime: + type: integer + is_directory: + type: boolean + files: + type: array + items: + type: object + description: FileTreeNode + """ + with db.session_scope() as session: + algo_project = _get_algorithm_project(algo_project_id, session) + # relative path is used in returned file tree + # TODO(gezhengqiang): change to return empty array + if algo_project.path is None: + return make_flask_response([]) + file_trees = FileTreeBuilder(algo_project.path, relpath=True).build() + return make_flask_response(file_trees) + + +class AlgorithmProjectFilesApi(Resource): + + @staticmethod + def _mark_algorithm_project_unreleased(algo_project_id: int): + with db.session_scope() as session: + algo_project = _get_algorithm_project(algo_project_id, session) + algo_project.release_status = ReleaseStatus.UNRELEASED + session.commit() + + @credentials_required + @use_kwargs({'path': fields.Str(required=True)}, location='query') + def get(self, path: str, algo_project_id: int): + """Get the files of the algorithm project + --- + tags: + - algorithm + description: get the files of the algorithm project + parameters: + - in: path + name: algo_project_id + schema: + type: integer + - in: query + name: path + schema: + type: string + responses: + 200: + description: content and path of the algorithm file + content: + application/json: + schema: + type: object + properties: + content: + type: string + path: + type: string + 400: + description: error exists when reading the file + 401: + description: unauthorized path under the algorithm + """ + with db.session_scope() as session: + algo_project = _get_algorithm_project(algo_project_id, session) + path = normalize_path(os.path.join(algo_project.path, path)) + if not algo_project.is_path_accessible(path): + raise UnauthorizedException(f'Unauthorized path {path} under algorithm {algo_project_id}') + try: + content = file_manager.read(path) + except Exception as e: + raise InvalidArgumentException(details=str(e)) from e + relpath = os.path.relpath(path, algo_project.path) + return make_flask_response({'content': content, 'path': relpath}) + + @credentials_required + @use_kwargs({'path': fields.Str(required=True), 'filename': fields.Str(required=True)}, location='form') + @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.UPDATE) + def post(self, path: str, filename: str, algo_project_id: int): + """Upload the algorithm project file + --- + tags: + - algorithm + description: upload the algorithm project file + parameters: + - in: path + name: algo_project_id + schema: 
+ type: integer + - in: form + name: path + schema: + type: string + - in: form + name: filename + schema: + type: string + responses: + 200: + description: filename and path of the algorithm project file + content: + application/json: + schema: + type: object + properties: + path: + type: string + filename: + type: string + 400: + description: file does not exist or is not directory + 401: + description: unauthorized path under the algorithm project + """ + with db.session_scope() as session: + algo_project = _get_algorithm_project(algo_project_id, session) + # TODO(hangweiqiang): check algorithm file accessibility in decorator + path = normalize_path(os.path.join(algo_project.path, path)) + if not algo_project.is_path_accessible(path): + raise UnauthorizedException(f'Unauthorized path {path} under algorithm project {algo_project_id}') + if not file_manager.isdir(path): + raise InvalidArgumentException(details=f'file {str(path)} does not exist or is not directory') + secure_file_name = secure_filename(filename) + file_path = normalize_path(os.path.join(path, secure_file_name)) + file = request.files['file'] + file_content = file.read() + file_manager.write(file_path, file_content) + self._mark_algorithm_project_unreleased(algo_project_id) + relpath = os.path.relpath(path, algo_project.path) + return make_flask_response({'path': relpath, 'filename': secure_file_name}) + + @credentials_required + @use_args(UploadAlgorithmFile(), location='form') + @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.UPDATE) + def put(self, param: dict, algo_project_id: int): + """put the algorithm project file + --- + tags: + - algorithm + description: put the algorithm project file + parameters: + - in: path + name: algo_project_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/UploadAlgorithmFile' + responses: + 200: + description: content, path and filename of the algorithm project + content: + application/json: + schema: + type: object + properties: + content: + type: string + path: + type: string + filename: + type: string + 400: + description: file does not exist or is not directory or file path already exists + 401: + description: unauthorized path under the algorithm project + """ + with db.session_scope() as session: + algo_project = _get_algorithm_project(algo_project_id, session) + path = normalize_path(os.path.join(algo_project.path, param['path'])) + if not algo_project.is_path_accessible(path): + raise UnauthorizedException(f'Unauthorized path {path} under algorithm project {algo_project_id}') + if not file_manager.isdir(path): + raise InvalidArgumentException(details=f'file {str(param["path"])} does not exist or is not directory') + secure_file_name = secure_filename(param['filename']) + file_path = os.path.join(path, secure_file_name) + file_content = None + if param['is_directory']: + if file_manager.exists(file_path): + raise InvalidArgumentException(details=f'file {str(param["path"])} already exists') + file_manager.mkdir(file_path) + else: + file_content = param['file'] + file_manager.write(file_path, file_content or '') + if isinstance(file_content, bytes): + file_content = file_content.decode('utf-8') + self._mark_algorithm_project_unreleased(algo_project_id) + relpath = os.path.relpath(path, algo_project.path) + return make_flask_response({'content': file_content, 'path': relpath, 'filename': secure_file_name}) + + @credentials_required + @use_kwargs({'path': 
fields.Str(required=True)}, location='query')
+    @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.UPDATE)
+    def delete(self, path: str, algo_project_id: int):
+        """Delete the algorithm project file
+        ---
+        tags:
+        - algorithm
+        description: delete the algorithm project file
+        parameters:
+        - in: path
+          name: algo_project_id
+          schema:
+            type: integer
+        - in: query
+          name: path
+          schema:
+            type: string
+        responses:
+          204:
+            description: delete the algorithm project file successfully
+          400:
+            description: error exists when removing the file
+          401:
+            description: unauthorized path under the algorithm project
+        """
+        with db.session_scope() as session:
+            algo_project = _get_algorithm_project(algo_project_id, session)
+            path = normalize_path(os.path.join(algo_project.path, path))
+            if not algo_project.is_path_accessible(path):
+                raise UnauthorizedException(f'Unauthorized path {path} under algorithm project {algo_project_id}')
+            try:
+                file_manager.remove(path)
+            except Exception as e:
+                raise InvalidArgumentException(details=str(e)) from e
+        self._mark_algorithm_project_unreleased(algo_project_id)
+        return make_flask_response(status=HTTPStatus.NO_CONTENT)
+
+    @credentials_required
+    @use_kwargs({'path': fields.Str(required=True), 'dest': fields.Str(required=True)}, location='json')
+    @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.UPDATE)
+    def patch(self, path: str, dest: str, algo_project_id: int):
+        """Patch the algorithm project file
+        ---
+        tags:
+        - algorithm
+        description: patch the algorithm project file
+        parameters:
+        - in: path
+          name: algo_project_id
+          schema:
+            type: integer
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  path:
+                    type: string
+                  dest:
+                    type: string
+        responses:
+          204:
+            description: patch the algorithm project file successfully
+          401:
+            description: unauthorized path or dest under the algorithm project
+        """
+        with db.session_scope() as session:
+            algo_project = _get_algorithm_project(algo_project_id, session)
+            path = normalize_path(os.path.join(algo_project.path, path))
+            dest = normalize_path(os.path.join(algo_project.path, dest))
+            if not algo_project.is_path_accessible(path):
+                raise UnauthorizedException(f'Unauthorized path {path} under algorithm project {algo_project_id}')
+            if not algo_project.is_path_accessible(dest):
+                raise UnauthorizedException(f'Unauthorized dest {dest} under algorithm project {algo_project_id}')
+            try:
+                file_manager.rename(path, dest)
+            except Exception as e:
+                raise InvalidArgumentException(details=str(e)) from e
+        self._mark_algorithm_project_unreleased(algo_project_id)
+        return make_flask_response(status=HTTPStatus.NO_CONTENT)
+
+
+class ParticipantAlgorithmProjectsApi(Resource):
+
+    @credentials_required
+    @use_kwargs(
+        {
+            'filter_exp': FilterExpField(data_key='filter', required=False, load_default=None),
+            'sorter_exp': fields.String(data_key='order_by', required=False, load_default=None)
+        },
+        location='query')
+    def get(self, project_id: int, participant_id: int, filter_exp: Optional[str], sorter_exp: Optional[str]):
+        """Get the list of the participant algorithm project
+        ---
+        tags:
+        - algorithm
+        description: get the list of the participant algorithm project
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        - in: query
+          name: filter
+          schema:
+            type: string
+        - in: query
+          name: order_by
+          schema:
+            type: string
+        responses:
+          200:
+            description: list of the participant algorithm projects
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+        """
+        with db.session_scope() as session:
+            project = _get_project(project_id, session)
+            participants = project.participants
+            if participant_id:
+                participants = [_get_participant(participant_id, session)]
+            algorithm_projects = []
+            for participant in participants:
+                try:
+                    client = ResourceServiceClient.from_project_and_participant(participant.domain_name, project.name)
+                    participant_algorithm_projects = client.list_algorithm_projects(
+                        filter_exp=filter_exp).algorithm_projects
+                    for algo_project in participant_algorithm_projects:
+                        algo_project.participant_id = participant.id
+                    algorithm_projects.extend(participant_algorithm_projects)
+                except grpc.RpcError as e:
+                    logging.warning(f'[algorithm] failed to get {participant.type} participant {participant.id}\'s '
+                                    f'algorithm projects with grpc code {e.code()} and details {e.details()}')
+        if len(algorithm_projects) != 0:
+            field = 'created_at'
+            is_asc = False
+            if sorter_exp:
+                sorter_exp = parse_expression(sorter_exp)
+                field = sorter_exp.field
+                is_asc = sorter_exp.is_asc
+            try:
+                algorithm_projects = sorted(algorithm_projects, key=lambda x: getattr(x, field), reverse=not is_asc)
+            except AttributeError as e:
+                raise InvalidArgumentException(details=f'Invalid sort attribute: {str(e)}') from e
+        return make_flask_response(algorithm_projects)
+
+
+class ParticipantAlgorithmProjectApi(Resource):
+
+    def get(self, project_id: int, participant_id: int, algorithm_project_uuid: str):
+        """Get the participant algorithm project by algorithm_project_uuid
+        ---
+        tags:
+        - algorithm
+        description: get the participant algorithm project by algorithm_project_uuid
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        - in: path
+          name: algorithm_project_uuid
+          schema:
+            type: string
+        responses:
+          200:
+            description: detail of the participant algorithm project
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+
+        """
+        with db.session_scope() as session:
+            project = _get_project(project_id, session)
+            participant = _get_participant(participant_id, session)
+            client = ResourceServiceClient.from_project_and_participant(participant.domain_name, project.name)
+            algorithm_project = client.get_algorithm_project(algorithm_project_uuid=algorithm_project_uuid)
+        return make_flask_response(algorithm_project)
+
+
+class ParticipantAlgorithmsApi(Resource):
+
+    @credentials_required
+    @use_kwargs({'algorithm_project_uuid': fields.Str(required=True)}, location='query')
+    def get(self, project_id: int, participant_id: int, algorithm_project_uuid: str):
+        """Get the participant algorithms by algorithm_project_uuid
+        ---
+        tags:
+        - algorithm
+        description: get the participant algorithms by algorithm_project_uuid
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        - in: query
+          name: algorithm_project_uuid
+          schema:
+            type: string
+        responses:
+          200:
+            description: list of the participant algorithms
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb'
+        """
+
with db.session_scope() as session: + project = _get_project(project_id, session) + participant = _get_participant(participant_id, session) + client = ResourceServiceClient.from_project_and_participant(participant.domain_name, project.name) + participant_algorithms = client.list_algorithms(algorithm_project_uuid).algorithms + for algo in participant_algorithms: + algo.participant_id = participant_id + algorithms = sorted(participant_algorithms, key=lambda x: x.created_at, reverse=True) + return make_flask_response(algorithms) + + +class ParticipantAlgorithmApi(Resource): + + @credentials_required + def get(self, project_id: int, participant_id: int, algorithm_uuid: str): + """Get the participant algorithm by algorithm_uuid + --- + tags: + - algorithm + description: get the participant algorithm by algorithm_uuid + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: participant_id + schema: + type: integer + - in: path + name: algorithm_uuid + schema: + type: string + responses: + 200: + description: detail of the participant algorithm + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb' + + """ + with db.session_scope() as session: + project = _get_project(project_id, session) + participant = _get_participant(participant_id, session) + client = ResourceServiceClient.from_project_and_participant(participant.domain_name, project.name) + algorithm = client.get_algorithm(algorithm_uuid) + return make_flask_response(algorithm) + + +class ParticipantAlgorithmTreeApi(Resource): + + @credentials_required + def get(self, project_id: int, participant_id: int, algorithm_uuid: str): + """Get the participant algorithm tree + --- + tags: + - algorithm + description: get the participant algorithm tree + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: participant_id + schema: + type: integer + - in: path + name: algorithm_uuid + schema: + type: string + responses: + 200: + description: the file tree of the participant algorithm + content: + application/json: + schema: + type: array + items: + name: FileTreeNode + type: object + properties: + filename: + type: string + path: + type: string + size: + type: integer + mtime: + type: integer + is_directory: + type: boolean + files: + type: array + items: + type: object + description: FileTreeNode + """ + algorithm = AlgorithmFetcher(project_id=project_id).get_algorithm_from_participant( + algorithm_uuid=algorithm_uuid, participant_id=participant_id) + + # relative path is used in returned file tree + file_trees = FileTreeBuilder(algorithm.path, relpath=True).build() + return make_flask_response(file_trees) + + +class ParticipantAlgorithmFilesApi(Resource): + + @credentials_required + @use_kwargs({'path': fields.Str(required=True)}, location='query') + def get(self, project_id: int, participant_id: int, algorithm_uuid: str, path: str): + """Get the algorithm file + --- + tags: + - algorithm + description: get the algorithm file + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: participant_id + schema: + type: integer + - in: path + name: algorithm_uuid + schema: + type: string + - in: query + name: path + schema: + type: string + responses: + 200: + description: content and path of the participant algorithm file + content: + application/json: + schema: + type: object + properties: + content: + type: string + path: + type: string + 400: + description: error exists when reading the file + 
401:
+            description: unauthorized path under the algorithm
+        """
+        algorithm = AlgorithmFetcher(project_id=project_id).get_algorithm_from_participant(
+            algorithm_uuid=algorithm_uuid, participant_id=participant_id)
+        path = normalize_path(os.path.join(algorithm.path, path))
+        if not normalize_path(path).startswith(algorithm.path):
+            raise UnauthorizedException(f'Unauthorized path {path} under the participant algorithm {algorithm_uuid}')
+        try:
+            text = file_manager.read(path)
+        except Exception as e:
+            raise InvalidArgumentException(details=str(e)) from e
+        relpath = os.path.relpath(path, algorithm.path)
+        return make_flask_response({'content': text, 'path': relpath})
+
+
+class FetchAlgorithmApi(Resource):
+
+    @credentials_required
+    def get(self, project_id: int, algorithm_uuid: str):
+        """Get the algorithm by uuid
+        ---
+        tags:
+        - algorithm
+        description: get the algorithm by uuid, whether it is from your own side or from a participant
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        - in: path
+          name: algorithm_uuid
+          schema:
+            type: string
+        responses:
+          200:
+            description: detail of the algorithm
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb'
+        """
+        algorithm = AlgorithmFetcher(project_id=project_id).get_algorithm(uuid=algorithm_uuid)
+        return make_flask_response(algorithm)
+
+
+class UpdatePresetAlgorithmApi(Resource):
+
+    @credentials_required
+    @admin_required
+    @emits_event(resource_type=Event.ResourceType.PRESET_ALGORITHM, op_type=Event.OperationType.UPDATE)
+    def post(self):
+        """Update the preset algorithm
+        ---
+        tags:
+        - algorithm
+        description: update the preset algorithm
+        responses:
+          200:
+            description: detail of the preset algorithm projects
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmProjectPb'
+        """
+        create_algorithm_if_not_exists()
+        with db.session_scope() as session:
+            algo_projects = session.query(AlgorithmProject).filter_by(source=Source.PRESET).all()
+            results = [project.to_proto() for project in algo_projects]
+        return make_flask_response(results)
+
+
+class ReleaseAlgorithmApi(Resource):
+
+    @input_validator
+    @credentials_required
+    @use_kwargs({'comment': fields.Str(required=False, load_default=None, location='body')})
+    @emits_event(resource_type=Event.ResourceType.ALGORITHM, op_type=Event.OperationType.CREATE)
+    def post(self, comment: Optional[str], algo_project_id: int):
+        """Release the algorithm
+        ---
+        tags:
+        - algorithm
+        description: release the algorithm
+        parameters:
+        - in: path
+          name: algo_project_id
+          schema:
+            type: integer
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  comment:
+                    type: string
+        responses:
+          200:
+            description: detail of the algorithm
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb'
+        """
+        user = get_current_user()
+        with db.session_scope() as session:
+            algorithm_project = _get_algorithm_project(algo_project_id, session)
+            if algorithm_project.source == Source.THIRD_PARTY:
+                raise NoAccessException(message='algo_project from THIRD_PARTY can not be released')
+            version = algorithm_project.latest_version + 1
+            path = algorithm_path(Envs.STORAGE_ROOT, algorithm_project.name, version)
+            with check_algorithm_file(path):
+                algo = AlgorithmProjectService(session).release_algorithm(algorithm_project=algorithm_project,
+                                                                          username=user.username,
comment=comment, + path=path) + session.commit() + return make_flask_response(algo.to_proto()) + + +class PublishAlgorithmApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.ALGORITHM, op_type=Event.OperationType.UPDATE) + def post(self, algorithm_id: int, project_id: int): + """Publish the algorithm + --- + tags: + - algorithm + description: publish the algorithm + parameters: + - in: path + name: algorithm_id + schema: + type: integer + - in: path + name: project_id + schema: + type: integer + responses: + 200: + description: detail of the algorithm + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb' + """ + with db.session_scope() as session: + _get_algorithm(algorithm_id, session, project_id) + algorithm = AlgorithmService(session).publish_algorithm(algorithm_id, project_id) + session.commit() + return make_flask_response(algorithm.to_proto()) + + +class UnpublishAlgorithmApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.ALGORITHM, op_type=Event.OperationType.UPDATE) + def post(self, algorithm_id: int, project_id: int): + """Unpublish the algorithm + --- + tags: + - algorithm + description: unpublish the algorithm + parameters: + - in: path + name: algorithm_id + schema: + type: integer + - in: path + name: project_id + schema: + type: integer + responses: + 200: + description: detail of the algorithm + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.AlgorithmPb' + """ + with db.session_scope() as session: + _get_algorithm(algorithm_id, session, project_id) + algorithm = AlgorithmService(session).unpublish_algorithm(algorithm_id, project_id) + session.commit() + return make_flask_response(algorithm.to_proto()) + + +class PendingAlgorithmsApi(Resource): + + @credentials_required + def get(self, project_id: int): + """Get the list of the pending algorithms + --- + tags: + - algorithm + description: get the list of the pending algorithms + parameters: + - in: path + name: project_id + schema: + type: integer + responses: + 200: + description: list of the pending algorithms + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.PendingAlgorithmPb' + """ + with db.session_scope() as session: + query = session.query(PendingAlgorithm) + if project_id: + query = query.filter_by(project_id=project_id) + query = query.order_by(PendingAlgorithm.created_at.desc()) + pending_algorithms = query.all() + results = [algo.to_proto() for algo in pending_algorithms] + return make_flask_response(results) + + +class AcceptPendingAlgorithmApi(Resource): + + @input_validator + @credentials_required + @use_kwargs({ + 'name': fields.Str(required=True), + 'comment': fields.Str(required=False, load_default=None, location='body') + }) + @emits_event(resource_type=Event.ResourceType.ALGORITHM_PROJECT, op_type=Event.OperationType.CREATE) + def post(self, name: str, comment: Optional[str], project_id: int, pending_algorithm_id: int): + """Accept the pending algorithm + --- + tags: + - algorithm + description: accept the pending algorithm + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: pending_algorithm_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + name: + type: string + comment: + type: string + responses: + 204: + description: accept the pending algorithm 
successfully + """ + del project_id + with db.session_scope() as session: + pending_algo = _get_pending_algorithm(pending_algorithm_id, session) + algo_project = session.query(AlgorithmProject).filter_by( + uuid=pending_algo.algorithm_project_uuid).filter_by(source=Source.THIRD_PARTY).first() + user = get_current_user() + if algo_project is None: + algo_project = PendingAlgorithmService(session).create_algorithm_project(pending_algorithm=pending_algo, + username=user.username, + name=name, + comment=comment) + session.flush() + algo_path = algorithm_path(Envs.STORAGE_ROOT, name, pending_algo.version) + with check_algorithm_file(algo_path): + pending_algo.deleted_at = now() + PendingAlgorithmService(session).create_algorithm(pending_algorithm=pending_algo, + algorithm_project_id=algo_project.id, + username=user.username, + path=algo_path, + comment=comment) + algo_project.latest_version = pending_algo.version + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class PendingAlgorithmTreeApi(Resource): + + @credentials_required + def get(self, project_id: int, pending_algo_id: int): + """Get the file tree of the pending algorithm + --- + tags: + - algorithm + description: get the file tree of the pending algorithm + parameters: + - in: path + name: pending_algo_id + schema: + type: integer + - in: path + name: project_id + schema: + type: integer + responses: + 200: + description: the file tree of the pending algorithm + content: + application/json: + schema: + type: array + items: + name: FileTreeNode + type: object + properties: + filename: + type: string + path: + type: string + size: + type: integer + mtime: + type: integer + is_directory: + type: boolean + files: + type: array + items: + type: object + description: FileTreeNode + """ + with db.session_scope() as session: + pending_algo = _get_pending_algorithm(pending_algo_id, session) + # relative path is used in returned file tree + file_trees = FileTreeBuilder(pending_algo.path, relpath=True).build() + return make_flask_response(file_trees) + + +class PendingAlgorithmFilesApi(Resource): + + @credentials_required + @use_kwargs({'path': fields.Str(required=True)}, location='query') + def get(self, path: str, project_id: int, pending_algo_id: int): + """Get the files of the pending algorithm + --- + tags: + - algorithm + description: get the files of the pending algorithm + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: pending_algo_id + schema: + type: integer + - in: query + name: path + schema: + type: string + responses: + 200: + description: content and path of the pending algorithm file + content: + application/json: + schema: + type: object + properties: + content: + type: string + path: + type: string + 400: + description: error exists when reading the file + 401: + description: unauthorized path under the pending algorithm + """ + with db.session_scope() as session: + pending_algo = _get_pending_algorithm(pending_algo_id, session) + path = normalize_path(os.path.join(pending_algo.path, path)) + if not pending_algo.is_path_accessible(path): + raise UnauthorizedException(f'Unauthorized path {path} under pending algorithm {pending_algo_id}') + try: + text = file_manager.read(path) + except Exception as e: + raise InvalidArgumentException(details=str(e)) from e + relpath = os.path.relpath(path, pending_algo.path) + return make_flask_response({'content': text, 'path': relpath}) + + +def initialize_algorithm_apis(api): + # TODO(gezhengqiang): add project in the 
url
+    api.add_resource(AlgorithmApi, '/algorithms/<int:algo_id>')
+    api.add_resource(AlgorithmsApi, '/projects/<int:project_id>/algorithms')
+    api.add_resource(AlgorithmTreeApi, '/algorithms/<int:algo_id>/tree')
+    api.add_resource(AlgorithmFilesApi, '/algorithms/<int:algo_id>/files')
+    api.add_resource(AlgorithmProjectsApi, '/projects/<int:project_id>/algorithm_projects')
+    api.add_resource(AlgorithmProjectApi, '/algorithm_projects/<int:algo_project_id>')
+    api.add_resource(AlgorithmProjectTreeApi, '/algorithm_projects/<int:algo_project_id>/tree')
+    api.add_resource(AlgorithmProjectFilesApi, '/algorithm_projects/<int:algo_project_id>/files')
+    api.add_resource(ParticipantAlgorithmProjectsApi,
+                     '/projects/<int:project_id>/participants/<int:participant_id>/algorithm_projects')
+    api.add_resource(
+        ParticipantAlgorithmProjectApi, '/projects/<int:project_id>/participants/<int:participant_id>/'
+        'algorithm_projects/<string:algorithm_project_uuid>')
+    api.add_resource(ParticipantAlgorithmsApi,
+                     '/projects/<int:project_id>/participants/<int:participant_id>/algorithms')
+    api.add_resource(ParticipantAlgorithmApi,
+                     '/projects/<int:project_id>/participants/<int:participant_id>/algorithms/<string:algorithm_uuid>')
+    api.add_resource(
+        ParticipantAlgorithmTreeApi,
+        '/projects/<int:project_id>/participants/<int:participant_id>/algorithms/<string:algorithm_uuid>/tree')
+    api.add_resource(
+        ParticipantAlgorithmFilesApi,
+        '/projects/<int:project_id>/participants/<int:participant_id>/algorithms/<string:algorithm_uuid>/files')
+    api.add_resource(FetchAlgorithmApi, '/projects/<int:project_id>/algorithms/<string:algorithm_uuid>')
+    # TODO(gezhengqiang): algorithm project publish has been changed to release, the api will be deleted in future
+    api.add_resource(ReleaseAlgorithmApi,
+                     '/algorithm_projects/<int:algo_project_id>:publish',
+                     endpoint='algorithm_project:publish')
+    api.add_resource(ReleaseAlgorithmApi,
+                     '/algorithm_projects/<int:algo_project_id>:release',
+                     endpoint='algorithm_project:release')
+    api.add_resource(PublishAlgorithmApi, '/projects/<int:project_id>/algorithms/<int:algorithm_id>:publish')
+    api.add_resource(UnpublishAlgorithmApi, '/projects/<int:project_id>/algorithms/<int:algorithm_id>:unpublish')
+    api.add_resource(PendingAlgorithmsApi, '/projects/<int:project_id>/pending_algorithms')
+    api.add_resource(AcceptPendingAlgorithmApi,
+                     '/projects/<int:project_id>/pending_algorithms/<int:pending_algorithm_id>:accept')
+    api.add_resource(PendingAlgorithmTreeApi,
+                     '/projects/<int:project_id>/pending_algorithms/<int:pending_algo_id>/tree')
+    api.add_resource(PendingAlgorithmFilesApi,
+                     '/projects/<int:project_id>/pending_algorithms/<int:pending_algo_id>/files')
+    api.add_resource(UpdatePresetAlgorithmApi, '/preset_algorithms:update')
+    schema_manager.append(UploadAlgorithmFile)
+    schema_manager.append(CreateAlgorithmProjectParams)
+    schema_manager.append(GetAlgorithmProjectParams)
+    schema_manager.append(PatchAlgorithmProjectParams)
diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/apis_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/apis_test.py
new file mode 100644
index 000000000..bb1f924a1
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/algorithm/apis_test.py
@@ -0,0 +1,1464 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import os +import json +import tarfile +import unittest +import tempfile +import urllib.parse +import grpc + +from envs import Envs +from io import BytesIO +from http import HTTPStatus +from datetime import datetime +from pathlib import Path +from unittest.mock import patch +from testing.common import BaseTestCase +from testing.rpc.client import FakeRpcError +from google.protobuf.json_format import ParseDict +from fedlearner_webconsole.db import db +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.filtering import parse_expression +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.algorithm.transmit.sender import AlgorithmSender +from fedlearner_webconsole.algorithm.models import (Algorithm, AlgorithmType, Source, AlgorithmProject, + PendingAlgorithm, ReleaseStatus, PublishStatus) +from fedlearner_webconsole.algorithm.utils import algorithm_project_path +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmParameter, AlgorithmVariable, AlgorithmProjectPb, \ + AlgorithmPb +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import ListAlgorithmsResponse,\ + ListAlgorithmProjectsResponse +from fedlearner_webconsole.flag.models import Flag + + +def generate_algorithm_files(): + path = tempfile.mkdtemp() + path = Path(path, 'e2e_test').resolve() + path.mkdir() + path.joinpath('follower').mkdir() + path.joinpath('follower').joinpath('main.py').touch() + path.joinpath('leader').mkdir() + file_path = path.joinpath('leader').joinpath('main.py') + file_path.touch() + file_path.write_text('import tensorflow', encoding='utf-8') + return str(path) + + +def _generate_tar_file(): + path = generate_algorithm_files() + tar_path = os.path.join(tempfile.mkdtemp(), 'test.tar.gz') + with tarfile.open(tar_path, 'w:gz') as tar: + tar.add(os.path.join(path, 'leader'), arcname='leader') + tar.add(os.path.join(path, 'follower'), arcname='follower') + tar = tarfile.open(tar_path, 'r') # pylint: disable=consider-using-with + return tar + + +class AlgorithmApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + user = User(username='user') + session.add(user) + session.flush() + algo_project1 = AlgorithmProject(id=1, name='test-algo-project-1', uuid='test-algo-project-1-uuid') + algo1 = Algorithm(name='test-algo-1', + version=1, + project_id=1, + algorithm_project_id=1, + path=generate_algorithm_files(), + username=user.username, + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL) + algo1.set_parameter(AlgorithmParameter(variables=[AlgorithmVariable(name='BATCH_SIZE', value='123')])) + algo_project2 = AlgorithmProject(id=2, name='test-algo-project', publish_status=PublishStatus.PUBLISHED) + algo2 = Algorithm(name='test-algo-2', + algorithm_project_id=2, + publish_status=PublishStatus.PUBLISHED, + path=tempfile.mkdtemp()) + session.add_all([algo_project1, algo_project2, algo1, algo2]) + session.commit() + + def test_get_algorithm_by_id(self): + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo-1').first() + response = self.get_helper(f'/api/v2/algorithms/{algo.id}') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.maxDiff = None + self.assertResponseDataEqual(response, { + 'name': 'test-algo-1', + 
'project_id': 1, + 'status': 'UNPUBLISHED', + 'version': 1, + 'type': 'NN_VERTICAL', + 'source': 'PRESET', + 'username': 'user', + 'algorithm_project_id': 1, + 'algorithm_project_uuid': 'test-algo-project-1-uuid', + 'path': algo.path, + 'parameter': { + 'variables': [{ + 'name': 'BATCH_SIZE', + 'value': '123', + 'required': False, + 'display_name': '', + 'comment': '', + 'value_type': 'STRING' + }] + }, + 'participant_id': 0, + 'participant_name': '', + 'favorite': False, + 'comment': '' + }, + ignore_fields=['id', 'uuid', 'created_at', 'updated_at', 'deleted_at']) + + def test_get_with_not_found_exception(self): + response = self.get_helper('/api/v2/algorithms/12') + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + def test_delete_algorithm(self): + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo-1').first() + resp = self.delete_helper(f'/api/v2/algorithms/{algo.id}') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo-1').execution_options( + include_deleted=True).first() + self.assertIsNone(algo) + + def test_download_algorithm_files(self): + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo-2').first() + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}?download=true') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo-1').first() + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}?download=true') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(resp.headers['Content-Disposition'], 'attachment; filename=test-algo-1.tar') + self.assertEqual(resp.headers['Content-Type'], 'application/x-tar') + tar = tarfile.TarFile(fileobj=BytesIO(resp.data)) # pylint: disable=consider-using-with + with tempfile.TemporaryDirectory() as temp_dir: + tar.extractall(temp_dir) + self.assertEqual(['follower', 'leader'], sorted(os.listdir(temp_dir))) + self.assertEqual(['main.py'], os.listdir(os.path.join(temp_dir, 'follower'))) + self.assertEqual(['main.py'], os.listdir(os.path.join(temp_dir, 'leader'))) + + def test_patch_algorithm(self): + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo-1').first() + resp = self.patch_helper(f'/api/v2/algorithms/{algo.id}', data={'comment': 'test edit comment'}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(id=algo.id).first() + self.assertEqual(algorithm.comment, 'test edit comment') + + +class AlgorithmsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='test-project') + algo_project = AlgorithmProject(id=1, name='test-algo-project') + algo1 = Algorithm(name='test-algo-1', algorithm_project_id=1, project_id=1) + algo2 = Algorithm(name='test-algo-2', algorithm_project_id=1, project_id=1) + algo3 = Algorithm(name='test-algo-3', algorithm_project_id=2, project_id=1) + session.add_all([project, algo_project, algo1, algo2, algo3]) + session.commit() + + def test_get_algorithms_by_algo_project_id(self): + resp = self.get_helper('/api/v2/projects/1/algorithms?algo_project_id=1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + 
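# The project_id == 0 requests in this test rely on the convention visible in
# PendingAlgorithmsApi above: a zero project id disables project scoping. A
# minimal sketch of the query shape these assertions assume (illustrative, not
# the exact handler code):
def _query_algorithms(session, project_id: int, algo_project_id: int = None):
    query = session.query(Algorithm)
    if project_id:  # 0 is falsy, so no project filter is applied
        query = query.filter_by(project_id=project_id)
    if algo_project_id is not None:
        query = query.filter_by(algorithm_project_id=algo_project_id)
    return query.order_by(Algorithm.created_at.desc()).all()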
self.assertEqual(data[0]['name'], 'test-algo-2') + self.assertEqual(data[1]['name'], 'test-algo-1') + resp = self.get_helper('/api/v2/projects/0/algorithms?algo_project_id=1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + resp = self.get_helper('/api/v2/projects/0/algorithms') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 3) + + +class AlgorithmFilesApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + path = generate_algorithm_files() + with db.session_scope() as session: + algo = Algorithm(name='test-algo', path=path) + session.add(algo) + session.commit() + + def test_get_algorithm_tree(self): + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo').first() + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}/tree') + data = self.get_response_data(resp) + data = sorted(data, key=lambda d: d['filename']) + self.assertPartiallyEqual(data[1], { + 'filename': 'leader', + 'path': 'leader', + 'is_directory': True + }, + ignore_fields=['size', 'mtime', 'files']) + self.assertPartiallyEqual(data[1]['files'][0], { + 'filename': 'main.py', + 'path': 'leader/main.py', + 'is_directory': False + }, + ignore_fields=['size', 'mtime', 'files']) + + def test_get_algorithm_files(self): + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo').first() + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}/files?path=..') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}/files?path=leader') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}/files?path=leader/config.py') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.get_helper(f'/api/v2/algorithms/{algo.id}/files?path=leader/main.py') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'content': 'import tensorflow', 'path': 'leader/main.py'}) + + +class AlgorithmFilesDownloadApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + path = generate_algorithm_files() + with db.session_scope() as session: + algo = Algorithm(name='test-algo', project_id=1, path=path) + session.add(algo) + session.commit() + + +class AlgorithmProjectsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test-project') + session.add(project) + session.commit() + + def test_get_algorithms(self): + with db.session_scope() as session: + algo_project_1 = AlgorithmProject(name='test-algo-1', created_at=datetime(2021, 12, 1, 0, 0, 0)) + algo_project_2 = AlgorithmProject(name='test-algo-2', + project_id=1, + created_at=datetime(2021, 12, 1, 0, 0, 1)) + session.add_all([algo_project_1, algo_project_2]) + session.commit() + # test get all + response = self.get_helper('/api/v2/projects/0/algorithm_projects') + data = self.get_response_data(response) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['name'], 'test-algo-2') + # test get by project + response = self.get_helper('/api/v2/projects/1/algorithm_projects') + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'test-algo-2') + # test get by keyword + response = self.get_helper('/api/v2/projects/0/algorithm_projects?keyword=algo-2') + data = 
self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'test-algo-2') + + def test_get_algorithms_by_source(self): + with db.session_scope() as session: + algo_project_1 = AlgorithmProject(name='test-preset-1', source=Source.PRESET) + algo_project_2 = AlgorithmProject(name='test-preset-2', + source=Source.USER, + created_at=datetime(2021, 12, 1, 0, 0, 0)) + algo_project_3 = AlgorithmProject(name='test-preset-3', + source=Source.THIRD_PARTY, + created_at=datetime(2021, 12, 1, 0, 0, 1)) + session.add_all([algo_project_1, algo_project_2, algo_project_3]) + session.commit() + response = self.get_helper('/api/v2/projects/0/algorithm_projects?sources=PRESET') + data = self.get_response_data(response) + self.assertEqual(data[0]['name'], 'test-preset-1') + response = self.get_helper('/api/v2/projects/0/algorithm_projects?sources=USER&sources=THIRD_PARTY') + data = self.get_response_data(response) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['name'], 'test-preset-3') + self.assertEqual(data[1]['name'], 'test-preset-2') + + def test_get_algorithm_projects_by_filter(self): + with db.session_scope() as session: + algo_project_1 = AlgorithmProject(name='test-algo-1', + release_status=ReleaseStatus.RELEASED, + type=AlgorithmType.NN_VERTICAL, + created_at=datetime(2021, 12, 1, 0, 0, 0), + updated_at=datetime(2021, 12, 5, 3, 0, 0)) + algo_project_2 = AlgorithmProject(name='test-algo-2', + release_status=ReleaseStatus.UNRELEASED, + type=AlgorithmType.TREE_VERTICAL, + created_at=datetime(2021, 12, 2, 0, 0, 0), + updated_at=datetime(2021, 12, 5, 4, 0, 0)) + algo_project_3 = AlgorithmProject(name='test-preset-1', + release_status=ReleaseStatus.RELEASED, + type=AlgorithmType.NN_VERTICAL, + created_at=datetime(2021, 12, 3, 0, 0, 0), + updated_at=datetime(2021, 12, 5, 2, 0, 0)) + algo_project_4 = AlgorithmProject(name='test-preset-2', + release_status=ReleaseStatus.UNRELEASED, + type=AlgorithmType.TREE_VERTICAL, + created_at=datetime(2021, 12, 4, 0, 0, 0), + updated_at=datetime(2021, 12, 5, 1, 0, 0)) + session.add_all([algo_project_1, algo_project_2, algo_project_3, algo_project_4]) + session.commit() + resp = self.get_helper('/api/v2/projects/0/algorithm_projects') + data = self.get_response_data(resp) + self.assertEqual(len(data), 4) + filter_param = urllib.parse.quote('(type:["NN_VERTICAL"])') + resp = self.get_helper(f'/api/v2/projects/0/algorithm_projects?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['test-preset-1', 'test-algo-1']) + filter_param = urllib.parse.quote('(release_status:["UNRELEASED"])') + resp = self.get_helper(f'/api/v2/projects/0/algorithm_projects?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['test-preset-2', 'test-algo-2']) + filter_param = urllib.parse.quote('(name~="test-algo")') + resp = self.get_helper(f'/api/v2/projects/0/algorithm_projects?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['test-algo-2', 'test-algo-1']) + order_by_param = urllib.parse.quote('created_at asc') + resp = self.get_helper(f'/api/v2/projects/0/algorithm_projects?order_by={order_by_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['test-algo-1', 'test-algo-2', 'test-preset-1', 'test-preset-2']) + order_by_param = urllib.parse.quote('updated_at asc') + resp = 
self.get_helper(f'/api/v2/projects/0/algorithm_projects?order_by={order_by_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['test-preset-2', 'test-preset-1', 'test-algo-1', 'test-algo-2']) + + def test_post_algorithm_project_with_wrong_parameter(self): + with db.session_scope() as session: + project = session.query(Project).filter_by(name='test-project').first() + parameters = {'variable': []} + resp = self.post_helper(f'/api/v2/projects/{project.id}/algorithm_projects', + data={ + 'name': 'test-algo-project', + 'type': AlgorithmType.NN_VERTICAL.name, + 'parameter': json.dumps(parameters) + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + @patch('fedlearner_webconsole.algorithm.service.AlgorithmProject') + @patch('fedlearner_webconsole.algorithm.apis.algorithm_project_path') + def test_post_algorithm_project_with_exceptions(self, mock_algorithm_project_path, mock_algorithm_project): + with db.session_scope() as session: + project = session.query(Project).filter_by(name='test-project').first() + parameters = {'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]} + file = (BytesIO(_generate_tar_file().fileobj.read()), 'test.tar.gz') + Envs.STORAGE_ROOT = tempfile.mkdtemp() + name = 'test-algo-project' + path = os.path.join(Envs.STORAGE_ROOT, 'algorithm_projects', name) + mock_algorithm_project_path.return_value = path + mock_algorithm_project.side_effect = Exception() + self.client.post(f'/api/v2/projects/{project.id}/algorithm_projects', + data={ + 'name': name, + 'file': [file], + 'type': AlgorithmType.NN_VERTICAL.name, + 'parameter': json.dumps(parameters), + 'comment': 'haha' + }, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertFalse(os.path.exists(path)) + + def test_post_algorithm_project(self): + with db.session_scope() as session: + project = session.query(Project).filter_by(name='test-project').first() + parameters = {'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]} + file = (BytesIO(_generate_tar_file().fileobj.read()), 'test.tar.gz') + Envs.STORAGE_ROOT = tempfile.mkdtemp() + resp = self.client.post(f'/api/v2/projects/{project.id}/algorithm_projects', + data={ + 'name': 'test-algo-project', + 'file': [file], + 'type': AlgorithmType.NN_VERTICAL.name, + 'parameter': json.dumps(parameters), + 'comment': 'haha' + }, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + algo_project: AlgorithmProject = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.type, AlgorithmType.NN_VERTICAL) + algo_parameter = ParseDict(parameters, AlgorithmParameter()) + self.assertEqual(algo_project.get_parameter(), algo_parameter) + self.assertEqual(algo_project.comment, 'haha') + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'follower', 'main.py'))) + with open(os.path.join(algo_project.path, 'leader', 'main.py'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), 'import tensorflow') + with open(os.path.join(algo_project.path, 'follower', 'main.py'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), '') + + def test_post_algorithm_project_with_empty_file(self): + with db.session_scope() as session: + project = session.query(Project).filter_by(name='test-project').first() + Envs.STORAGE_ROOT = tempfile.mkdtemp() + resp = 
self.client.post(f'/api/v2/projects/{project.id}/algorithm_projects', + data={ + 'name': 'test-algo-project', + 'type': AlgorithmType.NN_VERTICAL.name, + }, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.name, 'test-algo-project') + self.assertEqual(algo_project.type, AlgorithmType.NN_VERTICAL) + self.assertEqual(algo_project.get_parameter(), AlgorithmParameter()) + self.assertTrue(os.path.exists(algo_project.path)) + self.assertEqual(os.listdir(algo_project.path), []) + + @patch('fedlearner_webconsole.utils.file_manager.FileManager.mkdir') + def test_post_algorithm_project_with_duplicate_name(self, mock_mkdir): + with db.session_scope() as session: + project1 = Project(id=2, name='test-project-1') + project2 = Project(id=3, name='test-project-2') + algo_project = AlgorithmProject(name='test-algo-project', project_id=2, source=Source.USER) + session.add_all([project1, project2, algo_project]) + session.commit() + resp = self.client.post('/api/v2/projects/2/algorithm_projects', + data={ + 'name': 'test-algo-project', + 'type': AlgorithmType.NN_VERTICAL.name, + }, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + resp = self.client.post('/api/v2/projects/3/algorithm_projects', + data={ + 'name': 'test-algo-project', + 'type': AlgorithmType.TREE_VERTICAL.name, + }, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + + def test_post_algorithm_project_with_trusted_computing(self): + Flag.TRUSTED_COMPUTING_ENABLED.value = True + with db.session_scope() as session: + project = session.query(Project).filter_by(name='test-project').first() + parameters = {'variables': [{'name': 'OUTPUT_PATH', 'value': '/output'}]} + file = (BytesIO(_generate_tar_file().fileobj.read()), 'test.tar.gz') + Envs.STORAGE_ROOT = tempfile.mkdtemp() + golden_data = { + 'name': 'test-algo-project-trust', + 'file': [file], + 'type': AlgorithmType.TRUSTED_COMPUTING.name, + 'parameter': json.dumps(parameters), + 'comment': 'comment for algorithm project with trusted computing type' + } + resp = self.client.post(f'/api/v2/projects/{project.id}/algorithm_projects', + data=golden_data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + algo_project: AlgorithmProject = \ + session.query(AlgorithmProject).filter_by(name=golden_data['name']).first() + self.assertEqual(algo_project.type.name, golden_data['type']) + algo_parameter = ParseDict(parameters, AlgorithmParameter()) + self.assertEqual(algo_project.get_parameter(), algo_parameter) + self.assertEqual(algo_project.comment, golden_data['comment']) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'follower', 'main.py'))) + with open(os.path.join(algo_project.path, 'leader', 'main.py'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), 'import tensorflow') + with open(os.path.join(algo_project.path, 'follower', 'main.py'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), '') + + +class AlgorithmProjectApiTest(BaseTestCase): + + def setUp(self): + super().setUp() 
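# db.session_scope(), used throughout these tests, is the standard SQLAlchemy
# unit-of-work context manager. A minimal sketch of the pattern, assuming a
# sessionmaker named Session (the real helper lives in fedlearner_webconsole.db
# and is not part of this diff):
from contextlib import contextmanager

@contextmanager
def session_scope():
    session = Session()
    try:
        yield session  # callers commit explicitly with session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()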
+ with db.session_scope() as session: + user = User(username='test-user') + session.add(user) + session.flush() + algo_project = AlgorithmProject(name='test-algo-project', + type=AlgorithmType.NN_VERTICAL, + project_id=1, + username=user.username, + source=Source.PRESET, + path=generate_algorithm_files(), + comment='comment') + parameter = { + 'variables': [{ + 'name': 'BATCH_SIZE', + 'value': '12', + 'display_name': 'batch_size', + 'required': False, + 'comment': '', + 'value_type': 'STRING' + }] + } + algo_parameter = ParseDict(parameter, AlgorithmParameter()) + algo_project.set_parameter(algo_parameter) + session.add(algo_project) + algo_project = AlgorithmProject(name='test-algo-project-third-party', + type=AlgorithmType.NN_VERTICAL, + project_id=1, + username=user.username, + source=Source.THIRD_PARTY, + path=generate_algorithm_files(), + comment='comment') + session.add(algo_project) + session.commit() + + def test_get_algorithm_project(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + parameter = to_dict(algo_project.get_parameter()) + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}') + self.assertEqual(resp.status_code, HTTPStatus.OK) + expected_data = { + 'algorithms': [], + 'name': 'test-algo-project', + 'type': 'NN_VERTICAL', + 'project_id': 1, + 'username': 'test-user', + 'latest_version': 0, + 'source': 'PRESET', + 'participant_id': 0, + 'participant_name': '', + 'parameter': parameter, + 'publish_status': 'UNPUBLISHED', + 'release_status': 'UNRELEASED', + 'path': algo_project.path, + 'comment': 'comment' + } + self.maxDiff = None + self.assertResponseDataEqual(resp, + expected_data, + ignore_fields=['id', 'uuid', 'created_at', 'updated_at', 'deleted_at']) + + def test_get_algorithms_from_algorithm_project(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + algo_1 = Algorithm(name='test-algo', version=1, algorithm_project_id=algo_project.id) + algo_2 = Algorithm(name='test-algo', version=2, algorithm_project_id=algo_project.id) + session.add_all([algo_1, algo_2]) + session.commit() + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data['algorithms']), 2) + + def test_patch_algorithm_project(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + parameters = {'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]} + resp = self.patch_helper(f'/api/v2/algorithm_projects/{algo_project.id}', + data={ + 'parameter': parameters, + 'comment': 'comment' + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).get(algo_project.id) + self.assertEqual(algo_project.comment, 'comment') + algo_parameter = ParseDict(parameters, AlgorithmParameter()) + self.assertEqual(algo_project.get_parameter(), algo_parameter) + + def test_patch_third_party_algorithm_project(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project-third-party').first() + comment = 'test edit comment' + resp = self.patch_helper(f'/api/v2/algorithm_projects/{algo_project.id}', + data={ + 'parameter': None, + 'comment': comment + }) + 
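# The FORBIDDEN response asserted just below comes from a source check: a
# THIRD_PARTY algorithm project is read-only on this side. ReleaseAlgorithmApi
# earlier in this diff guards the same way; a sketch of the assumed patch guard:
if algo_project.source == Source.THIRD_PARTY:
    raise NoAccessException(message='algo_project from THIRD_PARTY can not be edited')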
self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).get(algo_project.id) + self.assertNotEqual(algo_project.comment, comment) + + def test_delete_algorithm_project(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + algo = Algorithm(name='test-algo', algorithm_project_id=algo_project.id, path=generate_algorithm_files()) + session.add(algo) + session.commit() + resp = self.delete_helper(f'/api/v2/algorithm_projects/{algo.id}') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter( + AlgorithmProject.name.like('%test-algo-project')).execution_options(include_deleted=True).first() + algo = session.query(Algorithm).filter( + Algorithm.name.like('%test-algo')).execution_options(include_deleted=True).first() + self.assertIsNone(algo_project) + self.assertIsNone(algo) + + +class AlgorithmProjectFilesApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + path = generate_algorithm_files() + with db.session_scope() as session: + algo_project = AlgorithmProject(name='test-algo-project', path=path) + session.add(algo_project) + session.commit() + + def test_get_file_tree(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}/tree') + data = self.get_response_data(resp) + data = sorted(data, key=lambda d: d['filename']) + self.assertPartiallyEqual(data[1], { + 'filename': 'leader', + 'path': 'leader', + 'is_directory': True + }, + ignore_fields=['size', 'mtime', 'files']) + self.assertPartiallyEqual(data[1]['files'][0], { + 'filename': 'main.py', + 'path': 'leader/main.py', + 'is_directory': False + }, + ignore_fields=['size', 'mtime', 'files']) + + def test_get_project_files(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader/../..') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader/config.py') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.get_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader/main.py') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'content': 'import tensorflow', 'path': 'leader/main.py'}) + + def test_post_project_files(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + # unauthorized path under algorithm + data = {'path': '..', 'filename': 'test', 'file': (BytesIO(b'abcdef'), 'test.jpg')} + resp = self.client.post(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + # fail due to path not found + data = {'path': 'test', 'filename': ',.test.jpg.', 'file': (BytesIO(b'abcdef'), 'test.jpg')} + resp = 
self.client.post(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # put file under leader directory + data = {'path': 'leader', 'filename': ',.test.jpg.', 'file': (BytesIO(b'abcdef'), 'test.jpg')} + resp = self.client.post(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'path': 'leader', 'filename': 'test.jpg'}) + with open(os.path.join(algo_project.path, 'leader', 'test.jpg'), 'rb') as fin: + file_content = fin.read() + self.assertEqual(file_content, b'abcdef') + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + + def test_put_empty_file(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + # put empty file under leader directory + data = {'path': 'leader', 'filename': 'test'} + resp = self.client.put(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'content': None, 'path': 'leader', 'filename': 'test'}) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'leader', 'test'))) + + def test_put_file_by_content(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + # put file under leader directory by content + data = {'path': 'leader', 'filename': 'test', 'file': BytesIO(b'123')} + resp = self.client.put(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'path': 'leader', 'filename': 'test', 'content': '123'}) + with open(os.path.join(algo_project.path, 'leader', 'test'), 'r', encoding='utf-8') as file: + self.assertEqual(file.read(), '123') + + def test_put_directory(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + # fail due to file already exist + data = {'path': '.', 'filename': 'leader', 'is_directory': True} + resp = self.client.put(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to path not exist + data = {'path': 'test', 'filename': 'test', 'is_directory': True} + resp = self.client.put(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # create directory under leader + data = {'path': 'leader', 'filename': 'test', 'is_directory': True} + resp = self.client.put(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, 
{'content': None, 'path': 'leader', 'filename': 'test'}) + self.assertTrue(os.path.isdir(os.path.join(algo_project.path, 'leader', 'test'))) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + + def test_delete_project_files(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + resp = self.delete_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader/../..') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + resp = self.delete_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader/config.py') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.delete_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=leader/main.py') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'leader'))) + self.assertFalse(os.path.exists(os.path.join(algo_project.path, 'leader', 'main.py'))) + resp = self.delete_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files?path=follower') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + self.assertEqual(os.listdir(os.path.join(algo_project.path)), ['leader']) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + + def test_patch_algorithm_project_file_rename(self): + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + resp = self.patch_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data={ + 'path': 'leader', + 'dest': 'leader1' + }) + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'leader1'))) + self.assertFalse(os.path.exists(os.path.join(algo_project.path, 'leader'))) + resp = self.patch_helper(f'/api/v2/algorithm_projects/{algo_project.id}/files', + data={ + 'path': 'leader1/main.py', + 'dest': 'main.py' + }) + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + self.assertTrue(os.path.exists(os.path.join(algo_project.path, 'main.py'))) + self.assertFalse(os.path.exists(os.path.join(algo_project.path, 'leader', 'main.py'))) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + + +class ParticipantAlgorithmProjectsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-1') + participant_1 = Participant(id=1, name='part-1', domain_name='test-1') + participant_2 = Participant(id=2, name='part-2', domain_name='test-2') + project_participant_1 = ProjectParticipant(id=1, project_id=1, participant_id=1) + project_participant_2 = ProjectParticipant(id=2, project_id=1, participant_id=2) + algorithm_project_1 = AlgorithmProject(id=1, + uuid='algo-project-uuid-1', + name='test-algo-project-1', + type=AlgorithmType.NN_VERTICAL, + source=Source.PRESET, + latest_version=1, + comment='comment-1', + created_at=datetime(2021, 12, 3, 0, 0, 0), + updated_at=datetime(2021, 12, 7, 2, 0, 0)) + algorithm_project_2 = 
AlgorithmProject(id=2, + uuid='algo-project-uuid-2', + name='test-algo-project-2', + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + latest_version=1, + comment='comment-2', + created_at=datetime(2021, 12, 4, 0, 0, 0), + updated_at=datetime(2021, 12, 6, 2, 0, 0)) + session.add_all([ + project, participant_1, participant_2, project_participant_1, project_participant_2, + algorithm_project_1, algorithm_project_2 + ]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.list_algorithm_projects') + def test_get_participant_algorithm_projects(self, mock_list_algorithm_projects): + with db.session_scope() as session: + algo_project_1 = session.query(AlgorithmProject).get(1) + algo_project_2 = session.query(AlgorithmProject).get(2) + participant_algorithm_projects1 = [algo_project_1.to_proto()] + participant_algorithm_projects2 = [algo_project_2.to_proto()] + mock_list_algorithm_projects.return_value = ListAlgorithmProjectsResponse( + algorithm_projects=participant_algorithm_projects1) + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithm_projects') + data = self.get_response_data(resp) + self.assertEqual(len(data), 1) + self.assertEqual(mock_list_algorithm_projects.call_count, 1) + self.assertEqual(data[0]['uuid'], 'algo-project-uuid-1') + self.assertEqual(data[0]['latest_version'], 1) + self.assertEqual(data[0]['comment'], 'comment-1') + self.assertEqual(data[0]['participant_id'], 1) + mock_list_algorithm_projects.side_effect = [ + ListAlgorithmProjectsResponse(algorithm_projects=participant_algorithm_projects1), + ListAlgorithmProjectsResponse(algorithm_projects=participant_algorithm_projects2) + ] + resp = self.get_helper('/api/v2/projects/1/participants/0/algorithm_projects') + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + self.assertEqual(mock_list_algorithm_projects.call_count, 3) + self.assertEqual(data[0]['uuid'], 'algo-project-uuid-2') + self.assertEqual(data[0]['latest_version'], 1) + self.assertEqual(data[0]['comment'], 'comment-2') + self.assertEqual(data[0]['participant_id'], 2) + self.assertEqual(data[1]['name'], 'test-algo-project-1') + self.assertEqual(data[1]['source'], 'PRESET') + self.assertEqual(data[1]['participant_id'], 1) + # when grpc error + mock_list_algorithm_projects.side_effect = [ + FakeRpcError(grpc.StatusCode.UNIMPLEMENTED, 'rpc not implemented'), + ListAlgorithmProjectsResponse(algorithm_projects=participant_algorithm_projects2) + ] + resp = self.get_helper('/api/v2/projects/1/participants/0/algorithm_projects') + data = self.get_response_data(resp) + self.assertEqual(len(data), 1) + mock_list_algorithm_projects.side_effect = [ + FakeRpcError(grpc.StatusCode.UNIMPLEMENTED, 'rpc not implemented'), + FakeRpcError(grpc.StatusCode.UNIMPLEMENTED, 'rpc not implemented') + ] + resp = self.get_helper('/api/v2/projects/1/participants/0/algorithm_projects') + data = self.get_response_data(resp) + self.assertEqual(len(data), 0) + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.list_algorithm_projects') + def test_get_participant_algorithm_projects_with_filter(self, mock_list_algorithm_projects): + with db.session_scope() as session: + algo_project_1 = session.query(AlgorithmProject).get(1) + algo_project_2 = session.query(AlgorithmProject).get(2) + participant_algorithm_projects1 = [algo_project_1.to_proto()] + participant_algorithm_projects2 = [algo_project_1.to_proto(), algo_project_2.to_proto()] + 
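# The UNIMPLEMENTED branches in the previous test pass because the endpoint
# issues one RPC per participant and drops participants whose call fails,
# instead of failing the whole request. Sketch of that aggregation (helper and
# argument names are illustrative):
import logging
import grpc

def _list_participant_algorithm_projects(clients, filter_exp):
    results = []
    for name, client in clients.items():
        try:
            resp = client.list_algorithm_projects(filter_exp=filter_exp)
            results.extend(resp.algorithm_projects)
        except grpc.RpcError as e:
            logging.warning(f'list_algorithm_projects failed for {name}: {e}')
            continue  # degrade gracefully: skip this participant
    return results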
mock_list_algorithm_projects.return_value = ListAlgorithmProjectsResponse( + algorithm_projects=participant_algorithm_projects1) + filter_param = urllib.parse.quote('(name~="1")') + resp = self.get_helper(f'/api/v2/projects/1/participants/1/algorithm_projects?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['uuid'], 'algo-project-uuid-1') + self.assertEqual(data[0]['latest_version'], 1) + self.assertEqual(data[0]['comment'], 'comment-1') + mock_list_algorithm_projects.assert_called_with(filter_exp=parse_expression('(name~="1")')) + mock_list_algorithm_projects.return_value = ListAlgorithmProjectsResponse( + algorithm_projects=participant_algorithm_projects2) + order_by_param = urllib.parse.quote('updated_at asc') + resp = self.get_helper(f'/api/v2/projects/1/participants/1/algorithm_projects?order_by={order_by_param}') + data = self.get_response_data(resp) + self.assertEqual(data[0]['name'], 'test-algo-project-2') + self.assertEqual(data[1]['uuid'], 'algo-project-uuid-1') + mock_list_algorithm_projects.return_value = ListAlgorithmProjectsResponse( + algorithm_projects=participant_algorithm_projects2) + order_by_param = urllib.parse.quote('created_at asc') + resp = self.get_helper(f'/api/v2/projects/1/participants/1/algorithm_projects?order_by={order_by_param}') + data = self.get_response_data(resp) + self.assertEqual(data[0]['name'], 'test-algo-project-1') + self.assertEqual(data[1]['uuid'], 'algo-project-uuid-2') + order_by_param = urllib.parse.quote('unknown_attribute asc') + resp = self.get_helper(f'/api/v2/projects/1/participants/1/algorithm_projects?order_by={order_by_param}') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + +class ParticipantAlgorithmProjectApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-1') + participant = Participant(id=1, name='part-1', domain_name='test') + session.add_all([project, participant]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm_project') + def test_get_algorithm_project(self, mock_get_algorithm_project): + participant_algorithm_project = AlgorithmProjectPb(uuid='algo-project-uuid-1', + name='test-algo-project-1', + type=AlgorithmType.NN_VERTICAL.name, + source=Source.USER.name, + latest_version=1, + comment='comment-1', + created_at=1326542405, + updated_at=1326542405) + mock_get_algorithm_project.return_value = participant_algorithm_project + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithm_projects/algo-project-uuid-1') + data = self.get_response_data(resp) + self.assertEqual(data['uuid'], 'algo-project-uuid-1') + self.assertEqual(data['latest_version'], 1) + self.assertEqual(data['comment'], 'comment-1') + + +class ParticipantAlgorithmsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-1') + participant = Participant(id=1, name='part-1', domain_name='test') + session.add_all([project, participant]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.list_algorithms') + def test_get_participant_algorithms(self, mock_list_algorithms): + parameter = ParseDict({'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]}, AlgorithmParameter()) + participant_algorithms = [ + AlgorithmPb(uuid='algo-uuid-1', + name='test-algo-1', + version=1, + 
type=AlgorithmType.NN_VERTICAL.name, + source=Source.USER.name, + parameter=parameter, + comment='comment-1', + created_at=1326542405, + updated_at=1326542405), + AlgorithmPb(uuid='algo-uuid-2', + name='test-algo-2', + version=2, + type=AlgorithmType.TREE_VERTICAL.name, + source=Source.THIRD_PARTY.name, + parameter=parameter, + comment='comment-2', + created_at=1326542405, + updated_at=1326542405) + ] + mock_list_algorithms.return_value = ListAlgorithmsResponse(algorithms=participant_algorithms) + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithms?algorithm_project_uuid=uuid') + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['uuid'], 'algo-uuid-1') + self.assertEqual(data[0]['version'], 1) + self.assertEqual(data[0]['comment'], 'comment-1') + self.assertEqual(data[0]['participant_id'], 1) + self.assertEqual(data[1]['name'], 'test-algo-2') + self.assertEqual(data[1]['type'], 'TREE_VERTICAL') + self.assertEqual(data[1]['source'], 'THIRD_PARTY') + self.assertEqual(data[1]['participant_id'], 1) + + +class ParticipantAlgorithmApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-1') + participant = Participant(id=1, name='part-1', domain_name='test') + algorithm = Algorithm(id=1, + uuid='algo-uuid-1', + name='test-algo-1', + version=1, + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + session.add_all([project, participant, algorithm]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm') + def test_get_participant_algorithm(self, mock_get_algorithm): + with db.session_scope() as session: + algo = session.query(Algorithm).get(1) + mock_get_algorithm.return_value = AlgorithmPb(uuid=algo.uuid, + name=algo.name, + version=algo.version, + type=algo.type.name, + source=algo.source.name, + comment=algo.comment) + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithms/algo-uuid-1') + data = self.get_response_data(resp) + self.assertEqual(data['name'], 'test-algo-1') + self.assertEqual(data['version'], 1) + self.assertEqual(data['type'], 'NN_VERTICAL') + self.assertEqual(data['source'], 'USER') + + +class ParticipantAlgorithmFilesApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-1') + participant = Participant(id=1, name='part-1', domain_name='test') + path = generate_algorithm_files() + algorithm = Algorithm(id=1, uuid='algo-uuid-1', name='test-algo-1', path=path) + session.add_all([project, participant, algorithm]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm') + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm_files') + def test_get_participant_algorithm_tree(self, mock_get_algorithm_files, mock_get_algorithm): + with db.session_scope() as session: + algo = session.query(Algorithm).get(1) + mock_get_algorithm.return_value = algo.to_proto() + data_iterator = AlgorithmSender().make_algorithm_iterator(algo.path) + mock_get_algorithm_files.return_value = data_iterator + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithms/algo-uuid-1/tree') + data = 
self.get_response_data(resp) + data = sorted(data, key=lambda d: d['filename']) + self.assertPartiallyEqual(data[1], { + 'filename': 'leader', + 'path': 'leader', + 'is_directory': True + }, + ignore_fields=['size', 'mtime', 'files']) + self.assertPartiallyEqual(data[1]['files'][0], { + 'filename': 'main.py', + 'path': 'leader/main.py', + 'is_directory': False + }, + ignore_fields=['size', 'mtime', 'files']) + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm') + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm_files') + def test_get_participant_algorithm_files(self, mock_get_algorithm_files, mock_get_algorithm): + with db.session_scope() as session: + algo = session.query(Algorithm).get(1) + mock_get_algorithm.return_value = algo.to_proto() + data_iterator = AlgorithmSender().make_algorithm_iterator(algo.path) + mock_get_algorithm_files.return_value = data_iterator + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithms/algo-uuid-1/files?path=..') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithms/algo-uuid-1/files?path=leader') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.get_helper( + '/api/v2/projects/1/participants/1/algorithms/algo-uuid-1/files?path=leader/config.py') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + resp = self.get_helper('/api/v2/projects/1/participants/1/algorithms/algo-uuid-1/files?path=leader/main.py') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'content': 'import tensorflow', 'path': 'leader/main.py'}) + + +class FetchAlgorithmApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='part', domain_name='part-test.com') + project_participant = ProjectParticipant(id=1, project_id=1, participant_id=1) + algorithm = Algorithm(id=1, uuid='uuid', name='algo', project_id=1, source=Source.USER) + session.add_all([project, participant, project_participant, algorithm]) + session.commit() + + def test_get_algorithm(self): + resp = self.get_helper('/api/v2/projects/1/algorithms/uuid') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['name'], 'algo') + + @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant') + def test_get_algorithm_from_participant(self, mock_get_algorithm_from_participant): + mock_get_algorithm_from_participant.return_value = AlgorithmPb(name='peer-algo') + resp = self.get_helper('/api/v2/projects/1/algorithms/uuid-1') + mock_get_algorithm_from_participant.assert_called_with(algorithm_uuid='uuid-1', participant_id=1) + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['name'], 'peer-algo') + + +class UpdatePresetAlgorithmApiTest(BaseTestCase): + + def test_update_preset_algorithms(self): + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + self.signin_as_admin() + resp = self.post_helper('/api/v2/preset_algorithms:update') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(sorted([d['name'] for d in data]), ['e2e_test', 
'horizontal_e2e_test', 'secure_boost']) + with db.session_scope() as session: + algo_project1 = session.query(AlgorithmProject).filter_by(name='e2e_test').first() + self.assertIsNotNone(algo_project1) + self.assertEqual(algo_project1.type, AlgorithmType.NN_VERTICAL) + self.assertEqual(algo_project1.source, Source.PRESET) + algo_project2 = session.query(AlgorithmProject).filter_by(name='secure_boost').first() + self.assertIsNotNone(algo_project2) + self.assertEqual(algo_project2.type, AlgorithmType.TREE_VERTICAL) + self.assertEqual(algo_project2.source, Source.PRESET) + algo1 = session.query(Algorithm).filter_by(name='e2e_test').first() + self.assertIsNotNone(algo1) + self.assertEqual(algo1.type, AlgorithmType.NN_VERTICAL) + self.assertEqual(algo1.source, Source.PRESET) + self.assertEqual(algo1.algorithm_project_id, algo_project1.id) + self.assertTrue(os.path.exists(os.path.join(algo1.path, 'follower/config.py'))) + self.assertTrue(os.path.exists(os.path.join(algo1.path, 'leader/config.py'))) + algo2 = session.query(Algorithm).filter_by(name='secure_boost').first() + self.assertIsNotNone(algo2) + self.assertEqual(algo2.type, AlgorithmType.TREE_VERTICAL) + self.assertEqual(algo2.source, Source.PRESET) + self.assertEqual(algo2.algorithm_project_id, algo_project2.id) + + +class ReleaseAlgorithmApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.algorithm.apis.algorithm_path') + @patch('fedlearner_webconsole.algorithm.service.Algorithm') + def test_release_algorithm_with_exceptions(self, mock_algorithm, mock_algorithm_path): + path = generate_algorithm_files() + with db.session_scope() as session: + algo_project = AlgorithmProject(name='test-algo', path=path) + session.add(algo_project) + session.commit() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + Envs.STORAGE_ROOT = tempfile.mkdtemp() + algorithm_path = os.path.join(Envs.STORAGE_ROOT, 'algorithms', 'test_with_exceptions') + mock_algorithm_path.return_value = algorithm_path + mock_algorithm.side_effect = Exception() + self.post_helper(f'/api/v2/algorithm_projects/{algo_project.id}:release', {}) + self.assertFalse(os.path.exists(algorithm_path)) + + def test_release_algorithm(self): + path = generate_algorithm_files() + with db.session_scope() as session: + algo_project = AlgorithmProject(name='test-algo', path=path) + session.add(algo_project) + session.commit() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + Envs.STORAGE_ROOT = tempfile.mkdtemp() + resp = self.post_helper(f'/api/v2/algorithm_projects/{algo_project.id}:release', {}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).get(1) + algo = algo_project.algorithms[0] + self.assertEqual(algo_project.release_status, ReleaseStatus.RELEASED) + self.assertEqual(algo.name, 'test-algo') + self.assertEqual(algo.version, 1) + self.assertTrue(algo.path.startswith(Envs.STORAGE_ROOT)) + with open(os.path.join(algo.path, 'leader', 'main.py'), 'r', encoding='utf-8') as fin: + self.assertEqual(fin.read(), 'import tensorflow') + self.assertTrue(os.path.exists(os.path.join(algo.path, 'follower', 'main.py'))) + resp = self.post_helper(f'/api/v2/algorithm_projects/{algo_project.id}:release', data={'comment': 'comment'}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).get(1) + self.assertEqual(algo_project.release_status, ReleaseStatus.RELEASED) + 
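# Releasing the same project again should append a new version rather than overwrite the existing one. + 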
self.assertEqual(len(algo_project.algorithms), 2) + algo = algo_project.algorithms[0] + self.assertEqual(algo.name, 'test-algo') + self.assertEqual(algo.comment, 'comment') + self.assertEqual(algo.version, 2) + + def test_release_algorithm_failed(self): + path = generate_algorithm_files() + with db.session_scope() as session: + algo_project = AlgorithmProject(name='test-algo', path=path, source=Source.THIRD_PARTY) + session.add(algo_project) + session.commit() + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + Envs.STORAGE_ROOT = tempfile.mkdtemp() + resp = self.post_helper(f'/api/v2/algorithm_projects/{algo_project.id}:release', {}) + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + + +class PublishAlgorithmApiTest(BaseTestCase): + + def test_publish_algorithm(self): + with db.session_scope() as session: + project = Project(id=1, name='test-project') + algo_project = AlgorithmProject(id=1, + project_id=1, + name='test-algo-project', + publish_status=PublishStatus.UNPUBLISHED) + algo = Algorithm(id=1, + project_id=1, + name='test-algo', + algorithm_project_id=1, + publish_status=PublishStatus.UNPUBLISHED) + session.add_all([project, algo_project, algo]) + session.commit() + resp = self.post_helper('/api/v2/projects/1/algorithms/1:publish') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['status'], PublishStatus.PUBLISHED.name) + with db.session_scope() as session: + algo = session.query(Algorithm).get(1) + self.assertEqual(algo.publish_status, PublishStatus.PUBLISHED) + algo_project = session.query(AlgorithmProject).get(1) + self.assertEqual(algo_project.publish_status, PublishStatus.PUBLISHED) + + +class UnpublishAlgorithmApiTest(BaseTestCase): + + def test_unpublish_algorithm(self): + with db.session_scope() as session: + project = Project(id=1, name='test-project') + algo_project = AlgorithmProject(id=1, + project_id=1, + name='test-algo-project', + publish_status=PublishStatus.PUBLISHED) + algo1 = Algorithm(id=1, + project_id=1, + algorithm_project_id=1, + name='test-algo-1', + publish_status=PublishStatus.PUBLISHED) + algo2 = Algorithm(id=2, + project_id=1, + algorithm_project_id=1, + name='test-algo-2', + publish_status=PublishStatus.PUBLISHED) + session.add_all([project, algo_project, algo1, algo2]) + session.commit() + resp = self.post_helper('/api/v2/projects/1/algorithms/1:unpublish') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['status'], PublishStatus.UNPUBLISHED.name) + with db.session_scope() as session: + algo = session.query(Algorithm).get(1) + self.assertEqual(algo.publish_status, PublishStatus.UNPUBLISHED) + algo_project = session.query(AlgorithmProject).get(1) + self.assertEqual(algo_project.publish_status, PublishStatus.PUBLISHED) + resp = self.post_helper('/api/v2/projects/1/algorithms/2:unpublish') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['status'], PublishStatus.UNPUBLISHED.name) + with db.session_scope() as session: + algo = session.query(Algorithm).get(2) + self.assertEqual(algo.publish_status, PublishStatus.UNPUBLISHED) + algo_project = session.query(AlgorithmProject).get(1) + self.assertEqual(algo_project.publish_status, PublishStatus.UNPUBLISHED) + + +class PendingAlgorithmsApiTest(BaseTestCase): + + def test_get_pending_algorithms(self): + with db.session_scope() as session: + uuid = resource_uuid() + algo_project = 
AlgorithmProject(name='test-algo', uuid=uuid) + participant = Participant(name='test-part', domain_name='haha') + session.add(algo_project) + session.add(participant) + session.flush() + pending_algo_1 = PendingAlgorithm(name='test-algo-1', + algorithm_project_uuid=uuid, + project_id=1, + created_at=datetime(2021, 12, 2, 0, 0), + participant_id=participant.id) + pending_algo_2 = PendingAlgorithm(name='test-algo-2', project_id=2, created_at=datetime(2021, 12, 2, 0, 1)) + pending_algo_3 = PendingAlgorithm(name='test-algo-3', project_id=2, deleted_at=datetime(2021, 12, 2)) + session.add_all([pending_algo_1, pending_algo_2, pending_algo_3]) + session.commit() + resp = self.get_helper('/api/v2/projects/0/pending_algorithms') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['name'], 'test-algo-2') + resp = self.get_helper('/api/v2/projects/1/pending_algorithms') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 1) + with db.session_scope() as session: + algo_project = session.query(AlgorithmProject).filter_by(uuid=uuid).first() + self.assertPartiallyEqual( + data[0], { + 'name': 'test-algo-1', + 'project_id': 1, + 'algorithm_project_id': algo_project.id, + 'version': 0, + 'type': 'UNSPECIFIED', + 'path': '', + 'comment': '', + 'participant_id': participant.id, + 'participant_name': 'test-part' + }, + ignore_fields=['id', 'algorithm_uuid', 'algorithm_project_uuid', 'created_at', 'updated_at', 'deleted_at']) + + +class AcceptPendingAlgorithmApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test') + session.add(project) + session.flush() + pending_algo = PendingAlgorithm(name='test-algo', + version=2, + project_id=project.id, + algorithm_uuid=resource_uuid(), + algorithm_project_uuid=resource_uuid(), + type=AlgorithmType.NN_VERTICAL, + participant_id=1) + pending_algo.path = generate_algorithm_files() + session.add(pending_algo) + session.commit() + + @patch('fedlearner_webconsole.algorithm.service.Algorithm') + @patch('fedlearner_webconsole.algorithm.service.AlgorithmProject') + @patch('fedlearner_webconsole.algorithm.apis.algorithm_project_path') + @patch('fedlearner_webconsole.algorithm.apis.algorithm_path') + def test_accept_pending_algorithm_with_exceptions(self, mock_algorithm_path, mock_algorithm_project_path, + mock_algorithm, mock_algorithm_project): + with db.session_scope() as session: + pending_algo = session.query(PendingAlgorithm).filter_by(name='test-algo').first() + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + name = 'test_with_exceptions' + algo_project_path = os.path.join(Envs.STORAGE_ROOT, 'algorithm_projects', name) + mock_algorithm_project_path.return_value = algo_project_path + algorithm_path = os.path.join(Envs.STORAGE_ROOT, 'algorithms', name) + mock_algorithm_path.return_value = algorithm_path + mock_algorithm.side_effect = Exception() + mock_algorithm_project.side_effect = Exception() + self.post_helper(f'/api/v2/projects/1/pending_algorithms/{pending_algo.id}:accept', data={'name': 'algo-1'}) + self.assertFalse(os.path.exists(algorithm_path)) + self.assertFalse(os.path.exists(algo_project_path)) + + def test_accept_pending_algorithm(self): + with db.session_scope() as session: + pending_algo = session.query(PendingAlgorithm).filter_by(name='test-algo').first() + with 
tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + resp = self.post_helper(f'/api/v2/projects/1/pending_algorithms/{pending_algo.id}:accept', + data={'name': 'algo-1'}) + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + pending_algo = session.query(PendingAlgorithm).execution_options(include_deleted=True).filter_by( + name='test-algo').first() + self.assertTrue(bool(pending_algo.deleted_at)) + algo_project = session.query(AlgorithmProject).filter_by(name='algo-1').first() + self.assertEqual(algo_project.username, 'ada') + self.assertEqual(algo_project.participant_id, pending_algo.participant_id) + self.assertEqual(algo_project.latest_version, pending_algo.version) + self.assertEqual(algo_project.type, pending_algo.type) + self.assertEqual(algo_project.source, Source.THIRD_PARTY) + self.assertEqual(len(algo_project.algorithms), 1) + self.assertEqual(algo_project.release_status, ReleaseStatus.RELEASED) + self.assertEqual(algo_project.algorithms[0].participant_id, pending_algo.participant_id) + + def test_accept_with_duplicate_uuid(self): + uuid = resource_uuid() + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + with db.session_scope() as session: + pending_algo = session.query(PendingAlgorithm).filter_by(name='test-algo').first() + algorithm_project_uuid = pending_algo.algorithm_project_uuid + algo_project_path = algorithm_project_path(Envs.STORAGE_ROOT, 'test-algo') + algo_project = AlgorithmProject(name='test-algo-project', + uuid=algorithm_project_uuid, + path=algo_project_path, + source=Source.THIRD_PARTY) + algo_project.release_status = ReleaseStatus.RELEASED + session.add(algo_project) + session.commit() + + resp = self.post_helper(f'/api/v2/projects/1/pending_algorithms/{pending_algo.id}:accept', + data={'name': 'test-algo-project'}) + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + pending_algo = session.query(PendingAlgorithm).execution_options(include_deleted=True).filter_by( + name='test-algo').first() + self.assertTrue(bool(pending_algo.deleted_at)) + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project').first() + self.assertEqual(algo_project.source, Source.THIRD_PARTY) + self.assertEqual(len(algo_project.algorithms), 1) + self.assertEqual(algo_project.release_status, ReleaseStatus.RELEASED) + self.assertEqual(algo_project.uuid, pending_algo.algorithm_project_uuid) + self.assertEqual(algo_project.algorithms[0].participant_id, pending_algo.participant_id) + self.assertEqual(algo_project.algorithms[0].name, pending_algo.name) + self.assertEqual(algo_project.algorithms[0].parameter, pending_algo.parameter) + self.assertEqual(algo_project.algorithms[0].uuid, pending_algo.algorithm_uuid) + self.assertEqual(sorted(os.listdir(algo_project.algorithms[0].path)), ['follower', 'leader']) + + +class PendingAlgorithmTreeApi(BaseTestCase): + + def test_get_tree(self): + path = generate_algorithm_files() + with db.session_scope() as session: + pending_algo = PendingAlgorithm(name='test-algo', path=path) + session.add(pending_algo) + session.commit() + resp = self.get_helper(f'/api/v2/projects/0/pending_algorithms/{pending_algo.id}/tree') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + data = sorted(data, key=lambda d: d['filename']) + self.assertPartiallyEqual(data[1], { + 'filename': 'leader', + 'path': 'leader', + 'is_directory': True + }, + ignore_fields=['size', 
'mtime', 'files']) + self.assertPartiallyEqual(data[1]['files'][0], { + 'filename': 'main.py', + 'path': 'leader/main.py', + 'is_directory': False + }, + ignore_fields=['size', 'mtime', 'files']) + + +class PendingAlgorithmFilesApi(BaseTestCase): + + def test_get_file(self): + path = generate_algorithm_files() + with db.session_scope() as session: + pending_algo = PendingAlgorithm(name='test-algo', path=path) + session.add(pending_algo) + session.commit() + resp = self.get_helper(f'/api/v2/projects/0/pending_algorithms/{pending_algo.id}/files?path=leader/main.py') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'content': 'import tensorflow', 'path': 'leader/main.py'}) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/fetcher.py b/web_console_v2/api/fedlearner_webconsole/algorithm/fetcher.py new file mode 100644 index 000000000..1ccd68d90 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/fetcher.py @@ -0,0 +1,67 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +from envs import Envs +from fedlearner_webconsole.algorithm.models import Algorithm, Source +from fedlearner_webconsole.algorithm.utils import algorithm_cache_path +from fedlearner_webconsole.algorithm.utils import check_algorithm_file +from fedlearner_webconsole.algorithm.transmit.receiver import AlgorithmReceiver +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmPb +from fedlearner_webconsole.rpc.v2.resource_service_client import ResourceServiceClient +from fedlearner_webconsole.utils.file_manager import file_manager +from fedlearner_webconsole.exceptions import NotFoundException + + +class AlgorithmFetcher: + + def __init__(self, project_id: int): + self._project_id = project_id + + def get_algorithm_from_participant(self, algorithm_uuid: str, participant_id: int) -> AlgorithmPb: + with db.session_scope() as session: + project = session.query(Project).get(self._project_id) + participant = session.query(Participant).get(participant_id) + client = ResourceServiceClient.from_project_and_participant(participant.domain_name, project.name) + algorithm = client.get_algorithm(algorithm_uuid=algorithm_uuid) + algo_cache_path = algorithm_cache_path(Envs.STORAGE_ROOT, algorithm_uuid) + if not file_manager.exists(algo_cache_path): + data_iterator = client.get_algorithm_files(algorithm_uuid=algorithm_uuid) + # Get the hash in the first response to be used for verification when the file is received + resp = next(data_iterator) + with check_algorithm_file(algo_cache_path): + AlgorithmReceiver().write_data_and_extract(data_iterator, algo_cache_path, resp.hash) + algorithm.path = algo_cache_path + algorithm.source = Source.PARTICIPANT.name + algorithm.participant_id = 
participant_id + return algorithm + + def get_algorithm(self, uuid: str) -> AlgorithmPb: + """Raise NotFoundException when the algorithm is not found""" + + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(uuid=uuid).first() + participants = session.query(Project).get(self._project_id).participants + if algorithm: + return algorithm.to_proto() + for participant in participants: + try: + return self.get_algorithm_from_participant(algorithm_uuid=uuid, participant_id=participant.id) + except grpc.RpcError: + continue + raise NotFoundException(f'the algorithm uuid: {uuid} is not found') diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/fetcher_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/fetcher_test.py new file mode 100644 index 000000000..873c033ea --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/fetcher_test.py @@ -0,0 +1,101 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import tempfile +import unittest +from envs import Envs +from google.protobuf.json_format import ParseDict +from testing.common import NoWebServerTestCase +from unittest.mock import patch +from fedlearner_webconsole.algorithm.models import Algorithm, Source +from fedlearner_webconsole.algorithm.transmit.sender import AlgorithmSender +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.algorithm.utils import algorithm_cache_path +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmVariable, AlgorithmParameter +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.proto import remove_secrets + +_TEST_ALGORITHM_PATH = os.path.join(Envs.BASE_DIR, 'testing/test_data/algorithm/e2e_test') + + +class AlgorithmFetcherTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='part', domain_name='test') + project.participants = [participant] + algo1 = Algorithm(id=1, project_id=1, name='test-algo-1', uuid='algo-1', path=_TEST_ALGORITHM_PATH) + parameter1 = ParseDict({'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]}, AlgorithmParameter()) + algo1.set_parameter(parameter1) + algo2 = Algorithm(id=2, project_id=1, name='test-algo-2', uuid='algo-2') + parameter2 = ParseDict({'variables': [{'name': 'MAX_DEPTH', 'value': '5'}]}, AlgorithmParameter()) + algo2.set_parameter(parameter2) + session.add_all([participant, project, algo1, algo2]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm') + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm_files') + def test_get_algorithm_from_participant(self, mock_get_algorithm_files, 
mock_get_algorithm): + with db.session_scope() as session: + algo = session.query(Algorithm).get(1) + mock_get_algorithm.return_value = remove_secrets(algo.to_proto()) + mock_get_algorithm_files.return_value = AlgorithmSender().make_algorithm_iterator(algo.path) + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + algorithm_uuid = 'uuid' + algorithm = AlgorithmFetcher(project_id=1).get_algorithm_from_participant(algorithm_uuid=algorithm_uuid, + participant_id=1) + algo_cache_path = algorithm_cache_path(Envs.STORAGE_ROOT, algorithm_uuid) + self.assertTrue(os.path.exists(algo_cache_path)) + self.assertEqual(algo_cache_path, algorithm.path) + self.assertEqual(algorithm.source, Source.PARTICIPANT.name) + self.assertEqual(algorithm.id, 0) + self.assertEqual(algorithm.algorithm_project_id, 0) + self.assertEqual(algorithm.parameter, + AlgorithmParameter(variables=[AlgorithmVariable(name='BATCH_SIZE', value='128')])) + self.assertEqual(sorted(os.listdir(algo_cache_path)), ['follower', 'leader']) + with open(os.path.join(algo_cache_path, 'leader', 'main.py'), encoding='utf-8') as f: + self.assertEqual(f.read(), 'import tensorflow\n') + with open(os.path.join(algo_cache_path, 'follower', 'main.py'), encoding='utf-8') as f: + self.assertEqual(f.read(), '') + + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm') + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm_files') + def test_get_algorithm(self, mock_get_algorithm_files, mock_get_algorithm): + with db.session_scope() as session: + algo1 = session.query(Algorithm).get(1) + algo2 = session.query(Algorithm).get(2) + mock_get_algorithm.return_value = remove_secrets(algo2.to_proto()) + mock_get_algorithm_files.return_value = AlgorithmSender().make_algorithm_iterator(algo1.path) + with tempfile.TemporaryDirectory() as temp_dir: + Envs.STORAGE_ROOT = temp_dir + fetcher = AlgorithmFetcher(project_id=1) + algorithm1 = fetcher.get_algorithm('algo-1') + self.assertEqual(algorithm1.path, algo1.path) + self.assertEqual(algorithm1.parameter, + AlgorithmParameter(variables=[AlgorithmVariable(name='BATCH_SIZE', value='128')])) + algorithm2 = fetcher.get_algorithm('algo-3') + self.assertEqual(algorithm2.path, algorithm_cache_path(Envs.STORAGE_ROOT, 'algo-3')) + self.assertEqual(algorithm2.parameter, + AlgorithmParameter(variables=[AlgorithmVariable(name='MAX_DEPTH', value='5')])) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/models.py b/web_console_v2/api/fedlearner_webconsole/algorithm/models.py new file mode 100644 index 000000000..a764e8f47 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/models.py @@ -0,0 +1,354 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + import os + import enum + from typing import Optional + from sqlalchemy.sql.schema import Index, UniqueConstraint + from google.protobuf import text_format + from fedlearner_webconsole.auth.models import User + from fedlearner_webconsole.db import db, default_table_args + from fedlearner_webconsole.participant.models import Participant + from fedlearner_webconsole.project.models import Project + from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmParameter, PendingAlgorithmPb, AlgorithmPb, \ + AlgorithmProjectPb + from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp + from fedlearner_webconsole.utils.base_model.softdelete_model import SoftDeleteModel + from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus, ReviewTicketModel + + + def normalize_path(path: str) -> str: + if path.startswith('hdfs://'): + return path + if path.startswith('file://'): + _, pure_path = path.split('://') + return f'file://{os.path.normpath(pure_path)}' + return os.path.normpath(path) + + + class AlgorithmType(enum.Enum): + UNSPECIFIED = 0 + NN_LOCAL = 1 + NN_HORIZONTAL = 2 + NN_VERTICAL = 3 + TREE_VERTICAL = 4 + TRUSTED_COMPUTING = 5 + + + class Source(enum.Enum): + UNSPECIFIED = 0 + PRESET = 1 + USER = 2 + THIRD_PARTY = 3 # deprecated + PARTICIPANT = 4 # algorithm from participant + + + class ReleaseStatus(enum.Enum): + UNPUBLISHED = 0 # deprecated + PUBLISHED = 1 # deprecated + UNRELEASED = 'UNRELEASED' + RELEASED = 'RELEASED' + + + class PublishStatus(enum.Enum): + UNPUBLISHED = 'UNPUBLISHED' + PUBLISHED = 'PUBLISHED' + + + class AlgorithmStatus(enum.Enum): + UNPUBLISHED = 'UNPUBLISHED' + PENDING_APPROVAL = 'PENDING_APPROVAL' + APPROVED = 'APPROVED' + DECLINED = 'DECLINED' + PUBLISHED = 'PUBLISHED' + + + # TODO(hangweiqiang): read https://docs.sqlalchemy.org/en/14/orm/inheritance.html and try refactor + class AlgorithmProject(db.Model, SoftDeleteModel): + __tablename__ = 'algorithm_projects_v2' + __table_args__ = (UniqueConstraint('name', 'source', 'project_id', name='uniq_name_source_project_id'), + UniqueConstraint('uuid', name='uniq_uuid'), default_table_args('algorithm_projects')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + uuid = db.Column(db.String(64), comment='uuid') + name = db.Column(db.String(255), comment='name') + project_id = db.Column(db.Integer, comment='project id') + latest_version = db.Column(db.Integer, default=0, comment='latest version') + type = db.Column('algorithm_type', + db.Enum(AlgorithmType, native_enum=False, length=32, create_constraint=False), + default=AlgorithmType.UNSPECIFIED, + key='type', + comment='algorithm type') + source = db.Column(db.Enum(Source, native_enum=False, length=32, create_constraint=False), + default=Source.UNSPECIFIED, + comment='algorithm source') + # The algorithm project's 'publish' action has been renamed to 'release', and a project reverts to unreleased + # whenever its files or parameters are edited. For backward compatibility the value is still stored in the + # publish_status column, and _release_status is added at the model layer to convert between the two enums + # whenever the data is read.
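+ # For example, a legacy row stored with publish_status=PUBLISHED is read back as ReleaseStatus.RELEASED + # through the release_status property below, so callers only ever see UNRELEASED or RELEASED.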
+ _release_status = db.Column('publish_status', + db.Enum(ReleaseStatus, native_enum=False, length=32, create_constraint=False), + default=ReleaseStatus.UNRELEASED, + comment='release status') + publish_status = db.Column('publish_status_v2', + db.Enum(PublishStatus, native_enum=False, length=32, create_constraint=False), + server_default=PublishStatus.UNPUBLISHED.name, + comment='publish status') + username = db.Column(db.String(255), comment='creator name') + participant_id = db.Column(db.Integer, comment='participant id') + path = db.Column('fspath', db.String(512), key='path', comment='algorithm project path') + parameter = db.Column(db.Text(), comment='parameter') + comment = db.Column('cmt', db.String(255), key='comment', comment='comment') + created_at = db.Column(db.DateTime(timezone=True), default=now, comment='created time') + updated_at = db.Column(db.DateTime(timezone=True), default=now, onupdate=now, comment='updated time') + deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted time') + project = db.relationship(Project.__name__, primaryjoin='foreign(AlgorithmProject.project_id) == Project.id') + user = db.relationship(User.__name__, primaryjoin='foreign(AlgorithmProject.username) == User.username') + participant = db.relationship(Participant.__name__, + primaryjoin='foreign(AlgorithmProject.participant_id) == Participant.id') + algorithms = db.relationship( + 'Algorithm', + order_by='desc(Algorithm.version)', + primaryjoin='foreign(Algorithm.algorithm_project_id) == AlgorithmProject.id', + # To disable the warning of back_populates + overlaps='algorithm_project') + + @property + def release_status(self) -> ReleaseStatus: + if self._release_status == ReleaseStatus.UNPUBLISHED: + return ReleaseStatus.UNRELEASED + if self._release_status == ReleaseStatus.PUBLISHED: + return ReleaseStatus.RELEASED + return self._release_status + + @release_status.setter + def release_status(self, release_status: ReleaseStatus): + self._release_status = release_status + + def set_parameter(self, parameter: Optional[AlgorithmParameter] = None): + if parameter is None: + parameter = AlgorithmParameter() + self.parameter = text_format.MessageToString(parameter) + + def get_parameter(self) -> Optional[AlgorithmParameter]: + if self.parameter is not None: + return text_format.Parse(self.parameter, AlgorithmParameter()) + return None + + def is_path_accessible(self, path: str): + if self.path is None: + return False + return normalize_path(path).startswith(self.path) + + def get_participant_name(self): + if self.participant is not None: + return self.participant.name + return None + + def to_proto(self) -> AlgorithmProjectPb: + return AlgorithmProjectPb( + id=self.id, + uuid=self.uuid, + name=self.name, + project_id=self.project_id, + latest_version=self.latest_version, + type=self.type.name, + source=self.source.name, + publish_status=self.publish_status.name, + release_status=self.release_status.name, + username=self.username, + participant_id=self.participant_id, + participant_name=self.get_participant_name(), + path=self.path, + parameter=self.get_parameter(), + comment=self.comment, + created_at=to_timestamp(self.created_at) if self.created_at else None, + updated_at=to_timestamp(self.updated_at) if self.updated_at else None, + deleted_at=to_timestamp(self.deleted_at) if self.deleted_at else None, + algorithms=[algo.to_proto() for algo in self.algorithms], + ) + + +class Algorithm(db.Model, SoftDeleteModel, ReviewTicketModel): + __tablename__ = 'algorithms_v2' + __table_args__ = 
(Index('idx_name', + 'name'), UniqueConstraint('source', 'name', 'version', name='uniq_source_name_version'), + UniqueConstraint('uuid', name='uniq_uuid'), default_table_args('algorithms')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + uuid = db.Column(db.String(64), comment='uuid') + name = db.Column(db.String(255), comment='name') + project_id = db.Column(db.Integer, comment='project id') + version = db.Column(db.Integer, comment='version') + type = db.Column('algorithm_type', + db.Enum(AlgorithmType, native_enum=False, length=32, create_constraint=False), + default=AlgorithmType.UNSPECIFIED, + key='type', + comment='algorithm type') + source = db.Column(db.Enum(Source, native_enum=False, length=32, create_constraint=False), + default=Source.UNSPECIFIED, + comment='source') + publish_status = db.Column(db.Enum(PublishStatus, native_enum=False, length=32, create_constraint=False), + default=PublishStatus.UNPUBLISHED, + comment='publish status') + algorithm_project_id = db.Column(db.Integer, comment='algorithm project id') + username = db.Column(db.String(255), comment='creator name') + participant_id = db.Column(db.Integer, comment='participant id') + path = db.Column('fspath', db.String(512), key='path', comment='algorithm path') + parameter = db.Column(db.Text(), comment='parameter') + favorite = db.Column(db.Boolean, default=False, comment='favorite') + comment = db.Column('cmt', db.String(255), key='comment', comment='comment') + created_at = db.Column(db.DateTime(timezone=True), default=now, comment='created time') + updated_at = db.Column(db.DateTime(timezone=True), default=now, onupdate=now, comment='updated time') + deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted time') + project = db.relationship(Project.__name__, primaryjoin='foreign(Algorithm.project_id) == Project.id') + user = db.relationship(User.__name__, primaryjoin='foreign(Algorithm.username) == User.username') + participant = db.relationship(Participant.__name__, + primaryjoin='foreign(Algorithm.participant_id) == Participant.id') + algorithm_project = db.relationship(AlgorithmProject.__name__, + primaryjoin='foreign(Algorithm.algorithm_project_id) == AlgorithmProject.id') + + def set_parameter(self, parameter: Optional[AlgorithmParameter] = None): + if parameter is None: + parameter = AlgorithmParameter() + self.parameter = text_format.MessageToString(parameter) + + def get_parameter(self) -> Optional[AlgorithmParameter]: + if self.parameter is not None: + return text_format.Parse(self.parameter, AlgorithmParameter()) + return None + + def is_path_accessible(self, path: str): + if self.path is None: + return False + return normalize_path(path).startswith(self.path) + + def get_participant_name(self): + if self.participant is not None: + return self.participant.name + return None + + def get_status(self) -> AlgorithmStatus: + if self.publish_status == PublishStatus.PUBLISHED: + return AlgorithmStatus.PUBLISHED + if self.ticket_uuid is not None: + if self.ticket_status == TicketStatus.PENDING: + return AlgorithmStatus.PENDING_APPROVAL + if self.ticket_status == TicketStatus.APPROVED: + return AlgorithmStatus.APPROVED + if self.ticket_status == TicketStatus.DECLINED: + return AlgorithmStatus.DECLINED + return AlgorithmStatus.UNPUBLISHED + + def get_algorithm_project_uuid(self) -> Optional[str]: + if self.algorithm_project: + return self.algorithm_project.uuid + return None + + def to_proto(self) -> AlgorithmPb: + return AlgorithmPb( + id=self.id, + uuid=self.uuid, + 
name=self.name, + project_id=self.project_id, + version=self.version, + type=self.type.name, + source=self.source.name, + status=self.get_status().name, + algorithm_project_id=self.algorithm_project_id, + algorithm_project_uuid=self.get_algorithm_project_uuid(), + username=self.username, + participant_id=self.participant_id, + participant_name=self.get_participant_name(), + # TODO(gezhengqiang): delete participant name + path=self.path, + parameter=self.get_parameter(), + favorite=self.favorite, + comment=self.comment, + created_at=to_timestamp(self.created_at) if self.created_at else None, + updated_at=to_timestamp(self.updated_at) if self.updated_at else None, + deleted_at=to_timestamp(self.deleted_at) if self.deleted_at else None, + ) + + +class PendingAlgorithm(db.Model, SoftDeleteModel): + __tablename__ = 'pending_algorithms_v2' + __table_args__ = (default_table_args('pending_algorithms')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + algorithm_uuid = db.Column(db.String(64), comment='algorithm uuid') + algorithm_project_uuid = db.Column(db.String(64), comment='algorithm project uuid') + name = db.Column(db.String(255), comment='name') + project_id = db.Column(db.Integer, comment='project id') + version = db.Column(db.Integer, comment='version') + type = db.Column('algorithm_type', + db.Enum(AlgorithmType, native_enum=False, length=32, create_constraint=False), + default=AlgorithmType.UNSPECIFIED, + key='type', + comment='algorithm type') + participant_id = db.Column(db.Integer, comment='participant id') + path = db.Column('fspath', db.String(512), key='path', comment='algorithm path') + parameter = db.Column(db.Text(), comment='parameter') + comment = db.Column('cmt', db.String(255), key='comment', comment='comment') + created_at = db.Column(db.DateTime(timezone=True), default=now, comment='created time') + updated_at = db.Column(db.DateTime(timezone=True), default=now, onupdate=now, comment='updated time') + deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted time') + project = db.relationship(Project.__name__, primaryjoin='foreign(PendingAlgorithm.project_id) == Project.id') + participant = db.relationship(Participant.__name__, + primaryjoin='foreign(PendingAlgorithm.participant_id) == Participant.id') + algorithm_project = db.relationship( + AlgorithmProject.__name__, + primaryjoin='foreign(PendingAlgorithm.algorithm_project_uuid) == AlgorithmProject.uuid') + + def set_parameter(self, parameter: Optional[AlgorithmParameter] = None): + if parameter is None: + parameter = AlgorithmParameter() + self.parameter = text_format.MessageToString(parameter) + + def get_parameter(self) -> Optional[AlgorithmParameter]: + if self.parameter is not None: + return text_format.Parse(self.parameter, AlgorithmParameter()) + return None + + def is_path_accessible(self, path: str): + if self.path is None: + return False + return normalize_path(path).startswith(self.path) + + def get_participant_name(self): + if self.participant: + return self.participant.name + return None + + def get_algorithm_project_id(self) -> Optional[int]: + if self.algorithm_project: + return self.algorithm_project.id + return None + + def to_proto(self) -> PendingAlgorithmPb: + return PendingAlgorithmPb( + id=self.id, + algorithm_uuid=self.algorithm_uuid, + algorithm_project_uuid=self.algorithm_project_uuid, + name=self.name, + project_id=self.project_id, + version=self.version, + type=self.type.name, + participant_id=self.participant_id, + 
participant_name=self.get_participant_name(), + path=self.path, + parameter=self.get_parameter(), + comment=self.comment, + created_at=to_timestamp(self.created_at) if self.created_at else None, + updated_at=to_timestamp(self.updated_at) if self.updated_at else None, + deleted_at=to_timestamp(self.deleted_at) if self.deleted_at else None, + algorithm_project_id=self.get_algorithm_project_id(), + ) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/models_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/models_test.py new file mode 100644 index 000000000..19dc63f80 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/models_test.py @@ -0,0 +1,169 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from datetime import datetime, timezone +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, AlgorithmType, Source,\ + PendingAlgorithm, PublishStatus, AlgorithmStatus, normalize_path +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmParameter, AlgorithmVariable, AlgorithmPb,\ + AlgorithmProjectPb +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.pp_datetime import to_timestamp + + +class AlgorithmTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + algo_project = AlgorithmProject(id=1, name='test-algo-project', uuid='test-algo-project-uuid') + algo = Algorithm(id=1, + algorithm_project_id=1, + name='test-algo', + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + path='/data', + created_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + updated_at=datetime(2022, 2, 22, tzinfo=timezone.utc)) + algo.set_parameter(AlgorithmParameter(variables=[AlgorithmVariable(name='MAX_ITERS', value='5')])) + session.add_all([algo_project, algo]) + session.commit() + + def test_parameter(self): + algo = Algorithm(name='test-algo') + parameters = AlgorithmParameter(variables=[AlgorithmVariable(name='MAX_ITERS', value='5')]) + algo.set_parameter(parameters) + self.assertEqual(algo.get_parameter(), parameters) + + def test_to_proto(self): + parameters = AlgorithmParameter(variables=[ + AlgorithmVariable( + name='MAX_ITERS', value='5', required=False, display_name='', comment='', value_type='STRING') + ]) + with db.session_scope() as session: + algo = session.query(Algorithm).filter_by(name='test-algo').first() + self.assertEqual( + algo.to_proto(), + AlgorithmPb(id=1, + name='test-algo', + type='NN_VERTICAL', + source='USER', + algorithm_project_id=1, + path='/data', + parameter=parameters, + status='UNPUBLISHED', + algorithm_project_uuid='test-algo-project-uuid', + updated_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc)), + created_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc)))) + + def test_normalize_path(self): + 
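# normalize_path keeps hdfs:// paths untouched and normalizes '.' and '..' segments in file:// and plain paths. + 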
path1 = 'hdfs:///user/./local' + self.assertEqual(normalize_path(path1), 'hdfs:///user/./local') + path2 = 'file:///app/./local/../tools' + self.assertEqual(normalize_path(path2), 'file:///app/tools') + path3 = '/app/./local/../tools' + self.assertEqual(normalize_path(path3), '/app/tools') + + def test_get_status(self): + algo1 = Algorithm(publish_status=PublishStatus.PUBLISHED, ticket_uuid=1) + self.assertEqual(algo1.get_status(), AlgorithmStatus.PUBLISHED) + algo2 = Algorithm(publish_status=PublishStatus.PUBLISHED, ticket_uuid=None) + self.assertEqual(algo2.get_status(), AlgorithmStatus.PUBLISHED) + algo3 = Algorithm(publish_status=PublishStatus.UNPUBLISHED, ticket_uuid=None) + self.assertEqual(algo3.get_status(), AlgorithmStatus.UNPUBLISHED) + algo4 = Algorithm(publish_status=PublishStatus.UNPUBLISHED, + ticket_uuid=None, + ticket_status=TicketStatus.PENDING) + self.assertEqual(algo4.get_status(), AlgorithmStatus.UNPUBLISHED) + algo5 = Algorithm(publish_status=PublishStatus.UNPUBLISHED, ticket_uuid=1, ticket_status=TicketStatus.PENDING) + self.assertEqual(algo5.get_status(), AlgorithmStatus.PENDING_APPROVAL) + algo6 = Algorithm(publish_status=PublishStatus.UNPUBLISHED, ticket_uuid=1, ticket_status=TicketStatus.DECLINED) + self.assertEqual(algo6.get_status(), AlgorithmStatus.DECLINED) + algo7 = Algorithm(publish_status=PublishStatus.UNPUBLISHED, ticket_uuid=1, ticket_status=TicketStatus.APPROVED) + self.assertEqual(algo7.get_status(), AlgorithmStatus.APPROVED) + + +class AlgorithmProjectTest(NoWebServerTestCase): + + def test_algorithms_reference(self): + with db.session_scope() as session: + algo_project = AlgorithmProject(name='test-algo') + session.add(algo_project) + session.flush() + algo1 = Algorithm(name='test-algo', version=1, algorithm_project_id=algo_project.id) + algo2 = Algorithm(name='test-algo', version=2, algorithm_project_id=algo_project.id) + algo3 = Algorithm(name='test-algo') + session.add_all([algo1, algo2, algo3]) + session.commit() + with db.session_scope() as session: + algo_project: AlgorithmProject = session.query(AlgorithmProject).get(algo_project.id) + algorithms = algo_project.algorithms + self.assertEqual(len(algorithms), 2) + self.assertEqual(algorithms[0].name, 'test-algo') + self.assertEqual(algorithms[0].version, 2) + self.assertEqual(algorithms[1].name, 'test-algo') + self.assertEqual(algorithms[1].version, 1) + + def test_to_proto(self): + with db.session_scope() as session: + algo_project = AlgorithmProject(id=1, + name='test-algo-project', + type=AlgorithmType.TREE_VERTICAL, + path='/data', + created_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + updated_at=datetime(2022, 2, 22, tzinfo=timezone.utc)) + algo_project.set_parameter(AlgorithmParameter(variables=[AlgorithmVariable(name='MAX_DEPTH', value='5')])) + session.add(algo_project) + session.commit() + result = algo_project.to_proto() + parameters = AlgorithmParameter(variables=[ + AlgorithmVariable( + name='MAX_DEPTH', value='5', required=False, display_name='', comment='', value_type='STRING') + ]) + self.assertEqual( + result, + AlgorithmProjectPb(id=1, + name='test-algo-project', + type='TREE_VERTICAL', + source='UNSPECIFIED', + publish_status='UNPUBLISHED', + path='/data', + parameter=parameters, + updated_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc)), + created_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc)), + release_status='UNRELEASED')) + + +class PendingAlgorithmTest(NoWebServerTestCase): + + def test_to_dict(self): + pending_algo = 
PendingAlgorithm(name='test-algo', type=AlgorithmType.TREE_VERTICAL, path='/data') + pending_algo.set_parameter(AlgorithmParameter(variables=[AlgorithmVariable(name='MAX_DEPTH', value='5')])) + with db.session_scope() as session: + session.add(pending_algo) + session.commit() + result = pending_algo.to_proto() + self.assertEqual(result.type, 'TREE_VERTICAL') + parameters = AlgorithmParameter(variables=[ + AlgorithmVariable( + name='MAX_DEPTH', value='5', required=False, display_name='', comment='', value_type='STRING') + ]) + self.assertEqual(result.parameter, parameters) + + + if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/BUILD.bazel new file mode 100644 index 000000000..6380eb244 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/BUILD.bazel @@ -0,0 +1,38 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +filegroup( + name = "preset_algorithms", + srcs = glob(["**/*"]), +) + +py_library( + name = "preset_algorithm_service_lib", + srcs = ["preset_algorithm_service.py"], + data = [":preset_algorithms"], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + ], +) + +py_test( + name = "preset_algorithm_service_lib_test", + srcs = [ + "preset_algorithm_service_test.py", + ], + imports = ["../../.."], + main = "preset_algorithm_service_test.py", + deps = [ + ":preset_algorithm_service_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/follower/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/follower/config.py new file mode 100644 index 000000000..b18f227ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/follower/config.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/follower/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/follower/main.py new file mode 100644 index 000000000..50ac5f5a9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/follower/main.py @@ -0,0 +1,106 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'follower' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=100, help='Training batch size.') +args = parser.parse_args() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.string) + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + return features, dict(y=tf.constant(0)) + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + "example_id": tf.FixedLenFeature([], tf.string), + "raw_id": tf.FixedLenFeature([], tf.string), + } + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in follower_feature_names] + x = tf.concat(x, axis=-1) + + w1f = tf.get_variable('w1l', + shape=[len(follower_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1f = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f) + + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, 
predictions=act1_f) + + if mode == tf.estimator.ModeKeys.TRAIN: + gact1_f = model.send('act1_f', act1_f, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + model.send('act1_f', act1_f, require_grad=False) + + #acc = model.recv('acc', tf.float32, require_grad=False) + auc = model.recv('auc', tf.float32, require_grad=False) + loss = model.recv('loss', tf.float32, require_grad=False) + logging_hook = tf.train.LoggingTensorHook({ + 'auc': auc, + 'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.GradientDescentOptimizer(0.1) + train_op = model.minimize(optimizer, act1_f, grad_loss=gact1_f, global_step=global_step) + return model.make_spec(mode, + loss=tf.math.reduce_mean(act1_f), + train_op=train_op, + training_hooks=[logging_hook, step_metric_hook]) + if mode == tf.estimator.ModeKeys.EVAL: + fake_loss = tf.reduce_mean(act1_f) + return model.make_spec(mode=mode, loss=fake_loss, evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/leader/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/leader/config.py new file mode 100644 index 000000000..b18f227ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/leader/config.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/leader/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/leader/main.py new file mode 100644 index 000000000..7bddf34b3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v1/leader/main.py @@ -0,0 +1,123 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import logging +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'leader' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=100, help='Training batch size.') +args = parser.parse_args() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.string) + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + label_map = {} + for name in leader_label_name: + label_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + labels = tf.parse_example(example, features=label_map) + return features, labels + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + "example_id": tf.FixedLenFeature([], tf.string), + "raw_id": tf.FixedLenFeature([], tf.string), + } + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in leader_feature_names] + x = tf.concat(x, axis=-1) + + w1l = tf.get_variable('w1l', + shape=[len(leader_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1l = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l) + if mode == tf.estimator.ModeKeys.TRAIN: + act1_f = model.recv('act1_f', tf.float32, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + act1_f = model.recv('act1_f', tf.float32, require_grad=False) + else: + act1_f = features['act1_f'] + logits = act1_l + act1_f + pred = tf.math.sigmoid(logits) + + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=pred) + + y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name] + y = tf.concat(y, axis=-1) + + loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits) + _, auc = tf.metrics.auc(labels=y, predictions=pred) + #correct = tf.nn.in_top_k(predictions=logits, targets=y, k=1) + #acc = tf.reduce_mean(input_tensor=tf.cast(correct, tf.float32)) + logging_hook = tf.train.LoggingTensorHook( + { + # 'acc': acc, + 'auc': auc, + 'loss': loss, + }, + every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc) + #model.send('acc', acc, require_grad=False) + model.send('auc', auc, require_grad=False) + model.send('loss', loss, require_grad=False) + + global_step = tf.train.get_or_create_global_step() + if mode == 
tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.AdamOptimizer(1e-4) + train_op = model.minimize(optimizer, loss, global_step=global_step) + return model.make_spec(mode=mode, loss=loss, train_op=train_op, training_hooks=[logging_hook, step_metric_hook]) + + if mode == tf.estimator.ModeKeys.EVAL: + loss_pair = tf.metrics.mean(loss) + return model.make_spec(mode=mode, + loss=loss, + eval_metric_ops={'loss': loss_pair}, + evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/follower/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/follower/config.py new file mode 100644 index 000000000..b18f227ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/follower/config.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/follower/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/follower/main.py new file mode 100644 index 000000000..2f3d8773f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/follower/main.py @@ -0,0 +1,136 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
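The v2 variants below extend v1 by streaming per-example predictions back to both sides: the leader sends `pred`, and each party records raw_id → pred through a `tf.py_function` whose execution is forced by a control dependency before the auc/loss tensors are fetched. A condensed sketch of that side-effect pattern, under the same TF1 graph-mode assumption as these files (the names here are illustrative):

```python
# Sketch: force a Python side effect to run before downstream fetches,
# mirroring the update_result_op / control_dependencies pattern below.
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()  # no-op on TF 1.x; needed under TF 2's compat layer

store = {}

def record(key, value):
    # tf.py_function hands the callback eager tensors even in graph mode
    store[key.numpy()] = value.numpy()

key, value = tf.constant(b'id-1'), tf.constant(0.7)
record_op = tf.py_function(record, [key, value], [], name='record')
with tf.control_dependencies([record_op]):
    downstream = tf.identity(value)  # fetching this runs record() first

with tf.Session() as sess:
    sess.run(downstream)
print(store)  # {b'id-1': 0.7}
```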
+# + +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'follower' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=10, help='Training batch size.') +args = parser.parse_args() + + +class ResultWriter: + + def __init__(self): + self.result = {} + + def update_result(self, raw_id, pred): + raw_id = raw_id.numpy() + pred = pred.numpy() + for i in range(len(raw_id)): + self.result[raw_id[i]] = pred[i] + + def write_result(self, filename): + raw_id = np.array(list(self.result.keys())).reshape(-1) + pred = np.array([self.result[i] for i in raw_id]).reshape(-1) + with tf.gfile.Open(filename, 'w') as f: + np.savetxt(f, np.dstack((raw_id, pred))[0], '%s,%f', header='raw_id,pred') + logging.info(f'[write_result]output result to {filename}') + + +result_writer = ResultWriter() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.int64) + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + return features, dict(y=tf.constant(0)) + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + 'example_id': tf.FixedLenFeature([], tf.string), + 'raw_id': tf.FixedLenFeature([], tf.int64), + } + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in follower_feature_names] + x = tf.concat(x, axis=-1) + + w1f = tf.get_variable('w1l', + shape=[len(follower_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1f = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f) + + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=act1_f) + + if mode == tf.estimator.ModeKeys.TRAIN: + gact1_f = model.send('act1_f', act1_f, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + model.send('act1_f', act1_f, require_grad=False) + + pred = model.recv('pred', tf.float32, require_grad=False) + raw_id = features['raw_id'] + update_result_op = tf.py_function(result_writer.update_result, [raw_id, pred], [], 'update') + with tf.control_dependencies([update_result_op]): + auc = model.recv('auc', tf.float32, require_grad=False) + loss = model.recv('loss', tf.float32, require_grad=False) + + logging_hook = tf.train.LoggingTensorHook({ + 'auc': auc, + 
'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc, every_n_iter=1) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.GradientDescentOptimizer(0.1) + train_op = model.minimize(optimizer, act1_f, grad_loss=gact1_f, global_step=global_step) + return model.make_spec(mode, + loss=tf.math.reduce_mean(act1_f), + train_op=train_op, + training_hooks=[logging_hook, step_metric_hook]) + if mode == tf.estimator.ModeKeys.EVAL: + fake_loss = tf.reduce_mean(act1_f) + return model.make_spec(mode=mode, loss=fake_loss, evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + outputs_path = os.path.join(os.environ['OUTPUT_BASE_DIR'], 'outputs') + tf.gfile.MakeDirs(outputs_path) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) + if args.worker: + result_writer.write_result(os.path.join(outputs_path, f'worker-{str(args.worker_rank)}.csv')) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/leader/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/leader/config.py new file mode 100644 index 000000000..b73e677f3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/leader/config.py @@ -0,0 +1,3 @@ +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/leader/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/leader/main.py new file mode 100644 index 000000000..3c3f0b62c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v2/leader/main.py @@ -0,0 +1,132 @@ +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'leader' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=10, help='Training batch size.') +args = parser.parse_args() + + +class ResultWriter: + + def __init__(self): + self.result = {} + + def update_result(self, raw_id, pred): + raw_id = raw_id.numpy() + pred = pred.numpy() + for i in range(len(raw_id)): + self.result[raw_id[i]] = pred[i] + + def write_result(self, filename): + raw_id = np.array(list(self.result.keys())).reshape(-1) + pred = np.array([self.result[i] for i in raw_id]).reshape(-1) + with tf.gfile.Open(filename, 'w') as f: + np.savetxt(f, np.dstack((raw_id, pred))[0], '%s,%f', header='raw_id,pred') + logging.info(f'[write_result]output result to {filename}') + + +result_writer = ResultWriter() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.int64) + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + label_map = {} + for name in leader_label_name: 
+ label_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + labels = tf.parse_example(example, features=label_map) + return features, labels + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + 'example_id': tf.FixedLenFeature([], tf.string), + 'raw_id': tf.FixedLenFeature([], tf.int64), + } + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in leader_feature_names] + x = tf.concat(x, axis=-1) + + w1l = tf.get_variable('w1l', + shape=[len(leader_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1l = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l) + if mode == tf.estimator.ModeKeys.TRAIN: + act1_f = model.recv('act1_f', tf.float32, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + act1_f = model.recv('act1_f', tf.float32, require_grad=False) + else: + act1_f = features['act1_f'] + logits = act1_l + act1_f + pred = tf.math.sigmoid(logits) + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=pred) + + raw_id = features['raw_id'] + update_result_op = tf.py_function(result_writer.update_result, [raw_id, pred], [], 'update') + model.send('pred', pred, require_grad=False) + y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name] + y = tf.concat(y, axis=-1) + + with tf.control_dependencies([update_result_op]): + loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits) + _, auc = tf.metrics.auc(labels=y, predictions=pred) + logging_hook = tf.train.LoggingTensorHook({ + 'auc': auc, + 'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc, every_n_iter=1) + model.send('auc', auc, require_grad=False) + model.send('loss', loss, require_grad=False) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.AdamOptimizer(1e-4) + train_op = model.minimize(optimizer, loss, global_step=global_step) + return model.make_spec(mode=mode, loss=loss, train_op=train_op, training_hooks=[logging_hook, step_metric_hook]) + + if mode == tf.estimator.ModeKeys.EVAL: + loss_pair = tf.metrics.mean(loss) + return model.make_spec(mode=mode, + loss=loss, + eval_metric_ops={'loss': loss_pair}, + evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + outputs_path = os.path.join(os.environ['OUTPUT_BASE_DIR'], 'outputs') + tf.gfile.MakeDirs(outputs_path) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) + if args.worker: + result_writer.write_result(os.path.join(outputs_path, 
f'worker-{str(args.worker_rank)}.csv')) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/follower/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/follower/config.py new file mode 100644 index 000000000..b18f227ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/follower/config.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/follower/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/follower/main.py new file mode 100644 index 000000000..dce750326 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/follower/main.py @@ -0,0 +1,136 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
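Relative to v2, the only schema change in v3 (below) is that `raw_id` is parsed as a string feature instead of int64, on both sides. For reference, a record satisfying the v3 parse spec would be built roughly like this (field names come from config.py; the ids and values are made up, and only one of the x0..x9 floats is shown):

```python
# Hypothetical tf.train.Example matching the v3 parse_fn feature map.
import tensorflow.compat.v1 as tf

example = tf.train.Example(features=tf.train.Features(feature={
    'example_id': tf.train.Feature(bytes_list=tf.train.BytesList(value=[b'e-001'])),
    'raw_id': tf.train.Feature(bytes_list=tf.train.BytesList(value=[b'user-001'])),
    'x0': tf.train.Feature(float_list=tf.train.FloatList(value=[0.5])),
}))
serialized = example.SerializeToString()  # the serialized form parse_fn expects
```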
+# + +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'follower' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=10, help='Training batch size.') +args = parser.parse_args() + + +class ResultWriter: + + def __init__(self): + self.result = {} + + def update_result(self, raw_id, pred): + raw_id = raw_id.numpy() + pred = pred.numpy() + for i in range(len(raw_id)): + self.result[raw_id[i]] = pred[i] + + def write_result(self, filename): + raw_id = np.array(list(self.result.keys())).reshape(-1) + pred = np.array([self.result[i] for i in raw_id]).reshape(-1) + with tf.gfile.Open(filename, 'w') as f: + np.savetxt(f, np.dstack((raw_id, pred))[0], '%s,%f', header='raw_id,pred') + logging.info(f'[write_result]output result to {filename}') + + +result_writer = ResultWriter() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.string) + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + return features, dict(y=tf.constant(0)) + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + 'example_id': tf.FixedLenFeature([], tf.string), + 'raw_id': tf.FixedLenFeature([], tf.string), + } + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in follower_feature_names] + x = tf.concat(x, axis=-1) + + w1f = tf.get_variable('w1l', + shape=[len(follower_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1f = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f) + + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=act1_f) + + if mode == tf.estimator.ModeKeys.TRAIN: + gact1_f = model.send('act1_f', act1_f, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + model.send('act1_f', act1_f, require_grad=False) + + pred = model.recv('pred', tf.float32, require_grad=False) + raw_id = features['raw_id'] + update_result_op = tf.py_function(result_writer.update_result, [raw_id, pred], [], 'update') + with tf.control_dependencies([update_result_op]): + auc = model.recv('auc', tf.float32, require_grad=False) + loss = model.recv('loss', tf.float32, require_grad=False) + + logging_hook = tf.train.LoggingTensorHook({ + 'auc': auc, + 
'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc, every_n_iter=1) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.GradientDescentOptimizer(0.1) + train_op = model.minimize(optimizer, act1_f, grad_loss=gact1_f, global_step=global_step) + return model.make_spec(mode, + loss=tf.math.reduce_mean(act1_f), + train_op=train_op, + training_hooks=[logging_hook, step_metric_hook]) + if mode == tf.estimator.ModeKeys.EVAL: + fake_loss = tf.reduce_mean(act1_f) + return model.make_spec(mode=mode, loss=fake_loss, evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + outputs_path = os.path.join(os.environ['OUTPUT_BASE_DIR'], 'outputs') + tf.gfile.MakeDirs(outputs_path) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) + if args.worker: + result_writer.write_result(os.path.join(outputs_path, f'worker-{str(args.worker_rank)}.csv')) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/leader/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/leader/config.py new file mode 100644 index 000000000..b73e677f3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/leader/config.py @@ -0,0 +1,3 @@ +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/leader/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/leader/main.py new file mode 100644 index 000000000..22e4e65ba --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v3/leader/main.py @@ -0,0 +1,132 @@ +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'leader' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=10, help='Training batch size.') +args = parser.parse_args() + + +class ResultWriter: + + def __init__(self): + self.result = {} + + def update_result(self, raw_id, pred): + raw_id = raw_id.numpy() + pred = pred.numpy() + for i in range(len(raw_id)): + self.result[raw_id[i]] = pred[i] + + def write_result(self, filename): + raw_id = np.array(list(self.result.keys())).reshape(-1) + pred = np.array([self.result[i] for i in raw_id]).reshape(-1) + with tf.gfile.Open(filename, 'w') as f: + np.savetxt(f, np.dstack((raw_id, pred))[0], '%s,%f', header='raw_id,pred') + logging.info(f'[write_result]output result to {filename}') + + +result_writer = ResultWriter() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.string) + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + label_map = {} + for name in leader_label_name: 
+ label_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + labels = tf.parse_example(example, features=label_map) + return features, labels + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + 'example_id': tf.FixedLenFeature([], tf.string), + 'raw_id': tf.FixedLenFeature([], tf.string), + } + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in leader_feature_names] + x = tf.concat(x, axis=-1) + + w1l = tf.get_variable('w1l', + shape=[len(leader_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1l = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l) + if mode == tf.estimator.ModeKeys.TRAIN: + act1_f = model.recv('act1_f', tf.float32, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + act1_f = model.recv('act1_f', tf.float32, require_grad=False) + else: + act1_f = features['act1_f'] + logits = act1_l + act1_f + pred = tf.math.sigmoid(logits) + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=pred) + + raw_id = features['raw_id'] + update_result_op = tf.py_function(result_writer.update_result, [raw_id, pred], [], 'update') + model.send('pred', pred, require_grad=False) + y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name] + y = tf.concat(y, axis=-1) + + with tf.control_dependencies([update_result_op]): + loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits) + _, auc = tf.metrics.auc(labels=y, predictions=pred) + logging_hook = tf.train.LoggingTensorHook({ + 'auc': auc, + 'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc, every_n_iter=1) + model.send('auc', auc, require_grad=False) + model.send('loss', loss, require_grad=False) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.AdamOptimizer(1e-4) + train_op = model.minimize(optimizer, loss, global_step=global_step) + return model.make_spec(mode=mode, loss=loss, train_op=train_op, training_hooks=[logging_hook, step_metric_hook]) + + if mode == tf.estimator.ModeKeys.EVAL: + loss_pair = tf.metrics.mean(loss) + return model.make_spec(mode=mode, + loss=loss, + eval_metric_ops={'loss': loss_pair}, + evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + outputs_path = os.path.join(os.environ['OUTPUT_BASE_DIR'], 'outputs') + tf.gfile.MakeDirs(outputs_path) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) + if args.worker: + result_writer.write_result(os.path.join(outputs_path, 
f'worker-{str(args.worker_rank)}.csv')) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/follower/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/follower/config.py new file mode 100644 index 000000000..b18f227ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/follower/config.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/follower/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/follower/main.py new file mode 100644 index 000000000..0affd60c1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/follower/main.py @@ -0,0 +1,136 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
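v4 (below) keeps the v3 string `raw_id` but fixes how it is persisted: `ResultWriter` now decodes the bytes before keying the result dict and writes the id and prediction with `'%s,%s'`, so the CSV no longer carries Python's `b'...'` repr. A two-line pure-Python illustration of the difference:

```python
raw = b'user-001'
print('%s' % raw)                  # b'user-001'  -> what the v3 writer emits
print('%s' % raw.decode('utf-8'))  # user-001     -> what the v4 writer emits
```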
+# + +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'follower' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=10, help='Training batch size.') +args = parser.parse_args() + + +class ResultWriter: + + def __init__(self): + self.result = {} + + def update_result(self, raw_id, pred): + raw_id = raw_id.numpy() + pred = pred.numpy() + for i in range(len(raw_id)): + self.result[raw_id[i].decode('utf-8')] = pred[i] + + def write_result(self, filename): + raw_id = np.array(list(self.result.keys())).reshape(-1) + pred = np.array([self.result[i] for i in raw_id]).reshape(-1) + with tf.gfile.Open(filename, 'w') as f: + np.savetxt(f, np.dstack((raw_id, pred))[0], '%s,%s', header='raw_id,pred') + logging.info(f'[write_result]output result to {filename}') + + +result_writer = ResultWriter() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.string) + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + return features, dict(y=tf.constant(0)) + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + 'example_id': tf.FixedLenFeature([], tf.string), + 'raw_id': tf.FixedLenFeature([], tf.string), + } + for name in follower_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in follower_feature_names] + x = tf.concat(x, axis=-1) + + w1f = tf.get_variable('w1l', + shape=[len(follower_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1f = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f) + + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=act1_f) + + if mode == tf.estimator.ModeKeys.TRAIN: + gact1_f = model.send('act1_f', act1_f, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + model.send('act1_f', act1_f, require_grad=False) + + pred = model.recv('pred', tf.float32, require_grad=False) + raw_id = features['raw_id'] + update_result_op = tf.py_function(result_writer.update_result, [raw_id, pred], [], 'update') + with tf.control_dependencies([update_result_op]): + auc = model.recv('auc', tf.float32, require_grad=False) + loss = model.recv('loss', tf.float32, require_grad=False) + + logging_hook = tf.train.LoggingTensorHook({ + 
'auc': auc, + 'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc, every_n_iter=1) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.GradientDescentOptimizer(0.1) + train_op = model.minimize(optimizer, act1_f, grad_loss=gact1_f, global_step=global_step) + return model.make_spec(mode, + loss=tf.math.reduce_mean(act1_f), + train_op=train_op, + training_hooks=[logging_hook, step_metric_hook]) + if mode == tf.estimator.ModeKeys.EVAL: + fake_loss = tf.reduce_mean(act1_f) + return model.make_spec(mode=mode, loss=fake_loss, evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + outputs_path = os.path.join(os.environ['OUTPUT_BASE_DIR'], 'outputs') + tf.gfile.MakeDirs(outputs_path) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) + if args.worker: + result_writer.write_result(os.path.join(outputs_path, f'worker-{str(args.worker_rank)}.csv')) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/leader/config.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/leader/config.py new file mode 100644 index 000000000..b73e677f3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/leader/config.py @@ -0,0 +1,3 @@ +leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12'] +leader_label_name = ['label'] +follower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9'] diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/leader/main.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/leader/main.py new file mode 100644 index 000000000..832f27b65 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/e2e_test_v4/leader/main.py @@ -0,0 +1,132 @@ +import os +import logging +import numpy as np +import tensorflow.compat.v1 as tf +import fedlearner.trainer as flt +from config import * +from fedlearner.trainer.trainer_worker import StepLossAucMetricsHook + +ROLE = 'leader' + +parser = flt.trainer_worker.create_argument_parser() +parser.add_argument('--batch-size', type=int, default=10, help='Training batch size.') +args = parser.parse_args() + + +class ResultWriter: + + def __init__(self): + self.result = {} + + def update_result(self, raw_id, pred): + raw_id = raw_id.numpy() + pred = pred.numpy() + for i in range(len(raw_id)): + self.result[raw_id[i].decode('utf-8')] = pred[i] + + def write_result(self, filename): + raw_id = np.array(list(self.result.keys())).reshape(-1) + pred = np.array([self.result[i] for i in raw_id]).reshape(-1) + with tf.gfile.Open(filename, 'w') as f: + np.savetxt(f, np.dstack((raw_id, pred))[0], '%s,%s', header='raw_id,pred') + logging.info(f'[write_result]output result to {filename}') + + +result_writer = ResultWriter() + + +def input_fn(bridge, trainer_master): + dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge, trainer_master).make_dataset() + + def parse_fn(example): + feature_map = dict() + feature_map['example_id'] = tf.FixedLenFeature([], tf.string) + feature_map['raw_id'] = tf.FixedLenFeature([], tf.string) + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + label_map = {} + 
for name in leader_label_name: + label_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + features = tf.parse_example(example, features=feature_map) + labels = tf.parse_example(example, features=label_map) + return features, labels + + dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) + return dataset + + +def serving_input_receiver_fn(): + feature_map = { + 'example_id': tf.FixedLenFeature([], tf.string), + 'raw_id': tf.FixedLenFeature([], tf.string), + } + for name in leader_feature_names: + feature_map[name] = tf.FixedLenFeature([], tf.float32, default_value=0.0) + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.parse_example(record_batch, features=feature_map) + features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f') + receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']} + return tf.estimator.export.ServingInputReceiver(features, receiver_tensors) + + +def model_fn(model, features, labels, mode): + logging.info('model_fn: mode %s', mode) + x = [tf.expand_dims(features[name], axis=-1) for name in leader_feature_names] + x = tf.concat(x, axis=-1) + + w1l = tf.get_variable('w1l', + shape=[len(leader_feature_names), len(leader_label_name)], + dtype=tf.float32, + initializer=tf.random_uniform_initializer(-0.01, 0.01)) + b1l = tf.get_variable('b1l', shape=[len(leader_label_name)], dtype=tf.float32, initializer=tf.zeros_initializer()) + + act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l) + if mode == tf.estimator.ModeKeys.TRAIN: + act1_f = model.recv('act1_f', tf.float32, require_grad=True) + elif mode == tf.estimator.ModeKeys.EVAL: + act1_f = model.recv('act1_f', tf.float32, require_grad=False) + else: + act1_f = features['act1_f'] + logits = act1_l + act1_f + pred = tf.math.sigmoid(logits) + if mode == tf.estimator.ModeKeys.PREDICT: + return model.make_spec(mode=mode, predictions=pred) + + raw_id = features['raw_id'] + update_result_op = tf.py_function(result_writer.update_result, [raw_id, pred], [], 'update') + model.send('pred', pred, require_grad=False) + y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name] + y = tf.concat(y, axis=-1) + + with tf.control_dependencies([update_result_op]): + loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits) + _, auc = tf.metrics.auc(labels=y, predictions=pred) + logging_hook = tf.train.LoggingTensorHook({ + 'auc': auc, + 'loss': loss, + }, every_n_iter=10) + step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc, every_n_iter=1) + model.send('auc', auc, require_grad=False) + model.send('loss', loss, require_grad=False) + + global_step = tf.train.get_or_create_global_step() + if mode == tf.estimator.ModeKeys.TRAIN: + optimizer = tf.train.AdamOptimizer(1e-4) + train_op = model.minimize(optimizer, loss, global_step=global_step) + return model.make_spec(mode=mode, loss=loss, train_op=train_op, training_hooks=[logging_hook, step_metric_hook]) + + if mode == tf.estimator.ModeKeys.EVAL: + loss_pair = tf.metrics.mean(loss) + return model.make_spec(mode=mode, + loss=loss, + eval_metric_ops={'loss': loss_pair}, + evaluation_hooks=[logging_hook, step_metric_hook]) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + outputs_path = os.path.join(os.environ['OUTPUT_BASE_DIR'], 'outputs') + tf.gfile.MakeDirs(outputs_path) + flt.trainer_worker.train(ROLE, args, input_fn, model_fn, serving_input_receiver_fn) + if args.worker: + 
result_writer.write_result(os.path.join(outputs_path, f'worker-{str(args.worker_rank)}.csv')) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/follower.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/follower.py new file mode 100644 index 000000000..6497fb759 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/follower.py @@ -0,0 +1,41 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import logging +import tensorflow as tf +from model import run, get_dataset +from fedlearner.fedavg import train_from_keras_model + +fl_name = 'follower' + +mode = os.getenv('MODE', 'train') +epoch_num = int(os.getenv('EPOCH_NUM', 1)) +data_path = os.getenv('DATA_PATH') +output_base_dir = os.getenv('OUTPUT_BASE_DIR') +steps_per_sync = int(os.getenv('FL_STEPS_PER_SYNC', 10)) +LOAD_MODEL_FROM = os.getenv('LOAD_MODEL_FROM') + +if __name__ == '__main__': + print('-------------------------------') + print('mode : ', mode) + print('data_path : ', data_path) + print('output_base_dir : ', output_base_dir) + print('load model from : ', LOAD_MODEL_FROM) + print('-------------------------------') + logging.basicConfig(level=logging.INFO) + logging.info('mode: %s', mode) + ds = get_dataset(data_path) + run(fl_name, mode, ds, epoch_num, steps_per_sync) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/leader.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/leader.py new file mode 100644 index 000000000..650a6bf23 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/leader.py @@ -0,0 +1,41 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
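Both horizontal entry points (follower.py above, leader.py below) are configured purely through environment variables rather than CLI flags. The sketch below restates that contract as read by the code; every value is an example and the two paths are hypothetical placeholders:

```python
# Environment contract for the horizontal e2e scripts (example values only).
import os

os.environ.setdefault('MODE', 'train')   # 'train' trains; any other value loads the
                                         # latest checkpoint ('eval' also evaluates)
os.environ.setdefault('EPOCH_NUM', '1')
os.environ.setdefault('DATA_PATH', '/data/cifar10/train')   # hypothetical path
os.environ.setdefault('OUTPUT_BASE_DIR', '/outputs/job-0')  # hypothetical path
os.environ.setdefault('FL_STEPS_PER_SYNC', '10')            # FedAvg weight-sync cadence
os.environ.setdefault('FL_LEADER_ADDRESS', '0.0.0.0:6870')  # consumed in model.py
os.environ.setdefault('LOAD_MODEL_FROM', '/outputs/job-0/checkpoints')  # non-train modes
```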
+# + +import os +import logging +import tensorflow as tf +from model import run, get_dataset +from fedlearner.fedavg import train_from_keras_model + +fl_name = 'leader' + +mode = os.getenv('MODE', 'train') +epoch_num = int(os.getenv('EPOCH_NUM', 1)) +data_path = os.getenv('DATA_PATH') +output_base_dir = os.getenv('OUTPUT_BASE_DIR') +steps_per_sync = int(os.getenv('FL_STEPS_PER_SYNC', 10)) +LOAD_MODEL_FROM = os.getenv('LOAD_MODEL_FROM') + +if __name__ == '__main__': + print('-------------------------------') + print('mode : ', mode) + print('data_path : ', data_path) + print('output_base_dir : ', output_base_dir) + print('load model from : ', LOAD_MODEL_FROM) + print('-------------------------------') + logging.basicConfig(level=logging.INFO) + logging.info('mode: %s', mode) + ds = get_dataset(data_path) + run(fl_name, mode, ds, epoch_num, steps_per_sync) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/metrics.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/metrics.py new file mode 100644 index 000000000..e7ca4441b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/metrics.py @@ -0,0 +1,62 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
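metrics.py (next) adapts fedlearner's metrics emission to a standard Keras callback. A minimal attachment sketch follows; it assumes the fedlearner global context (and hence `_gtx.stats_client`) is initialized, and the toy model and data are placeholders:

```python
# Sketch: attaching the callback defined below to an ordinary Keras loop.
import numpy as np
import tensorflow as tf
from metrics import MetricsKerasCallback  # the class defined just below

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer='sgd', loss='mse')
x, y = np.random.rand(64, 4), np.random.rand(64, 1)
model.fit(x, y, batch_size=8, epochs=1, callbacks=[MetricsKerasCallback()])
```

Note that `update_metrics` only fires when `logs` contains a `'batch'` key, which TF 1.15's Keras supplies on each batch end; newer Keras versions may not, so the callback would silently skip there.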
+# + +import os +import tensorflow as tf +from fedlearner.common import metrics +from fedlearner.fedavg.master import LeaderMaster, FollowerMaster +from fedlearner.fedavg.cluster.cluster_spec import FLClusterSpec +from fedlearner.fedavg._global_context import global_context as _gtx + + +class MetricsKerasCallback(tf.keras.callbacks.Callback): + + def __init__(self): + super().__init__() + self._global_step = None + self._metrics = {} + + def on_train_end(self, logs=None): + self.emit_metrics() + + def on_train_batch_end(self, batch, logs=None): + self.update_metrics(logs) + + def on_test_end(self, logs=None): + self.emit_metrics() + + def on_test_batch_end(self, batch, logs=None): + self.update_metrics(logs) + + def update_metrics(self, logs: dict): + if 'batch' not in logs: + return + + self._global_step = logs['batch'] + self._metrics = logs + if self._global_step % 10 == 0: + self.emit_metrics() + + def emit_metrics(self): + if self._global_step is None: + return + stats_pipe = _gtx.stats_client.pipeline() + stats_pipe.gauge('trainer.metric_global_step', self._global_step) + for key, value in self._metrics.items(): + if key in ('size', 'batch'): + continue + stats_pipe.gauge('trainer.metric_value', value, tags={'metric': key}) + metrics.emit_store(name=key, value=value) + stats_pipe.send() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/model.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/model.py new file mode 100644 index 000000000..e7f8e28f4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/horizontal_e2e_test_v1/model.py @@ -0,0 +1,150 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
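model.py (next) maps the ten CIFAR-10 class names to integer ids with a chain of `tf.case` predicates. For comparison, a static lookup table achieves the same mapping in one op; this is an alternative sketch, not what the file uses, with the class order copied from `_label_to_int` below:

```python
# Alternative to the tf.case chain below: a string -> id lookup table.
import tensorflow.compat.v1 as tf

classes = ['deer', 'frog', 'horse', 'dog', 'automobile',
           'airplane', 'ship', 'cat', 'truck', 'bird']
table = tf.lookup.StaticHashTable(
    tf.lookup.KeyValueTensorInitializer(classes, list(range(len(classes)))),
    default_value=-1)
label_id = table.lookup(tf.constant('frog'))  # -> 1
# In graph mode, run tf.tables_initializer() once before using the table.
```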
+# + +import os +import time +import tensorflow as tf +import numpy as np +import logging +from datetime import datetime +from fedlearner.fedavg import train_from_keras_model +from metrics import MetricsKerasCallback + +LOAD_MODEL_FROM = os.getenv('LOAD_MODEL_FROM') # load from {STORAGE_ROOT}/job_outputs/{job.name}/checkpoints +OUTPUT_BASE_DIR = os.getenv('OUTPUT_BASE_DIR') # save output under {OUTPUT_BASE_DIR}/output +EXPORT_PATH = os.path.join(OUTPUT_BASE_DIR, 'exported_models') # save estimator to {EXPORT_PATH} +CHECKPOINT_PATH = os.path.join(OUTPUT_BASE_DIR, 'checkpoints') # save keras model to {CHECKPOINT_PATH} + +fl_leader_address = os.getenv('FL_LEADER_ADDRESS', '0.0.0.0:6870') +FL_CLUSTER = {'leader': {'name': 'leader', 'address': fl_leader_address}, 'followers': [{'name': 'follower'}]} + + +def _label_to_int(label: str): + pred_fn_pairs = [(tf.equal(label, 'deer'), lambda: 0), (tf.equal(label, 'frog'), lambda: 1), + (tf.equal(label, 'horse'), lambda: 2), (tf.equal(label, 'dog'), lambda: 3), + (tf.equal(label, 'automobile'), lambda: 4), (tf.equal(label, 'airplane'), lambda: 5), + (tf.equal(label, 'ship'), lambda: 6), (tf.equal(label, 'cat'), lambda: 7), + (tf.equal(label, 'truck'), lambda: 8), (tf.equal(label, 'bird'), lambda: 9)] + return tf.case(pred_fn_pairs) + + +def decode_and_resize(args): + x, h, w, c = args + x = tf.io.decode_raw(x, tf.uint8) + x = tf.reshape(x, [h, w, c]) + x = tf.image.resize(x, (128, 128)) + x = tf.cast(x, tf.float32) + x = tf.image.per_image_standardization(x) + x.set_shape([128, 128, 3]) + return x + + +def serving_input_receiver_fn(): + feature_map = { + 'width': tf.io.FixedLenFeature([], tf.int64), + 'height': tf.io.FixedLenFeature([], tf.int64), + 'nChannels': tf.io.FixedLenFeature([], tf.int64), + 'data': tf.io.FixedLenFeature([], tf.string) + } + record_batch = tf.placeholder(dtype=tf.string, name='examples') + features = tf.io.parse_example(record_batch, features=feature_map) + features['data'] = tf.map_fn(decode_and_resize, + (features['data'], features['height'], features['width'], features['nChannels']), + dtype=tf.float32) + receiver_tensors = {'examples': record_batch} + return tf.estimator.export.ServingInputReceiver({'data': features['data']}, receiver_tensors) + + +def parse_fn(record: bytes): + features = tf.io.parse_single_example( + record, { + 'width': tf.io.FixedLenFeature([], tf.int64), + 'height': tf.io.FixedLenFeature([], tf.int64), + 'nChannels': tf.io.FixedLenFeature([], tf.int64), + 'label': tf.io.FixedLenFeature([], tf.string), + 'data': tf.io.FixedLenFeature([], tf.string) + }) + label = _label_to_int(features['label']) + img = tf.decode_raw(features['data'], out_type=tf.uint8) + img = tf.reshape(img, [features['height'], features['width'], features['nChannels']]) + img = tf.image.resize(img, size=[128, 128]) + img = tf.cast(img, tf.float32) + return img, label + + +def create_model(): + model = tf.keras.Sequential([ + tf.keras.Input(shape=(128, 128, 3), name='data'), + tf.keras.layers.Conv2D(16, kernel_size=(3, 3), activation='relu'), + tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation='relu'), + tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation='relu'), + tf.keras.layers.GlobalMaxPool2D(), + tf.keras.layers.BatchNormalization(), + tf.keras.layers.Dense(64, activation='relu'), + tf.keras.layers.Dense(16, activation='relu'), + tf.keras.layers.Dense(10, activation='softmax', name='label'), + ]) + model.compile(optimizer=tf.keras.optimizers.Adam(0.001), + loss=tf.keras.losses.SparseCategoricalCrossentropy(), + 
metrics=['acc']) + return model + + +def get_dataset(data_path: str): + files = [] + for dirname, subdirs, filenames in tf.io.gfile.walk(data_path): + for filename in filenames: + if filename.startswith('part'): + files.append(os.path.join(dirname, filename)) + print('list filenames: ', files) + ds = tf.data.TFRecordDataset(files) \ + .map(map_func=parse_fn) \ + .shuffle(30000) \ + .batch(30) \ + .prefetch(10) + return ds + + +def run(fl_name, mode, ds, epoch_num, steps_per_sync): + if mode == 'train': + model = create_model() + model.build([None, 128, 128, 3]) + train_from_keras_model(model, + x=ds, + y=None, + epochs=epoch_num, + fl_name=fl_name, + fl_cluster=FL_CLUSTER, + steps_per_sync=steps_per_sync) + estimator = tf.keras.estimator.model_to_estimator(model) + # since fedlearner will save keras model, sleep for model importer to import the latest model + time.sleep(60) + export_path = estimator.export_saved_model(EXPORT_PATH, serving_input_receiver_fn=serving_input_receiver_fn) + logging.info(f'\nexport estimator to {export_path}\n') + checkpoint_path = os.path.join(CHECKPOINT_PATH, str(int(datetime.now().timestamp()))) + model.save(checkpoint_path, save_format='tf') + logging.info(f'\nexport model to {CHECKPOINT_PATH}\n') + else: + latest_path = os.path.join(LOAD_MODEL_FROM, sorted(tf.io.gfile.listdir(LOAD_MODEL_FROM))[-1]) + logging.info('load model from %s', latest_path) + model = tf.keras.models.load_model(latest_path) + if mode == 'eval': + model.evaluate(ds, callbacks=[MetricsKerasCallback()]) + output = model.predict(ds) + output_path = os.path.join(OUTPUT_BASE_DIR, 'outputs') + tf.io.gfile.makedirs(output_path) + logging.info('write output to %s', output_path) + with tf.io.gfile.GFile(os.path.join(output_path, 'output.csv'), 'w') as fp: + np.savetxt(fp, output) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/preset_algorithm_service.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/preset_algorithm_service.py new file mode 100644 index 000000000..b9bee62a5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/preset_algorithm_service.py @@ -0,0 +1,132 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import copy +from envs import Envs +from pathlib import Path +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.algorithm.utils import algorithm_path, check_algorithm_file +from fedlearner_webconsole.algorithm.models import Algorithm, Source, AlgorithmType, AlgorithmProject + +_ALGORITHMS_PATH = Path(__file__, '..').resolve() + +# When inserting a preset algorithm, you need to insert the algorithm project and the algorithm into +# PRESET_ALGORITHM_PROJECT_LIST and PRESET_ALGORITHM_LIST respectively. 
The algorithm project and the +# algorithm must have the same name. If the algorithm project already exists, update its +# latest_version instead of inserting a new project. + +PRESET_ALGORITHM_PROJECT_LIST = [ + AlgorithmProject(name='e2e_test', + type=AlgorithmType.NN_VERTICAL, + uuid='u1b9eea3753e24fd9b91', + source=Source.PRESET, + comment='algorithm for end to end test', + latest_version=4), + AlgorithmProject(name='horizontal_e2e_test', + type=AlgorithmType.NN_HORIZONTAL, + uuid='u76630127d63c4ddb871', + source=Source.PRESET, + comment='algorithm for end to end test', + latest_version=1), + AlgorithmProject(name='secure_boost', + type=AlgorithmType.TREE_VERTICAL, + uuid='u7607b76db2c843fb9cd', + source=Source.PRESET, + comment='algorithm for secure boost', + latest_version=1) +] + +PRESET_ALGORITHM_LIST = [ + Algorithm(name='e2e_test', + version=1, + uuid='u5c4f510aab2f4a288c8', + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v1'), + comment='algorithm for end to end test'), + Algorithm(name='e2e_test', + version=2, + uuid='uc74ce6731906480c804', + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v2'), + comment='algorithm for end to end test'), + Algorithm(name='e2e_test', + version=3, + uuid='u322cd66836f04a13b94', + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v3'), + comment='algorithm for end to end test'), + Algorithm(name='e2e_test', + version=4, + uuid='uff7a19e8a1834d5e991', + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v4'), + comment='supports saving prediction results'), + Algorithm(name='horizontal_e2e_test', + version=1, + uuid='ub7b45bf127fc4aebad4', + source=Source.PRESET, + type=AlgorithmType.NN_HORIZONTAL, + path=os.path.join(_ALGORITHMS_PATH, 'horizontal_e2e_test_v1'), + comment='algorithm for horizontal nn end to end test'), + Algorithm(name='secure_boost', + version=1, + uuid='u936cb7254e4444caaf9', + source=Source.PRESET, + type=AlgorithmType.TREE_VERTICAL, + comment='algorithm for secure boost') +] + + +def create_algorithm_if_not_exists(): + file_operator = FileOperator() + file_manager = FileManager() + + for algo_project in PRESET_ALGORITHM_PROJECT_LIST: + with db.session_scope() as session: + # Look up the project itself (not an Algorithm row) when checking for existence + algorithm_project = session.query(AlgorithmProject).filter_by(name=algo_project.name, source=Source.PRESET).first() + if algorithm_project is None: + session.add(algo_project) + session.commit() + + for preset_algo in PRESET_ALGORITHM_LIST: + algo = copy.deepcopy(preset_algo) + dest_algo_path = None + if preset_algo.path: + dest_algo_path = algorithm_path(Envs.STORAGE_ROOT, algo.name, algo.version) + file_manager.mkdir(dest_algo_path) + with check_algorithm_file(dest_algo_path): + file_operator.copy_to(preset_algo.path, dest_algo_path) + algo.path = dest_algo_path + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(name=algo.name, version=algo.version, + source=Source.PRESET).first() + # Only need to update the path when the algo has been added to the database + if preset_algo.path and algorithm: + if algorithm.path and file_manager.exists(algorithm.path): + file_manager.remove(algorithm.path) + algorithm.path = dest_algo_path + if algorithm is None: + algo_project = session.query(AlgorithmProject).filter_by(name=algo.name, source=Source.PRESET).first() + assert algo_project is not None, 'preset algorithm project is not found' + 
algo.algorithm_project_id = algo_project.id + session.add(algo) + session.commit() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/preset_algorithm_service_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/preset_algorithm_service_test.py new file mode 100644 index 000000000..e6b83befc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/preset_algorithms/preset_algorithm_service_test.py @@ -0,0 +1,114 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import unittest +import tempfile +from envs import Envs +from pathlib import Path +from unittest.mock import patch, MagicMock +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.algorithm.preset_algorithms.preset_algorithm_service import create_algorithm_if_not_exists +from fedlearner_webconsole.algorithm.models import Algorithm, Source, AlgorithmType, AlgorithmProject +from fedlearner_webconsole.utils.file_manager import FileManager + +_ALGORITHMS_PATH = Path(__file__, '..').resolve() + + +class PresetTemplateServiceTest(NoWebServerTestCase): + + def test_create_all(self): + Envs.STORAGE_ROOT = tempfile.gettempdir() + create_algorithm_if_not_exists() + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(name='e2e_test').first() + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'follower', 'main.py'))) + algorithm = session.query(Algorithm).filter_by(name='horizontal_e2e_test').first() + self.assertEqual(sorted(os.listdir(algorithm.path)), ['follower.py', 'leader.py', 'metrics.py', 'model.py']) + algorithm = session.query(Algorithm).filter_by(name='secure_boost').first() + self.assertIsNone(algorithm.path) + algo_ids = session.query(Algorithm.id).filter_by(source=Source.PRESET).all() + self.assertEqual(len(algo_ids), 6) + + @patch('fedlearner_webconsole.algorithm.preset_algorithms.preset_algorithm_service.PRESET_ALGORITHM_PROJECT_LIST', + new_callable=list) + @patch('fedlearner_webconsole.algorithm.preset_algorithms.preset_algorithm_service.PRESET_ALGORITHM_LIST', + new_callable=list) + def test_update_preset_algorithm(self, mock_preset_algorithm_list: MagicMock, + mock_preset_algorithm_project_list: MagicMock): + mock_preset_algorithm_project_list.extend([ + AlgorithmProject(name='e2e_test', + type=AlgorithmType.NN_VERTICAL, + uuid='u1b9eea3753e24fd9b91', + source=Source.PRESET, + comment='algorithm for end to end test', + latest_version=4) + ]) + mock_preset_algorithm_list.extend([ + Algorithm(name='e2e_test', + version=1, + uuid='u5c4f510aab2f4a288c8', + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v1'), + comment='algorithm for end to end test'), + Algorithm(name='e2e_test', + version=2, + uuid='uc74ce6731906480c804', + 
source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v2'), + comment='algorithm for end to end test') + ]) + file_manager = FileManager() + Envs.STORAGE_ROOT = tempfile.gettempdir() + create_algorithm_if_not_exists() + algo = Algorithm(name='e2e_test', + version=3, + uuid='e2e_test_version_3', + source=Source.PRESET, + type=AlgorithmType.NN_VERTICAL, + path=os.path.join(_ALGORITHMS_PATH, 'e2e_test_v3'), + comment='algorithm for end to end test') + mock_preset_algorithm_list.append(algo) + create_algorithm_if_not_exists() + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(name='e2e_test', source=Source.PRESET, version=3).first() + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'follower', 'main.py'))) + # when algorithm path does not exist + file_manager.remove(algorithm.path) + self.assertFalse(file_manager.exists(algorithm.path)) + session.commit() + create_algorithm_if_not_exists() + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(name='e2e_test', source=Source.PRESET, version=3).first() + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'follower', 'main.py'))) + # when algorithm path is empty + file_manager.remove(algorithm.path) + file_manager.mkdir(algorithm.path) + self.assertEqual(len(file_manager.ls(algorithm.path)), 0) + create_algorithm_if_not_exists() + with db.session_scope() as session: + algorithm = session.query(Algorithm).filter_by(name='e2e_test', source=Source.PRESET, version=3).first() + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(algorithm.path, 'follower', 'main.py'))) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/service.py b/web_console_v2/api/fedlearner_webconsole/algorithm/service.py new file mode 100644 index 000000000..c808a2a8c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/service.py @@ -0,0 +1,233 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
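A note ahead of the algorithm service below: `release_algorithm` takes an exclusive row lock (`populate_existing().with_for_update()`) before bumping `latest_version`. A toy, model-free sketch of the lost-update race that the lock rules out (class and method names here are illustrative, not part of the diff):

```python
import threading


class ToyProject:
    """Toy stand-in for AlgorithmProject.latest_version bookkeeping."""

    def __init__(self):
        self.latest_version = 0
        self._lock = threading.Lock()  # plays the role of the DB row lock

    def release_unsafe(self) -> int:
        # Two threads can both read the same value here and both write N + 1:
        # the classic lost update that SELECT ... FOR UPDATE prevents.
        version = self.latest_version
        self.latest_version = version + 1
        return version + 1

    def release_locked(self) -> int:
        with self._lock:  # serialize the read-modify-write
            self.latest_version += 1
            return self.latest_version
```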
+# + +import os +import tarfile +import tempfile +from io import FileIO +from datetime import datetime +from typing import Optional, List +from sqlalchemy.orm import Session +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.filtering_pb2 import FilterOp, FilterExpression +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, Source, ReleaseStatus, PublishStatus, \ + PendingAlgorithm, AlgorithmType + +# TODO(wangzeju): use singleton of file_manager or file_operator +file_manager = FileManager() +file_operator = FileOperator() + + +class AlgorithmProjectService: + + FILTER_FIELDS = { + 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'type': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}), + } + + def __init__(self, session: Session): + self._session = session + self._filter_builder = FilterBuilder(model_class=AlgorithmProject, supported_fields=self.FILTER_FIELDS) + + @staticmethod + def _extract_to(file, path): + with tempfile.TemporaryDirectory() as directory: + with tarfile.open(fileobj=file) as tar: + tar.extractall(directory) + for root, _, files in os.walk(directory): + for name in files: + # There will be error files starting with '._' when the file is uploaded from the MacOS system + if name.startswith('._') or name.endswith('.pyc'): + os.remove(os.path.join(root, name)) + file_operator.copy_to(directory, path) + + def create_algorithm_project(self, + name: str, + project_id: int, + algorithm_type: AlgorithmType, + username: str, + parameter, + path: str, + comment: Optional[str] = None, + file: Optional[FileIO] = None) -> AlgorithmProject: + if file is not None: + self._extract_to(file, path) + algo_project = AlgorithmProject(name=name, + uuid=resource_uuid(), + project_id=project_id, + type=algorithm_type, + source=Source.USER, + username=username, + path=path, + comment=comment) + algo_project.set_parameter(parameter) + self._session.add(algo_project) + self._session.flush() + return algo_project + + def release_algorithm(self, + algorithm_project: AlgorithmProject, + username: str, + path: str, + participant_id: Optional[int] = None, + comment: Optional[str] = None): + # apply exclusive lock on algorithm project to avoid race condition on algorithm version + algo_project: AlgorithmProject = self._session.query( + AlgorithmProject).populate_existing().with_for_update().get(algorithm_project.id) + file_operator.copy_to(algorithm_project.path, path, create_dir=True) + algo = Algorithm(name=algorithm_project.name, + type=algorithm_project.type, + parameter=algorithm_project.parameter, + path=path, + source=Source.USER, + username=username, + participant_id=participant_id, + project_id=algorithm_project.project_id, + algorithm_project_id=algorithm_project.id, + comment=comment) + algo.uuid = resource_uuid() + algo.version = algo_project.latest_version + 1 + algo_project.latest_version += 1 + algo_project.release_status = ReleaseStatus.RELEASED + self._session.add(algo) + return algo + + # TODO(linfan): implement delete file from file system + def delete(self, algorithm_project: AlgorithmProject): + algorithm_service = AlgorithmService(self._session) + for algo in 
algorithm_project.algorithms: + algorithm_service.delete(algo) + self._session.delete(algorithm_project) + + def get_published_algorithm_projects(self, project_id: int, + filter_exp: Optional[FilterExpression]) -> List[AlgorithmProject]: + query = self._session.query(AlgorithmProject).filter_by(project_id=project_id, + publish_status=PublishStatus.PUBLISHED) + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + return query.all() + + def get_published_algorithms_latest_update_time(self, algorithm_project_id: int) -> datetime: + algo = self._session.query(Algorithm).filter_by(algorithm_project_id=algorithm_project_id, + publish_status=PublishStatus.PUBLISHED).order_by( + Algorithm.updated_at.desc()).limit(1).first() + return algo.updated_at + + +class PendingAlgorithmService: + + def __init__(self, session: Session): + self._session = session + + def create_algorithm_project(self, + pending_algorithm: PendingAlgorithm, + username: str, + name: str, + comment: Optional[str] = None) -> AlgorithmProject: + algo_project = self._session.query(AlgorithmProject).filter( + AlgorithmProject.name == name, AlgorithmProject.source == Source.THIRD_PARTY).first() + if algo_project is not None: + raise ValueError(f'there already exists algorithm project with name {name} from third party') + algorithm_project = AlgorithmProject(name=name, + project_id=pending_algorithm.project_id, + latest_version=pending_algorithm.version, + type=pending_algorithm.type, + source=Source.THIRD_PARTY, + username=username, + participant_id=pending_algorithm.participant_id, + comment=comment, + uuid=pending_algorithm.algorithm_project_uuid, + release_status=ReleaseStatus.RELEASED) + algorithm_project.set_parameter(pending_algorithm.get_parameter()) + self._session.add(algorithm_project) + return algorithm_project + + def create_algorithm(self, + pending_algorithm: PendingAlgorithm, + algorithm_project_id: int, + username: str, + path: str, + comment: Optional[str] = None) -> Algorithm: + file_operator.copy_to(pending_algorithm.path, path, create_dir=True) + algo = Algorithm(name=pending_algorithm.name, + type=pending_algorithm.type, + parameter=pending_algorithm.parameter, + path=path, + source=Source.THIRD_PARTY, + username=username, + participant_id=pending_algorithm.participant_id, + project_id=pending_algorithm.project_id, + algorithm_project_id=algorithm_project_id, + uuid=pending_algorithm.algorithm_uuid, + version=pending_algorithm.version, + comment=comment) + self._session.add(algo) + return algo + + +class AlgorithmService: + + def __init__(self, session: Session): + self._session = session + + def _update_algorithm_project_publish_status(self, algorithm_project_id: int): + algorithms = self._session.query(Algorithm).filter_by(algorithm_project_id=algorithm_project_id, + publish_status=PublishStatus.PUBLISHED).all() + # There may be a race condition here. Only one Algorithm under the AlgorithmProject is published. + # At this time, if an algorithm is published and an algorithm is unpublished or deleted at the same time, + # there may be a "published" Algorithm under the AlgorithmProject, but the AlgorithmProject is + # UNPUBLISHED. The user "Publish" or "Unpublish" the algorithm again, and it will be normal. 
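+        # Concretely: X is the only published algorithm, and one request publishes Y while
+        # another unpublishes X. The unpublishing transaction may not see Y yet, so it can mark
+        # the project UNPUBLISHED even though Y ends up PUBLISHED. Publishing or unpublishing
+        # any algorithm afterwards converges the status, so this is tolerated rather than locked.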
+ if len(algorithms) == 0: + algo_project = self._session.query(AlgorithmProject).get(algorithm_project_id) + algo_project.publish_status = PublishStatus.UNPUBLISHED + + def _update_algorithm_project_release_status(self, algorithm_project_id: int): + algorithms = self._session.query(Algorithm).filter_by(algorithm_project_id=algorithm_project_id).all() + # There may be a race condition here too. + if len(algorithms) == 0: + algo_project = self._session.query(AlgorithmProject).get(algorithm_project_id) + algo_project.release_status = ReleaseStatus.UNRELEASED + + def delete(self, algorithm: Algorithm): + self._session.delete(algorithm) + algo_project = self._session.query(AlgorithmProject).get(algorithm.algorithm_project_id) + if algo_project.latest_version == algorithm.version: + algo_project.release_status = ReleaseStatus.UNRELEASED + self._update_algorithm_project_publish_status(algorithm_project_id=algorithm.algorithm_project_id) + self._update_algorithm_project_release_status(algorithm_project_id=algorithm.algorithm_project_id) + + def publish_algorithm(self, algorithm_id: int, project_id: int) -> Algorithm: + algorithm = self._session.query(Algorithm).filter_by(id=algorithm_id, project_id=project_id).first() + algorithm.publish_status = PublishStatus.PUBLISHED + algo_project = self._session.query(AlgorithmProject).get(algorithm.algorithm_project_id) + algo_project.publish_status = PublishStatus.PUBLISHED + return algorithm + + def unpublish_algorithm(self, algorithm_id: int, project_id: int) -> Algorithm: + algorithm = self._session.query(Algorithm).filter_by(id=algorithm_id, project_id=project_id).first() + algorithm.publish_status = PublishStatus.UNPUBLISHED + self._update_algorithm_project_publish_status(algorithm_project_id=algorithm.algorithm_project_id) + return algorithm + + def get_published_algorithms(self, project_id: int, algorithm_project_id: int) -> List[Algorithm]: + return self._session.query(Algorithm).filter_by(project_id=project_id, + algorithm_project_id=algorithm_project_id, + publish_status=PublishStatus.PUBLISHED).all() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/service_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/service_test.py new file mode 100644 index 000000000..39dec7a58 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/service_test.py @@ -0,0 +1,128 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
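The `_extract_to` helper in the service above drops macOS upload artifacts after untarring, and `test_extract_files` in the test file that follows pins that behavior down. For quick reference, the filtering rule in isolation (a standalone sketch; the function name is illustrative):

```python
def is_upload_junk(filename: str) -> bool:
    """Sketch of the rule _extract_to applies: drop macOS resource forks and bytecode."""
    return filename.startswith('._') or filename.endswith('.pyc')


assert is_upload_junk('._main.py') and is_upload_junk('main.pyc')
assert not is_upload_junk('___main.py')  # triple underscore is kept, as the test asserts
```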
+# + +import os +import tarfile +import tempfile +import unittest + +from io import BytesIO +from pathlib import Path +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, PublishStatus, ReleaseStatus +from fedlearner_webconsole.algorithm.service import AlgorithmService, AlgorithmProjectService +from fedlearner_webconsole.db import db + + +class AlgorithmServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + algo_project1 = AlgorithmProject(id=1, + name='test-algo-project-1', + publish_status=PublishStatus.PUBLISHED, + release_status=ReleaseStatus.RELEASED) + algo_project2 = AlgorithmProject(id=2, + name='test-algo-project-2', + latest_version=3, + publish_status=PublishStatus.PUBLISHED, + release_status=ReleaseStatus.RELEASED) + algo1 = Algorithm(id=1, algorithm_project_id=1, name='test-algo-1', publish_status=PublishStatus.PUBLISHED) + algo2 = Algorithm(id=2, + algorithm_project_id=2, + name='test-algo-2', + version=1, + publish_status=PublishStatus.PUBLISHED) + algo3 = Algorithm(id=3, + algorithm_project_id=2, + name='test-algo-3', + version=2, + publish_status=PublishStatus.PUBLISHED) + algo4 = Algorithm(id=4, + algorithm_project_id=2, + name='test-algo-4', + version=3, + publish_status=PublishStatus.PUBLISHED) + session.add_all([algo_project1, algo_project2, algo1, algo2, algo3, algo4]) + session.commit() + + def test_delete_algorithm(self): + with db.session_scope() as session: + algo1 = session.query(Algorithm).filter_by(name='test-algo-1').first() + AlgorithmService(session).delete(algo1) + algo1 = session.query(Algorithm).filter_by(name='test-algo-1').execution_options( + include_deleted=True).first() + self.assertIsNone(algo1) + algo_project1 = session.query(AlgorithmProject).get(1) + self.assertEqual(algo_project1.release_status, ReleaseStatus.UNRELEASED) + + def test_algorithm_project_status_when_delete_algorithms(self): + with db.session_scope() as session: + algo2 = session.query(Algorithm).filter_by(name='test-algo-2').first() + algo3 = session.query(Algorithm).filter_by(name='test-algo-3').first() + algo4 = session.query(Algorithm).filter_by(name='test-algo-4').first() + AlgorithmService(session).delete(algo4) + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project-2').first() + self.assertEqual(algo_project.publish_status, PublishStatus.PUBLISHED) + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + algo_project.release_status = ReleaseStatus.RELEASED + AlgorithmService(session).delete(algo2) + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project-2').first() + self.assertEqual(algo_project.publish_status, PublishStatus.PUBLISHED) + self.assertEqual(algo_project.release_status, ReleaseStatus.RELEASED) + AlgorithmService(session).delete(algo3) + algo_project = session.query(AlgorithmProject).filter_by(name='test-algo-project-2').first() + self.assertEqual(algo_project.publish_status, PublishStatus.UNPUBLISHED) + self.assertEqual(algo_project.release_status, ReleaseStatus.UNRELEASED) + + +class AlgorithmProjectServiceTest(NoWebServerTestCase): + + def test_extract_files(self): + path = tempfile.mkdtemp() + path = Path(path, 'e2e_test').resolve() + path.mkdir() + path.joinpath('follower').mkdir() + path.joinpath('follower').joinpath('main.py').touch() + path.joinpath('follower').joinpath('._main.py').touch() + path.joinpath('follower').joinpath('main.pyc').touch() + 
path.joinpath('leader').mkdir() + path.joinpath('leader').joinpath('___main.py').touch() + file_path = path.joinpath('leader').joinpath('main.py') + file_path.touch() + file_path.write_text('import tensorflow', encoding='utf-8') + tar_path = os.path.join(tempfile.mkdtemp(), 'test.tar.gz') + with tarfile.open(tar_path, 'w:gz') as tar: + tar.add(os.path.join(path, 'leader'), arcname='leader') + tar.add(os.path.join(path, 'follower'), arcname='follower') + tar = tarfile.open(tar_path, 'r') # pylint: disable=consider-using-with + with tempfile.TemporaryDirectory() as directory: + with db.session_scope() as session: + # pylint: disable=protected-access + AlgorithmProjectService(session)._extract_to(BytesIO(tar.fileobj.read()), directory) + self.assertTrue(os.path.exists(os.path.join(directory, 'leader', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(directory, 'follower', 'main.py'))) + self.assertTrue(os.path.exists(os.path.join(directory, 'leader', '___main.py'))) + self.assertFalse(os.path.exists(os.path.join(directory, 'follower', '._main.py'))) + self.assertFalse(os.path.exists(os.path.join(directory, 'follower', 'main.pyc'))) + with open(os.path.join(directory, 'leader', 'main.py'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), 'import tensorflow') + with open(os.path.join(directory, 'follower', 'main.py'), encoding='utf-8') as fin: + self.assertEqual(fin.read(), '') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/BUILD.bazel new file mode 100644 index 000000000..f40922097 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/BUILD.bazel @@ -0,0 +1,69 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "transmit", + srcs = [ + "hash.py", + "receiver.py", + "sender.py", + ], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + ], +) + +py_test( + name = "hash_test", + srcs = [ + "hash_test.py", + ], + imports = ["../../.."], + deps = [ + ":transmit", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + ], +) + +py_test( + name = "sender_test", + srcs = [ + "sender_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data/algorithm", + ], + imports = ["../../.."], + deps = [ + ":transmit", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "receiver_test", + srcs = [ + "receiver_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data/algorithm", + ], + imports = ["../../.."], + deps = [ + ":transmit", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git 
a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/hash.py b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/hash.py new file mode 100644 index 000000000..e93828474 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/hash.py @@ -0,0 +1,24 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import hashlib + +from fedlearner_webconsole.utils.file_manager import FileManager + + +def get_file_md5(file_manager: FileManager, file_name: str) -> str: + # TODO(gezhengqiang): solve memory overflow problem + data = file_manager.read(file_name).encode() + return hashlib.md5(data).hexdigest() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/hash_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/hash_test.py new file mode 100644 index 000000000..8b4ba6636 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/hash_test.py @@ -0,0 +1,38 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import tempfile +import unittest + +from pathlib import Path + +from fedlearner_webconsole.algorithm.transmit.hash import get_file_md5 +from fedlearner_webconsole.utils.file_manager import FileManager + + +class HashTest(unittest.TestCase): + + def test_get_file_md5(self): + with tempfile.NamedTemporaryFile() as f: + Path(f.name).write_text('hello world', encoding='utf-8') + self.assertEqual(get_file_md5(FileManager(), f.name), '5eb63bbbe01eeed093cb22bb8f5acdc3') + + def test_get_file_md5_empty_file(self): + with tempfile.NamedTemporaryFile() as f: + self.assertEqual(get_file_md5(FileManager(), f.name), 'd41d8cd98f00b204e9800998ecf8427e') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/receiver.py b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/receiver.py new file mode 100644 index 000000000..4b8e91c8c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/receiver.py @@ -0,0 +1,49 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
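On the `get_file_md5` TODO above: the memory-overflow risk comes from reading the whole file before hashing, and can be avoided by feeding `hashlib` in chunks. A minimal sketch for local paths only (hypothetical helper; the real `FileManager` also fronts remote backends, so treat this as the shape of the fix rather than a drop-in replacement):

```python
import hashlib


def get_file_md5_chunked(file_name: str, chunk_size: int = 1024 * 1024) -> str:
    """Sketch: stream the file through hashlib instead of reading it whole."""
    md5 = hashlib.md5()
    with open(file_name, 'rb') as f:  # local filesystem only
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()
```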
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import tempfile +from typing import Iterator, Optional +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.algorithm.transmit.hash import get_file_md5 +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmData + +file_operator = FileOperator() + + +class AlgorithmReceiver(object): + + def __init__(self): + self._file_manager = FileManager() + + def write_data_and_extract(self, + data_iterator: Iterator[AlgorithmData], + dest: str, + expected_file_hash: Optional[str] = None): + temp_dir = f'{dest}_temp' + self._file_manager.mkdir(temp_dir) + with tempfile.NamedTemporaryFile(suffix='.tar') as temp_file: + # TODO: limit the size of the received file + _written = False + for data in data_iterator: + self._file_manager.write(temp_file.name, data.chunk, mode='a') + _written = True + if _written: + if expected_file_hash is not None: + file_hash = get_file_md5(self._file_manager, temp_file.name) + if file_hash != expected_file_hash: + raise ValueError('The received file is not completed') + file_operator.extract_to(temp_file.name, temp_dir, create_dir=True) + self._file_manager.rename(temp_dir, dest) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/receiver_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/receiver_test.py new file mode 100644 index 000000000..32d13465e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/receiver_test.py @@ -0,0 +1,91 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import tempfile +import unittest + +from google.protobuf.json_format import ParseDict +from envs import Envs +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmType, Source +from fedlearner_webconsole.algorithm.transmit.receiver import AlgorithmReceiver +from fedlearner_webconsole.algorithm.transmit.sender import AlgorithmSender +from fedlearner_webconsole.algorithm.utils import algorithm_cache_path +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmParameter + +_TEST_ALGORITHM_PATH = os.path.join(Envs.BASE_DIR, 'testing/test_data/algorithm/e2e_test') + + +class AlgorithmReceiverTest(NoWebServerTestCase): + + def test_recv_algorithm_files(self): + parameter = ParseDict({'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]}, AlgorithmParameter()) + with db.session_scope() as session: + algo1 = Algorithm(id=1, + name='algo-1', + uuid='algo-uuid-1', + path=_TEST_ALGORITHM_PATH, + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + comment='comment', + version=1) + algo1.set_parameter(parameter) + session.commit() + data_iterator = AlgorithmSender().make_algorithm_iterator(algo1.path) + receiver = AlgorithmReceiver() + with tempfile.TemporaryDirectory() as temp_dir: + resp = next(data_iterator) + algo_cache_path = algorithm_cache_path(temp_dir, 'algo-uuid-2') + receiver.write_data_and_extract(data_iterator, algo_cache_path, resp.hash) + self.assertTrue(os.path.exists(algo_cache_path)) + self.assertEqual(sorted(os.listdir(algo_cache_path)), ['follower', 'leader']) + with open(os.path.join(algo_cache_path, 'leader', 'main.py'), encoding='utf-8') as f: + self.assertEqual(f.read(), 'import tensorflow\n') + with open(os.path.join(algo_cache_path, 'follower', 'main.py'), encoding='utf-8') as f: + self.assertEqual(f.read(), '') + + def test_write_data_and_extra_when_no_files(self): + with db.session_scope() as session: + algo = Algorithm(id=1, + name='algo-1', + uuid='algo-uuid-1',\ + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + comment='comment', + version=1) + session.commit() + data_iterator = AlgorithmSender().make_algorithm_iterator(algo.path) + next(data_iterator) + with tempfile.TemporaryDirectory() as temp_dir: + path = os.path.join(temp_dir, 'test') + AlgorithmReceiver().write_data_and_extract(data_iterator, path) + self.assertTrue(os.path.exists(path)) + self.assertEqual(os.listdir(path), []) + + def test_write_data_iterator_with_wrong_hash(self): + with db.session_scope() as session: + sender = AlgorithmSender() + data_iterator = sender.make_algorithm_iterator(_TEST_ALGORITHM_PATH) + # Consumes hash code response first + next(data_iterator) + with tempfile.TemporaryDirectory() as temp_dir: + with self.assertRaises(ValueError): + AlgorithmReceiver().write_data_and_extract(data_iterator, temp_dir, 'wrong_hash') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/sender.py b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/sender.py new file mode 100644 index 000000000..3fe762fab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/sender.py @@ -0,0 +1,55 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import tempfile +from io import BytesIO +from typing import Generator, Union +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.algorithm.transmit.hash import get_file_md5 +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import GetAlgorithmFilesResponse + +_DEFAULT_CHUNK_SIZE = 1024 * 1024 +_FILE_MANAGER = FileManager() +_FILE_OPERATOR = FileOperator() + + +class AlgorithmSender(object): + + def __init__(self, chunk_size: int = _DEFAULT_CHUNK_SIZE): + self.chunk_size = chunk_size + + def _file_content_generator(self, file: BytesIO) -> Generator[bytes, None, None]: + while True: + chunk = file.read(self.chunk_size) + if len(chunk) == 0: + return + yield chunk + + def _archive_algorithm_files_into(self, algo_path: Union[str, None], dest_tar: str): + if algo_path is None: + return + sources = [file.path for file in _FILE_MANAGER.ls(algo_path, include_directory=True)] + if len(sources) > 0: + _FILE_OPERATOR.archive_to(sources, dest_tar) + + def make_algorithm_iterator(self, algo_path: str) -> Generator[GetAlgorithmFilesResponse, None, None]: + with tempfile.NamedTemporaryFile(suffix='.tar') as temp_file: + self._archive_algorithm_files_into(algo_path, temp_file.name) + file_hash = get_file_md5(_FILE_MANAGER, temp_file.name) + chunk_generator = self._file_content_generator(temp_file.file) + yield GetAlgorithmFilesResponse(hash=file_hash) + for chunk in chunk_generator: + yield GetAlgorithmFilesResponse(chunk=chunk) diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/sender_test.py b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/sender_test.py new file mode 100644 index 000000000..678be8385 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/transmit/sender_test.py @@ -0,0 +1,44 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
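Putting `AlgorithmSender` and `AlgorithmReceiver` together, the wire protocol above is: the first `GetAlgorithmFilesResponse` carries only the archive's MD5 in `hash`, and every subsequent message carries a `chunk` of the tar. A minimal end-to-end sketch with placeholder paths, mirroring the flow exercised in `receiver_test.py` above:

```python
from fedlearner_webconsole.algorithm.transmit.receiver import AlgorithmReceiver
from fedlearner_webconsole.algorithm.transmit.sender import AlgorithmSender

it = AlgorithmSender().make_algorithm_iterator('/path/to/algorithm')  # placeholder path
head = next(it)  # first message carries only the archive's MD5; chunks follow
AlgorithmReceiver().write_data_and_extract(it, '/path/to/dest', expected_file_hash=head.hash)
```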
+# + +import os +import unittest + +from envs import Envs +from fedlearner_webconsole.algorithm.transmit.sender import AlgorithmSender +from testing.common import NoWebServerTestCase + +_TEST_ALGORITHM_PATH = os.path.join(Envs.BASE_DIR, 'testing/test_data/algorithm/e2e_test') + + +class AlgorithmSenderTest(NoWebServerTestCase): + + def test_make_algorithm_iterator(self): + sender = AlgorithmSender(chunk_size=1024) + data_iterator = sender.make_algorithm_iterator(_TEST_ALGORITHM_PATH) + + hash_resp = next(data_iterator) + # As tar's hash code is always changing + self.assertEqual(len(hash_resp.hash), 32) + # Tar archives have a minimum size of 10240 bytes by default + chunk_count = 0 + for data in data_iterator: + chunk_count += 1 + self.assertEqual(len(data.chunk), 1024) + self.assertEqual(chunk_count, 10) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/algorithm/utils.py b/web_console_v2/api/fedlearner_webconsole/algorithm/utils.py new file mode 100644 index 000000000..19074628d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/algorithm/utils.py @@ -0,0 +1,64 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +from contextlib import contextmanager +from uuid import uuid4 +from slugify import slugify +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.file_manager import file_manager + + +# TODO(hangweiqiang): move Envs.STORAGE_ROOT in function +def algorithm_path(root_path: str, name: str, version: int) -> str: + suffix = now().strftime('%Y%m%d_%H%M%S') + return os.path.join(root_path, 'algorithms', f'{slugify(name)}-v{version}-{suffix}-{uuid4().hex[:5]}') + + +def algorithm_cache_path(root_path: str, algorithm_uuid: str) -> str: + return os.path.join(root_path, 'algorithm_cache', algorithm_uuid) + + +def algorithm_project_path(root_path: str, name: str) -> str: + suffix = now().strftime('%Y%m%d_%H%M%S') + return os.path.join(root_path, 'algorithm_projects', f'{slugify(name)}-{suffix}-{uuid4().hex[:5]}') + + +def pending_algorithm_path(root_path: str, name: str, version: int) -> str: + suffix = now().strftime('%Y%m%d_%H%M%S') + return os.path.join(root_path, 'pending_algorithms', f'{slugify(name)}-v{version}-{suffix}-{uuid4().hex[:5]}') + + +def deleted_name(name: str) -> str: + timestamp = now().strftime('%Y%m%d_%H%M%S') + return f'deleted_at_{timestamp}_{name}' + + +@contextmanager +def check_algorithm_file(path: str): + """clear the created algorithm files when exceptions + + Example: + path = (the path of the algorithm files) + with _check_algorithm_file(path): + ... 
+ + """ + try: + yield + except Exception as e: + if os.path.exists(path): + file_manager.remove(path) + raise e diff --git a/web_console_v2/api/fedlearner_webconsole/app.py b/web_console_v2/api/fedlearner_webconsole/app.py index 618b13b30..247227d6c 100644 --- a/web_console_v2/api/fedlearner_webconsole/app.py +++ b/web_console_v2/api/fedlearner_webconsole/app.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,40 +16,66 @@ # pylint: disable=wrong-import-position, global-statement import logging import logging.config -import os -import traceback from http import HTTPStatus +from json import load +from pathlib import Path + +from apispec.ext.marshmallow import MarshmallowPlugin +from apispec_webframeworks.flask import FlaskPlugin +from flasgger import APISpec, Swagger from flask import Flask, jsonify from flask_restful import Api -from flask_jwt_extended import JWTManager -from envs import Envs -from fedlearner_webconsole.utils import metrics - -jwt = JWTManager() +from marshmallow import ValidationError +from sqlalchemy import inspect +from sqlalchemy.orm import Session +from webargs.flaskparser import parser +from envs import Envs +from fedlearner_webconsole.utils.hooks import pre_start_hook +from fedlearner_webconsole.composer.apis import initialize_composer_apis +from fedlearner_webconsole.cleanup.apis import initialize_cleanup_apis +from fedlearner_webconsole.audit.apis import initialize_audit_apis +from fedlearner_webconsole.auth.services import UserService +from fedlearner_webconsole.e2e.apis import initialize_e2e_apis +from fedlearner_webconsole.flag.apis import initialize_flags_apis +from fedlearner_webconsole.iam.apis import initialize_iams_apis +from fedlearner_webconsole.iam.client import create_iams_for_user +from fedlearner_webconsole.middleware.middlewares import flask_middlewares +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.utils import metrics, const from fedlearner_webconsole.auth.apis import initialize_auth_apis from fedlearner_webconsole.project.apis import initialize_project_apis -from fedlearner_webconsole.workflow_template.apis \ - import initialize_workflow_template_apis +from fedlearner_webconsole.participant.apis import initialize_participant_apis +from fedlearner_webconsole.utils.decorators.pp_flask import parser as custom_parser +from fedlearner_webconsole.utils.swagger import normalize_schema +from fedlearner_webconsole.workflow_template.apis import initialize_workflow_template_apis from fedlearner_webconsole.workflow.apis import initialize_workflow_apis from fedlearner_webconsole.dataset.apis import initialize_dataset_apis from fedlearner_webconsole.job.apis import initialize_job_apis from fedlearner_webconsole.setting.apis import initialize_setting_apis -from fedlearner_webconsole.mmgr.apis import initialize_mmgr_apis +from fedlearner_webconsole.mmgr.model_apis import initialize_mmgr_model_apis +from fedlearner_webconsole.mmgr.model_job_apis import initialize_mmgr_model_job_apis +from fedlearner_webconsole.mmgr.model_job_group_apis import initialize_mmgr_model_job_group_apis +from fedlearner_webconsole.algorithm.apis import initialize_algorithm_apis from fedlearner_webconsole.debug.apis import initialize_debug_apis 
+from fedlearner_webconsole.serving.apis import initialize_serving_services_apis from fedlearner_webconsole.sparkapp.apis import initialize_sparkapps_apis -from fedlearner_webconsole.rpc.server import rpc_server +from fedlearner_webconsole.file.apis import initialize_files_apis +from fedlearner_webconsole.tee.apis import initialize_tee_apis from fedlearner_webconsole.db import db -from fedlearner_webconsole.exceptions import (make_response, - WebConsoleApiException, - InvalidArgumentException, - NotFoundException) +from fedlearner_webconsole.exceptions import make_response, WebConsoleApiException, InvalidArgumentException from fedlearner_webconsole.scheduler.scheduler import scheduler -from fedlearner_webconsole.utils.k8s_watcher import k8s_watcher -from fedlearner_webconsole.auth.models import User, Session -from fedlearner_webconsole.composer.composer import composer -from logging_config import LOGGING_CONFIG +from fedlearner_webconsole.k8s.k8s_watcher import k8s_watcher +from logging_config import get_logging_config +from werkzeug.exceptions import HTTPException + + +@custom_parser.error_handler +@parser.error_handler +def handle_request_parsing_error(validation_error: ValidationError, *args, **kwargs): + raise InvalidArgumentException(details=validation_error.messages) def _handle_bad_request(error): @@ -63,17 +89,19 @@ def _handle_bad_request(error): return error -def _handle_not_found(error): - """Handles the not found exception raised by framework""" - if not isinstance(error, WebConsoleApiException): - return make_response(NotFoundException()) - return error +def _handle_wsgi_exception(error: HTTPException): + logging.exception('Wsgi exception: %s', str(error)) + response = jsonify( + code=error.code, + msg=str(error), + ) + response.status_code = error.code + return response def _handle_uncaught_exception(error): """A fallback catcher for all exceptions.""" - logging.error('Uncaught exception %s, stack trace:\n %s', str(error), - traceback.format_exc()) + logging.exception('Uncaught exception %s', str(error)) response = jsonify( code=500, msg='Unknown error', @@ -82,71 +110,82 @@ def _handle_uncaught_exception(error): return response -@jwt.unauthorized_loader -def _handle_unauthorized_request(reason): - response = jsonify(code=HTTPStatus.UNAUTHORIZED, msg=reason) - return response, HTTPStatus.UNAUTHORIZED - - -@jwt.invalid_token_loader -def _handle_invalid_jwt_request(reason): - response = jsonify(code=HTTPStatus.UNPROCESSABLE_ENTITY, msg=reason) - return response, HTTPStatus.UNPROCESSABLE_ENTITY - - -@jwt.expired_token_loader -def _handle_token_expired_request(expired_token): - response = jsonify(code=HTTPStatus.UNAUTHORIZED, msg='Token has expired') - return response, HTTPStatus.UNAUTHORIZED - - -@jwt.user_lookup_loader -def user_lookup_callback(jwt_header, jwt_data): - del jwt_header # Unused by user load. 
- - identity = jwt_data['sub'] - return User.query.filter_by(username=identity).one_or_none() - - -@jwt.token_in_blocklist_loader -def check_if_token_invalid(jwt_header, jwt_data): - del jwt_header # unused by check_if_token_invalid - - jti = jwt_data['jti'] - session = Session.query.filter_by(jti=jti).first() - return session is None +def _initial_iams_for_users(session: Session): + inspector = inspect(db.engine) + if inspector.has_table('users_v2'): + try: + users = UserService(session).get_all_users() + for u in users: + create_iams_for_user(u) + except Exception as e: # pylint: disable=broad-except + logging.warning('Initial iams failed, will be OK after db migration.') + + +def _init_swagger(app: Flask): + openapi_version = '3.0.3' + spec = APISpec(title='FedLearner WebConsole API Documentation', + version=SettingService.get_application_version().version.version, + openapi_version=openapi_version, + plugins=[FlaskPlugin(), MarshmallowPlugin()]) + schemas = schema_manager.get_schemas() + template = spec.to_flasgger(app, definitions=schemas, paths=[*app.view_functions.values()]) + app.config['SWAGGER'] = {'title': 'FedLearner WebConsole API Documentation', 'uiversion': 3} + for path in (Path(__file__).parent / 'proto' / 'jsonschemas').glob('**/*.json'): + with open(path, mode='r', encoding='utf-8') as file: + definitions = load(file)['definitions'] + definitions = normalize_schema(definitions, Path(path)) + template['components']['schemas'] = {**template['components']['schemas'], **definitions} + template['definitions'] = template['components']['schemas'] + Swagger(app, + template=template, + config={ + 'url_prefix': Envs.SWAGGER_URL_PREFIX, + 'openapi': openapi_version + }, + merge=True) def create_app(config): + pre_start_hook() # format logging - logging.config.dictConfig(LOGGING_CONFIG) + logging.config.dictConfig(get_logging_config()) - app = Flask('fedlearner_webconsole') + app = Flask('fedlearner_webconsole', root_path=Envs.BASE_DIR) app.config.from_object(config) - jwt.init_app(app) - # Error handlers app.register_error_handler(400, _handle_bad_request) - app.register_error_handler(404, _handle_not_found) app.register_error_handler(WebConsoleApiException, make_response) + app.register_error_handler(HTTPException, _handle_wsgi_exception) app.register_error_handler(Exception, _handle_uncaught_exception) - - # TODO(wangsen.0914): This will be removed sooner! 
- db.init_app(app) - - api = Api(prefix='/api/v2') + # TODO(xiangyuxuan.prs): Initial iams for all existed users, remove when not using memory-iams + with db.session_scope() as session: + _initial_iams_for_users(session) + api = Api(prefix=const.API_VERSION) + initialize_composer_apis(api) + initialize_cleanup_apis(api) initialize_auth_apis(api) initialize_project_apis(api) + initialize_participant_apis(api) initialize_workflow_template_apis(api) initialize_workflow_apis(api) initialize_job_apis(api) initialize_dataset_apis(api) initialize_setting_apis(api) - initialize_mmgr_apis(api) + initialize_mmgr_model_apis(api) + initialize_mmgr_model_job_apis(api) + initialize_mmgr_model_job_group_apis(api) + initialize_algorithm_apis(api) initialize_sparkapps_apis(api) - if os.environ.get('FLASK_ENV') != 'production' or Envs.DEBUG: + initialize_files_apis(api) + initialize_flags_apis(api) + initialize_serving_services_apis(api) + initialize_iams_apis(api) + initialize_e2e_apis(api) + initialize_tee_apis(api) + if Envs.FLASK_ENV != 'production' or Envs.DEBUG: initialize_debug_apis(api) + initialize_audit_apis(api) # A hack that use our customized error handlers # Ref: https://github.com/flask-restful/flask-restful/issues/280 handle_exception = app.handle_exception @@ -154,21 +193,16 @@ def create_app(config): api.init_app(app) app.handle_exception = handle_exception app.handle_user_exception = handle_user_exception - + if Envs.FLASK_ENV != 'production' or Envs.DEBUG: + _init_swagger(app) # Inits k8s related stuff first since something in composer # may depend on it - if Envs.FLASK_ENV == 'production' or Envs.K8S_CONFIG_PATH is not None: + if app.config.get('START_K8S_WATCHER', True): k8s_watcher.start() - - if app.config.get('START_GRPC_SERVER', True): - rpc_server.stop() - rpc_server.start(app) if app.config.get('START_SCHEDULER', True): scheduler.stop() - scheduler.start(app) - if app.config.get('START_COMPOSER', True): - with app.app_context(): - composer.run(db_engine=db.get_engine()) + scheduler.start() - metrics.emit_counter('create_app', 1) + metrics.emit_store('create_app', 1) + app = flask_middlewares.init_app(app) return app diff --git a/web_console_v2/api/fedlearner_webconsole/app_test.py b/web_console_v2/api/fedlearner_webconsole/app_test.py new file mode 100644 index 000000000..5643c6239 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/app_test.py @@ -0,0 +1,56 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
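One wiring detail of the reworked `create_app` above: the K8s watcher and scheduler are now gated by app-config flags rather than environment checks. A hypothetical config that keeps both off for tests (the class name is illustrative; the flag names are the ones read via `app.config.get` in the diff):

```python
from fedlearner_webconsole.app import create_app


class TestingConfig:
    # Read via app.config.get(...) in create_app(); both default to True when absent.
    START_K8S_WATCHER = False
    START_SCHEDULER = False


app = create_app(TestingConfig)
```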
+ +# coding: utf-8 +import unittest +from http import HTTPStatus + +from marshmallow import fields +from webargs.flaskparser import use_args + +from fedlearner_webconsole.utils.flask_utils import make_flask_response +from testing.common import BaseTestCase + + +class ExceptionHandlersTest(BaseTestCase): + + def test_404(self): + self.assert404(self.get_helper('/api/v2/not_found', use_auth=False)) + + def test_405(self): + self.assert405(self.post_helper('/api/v2/versions', use_auth=False)) + + def test_uncaught_exception(self): + + @self.app.route('/test_uncaught') + def test_route(): + raise RuntimeError('Uncaught') + + response = self.get_helper('/test_uncaught', use_auth=False) + self.assert500(response) + + def test_marshmallow_validation_error(self): + + @self.app.route('/test_validation') + @use_args({'must': fields.Bool(required=True)}) + def test_route(params): + return make_flask_response({'succeeded': params['must']}) + + resp = self.get_helper('/test_validation', use_auth=False) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual(resp.get_json()['details'], {'json': {'must': ['Missing data for required field.']}}) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/audit/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/audit/BUILD.bazel new file mode 100644 index 000000000..88bc2a1de --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/BUILD.bazel @@ -0,0 +1,159 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "storage_lib", + srcs = ["storage.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "services_lib", + srcs = ["services.py"], + imports = ["../.."], + deps = [ + ":storage_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "services_lib_test", + size = "small", + srcs = [ + "services_test.py", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":models_lib", + ":services_lib", + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "decorators_lib", + srcs = ["decorators.py"], + imports = ["../.."], + deps = [ + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_flask//:pkg", + ], +) + +py_test( + name = "decorators_lib_test", + testonly = True, + srcs = [ + "decorators_test.py", + ], + imports = ["../.."], + main = "decorators_test.py", + deps = [ + ":decorators_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_base64_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_python_dateutil//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@common_python_dateutil//:pkg", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/audit/__init__.py b/web_console_v2/api/fedlearner_webconsole/audit/__init__.py new file mode 100644 index 000000000..fc6e7fa2c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# coding: utf-8 diff --git a/web_console_v2/api/fedlearner_webconsole/audit/apis.py b/web_console_v2/api/fedlearner_webconsole/audit/apis.py new file mode 100644 index 000000000..d7c3597d6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/apis.py @@ -0,0 +1,113 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# coding: utf-8 +from http import HTTPStatus +from typing import Optional + +from flask_restful import Api, Resource +from marshmallow import fields, validate +from webargs.flaskparser import use_kwargs +from dateutil.relativedelta import relativedelta +from fedlearner_webconsole.audit.models import EventType +from fedlearner_webconsole.audit.services import EventService +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.flask_utils import make_flask_response +from fedlearner_webconsole.utils.decorators.pp_flask import admin_required +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.utils.pp_datetime import to_timestamp, now +from fedlearner_webconsole.utils.filtering import parse_expression + + +class EventsApi(Resource): + + @credentials_required + @admin_required + @use_kwargs( + { + 'filter_exp': fields.String(validate=validate.Length(min=1), data_key='filter', load_default=None), + 'page': fields.Integer(load_default=1), + 'page_size': fields.Integer(load_default=10) + }, + location='query') + def get(self, filter_exp: Optional[str], page: int, page_size: int): + """Get audit events + --- + tags: + - audit + description: get audit events + parameters: + - name: filter + in: query + schema: + type: string + - name: page + in: query + schema: + type: integer + - name: page_size + in: query + schema: + type: integer + responses: + 200: + description: Events are returned + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.Event' + """ + with db.session_scope() as session: + if filter_exp is not None: + filter_exp = parse_expression(filter_exp) + query = EventService(session).get_events(filter_exp) + pagination = paginate(query, page, page_size) + data = [model.to_proto() for model in pagination.get_items()] + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + @credentials_required + @admin_required + @use_kwargs({'event_type': fields.String(required=True, validate=validate.OneOf([a.name for a in EventType]))}, + location='query') + def delete(self, event_type: str): 
+ """Delete audit events that are older than 6 months + --- + tags: + - audit + parameters: + - name: event_type + in: query + schema: + type: string + responses: + 204: + description: Events are deleted successfully + """ + end_time = to_timestamp(now() - relativedelta(months=6)) + if EventType[event_type] == EventType.RPC: + filter_exp = parse_expression(f'(and(start_time>0)(end_time<{end_time})(source:["RPC"]))') + elif EventType[event_type] == EventType.USER_ENDPOINT: # delete API/UI events + filter_exp = parse_expression( + f'(and(start_time>0)(end_time<{end_time})(source:["UNKNOWN_SOURCE","UI","API"]))') + with db.session_scope() as session: + EventService(session).delete_events(filter_exp) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +def initialize_audit_apis(api: Api): + api.add_resource(EventsApi, '/events') diff --git a/web_console_v2/api/fedlearner_webconsole/audit/apis_test.py b/web_console_v2/api/fedlearner_webconsole/audit/apis_test.py new file mode 100644 index 000000000..7445ac6e5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/apis_test.py @@ -0,0 +1,158 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# coding: utf-8 +import unittest +from http import HTTPStatus +from typing import Tuple +from datetime import timedelta, timezone + +from dateutil.relativedelta import relativedelta + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.const import API_VERSION +from fedlearner_webconsole.utils.pp_datetime import to_timestamp, now +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.audit.models import EventModel, to_model +from testing.common import BaseTestCase + +PATH_PREFIX = f'{API_VERSION}/events' + + +def get_times() -> Tuple[int, int]: + ts = to_timestamp(now()) + return ts - 60 * 2, ts + 60 * 2 + + +def generate_event() -> EventModel: + return to_model( + Event(name='some_event', + user_id=1, + resource_type=Event.ResourceType.IAM, + resource_name='some_resource', + op_type=Event.OperationType.CREATE, + result=Event.Result.SUCCESS, + result_code='OK', + source=Event.Source.UI)) + + +def generate_rpc_event() -> EventModel: + return to_model( + Event(name='some_rpc_event', + user_id=1, + resource_type=Event.ResourceType.WORKFLOW, + resource_name='workflow_uuid', + op_type=Event.OperationType.CREATE, + result=Event.Result.SUCCESS, + result_code='OK', + source=Event.Source.RPC)) + + +class EventApisTest(BaseTestCase): + + def setUp(self): + super().setUp() + events = [generate_event() for _ in range(5)] + with db.session_scope() as session: + session.bulk_save_objects(events) + session.commit() + + def test_get_events(self): + start_time, end_time = get_times() + self.signin_as_admin() + + response = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{start_time})(end_time<{end_time}))') + self.assertStatus(response, HTTPStatus.OK) + self.assertEqual(5, 
len(self.get_response_data(response))) + self.assertEqual('CREATE', self.get_response_data(response)[0].get('op_type')) + + response = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="admin")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(0, len(self.get_response_data(response))) + + start_time = to_timestamp(now(timezone.utc) + timedelta(hours=2)) + end_time = to_timestamp(now(timezone.utc) + timedelta(hours=3)) + + response = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(0, len(self.get_response_data(response))) + + response = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{end_time})(end_time<{start_time}))') + self.assertEqual(0, len(self.get_response_data(response))) + + def test_delete_events(self): + rpc_events = [generate_rpc_event() for _ in range(3)] + created_at = now(timezone.utc) - relativedelta(months=8) + with db.session_scope() as session: + session.bulk_save_objects(rpc_events) + session.query(EventModel).update({'created_at': created_at}) + session.commit() + start_time, end_time = get_times() + + self.signin_as_admin() + response = self.delete_helper(f'{PATH_PREFIX}?event_type=USER_ENDPOINT') + self.assertStatus(response, HTTPStatus.NO_CONTENT) + + start_time = to_timestamp(now(timezone.utc) - relativedelta(months=9)) + end_time = to_timestamp(now(timezone.utc) - relativedelta(months=7)) + + response = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(3, len(self.get_response_data(response))) + + def test_delete_rpc_events(self): + rpc_events = [generate_rpc_event() for _ in range(3)] + created_at = now(timezone.utc) - relativedelta(months=8) + with db.session_scope() as session: + session.bulk_save_objects(rpc_events) + session.query(EventModel).update({'created_at': created_at}) + session.commit() + + start_time, end_time = get_times() + + self.signin_as_admin() + response = self.delete_helper(f'{PATH_PREFIX}?event_type=RPC') + self.assertStatus(response, HTTPStatus.NO_CONTENT) + + start_time = to_timestamp(now(timezone.utc) - relativedelta(months=9)) + end_time = to_timestamp(now(timezone.utc) - relativedelta(months=7)) + + response = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(5, len(self.get_response_data(response))) + + def test_get_with_op_type(self): + start_time, end_time = get_times() + + self.signin_as_admin() + resp = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{start_time})(end_time<{end_time})(op_type="CREATE"))' # pylint: disable=line-too-long + ) + self.assertEqual(5, len(self.get_response_data(resp))) + + resp = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{end_time})(end_time<{start_time})(op_type="UPDATE"))' # pylint: disable=line-too-long + ) + self.assertEqual(0, len(self.get_response_data(resp))) + + resp = self.get_helper( + f'{PATH_PREFIX}?filter=(and(username="ada")(start_time>{end_time})(end_time<{start_time})(op_type="poop"))') + self.assert200(resp) + self.assertEqual(0, len(self.get_response_data(resp))) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/audit/decorators.py b/web_console_v2/api/fedlearner_webconsole/audit/decorators.py new file mode 100644 index 000000000..19698d6c2 --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/audit/decorators.py @@ -0,0 +1,263 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# coding: utf-8 +import grpc +import json +import logging +from functools import wraps +from typing import Optional, Dict, Tuple, Callable +from envs import Envs +from flask import request +from google.protobuf.message import Message +from google.protobuf.empty_pb2 import Empty +from fedlearner_webconsole.audit.services import EventService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.const import API_VERSION +from fedlearner_webconsole.utils.flask_utils import get_current_user +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.proto.common_pb2 import StatusCode +from fedlearner_webconsole.proto.service_pb2 import TwoPcRequest +from fedlearner_webconsole.exceptions import UnauthorizedException, InvalidArgumentException +from fedlearner_webconsole.utils.domain_name import get_pure_domain_name +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.rpc.auth import get_common_name, PROJECT_NAME_HEADER, SSL_CLIENT_SUBJECT_DN_HEADER +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType +# TODO(wangsen.0914): IAM and SYSTEM WIP + +RESOURCE_TYPE_MAPPING = { + 'projects': Event.ResourceType.WORKSPACE, + 'workflow_templates': Event.ResourceType.TEMPLATE, + 'workflows': Event.ResourceType.WORKFLOW, + 'datasets': Event.ResourceType.DATASET, + 'models': Event.ResourceType.MODEL, + 'auth': Event.ResourceType.USER, + 'participants': Event.ResourceType.PARTICIPANT, + 'serving_services': Event.ResourceType.SERVING_SERVICE, + 'algorithm_projects': Event.ResourceType.ALGORITHM_PROJECT, + 'preset_algorithms': Event.ResourceType.PRESET_ALGORITHM +} + +OP_TYPE_MAPPING = { + 'post': Event.OperationType.CREATE, + 'patch': Event.OperationType.UPDATE, + 'put': Event.OperationType.UPDATE, + 'delete': Event.OperationType.DELETE +} + +STATUS_TYPE_MAPPING = { + StatusCode.STATUS_SUCCESS: grpc.StatusCode.OK.name, + StatusCode.STATUS_UNKNOWN_ERROR: grpc.StatusCode.UNKNOWN.name, + StatusCode.STATUS_UNAUTHORIZED: grpc.StatusCode.UNAUTHENTICATED.name, + StatusCode.STATUS_NOT_FOUND: grpc.StatusCode.NOT_FOUND.name, + StatusCode.STATUS_INVALID_ARGUMENT: grpc.StatusCode.INVALID_ARGUMENT.name +} + +RESULT_TYPE_MAPPING = { + Event.Result.UNKNOWN_RESULT: grpc.StatusCode.UNKNOWN.name, + Event.Result.SUCCESS: grpc.StatusCode.OK.name, + Event.Result.FAILURE: grpc.StatusCode.ABORTED.name +} + + +def emits_event(resource_type: Event.ResourceType = Event.ResourceType.UNKNOWN_RESOURCE_TYPE, + op_type: Event.OperationType = Event.OperationType.UNKNOWN_OPERATION_TYPE, + audit_fields: Optional[list] = None): + + def wrapper_func(func): + + @wraps(func) + def wrapper(*args, 
**kwargs):
+            user = get_current_user()
+            if user is None:
+                return func(*args, **kwargs)
+            fields, result = _infer_event_fields(resource_type, op_type, audit_fields), Event.Result.SUCCESS
+            try:
+                data, *_ = func(*args, **kwargs)
+                if fields['op_type'] == Event.OperationType.CREATE:
+                    fields['resource_name'] += f'/{data.get("data").get("id")}'
+                return (data, *_)
+            except Exception:
+                result = Event.Result.FAILURE
+                raise
+            finally:
+                # TODO(yeqiuhan): deprecate result in two release cut
+                _emit_event(user_id=user.id, result=result, result_code=RESULT_TYPE_MAPPING[result], fields=fields)
+
+        return wrapper
+
+    return wrapper_func
+
+
+# TODO(yeqiuhan): Call local server for operation
+
+
+def emits_rpc_event(
+        resource_name_fn: Callable[[Message], str],
+        resource_type: Event.ResourceType = Event.ResourceType.UNKNOWN_RESOURCE_TYPE,
+        op_type: Event.OperationType = Event.OperationType.UNKNOWN_OPERATION_TYPE,
+):
+
+    def wrapper_func(func):
+
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            func_params = _get_func_params(func, *args, **kwargs)
+            fields = _infer_rpc_event_fields(func_params, resource_type, resource_name_fn, op_type)
+            # response stays None if func raises, so the finally block below
+            # cannot hit a NameError when no status code was set on the context.
+            response = None
+            try:
+                response = func(*args, **kwargs)
+                return response
+            finally:
+                # use public interface to get status code until upgrade of grpc from 1.32 to 1.38+
+                if func_params['context']._state.code is not None:  # pylint: disable=protected-access
+                    result_code = func_params['context']._state.code.name  # pylint: disable=protected-access
+                elif response is None:
+                    # func raised without aborting the context
+                    result_code = 'UNKNOWN'
+                elif isinstance(response, Empty) or response.DESCRIPTOR.fields_by_name.get('status') is None:
+                    result_code = 'OK'
+                elif isinstance(response.status.code, int):
+                    result_code = STATUS_TYPE_MAPPING[response.status.code]
+                else:
+                    result_code = 'UNKNOWN'
+                _emit_event(user_id=None,
+                            result=Event.Result.SUCCESS if result_code == 'OK' else Event.Result.FAILURE,
+                            result_code=result_code,
+                            fields=fields)
+
+        return wrapper
+
+    return wrapper_func
+
+
+def _infer_event_fields(resource_type: Event.ResourceType = Event.ResourceType.UNKNOWN_RESOURCE_TYPE,
+                        op_type: Event.OperationType = Event.OperationType.UNKNOWN_OPERATION_TYPE,
+                        audit_fields: Optional[list] = None) -> Dict[str, any]:
+    # path: API_PATH_PREFIX/resource_type/...
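+    # e.g. with API_VERSION = '/api/v2', a request.path of '/api/v2/projects/1'
+    # yields the key 'projects', which RESOURCE_TYPE_MAPPING above maps to
+    # Event.ResourceType.WORKSPACE.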
+ if resource_type == Event.ResourceType.UNKNOWN_RESOURCE_TYPE: + resource_type = RESOURCE_TYPE_MAPPING.get(request.path.partition(API_VERSION)[-1].split('/')[1].lower()) + if op_type == Event.OperationType.UNKNOWN_OPERATION_TYPE: + op_type = OP_TYPE_MAPPING.get(request.method.lower()) + body = request.get_json(force=True, silent=True) + resource_name = request.path.rpartition(API_VERSION)[-1] + extra = {k: body.get(k) for k in audit_fields} if audit_fields else {} + coordinator_pure_domain_name = SettingService.get_system_info().pure_domain_name + return { + 'name': Event.OperationType.Name(op_type).lower() + Event.ResourceType.Name(resource_type).capitalize(), + 'resource_type': resource_type, + 'resource_name': resource_name, + 'op_type': op_type, + # TODO(wangsen.0914): source depends on credentials + 'source': Event.Source.UI, + 'extra': json.dumps(extra), + 'coordinator_pure_domain_name': coordinator_pure_domain_name + } + + +def _get_func_params(func, *args, **kwargs): + dict_param = {} + for arg in list(kwargs.values()) + list(args): + if isinstance(arg, grpc.ServicerContext): + dict_param['context'] = arg + if isinstance(arg, Message): + dict_param['request'] = arg + return dict_param + + +def _infer_auth_info(rpc_request, context) -> Tuple[Optional[str], Optional[int]]: + if Envs.FLASK_ENV == 'production': + metadata = dict(context.invocation_metadata()) + if not metadata: + raise UnauthorizedException('No client subject dn found') + cn = get_common_name(metadata.get(SSL_CLIENT_SUBJECT_DN_HEADER)) + if not cn: + raise UnauthorizedException('Failed to get domain name from certs') + pure_domain_name = get_pure_domain_name(cn) + with db.session_scope() as session: + if 'auth_info' in rpc_request.keys(): # v1 + project_name = rpc_request['auth_info']['project_name'] + else: # v2 + project_name = metadata.get(PROJECT_NAME_HEADER) + project = session.query(Project).filter_by(name=project_name).first() + project_id = project.id if project is not None else None + return pure_domain_name, project_id + return (None, None) + + +def _infer_rpc_event_fields(func_params: Dict[str, any], resource_type: Event.ResourceType, + resource_name_fn: Callable[[Message], str], op_type: Event.OperationType) -> Dict[str, any]: + request_type = type(func_params['request']) + if resource_name_fn is None: + raise InvalidArgumentException('Callable resource_name_fn required') + resource_uuid = resource_name_fn(func_params['request']) + rpc_request = to_dict(func_params['request']) + context = func_params['context'] + if request_type is TwoPcRequest: + type_list = rpc_request['type'].split('_') + if type_list[-1] == 'STATE': + op_type = Event.OperationType.Value(type_list[0] + '_' + type_list[-1]) + resource_type = Event.ResourceType.Value('_'.join(type_list[1:-1])) + else: + op_type = Event.OperationType.Value(type_list[0]) + resource_type = Event.ResourceType.Value('_'.join(type_list[1:])) + # get domain_name and project_name + pure_domain_name, project_id = _infer_auth_info(rpc_request, context) + return { + 'name': str(request_type)[str(request_type).rfind('.') + 1:str(request_type).rfind('Request')], + 'op_type': op_type, + 'resource_type': resource_type, + 'resource_name': resource_uuid, + 'coordinator_pure_domain_name': pure_domain_name, + 'project_id': project_id, + 'source': Event.Source.RPC + } + + +def _emit_event(user_id: Optional[int], result: Event.Result, result_code: str, fields: dict) -> None: + event = Event(user_id=user_id, result=result, result_code=result_code, **fields) + try: + with 
db.session_scope() as session: + EventService(session).emit_event(event) + session.commit() + except ValueError as e: + logging.error(f'[audit.decorator] invalid argument passed: {e}') + emit_store('audit_invalid_arguments', 1) + + +def get_two_pc_request_uuid(rpc_request: TwoPcRequest) -> Optional[str]: + if rpc_request.type == TwoPcType.CREATE_MODEL_JOB: + return rpc_request.data.create_model_job_data.model_job_uuid + if rpc_request.type == TwoPcType.CONTROL_WORKFLOW_STATE: + return rpc_request.data.transit_workflow_state_data.workflow_uuid + if rpc_request.type == TwoPcType.CREATE_MODEL_JOB_GROUP: + return rpc_request.data.create_model_job_group_data.model_job_group_uuid + if rpc_request.type == TwoPcType.LAUNCH_DATASET_JOB: + return rpc_request.data.launch_dataset_job_data.dataset_job_uuid + if rpc_request.type == TwoPcType.STOP_DATASET_JOB: + return rpc_request.data.stop_dataset_job_data.dataset_job_uuid + if rpc_request.type == TwoPcType.CREATE_TRUSTED_JOB_GROUP: + return rpc_request.data.create_trusted_job_group_data.algorithm_uuid + if rpc_request.type == TwoPcType.LAUNCH_TRUSTED_JOB: + return rpc_request.data.launch_trusted_job_data.uuid + if rpc_request.type == TwoPcType.STOP_TRUSTED_JOB: + return rpc_request.data.stop_trusted_job_data.uuid + if rpc_request.type == TwoPcType.LAUNCH_DATASET_JOB_STAGE: + return rpc_request.data.launch_dataset_job_stage_data.dataset_job_stage_uuid + if rpc_request.type == TwoPcType.STOP_DATASET_JOB_STAGE: + return rpc_request.data.stop_dataset_job_stage_data.dataset_job_stage_uuid + logging.warning('[TwoPc] Unsupported TwoPcType!') + return None diff --git a/web_console_v2/api/fedlearner_webconsole/audit/decorators_test.py b/web_console_v2/api/fedlearner_webconsole/audit/decorators_test.py new file mode 100644 index 000000000..d7cc8d5a3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/decorators_test.py @@ -0,0 +1,238 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# coding: utf-8 +from http import HTTPStatus +import unittest +from unittest.mock import MagicMock, patch +from typing import Tuple +from grpc import ServicerContext, StatusCode +from grpc._server import _Context, _RPCState +from google.protobuf import empty_pb2 +from google.protobuf.message import Message +from testing.common import BaseTestCase, NoWebServerTestCase + +from fedlearner_webconsole.utils.pp_base64 import base64encode +from fedlearner_webconsole.utils.const import API_VERSION +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.audit.decorators import _get_func_params, _infer_rpc_event_fields,\ + emits_rpc_event, get_two_pc_request_uuid +from fedlearner_webconsole.proto.service_pb2 import TwoPcRequest, UpdateWorkflowResponse +from fedlearner_webconsole.proto import common_pb2, service_pb2 +from fedlearner_webconsole.audit.models import EventModel +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.two_pc_pb2 import CreateModelJobData, TransactionData, TwoPcAction, TwoPcType +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition, WorkflowDefinition +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp + + +def get_uuid(proto_message: Message): + return 'test_uuid' + + +def get_times() -> Tuple[int, int]: + ts = to_timestamp(now()) + return ts - 60 * 2, ts + 60 * 2 + + +@emits_rpc_event(resource_type=Event.ResourceType.WORKFLOW, + op_type=Event.OperationType.UPDATE, + resource_name_fn=get_uuid) +def fake_rpc_method(request, context=None): + return UpdateWorkflowResponse(status=common_pb2.Status(code=common_pb2.STATUS_UNAUTHORIZED, msg='done')) + + +@emits_rpc_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, + op_type=Event.OperationType.UPDATE, + resource_name_fn=get_uuid) +def fake_rpc_method_without_status_code(request, context=None): + return service_pb2.UpdateModelJobGroupResponse(uuid='test', + config=WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[Variable(name='mode', value='train')]) + ])) + + +@emits_rpc_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, + op_type=Event.OperationType.INFORM, + resource_name_fn=get_uuid) +def fake_rpc_method_with_status_code_in_context(request, context: ServicerContext = None): + context.abort(StatusCode.INVALID_ARGUMENT, 'just test') + return empty_pb2.Empty() + + +class DecoratorsTest(BaseTestCase): + + def test_emits_event(self): + self.signin_as_admin() + + start_time, end_time = get_times() + response = self.post_helper( + f'{API_VERSION}/auth/users', { + 'username': 'test123', + 'password': base64encode('123456.@abc'), + 'role': 'USER', + 'name': 'test123', + 'email': 'test@byd.org' + }) + user_id = self.get_response_data(response).get('id') + + response = self.get_helper( + f'{API_VERSION}/events?filter=(and(username="admin")(start_time>{start_time})(end_time<{end_time}))') + data = self.get_response_data(response)[0] + self.assertEqual(Event.OperationType.CREATE, Event.OperationType.Value(data.get('op_type'))) + self.assertEqual(Event.ResourceType.USER, Event.ResourceType.Value(data.get('resource_type'))) + self.assertEqual('4', data.get('resource_name').split('/')[-1]) + + # send a wrong request and see if the event is logged correctly + response = self.patch_helper(f'{API_VERSION}/auth/users/999', {}) + self.assertStatus(response, HTTPStatus.NOT_FOUND) + response = 
self.get_helper( + f'{API_VERSION}/events?filter=(and(username="admin")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(Event.OperationType.UPDATE, + Event.OperationType.Value(self.get_response_data(response)[0].get('op_type'))) + self.assertEqual(Event.Result.FAILURE, Event.Result.Value(self.get_response_data(response)[0].get('result'))) + + self.patch_helper(f'{API_VERSION}/auth/users/{user_id}', {}) + response = self.get_helper( + f'{API_VERSION}/events?filter=(and(username="admin")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(Event.Result.SUCCESS, Event.Result.Value(self.get_response_data(response)[0].get('result'))) + + self.delete_helper(f'{API_VERSION}/auth/users/{user_id}') + response = self.get_helper( + f'{API_VERSION}/events?filter=(and(username="admin")(start_time>{start_time})(end_time<{end_time}))') + self.assertEqual(Event.OperationType.DELETE, + Event.OperationType.Value(self.get_response_data(response)[0].get('op_type'))) + + +class RpcDecoratorsTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.default_auth_info = service_pb2.ProjAuthInfo(project_name='test', target_domain='test_domain') + self.request = service_pb2.UpdateWorkflowRequest(auth_info=self.default_auth_info) + self.context = _Context('11', _RPCState(), '22') + + @patch('fedlearner_webconsole.audit.decorators._infer_auth_info') + def test_decorator(self, mock_infer_auth_info: MagicMock): + mock_infer_auth_info.return_value = 'bytedance', 1 + fake_rpc_method(self.request, context=self.context) + with db.session_scope() as session: + workflow_event = session.query(EventModel).first() + self.assertEqual(workflow_event.op_type, Event.OperationType.Name(Event.OperationType.UPDATE)) + self.assertEqual(workflow_event.resource_type, Event.ResourceType.Name(Event.ResourceType.WORKFLOW)) + self.assertEqual(workflow_event.resource_name, 'test_uuid') + self.assertEqual(workflow_event.coordinator_pure_domain_name, 'bytedance') + self.assertEqual(workflow_event.project_id, 1) + self.assertEqual(workflow_event.result_code, 'UNAUTHENTICATED') + self.assertEqual(workflow_event.result, 'FAILURE') + self.assertEqual(workflow_event.name, 'UpdateWorkflow') + + def test_get_func_params(self): + func_params = _get_func_params(fake_rpc_method, request=self.request, context=self.context) + self.assertEqual(len(func_params), 2) + self.assertEqual(func_params['request'], self.request) + self.assertEqual(func_params['context'], self.context) + + def test_infer_rpc_event_fields_with_two_pc(self): + transaction_data = TransactionData( + create_model_job_data=CreateModelJobData(model_job_name='test model name', model_job_uuid='test uuid')) + request = TwoPcRequest(auth_info=self.default_auth_info, + transaction_uuid='test-id', + type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=transaction_data) + func_params = _get_func_params(fake_rpc_method, request=request, context=self.context) + fields = _infer_rpc_event_fields(func_params, Event.ResourceType.UNKNOWN_RESOURCE_TYPE, get_uuid, + Event.OperationType.UNKNOWN_OPERATION_TYPE) + uuid = get_two_pc_request_uuid(request) + self.assertEqual(uuid, request.data.create_model_job_data.model_job_uuid) + self.assertEqual(fields['op_type'], Event.OperationType.CREATE) + self.assertEqual(fields['resource_type'], Event.ResourceType.MODEL_JOB) + self.assertEqual(fields['name'], 'TwoPc') + + request = TwoPcRequest(auth_info=self.default_auth_info, + transaction_uuid='test-id', + type=TwoPcType.CONTROL_WORKFLOW_STATE, + 
action=TwoPcAction.PREPARE) + func_params = _get_func_params(fake_rpc_method, request=request, context=self.context) + fields = _infer_rpc_event_fields(func_params, Event.ResourceType.UNKNOWN_RESOURCE_TYPE, get_uuid, + Event.OperationType.UNKNOWN_OPERATION_TYPE) + self.assertEqual(fields['op_type'], Event.OperationType.CONTROL_STATE) + self.assertEqual(fields['resource_type'], Event.ResourceType.WORKFLOW) + + request = TwoPcRequest(auth_info=self.default_auth_info, + transaction_uuid='test-id', + type=TwoPcType.CREATE_MODEL_JOB_GROUP, + action=TwoPcAction.PREPARE) + func_params = _get_func_params(fake_rpc_method, request=request, context=self.context) + fields = _infer_rpc_event_fields(func_params, Event.ResourceType.UNKNOWN_RESOURCE_TYPE, get_uuid, + Event.OperationType.UNKNOWN_OPERATION_TYPE) + self.assertEqual(fields['op_type'], Event.OperationType.CREATE) + self.assertEqual(fields['resource_type'], Event.ResourceType.MODEL_JOB_GROUP) + + request = TwoPcRequest(auth_info=self.default_auth_info, + transaction_uuid='test-id', + type=TwoPcType.LAUNCH_DATASET_JOB, + action=TwoPcAction.PREPARE) + func_params = _get_func_params(fake_rpc_method, request=request, context=self.context) + fields = _infer_rpc_event_fields(func_params, Event.ResourceType.UNKNOWN_RESOURCE_TYPE, get_uuid, + Event.OperationType.UNKNOWN_OPERATION_TYPE) + self.assertEqual(fields['op_type'], Event.OperationType.LAUNCH) + self.assertEqual(fields['resource_type'], Event.ResourceType.DATASET_JOB) + + request = TwoPcRequest(auth_info=self.default_auth_info, + transaction_uuid='test-id', + type=TwoPcType.LAUNCH_MODEL_JOB, + action=TwoPcAction.PREPARE) + func_params = _get_func_params(fake_rpc_method, request=request, context=self.context) + fields = _infer_rpc_event_fields(func_params, Event.ResourceType.UNKNOWN_RESOURCE_TYPE, get_uuid, + Event.OperationType.UNKNOWN_OPERATION_TYPE) + self.assertEqual(fields['op_type'], Event.OperationType.LAUNCH) + self.assertEqual(fields['resource_type'], Event.ResourceType.MODEL_JOB) + + request = TwoPcRequest(auth_info=self.default_auth_info, + transaction_uuid='test-id', + type=TwoPcType.STOP_DATASET_JOB, + action=TwoPcAction.PREPARE) + func_params = _get_func_params(fake_rpc_method, request=request, context=self.context) + fields = _infer_rpc_event_fields(func_params, Event.ResourceType.UNKNOWN_RESOURCE_TYPE, get_uuid, + Event.OperationType.UNKNOWN_OPERATION_TYPE) + self.assertEqual(fields['op_type'], Event.OperationType.STOP) + self.assertEqual(fields['resource_type'], Event.ResourceType.DATASET_JOB) + + @patch('fedlearner_webconsole.audit.decorators._infer_auth_info') + def test_response_with_no_status(self, mock_infer_auth_info: MagicMock): + mock_infer_auth_info.return_value = 'bytedance', 1 + fake_rpc_method_without_status_code(self.request, context=self.context) + with db.session_scope() as session: + event = session.query(EventModel).first() + self.assertEqual(event.result_code, 'OK') + self.assertEqual(event.result, 'SUCCESS') + + @patch('fedlearner_webconsole.audit.decorators._infer_auth_info') + def test_response_with_status_code_in_context(self, mock_infer_auth_info: MagicMock): + mock_infer_auth_info.return_value = 'bytedance', 1 + with self.assertRaises(Exception): + fake_rpc_method_with_status_code_in_context(self.request, context=self.context) + with db.session_scope() as session: + event = session.query(EventModel).first() + self.assertEqual(event.result_code, 'INVALID_ARGUMENT') + self.assertEqual(event.result, 'FAILURE') + + +if __name__ == '__main__': + unittest.main() 
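A minimal, hypothetical usage sketch of the `emits_event` decorator exercised by the tests above; `DemoApi` and its payload are illustrative and not part of this change, while `emits_event`, the `Event` enums and the `audit_fields` parameter are the ones defined in `audit/decorators.py`:

```python
# Hypothetical sketch: how a Flask-RESTful handler opts into audit events.
from http import HTTPStatus

from flask_restful import Resource

from fedlearner_webconsole.audit.decorators import emits_event
from fedlearner_webconsole.proto.audit_pb2 import Event


class DemoApi(Resource):

    # resource_type/op_type may be omitted, in which case they are inferred
    # from the request path and HTTP method via RESOURCE_TYPE_MAPPING and
    # OP_TYPE_MAPPING; audit_fields copies the named JSON body fields into
    # the event's `extra` column.
    @emits_event(resource_type=Event.ResourceType.USER,
                 op_type=Event.OperationType.CREATE,
                 audit_fields=['email'])
    def post(self):
        # The decorator unpacks the return value as `data, *_` and, for CREATE
        # operations, appends data['data']['id'] to the audited resource_name.
        return {'data': {'id': 1}}, HTTPStatus.CREATED
```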
diff --git a/web_console_v2/api/fedlearner_webconsole/audit/models.py b/web_console_v2/api/fedlearner_webconsole/audit/models.py new file mode 100644 index 000000000..8a73578b3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/models.py @@ -0,0 +1,122 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# coding: utf-8 +from uuid import uuid4 +import enum + +from sqlalchemy import UniqueConstraint, func +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.utils.mixins import to_dict_mixin +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.proto.auth_pb2 import User +from fedlearner_webconsole.utils.pp_datetime import to_timestamp + + +class EventType(enum.Enum): + # USER_ENDPOINT maps to events with source of 'API/UI' + USER_ENDPOINT = 0 + # RPC maps to events with source of 'RPC' + RPC = 1 + + +@to_dict_mixin(ignores=['updated_at', 'deleted_at', 'user_id'], + extras={ + 'user': lambda e: { + 'id': e.user.id, + 'username': e.user.username, + 'role': e.user.role.name, + }, + }) +class EventModel(db.Model): + __tablename__ = 'events_v2' + __table_args__ = (UniqueConstraint('uuid', name='uniq_uuid'), default_table_args('webconsole audit events')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='auto-incremented id') + uuid = db.Column(db.String(255), nullable=False, comment='UUID of the event', default=lambda _: str(uuid4())) + name = db.Column(db.String(255), nullable=False, comment='the name of the event') + user_id = db.Column(db.Integer, comment='the ID of the user who triggered the event') + resource_type = db.Column(db.Enum(*Event.ResourceType.keys(), + native_enum=False, + create_constraint=False, + length=32, + name='resource_type'), + nullable=False, + comment='the type of the resource') + resource_name = db.Column(db.String(512), nullable=False, comment='the name of the resource') + op_type = db.Column(db.Enum(*Event.OperationType.keys(), + native_enum=False, + create_constraint=False, + length=32, + name='op_type'), + nullable=False, + comment='the type of the operation of the event') + # Due to compatibility, audit API double writes result and result_code field + # TODO(yeqiuhan): remove result field + result = db.Column(db.Enum(*Event.Result.keys(), + native_enum=False, + create_constraint=False, + length=32, + name='result'), + nullable=False, + comment='the result of the operation') + result_code = db.Column(db.String(255), comment='the result code of the operation') + source = db.Column(db.Enum(*Event.Source.keys(), + native_enum=False, + create_constraint=False, + length=32, + name='source'), + nullable=False, + comment='the source that triggered the event') + coordinator_pure_domain_name = db.Column(db.String(255), comment='name of the coordinator') + project_id = db.Column(db.Integer, comment='project_id corresponds to participants name') + extra = db.Column(db.Text, comment='extra info in JSON') + 
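# created_at/updated_at are maintained by the database (server_default/onupdate);
+    # deleted_at backs the soft delete in audit.storage instead of a physical DELETE.
+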
created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at') + updated_at = db.Column(db.DateTime(timezone=True), + onupdate=func.now(), + server_default=func.now(), + comment='updated at') + deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted at') + user = db.relationship('User', primaryjoin='foreign(EventModel.user_id) == User.id') + + def to_proto(self) -> Event: + return Event(user_id=self.user_id, + resource_type=Event.ResourceType.Value(self.resource_type), + resource_name=self.resource_name, + op_type=Event.OperationType.Value(self.op_type), + result=Event.Result.Value(self.result), + result_code=self.result_code, + source=Event.Source.Value(self.source), + name=self.name, + coordinator_pure_domain_name=self.coordinator_pure_domain_name, + project_id=self.project_id, + extra=self.extra, + user=User(id=self.user.id, username=self.user.username, role=self.user.role.value) + if self.user is not None else None, + event_id=self.id, + uuid=self.uuid, + created_at=to_timestamp(self.created_at)) + + +def to_model(proto: Event) -> EventModel: + return EventModel(name=proto.name, + user_id=proto.user_id, + resource_type=Event.ResourceType.Name(proto.resource_type), + resource_name=proto.resource_name, + op_type=Event.OperationType.Name(proto.op_type), + coordinator_pure_domain_name=proto.coordinator_pure_domain_name, + result=Event.Result.Name(proto.result), + result_code=proto.result_code, + source=Event.Source.Name(proto.source), + extra=proto.extra, + project_id=proto.project_id) diff --git a/web_console_v2/api/fedlearner_webconsole/audit/models_test.py b/web_console_v2/api/fedlearner_webconsole/audit/models_test.py new file mode 100644 index 000000000..304adff35 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/models_test.py @@ -0,0 +1,146 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# coding: utf-8 + +import unittest +from sqlalchemy.exc import IntegrityError +from datetime import datetime, timezone +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.auth.services import UserService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.audit.models import EventModel, to_model +from fedlearner_webconsole.proto import audit_pb2 +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.proto.auth_pb2 import User + + +def generate_event() -> EventModel: + return EventModel(name='some_event', + user_id=1, + resource_type='IAM', + resource_name='some_resource', + op_type='CREATE', + result='SUCCESS', + result_code='OK', + coordinator_pure_domain_name='bytedance', + project_id=1, + source='RPC') + + +class EventModelsTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + events = [generate_event() for _ in range(3)] + events[1].user_id = None + events[2].user_id = 0 + with db.session_scope() as session: + UserService(session).create_user_if_not_exists(username='ada', email='ada@ada.com', password='ada') + session.add_all(events) + session.commit() + self.default_event = audit_pb2.Event(event_id=1, + name='some_event', + user_id=1, + resource_type='IAM', + resource_name='some_resource', + op_type='CREATE', + result='SUCCESS', + result_code='OK', + coordinator_pure_domain_name='bytedance', + project_id=1, + user=User(id=1, username='ada', role='USER'), + created_at=to_timestamp(created_at), + source='RPC') + self.default_event_2 = audit_pb2.Event(event_id=2, + name='some_event', + user_id=None, + resource_type='IAM', + resource_name='some_resource', + op_type='CREATE', + result='SUCCESS', + result_code='OK', + coordinator_pure_domain_name='bytedance', + project_id=1, + user=None, + created_at=to_timestamp(created_at), + source='RPC') + self.default_event_3 = audit_pb2.Event(event_id=3, + name='some_event', + user_id=0, + resource_type='IAM', + resource_name='some_resource', + op_type='CREATE', + result='SUCCESS', + result_code='OK', + coordinator_pure_domain_name='bytedance', + project_id=1, + user=None, + created_at=to_timestamp(created_at), + source='RPC') + + def test_uuids(self): + with db.session_scope() as session: + events = session.query(EventModel).all() + self.assertNotEqual(events[0].uuid, events[1].uuid) + + def test_invalid_instances(self): + event = EventModel() + with db.session_scope() as session: + session.add(event) + self.assertRaises(IntegrityError, session.commit) + + def test_to_proto(self): + with db.session_scope() as session: + result = session.query(EventModel).first() + self.assertDictPartiallyEqual(to_dict(result.to_proto()), to_dict(self.default_event), + ['created_at', 'uuid']) + + def test_save_proto(self): + with db.session_scope() as session: + session.add(to_model(self.default_event)) + session.commit() + + with db.session_scope() as session: + result = session.query(EventModel).get(1) + self.assertDictPartiallyEqual(to_dict(result.to_proto()), to_dict(self.default_event), + ['created_at', 'uuid']) + + def test_without_user_id_to_proto(self): + with db.session_scope() as session: + result = session.query(EventModel).filter_by(user_id=None).first() + self.assertDictPartiallyEqual(to_dict(result.to_proto()), to_dict(self.default_event_2), + ['created_at', 'uuid']) + + def test_without_user_id_save_proto(self): + 
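# Round-trips an event whose user_id is None through to_model and the DB;
+        # to_proto should keep user_id unset and emit no user message.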
+        with db.session_scope() as session:
+            session.add(to_model(self.default_event_2))
+            session.commit()
+
+        with db.session_scope() as session:
+            result = session.query(EventModel).get(2)
+            self.assertDictPartiallyEqual(to_dict(result.to_proto()), to_dict(self.default_event_2),
+                                          ['created_at', 'uuid'])
+
+    def test_user_id_zero_to_proto(self):
+        with db.session_scope() as session:
+            result = session.query(EventModel).filter_by(user_id=0).first()
+            self.assertDictPartiallyEqual(to_dict(result.to_proto()), to_dict(self.default_event_3),
+                                          ['created_at', 'uuid'])
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/audit/services.py b/web_console_v2/api/fedlearner_webconsole/audit/services.py
new file mode 100644
index 000000000..113304d32
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/audit/services.py
@@ -0,0 +1,63 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# coding: utf-8
+from typing import Optional
+
+from sqlalchemy.orm import Session, Query
+
+from fedlearner_webconsole.audit.storage import get_storage
+from fedlearner_webconsole.proto.audit_pb2 import Event
+from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression
+
+
+class EventService:
+
+    def __init__(self, session: Session):
+        """Construct an EventService.
+
+        Args:
+            session (Session): SQLAlchemy session.
+        """
+        self._session = session
+        self.storage = get_storage(self._session)
+
+    def emit_event(self, event: Event) -> None:
+        """Pass an Event instance to storage.
+
+        Args:
+            event (Event): The event record to store.
+
+        Raises:
+            ValueError: Fields are invalid.
+        """
+        self.storage.save_event(event)
+
+    def get_events(self, filter_exp: Optional[FilterExpression] = None) -> Query:
+        """Get events, optionally narrowed by a filtering expression.
+
+        Args:
+            filter_exp (FilterExpression): Filtering expression defined in utils/filtering.py
+        Returns:
+            A SQLAlchemy Query object containing the selected records.
+        """
+        return self.storage.get_events(filter_exp)
+
+    def delete_events(self, filter_exp: FilterExpression):
+        """Delete events matching the filtering expression.
+
+        Args:
+            filter_exp (FilterExpression): Filtering expression defined in utils/filtering.py
+        """
+        self.storage.delete_events(filter_exp)
diff --git a/web_console_v2/api/fedlearner_webconsole/audit/services_test.py b/web_console_v2/api/fedlearner_webconsole/audit/services_test.py
new file mode 100644
index 000000000..2cab952bd
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/audit/services_test.py
@@ -0,0 +1,168 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# coding: utf-8 + +import unittest +from typing import Tuple + +from fedlearner_webconsole.audit.services import EventService +from fedlearner_webconsole.audit.models import EventModel +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.utils.filtering import parse_expression +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterExpressionKind, SimpleExpression, FilterOp +from fedlearner_webconsole.auth.services import UserService + +from testing.no_web_server_test_case import NoWebServerTestCase + + +def get_times() -> Tuple[int, int]: + ts = to_timestamp(now()) + return ts - 60 * 2, ts + 60 * 2 + + +def generate_event() -> EventModel: + return EventModel(name='some_event', + user_id=1, + resource_type='IAM', + resource_name='some_resource', + op_type='CREATE', + result='SUCCESS', + result_code='OK', + coordinator_pure_domain_name='bytedance', + project_id=1, + source='RPC') + + +class EventServiceTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + UserService(session).create_user_if_not_exists(username='ada', email='ada@ada.com', password='ada') + event_1 = generate_event() + event_1.coordinator_pure_domain_name = 'mihoyo' + session.add(event_1) + event_2 = generate_event() + event_2.resource_type = 'WORKFLOW' + event_2.op_type = 'UPDATE' + session.add(event_2) + event_3 = generate_event() + event_3.resource_type = 'DATASET' + event_3.op_type = 'DELETE' + event_3.result_code = 'CANCELLED' + session.add(event_3) + session.commit() + + def test_emit_event(self): + with db.session_scope() as session: + service = EventService(session) + event = generate_event() + event_param = Event(name=event.name, + user_id=event.user_id, + resource_type=event.resource_type, + resource_name=event.resource_name, + result_code=event.result_code, + coordinator_pure_domain_name=event.coordinator_pure_domain_name, + project_id=event.project_id, + op_type=event.op_type, + result=event.result, + source=event.source) + service.emit_event(event_param) + session.commit() + events = service.get_events() + self.assertEqual(4, len(events.all())) + self.assertEqual(now().hour, events.first().created_at.hour) + + def test_get_events(self): + with db.session_scope() as session: + service = EventService(session) + events = service.get_events() + + self.assertEqual(3, len(events.all())) + + def test_get_rpc_events_with_filter(self): + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='coordinator_pure_domain_name', + op=FilterOp.CONTAIN, + string_value='mihoyo', + )) + with db.session_scope() as session: + service = EventService(session) + events = service.get_events(filter_exp) + self.assertEqual(1, len(events.all())) + self.assertEqual(events[0].coordinator_pure_domain_name, 'mihoyo') + + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='op_type', + op=FilterOp.IN, + 
list_value=SimpleExpression.ListValue(string_list=['UPDATE', 'DELETE']))) + with db.session_scope() as session: + service = EventService(session) + events = service.get_events(filter_exp) + self.assertEqual(2, len(events.all())) + self.assertEqual(events[0].op_type, 'DELETE') + self.assertEqual(events[1].op_type, 'UPDATE') + + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='op_type', + op=FilterOp.EQUAL, + string_value='UPDATE', + )) + with db.session_scope() as session: + service = EventService(session) + events = service.get_events(filter_exp) + self.assertEqual(1, len(events.all())) + self.assertEqual(events[0].op_type, 'UPDATE') + + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='result_code', + op=FilterOp.EQUAL, + string_value='CANCELLED', + )) + with db.session_scope() as session: + service = EventService(session) + events = service.get_events(filter_exp) + self.assertEqual(1, len(events.all())) + self.assertEqual(events[0].result_code, 'CANCELLED') + + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='resource_type', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(string_list=['WORKFLOW', 'DATASET']))) + with db.session_scope() as session: + service = EventService(session) + events = service.get_events(filter_exp) + self.assertEqual(2, len(events.all())) + self.assertEqual(events[0].resource_type, 'DATASET') + self.assertEqual(events[1].resource_type, 'WORKFLOW') + + def test_delete_events(self): + with db.session_scope() as session: + service = EventService(session) + start_time, end_time = get_times() + filter_exp = parse_expression(f'(and(start_time>{start_time})(end_time<{end_time}))') + self.assertIsNone(service.delete_events(filter_exp)) + self.assertEqual(0, service.get_events().count()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/audit/storage.py b/web_console_v2/api/fedlearner_webconsole/audit/storage.py new file mode 100644 index 000000000..2c77c1148 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/audit/storage.py @@ -0,0 +1,158 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+# coding: utf-8
+from abc import ABCMeta, abstractmethod
+from datetime import datetime, timezone
+from typing import Optional
+
+from sqlalchemy import func
+from sqlalchemy.orm import Session, Query
+from sqlalchemy.sql.schema import Column
+
+from envs import Envs
+from fedlearner_webconsole.audit.models import EventModel, to_model
+from fedlearner_webconsole.proto.audit_pb2 import Event
+from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp, SimpleExpression
+from fedlearner_webconsole.utils.filtering import FieldType, FilterBuilder, SupportedField
+from fedlearner_webconsole.auth.services import UserService
+from fedlearner_webconsole.db import db
+
+
+def _contains_case_insensitive(exp: SimpleExpression):
+    c: Column = getattr(EventModel, exp.field)
+    return c.ilike(f'%{exp.string_value}%')
+
+
+def _equals_username(exp: SimpleExpression):
+    # Resolves the username to a user id first, since events only store user_id.
+    username = exp.string_value
+    c: Column = EventModel.user_id
+    with db.session_scope() as session:
+        user = UserService(session).get_user_by_username(username)
+        if user is None:
+            return False
+        return c == user.id
+
+
+def _later(exp: SimpleExpression):
+    # start_time filters map to created_at > the given timestamp.
+    c: Column = EventModel.created_at
+    dt = datetime.fromtimestamp(exp.number_value, tz=timezone.utc)
+    return c > dt
+
+
+def _earlier(exp: SimpleExpression):
+    # end_time filters map to created_at < the given timestamp.
+    c: Column = EventModel.created_at
+    dt = datetime.fromtimestamp(exp.number_value, tz=timezone.utc)
+    return c < dt
+
+
+class IStorage(metaclass=ABCMeta):
+
+    @abstractmethod
+    def save_event(self, event: Event) -> None:
+        """Save the event instance into the corresponding storage.
+
+        Args:
+            event (Event): The event instance to be stored.
+        """
+
+    @abstractmethod
+    def get_events(self, filter_exp: Optional[FilterExpression] = None) -> Query:
+        """Get event records from the corresponding storage.
+
+        Args:
+            filter_exp (FilterExpression): Filtering expression defined in utils/filtering.py
+        Returns:
+            A Query object containing the selected events.
+        """
+
+    @abstractmethod
+    def delete_events(self, filter_exp: FilterExpression) -> None:
+        """Delete event records for a period of time.
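+
+        Implementations are expected to soft-delete (e.g. set deleted_at)
+        rather than physically remove rows, as MySqlStorage below does.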
+
+        Args:
+            filter_exp (FilterExpression): Filtering expression defined in utils/filtering.py
+        """
+
+
+class MySqlStorage(IStorage):
+
+    FILTER_FIELDS = {
+        'name':
+            SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: _contains_case_insensitive}),
+        'username':
+            SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: _equals_username}),
+        'resource_type':
+            SupportedField(type=FieldType.STRING, ops={
+                FilterOp.CONTAIN: _contains_case_insensitive,
+                FilterOp.IN: None
+            }),
+        'resource_name':
+            SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: _contains_case_insensitive}),
+        'op_type':
+            SupportedField(type=FieldType.STRING, ops={
+                FilterOp.EQUAL: None,
+                FilterOp.IN: None
+            }),
+        'coordinator_pure_domain_name':
+            SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: _contains_case_insensitive}),
+        'project_id':
+            SupportedField(type=FieldType.NUMBER, ops={FilterOp.EQUAL: None}),
+        'result':
+            SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}),
+        'result_code':
+            SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}),
+        'source':
+            SupportedField(type=FieldType.STRING, ops={
+                FilterOp.EQUAL: None,
+                FilterOp.IN: None
+            }),
+        'start_time':
+            SupportedField(type=FieldType.NUMBER, ops={FilterOp.GREATER_THAN: _later}),
+        'end_time':
+            SupportedField(type=FieldType.NUMBER, ops={FilterOp.LESS_THAN: _earlier}),
+    }
+
+    def __init__(self, session: Session):
+        self._session = session
+        self._filter_builder = FilterBuilder(model_class=EventModel, supported_fields=self.FILTER_FIELDS)
+
+    def save_event(self, event: Event) -> None:
+        self._session.add(to_model(event))
+
+    def get_events(self, filter_exp: Optional[FilterExpression] = None) -> Query:
+        events = self._session.query(EventModel).filter(EventModel.deleted_at.is_(None))
+        if filter_exp is not None:
+            events = self._filter_builder.build_query(events, filter_exp)
+        return events.order_by(EventModel.id.desc())
+
+    def delete_events(self, filter_exp: FilterExpression) -> None:
+        events = self._session.query(EventModel).filter(EventModel.deleted_at.is_(None))
+        events = self._filter_builder.build_query(events, filter_exp)
+        events.update({'deleted_at': func.now()}, synchronize_session='fetch')
+
+
+def get_storage(session: Session) -> Optional[IStorage]:
+    """Get a storage object according to Envs.AUDIT_STORAGE.
+
+    Args:
+        session (Session): Session used to query records.
+
+    Returns:
+        An IStorage object that can save, get and delete events.
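+        Returns None when Envs.AUDIT_STORAGE does not name a supported backend.
+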
+ """ + if Envs.AUDIT_STORAGE == 'db': + return MySqlStorage(session) + # TODO(wangsen.0914): add CloudStorage or other types of storages later + return None diff --git a/web_console_v2/api/fedlearner_webconsole/auth/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/auth/BUILD.bazel new file mode 100644 index 000000000..4b88c98f9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/auth/BUILD.bazel @@ -0,0 +1,135 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "@common_passlib//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "services_lib", + srcs = [ + "services.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "services_lib_test", + size = "small", + srcs = [ + "services_test.py", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "third_party_sso_lib", + srcs = [ + "third_party_sso.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api:config_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_base64_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:helpers_lib", + "@com_google_protobuf//:protobuf_python", + "@common_flask//:pkg", + "@common_pyjwt//:pkg", + "@common_requests//:pkg", + "@common_xmltodict//:pkg", + ], +) + +py_test( + name = "third_party_sso_lib_test", + size = "medium", + srcs = [ + "third_party_sso_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "third_party_sso_test.py", + deps = [ + ":third_party_sso_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:client_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_base64_lib", + 
"//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:helpers_lib", + "@common_flask//:pkg", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/auth/apis.py b/web_console_v2/api/fedlearner_webconsole/auth/apis.py index 7206e3497..d288b5fdf 100644 --- a/web_console_v2/api/fedlearner_webconsole/auth/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/auth/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,27 +13,32 @@ # limitations under the License. # coding: utf-8 -# pylint: disable=cyclic-import +import logging import re -import datetime from http import HTTPStatus -from flask import request -from flask_restful import Resource, reqparse -from flask_jwt_extended.utils import get_current_user -from flask_jwt_extended import create_access_token, decode_token, get_jwt - -from fedlearner_webconsole.utils.base64 import base64decode -from fedlearner_webconsole.utils.decorators import jwt_required -from fedlearner_webconsole.utils.decorators import admin_required +from flask import request +from flask_restful import Resource +from marshmallow import Schema, post_load, fields, validate, EXCLUDE +from marshmallow.decorators import validates_schema +from webargs.flaskparser import use_args + +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.auth.services import UserService, SessionService +from fedlearner_webconsole.iam.client import create_iams_for_user +from fedlearner_webconsole.proto import auth_pb2 +from fedlearner_webconsole.swagger.models import schema_manager + +from fedlearner_webconsole.utils.pp_base64 import base64decode +from fedlearner_webconsole.auth.third_party_sso import credentials_required, SsoHandlerFactory +from fedlearner_webconsole.utils.flask_utils import get_current_user, make_flask_response +from fedlearner_webconsole.utils.decorators.pp_flask import admin_required from fedlearner_webconsole.db import db -from fedlearner_webconsole.auth.models import (State, User, Role, - MUTABLE_ATTRS_MAPPER, Session) -from fedlearner_webconsole.exceptions import (NotFoundException, - InvalidArgumentException, - ResourceConflictException, - UnauthorizedException, - NoAccessException) +from fedlearner_webconsole.auth.models import (Role, MUTABLE_ATTRS_MAPPER) +from fedlearner_webconsole.exceptions import (NotFoundException, InvalidArgumentException, ResourceConflictException, + NoAccessException, UnauthorizedException) +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.auth.third_party_sso import sso_info_manager # rule: password must have a letter, a num and a special character PASSWORD_FORMAT_L = re.compile(r'.*[A-Za-z]') @@ -41,10 +46,9 @@ PASSWORD_FORMAT_S = re.compile(r'.*[`!@#$%^&*()\-_=+|{}\[\];:\'\",<.>/?~]') 
-def check_password_format(password: str): +def _check_password_format(password: str): if not 8 <= len(password) <= 20: - raise InvalidArgumentException( - 'Password is not legal: 8 <= length <= 20') + raise InvalidArgumentException('Password is not legal: 8 <= length <= 20') required_chars = [] if PASSWORD_FORMAT_L.match(password) is None: required_chars.append('a letter') @@ -54,111 +58,159 @@ def check_password_format(password: str): required_chars.append('a special character') if required_chars: tip = ', '.join(required_chars) - raise InvalidArgumentException( - f'Password is not legal: must have {tip}.') + raise InvalidArgumentException(f'Password is not legal: must have {tip}.') -class SigninApi(Resource): - def post(self): - parser = reqparse.RequestParser() - parser.add_argument('username', - required=True, - help='username is empty') - parser.add_argument('password', - required=True, - help='password is empty') - data = parser.parse_args() - username = data['username'] - password = base64decode(data['password']) - user = User.query.filter_by(username=username).filter_by( - state=State.ACTIVE).first() - if user is None: - raise NotFoundException(f'Failed to find user: {username}') - if not user.verify_password(password): - raise UnauthorizedException('Invalid password') - token = create_access_token(identity=username) - decoded_token = decode_token(token) - - session = Session(jti=decoded_token.get('jti'), - expired_at=datetime.datetime.fromtimestamp( - decoded_token.get('exp'))) - db.session.add(session) - db.session.commit() - - return { - 'data': { - 'user': user.to_dict(), - 'access_token': token - } - }, HTTPStatus.OK - - @jwt_required() - def delete(self): - decoded_token = get_jwt() +class UserParameter(Schema): + username = fields.Str(required=True) + # Base64 encoded password + password = fields.Str(required=True, validate=lambda x: _check_password_format(base64decode(x))) + role = fields.Str(required=True, validate=validate.OneOf([x.name for x in Role])) + name = fields.Str(required=True, validate=validate.Length(min=1)) + email = fields.Str(required=True, validate=validate.Email()) + + @post_load + def make_user(self, data, **kwargs): + return auth_pb2.User(**data) + + +class SigninParameter(Schema): + username = fields.String() + password = fields.String() + code = fields.String() + ticket = fields.String() + + @validates_schema + def validate_schema(self, data, **kwargs): + del kwargs + if data.get('username') is None and data.get('code') is None and data.get('ticket') is None: + raise InvalidArgumentException('no credential detected') + + @post_load + def make_proto(self, data, **kwargs): + del kwargs + return auth_pb2.SigninParameter(**data) - jti = decoded_token.get('jti') - Session.query.filter_by(jti=jti).delete() - db.session.commit() - return {}, HTTPStatus.OK +class SigninApi(Resource): + + @use_args(SigninParameter(unknown=EXCLUDE), location='json_or_form') + def post(self, signin_parameter: auth_pb2.SigninParameter): + """Sign in to the system + --- + tags: + - auth + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/SigninParameter' + responses: + 200: + content: + application/json: + schema: + type: object + properties: + access_token: + type: string + user: + type: object + properties: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + """ + sso_name = request.args.get('sso_name') + return make_flask_response(SsoHandlerFactory.get_handler(sso_name).signin(signin_parameter)) + + 
@credentials_required + def delete(self): + """Sign out from the system + --- + tags: + - auth + parameters: + - in: header + name: Authorization + schema: + type: string + description: token used for current session + responses: + 200: + description: Signed out successfully + """ + user = get_current_user() + SsoHandlerFactory.get_handler(user.sso_name).signout() + return make_flask_response() class UsersApi(Resource): - @jwt_required() + + @credentials_required @admin_required def get(self): - return { - 'data': [ - row.to_dict() - for row in User.query.filter_by(state=State.ACTIVE).all() - ] - } - - @jwt_required() + """Get a list of all users + --- + tags: + - auth + responses: + 200: + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + """ + with db.session_scope() as session: + return make_flask_response( + [row.to_dict() for row in UserService(session).get_all_users(filter_deleted=True)]) + + @credentials_required @admin_required - def post(self): - parser = reqparse.RequestParser() - parser.add_argument('username', - required=True, - help='username is empty') - parser.add_argument('password', - required=True, - help='password is empty') - parser.add_argument('role', required=True, help='role is empty') - parser.add_argument('name', required=True, help='name is empty') - parser.add_argument('email', required=True, help='email is empty') - - data = parser.parse_args() - username = data['username'] - password = base64decode(data['password']) - role = data['role'] - name = data['name'] - email = data['email'] - - check_password_format(password) - - if User.query.filter_by(username=username).first() is not None: - raise ResourceConflictException( - 'user {} already exists'.format(username)) - user = User(username=username, - role=role, - name=name, - email=email, - state=State.ACTIVE) - user.set_password(password) - db.session.add(user) - db.session.commit() - - return {'data': user.to_dict()}, HTTPStatus.CREATED + # if use_kwargs is used with explicit parameters, one has to write YAML document! 
+ # Param: https://swagger.io/docs/specification/2-0/describing-parameters/ + # Body: https://swagger.io/docs/specification/2-0/describing-request-body/ + # Resp: https://swagger.io/docs/specification/2-0/describing-responses/ + @use_args(UserParameter(unknown=EXCLUDE)) + @emits_event() + def post(self, params: auth_pb2.User): + """Create a user + --- + tags: + - auth + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/UserParameter' + responses: + 201: + description: The user is created + content: + application/json: + schema: + $ref: '#/definitions/UserParameter' + 409: + description: A user with the same username exists + """ + # Swagger will detect APIs automatically, but params/req body/resp have to be defined manually + with db.session_scope() as session: + user = UserService(session).get_user_by_username(params.username) + if user is not None: + raise ResourceConflictException(f'user {user.username} already exists') + user = UserService(session).create_user_if_not_exists(username=params.username, + role=Role(params.role), + name=params.name, + email=params.email, + password=base64decode(params.password)) + session.commit() + return make_flask_response(user.to_dict(), status=HTTPStatus.CREATED) class UserApi(Resource): - def _find_user(self, user_id) -> User: - user = User.query.filter_by(id=user_id).first() - if user is None or user.state == State.DELETED: - raise NotFoundException( - f'Failed to find user_id: {user_id}') - return user def _check_current_user(self, user_id, msg): current_user = get_current_user() @@ -166,50 +218,188 @@ def _check_current_user(self, user_id, msg): and not user_id == current_user.id: raise NoAccessException(msg) - @jwt_required() + @credentials_required def get(self, user_id): - self._check_current_user(user_id, - 'user cannot get other user\'s information') - user = self._find_user(user_id) - return {'data': user.to_dict()}, HTTPStatus.OK - - @jwt_required() + """Get a user by id + --- + tags: + - auth + parameters: + - in: path + name: user_id + schema: + type: integer + responses: + 200: + description: The user is returned + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + 404: + description: The user with specified ID is not found + """ + self._check_current_user(user_id, 'user cannot get other user\'s information') + + with db.session_scope() as session: + user = UserService(session).get_user_by_id(user_id, filter_deleted=True) + if user is None: + raise NotFoundException(f'Failed to find user_id: {user_id}') + return make_flask_response(user.to_dict()) + + @credentials_required + @emits_event() + # Example of manually defining an API def patch(self, user_id): - self._check_current_user(user_id, - 'user cannot modify other user\'s information') - user = self._find_user(user_id) - - mutable_attrs = MUTABLE_ATTRS_MAPPER.get(get_current_user().role) - - data = request.get_json() - for k, v in data.items(): - if k not in mutable_attrs: - raise InvalidArgumentException(f'cannot edit {k} attribute!') - if k == 'password': - password = base64decode(v) - check_password_format(password) - user.set_password(password) - else: - setattr(user, k, v) - - db.session.commit() - return {'data': user.to_dict()}, HTTPStatus.OK - - @jwt_required() + """Patch a user + --- + tags: + - auth + parameters: + - in: path + name: user_id + required: true + schema: + type: integer + description: The ID of the user + requestBody: + required: true + content: + application/json: + 
schema: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + responses: + 200: + description: The user is updated + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + 404: + description: The user is not found + 400: + description: Attributes selected are uneditable + """ + self._check_current_user(user_id, 'user cannot modify other user\'s information') + + with db.session_scope() as session: + user = UserService(session).get_user_by_id(user_id, filter_deleted=True) + if user is None: + raise NotFoundException(f'Failed to find user_id: {user_id}') + + mutable_attrs = MUTABLE_ATTRS_MAPPER.get(get_current_user().role) + + data = request.get_json() + for k, v in data.items(): + if k not in mutable_attrs: + raise InvalidArgumentException(f'cannot edit {k} attribute!') + if k == 'password': + password = base64decode(v) + _check_password_format(password) + user.set_password(password) + SessionService(session).delete_session_by_user_id(user_id) + elif k == 'role': + user.role = Role(v) + else: + setattr(user, k, v) + create_iams_for_user(user) + session.commit() + return make_flask_response(user.to_dict()) + + @credentials_required @admin_required + @emits_event() def delete(self, user_id): - user = self._find_user(user_id) + """Delete the user with specified ID + --- + tags: + - auth + parameters: + - in: path + name: user_id + schema: + type: integer + responses: + 200: + description: The user with specified ID is deleted + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + 400: + description: Cannot delete the user logged in within current session + 404: + description: The user with specified ID is not found + """ + with db.session_scope() as session: + user_service = UserService(session) + user = user_service.get_user_by_id(user_id, filter_deleted=True) + + if user is None: + raise NotFoundException(f'Failed to find user_id: {user_id}') + + current_user = get_current_user() + if current_user.id == user_id: + raise InvalidArgumentException('cannot delete yourself') + + user = UserService(session).delete_user(user) + session.commit() + return make_flask_response(user.to_dict()) + + +class SsoInfosApi(Resource): - current_user = get_current_user() - if current_user.id == user_id: - raise InvalidArgumentException('cannot delete yourself') - - user.state = State.DELETED - db.session.commit() - return {'data': user.to_dict()}, HTTPStatus.OK + def get(self): + """Get all available options of SSOs + --- + tags: + - auth + responses: + 200: + description: All options are returned + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.Sso' + """ + return make_flask_response([to_dict(sso, with_secret=False) for sso in sso_info_manager.sso_infos]) + + +class SelfUserApi(Resource): + + @credentials_required + def get(self): + """Get current user + --- + tags: + - auth + responses: + 200: + description: User logged in within current session is returned + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.User' + 400: + description: No user is logged in within current session + """ + user = get_current_user() + # Defensively program for unexpected exception + if user is None: + logging.error('No current user.') + raise UnauthorizedException('No current user.') + return make_flask_response(user.to_dict()) def initialize_auth_apis(api): api.add_resource(SigninApi, '/auth/signin') api.add_resource(UsersApi, 
'/auth/users')
    api.add_resource(UserApi, '/auth/users/<int:user_id>')
+    api.add_resource(SsoInfosApi, '/auth/sso_infos')
+    api.add_resource(SelfUserApi, '/auth/self')
+
+    # if a schema is used, one has to append it to schema_manager so Swagger knows there is a schema available
+    schema_manager.append(UserParameter)
+    schema_manager.append(SigninParameter)
diff --git a/web_console_v2/api/fedlearner_webconsole/auth/apis_test.py b/web_console_v2/api/fedlearner_webconsole/auth/apis_test.py
new file mode 100644
index 000000000..9e963b99d
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/auth/apis_test.py
@@ -0,0 +1,306 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import json
+import unittest
+from http import HTTPStatus
+from unittest.mock import patch
+from datetime import timedelta
+
+from testing.common import BaseTestCase
+from testing.helpers import FakeResponse
+from fedlearner_webconsole.auth.services import UserService
+from fedlearner_webconsole.utils.pp_base64 import base64encode
+from fedlearner_webconsole.utils.const import API_VERSION
+from fedlearner_webconsole.utils.pp_datetime import now
+from fedlearner_webconsole.auth.models import State, User
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.proto.auth_pb2 import Sso, OAuthProtocol, CasProtocol
+from fedlearner_webconsole.auth.third_party_sso import get_user_info_with_cache, SsoInfos, OAuthHandler, CasHandler
+from fedlearner_webconsole.auth.models import Session as SessionTbl
+from envs import Envs
+
+
+class UsersApiTest(BaseTestCase):
+
+    def test_get_all_users(self):
+        deleted_user = User(username='deleted_one', email='who.knows@hhh.com', state=State.DELETED)
+        with db.session_scope() as session:
+            session.add(deleted_user)
+            session.commit()
+
+        resp = self.get_helper('/api/v2/auth/users')
+        self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED)
+
+        self.signin_as_admin()
+
+        resp = self.get_helper('/api/v2/auth/users')
+        self.assertEqual(resp.status_code, HTTPStatus.OK)
+        self.assertEqual(len(self.get_response_data(resp)), 3)
+
+    def test_create_new_user(self):
+        new_user = {
+            'username': 'fedlearner',
+            'password': 'fedlearner',
+            'email': 'hello@bytedance.com',
+            'role': 'USER',
+            'name': 'codemonkey',
+        }
+        resp = self.post_helper('/api/v2/auth/users', data=new_user)
+        self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED)
+
+        self.signin_as_admin()
+        illegal_cases = [
+            'aaaaaaaa', '11111111', '!@#$%^[]', 'aaaA1111', 'AAAa!@#$', '1111!@#-', 'aa11!@', 'fl@123.',
+            'fl@1234567890abcdefg.'
+        ]
+        legal_case = 'fl@1234.'
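+        # Each illegal case violates one rule of _check_password_format:
+        # no letter, no digit, no special character, shorter than 8, or
+        # longer than 20 characters; the legal case satisfies every rule.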
+ + for case in illegal_cases: + new_user['password'] = base64encode(case) + resp = self.post_helper('/api/v2/auth/users', data=new_user) + print(self.get_response_data(resp)) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + new_user['password'] = base64encode(legal_case) + resp = self.post_helper('/api/v2/auth/users', data=new_user) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + self.assertEqual(self.get_response_data(resp).get('username'), 'fedlearner') + + # test_repeat_create + resp = self.post_helper('/api/v2/auth/users', data=new_user) + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + + +class AuthApiTest(BaseTestCase): + + def test_partial_update_user_info(self): + self.signin_as_admin() + resp = self.get_helper('/api/v2/auth/users') + resp_data = self.get_response_data(resp) + user_id = resp_data[0]['id'] + admin_id = resp_data[1]['id'] + + self.signin_helper() + resp = self.patch_helper('/api/v2/auth/users/10', data={}) + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + + resp = self.patch_helper(f'/api/v2/auth/users/{user_id}', data={ + 'email': 'a_new_email@bytedance.com', + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp).get('email'), 'a_new_email@bytedance.com') + + resp = self.patch_helper(f'/api/v2/auth/users/{admin_id}', data={ + 'name': 'cannot_modify', + }) + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + + # now we are signing in as admin + self.signin_as_admin() + resp = self.patch_helper(f'/api/v2/auth/users/{user_id}', data={ + 'role': 'ADMIN', + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp).get('role'), 'ADMIN') + + resp = self.patch_helper(f'/api/v2/auth/users/{user_id}', data={ + 'password': base64encode('fl@1234.'), + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + + def test_delete_user(self): + self.signin_as_admin() + resp = self.get_helper('/api/v2/auth/users') + resp_data = self.get_response_data(resp) + user_id = resp_data[0]['id'] + admin_id = resp_data[1]['id'] + + self.signin_helper() + resp = self.delete_helper(url=f'/api/v2/auth/users/{user_id}') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + + self.signin_as_admin() + + resp = self.delete_helper(url=f'/api/v2/auth/users/{admin_id}') + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + resp = self.delete_helper(url=f'/api/v2/auth/users/{user_id}') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp).get('username'), 'ada') + + def test_get_specific_user(self): + resp = self.get_helper(url='/api/v2/auth/users/10086') + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + + resp = self.get_helper(url='/api/v2/auth/users/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp).get('username'), 'ada') + + self.signin_as_admin() + + resp = self.get_helper(url='/api/v2/auth/users/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp).get('username'), 'ada') + + resp = self.get_helper(url='/api/v2/auth/users/10086') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + + def test_signout(self): + self.signin_helper() + + resp = self.delete_helper(url='/api/v2/auth/signin') + self.assertEqual(resp.status_code, HTTPStatus.OK, resp.json) + + resp = self.get_helper(url='/api/v2/auth/users/1') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + 
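+    # Note on the mocks below: OAuthHandler makes two outbound calls, a
+    # requests.post to exchange the code for an access_token and a
+    # requests.get to fetch user info with that token, so the two request
+    # mocks stand in for those calls respectively.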
+    @patch('fedlearner_webconsole.auth.apis.SsoHandlerFactory.get_handler')
+    @patch('fedlearner_webconsole.auth.third_party_sso.requests.post')
+    @patch('fedlearner_webconsole.auth.third_party_sso.requests.get')
+    def test_signin_oauth(self, mock_request_get, mock_request_post, mock_sso_handler):
+
+        mock_sso_handler.return_value = OAuthHandler(Sso(name='test', oauth=OAuthProtocol()))
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={})
+        self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
+
+        mock_request_post.return_value = FakeResponse({}, HTTPStatus.OK)
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={'code': 'wrong_code'})
+        self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED)
+
+        mock_request_post.return_value = FakeResponse({'access_token': 'token'}, HTTPStatus.OK)
+        self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={'code': 'right_code'})
+        mock_request_get.assert_called_once()
+        get_user_info_with_cache.cache_clear()
+        mock_request_get.return_value = FakeResponse({'username': 'test', 'email': 'test'}, HTTPStatus.OK)
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={'code': 'right_code'})
+        data = self.get_response_data(resp)
+        self.assertEqual(data['user']['username'], 'test')
+        # test OAuth sign-in after the user has been deleted
+        with db.session_scope() as session:
+            user = UserService(session).get_user_by_username(data['user']['username'])
+            user.state = State.DELETED
+            session.commit()
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={'code': 'right_code'})
+        self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
+
+    @patch('fedlearner_webconsole.auth.apis.SsoHandlerFactory.get_handler')
+    @patch('fedlearner_webconsole.auth.third_party_sso.requests.get')
+    def test_signin_cas(self, mock_request_get, mock_sso_handler):
+        mock_sso_handler.return_value = CasHandler(Sso(name='test', cas=CasProtocol()))
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={})
+        self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
+
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={'ticket': 'wrong_ticket'})
+        self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED)
+        mock_request_get.assert_called_once()
+        # The XML below follows the standard CAS serviceValidate response
+        # shape expected by CasHandler._service_validate.
+        fake_xml = """
+        <cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
+            <cas:authenticationSuccess>
+                <cas:user>test3</cas:user>
+                <cas:attributes></cas:attributes>
+            </cas:authenticationSuccess>
+        </cas:serviceResponse>
+        """
+        mock_request_get.return_value = FakeResponse(None, HTTPStatus.OK, fake_xml)
+        resp = self.post_helper(url='/api/v2/auth/signin?sso_name=test', data={'ticket': 'right_code'})
+        data = self.get_response_data(resp)
+        self.assertEqual(data['user']['username'], 'test3')
+
+
+class SsoInfosApiTest(BaseTestCase):
+
+    def setUp(self):
+        super().setUp()
+        with open(f'{Envs.BASE_DIR}/testing/test_data/test_sso.json', encoding='utf-8') as f:
+            sso_infos_dict = json.load(f)
+        self.patch_ssoinfos = patch('fedlearner_webconsole.auth.third_party_sso.Envs.SSO_INFOS',
+                                    json.dumps(sso_infos_dict))
+        self.patch_ssoinfos.start()
+
+    def tearDown(self):
+        self.patch_ssoinfos.stop()
+
+    def test_get_sso_infos(self):
+        with patch('fedlearner_webconsole.auth.apis.sso_info_manager', SsoInfos()):
+            resp = self.get_helper(url='/api/v2/auth/sso_infos')
+            data = self.get_response_data(resp)
+            self.assertEqual(len(data), 2)
+            self.assertTrue(data[0].get('oauth'))
+            self.assertEqual(data[0]['oauth'].get('secret'), '')
+
+
+class SelfUserApiTest(BaseTestCase):
+
+    def test_get_self_user(self):
+        resp = self.get_helper(url='/api/v2/auth/self')
+        self.assertEqual(self.get_response_data(resp)['name'], 'ada')
+        self.signout_helper()
+
resp = self.get_helper(url='/api/v2/auth/self') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + self.assertEqual('failed to find x-pc-auth or authorization within headers', resp.json.get('message')) + + +class StrictSignInServiceTest(BaseTestCase): + + def test_sign_in(self): + self.post_helper(f'{API_VERSION}/auth/signin', data={'username': 'ada', 'password': base64encode('fl@.')}) + self.post_helper(f'{API_VERSION}/auth/signin', data={'username': 'ada', 'password': base64encode('fl@.')}) + resp = self.post_helper(f'{API_VERSION}/auth/signin', + data={ + 'username': 'ada', + 'password': base64encode('fl@.') + }) + self.assertStatus(resp, HTTPStatus.BAD_REQUEST) + resp = self.post_helper(f'{API_VERSION}/auth/signin', + data={ + 'username': 'ada', + 'password': base64encode('fl@12345.') + }) + self.assertStatus(resp, HTTPStatus.FORBIDDEN) + self.assertEqual('Account is locked', resp.json['message']) + + def test_banned_time(self): + self.post_helper(f'{API_VERSION}/auth/signin', data={'username': 'ada', 'password': base64encode('fl@.')}) + self.post_helper(f'{API_VERSION}/auth/signin', data={'username': 'ada', 'password': base64encode('fl@.')}) + self.post_helper(f'{API_VERSION}/auth/signin', data={'username': 'ada', 'password': base64encode('fl@.')}) + resp = self.post_helper(f'{API_VERSION}/auth/signin', + data={ + 'username': 'ada', + 'password': base64encode('fl@12345.') + }) + self.assertStatus(resp, HTTPStatus.FORBIDDEN) + with db.session_scope() as session: + session.query(User).filter(User.username == 'ada').first().last_sign_in_at = now() - timedelta(minutes=31) + session.commit() + resp = self.post_helper(f'{API_VERSION}/auth/signin', + data={ + 'username': 'ada', + 'password': base64encode('fl@12345.') + }) + self.assertStatus(resp, HTTPStatus.OK) + + def test_change_password(self): + with db.session_scope() as session: + user_id = UserService(session).get_user_by_username('ada').id + self.assertIsNotNone(session.query(SessionTbl).filter(SessionTbl.user_id == user_id).first()) + self.signin_as_admin() + self.patch_helper(f'{API_VERSION}/auth/users/{user_id}', data={'password': base64encode('flfl123123.')}) + with db.session_scope() as session: + self.assertIsNone(session.query(SessionTbl).filter(SessionTbl.user_id == user_id).first()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/auth/models.py b/web_console_v2/api/fedlearner_webconsole/auth/models.py index cad09bc5c..4778c2f0b 100644 --- a/web_console_v2/api/fedlearner_webconsole/auth/models.py +++ b/web_console_v2/api/fedlearner_webconsole/auth/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,6 +15,7 @@ # coding: utf-8 import enum + from passlib.apps import custom_app_context as pwd_context from sqlalchemy.sql.schema import UniqueConstraint, Index from sqlalchemy.sql import func @@ -24,14 +25,16 @@ class Role(enum.Enum): - USER = 'user' - ADMIN = 'admin' + USER = 'USER' + ADMIN = 'ADMIN' +# yapf: disable MUTABLE_ATTRS_MAPPER = { Role.USER: ('password', 'name', 'email'), Role.ADMIN: ('password', 'role', 'name', 'email') } +# yapf: enable class State(enum.Enum): @@ -42,19 +45,27 @@ class State(enum.Enum): @to_dict_mixin(ignores=['password', 'state']) class User(db.Model): __tablename__ = 'users_v2' - __table_args__ = (UniqueConstraint('username', name='uniq_username'), - default_table_args('This is webconsole user table')) - id = db.Column(db.Integer, primary_key=True, comment='user id') + __table_args__ = (UniqueConstraint('username', + name='uniq_username'), default_table_args('This is webconsole user table')) + id = db.Column(db.Integer, primary_key=True, comment='user id', autoincrement=True) username = db.Column(db.String(255), comment='unique name of user') password = db.Column(db.String(255), comment='user password after encode') - role = db.Column(db.Enum(Role, native_enum=False), + role = db.Column(db.Enum(Role, native_enum=False, create_constraint=False, length=21), default=Role.USER, comment='role of user') name = db.Column(db.String(255), comment='name of user') email = db.Column(db.String(255), comment='email of user') - state = db.Column(db.Enum(State, native_enum=False), + state = db.Column(db.Enum(State, native_enum=False, create_constraint=False, length=21), default=State.ACTIVE, comment='state of user') + sso_name = db.Column(db.String(255), comment='sso_name') + last_sign_in_at = db.Column(db.DateTime(timezone=True), + nullable=True, + comment='the last time when user tries to sign in') + failed_sign_in_attempts = db.Column(db.Integer, + nullable=False, + default=0, + comment='failed sign in attempts since last successful sign in') def set_password(self, password): self.password = pwd_context.hash(password) @@ -63,17 +74,12 @@ def verify_password(self, password): return pwd_context.verify(password, self.password) +@to_dict_mixin(ignores=['expired_at', 'created_at']) class Session(db.Model): __tablename__ = 'session_v2' - __table_args__ = (Index('idx_jti', 'jti'), - default_table_args('This is webconsole session table')) - id = db.Column(db.Integer, - primary_key=True, - autoincrement=True, - comment='session id') + __table_args__ = (Index('idx_jti', 'jti'), default_table_args('This is webconsole session table')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='session id') jti = db.Column(db.String(64), comment='JWT jti') - expired_at = db.Column(db.DateTime(timezone=True), - comment='expired time, for db automatically clear') - created_at = db.Column(db.DateTime(timezone=True), - server_default=func.now(), - comment='created at') + user_id = db.Column(db.Integer, nullable=False, comment='for whom the session is created') + expired_at = db.Column(db.DateTime(timezone=True), comment='expired time, for db automatically clear') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at') diff --git a/web_console_v2/api/fedlearner_webconsole/auth/services.py b/web_console_v2/api/fedlearner_webconsole/auth/services.py new file mode 100644 index 000000000..e0b41b226 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/auth/services.py @@ -0,0 +1,110 @@ +# Copyright 2023 The FedLearner 
Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import logging +from typing import List, Optional +from sqlalchemy.orm import Session + +from fedlearner_webconsole.auth.models import State, User, Session as SessionTbl, Role +from fedlearner_webconsole.iam.client import create_iams_for_user +from fedlearner_webconsole.utils.const import SIGN_IN_INTERVAL_SECONDS, MAX_SIGN_IN_ATTEMPTS +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp + + +class UserService(object): + + def __init__(self, session: Session): + self._session = session + + def _filter_deleted_user(self, user: User) -> Optional[User]: + if not user or user.state == State.DELETED: + return None + return user + + def get_user_by_id(self, user_id: int, filter_deleted=False) -> Optional[User]: + user = self._session.query(User).filter_by(id=user_id).first() + if filter_deleted: + return self._filter_deleted_user(user) + return user + + def get_user_by_username(self, username: str, filter_deleted=False) -> Optional[User]: + user = self._session.query(User).filter_by(username=username).first() + if filter_deleted: + return self._filter_deleted_user(user) + return user + + def get_all_users(self, filter_deleted=False) -> List[User]: + if filter_deleted: + return self._session.query(User).filter_by(state=State.ACTIVE).all() + return self._session.query(User).all() + + def delete_user(self, user: User) -> User: + user.state = State.DELETED + return user + + def create_user_if_not_exists(self, + username: str, + email: str, + name: Optional[str] = None, + role: Role = Role.USER, + sso_name: Optional[str] = None, + password: Optional[str] = None) -> User: + user = self.get_user_by_username(username) + if user is None: + user = User(username=username, name=name, email=email, state=State.ACTIVE, role=role, sso_name=sso_name) + if password is not None: + user.set_password(password) + self._session.add(user) + create_iams_for_user(user) + return user + + +class SessionService(object): + + def __init__(self, session: Session): + self._session = session + + def get_session_by_jti(self, jti: str) -> Optional[SessionTbl]: + return self._session.query(SessionTbl).filter_by(jti=jti).first() + + def delete_session(self, session_obj: SessionTbl) -> Optional[SessionTbl]: + if session_obj is None: + logging.warning('deleting a non-existence session...') + return None + self._session.delete(session_obj) + return session_obj + + def delete_session_by_user_id(self, user_id: int) -> Optional[SessionTbl]: + session_obj = self._session.query(SessionTbl).filter(SessionTbl.user_id == user_id).first() + return self.delete_session(session_obj) + + +class StrictSignInService(object): + + def __init__(self, session: Session): + self._session = session + + def can_sign_in(self, user: User): + if user.last_sign_in_at is None or \ + to_timestamp(now()) - to_timestamp(user.last_sign_in_at) > SIGN_IN_INTERVAL_SECONDS: + return True + return not user.failed_sign_in_attempts >= MAX_SIGN_IN_ATTEMPTS + + def 
update(self, user: User, is_signed_in: bool = True): + user.last_sign_in_at = now() + if is_signed_in: + user.failed_sign_in_attempts = 0 + else: + user.failed_sign_in_attempts += 1 diff --git a/web_console_v2/api/fedlearner_webconsole/auth/services_test.py b/web_console_v2/api/fedlearner_webconsole/auth/services_test.py new file mode 100644 index 000000000..b4a810361 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/auth/services_test.py @@ -0,0 +1,125 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from fedlearner_webconsole.auth.models import State, User, Session as SessionTbl +from fedlearner_webconsole.auth.services import UserService, SessionService + +from fedlearner_webconsole.db import db +from testing.no_web_server_test_case import NoWebServerTestCase + + +class UserServiceTest(NoWebServerTestCase): + + def test_get_user_by_id(self): + # case1: unexisted one + unexisted_uid = 9999 + with db.session_scope() as session: + self.assertIsNone(UserService(session).get_user_by_id(unexisted_uid)) + + # case2: deleted one + with db.session_scope() as session: + deleted_user = User(username='deleted_one', email='who.knows@hhh.com', state=State.DELETED) + session.add(deleted_user) + session.commit() + with db.session_scope() as session: + self.assertIsNone(UserService(session).get_user_by_id(deleted_user.id, filter_deleted=True)) + + # case3: a real one + with db.session_scope() as session: + real_user = User(username='real_one', email='who.knows@hhh.com', state=State.ACTIVE) + session.add(real_user) + session.commit() + with db.session_scope() as session: + self.assertEqual(UserService(session).get_user_by_id(real_user.id).username, 'real_one') + + def test_get_user_by_username(self): + # case1: unexisted one + unexisted_username = 'none_existed' + with db.session_scope() as session: + self.assertIsNone(UserService(session).get_user_by_username(unexisted_username)) + + # case2: deleted one + with db.session_scope() as session: + deleted_user = User(username='deleted_one', email='who.knows@hhh.com', state=State.DELETED) + session.add(deleted_user) + session.commit() + with db.session_scope() as session: + self.assertIsNone(UserService(session).get_user_by_username(deleted_user.username, filter_deleted=True)) + + # case3: a real one + with db.session_scope() as session: + real_user = User(username='real_one', email='who.knows@hhh.com', state=State.ACTIVE) + session.add(real_user) + session.commit() + with db.session_scope() as session: + self.assertEqual(UserService(session).get_user_by_username(real_user.username).id, 2) + + def test_get_all_users(self): + with db.session_scope() as session: + session.add_all([ + User(username='real_one', email='who.knows@hhh.com', state=State.ACTIVE), + User(username='deleted_one', email='who.knows@hhh.com', state=State.DELETED) + ]) + session.commit() + with db.session_scope() as session: + self.assertEqual(len(UserService(session).get_all_users()), 2) + 
self.assertEqual(len(UserService(session).get_all_users(filter_deleted=True)), 1) + + def test_delete_user(self): + with db.session_scope() as session: + user = User(username='real_one', email='who.knows@hhh.com', state=State.ACTIVE) + session.add(user) + session.commit() + with db.session_scope() as session: + deleted_user = UserService(session).delete_user(user) + session.commit() + self.assertEqual(deleted_user.state, State.DELETED) + + +class SessionServiceTest(NoWebServerTestCase): + + def test_get_session_by_jti(self): + jti = 'test' + with db.session_scope() as session: + session.add(SessionTbl(jti=jti, user_id=1)) + session.commit() + with db.session_scope() as session: + session_obj = SessionService(session).get_session_by_jti(jti) + self.assertEqual(session_obj.jti, jti) + session_obj = SessionService(session).get_session_by_jti('fjeruif') + self.assertIsNone(session_obj) + + def test_delete_session(self): + jti = 'test' + with db.session_scope() as session: + session.add(SessionTbl(jti=jti, user_id=1)) + session.commit() + with db.session_scope() as session: + session_obj = SessionService(session).get_session_by_jti(jti) + session_obj = SessionService(session).delete_session(session_obj) + self.assertEqual(session_obj.jti, jti) + session.commit() + with db.session_scope() as session: + self.assertIsNone(SessionService(session).get_session_by_jti(jti)) + + with db.session_scope() as session: + session_obj = SessionService(session).get_session_by_jti('dfas') + session_obj = SessionService(session).delete_session(session_obj) + self.assertIsNone(session_obj) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/auth/third_party_sso.py b/web_console_v2/api/fedlearner_webconsole/auth/third_party_sso.py new file mode 100644 index 000000000..137ba84ec --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/auth/third_party_sso.py @@ -0,0 +1,376 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import enum +import json +import logging +import uuid +from abc import ABCMeta, abstractmethod +from datetime import timedelta, timezone +from typing import Optional +from collections import namedtuple +from http import HTTPStatus +from functools import wraps +from urllib.parse import urlencode + +import requests +import jwt +from flask import request, g +import xmltodict +from google.protobuf.json_format import ParseDict +from config import Config +from envs import Envs +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.const import SSO_HEADER +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.utils.decorators.lru_cache import lru_cache +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp +from fedlearner_webconsole.utils.flask_utils import set_current_user +from fedlearner_webconsole.utils.pp_base64 import base64decode +from fedlearner_webconsole.proto.auth_pb2 import SigninParameter, Sso +from fedlearner_webconsole.exceptions import UnauthorizedException, InvalidArgumentException, NoAccessException +from fedlearner_webconsole.auth.services import UserService, SessionService, StrictSignInService +from fedlearner_webconsole.auth.models import Session, State, User + +UserInfo = namedtuple('UserInfo', ['username', 'email']) + + +class SsoProtocol(enum.Enum): + OAUTH = 'oauth' + CAS = 'cas' + + +def _generate_jwt_session(username: str, user_id: int, session: Session) -> str: + delta = timedelta(seconds=Config.JWT_ACCESS_TOKEN_EXPIRES) + expire_time = now(timezone.utc) + delta + jti = str(uuid.uuid4()) + token = jwt.encode( + { + 'username': username, + 'exp': expire_time, + 'jti': jti + }, + key=Config.JWT_SECRET_KEY, + ) + session_obj = Session(jti=jti, user_id=user_id, expired_at=expire_time) + session.add(session_obj) + # PyJWT api has a breaking change for return types + if isinstance(token, bytes): + token = token.decode() + return token + + +def _signout_jwt_session(): + if hasattr(g, 'jti'): + jti = g.jti + else: + raise UnauthorizedException('Not sign in with jwt.') + with db.session_scope() as session: + session_obj = SessionService(session).get_session_by_jti(jti) + SessionService(session).delete_session(session_obj) + session.commit() + + +def _validate_jwt_session(credentials: str) -> str: + time_now = to_timestamp(now(timezone.utc)) + decoded_token = jwt.decode(credentials, Config.JWT_SECRET_KEY, algorithms='HS256') + expire_time = decoded_token.get('exp') + jti = decoded_token.get('jti') + username = decoded_token.get('username') + with db.session_scope() as session: + session = SessionService(session).get_session_by_jti(jti) + if session is None: + raise UnauthorizedException('No session.') + if expire_time < time_now: + raise UnauthorizedException('Token has expired.') + # Set jti to for signout to find the session to remove. + g.jti = jti + return username + + +class SsoHandler(metaclass=ABCMeta): + + def __init__(self, sso): + self.sso = sso + + @abstractmethod + def signin(self, signin_parameter: SigninParameter) -> dict: + pass + + @abstractmethod + def signout(self): + pass + + @abstractmethod + def check_credentials(self, credentials) -> str: + """ + Check credentials and return the username. 
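+
+        Raises:
+            Exception: when the credentials are invalid or expired; callers
+                wrap any failure into an UnauthorizedException.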
+ """ + + def check_credentials_and_set_current_user(self, credentials): + try: + username = self.check_credentials(credentials) + except Exception as err: + raise UnauthorizedException(str(err)) from err + with db.session_scope() as session: + user = UserService(session).get_user_by_username(username, filter_deleted=True) + if user is None: + raise UnauthorizedException(f'User {username} not found.') + set_current_user(user) + + @classmethod + def check_user_validity(cls, user: User): + if user.state == State.DELETED: + error_msg = f'user: {user.username} has been deleted' + logging.error(error_msg) + raise InvalidArgumentException(error_msg) + + +class OAuthHandler(SsoHandler): + + def get_access_token(self, code: str) -> str: + try: + r = requests.post(self.sso.oauth.access_token_url, + data={ + 'code': code, + 'client_id': self.sso.oauth.client_id, + 'client_secret': self.sso.oauth.secret, + 'redirect_uri': self.sso.oauth.redirect_uri, + 'grant_type': 'authorization_code' + }) + except Exception as e: + error_msg = f'Get access_token failed from sso: {self.sso.name}: {str(e)}.' + logging.error(error_msg) + raise UnauthorizedException(error_msg) from e + if r.status_code != HTTPStatus.OK: + error_msg = f'Get access_token failed from sso: {self.sso.name}: {r.json()}.' + logging.error(error_msg) + raise UnauthorizedException(error_msg) + access_token = r.json().get('access_token') + if access_token is None: + error_msg = f'Get access_token failed from sso: ' \ + f'{self.sso.name}: no access_token in response.' + logging.error(error_msg) + raise UnauthorizedException(error_msg) + return access_token + + def get_user_info(self, access_token: str) -> UserInfo: + user_info = get_user_info_with_cache(self.sso.name, self.sso.oauth.user_info_url, access_token, + self.sso.oauth.username_key, self.sso.oauth.email_key) + return user_info + + def signin(self, signin_parameter: SigninParameter) -> dict: + code = signin_parameter.code + if code == '': + raise InvalidArgumentException('OAuth code is not found') + access_token = self.get_access_token(code) + + user_info = self.get_user_info(access_token) + + with db.session_scope() as session: + user = UserService(session).create_user_if_not_exists(username=user_info.username, + email=user_info.email, + sso_name=self.sso.name, + name=user_info.username) + self.check_user_validity(user) + StrictSignInService(session).update(user, is_signed_in=True) + session.commit() + return {'user': user.to_dict(), 'access_token': access_token} + + def signout(self): + get_user_info_with_cache.cache_clear() + + def check_credentials(self, credentials): + user_info = get_user_info_with_cache(self.sso.name, self.sso.oauth.user_info_url, credentials, + self.sso.oauth.username_key, self.sso.oauth.email_key) + return user_info.username + + +class JwtHandler(SsoHandler): + + def __init__(self): + super().__init__(None) + + def signin(self, signin_parameter: SigninParameter) -> dict: + username = signin_parameter.username + password = base64decode(signin_parameter.password) + if username == '' or password == '': + raise InvalidArgumentException('username or password is not found') + with db.session_scope() as session: + user = UserService(session).get_user_by_username(username, filter_deleted=True) + if user is None: + raise InvalidArgumentException(f'Failed to find user: {username}') + strict_service = StrictSignInService(session) + if not strict_service.can_sign_in(user): + raise NoAccessException('Account is locked') + if not user.verify_password(password): + 
logging.warning(f'user {user.username} login failed due to wrong password') + emit_store('user.wrong_password', 1) + strict_service.update(user, is_signed_in=False) + session.commit() + raise InvalidArgumentException('Invalid password') + token = _generate_jwt_session(username, user.id, session) + strict_service.update(user, is_signed_in=True) + session.commit() + return {'user': user.to_dict(), 'access_token': token} + + def signout(self): + _signout_jwt_session() + + def check_credentials(self, credentials: str) -> str: + return _validate_jwt_session(credentials) + + +class CasHandler(SsoHandler): + + def _service_validate(self, ticket: str) -> str: + params_dict = dict( + service=self.sso.cas.service_url, + ticket=ticket, + ) + validate_url = f'{self.sso.cas.cas_server_url}' \ + f'{self.sso.cas.validate_route}?{urlencode(params_dict)}' + r = requests.get(validate_url) + if r.status_code != HTTPStatus.OK: + logging.error(f'Cas sso {self.sso.name} receive Error code {r.status_code}') + raise UnauthorizedException('Sso server error.') + resp_dict = xmltodict.parse(r.content) + if 'cas:authenticationSuccess' in resp_dict['cas:serviceResponse']: + resp_data = resp_dict['cas:serviceResponse']['cas:authenticationSuccess'] + return resp_data['cas:user'] + logging.error(f'sso: {self.sso.name} CAS returned unexpected result') + raise UnauthorizedException('Wrong ticket.') + + def signin(self, signin_parameter: SigninParameter) -> dict: + ticket = signin_parameter.ticket + if ticket == '': + raise InvalidArgumentException('CAS ticket is not found') + username = self._service_validate(ticket) + + with db.session_scope() as session: + user = UserService(session).create_user_if_not_exists(username=username, + name=username, + email='', + sso_name=self.sso.name) + self.check_user_validity(user) + session.flush() + token = _generate_jwt_session(username, user.id, session) + StrictSignInService(session).update(user, is_signed_in=True) + session.commit() + return {'user': user.to_dict(), 'access_token': token} + + def signout(self): + _signout_jwt_session() + + def check_credentials(self, credentials: str) -> str: + return _validate_jwt_session(credentials) + + +class SsoInfos: + + def __init__(self): + try: + sso_infos_dict = json.loads(Envs.SSO_INFOS) + except Exception as e: # pylint: disable=broad-except + logging.error(f'Failed parse SSO_INFOS: {str(e)}') + sso_infos_dict = [] + self.sso_infos = [] + # sso_infos without server info which should not be visible to the frontend + self.sso_handlers = {} + for sso in sso_infos_dict: + # check the format of sso_infos + sso_proto = ParseDict(sso, Sso(), ignore_unknown_fields=True) + if sso_proto.name == 'default': + logging.error('Sso name should not be \'default\'') + self.sso_infos.append(sso_proto) + if sso_proto.WhichOneof('protocol') == SsoProtocol.OAUTH.value: + self.sso_handlers[sso_proto.name] = OAuthHandler(sso_proto) + elif sso_proto.WhichOneof('protocol') == SsoProtocol.CAS.value: + self.sso_handlers[sso_proto.name] = CasHandler(sso_proto) + else: + logging.error(f'SSO {sso_proto.name} does not have supported protocol.') + self.sso_handlers['default'] = JwtHandler() + + def get_sso_info(self, name: str) -> Optional[Sso]: + for sso in self.sso_infos: + if name == sso.name: + return sso + return None + + +sso_info_manager = SsoInfos() + + +class SsoHandlerFactory: + + @staticmethod + def get_handler(sso_name) -> SsoHandler: + jwt_handler = sso_info_manager.sso_handlers['default'] + return sso_info_manager.sso_handlers.get(sso_name, jwt_handler) + 
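+
+# For reference (an assumed example, not part of this change): Envs.SSO_INFOS
+# holds a JSON list parsed into auth_pb2.Sso messages. Only fields referenced
+# in this module are shown and all values below are made up:
+#
+#     [{"name": "my_oauth", "display_name": "My OAuth",
+#       "oauth": {"access_token_url": "https://sso.example.com/token",
+#                 "user_info_url": "https://sso.example.com/userinfo",
+#                 "client_id": "cid", "secret": "xxx",
+#                 "username_key": "username", "email_key": "email"}},
+#      {"name": "my_cas",
+#       "cas": {"cas_server_url": "https://cas.example.com",
+#               "service_url": "https://webconsole.example.com",
+#               "validate_route": "/serviceValidate"}}]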
+ +# Separate the func from the class to avoid leaking memory. +@lru_cache(timeout=600, maxsize=128) +def get_user_info_with_cache(sso_name: str, user_info_url: str, access_token: str, username_key: str, + email_key: str) -> Optional[UserInfo]: + if not username_key: + username_key = 'username' + if not email_key: + email_key = 'email' + try: + r = requests.get(user_info_url, headers={'Authorization': f'Bearer {access_token}'}) + except Exception as e: # pylint: disable=broad-except + error_msg = f'Get user_info failed from sso: {sso_name}: {str(e)}.' + logging.error(error_msg) + raise UnauthorizedException(error_msg) from e + if r.status_code != HTTPStatus.OK: + error_msg = f'Get user_info failed from sso: {sso_name}: {r.json()}.' + logging.error(error_msg) + raise UnauthorizedException(error_msg) + user_info_dict = r.json() + # This is to be compatible with some API response schema with data. + if 'data' in user_info_dict: + user_info_dict = user_info_dict.get('data') + if username_key not in user_info_dict: + error_msg = f'Get user_info failed from sso: ' \ + f'{sso_name}: no {username_key} in response.' + logging.error(error_msg) + raise UnauthorizedException(error_msg) + user_info = UserInfo(username=user_info_dict.get(username_key), email=user_info_dict.get(email_key, '')) + return user_info + + +def credentials_required(fn): + + @wraps(fn) + def decorator(*args, **kwargs): + + sso_headers = request.headers.get(SSO_HEADER, None) + jwt_headers = request.headers.get('Authorization', None) + sso_name = None + + if sso_headers is None and jwt_headers is None and Envs.DEBUG: + return fn(*args, **kwargs) + + if sso_headers: + sso_name, _, credentials = sso_headers.split() + elif jwt_headers: + _, credentials = jwt_headers.split() + else: + raise UnauthorizedException(f'failed to find {SSO_HEADER} or authorization within headers') + SsoHandlerFactory.get_handler(sso_name).check_credentials_and_set_current_user(credentials) + return fn(*args, **kwargs) + + return decorator diff --git a/web_console_v2/api/fedlearner_webconsole/auth/third_party_sso_test.py b/web_console_v2/api/fedlearner_webconsole/auth/third_party_sso_test.py new file mode 100644 index 000000000..1207c26bf --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/auth/third_party_sso_test.py @@ -0,0 +1,159 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import unittest +from http import HTTPStatus +from unittest.mock import patch + +import jwt +from flask import g +from datetime import timedelta +from config import Config +from envs import Envs +from testing.common import BaseTestCase +from testing.helpers import FakeResponse +from fedlearner_webconsole.auth.services import SessionService +from fedlearner_webconsole.auth.third_party_sso import credentials_required, \ + get_user_info_with_cache, SsoInfos, JwtHandler +from fedlearner_webconsole.exceptions import UnauthorizedException +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.pp_datetime import now + + +@credentials_required +def test_some_api(): + return 1 + + +class OauthHandlerTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + session.add(User(username='test')) + session.commit() + with open(f'{Envs.BASE_DIR}/testing/test_data/test_sso.json', encoding='utf-8') as f: + self.patch_ssoinfos = patch('fedlearner_webconsole.auth.third_party_sso.Envs.SSO_INFOS', f.read()) + self.patch_ssoinfos.start() + self.fake_sso_info_manager = SsoInfos() + self.patch_manager = patch('fedlearner_webconsole.auth.third_party_sso.sso_info_manager', + self.fake_sso_info_manager) + self.patch_manager.start() + + def tearDown(self): + self.patch_manager.stop() + self.patch_ssoinfos.stop() + # clear cache to isolate the cache of each test case. + get_user_info_with_cache.cache_clear() + super().tearDown() + + def test_get_sso_infos(self): + self.assertEqual(len(self.fake_sso_info_manager.sso_infos), 2) + + def test_get_sso_info(self,): + self.assertEqual(self.fake_sso_info_manager.get_sso_info('test').name, 'test') + self.assertEqual(self.fake_sso_info_manager.get_sso_info('test').display_name, 'test') + + @patch('fedlearner_webconsole.auth.third_party_sso.requests.get') + @patch('fedlearner_webconsole.auth.third_party_sso.request.headers.get') + def test_credentials_required(self, mock_headers, mock_request_get): + # test not supported sso + mock_headers.return_value = 'not_supported_sso oauth access_token' + self.assertRaises(UnauthorizedException, test_some_api) + mock_request_get.return_value = FakeResponse({'username': 'test', 'email': 'test'}, HTTPStatus.OK) + self.assertRaises(UnauthorizedException, test_some_api) + + # test supported sso + mock_headers.return_value = 'test oauth access_token' + test_some_api() + + @patch('fedlearner_webconsole.auth.third_party_sso.requests.get') + @patch('fedlearner_webconsole.auth.third_party_sso.request.headers.get') + def test_get_user_info_cache(self, mock_headers, mock_request_get): + mock_headers.return_value = 'test oauth access_token' + mock_request_get.return_value = FakeResponse({'username': 'test', 'email': 'test'}, HTTPStatus.OK) + test_some_api() + test_some_api() + mock_request_get.assert_called_once() + mock_headers.return_value = 'test oauth access_token1' + mock_request_get.return_value = FakeResponse({'data': {'username': 'test', 'email': 'test'}}, HTTPStatus.OK) + test_some_api() + test_some_api() + self.assertEqual(mock_request_get.call_count, 2) + + +class JwtHandlerTest(BaseTestCase): + + def test_check_credentials(self): + jwt_handler = JwtHandler() + self.assertEqual(jwt_handler.check_credentials(self._token), 'ada') + jti = jwt.decode(self._token, key=Config.JWT_SECRET_KEY, algorithms='HS256').get('jti') + with db.session_scope() as session: + session_obj = 
SessionService(session).get_session_by_jti(jti) + SessionService(session).delete_session(session_obj=session_obj) + session.commit() + self.assertRaises(UnauthorizedException, jwt_handler.check_credentials, self._token) + self.signin_as_admin() + with patch('fedlearner_webconsole.auth.third_party_sso.now') as fake_now: + fake_now.return_value = now() + timedelta(seconds=86405) + self.assertRaises(UnauthorizedException, jwt_handler.check_credentials, self._token) + + def test_signout(self): + jwt_handler = JwtHandler() + jti = jwt.decode(self._token, key=Config.JWT_SECRET_KEY, algorithms='HS256').get('jti') + with db.session_scope() as session: + session_obj = SessionService(session).get_session_by_jti(jti) + self.assertIsNotNone(session_obj) + g.jti = jti + jwt_handler.signout() + with db.session_scope() as session: + session_obj = SessionService(session).get_session_by_jti(jti) + self.assertIsNone(session_obj) + + +class CasHandlerTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + session.add(User(username='test')) + session.commit() + with open(f'{Envs.BASE_DIR}/testing/test_data/test_sso.json', encoding='utf-8') as f: + self.patch_ssoinfos = patch('fedlearner_webconsole.auth.third_party_sso.Envs.SSO_INFOS', f.read()) + self.patch_ssoinfos.start() + self.fake_sso_info_manager = SsoInfos() + self.patch_manager = patch('fedlearner_webconsole.auth.third_party_sso.sso_info_manager', + self.fake_sso_info_manager) + self.patch_manager.start() + + def tearDown(self): + self.patch_manager.stop() + self.patch_ssoinfos.stop() + super().tearDown() + + @patch('fedlearner_webconsole.auth.third_party_sso.requests.get') + @patch('fedlearner_webconsole.auth.third_party_sso.request.headers.get') + def test_credentials_required_cas(self, mock_headers, mock_request_get): + mock_request_get.return_value = FakeResponse({'username': 'test', 'email': 'test'}, HTTPStatus.OK) + # test supported sso + mock_headers.return_value = f'test_cas cas {self._token}' + test_some_api() + mock_headers.return_value = f'test_cas cas {self._token}aa' + self.assertRaises(UnauthorizedException, test_some_api) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/cleanup/BUILD.bazel new file mode 100644 index 000000000..835bb1f4b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/cleanup/BUILD.bazel @@ -0,0 +1,137 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_test", + size = "medium", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "services_lib", + srcs = ["services.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "services_test", + size = "medium", + srcs = [ + "services_test.py", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "cleaner_cronjob_lib", + srcs = ["cleaner_cronjob.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "cleaner_cronjob_test", + size = "medium", + srcs = [ + "cleaner_cronjob_test.py", + ], + imports = ["../.."], + main = "cleaner_cronjob_test.py", + deps = [ + ":cleaner_cronjob_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "@common_flask_restful//:pkg", + ], +) + +py_test( + name = "apis_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/setting/__init__.py b/web_console_v2/api/fedlearner_webconsole/cleanup/__init__.py similarity index 100% rename from web_console_v2/api/fedlearner_webconsole/setting/__init__.py rename to web_console_v2/api/fedlearner_webconsole/cleanup/__init__.py diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/apis.py 
b/web_console_v2/api/fedlearner_webconsole/cleanup/apis.py
new file mode 100644
index 000000000..0991ca90e
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/cleanup/apis.py
@@ -0,0 +1,150 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from flask_restful import Resource, Api
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.exceptions import InvalidArgumentException
+from fedlearner_webconsole.auth.third_party_sso import credentials_required
+from fedlearner_webconsole.utils.decorators.pp_flask import admin_required, use_args
+from fedlearner_webconsole.utils.flask_utils import make_flask_response, FilterExpField
+from fedlearner_webconsole.cleanup.services import CleanupService
+from marshmallow import Schema, fields
+
+
+class GetCleanupParams(Schema):
+    filter = FilterExpField(required=False, load_default=None)
+    page = fields.Integer(required=False, load_default=None)
+    page_size = fields.Integer(required=False, load_default=None)
+
+
+class CleanupsApi(Resource):
+
+    @credentials_required
+    @admin_required
+    @use_args(GetCleanupParams(), location='query')
+    def get(self, params: dict):
+        """Get a list of all cleanups
+        ---
+        tags:
+        - cleanup
+        description: get cleanups list
+        parameters:
+        - in: query
+          name: filter
+          schema:
+            type: string
+          required: false
+        - in: query
+          name: page
+          schema:
+            type: integer
+          required: false
+        - in: query
+          name: page_size
+          schema:
+            type: integer
+          required: false
+        responses:
+          200:
+            description: Get cleanups list result
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.CleanupPb'
+        """
+        with db.session_scope() as session:
+            try:
+                pagination = CleanupService(session).get_cleanups(
+                    filter_exp=params['filter'],
+                    page=params['page'],
+                    page_size=params['page_size'],
+                )
+            except ValueError as e:
+                raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e
+            data = [t.to_proto() for t in pagination.get_items()]
+            return make_flask_response(data=data, page_meta=pagination.get_metadata())
+
+
+class CleanupApi(Resource):
+
+    @credentials_required
+    @admin_required
+    def get(self, cleanup_id: int):
+        """Get a cleanup by id
+        ---
+        tags:
+        - cleanup
+        description: get details of cleanup
+        parameters:
+        - in: path
+          name: cleanup_id
+          schema:
+            type: integer
+          required: true
+        responses:
+          200:
+            description: Get details of cleanup
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.CleanupPb'
+          404:
+            description: The cleanup with specified ID is not found
+        """
+        with db.session_scope() as session:
+            cleanup = CleanupService(session).get_cleanup(cleanup_id)
+            return make_flask_response(cleanup)
+
+
+class CleanupCancelApi(Resource):
+
+    @credentials_required
+    @admin_required
+    def post(self, cleanup_id: int):
+        """Cancel a cleanup by id
+        ---
+        tags:
+        - cleanup
+        description: change the state of cleanup
+        parameters:
+        - in: path
+          name: cleanup_id
+          schema:
+            type: integer
+          required: true
+        responses:
+          200:
+            description: The Cleanup's state has been updated
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.CleanupPb'
+          400:
+            description: The param of state in the request body is invalid
+          404:
+            description: The cleanup with specified ID is not found
+        """
+        with db.session_scope() as session:
+            cleanup = CleanupService(session).cancel_cleanup_by_id(cleanup_id)
+            session.commit()
+            return make_flask_response(cleanup)
+
+
+def initialize_cleanup_apis(api: Api):
+    api.add_resource(CleanupsApi, '/cleanups')
+    api.add_resource(CleanupApi, '/cleanups/<int:cleanup_id>')
+    api.add_resource(CleanupCancelApi, '/cleanups/<int:cleanup_id>:cancel')
diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/apis_test.py b/web_console_v2/api/fedlearner_webconsole/cleanup/apis_test.py
new file mode 100644
index 000000000..2194ee6de
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/cleanup/apis_test.py
@@ -0,0 +1,180 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+import urllib.parse
+from http import HTTPStatus
+from datetime import datetime, timezone
+from testing.common import BaseTestCase
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.cleanup.models import ResourceType, CleanupState, Cleanup
+from fedlearner_webconsole.dataset.models import Dataset, DatasetKindV2, DatasetType
+from fedlearner_webconsole.proto.cleanup_pb2 import CleanupPayload, CleanupPb
+from fedlearner_webconsole.utils.pp_datetime import to_timestamp
+from fedlearner_webconsole.utils.proto import to_dict
+
+
+class CleanupsApiTest(BaseTestCase):
+    _TARGET_START_AT = datetime(2022, 2, 22, 10, 10, 12, tzinfo=timezone.utc)
+    _CREATED_AT = datetime(2022, 2, 22, 3, 3, 4, tzinfo=timezone.utc)
+    _CREATED_AT_2 = datetime(2022, 3, 22, 3, 3, 4, tzinfo=timezone.utc)
+
+    class Config(BaseTestCase.Config):
+        START_SCHEDULER = False
+
+    def setUp(self):
+        super().setUp()
+        dataset1 = Dataset(id=1,
+                           name='default_dataset',
+                           dataset_type=DatasetType.PSI,
+                           project_id=1,
+                           dataset_kind=DatasetKindV2.RAW,
+                           path='/data/default_dataset/')
+        self.default_paylaod = CleanupPayload(paths=['/Major333/test_path/a.csv'])
+        cleanup1 = Cleanup(id=1,
+                           state=CleanupState.WAITING,
+                           created_at=self._CREATED_AT,
+                           updated_at=self._CREATED_AT,
+                           target_start_at=self._TARGET_START_AT,
+                           resource_id=dataset1.id,
+                           resource_type=ResourceType(Dataset).name,
+                           payload=self.default_paylaod)
+        cleanup2 = Cleanup(id=2,
+                           state=CleanupState.CANCELED,
+                           created_at=self._CREATED_AT,
+                           updated_at=self._CREATED_AT,
+                           target_start_at=self._TARGET_START_AT,
+                           resource_id=dataset1.id,
+                           resource_type=ResourceType(Dataset).name,
+                           payload=self.default_paylaod)
+        with db.session_scope() as session:
+            session.add(dataset1)
+            session.add(cleanup1)
+            session.add(cleanup2)
+            session.commit()
+        self.signin_as_admin()
+
+    def test_get_without_filter_and_pagination(self):
response = self.get_helper('/api/v2/cleanups') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 2) + + def test_get_with_pagination(self): + response = self.get_helper('/api/v2/cleanups?page=1&page_size=1') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['id'], 1) + + def test_get_with_invalid_filter(self): + response = self.get_helper('/api/v2/cleanups?filter=invalid') + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + def test_get_with_filter(self): + filter_exp = urllib.parse.quote('(and(resource_type="DATASET")(state="WAITING"))') + response = self.get_helper(f'/api/v2/cleanups?filter={filter_exp}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['id'], 1) + + +class CleanupApiTest(BaseTestCase): + _TARGET_START_AT = datetime(2022, 2, 22, 10, 10, 12, tzinfo=timezone.utc) + _CREATED_AT = datetime(2022, 2, 22, 3, 3, 4, tzinfo=timezone.utc) + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + dataset1 = Dataset(id=1, + name='default_dataset', + dataset_type=DatasetType.PSI, + project_id=1, + dataset_kind=DatasetKindV2.RAW, + path='/data/default_dataset/') + self.default_paylaod = CleanupPayload(paths=['/Major333/test_path/a.csv']) + cleanup1 = Cleanup(id=1, + state=CleanupState.WAITING, + created_at=self._CREATED_AT, + updated_at=self._CREATED_AT, + target_start_at=self._TARGET_START_AT, + resource_id=dataset1.id, + resource_type=ResourceType(Dataset).name, + payload=self.default_paylaod) + with db.session_scope() as session: + session.add(dataset1) + session.add(cleanup1) + session.commit() + + def test_get(self): + expected_cleanup_proto = CleanupPb(id=1, + state='WAITING', + completed_at=None, + resource_id=1, + resource_type='DATASET', + payload=self.default_paylaod, + target_start_at=to_timestamp(self._TARGET_START_AT), + updated_at=to_timestamp(self._CREATED_AT), + created_at=to_timestamp(self._CREATED_AT)) + self.signin_as_admin() + response = self.get_helper(f'/api/v2/cleanups/{expected_cleanup_proto.id}') + self.assertEqual(response.status_code, HTTPStatus.OK) + cleanup = self.get_response_data(response) + self.assertEqual(cleanup, to_dict(expected_cleanup_proto)) + + +class CleanupCancelApiTest(BaseTestCase): + _TARGET_START_AT = datetime(2022, 2, 22, 10, 10, 12, tzinfo=timezone.utc) + _CREATED_AT = datetime(2022, 2, 22, 3, 3, 4, tzinfo=timezone.utc) + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + dataset1 = Dataset(id=1, + name='default_dataset', + dataset_type=DatasetType.PSI, + project_id=1, + dataset_kind=DatasetKindV2.RAW, + path='/data/default_dataset/') + self.default_payload = CleanupPayload(paths=['/Major333/test_path/a.csv']) + cleanup1 = Cleanup(id=1, + state=CleanupState.WAITING, + created_at=self._CREATED_AT, + updated_at=self._CREATED_AT, + target_start_at=self._TARGET_START_AT, + resource_id=dataset1.id, + resource_type=ResourceType(Dataset).name, + payload=self.default_payload) + with db.session_scope() as session: + session.add(dataset1) + session.add(cleanup1) + session.commit() + + def test_cleanup_waiting_cancel(self): + self.signin_as_admin() + response = self.post_helper('/api/v2/cleanups/1:cancel', {}) + 
self.assertEqual(response.status_code, HTTPStatus.OK)
+        response = self.get_helper('/api/v2/cleanups/1')
+        cancelled_cleanup = self.get_response_data(response)
+        self.assertEqual(cancelled_cleanup['state'], 'CANCELED')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/cleaner_cronjob.py b/web_console_v2/api/fedlearner_webconsole/cleanup/cleaner_cronjob.py
new file mode 100644
index 000000000..0ba84225e
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/cleanup/cleaner_cronjob.py
@@ -0,0 +1,119 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from typing import Tuple, List
+from fedlearner_webconsole.composer.context import RunnerContext
+from fedlearner_webconsole.composer.interface import IRunnerV2
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.utils.file_manager import FileManager
+from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, CleanupCronJobOutput
+from datetime import timezone, datetime
+from fedlearner_webconsole.cleanup.models import Cleanup, CleanupState
+
+
+class CleanupCronJob(IRunnerV2):
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._file_manager = FileManager()
+
+    def _get_current_utc_time(self):
+        return datetime.now(tz=timezone.utc)
+
+    def _execute_cleanup(self, cleanup: Cleanup):
+        for path in cleanup.payload.paths:
+            if self._file_manager.exists(path):
+                self._file_manager.remove(path)
+        cleanup.state = CleanupState.SUCCEEDED
+        cleanup.completed_at = self._get_current_utc_time()
+
+    def _get_waiting_ids(self) -> List[int]:
+        with db.session_scope() as session:
+            current_time = self._get_current_utc_time()
+            waiting_ids = session.query(Cleanup.id).filter(Cleanup.state == CleanupState.WAITING).filter(
+                Cleanup.target_start_at <= current_time).all()
+            logging.info(f'Collected waiting cleanup ids: {waiting_ids}')
+            # unwrap query result
+            return [cleanup_id for cleanup_id, *_ in waiting_ids]
+
+    def _sweep_waiting_cleanups(self, waiting_list: List[int]):
+        logging.info(f'Will sweep waiting cleanup ids: {waiting_list}')
+        for cleanup_id in waiting_list:
+            with db.session_scope() as session:
+                logging.info(f'Will sweep waiting cleanup: {cleanup_id}')
+                current_time = self._get_current_utc_time()
+                cleanup = session.query(Cleanup).populate_existing().with_for_update().get(cleanup_id)
+                try:
+                    if cleanup and cleanup.state == CleanupState.WAITING and \
+                            cleanup.target_start_at.replace(tzinfo=timezone.utc) <= current_time:
+                        cleanup.state = CleanupState.RUNNING
+                        # Release the lock
+                        session.commit()
+                    else:
+                        logging.warning(f'Cleanup {cleanup_id} was changed or canceled while the waiting '
+                                        f'list was being swept; skipping it.')
+                        # Release the lock
+                        session.rollback()
+                except Exception as e:  # pylint: disable=broad-except
+                    logging.error(f'Cleanup {cleanup_id} failed: {str(e)}')
+                    cleanup.state = CleanupState.FAILED
+                    session.commit()
+
+    def _get_running_ids(self) -> List[int]:
+        with db.session_scope() as session:
+            running_ids = session.query(Cleanup.id).filter(Cleanup.state == CleanupState.RUNNING).all()
+            logging.info(f'Collected running cleanup ids: {running_ids}')
+            # unwrap query result
+            return [cleanup_id for cleanup_id, *_ in running_ids]
+
+    def _sweep_running_cleanups(self, running_list: List[int]) -> Tuple[List[int], List[int]]:
+        logging.info(f'Will sweep running cleanup ids: {running_list}')
+        succeeded_cleanup_ids = []
+        failed_cleanup_ids = []
+        for cleanup_id in running_list:
+            with db.session_scope() as session:
+                logging.info(f'Will sweep running cleanup: {cleanup_id}')
+                cleanup = session.query(Cleanup).populate_existing().with_for_update().get(cleanup_id)
+                try:
+                    if cleanup and cleanup.state == CleanupState.RUNNING:
+                        self._execute_cleanup(cleanup)
+                        # Release the lock
+                        session.commit()
+                        succeeded_cleanup_ids.append(cleanup.id)
+                    else:
+                        logging.warning(f'Cleanup {cleanup_id} was changed or canceled while the running '
+                                        f'list was being swept; skipping it.')
+                        # Release the lock
+                        session.rollback()
+                except Exception as e:  # pylint: disable=broad-except
+                    logging.error(f'Cleanup {cleanup_id} failed: {str(e)}')
+                    cleanup.state = CleanupState.FAILED
+                    session.commit()
+                    failed_cleanup_ids.append(cleanup.id)
+        return succeeded_cleanup_ids, failed_cleanup_ids
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        try:
+            waiting_ids = self._get_waiting_ids()
+            self._sweep_waiting_cleanups(waiting_ids)
+            running_ids = self._get_running_ids()
+            succeeded_cleanup_ids, failed_cleanup_ids = self._sweep_running_cleanups(running_ids)
+            return RunnerStatus.DONE, RunnerOutput(cleanup_cron_job_output=CleanupCronJobOutput(
+                succeeded_cleanup_ids=succeeded_cleanup_ids, failed_cleanup_ids=failed_cleanup_ids))
+        except Exception as e:  # pylint: disable=broad-except
+            logging.error(f'Cleanup cronjob failed: {str(e)}')
+            return RunnerStatus.FAILED, RunnerOutput(error_message=str(e))
diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/cleaner_cronjob_test.py b/web_console_v2/api/fedlearner_webconsole/cleanup/cleaner_cronjob_test.py
new file mode 100644
index 000000000..d99f8c91b
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/cleanup/cleaner_cronjob_test.py
@@ -0,0 +1,93 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
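Taken together, `run()` above moves each `Cleanup` through WAITING -> RUNNING -> SUCCEEDED (or FAILED) in two sweeps per tick, and CANCELED rows are never picked up because both sweeps filter on state. A minimal sketch of driving one tick by hand, using only constructs that appear in this diff:

```python
from fedlearner_webconsole.composer.context import RunnerContext
from fedlearner_webconsole.composer.models import RunnerStatus
from fedlearner_webconsole.proto.composer_pb2 import RunnerInput
from fedlearner_webconsole.cleanup.cleaner_cronjob import CleanupCronJob

# One composer tick: due WAITING cleanups become RUNNING, and RUNNING ones
# have their payload paths removed before ending up SUCCEEDED or FAILED.
runner = CleanupCronJob()
status, output = runner.run(RunnerContext(index=0, input=RunnerInput()))
assert status == RunnerStatus.DONE
# output.cleanup_cron_job_output carries succeeded_cleanup_ids / failed_cleanup_ids.
```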
+# + +import unittest +from datetime import timezone, datetime +from unittest.mock import MagicMock, patch +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.fake_time_patcher import FakeTimePatcher + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput + +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.cleanup.models import Cleanup, CleanupState, ResourceType +from fedlearner_webconsole.cleanup.cleaner_cronjob import CleanupCronJob +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupPayload + + +@patch('fedlearner_webconsole.utils.file_manager.FileManager.exists') +@patch('fedlearner_webconsole.utils.file_manager.FileManager.remove') +class CleanupCronJobTest(NoWebServerTestCase): + _CLEANUP_ID = 1 + + def setUp(self): + super().setUp() + self.time_patcher = FakeTimePatcher() + self.time_patcher.start(datetime(2012, 1, 14, 12, 0, 5, tzinfo=timezone.utc)) + self.default_paylaod = CleanupPayload(paths=['/Major333/test_path/a.csv']) + with db.session_scope() as session: + self.default_cleanup = Cleanup(id=1, + state=CleanupState.WAITING, + target_start_at=datetime(1999, 3, 1, tzinfo=timezone.utc), + resource_id=1, + resource_type=ResourceType(Dataset).name, + payload=self.default_paylaod) + session.add(self.default_cleanup) + session.commit() + + def tearDown(self): + self.time_patcher.stop() + super().tearDown() + + def test_run_failed_alone(self, mock_remove: MagicMock, mock_exists: MagicMock): + # The file always exist + mock_exists.return_value = True + #Failed to delete + mock_remove.side_effect = RuntimeError('fake error') + + runner = CleanupCronJob() + runner_input = RunnerInput() + runner_context = RunnerContext(index=0, input=runner_input) + + status, output = runner.run(runner_context) + self.assertEqual(status, RunnerStatus.DONE) + expected_cleanup_status = CleanupState.FAILED + with db.session_scope() as session: + cleanup = session.query(Cleanup).get(1) + self.assertEqual(expected_cleanup_status, cleanup.state) + + def test_run_success_alone(self, mock_remove: MagicMock, mock_exists: MagicMock): + # The file always exist + mock_exists.return_value = True + #Success to delete + mock_remove.reset_mock(side_effect=True) + + runner = CleanupCronJob() + runner_input = RunnerInput() + runner_context = RunnerContext(index=0, input=runner_input) + + status, output = runner.run(runner_context) + self.assertEqual(status, RunnerStatus.DONE) + expected_cleanup_status = CleanupState.SUCCEEDED + with db.session_scope() as session: + cleanup = session.query(Cleanup).get(1) + self.assertEqual(expected_cleanup_status, cleanup.state) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/models.py b/web_console_v2/api/fedlearner_webconsole/cleanup/models.py new file mode 100644 index 000000000..a11d73dd5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/cleanup/models.py @@ -0,0 +1,88 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum + +from sqlalchemy.sql import func +from google.protobuf import text_format + +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupPayload, CleanupPb +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobStage +from fedlearner_webconsole.mmgr.models import Model +from fedlearner_webconsole.algorithm.models import Algorithm +from fedlearner_webconsole.utils.pp_datetime import to_timestamp + + +# Centralized RegistrationIn +class ResourceType(enum.Enum): + DATASET = Dataset + DATASET_JOB = DatasetJob + DATASET_JOB_STAGE = DatasetJobStage + MODEL = Model + ALGORITHM = Algorithm + NO_RESOURCE = None + + +class CleanupState(enum.Enum): + WAITING = 'WAITING' + RUNNING = 'RUNNING' + SUCCEEDED = 'SUCCEEDED' + FAILED = 'FAILED' + CANCELED = 'CANCELED' + + +class Cleanup(db.Model): + __tablename__ = 'cleanups_v2' + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + state = db.Column(db.Enum(CleanupState, native_enum=False, length=64, create_constraint=False), + default=CleanupState.WAITING, + comment='state') + target_start_at = db.Column(db.DateTime(timezone=True), comment='target_start_at') + completed_at = db.Column(db.DateTime(timezone=True), comment='completed_at') + resource_id = db.Column(db.Integer, comment='resource_id') + resource_type = db.Column(db.Enum(ResourceType, native_enum=False, length=64, create_constraint=False), + comment='resource_type') + _payload = db.Column(db.Text(), name='payload', comment='the underlying resources that need to be cleaned up') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created_at') + updated_at = db.Column(db.DateTime(timezone=True), + server_default=func.now(), + server_onupdate=func.now(), + comment='updated_at') + + @property + def payload(self) -> CleanupPayload: + if not self._payload: + return CleanupPayload() + return text_format.Parse(self._payload, CleanupPayload()) + + @payload.setter + def payload(self, payload: CleanupPayload): + self._payload = text_format.MessageToString(payload) + + @property + def is_cancellable(self): + return self.state in [CleanupState.CANCELED, CleanupState.WAITING] + + def to_proto(self) -> CleanupPb: + return CleanupPb(id=self.id, + state=self.state.name, + target_start_at=to_timestamp(self.target_start_at), + completed_at=to_timestamp(self.completed_at) if self.completed_at else None, + resource_id=self.resource_id, + resource_type=self.resource_type.name, + payload=self.payload, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at)) diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/models_test.py b/web_console_v2/api/fedlearner_webconsole/cleanup/models_test.py new file mode 100644 index 000000000..86049a4a3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/cleanup/models_test.py @@ -0,0 +1,95 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from datetime import datetime, timezone +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.cleanup.models import Cleanup, CleanupState, ResourceType +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupPayload, CleanupPb +from fedlearner_webconsole.utils.pp_datetime import to_timestamp + + +class CleanupTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.default_paylaod = CleanupPayload(paths=['/Major333/test_path/a.csv']) + default_cleanup = Cleanup(id=1, + state=CleanupState.WAITING, + created_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + updated_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + target_start_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + resource_id=100, + resource_type=ResourceType.DATASET.name, + payload=self.default_paylaod) + cleanup_without_resource = Cleanup(id=2, + state=CleanupState.WAITING, + created_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + updated_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + target_start_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + resource_id=100, + resource_type=ResourceType.NO_RESOURCE.name, + payload=self.default_paylaod) + running_cleanup = Cleanup(id=3, + state=CleanupState.RUNNING, + created_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + updated_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + target_start_at=datetime(2022, 2, 22, tzinfo=timezone.utc), + resource_id=100, + resource_type=ResourceType.NO_RESOURCE.name, + payload=self.default_paylaod) + with db.session_scope() as session: + session.add(default_cleanup) + session.add(cleanup_without_resource) + session.add(running_cleanup) + session.commit() + + def test_payload(self): + with db.session_scope() as session: + cleanup = session.query(Cleanup).get(1) + self.assertEqual(cleanup.payload, self.default_paylaod) + cleanup.payload = CleanupPayload(paths=['/Major333/test_path/b.csv']) + session.add(cleanup) + session.commit() + with db.session_scope() as session: + cleanup = session.query(Cleanup).get(1) + self.assertEqual(['/Major333/test_path/b.csv'], cleanup.payload.paths) + + def test_cancellable(self): + with db.session_scope() as session: + default_cleanup = session.query(Cleanup).get(1) + cleanup_without_resource = session.query(Cleanup).get(2) + running_cleanup = session.query(Cleanup).get(3) + self.assertTrue(default_cleanup.is_cancellable) + self.assertTrue(cleanup_without_resource.is_cancellable) + self.assertFalse(running_cleanup.is_cancellable) + + def test_to_proto(self): + expected_cleanup_proto = CleanupPb(id=1, + state='WAITING', + target_start_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc)), + resource_id=100, + resource_type='DATASET', + payload=self.default_paylaod, + updated_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc)), + created_at=to_timestamp(datetime(2022, 2, 22, tzinfo=timezone.utc))) + with db.session_scope() as session: + 
cleanup_proto = session.query(Cleanup).get(1).to_proto() + self.assertEqual(cleanup_proto, expected_cleanup_proto) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/services.py b/web_console_v2/api/fedlearner_webconsole/cleanup/services.py new file mode 100644 index 000000000..57124ad6e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/cleanup/services.py @@ -0,0 +1,83 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Optional +from datetime import datetime, timezone +from sqlalchemy.orm import Session +from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException +from fedlearner_webconsole.cleanup.models import Cleanup, CleanupState +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupParameter, CleanupPb +from fedlearner_webconsole.proto.filtering_pb2 import FilterOp, FilterExpression +from fedlearner_webconsole.utils.paginate import Pagination, paginate +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder + + +class CleanupService(): + + FILTER_FIELDS = { + 'state': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}), + 'resource_type': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}), + 'resource_id': SupportedField(type=FieldType.NUMBER, ops={FilterOp.EQUAL: None}), + } + + def __init__(self, session: Session): + self._session = session + self._filter_builder = FilterBuilder(model_class=Cleanup, supported_fields=self.FILTER_FIELDS) + + def get_cleanups(self, + page: Optional[int] = None, + page_size: Optional[int] = None, + filter_exp: Optional[FilterExpression] = None) -> Pagination: + query = self._session.query(Cleanup) + if filter_exp: + query = self._filter_builder.build_query(query, filter_exp) + query = query.order_by(Cleanup.created_at.desc()) + return paginate(query, page, page_size) + + def get_cleanup(self, cleanup_id: int = 0) -> CleanupPb: + cleanup = self._session.query(Cleanup).get(cleanup_id) + if not cleanup: + raise NotFoundException(f'Failed to find cleanup: {cleanup_id}') + return cleanup.to_proto() + + def create_cleanup(self, cleanup_parmeter: CleanupParameter) -> Cleanup: + cleanup = Cleanup( + state=CleanupState.WAITING, + resource_id=cleanup_parmeter.resource_id, + resource_type=cleanup_parmeter.resource_type, + target_start_at=datetime.fromtimestamp(cleanup_parmeter.target_start_at, tz=timezone.utc), + payload=cleanup_parmeter.payload, + ) + self._session.add(cleanup) + return cleanup + + def _cancel_cleanup(self, cleanup: Cleanup) -> CleanupPb: + if not cleanup.is_cancellable: + error_msg = f'cleanup: {cleanup.id} can not be canceled' + logging.error(error_msg) + raise InvalidArgumentException(error_msg) + cleanup.state = CleanupState.CANCELED + return cleanup.to_proto() + + def cancel_cleanup_by_id(self, cleanup_id: int = 0) -> CleanupPb: + # apply exclusive lock on cleanup to avoid 
race condition on updating its state + cleanup = self._session.query(Cleanup).populate_existing().with_for_update().filter( + Cleanup.id == cleanup_id).first() + if not cleanup: + error_msg = f'there is no cleanup with cleanup_id:{cleanup_id}' + logging.error(error_msg) + raise InvalidArgumentException(error_msg) + return self._cancel_cleanup(cleanup) diff --git a/web_console_v2/api/fedlearner_webconsole/cleanup/services_test.py b/web_console_v2/api/fedlearner_webconsole/cleanup/services_test.py new file mode 100644 index 000000000..516d99c34 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/cleanup/services_test.py @@ -0,0 +1,135 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from datetime import datetime, timezone +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.cleanup.services import CleanupService +from fedlearner_webconsole.cleanup.models import Cleanup, CleanupState, ResourceType +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupPayload, CleanupParameter, CleanupPb +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterExpressionKind, SimpleExpression, FilterOp +from fedlearner_webconsole.dataset.models import Dataset, DatasetType, DatasetKindV2 +from fedlearner_webconsole.utils.resource_name import resource_uuid + + +class CleanupServiceTest(NoWebServerTestCase): + _TARGET_START_AT = datetime(2022, 2, 22, 10, 10, 12, tzinfo=timezone.utc) + _CREATED_AT = datetime(2022, 2, 22, 3, 3, 4, tzinfo=timezone.utc) + + def setUp(self): + super().setUp() + self.default_paylaod = CleanupPayload(paths=['/Major333/test_path/a.csv']) + self.deafult_dataset = Dataset( + id=100, + uuid=resource_uuid(), + name='dataset_1', + dataset_type=DatasetType.PSI, + project_id=100, + dataset_kind=DatasetKindV2.RAW, + path='/data/dataset_1/', + ) + default_cleanup = Cleanup(id=1, + state=CleanupState.WAITING, + created_at=self._CREATED_AT, + updated_at=self._CREATED_AT, + target_start_at=self._TARGET_START_AT, + resource_id=100, + resource_type=ResourceType.DATASET.name, + payload=self.default_paylaod) + default_cleanup_2 = Cleanup(id=2, + state=CleanupState.WAITING, + created_at=self._CREATED_AT, + updated_at=self._CREATED_AT, + target_start_at=self._TARGET_START_AT, + resource_id=100, + resource_type=ResourceType.NO_RESOURCE.name, + payload=self.default_paylaod) + with db.session_scope() as session: + session.add(self.deafult_dataset) + session.add(default_cleanup) + session.add(default_cleanup_2) + session.commit() + + def test_get_cleanups(self): + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='state', + op=FilterOp.EQUAL, + string_value='WAITING', + )) + with db.session_scope() as session: + service = CleanupService(session) + paginations = 
service.get_cleanups(filter_exp=filter_exp, page=1, page_size=2) + cleanup_ids = [cleanup.id for cleanup in paginations.get_items()] + self.assertEqual(cleanup_ids, [1, 2]) + + def test_get_cleanup(self): + expected_cleanup_display_proto = CleanupPb(id=1, + state='WAITING', + target_start_at=to_timestamp(self._TARGET_START_AT), + resource_id=100, + resource_type='DATASET', + payload=self.default_paylaod, + updated_at=to_timestamp(self._CREATED_AT), + created_at=to_timestamp(self._CREATED_AT)) + with db.session_scope() as session: + service = CleanupService(session) + cleanup = service.get_cleanup(cleanup_id=1) + self.assertEqual(cleanup, expected_cleanup_display_proto) + + def test_get_cleanup_without_resource_type(self): + expected_cleanup_display_proto = CleanupPb(id=2, + state='WAITING', + target_start_at=to_timestamp(self._TARGET_START_AT), + resource_id=100, + resource_type='NO_RESOURCE', + payload=self.default_paylaod, + updated_at=to_timestamp(self._CREATED_AT), + created_at=to_timestamp(self._CREATED_AT)) + with db.session_scope() as session: + service = CleanupService(session) + cleanup = service.get_cleanup(cleanup_id=2) + self.assertEqual(cleanup, expected_cleanup_display_proto) + + def test_create_cleanup(self): + cleanup_parm = CleanupParameter(resource_id=1011, + resource_type='DATASET', + target_start_at=to_timestamp(self._TARGET_START_AT), + payload=self.default_paylaod) + with db.session_scope() as session: + cleanup = CleanupService(session).create_cleanup(cleanup_parm) + session.commit() + cleanup_id = cleanup.id + with db.session_scope() as session: + created_cleanup: Cleanup = session.query(Cleanup).get(cleanup_id) + self.assertEqual(created_cleanup.resource_type, ResourceType.DATASET) + self.assertEqual(created_cleanup.resource_id, 1011) + self.assertEqual(to_timestamp(created_cleanup.target_start_at), to_timestamp(self._TARGET_START_AT)) + self.assertEqual(created_cleanup.payload, self.default_paylaod) + + def test_cancel_cleanup_by_id(self): + with db.session_scope() as session: + service = CleanupService(session) + service.cancel_cleanup_by_id(1) + session.commit() + with db.session_scope() as session: + cleanup = session.query(Cleanup).get(1) + self.assertEqual(cleanup.state, CleanupState.CANCELED) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/composer/BUILD.bazel new file mode 100644 index 000000000..460a2c722 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/BUILD.bazel @@ -0,0 +1,253 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "composer_service_lib", + srcs = ["composer_service.py"], + imports = ["../.."], + deps = [ + ":common_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_croniter//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "composer_service_lib_test", + srcs = [ + "composer_service_test.py", + ], + imports = ["../.."], + main = "composer_service_test.py", + deps = [ + ":common_lib", + ":composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "common_lib", + srcs = [ + "context.py", + "interface.py", + "models.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_croniter//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "context_test", + srcs = [ + "context_test.py", + ], + imports = ["../.."], + main = "context_test.py", + deps = [ + ":common_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "models_test", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":common_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "runner_lib", + srcs = [ + "runner.py", + ], + imports = ["../.."], + visibility = ["//visibility:private"], + deps = [ + ":common_lib", + "//web_console_v2/api/fedlearner_webconsole/cleanup:cleaner_cronjob_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:batch_stats_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/scheduler", + "//web_console_v2/api/fedlearner_webconsole/job:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:cronjob_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/project:project_scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/serving:runners_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:runners_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:cronjob_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_scheduler_lib", + ], +) + +py_library( + name = "composer_lib", + srcs = [ + "composer.py", + "context.py", + "op_locker.py", + "pipeline.py", + "strategy.py", + "thread_reaper.py", + ], + imports = ["../.."], + deps = [ + ":common_lib", + ":runner_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_time_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio_health_checking/grpc_health/v1:grpc_health", + "@com_github_grpc_grpc//src/python/grpcio_health_checking/grpc_health/v1:health_py_pb2", + "@com_github_grpc_grpc//src/python/grpcio_health_checking/grpc_health/v1:health_py_pb2_grpc", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "composer_test", + srcs = [ + "composer_test.py", + ], + imports = ["../.."], + main = "composer_test.py", + deps = [ + ":composer_lib", + ":composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + 
"//web_console_v2/api/testing/composer", + ], +) + +py_test( + name = "op_locker_test", + srcs = [ + "op_locker_test.py", + ], + imports = ["../.."], + main = "op_locker_test.py", + deps = [ + ":common_lib", + ":composer_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "pipeline_test", + srcs = [ + "pipeline_test.py", + ], + imports = ["../.."], + main = "pipeline_test.py", + deps = [ + ":common_lib", + ":composer_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/composer", + ], +) + +py_test( + name = "strategy_test", + srcs = [ + "strategy_test.py", + ], + imports = ["../.."], + main = "strategy_test.py", + deps = [ + ":common_lib", + ":composer_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/composer", + ], +) + +py_test( + name = "thread_reaper_test", + srcs = [ + "thread_reaper_test.py", + ], + # It's unpredictable to keep first runner running when same runner are submitted again. + # Ref: web_console_v2/api/fedlearner_webconsole/composer/thread_reaper_test.py:ThreadReaperTest.test_submit + flaky = True, + imports = ["../.."], + main = "thread_reaper_test.py", + deps = [ + ":common_lib", + ":composer_lib", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/composer", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":common_lib", + ":composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/composer/apis.py b/web_console_v2/api/fedlearner_webconsole/composer/apis.py new file mode 100644 index 000000000..cb7aec460 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/apis.py @@ -0,0 +1,246 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from flask_restful import Resource, Api +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.models import ItemStatus, SchedulerItem +from fedlearner_webconsole.composer.composer_service import SchedulerItemService, SchedulerRunnerService +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.utils.decorators.pp_flask import admin_required +from fedlearner_webconsole.utils.flask_utils import make_flask_response, FilterExpField +from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException +from webargs.flaskparser import use_kwargs, use_args +from marshmallow import Schema, fields, validate + + +class ListSchedulerItemsParams(Schema): + filter = FilterExpField(required=False, load_default=None) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) + + +class ListSchedulerRunnersParams(Schema): + filter = FilterExpField(required=False, load_default=None) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) + + +class SchedulerItemsApi(Resource): + + @credentials_required + @admin_required + @use_args(ListSchedulerItemsParams(), location='query') + def get(self, params: dict): + """Get a list of all scheduler items. + --- + tags: + - composer + description: Get a list of all scheduler items. + parameters: + - in: query + name: filter + schema: + type: string + required: false + - in: query + name: page + schema: + type: integer + required: false + - in: query + name: page_size + schema: + type: integer + required: false + responses: + 200: + description: Get a list of all scheduler items result + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.SchedulerItemPb' + """ + with db.session_scope() as session: + try: + pagination = SchedulerItemService(session).get_scheduler_items( + filter_exp=params['filter'], + page=params['page'], + page_size=params['page_size'], + ) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + data = [t.to_proto() for t in pagination.get_items()] + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + +class SchedulerItemApi(Resource): + + @credentials_required + @admin_required + @use_args(ListSchedulerRunnersParams(), location='query') + def get(self, params: dict, item_id: int): + """Get all scheduler runners by item_id + --- + tags: + - composer + description: Get all scheduler runners by item_id + parameters: + - in: path + name: item_id + schema: + type: integer + required: true + description: The ID of the scheduler item. 
+ - in: query
+ name: filter
+ schema:
+ type: string
+ required: false
+ - in: query
+ name: page
+ schema:
+ type: integer
+ required: false
+ - in: query
+ name: page_size
+ schema:
+ type: integer
+ required: false
+ responses:
+ 200:
+ description: Get all scheduler runners by item_id
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: '#/definitions/fedlearner_webconsole.proto.SchedulerRunnerPb'
+ """
+ with db.session_scope() as session:
+ try:
+ pagination = SchedulerRunnerService(session).get_scheduler_runners(filter_exp=params['filter'],
+ item_id=item_id,
+ page=params['page'],
+ page_size=params['page_size'])
+ except ValueError as e:
+ raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e
+ data = [t.to_proto() for t in pagination.get_items()]
+ return make_flask_response(data=data, page_meta=pagination.get_metadata())
+
+ @credentials_required
+ @admin_required
+ @use_kwargs(
+ {'status': fields.Str(required=True, validate=validate.OneOf([ItemStatus.ON.name, ItemStatus.OFF.name]))},
+ location='json')
+ def patch(self, item_id: int, status: str):
+ """Change the status of a scheduler item
+ ---
+ tags:
+ - composer
+ description: change SchedulerItem status
+ parameters:
+ - in: path
+ required: true
+ name: item_id
+ schema:
+ type: integer
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ status:
+ type: string
+ responses:
+ 200:
+ description: The SchedulerItem's status has been updated
+ content:
+ application/json:
+ schema:
+ $ref: '#/definitions/fedlearner_webconsole.proto.SchedulerItemPb'
+ 400:
+ description: The status param in the request body is invalid
+ 404:
+ description: The scheduler item with the specified ID is not found
+ """
+ with db.session_scope() as session:
+ scheduler_item = session.query(SchedulerItem).filter_by(id=item_id).first()
+ if not scheduler_item:
+ raise NotFoundException(f'Failed to find scheduler_item: {item_id}')
+ try:
+ scheduler_item.status = ItemStatus[status].value
+ session.commit()
+ except KeyError as e:
+ # ItemStatus[...] raises KeyError for an unknown status name
+ raise InvalidArgumentException(f'Invalid argument for status: {status}') from e
+ return make_flask_response(scheduler_item.to_proto())
+
+
+class SchedulerRunnersApi(Resource):
+
+ @credentials_required
+ @admin_required
+ @use_args(ListSchedulerRunnersParams(), location='query')
+ def get(self, params: dict):
+ """Get a list of all scheduler runners
+ ---
+ tags:
+ - composer
+ description: get scheduler runners list
+ parameters:
+ - in: query
+ name: filter
+ schema:
+ type: string
+ required: false
+ - in: query
+ name: page
+ schema:
+ type: integer
+ required: false
+ - in: query
+ name: page_size
+ schema:
+ type: integer
+ required: false
+ responses:
+ 200:
+ description: Get scheduler runners list result
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: '#/definitions/fedlearner_webconsole.proto.SchedulerRunnerPb'
+ """
+ with db.session_scope() as session:
+ try:
+ pagination = SchedulerRunnerService(session).get_scheduler_runners(filter_exp=params['filter'],
+ page=params['page'],
+ page_size=params['page_size'])
+ except ValueError as e:
+ raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e
+ data = [t.to_proto() for t in pagination.get_items()]
+ return make_flask_response(data=data, page_meta=pagination.get_metadata())
+
+
+def initialize_composer_apis(api: Api):
+ api.add_resource(SchedulerItemsApi, '/scheduler_items')
+ api.add_resource(SchedulerItemApi,
'/scheduler_items/<int:item_id>')
+ api.add_resource(SchedulerRunnersApi, '/scheduler_runners') diff --git a/web_console_v2/api/fedlearner_webconsole/composer/apis_test.py b/web_console_v2/api/fedlearner_webconsole/composer/apis_test.py new file mode 100644 index 000000000..965534e25 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/apis_test.py @@ -0,0 +1,236 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+import urllib.parse
+from http import HTTPStatus
+from datetime import datetime
+from testing.common import BaseTestCase
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.composer.models import SchedulerItem, SchedulerRunner, ItemStatus, RunnerStatus
+import json
+from fedlearner_webconsole.utils.proto import to_json
+from fedlearner_webconsole.proto import composer_pb2
+
+
+class SchedulerItemsApiTest(BaseTestCase):
+
+ def setUp(self):
+ super().setUp()
+ # items 1001-1004 cover cron and non-cron jobs in both "ON" and "OFF" status
+ scheduler_item_off = SchedulerItem(id=1001,
+ name='test_item_off',
+ status=ItemStatus.OFF.value,
+ created_at=datetime(2022, 1, 1, 12, 0, 0),
+ updated_at=datetime(2022, 1, 1, 12, 0, 0))
+ scheduler_item_on = SchedulerItem(id=1002,
+ name='test_item_on',
+ status=ItemStatus.ON.value,
+ created_at=datetime(2022, 1, 1, 12, 0, 0),
+ updated_at=datetime(2022, 1, 1, 12, 0, 0))
+ scheduler_item_on_cron = SchedulerItem(id=1003,
+ name='test_item_on_cron',
+ cron_config='*/20 * * * *',
+ status=ItemStatus.ON.value,
+ created_at=datetime(2022, 1, 1, 12, 0, 0),
+ updated_at=datetime(2022, 1, 1, 12, 0, 0))
+ scheduler_item_off_cron = SchedulerItem(id=1004,
+ name='test_item_off_cron',
+ cron_config='*/20 * * * *',
+ status=ItemStatus.OFF.value,
+ created_at=datetime(2022, 1, 1, 12, 0, 0),
+ updated_at=datetime(2022, 1, 1, 12, 0, 0))
+ scheduler_item_for_id_test = SchedulerItem(id=201,
+ name='scheduler_item_for_id_test',
+ cron_config='*/20 * * * *',
+ status=ItemStatus.ON.value,
+ created_at=datetime(2022, 1, 1, 12, 0, 0),
+ updated_at=datetime(2022, 1, 1, 12, 0, 0))
+
+ with db.session_scope() as session:
+ session.add(scheduler_item_on)
+ session.add(scheduler_item_off)
+ session.add(scheduler_item_on_cron)
+ session.add(scheduler_item_off_cron)
+ session.add(scheduler_item_for_id_test)
+ session.commit()
+ self.signin_as_admin()
+
+ def test_get_with_pagination(self):
+ response = self.get_helper('/api/v2/scheduler_items?page=1&page_size=1')
+ self.assertEqual(response.status_code, HTTPStatus.OK)
+ data = self.get_response_data(response)
+ self.assertEqual(len(data), 1)
+ self.assertEqual(data[0]['id'], 1004)
+
+ def test_get_with_invalid_filter(self):
+ response = self.get_helper('/api/v2/scheduler_items?filter=invalid')
+ self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
+
+ def test_get_with_id_filter(self):
+ filter_exp = urllib.parse.quote('(id=201)')
+ response = self.get_helper(f'/api/v2/scheduler_items?filter={filter_exp}')
+
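+ # Note: the filter uses the console's expression DSL (see the three-filter test
+ # below, e.g. '(and(is_cron=true)(status="OFF"))') and must be URL-encoded with
+ # urllib.parse.quote before being passed as a query param.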
self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'scheduler_item_for_id_test') + + def test_get_with_three_filter(self): + filter_exp = urllib.parse.quote('(and(is_cron=true)(status="OFF")(name~="item_off_cron"))') + response = self.get_helper(f'/api/v2/scheduler_items?filter={filter_exp}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['status'], ItemStatus.OFF.name) + self.assertNotEqual(data[0]['cron_config'], '') + + def test_get_with_single_filter(self): + filter_exp = urllib.parse.quote('(status="OFF")') + response = self.get_helper(f'/api/v2/scheduler_items?filter={filter_exp}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['status'], ItemStatus.OFF.name) + + +class SchedulerItemApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + scheduler_item_on = SchedulerItem(id=100, + name='test_item_on', + status=ItemStatus.ON.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + self.default_scheduler_item = scheduler_item_on + self.default_context = json.dumps({'outputs': {'0': {'job_scheduler_output': {}}}}) + self.default_output = to_json(composer_pb2.RunnerOutput(error_message='error1')) + runner_init = SchedulerRunner(id=0, + item_id=100, + status=RunnerStatus.INIT.value, + context=self.default_context, + output=self.default_output) + runner_running_1 = SchedulerRunner(id=1, + item_id=100, + status=RunnerStatus.RUNNING.value, + context=self.default_context, + output=self.default_output) + runner_running_2 = SchedulerRunner(id=2, + item_id=100, + status=RunnerStatus.RUNNING.value, + context=self.default_context, + output=self.default_output) + + with db.session_scope() as session: + session.add(scheduler_item_on) + session.add(runner_init) + session.add(runner_running_1) + session.add(runner_running_2) + session.commit() + self.signin_as_admin() + + def test_get_runners_without_pagination(self): + response = self.get_helper(f'/api/v2/scheduler_items/{self.default_scheduler_item.id}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 3) + + def test_get_with_pagination(self): + response = self.get_helper(f'/api/v2/scheduler_items/{self.default_scheduler_item.id}?page=1&page_size=1') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + + def test_change_scheduleritem_status(self): + get_response = self.patch_helper(f'/api/v2/scheduler_items/{self.default_scheduler_item.id}', + data={'status': 'OFF'}) + self.assertEqual(get_response.status_code, HTTPStatus.OK) + with db.session_scope() as session: + dataset = session.query(SchedulerItem).get(self.default_scheduler_item.id) + self.assertEqual(dataset.status, ItemStatus.OFF.value) + + def test_change_scheduleritem_status_with_invalid_argument(self): + get_response = self.patch_helper(f'/api/v2/scheduler_items/{self.default_scheduler_item.id}', + data={'status': 'RUNNING'}) + self.assertEqual(get_response.status_code, HTTPStatus.BAD_REQUEST) + with db.session_scope() as session: + dataset = session.query(SchedulerItem).get(self.default_scheduler_item.id) + self.assertEqual(dataset.status, 
ItemStatus.ON.value) + + +class SchedulerRunnersApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + scheduler_item_on = SchedulerItem(id=100, + name='test_item_on', + status=ItemStatus.ON.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + self.default_scheduler_item = scheduler_item_on + self.default_context = json.dumps({'outputs': {'0': {'job_scheduler_output': {}}}}) + self.default_output = to_json(composer_pb2.RunnerOutput(error_message='error1')) + runner_init = SchedulerRunner(id=0, + item_id=100, + status=RunnerStatus.INIT.value, + context=self.default_context, + output=self.default_output) + runner_running = SchedulerRunner(id=1, + item_id=100, + status=RunnerStatus.RUNNING.value, + context=self.default_context) + runner_running_2 = SchedulerRunner(id=2, + item_id=100, + status=RunnerStatus.RUNNING.value, + context=self.default_context) + runner_done = SchedulerRunner(id=3, item_id=100, status=RunnerStatus.DONE.value) + + with db.session_scope() as session: + session.add(scheduler_item_on) + session.add(runner_init) + session.add(runner_running) + session.add(runner_running_2) + session.add(runner_done) + session.commit() + self.signin_as_admin() + + def test_get_without_filter_and_pagination(self): + response = self.get_helper('/api/v2/scheduler_runners') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 4) + + def test_get_with_pagination(self): + response = self.get_helper('/api/v2/scheduler_runners?page=1&page_size=1') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + + def test_get_with_invalid_filter(self): + response = self.get_helper('/api/v2/scheduler_runners?filter=invalid') + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + def test_get_with_filter(self): + filter_exp = urllib.parse.quote('(status="RUNNING")') + response = self.get_helper(f'/api/v2/scheduler_runners?filter={filter_exp}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['status'], RunnerStatus.RUNNING.name) + self.assertEqual(data[1]['status'], RunnerStatus.RUNNING.name) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/composer.py b/web_console_v2/api/fedlearner_webconsole/composer/composer.py index e0040ba99..e23a0eec1 100644 --- a/web_console_v2/api/fedlearner_webconsole/composer/composer.py +++ b/web_console_v2/api/fedlearner_webconsole/composer/composer.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,69 +13,53 @@ # limitations under the License. 
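For orientation, a minimal sketch of how the rewritten composer is driven; the names are taken from this diff and its tests, and the snippet is illustrative rather than part of the patch:

from fedlearner_webconsole.composer.composer import Composer, ComposerConfig
from fedlearner_webconsole.composer.interface import ItemType
from fedlearner_webconsole.db import db
from testing.composer.common import TestRunner

# Map runner types to runner implementations; run() starts the scheduling loop
# and the gRPC health server as daemon threads.
composer = Composer(config=ComposerConfig(runner_fn={ItemType.TASK.value: TestRunner}, name='example composer'))
composer.run(db_engine=db.engine)
# ... later, signal the loop to stop (stop() also joins the loop thread):
composer.stop()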
# coding: utf-8
-
-import json
 import logging
-import time
 import threading
 import traceback
 from datetime import datetime
-from typing import List, Optional
+from envs import Envs
 from sqlalchemy import func
 from sqlalchemy.engine import Engine
+from fedlearner_webconsole.composer.strategy import SingletonStrategy
+from fedlearner_webconsole.proto import composer_pb2
+from fedlearner_webconsole.proto.composer_pb2 import PipelineContextData
+from fedlearner_webconsole.utils import pp_time
 from fedlearner_webconsole.db import get_session
 from fedlearner_webconsole.composer.runner import global_runner_fn
-from fedlearner_webconsole.composer.runner_cache import RunnerCache
-from fedlearner_webconsole.composer.interface import IItem
-from fedlearner_webconsole.composer.models import Context, decode_context, \
- ContextEncoder, SchedulerItem, ItemStatus, SchedulerRunner, RunnerStatus
+from fedlearner_webconsole.composer.models import SchedulerItem, ItemStatus, SchedulerRunner, RunnerStatus
+from fedlearner_webconsole.composer.pipeline import PipelineExecutor
 from fedlearner_webconsole.composer.op_locker import OpLocker
 from fedlearner_webconsole.composer.thread_reaper import ThreadReaper
+import grpc
+from concurrent import futures
+from grpc_health.v1 import health
+from grpc_health.v1 import health_pb2_grpc


 class ComposerConfig(object):
+
 def __init__(
 self,
 runner_fn: dict,
- name='default_name',
- worker_num=10,
+ name: str = 'default_name',
+ worker_num: int = 20,
 ):
 """Config for composer

 Args:
- runner_fn: runner functions
- name: composer name
- worker_num: number of worker doing heavy job
+ runner_fn (dict): runner functions
+ name (str): composer name
+ worker_num (int): number of workers for heavy jobs
 """
 self.runner_fn = runner_fn
 self.name = name
 self.worker_num = worker_num


-class Pipeline(object):
- def __init__(self, name: str, deps: List[str], meta: dict):
- """Define the deps of scheduler item
-
- Fields:
- name: pipeline name
- deps: items to be processed in order
- meta: additional info
- """
- self.name = name
- self.deps = deps
- self.meta = meta
-
-
-class PipelineEncoder(json.JSONEncoder):
- def default(self, obj):
- return obj.__dict__
-
-
 class Composer(object):
- # attributes that you can patch
- MUTABLE_ITEM_KEY = ['interval_time', 'retry_cnt']
+ LOOP_INTERVAL = 5

 def __init__(self, config: ComposerConfig):
 """Composer @@ -85,18 +69,37 @@ def __init__(self, config: ComposerConfig):
 """
 self.config = config
 self.name = config.name
- self.runner_fn = config.runner_fn
 self.db_engine = None
 self.thread_reaper = ThreadReaper(worker_num=config.worker_num)
- self.runner_cache = RunnerCache(runner_fn=config.runner_fn)
+ self.pipeline_executor = PipelineExecutor(
+ thread_reaper=self.thread_reaper,
+ db_engine=self.db_engine,
+ runner_fns=config.runner_fn,
+ )
 self.lock = threading.Lock()
 self._stop = False
+ self._loop_thread = None
+ self._grpc_server_thread = None

 def run(self, db_engine: Engine):
 self.db_engine = db_engine
+ self.pipeline_executor.db_engine = db_engine
 logging.info(f'[composer] starting {self.name}...')
- loop = threading.Thread(target=self._loop, args=[], daemon=True)
- loop.start()
+ self._loop_thread = threading.Thread(target=self._loop, args=[], daemon=True)
+ self._loop_thread.start()
+ self._grpc_server_thread = threading.Thread(target=self._run, args=[], daemon=True)
+ self._grpc_server_thread.start()
+
+ def wait_for_termination(self):
+ self._loop_thread.join()
+ self._grpc_server_thread.join()
+
+ def _run(self):
+ grpc_server =
grpc.server(futures.ThreadPoolExecutor(max_workers=1)) + health_pb2_grpc.add_HealthServicer_to_server(health.HealthServicer(), grpc_server) + grpc_server.add_insecure_port(f'[::]:{Envs.COMPOSER_LISTEN_PORT}') + grpc_server.start() + grpc_server.wait_for_termination() def _loop(self): while True: @@ -111,300 +114,106 @@ def _loop(self): self._check_init_runners() self._check_running_runners() except Exception as e: # pylint: disable=broad-except - logging.error(f'[composer] something wrong, exception: {e}, ' - f'trace: {traceback.format_exc()}') - time.sleep(5) + logging.error(f'[composer] something wrong, exception: {e}, ' f'trace: {traceback.format_exc()}') + pp_time.sleep(self.LOOP_INTERVAL) def stop(self): logging.info(f'[composer] stopping {self.name}...') with self.lock: self._stop = True - - def collect(self, - name: str, - items: List[IItem], - metadata: dict, - interval: int = -1): - """Collect scheduler item - - Args: - name: item name, should be unique - items: specify dependencies - metadata: pass metadata to share with item dependencies each other - interval: if value is -1, it's run-once job, or run - every interval time in seconds - """ - if len(name) == 0: - return - valid_interval = interval == -1 or interval >= 10 - if not valid_interval: # seems non-sense if interval is less than 10 - raise ValueError('interval should not less than 10 if not -1') - with get_session(self.db_engine) as session: - # check name if exists - existed = session.query(SchedulerItem).filter_by(name=name).first() - if existed: - return - item = SchedulerItem( - name=name, - pipeline=PipelineEncoder().encode( - self._build_pipeline(name, items, metadata)), - interval_time=interval, - ) - session.add(item) - try: - session.commit() - except Exception as e: # pylint: disable=broad-except - logging.error(f'[composer] failed to create scheduler_item, ' - f'name: {name}, exception: {e}') - session.rollback() - - def finish(self, name: str): - """Finish item - - Args: - name: item name - """ - with get_session(self.db_engine) as session: - existed = session.query(SchedulerItem).filter_by( - name=name, status=ItemStatus.ON.value).first() - if not existed: - return - existed.status = ItemStatus.OFF.value - try: - session.commit() - except Exception as e: # pylint: disable=broad-except - logging.error(f'[composer] failed to finish scheduler_item, ' - f'name: {name}, exception: {e}') - session.rollback() - - def get_item_status(self, name: str) -> Optional[ItemStatus]: - """Get item status - - Args: - name: item name - """ - with get_session(self.db_engine) as session: - existed = session.query(SchedulerItem).filter( - SchedulerItem.name == name).first() - if not existed: - return None - return ItemStatus(existed.status) - - def patch_item_attr(self, name: str, key: str, value: str): - """ patch item args - - Args: - name (str): name of this item - key (str): key you want to update - value (str): value you wnat to set - - Returns: - Raise if some check violates - """ - if key not in self.__class__.MUTABLE_ITEM_KEY: - raise ValueError(f'fail to change attribute {key}') - - with get_session(self.db_engine) as session: - item: SchedulerItem = session.query(SchedulerItem).filter( - SchedulerItem.name == name).first() - if not item: - raise ValueError(f'cannot find item {name}') - setattr(item, key, value) - session.add(item) - try: - session.commit() - except Exception as e: # pylint: disable=broad-except - logging.error(f'[composer] failed to patch item attr, ' - f'name: {name}, exception: {e}') - 
session.rollback()
-
- def get_recent_runners(self,
- name: str,
- count: int = 10) -> List[SchedulerRunner]:
- """Get recent runners order by created_at in desc
-
- Args:
- name: item name
- count: the number of runners
- """
- with get_session(self.db_engine) as session:
- runners = session.query(SchedulerRunner).join(
- SchedulerItem,
- SchedulerItem.id == SchedulerRunner.item_id).filter(
- SchedulerItem.name == name).order_by(
- SchedulerRunner.created_at.desc()).limit(count)
- if not runners:
- return []
- return runners
+ if self._loop_thread is not None:
+ self._loop_thread.join(timeout=self.LOOP_INTERVAL * 2)

 def _check_items(self):
 with get_session(self.db_engine) as session:
- items = session.query(SchedulerItem).filter_by(
- status=ItemStatus.ON.value).all()
+ items = session.query(SchedulerItem).filter_by(status=ItemStatus.ON.value).all()
 for item in items:
- if not item.need_run():
+ if not SingletonStrategy(session).should_run(item):
+ continue
+
+ pipeline: composer_pb2.Pipeline = item.get_pipeline()
+ if pipeline.version != 2:
+ logging.error(f'[Composer] Invalid pipeline in item {item.id}')
+ item.status = ItemStatus.OFF.value
+ session.commit()
 continue
- # NOTE: use `func.now()` to let sqlalchemy handles
+ runner = SchedulerRunner(item_id=item.id)
+ runner.set_pipeline(pipeline)
+ runner.set_context(PipelineContextData())
+ session.add(runner)
+
+ # NOTE: use sqlalchemy's `func.now()` to let it handle
 # the timezone.
 item.last_run_at = func.now()
- if item.interval_time < 0:
+ if not item.cron_config:
 # finish run-once item automatically
 item.status = ItemStatus.OFF.value
- pp = Pipeline(**(json.loads(item.pipeline)))
- context = Context(data=pp.meta,
- internal={},
- db_engine=self.db_engine)
- runner = SchedulerRunner(
- item_id=item.id,
- pipeline=item.pipeline,
- context=ContextEncoder().encode(context),
- )
- session.add(runner)
- try:
- logging.info(
- f'[composer] insert runner, item_id: {item.id}')
- session.commit()
- except Exception as e: # pylint: disable=broad-except
- logging.error(
- f'[composer] failed to create scheduler_runner, '
- f'item_id: {item.id}, exception: {e}')
- session.rollback()
+
+ logging.info(f'[composer] insert runner, item_id: {item.id}')
+ session.commit()

 def _check_init_runners(self):
 with get_session(self.db_engine) as session:
- init_runners = session.query(SchedulerRunner).filter_by(
- status=RunnerStatus.INIT.value).all()
+ init_runner_ids = session.query(SchedulerRunner.id).filter_by(status=RunnerStatus.INIT.value).all()
 # TODO: support priority
- for runner in init_runners:
- # if thread_reaper is full, skip this round and
- # wait next checking
- if self.thread_reaper.is_full():
- return
- lock_name = f'check_init_runner_{runner.id}_lock'
- check_lock = OpLocker(lock_name, self.db_engine).try_lock()
- if not check_lock:
- logging.error(f'[composer] failed to lock, '
- f'ignore current init_runner_{runner.id}')
- continue
- pipeline = Pipeline(**(json.loads(runner.pipeline)))
- context = decode_context(val=runner.context,
- db_engine=self.db_engine)
- # find the first job in pipeline
- first = pipeline.deps[0]
+ for runner_id, *_ in init_runner_ids:
+ # if thread_reaper is full, skip this round and
+ # wait for the next check
+ if self.thread_reaper.is_full():
+ logging.info('[composer] thread_reaper is full now, waiting for other items to finish')
+ return
+ lock_name = f'check_init_runner_{runner_id}_lock'
+ check_lock = OpLocker(lock_name, self.db_engine).try_lock()
+ if not check_lock:
+ logging.error(f'[composer] failed to lock, 
ignore current init_runner_{runner_id}')
+ continue
+ with get_session(self.db_engine) as session:
+ runner: SchedulerRunner = session.query(SchedulerRunner).get(runner_id)
 # update status
 runner.start_at = func.now()
 runner.status = RunnerStatus.RUNNING.value
- output = json.loads(runner.output)
- output[first] = {'status': RunnerStatus.RUNNING.value}
- runner.output = json.dumps(output)
- # record current running job
- context.set_internal('current', first)
- runner.context = ContextEncoder().encode(context)
- # start runner
- runner_fn = self.runner_cache.find_runner(runner.id, first)
- self.thread_reaper.enqueue(name=lock_name,
- fn=runner_fn,
- context=context)
+ pipeline: composer_pb2.Pipeline = runner.get_pipeline()
+ if pipeline.version != 2:
+ logging.error(f'[Composer] Invalid pipeline in runner {runner.id}')
+ runner.status = RunnerStatus.FAILED.value
+ session.commit()
+ continue
 try:
- logging.info(
- f'[composer] update runner, status: {runner.status}, '
- f'pipeline: {runner.pipeline}, '
- f'output: {output}, context: {runner.context}')
+ logging.info(f'[composer] update runner, status: {runner.status}, '
+ f'pipeline: {runner.pipeline}, '
+ f'context: {runner.context}')
 if check_lock.is_latest_version() and \
 check_lock.update_version():
 session.commit()
 else:
- logging.error(f'[composer] {lock_name} is outdated, '
- f'ignore updates to database')
+ logging.error(f'[composer] {lock_name} is outdated, ignore updates to database')
 except Exception as e: # pylint: disable=broad-except
- logging.error(f'[composer] failed to update init runner'
- f'status, exception: {e}')
+ logging.error(f'[composer] failed to update init runner status, exception: {e}')
 session.rollback()

 def _check_running_runners(self):
 with get_session(self.db_engine) as session:
- running_runners = session.query(SchedulerRunner).filter_by(
- status=RunnerStatus.RUNNING.value).all()
- for runner in running_runners:
- if self.thread_reaper.is_full():
- return
- lock_name = f'check_running_runner_{runner.id}_lock'
- check_lock = OpLocker(lock_name, self.db_engine).try_lock()
- if not check_lock:
- logging.error(f'[composer] failed to lock, '
- f'ignore current running_runner_{runner.id}')
- continue
+ running_runner_ids = session.query(SchedulerRunner.id).filter_by(status=RunnerStatus.RUNNING.value).all()
+ for runner_id, *_ in running_runner_ids:
+ if self.thread_reaper.is_full():
+ logging.info('[composer] thread_reaper is full now, waiting for other items to finish')
+ return
+ lock_name = f'check_running_runner_{runner_id}_lock'
+ check_lock = OpLocker(lock_name, self.db_engine).try_lock()
+ if not check_lock:
+ logging.error(f'[composer] failed to lock, ' f'ignore current running_runner_{runner_id}')
+ continue
+ with get_session(self.db_engine) as session:
 # TODO: restart runner if exit unexpectedly
- pipeline = Pipeline(**(json.loads(runner.pipeline)))
- output = json.loads(runner.output)
- context = decode_context(val=runner.context,
- db_engine=self.db_engine)
- current = context.internal['current']
- runner_fn = self.runner_cache.find_runner(runner.id, current)
- # check status of current one
- status, current_output = runner_fn.result(context)
- if status == RunnerStatus.RUNNING:
- continue # ignore
- if status == RunnerStatus.DONE:
- output[current] = {'status': RunnerStatus.DONE.value}
- context.set_internal(f'output_{current}', current_output)
- current_idx = pipeline.deps.index(current)
- if current_idx == len(pipeline.deps) - 1: # all done
- runner.status = RunnerStatus.DONE.value
- runner.end_at =
func.now() - else: # run next one - next_one = pipeline.deps[current_idx + 1] - output[next_one] = { - 'status': RunnerStatus.RUNNING.value - } - context.set_internal('current', next_one) - next_runner_fn = self.runner_cache.find_runner( - runner.id, next_one) - self.thread_reaper.enqueue(name=lock_name, - fn=next_runner_fn, - context=context) - elif status == RunnerStatus.FAILED: - # TODO: abort now, need retry - output[current] = {'status': RunnerStatus.FAILED.value} - context.set_internal(f'output_{current}', current_output) + runner = session.query(SchedulerRunner).get(runner_id) + pipeline = runner.get_pipeline() + if pipeline.version != 2: + logging.error(f'[Composer] Invalid pipeline in runner {runner.id}') runner.status = RunnerStatus.FAILED.value - runner.end_at = func.now() - - runner.pipeline = PipelineEncoder().encode(pipeline) - runner.output = json.dumps(output) - runner.context = ContextEncoder().encode(context) - - updated_db = False - try: - logging.info( - f'[composer] update runner, status: {runner.status}, ' - f'pipeline: {runner.pipeline}, ' - f'output: {output}, context: {runner.context}') - if check_lock.is_latest_version(): - if check_lock.update_version(): - session.commit() - updated_db = True - else: - logging.error(f'[composer] {lock_name} is outdated, ' - f'ignore updates to database') - except Exception as e: # pylint: disable=broad-except - logging.error(f'[composer] failed to update running ' - f'runner status, exception: {e}') - session.rollback() - - # delete useless runner obj in runner cache - if status in (RunnerStatus.DONE, - RunnerStatus.FAILED) and updated_db: - self.runner_cache.del_runner(runner.id, current) - - @staticmethod - def _build_pipeline(name: str, items: List[IItem], - metadata: dict) -> Pipeline: - deps = [] - for item in items: - deps.append(f'{item.type().value}_{item.get_id()}') - return Pipeline(name=name, deps=deps, meta=metadata) + session.commit() + continue + # If the runner is running, we always try to run it. + self.pipeline_executor.run(runner_id) -composer = Composer(config=ComposerConfig( - runner_fn=global_runner_fn(), name='scheduler for fedlearner webconsole')) +composer = Composer(config=ComposerConfig(runner_fn=global_runner_fn(), name='scheduler for fedlearner webconsole')) diff --git a/web_console_v2/api/fedlearner_webconsole/composer/composer_service.py b/web_console_v2/api/fedlearner_webconsole/composer/composer_service.py new file mode 100644 index 000000000..51c213c1e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/composer_service.py @@ -0,0 +1,276 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
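A rough usage sketch of the service added below (mirroring composer_service_test.py; the item name and cron expression are illustrative):

from fedlearner_webconsole.composer.composer_service import ComposerService
from fedlearner_webconsole.composer.interface import ItemType
from fedlearner_webconsole.db import db
from fedlearner_webconsole.proto.composer_pb2 import RunnerInput

with db.session_scope() as session:
    # Register a two-step pipeline that fires every 10 minutes;
    # omit cron_config for a run-once item.
    ComposerService(session).collect_v2(
        'example_item',
        [(ItemType.TASK, RunnerInput()), (ItemType.TASK, RunnerInput())],
        cron_config='*/10 * * * *')
    session.commit()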
+ +# pylint: disable=global-statement +# coding: utf-8 + +import logging +from typing import List, Optional, Tuple +from sqlalchemy.sql.schema import Column +from sqlalchemy.sql import func +from sqlalchemy.orm import Session +from croniter import croniter +from fedlearner_webconsole.composer.models import ItemStatus, RunnerStatus +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.composer.models import SchedulerItem, SchedulerRunner +from fedlearner_webconsole.proto import composer_pb2 +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder +from fedlearner_webconsole.proto.filtering_pb2 import FilterOp, FilterExpression, SimpleExpression +from fedlearner_webconsole.utils.paginate import Pagination, paginate + + +def _contains_case_insensitive(exp: SimpleExpression): + c: Column = getattr(SchedulerItem, exp.field) + return c.ilike(f'%{exp.string_value}%') + + +def _is_cron(exp: SimpleExpression): + c: Column = SchedulerItem.cron_config + if exp.bool_value: + exp.string_value = '*' + return c.ilike(f'%{exp.string_value}%') + return c + + +def _equal_item_status(exp: SimpleExpression): + c: Column = SchedulerItem.status + return c == ItemStatus[exp.string_value].value + + +def _equal_runner_status(exp: SimpleExpression): + c: Column = SchedulerRunner.status + return c == RunnerStatus[exp.string_value].value + + +class ComposerService(object): + # attributes that you can patch + MUTABLE_ITEM_KEY = ['cron_config', 'retry_cnt'] + + def __init__(self, session: Session): + self._session = session + + def get_item_status(self, name: str) -> Optional[ItemStatus]: + """Get item status + + Args: + name (str): item name + + Returns: + ItemStatus: item status + """ + existed = self._session.query(SchedulerItem).filter(SchedulerItem.name == name).first() + if not existed: + return None + return ItemStatus(existed.status) + + def patch_item_attr(self, name: str, key: str, value: str): + """ patch item args + + Args: + name (str): name of this item + key (str): key you want to update + value (str): value you want to set + + Raises: + ValueError: if some check violates + Exception: if session failed + """ + if key not in self.__class__.MUTABLE_ITEM_KEY: + raise ValueError(f'fail to change attribute {key}') + + # TODO(linfan.fine): add validations + item: SchedulerItem = self._session.query(SchedulerItem).filter(SchedulerItem.name == name).first() + if not item: + raise ValueError(f'cannot find item {name}') + setattr(item, key, value) + self._session.add(item) + try: + self._session.flush() + except Exception as e: # pylint: disable=broad-except + logging.error(f'[composer] failed to patch item attr, ' f'name: {name}, exception: {e}') + raise e + + def collect_v2(self, name: str, items: List[Tuple[ItemType, RunnerInput]], cron_config: Optional[str] = None): + """Collect scheduler item. 
+
+ Args:
+ name (str): item name, should be unique
+ items (List[Tuple[ItemType, RunnerInput]]): specify the execution pipeline (in order)
+ cron_config (Optional[str]): a cron expression for running the item periodically
+
+ Raises:
+ ValueError: if `cron_config` is invalid
+ Exception: if db session failed
+ """
+ if len(name) == 0:
+ return
+ if cron_config and not croniter.is_valid(cron_config):
+ raise ValueError('invalid cron_config')
+ # check whether the name already exists
+ existed = self._session.query(SchedulerItem.id).filter_by(name=name).first()
+ if existed:
+ logging.warning('SchedulerItem %s already exists', name)
+ return
+ scheduler_item = SchedulerItem(name=name, cron_config=cron_config, created_at=func.now())
+ queue = []
+ for item_type, rinput in items:
+ runner_input = RunnerInput(runner_type=item_type.value)
+ runner_input.MergeFrom(rinput)
+ queue.append(runner_input)
+ pipeline = composer_pb2.Pipeline(version=2, name=name, queue=queue)
+ scheduler_item.set_pipeline(pipeline)
+ self._session.add(scheduler_item)
+ try:
+ self._session.flush()
+ except Exception as e: # pylint: disable=broad-except
+ logging.error(f'[composer] failed to create scheduler_item, name: {name}, exception: {e}')
+ raise e
+
+ def start(self, name: str):
+ """Enable an OFF scheduler item"""
+ existed = self._session.query(SchedulerItem).filter_by(name=name).first()
+ existed.status = ItemStatus.ON.value
+
+ def finish(self, name: str):
+ """Finish item
+
+ Args:
+ name (str): item name
+
+ Raises:
+ Exception: if db session failed
+ """
+ existed = self._session.query(SchedulerItem).filter_by(name=name, status=ItemStatus.ON.value).first()
+ if not existed:
+ return
+ existed.status = ItemStatus.OFF.value
+ self._session.query(SchedulerRunner).filter(
+ SchedulerRunner.item_id == existed.id,
+ SchedulerRunner.status.in_([RunnerStatus.INIT.value, RunnerStatus.RUNNING.value])).delete()
+ try:
+ self._session.flush()
+ except Exception as e: # pylint: disable=broad-except
+ logging.error(f'[composer] failed to finish scheduler_item, ' f'name: {name}, exception: {e}')
+ raise e
+
+ def get_recent_runners(self, name: str, count: int = 10) -> List[SchedulerRunner]:
+ """Get recent runners ordered by created_at descending
+
+ Args:
+ name (str): item name
+ count (int): the number of runners
+
+ Returns:
+ List[SchedulerRunner]: list of SchedulerRunner
+ """
+ runners = self._session.query(SchedulerRunner).join(
+ SchedulerItem, SchedulerItem.id == SchedulerRunner.item_id).filter(SchedulerItem.name == name).order_by(
+ SchedulerRunner.created_at.desc()).limit(count).all()
+ if not runners:
+ return []
+ return runners
+
+
+class CronJobService:
+
+ def __init__(self, session: Session):
+ self._session = session
+
+ def start_cronjob(self, item_name: str, items: List[Tuple[ItemType, RunnerInput]], cron_config: str):
+ """Starts a cronjob if cron_config is valid.
+
+ Args:
+ item_name (str): name of scheduler item
+ items: list of scheduler items with inputs
+ cron_config (str): cron expression
+
+ Raises:
+ InvalidArgumentException: if any check is violated
+ """
+ if not croniter.is_valid(cron_config):
+ raise InvalidArgumentException(f'cron config {cron_config} is not valid')
+ service = ComposerService(self._session)
+ status = service.get_item_status(name=item_name)
+ # create a cronjob
+ if status is None:
+ service.collect_v2(name=item_name, items=items, cron_config=cron_config)
+ return
+ if status == ItemStatus.OFF:
+ logging.info(f'[start_cronjob] start composer item {item_name}')
+ service.start(name=item_name)
+ # patch a cronjob
+ try:
+ service.patch_item_attr(name=item_name, key='cron_config', value=cron_config)
+ except ValueError as err:
+ emit_store('path_item_attr_error', 1)
+ raise InvalidArgumentException(details=repr(err)) from err
+
+ def stop_cronjob(self, item_name: str):
+ service = ComposerService(self._session)
+ logging.info(f'[start_or_stop_cronjob] finish composer item {item_name}')
+ service.finish(name=item_name)
+
+
+class SchedulerItemService():
+ """The 'is_cron' filter param controls whether only cron jobs are displayed."""
+ FILTER_FIELDS = {
+ 'is_cron': SupportedField(type=FieldType.BOOL, ops={FilterOp.EQUAL: _is_cron}),
+ 'status': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: _equal_item_status}),
+ 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: _contains_case_insensitive}),
+ 'id': SupportedField(type=FieldType.NUMBER, ops={FilterOp.EQUAL: None})
+ }
+
+ def __init__(self, session: Session):
+ self._session = session
+ self._filter_builder = FilterBuilder(model_class=SchedulerItem, supported_fields=self.FILTER_FIELDS)
+
+ def get_scheduler_items(self,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ filter_exp: Optional[FilterExpression] = None) -> Pagination:
+ query = self._session.query(SchedulerItem)
+ if filter_exp:
+ query = self._filter_builder.build_query(query, filter_exp)
+ query = query.order_by(SchedulerItem.id.desc())
+ return paginate(query, page, page_size)
+
+
+class SchedulerRunnerService():
+ FILTER_FIELDS = {
+ 'status': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: _equal_runner_status}),
+ }
+
+ def __init__(self, session: Session):
+ self._session = session
+ self._filter_builder = FilterBuilder(model_class=SchedulerRunner, supported_fields=self.FILTER_FIELDS)
+
+ def get_scheduler_runners(self,
+ item_id: Optional[int] = None,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ filter_exp: Optional[FilterExpression] = None) -> Pagination:
+ # the status column is indexed, which optimizes this SQL query;
+ # ordering by id.desc is preferred over created_at.desc so that the index can be used
+ query = self._session.query(SchedulerRunner).order_by(
+ SchedulerRunner.id.desc()).filter(SchedulerRunner.status > -1)
+ if filter_exp:
+ query = self._filter_builder.build_query(query, filter_exp)
+ if item_id is not None:
+ query = query.filter_by(item_id=item_id)
+
+ return paginate(query, page, page_size) diff --git a/web_console_v2/api/fedlearner_webconsole/composer/composer_service_test.py b/web_console_v2/api/fedlearner_webconsole/composer/composer_service_test.py new file mode 100644 index 000000000..cb8ed76ad --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/composer_service_test.py @@ -0,0 +1,279 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import unittest +from datetime import datetime + +import sys + +from fedlearner_webconsole.composer.composer_service import (ComposerService, CronJobService, SchedulerItemService, + SchedulerRunnerService) +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.composer.models import RunnerStatus, SchedulerItem, ItemStatus, SchedulerRunner +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, Pipeline, ModelTrainingCronJobInput +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterExpressionKind, SimpleExpression, FilterOp +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ComposerServiceTest(NoWebServerTestCase): + + def test_collect_v2(self): + with db.session_scope() as session: + service = ComposerService(session) + service.collect_v2('test_item', [(ItemType.TASK, RunnerInput()), (ItemType.TASK, RunnerInput())]) + session.commit() + with db.session_scope() as session: + item = session.query(SchedulerItem).filter(SchedulerItem.name == 'test_item').first() + self.assertEqual(item.status, ItemStatus.ON.value) + self.assertIsNone(item.cron_config) + self.assertEqual( + item.get_pipeline(), + Pipeline( + version=2, + name='test_item', + queue=[RunnerInput(runner_type=ItemType.TASK.value), + RunnerInput(runner_type=ItemType.TASK.value)])) + + def test_collect_v2_duplication(self): + with db.session_scope() as session: + service = ComposerService(session) + service.collect_v2('test_item', [(ItemType.TASK, RunnerInput())]) + session.commit() + service.collect_v2('test_item', [(ItemType.TASK, RunnerInput())]) + session.commit() + with db.session_scope() as session: + items = session.query(SchedulerItem).filter(SchedulerItem.name == 'test_item').all() + self.assertEqual(len(items), 1) + + def test_collect_v2_cron(self): + with db.session_scope() as session: + service = ComposerService(session) + service.collect_v2('test_cron_item', [ + (ItemType.TASK, RunnerInput()), + ], '* * * * * */10') + session.commit() + with db.session_scope() as session: + item = session.query(SchedulerItem).filter(SchedulerItem.name == 'test_cron_item').first() + self.assertEqual(item.status, ItemStatus.ON.value) + self.assertEqual(item.cron_config, '* * * * * */10') + self.assertEqual( + item.get_pipeline(), + Pipeline(version=2, name='test_cron_item', queue=[RunnerInput(runner_type=ItemType.TASK.value)])) + + def test_finish(self): + with db.session_scope() as session: + item = SchedulerItem(id=100, name='fake_item', status=ItemStatus.ON.value) + runner_1 = SchedulerRunner(id=100, item_id=100, status=RunnerStatus.RUNNING.value) + runner_2 = SchedulerRunner(id=101, item_id=100, status=RunnerStatus.DONE.value) + runner_3 = SchedulerRunner(id=102, item_id=100, status=RunnerStatus.FAILED.value) + runner_4 = SchedulerRunner(id=103, item_id=100, status=RunnerStatus.INIT.value) + session.add(item) + session.add(runner_1) + 
session.add(runner_2) + session.add(runner_3) + session.add(runner_4) + session.commit() + + with db.session_scope() as session: + service = ComposerService(session) + service.finish(name='fake_item') + session.commit() + + with db.session_scope() as session: + item = session.query(SchedulerItem).get(100) + runner_1 = session.query(SchedulerRunner).get(100) + runner_2 = session.query(SchedulerRunner).get(101) + runner_3 = session.query(SchedulerRunner).get(102) + runner_4 = session.query(SchedulerRunner).get(103) + + self.assertEqual(item.status, ItemStatus.OFF.value) + self.assertIsNone(runner_1) + self.assertEqual(runner_2.status, RunnerStatus.DONE.value) + self.assertEqual(runner_3.status, RunnerStatus.FAILED.value) + self.assertIsNone(runner_4) + + def test_get_recent_runners(self): + with db.session_scope() as session: + item = SchedulerItem(id=100, name='fake_item', status=ItemStatus.ON.value) + runner_1 = SchedulerRunner(id=100, + item_id=100, + status=RunnerStatus.RUNNING.value, + created_at=datetime(2012, 1, 14, 12, 0, 6)) + runner_2 = SchedulerRunner(id=101, + item_id=100, + status=RunnerStatus.DONE.value, + created_at=datetime(2012, 1, 14, 12, 0, 7)) + runner_3 = SchedulerRunner(id=102, + item_id=100, + status=RunnerStatus.FAILED.value, + created_at=datetime(2012, 1, 14, 12, 0, 8)) + runner_4 = SchedulerRunner(id=103, + item_id=100, + status=RunnerStatus.INIT.value, + created_at=datetime(2012, 1, 14, 12, 0, 9)) + session.add_all([item, runner_1, runner_2, runner_3, runner_4]) + session.commit() + + with db.session_scope() as session: + expect_runners = [runner_4, runner_3, runner_2, runner_1] + runners = ComposerService(session).get_recent_runners(name='fake_item', count=10) + self.assertEqual(len(runners), 4) + for i in range(4): + self.assertEqual(runners[i].id, expect_runners[i].id) + self.assertEqual(runners[i].status, expect_runners[i].status) + self.assertEqual(runners[i].item_id, 100) + + runners = ComposerService(session).get_recent_runners(name='fake_item', count=1) + self.assertEqual(len(runners), 1) + self.assertEqual(runners[0].id, expect_runners[0].id) + self.assertEqual(runners[0].status, expect_runners[0].status) + self.assertEqual(runners[0].item_id, 100) + + def test_patch_item_attr(self): + test_item_name = 'test' + with db.session_scope() as session: + scheduler_item = SchedulerItem(name=test_item_name, cron_config='* */1 * * *') + session.add(scheduler_item) + session.commit() + with db.session_scope() as session: + service = ComposerService(session) + service.patch_item_attr(name=test_item_name, key='cron_config', value='*/20 * * * *') + session.commit() + with db.session_scope() as session: + item = session.query(SchedulerItem).filter(SchedulerItem.name == test_item_name).one() + self.assertEqual(item.cron_config, '*/20 * * * *') + with self.assertRaises(ValueError): + with db.session_scope() as session: + ComposerService(session).patch_item_attr(name=test_item_name, + key='create_at', + value='2021-04-01 00:00:00') + session.commit() + + +class CronJobServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + item = SchedulerItem(id=1, + name='model_training_cronjob_1', + cron_config='*/10 * * * *', + status=ItemStatus.ON.value) + session.add(item) + session.commit() + + def test_start_cronjob(self): + with db.session_scope() as session: + items = [(ItemType.MODEL_TRAINING_CRON_JOB, + RunnerInput(model_training_cron_job_input=ModelTrainingCronJobInput(group_id=1)))] + 
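+ # start_cronjob behaves like an upsert: it patches cron_config on the existing
+ # item and creates 'model_training_cronjob_2' from scratch, as asserted below.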
CronJobService(session).start_cronjob('model_training_cronjob_1', items, '*/20 * * * *') + CronJobService(session).start_cronjob('model_training_cronjob_2', items, '*/20 * * * *') + session.commit() + with db.session_scope() as session: + item_1 = session.query(SchedulerItem).get(1) + self.assertEqual(item_1.cron_config, '*/20 * * * *') + item_2 = session.query(SchedulerItem).filter_by(name='model_training_cronjob_2').first() + self.assertEqual(item_2.cron_config, '*/20 * * * *') + + def test_stop_cronjob(self): + with db.session_scope() as session: + CronJobService(session).stop_cronjob('model_training_cronjob_1') + session.commit() + with db.session_scope() as session: + item = session.query(SchedulerItem).get(1) + self.assertEqual(item.status, ItemStatus.OFF.value) + + +class SchedulerItemServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + scheduler_item_off = SchedulerItem(id=5, + name='test_item_off', + status=ItemStatus.OFF.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + scheduler_item_on = SchedulerItem(id=6, + name='test_item_on', + status=ItemStatus.ON.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + scheduler_item_on_cron = SchedulerItem(id=7, + name='test_item_on_cron', + cron_config='*/20 * * * *', + status=ItemStatus.ON.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + + with db.session_scope() as session: + session.add(scheduler_item_on) + session.add(scheduler_item_off) + session.add(scheduler_item_on_cron) + session.commit() + + def test_get_scheduler_items(self): + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='is_cron', + op=FilterOp.EQUAL, + bool_value=1, + )) + with db.session_scope() as session: + service = SchedulerItemService(session) + paginations = service.get_scheduler_items(filter_exp=filter_exp, page=1, page_size=7) + item_ids = [item.id for item in paginations.get_items()] + self.assertEqual(item_ids, [7]) + + +class SchedulerRunnerServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + scheduler_item_on = SchedulerItem(id=100, + name='test_item_on', + status=ItemStatus.ON.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + self.default_scheduler_item = scheduler_item_on + runner_init = SchedulerRunner(id=0, item_id=100, status=RunnerStatus.INIT.value) + runner_running_1 = SchedulerRunner(id=1, item_id=100, status=RunnerStatus.RUNNING.value) + runner_running_2 = SchedulerRunner(id=2, item_id=100, status=RunnerStatus.RUNNING.value) + + with db.session_scope() as session: + session.add(scheduler_item_on) + session.add(runner_init) + session.add(runner_running_1) + session.add(runner_running_2) + session.commit() + + def test_get_scheduler_runners(self): + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='status', + op=FilterOp.EQUAL, + string_value='INIT', + )) + with db.session_scope() as session: + service = SchedulerRunnerService(session) + paginations = service.get_scheduler_runners(filter_exp=filter_exp, page=1, page_size=7) + item_ids = [item.id for item in paginations.get_items()] + self.assertEqual(item_ids, [0]) + + +if __name__ == '__main__': + logging.basicConfig(stream=sys.stderr, level=logging.INFO) + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/composer_test.py 
b/web_console_v2/api/fedlearner_webconsole/composer/composer_test.py new file mode 100644 index 000000000..a9b4f2c91 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/composer_test.py @@ -0,0 +1,171 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+
+import logging
+import sys
+import threading
+import unittest
+from datetime import datetime
+
+from fedlearner_webconsole.composer.composer import Composer, ComposerConfig
+from fedlearner_webconsole.composer.composer_service import ComposerService
+from fedlearner_webconsole.composer.interface import ItemType
+from fedlearner_webconsole.composer.models import ItemStatus, RunnerStatus, SchedulerItem, \
+ SchedulerRunner
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, PipelineContextData, RunnerOutput
+from testing.composer.common import TestRunner
+from testing.no_web_server_test_case import NoWebServerTestCase
+from testing.fake_time_patcher import FakeTimePatcher
+
+
+class ComposerV2Test(NoWebServerTestCase):
+
+ def setUp(self):
+ super().setUp()
+ self.time_patcher = FakeTimePatcher()
+ self.time_patcher.start(datetime(2012, 1, 14, 12, 0, 5))
+
+ self.runner_fn = {
+ ItemType.TASK.value: TestRunner,
+ }
+
+ def tearDown(self):
+ self.time_patcher.stop()
+ super().tearDown()
+
+ def test_multiple_composers(self):
+ logging.info('+++++++++++++++++++++++++++ test multiple composers')
+ cfg = ComposerConfig(runner_fn=self.runner_fn, name='scheduler for normal items')
+ composer1 = Composer(cfg)
+ composer2 = Composer(cfg)
+ c1 = threading.Thread(target=composer1.run, args=[db.engine])
+ c1.start()
+ c2 = threading.Thread(target=composer2.run, args=[db.engine])
+ c2.start()
+ self.time_patcher.interrupt(15)
+ composer1.stop()
+ composer2.stop()
+
+ def test_normal_items(self):
+ logging.info('+++++++++++++++++++++++++++ test normal items')
+ cfg = ComposerConfig(runner_fn=self.runner_fn, name='scheduler for normal items')
+ composer = Composer(config=cfg)
+ composer.run(db_engine=db.engine)
+ with db.session_scope() as session:
+ name = 'normal items'
+ service = ComposerService(session)
+ service.collect_v2(name, [(ItemType.TASK, RunnerInput()), (ItemType.TASK, RunnerInput()),
+ (ItemType.TASK, RunnerInput())])
+ session.commit()
+ self.time_patcher.interrupt(60)
+ with db.session_scope() as session:
+ runners = session.query(SchedulerRunner).all()
+ self.assertEqual(len(runners), 1, 'Should be only 1 runner')
+ self.assertEqual(runners[0].status, RunnerStatus.DONE.value)
+ self.assertEqual(
+ runners[0].get_context(),
+ PipelineContextData(current_runner=2,
+ outputs={
+ 0: RunnerOutput(),
+ 1: RunnerOutput(),
+ 2: RunnerOutput(),
+ }))
+ # Item should be finished
+ item = session.query(SchedulerItem).filter(SchedulerItem.name == 'normal items').first()
+ self.assertEqual(item.status, ItemStatus.OFF.value, 'should finish item')
+ composer.stop()
+
+ def test_failed_items(self):
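+ # The TestRunner helper is expected to fail on the runner at index 3, so the
+ # five-step pipeline below should end up FAILED at current_runner=3.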
logging.info('+++++++++++++++++++++++++++ test failed items') + cfg = ComposerConfig(runner_fn=self.runner_fn, name='scheduler for failed items') + composer = Composer(config=cfg) + composer.run(db_engine=db.engine) + with db.session_scope() as session: + name = 'failed items' + ComposerService(session).collect_v2( + name, + [ + (ItemType.TASK, RunnerInput()), + (ItemType.TASK, RunnerInput()), + (ItemType.TASK, RunnerInput()), + # Failed one + (ItemType.TASK, RunnerInput()), + (ItemType.TASK, RunnerInput()), + ]) + session.commit() + self.time_patcher.interrupt(60) + with db.session_scope() as session: + runners = session.query(SchedulerRunner).all() + self.assertEqual(len(runners), 1, 'Should be only 1 runner') + self.assertEqual(runners[0].status, RunnerStatus.FAILED.value) + self.assertEqual( + runners[0].get_context(), + PipelineContextData(current_runner=3, + outputs={ + 0: RunnerOutput(), + 1: RunnerOutput(), + 2: RunnerOutput(), + 3: RunnerOutput(error_message='index is 3') + })) + # Item should be finished + item = session.query(SchedulerItem).filter(SchedulerItem.name == 'failed items').first() + self.assertEqual(item.status, ItemStatus.OFF.value, 'should finish item') + composer.stop() + + def test_cron_items(self): + logging.info('+++++++++++++++++++++++++++ test finishing cron items') + cfg = ComposerConfig(runner_fn=self.runner_fn, name='finish normal items') + composer = Composer(config=cfg) + composer.run(db_engine=db.engine) + with db.session_scope() as session: + service = ComposerService(session) + name = 'cronjob' + # test invalid cron + self.assertRaises(ValueError, + service.collect_v2, + name, [ + (ItemType.TASK, RunnerInput()), + ], + cron_config='invalid cron') + + service.collect_v2( + name, + [ + (ItemType.TASK, RunnerInput()), + ], + # Every 10 seconds + cron_config='* * * * * */10') + session.commit() + self.assertEqual(1, len(session.query(SchedulerItem).all())) + # Interrupts twice since we need two rounds of tick for + # composer to schedule items in fake world + self.time_patcher.interrupt(11) + self.time_patcher.interrupt(11) + with db.session_scope() as session: + self.assertEqual(2, len(session.query(SchedulerRunner).all())) + service = ComposerService(session) + self.assertEqual(RunnerStatus.DONE.value, + service.get_recent_runners(name)[-1].status, 'should finish runner') + service.finish(name) + session.commit() + self.assertEqual(ItemStatus.OFF, service.get_item_status(name), 'should finish item') + composer.stop() + + +if __name__ == '__main__': + logging.basicConfig(stream=sys.stderr, level=logging.INFO) + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/context.py b/web_console_v2/api/fedlearner_webconsole/composer/context.py new file mode 100644 index 000000000..c71b101f0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/context.py @@ -0,0 +1,61 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
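A small sketch of how the new pipeline context is meant to be driven (values are illustrative, mirroring context_test.py):

from fedlearner_webconsole.composer.context import PipelineContext
from fedlearner_webconsole.proto.composer_pb2 import Pipeline, PipelineContextData, RunnerInput

context = PipelineContext.build(
    pipeline=Pipeline(version=2, name='example', queue=[RunnerInput(runner_type='task')]),
    data=PipelineContextData())
runner_context = context.get_current_runner_context()  # index 0, the first RunnerInput in the queue
context.run_next()  # advances current_runner; a no-op once the end of the queue is reached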
+# + +# pylint: disable=redefined-builtin +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, PipelineContextData, Pipeline + + +class RunnerContext(object): + + def __init__(self, index: int, input: RunnerInput): + self._index = index + self._input = input + + @property + def index(self) -> int: + return self._index + + @property + def input(self) -> RunnerInput: + return self._input + + +class PipelineContext(object): + + def __init__(self, pipeline: Pipeline, data: PipelineContextData): + self._pipeline = pipeline + self._data = data + self._runner_contexts = {} + + @classmethod + def build(cls, pipeline: Pipeline, data: PipelineContextData) -> 'PipelineContext': + return cls(pipeline=pipeline, data=data) + + def run_next(self): + if self._data.current_runner >= len(self._pipeline.queue) - 1: + return + self._data.current_runner += 1 + + def get_current_runner_context(self) -> RunnerContext: + runner_idx = self._data.current_runner + if runner_idx in self._runner_contexts: + return self._runner_contexts[runner_idx] + context = RunnerContext(index=runner_idx, input=self._pipeline.queue[runner_idx]) + self._runner_contexts[runner_idx] = context + return context + + @property + def data(self) -> PipelineContextData: + return self._data diff --git a/web_console_v2/api/fedlearner_webconsole/composer/context_test.py b/web_console_v2/api/fedlearner_webconsole/composer/context_test.py new file mode 100644 index 000000000..54259e78f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/context_test.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest + +from fedlearner_webconsole.composer.context import PipelineContext +from fedlearner_webconsole.proto.composer_pb2 import Pipeline, RunnerInput, PipelineContextData + + +class PipelineContextTest(unittest.TestCase): + + def test_get_current_runner_context(self): + pipeline_context = PipelineContext.build(pipeline=Pipeline(version=2, + name='test pipeline', + queue=[ + RunnerInput(runner_type='test type1'), + RunnerInput(runner_type='test type2'), + ]), + data=PipelineContextData()) + runner_context = pipeline_context.get_current_runner_context() + self.assertEqual(runner_context.index, 0) + self.assertEqual(runner_context.input.runner_type, 'test type1') + pipeline_context.run_next() + runner_context = pipeline_context.get_current_runner_context() + self.assertEqual(runner_context.index, 1) + self.assertEqual(runner_context.input.runner_type, 'test type2') + # No effect as whole pipeline already done + pipeline_context.run_next() + runner_context = pipeline_context.get_current_runner_context() + self.assertEqual(runner_context.index, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/interface.py b/web_console_v2/api/fedlearner_webconsole/composer/interface.py index f9acdeb88..001eafe79 100644 --- a/web_console_v2/api/fedlearner_webconsole/composer/interface.py +++ b/web_console_v2/api/fedlearner_webconsole/composer/interface.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,20 +19,38 @@ import enum from typing import Tuple -from fedlearner_webconsole.composer.models import Context, RunnerStatus +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus + +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput # NOTE: remember to register new item in `global_runner_fn` \ # which defined in `runner.py` class ItemType(enum.Enum): TASK = 'task' # test only - MEMORY = 'memory' - WORKFLOW_CRON_JOB = 'workflow_cron_job' - DATA_PIPELINE = 'data_pipeline' + WORKFLOW_CRON_JOB = 'workflow_cron_job' # v2 + BATCH_STATS = 'batch_stats' # v2 + SERVING_SERVICE_PARSE_SIGNATURE = 'serving_service_parse_signature' # v2 + SERVING_SERVICE_QUERY_PARTICIPANT_STATUS = 'serving_service_query_participant_status' # v2 + SERVING_SERVICE_UPDATE_MODEL = 'serving_service_update_model' # v2 + SCHEDULE_WORKFLOW = 'schedule_workflow' # v2 + SCHEDULE_JOB = 'schedule_job' # v2 + CLEANUP_CRON_JOB = 'cleanup_cron_job' # v2 + MODEL_TRAINING_CRON_JOB = 'model_training_cron_job' # v2 + TEE_CREATE_RUNNER = 'tee_create_runner' # v2 + TEE_RESOURCE_CHECK_RUNNER = 'tee_resource_check_runner' # v2 + SCHEDULE_PROJECT = 'schedule_project' # v2 + DATASET_LONG_PERIOD_SCHEDULER = 'dataset_long_period_scheduler' # v2 + DATASET_SHORT_PERIOD_SCHEDULER = 'dataset_short_period_scheduler' # v2 + SCHEDULE_MODEL_JOB = 'schedule_model_job' # v2 + SCHEDULE_MODEL_JOB_GROUP = 'schedule_model_job_group' # v2 + SCHEDULE_LONG_PERIOD_MODEL_JOB_GROUP = 'schedule_long_period_model_job_group' # v2 # item interface class IItem(metaclass=ABCMeta): + @abstractmethod def type(self) -> ItemType: pass @@ -42,27 +60,16 @@ def get_id(self) -> int: pass -# runner interface -class IRunner(metaclass=ABCMeta): - @abstractmethod - def start(self, context: Context): - 
"""Start runner - - Args: - context: shared in runner. Don't write data to context in this - method. Only can read data via `context.data`. - """ +class IRunnerV2(metaclass=ABCMeta): @abstractmethod - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - """Check runner result + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + """Runs the runner. - NOTE: You could check runner if is timeout in this method. If it's - timeout, return `RunnerStatus.FAILED`. Since runners executed by - `ThreadPoolExecutor` may have some common resources, it's better to - stop the runner by user instead of `composer`. + The implementation should be light, as runners will be executed by `ThreadPoolExecutor`. Args: - context: shared in runner. In this method, data can be - read or written to context via `context.data`. + context: immutable context in the runner. + Returns: + status and the output. """ diff --git a/web_console_v2/api/fedlearner_webconsole/composer/models.py b/web_console_v2/api/fedlearner_webconsole/composer/models.py index 4cf88ac7e..17e8c5c11 100644 --- a/web_console_v2/api/fedlearner_webconsole/composer/models.py +++ b/web_console_v2/api/fedlearner_webconsole/composer/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,17 +16,24 @@ import enum import json -import datetime import logging - -from sqlalchemy import UniqueConstraint +from datetime import timezone, datetime +from sqlalchemy import UniqueConstraint, Index from sqlalchemy.engine import Engine from sqlalchemy.sql import func +from croniter import croniter from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.proto import composer_pb2 +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.mixins import to_dict_mixin +from fedlearner_webconsole.utils.proto import to_json, parse_from_json +from fedlearner_webconsole.proto.composer_pb2 import SchedulerItemPb, SchedulerRunnerPb +from fedlearner_webconsole.utils.pp_datetime import to_timestamp class Context(object): + def __init__(self, data: dict, internal: dict, db_engine: Engine): self._data = data # user data self._internal = internal # internal system data @@ -52,24 +59,21 @@ def db_engine(self) -> Engine: class ContextEncoder(json.JSONEncoder): - def default(self, obj) -> dict: - d = obj.__dict__ - return { - '_data': d.get('_data', {}), - '_internal': d.get('_internal', {}) - } + + def default(self, o) -> dict: + d = o.__dict__ + return {'_data': d.get('_data', {}), '_internal': d.get('_internal', {})} class ContextDecoder(json.JSONDecoder): + def __init__(self, db_engine: Engine): self.db_engine = db_engine super().__init__(object_hook=self.dict2object) def dict2object(self, val): if '_data' in val and '_internal' in val: - return Context(data=val.get('_data', {}), - internal=val.get('_internal', {}), - db_engine=self.db_engine) + return Context(data=val.get('_data', {}), internal=val.get('_internal', {}), db_engine=self.db_engine) return val @@ -84,37 +88,24 @@ class ItemStatus(enum.Enum): ON = 1 # need to run +@to_dict_mixin(extras={'need_run': (lambda si: si.need_run())}) class SchedulerItem(db.Model): __tablename__ = 'scheduler_item_v2' - __table_args__ = (UniqueConstraint('name', name='uniq_name'), - default_table_args('scheduler 
items'))
-    id = db.Column(db.Integer,
-                   comment='id',
-                   primary_key=True,
-                   autoincrement=True)
+    __table_args__ = (
+        UniqueConstraint('name', name='uniq_name'),
+        # idx_status is a common name which may cause conflicts in sqlite
+        Index('idx_item_status', 'status'),
+        default_table_args('scheduler items'),
+    )
+    id = db.Column(db.Integer, comment='id', primary_key=True, autoincrement=True)
     name = db.Column(db.String(255), comment='item name', nullable=False)
-    pipeline = db.Column(db.Text,
-                         comment='pipeline',
-                         nullable=False,
-                         default='{}')
-    status = db.Column(db.Integer,
-                       comment='item status',
-                       nullable=False,
-                       default=ItemStatus.ON.value)
-    interval_time = db.Column(db.Integer,
-                              comment='item run interval in second',
-                              nullable=False,
-                              default=-1)
-    last_run_at = db.Column(db.DateTime(timezone=True),
-                            comment='last runner time')
-    retry_cnt = db.Column(db.Integer,
-                          comment='retry count when item is failed',
-                          nullable=False,
-                          default=0)
+    pipeline = db.Column(db.Text(16777215), comment='pipeline', nullable=False, default='{}')
+    status = db.Column(db.Integer, comment='item status', nullable=False, default=ItemStatus.ON.value)
+    cron_config = db.Column(db.String(255), comment='cron expression in UTC timezone')
+    last_run_at = db.Column(db.DateTime(timezone=True), comment='last runner time')
+    retry_cnt = db.Column(db.Integer, comment='retry count when item is failed', nullable=False, default=0)
     extra = db.Column(db.Text(), comment='extra info')
-    created_at = db.Column(db.DateTime(timezone=True),
-                           comment='created at',
-                           server_default=func.now())
+    created_at = db.Column(db.DateTime(timezone=True), comment='created at', server_default=func.now())
     updated_at = db.Column(db.DateTime(timezone=True),
                            comment='updated at',
                            server_default=func.now(),
@@ -122,23 +113,40 @@ class SchedulerItem(db.Model):
     deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted at')
 
     def need_run(self) -> bool:
-        # job runs one time
-        if self.interval_time == -1 and self.last_run_at is None:
-            return True
-        if self.interval_time > 0:  # cronjob
-            if self.last_run_at is None:  # never run
-                return True
-            # compare datetime in utc
-            next_run_at = self.last_run_at.replace(
-                tzinfo=datetime.timezone.utc) + datetime.timedelta(
-                    seconds=self.interval_time)
-            utc_now = datetime.datetime.now(datetime.timezone.utc)
-            logging.debug(f'[composer] item id: {self.id}, '
-                          f'next_run_at: {next_run_at.timestamp()}, '
-                          f'utc_now: {utc_now.timestamp()}')
-            if next_run_at.timestamp() < utc_now.timestamp():
-                return True
-        return False
+        if not self.cron_config:
+            # job runs once
+            return self.last_run_at is None
+        # cronjob
+        if self.last_run_at is None:  # never run
+            # if there is no start time, croniter will return the next run
+            # datetime (UTC) based on the creation time
+            base = self.created_at.replace(tzinfo=timezone.utc)
+        else:
+            base = self.last_run_at.replace(tzinfo=timezone.utc)
+        next_run_at = croniter(self.cron_config, base).get_next(datetime)
+        utc_now = now(timezone.utc)
+        logging.debug(f'[composer] item id: {self.id}, '
+                      f'next_run_at: {next_run_at.timestamp()}, '
+                      f'utc_now: {utc_now.timestamp()}')
+        return next_run_at.timestamp() < utc_now.timestamp()
+
+    def set_pipeline(self, proto: composer_pb2.Pipeline):
+        self.pipeline = to_json(proto)
+
+    def get_pipeline(self) -> composer_pb2.Pipeline:
+        return parse_from_json(self.pipeline, composer_pb2.Pipeline())
+
+    def to_proto(self) -> SchedulerItemPb:
+        return SchedulerItemPb(id=self.id,
+                               name=self.name,
+                               pipeline=self.get_pipeline(),
+                               status=ItemStatus(self.status).name,
+                               cron_config=self.cron_config,
+                               last_run_at=to_timestamp(self.last_run_at) if self.last_run_at else None,
+                               retry_cnt=self.retry_cnt,
+                               created_at=to_timestamp(self.created_at) if self.created_at else None,
+                               updated_at=to_timestamp(self.updated_at) if self.updated_at else None,
+                               deleted_at=to_timestamp(self.deleted_at) if self.deleted_at else None)
 
 
 class RunnerStatus(enum.Enum):
@@ -148,43 +156,62 @@ class RunnerStatus(enum.Enum):
     FAILED = 3
 
 
+@to_dict_mixin()
 class SchedulerRunner(db.Model):
     __tablename__ = 'scheduler_runner_v2'
-    __table_args__ = (default_table_args('scheduler runners'))
-    id = db.Column(db.Integer,
-                   comment='id',
-                   primary_key=True,
-                   autoincrement=True)
+    __table_args__ = (
+        # idx_status is a common name which may cause conflicts in sqlite
+        Index('idx_runner_status', 'status'),
+        Index('idx_runner_item_id', 'item_id'),
+        default_table_args('scheduler runners'),
+    )
+    id = db.Column(db.Integer, comment='id', primary_key=True, autoincrement=True)
     item_id = db.Column(db.Integer, comment='item id', nullable=False)
-    status = db.Column(db.Integer,
-                       comment='runner status',
-                       nullable=False,
-                       default=RunnerStatus.INIT.value)
-    start_at = db.Column(db.DateTime(timezone=True),
-                         comment='runner start time')
+    status = db.Column(db.Integer, comment='runner status', nullable=False, default=RunnerStatus.INIT.value)
+    start_at = db.Column(db.DateTime(timezone=True), comment='runner start time')
     end_at = db.Column(db.DateTime(timezone=True), comment='runner end time')
-    pipeline = db.Column(db.Text(),
-                         comment='pipeline from scheduler item',
-                         nullable=False,
-                         default='{}')
-    output = db.Column(db.Text(),
-                       comment='output',
-                       nullable=False,
-                       default='{}')
-    context = db.Column(db.Text(),
-                        comment='context',
-                        nullable=False,
-                        default='{}')
+    pipeline = db.Column(db.Text(16777215), comment='pipeline from scheduler item', nullable=False, default='{}')
+    output = db.Column(db.Text(), comment='output', nullable=False, default='{}')
+    context = db.Column(db.Text(16777215), comment='context', nullable=False, default='{}')
     extra = db.Column(db.Text(), comment='extra info')
-    created_at = db.Column(db.DateTime(timezone=True),
-                           comment='created at',
-                           server_default=func.now())
+    created_at = db.Column(db.DateTime(timezone=True), comment='created at', server_default=func.now())
     updated_at = db.Column(db.DateTime(timezone=True),
                            comment='updated at',
                            server_default=func.now(),
                            onupdate=func.now())
     deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted at')
 
+    def set_pipeline(self, proto: composer_pb2.Pipeline):
+        self.pipeline = to_json(proto)
+
+    def get_pipeline(self) -> composer_pb2.Pipeline:
+        return parse_from_json(self.pipeline, composer_pb2.Pipeline())
+
+    def set_context(self, proto: composer_pb2.PipelineContextData):
+        self.context = to_json(proto)
+
+    def get_context(self) -> composer_pb2.PipelineContextData:
+        return parse_from_json(self.context, composer_pb2.PipelineContextData())
+
+    def set_output(self, proto: composer_pb2.RunnerOutput):
+        self.output = to_json(proto)
+
+    def get_output(self) -> composer_pb2.RunnerOutput:
+        return parse_from_json(self.output, composer_pb2.RunnerOutput())
+
+    def to_proto(self) -> SchedulerRunnerPb:
+        return SchedulerRunnerPb(id=self.id,
+                                 item_id=self.item_id,
+                                 status=RunnerStatus(self.status).name,
+                                 start_at=to_timestamp(self.start_at) if self.start_at else None,
+                                 end_at=to_timestamp(self.end_at) if self.end_at else None,
+                                 pipeline=self.get_pipeline(),
output=self.get_output(), + context=self.get_context(), + created_at=to_timestamp(self.created_at) if self.created_at else None, + updated_at=to_timestamp(self.updated_at) if self.updated_at else None, + deleted_at=to_timestamp(self.deleted_at) if self.deleted_at else None) + class OptimisticLock(db.Model): __tablename__ = 'optimistic_lock_v2' @@ -192,15 +219,10 @@ class OptimisticLock(db.Model): UniqueConstraint('name', name='uniq_name'), default_table_args('optimistic lock'), ) - id = db.Column(db.Integer, - comment='id', - primary_key=True, - autoincrement=True) + id = db.Column(db.Integer, comment='id', primary_key=True, autoincrement=True) name = db.Column(db.String(255), comment='lock name', nullable=False) version = db.Column(db.BIGINT, comment='lock version', nullable=False) - created_at = db.Column(db.DateTime(timezone=True), - comment='created at', - server_default=func.now()) + created_at = db.Column(db.DateTime(timezone=True), comment='created at', server_default=func.now()) updated_at = db.Column(db.DateTime(timezone=True), comment='updated at', server_default=func.now(), diff --git a/web_console_v2/api/fedlearner_webconsole/composer/models_test.py b/web_console_v2/api/fedlearner_webconsole/composer/models_test.py new file mode 100644 index 000000000..23746ac85 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/models_test.py @@ -0,0 +1,195 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
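+
+# A minimal sketch of the croniter arithmetic that SchedulerItem.need_run
+# relies on (illustrative only; croniter is the package models.py imports):
+#
+#   from datetime import datetime, timezone
+#   from croniter import croniter
+#   base = datetime(2021, 9, 1, 10, 10, tzinfo=timezone.utc)
+#   croniter('*/30 * * * *', base).get_next(datetime)
+#   # -> datetime(2021, 9, 1, 10, 30, tzinfo=timezone.utc)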
+ +# coding: utf-8 +import unittest +from datetime import datetime, timezone +from unittest.mock import patch + +from fedlearner_webconsole.composer.models import SchedulerItem, ItemStatus, RunnerStatus, SchedulerRunner +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto import composer_pb2 +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.proto import to_json +from testing.no_web_server_test_case import NoWebServerTestCase + + +class SchedulerItemTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + self.default_pipeline = composer_pb2.Pipeline(version=2, + name='test pipeline', + queue=[ + composer_pb2.RunnerInput(runner_type='test type1'), + composer_pb2.RunnerInput(runner_type='test type2'), + ]) + scheduler_item = SchedulerItem(id=5, + name='test_item_off', + pipeline=to_json(self.default_pipeline), + status=ItemStatus.OFF.value, + cron_config='* * * * * 15', + last_run_at=created_at, + retry_cnt=0, + created_at=created_at, + updated_at=created_at) + with db.session_scope() as session: + session.add(scheduler_item) + session.commit() + + def test_need_run_normal_job(self): + with db.session_scope() as session: + item = SchedulerItem(name='test normal item') + session.commit() + # Never run + self.assertTrue(item.need_run()) + item.last_run_at = now() + session.commit() + self.assertFalse(item.need_run()) + + @patch('fedlearner_webconsole.composer.models.now') + def test_need_run_cron_job(self, mock_now): + with db.session_scope() as session: + item = SchedulerItem( + name='test cron item', + # Runs every 30 minutes + cron_config='*/30 * * * *', + created_at=datetime(2021, 9, 1, 10, 10)) + session.commit() + # Never run + mock_now.return_value = datetime(2021, 9, 1, 10, 20, tzinfo=timezone.utc) + self.assertFalse(item.need_run()) + mock_now.return_value = datetime(2021, 9, 1, 10, 50, tzinfo=timezone.utc) + self.assertTrue(item.need_run()) + # Has been run + item.last_run_at = datetime(2021, 9, 1, 10, 10) + session.commit() + mock_now.return_value = datetime(2021, 9, 1, 10, 11, tzinfo=timezone.utc) + self.assertFalse(item.need_run()) + mock_now.return_value = datetime(2021, 9, 1, 10, 50, tzinfo=timezone.utc) + self.assertTrue(item.need_run()) + + def test_get_pipeline(self): + with db.session_scope() as session: + scheduler_item = session.query(SchedulerItem).first() + self.assertEqual(self.default_pipeline, scheduler_item.get_pipeline()) + + def test_set_pipeline(self): + with db.session_scope() as session: + scheduler_item = session.query(SchedulerItem).first() + pipeline = composer_pb2.Pipeline(name='test1') + scheduler_item.set_pipeline(pipeline) + self.assertEqual(pipeline, scheduler_item.get_pipeline()) + + def test_to_proto(self): + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + with db.session_scope() as session: + scheduler_item = session.query(SchedulerItem).first() + self.assertEqual( + scheduler_item.to_proto(), + composer_pb2.SchedulerItemPb(id=5, + name='test_item_off', + pipeline=self.default_pipeline, + status=ItemStatus.OFF.name, + cron_config='* * * * * 15', + last_run_at=int(created_at.timestamp()), + retry_cnt=0, + created_at=int(created_at.timestamp()), + updated_at=int(created_at.timestamp()))) + + +class SchedulerRunnerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.default_context = composer_pb2.PipelineContextData(current_runner=0) + self.default_pipeline = 
composer_pb2.Pipeline(version=2, + name='test pipeline', + queue=[ + composer_pb2.RunnerInput(runner_type='test type1'), + composer_pb2.RunnerInput(runner_type='test type2'), + ]) + self.default_output = composer_pb2.RunnerOutput(error_message='error1') + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + scheduler_runner = SchedulerRunner( + id=5, + item_id=1, + status=RunnerStatus.INIT.value, + start_at=created_at, + pipeline=to_json(self.default_pipeline), + output=to_json(self.default_output), + context=to_json(self.default_context), + created_at=created_at, + updated_at=created_at, + ) + with db.session_scope() as session: + session.add(scheduler_runner) + session.commit() + + def test_get_pipeline(self): + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + self.assertEqual(self.default_pipeline, scheduler_runner.get_pipeline()) + + def test_set_pipeline(self): + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + pipeline = composer_pb2.Pipeline(name='test1') + scheduler_runner.set_pipeline(pipeline) + self.assertEqual(pipeline, scheduler_runner.get_pipeline()) + + def test_get_context(self): + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + self.assertEqual(self.default_context, scheduler_runner.get_context()) + + def test_set_context(self): + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + context = composer_pb2.PipelineContextData(current_runner=1) + scheduler_runner.set_context(context) + self.assertEqual(context, scheduler_runner.get_context()) + + def test_get_output(self): + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + self.assertEqual(self.default_output, scheduler_runner.get_output()) + + def test_set_output(self): + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + output = composer_pb2.RunnerOutput(error_message='error2') + scheduler_runner.set_output(output) + self.assertEqual(output, scheduler_runner.get_output()) + + def test_to_proto(self): + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + with db.session_scope() as session: + scheduler_runner = session.query(SchedulerRunner).first() + self.assertEqual( + scheduler_runner.to_proto(), + composer_pb2.SchedulerRunnerPb(id=5, + item_id=1, + status=RunnerStatus.INIT.name, + start_at=int(created_at.timestamp()), + pipeline=self.default_pipeline, + output=self.default_output, + context=self.default_context, + created_at=int(created_at.timestamp()), + updated_at=int(created_at.timestamp()))) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/op_locker.py b/web_console_v2/api/fedlearner_webconsole/composer/op_locker.py index 8b0bdd404..1c4c09e77 100644 --- a/web_console_v2/api/fedlearner_webconsole/composer/op_locker.py +++ b/web_console_v2/api/fedlearner_webconsole/composer/op_locker.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -23,11 +23,13 @@ class OpLocker(object):
+
     def __init__(self, name: str, db_engine: Engine):
         """Optimistic Lock
 
         Args:
-            name: lock name should be unique in same thread
+            name (str): lock name, which should be unique within the same thread
+            db_engine (Engine): database engine
         """
         self._name = name
         self._version = 0
@@ -45,14 +47,12 @@ def version(self) -> int:
 
     def try_lock(self) -> 'OpLocker':
         with get_session(self.db_engine) as session:
             try:
-                lock = session.query(OptimisticLock).filter_by(
-                    name=self._name).first()
+                lock = session.query(OptimisticLock).filter_by(name=self._name).first()
                 if lock:
                     self._has_lock = True
                     self._version = lock.version
                     return self
-                new_lock = OptimisticLock(name=self._name,
-                                          version=self._version)
+                new_lock = OptimisticLock(name=self._name, version=self._version)
                 session.add(new_lock)
                 session.commit()
                 self._has_lock = True
@@ -67,16 +67,13 @@ def is_latest_version(self) -> bool:
         with get_session(self.db_engine) as session:
             try:
-                new_lock = session.query(OptimisticLock).filter_by(
-                    name=self._name).first()
+                new_lock = session.query(OptimisticLock).filter_by(name=self._name).first()
                 if not new_lock:
                     return False
-                logging.info(f'[op_locker] version, current: {self._version}, '
-                             f'new: {new_lock.version}')
+                logging.info(f'[op_locker] version, current: {self._version}, ' f'new: {new_lock.version}')
                 return self._version == new_lock.version
             except Exception as e:  # pylint: disable=broad-except
-                logging.error(
-                    f'failed to check lock is conflict, exception: {e}')
+                logging.error(f'failed to check lock conflict, exception: {e}')
                 return False
 
     def update_version(self) -> bool:
@@ -86,8 +83,7 @@ def update_version(self) -> bool:
         with get_session(self.db_engine) as session:
             try:
-                lock = session.query(OptimisticLock).filter_by(
-                    name=self._name).first()
+                lock = session.query(OptimisticLock).filter_by(name=self._name).first()
                 lock.version = self._version + 1
                 session.commit()
                 return True
diff --git a/web_console_v2/api/fedlearner_webconsole/composer/op_locker_test.py b/web_console_v2/api/fedlearner_webconsole/composer/op_locker_test.py
new file mode 100644
index 000000000..6680e8d45
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/composer/op_locker_test.py
@@ -0,0 +1,47 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
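+
+# The optimistic-lock flow exercised below, in brief (an illustrative
+# sketch; error handling omitted): read the version under a name, do the
+# work, and only commit when that version is still the latest:
+#
+#   lock = OpLocker('some_lock', db.engine).try_lock()
+#   if lock.is_latest_version() and lock.update_version():
+#       pass  # safe to persist the guarded update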
+ +# coding: utf-8 + +import logging +import sys +import unittest + +from fedlearner_webconsole.composer.models import OptimisticLock +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.op_locker import OpLocker +from testing.common import BaseTestCase + + +class OpLockTest(BaseTestCase): + + class Config(BaseTestCase.Config): + STORAGE_ROOT = '/tmp' + START_SCHEDULER = False + + def test_lock(self): + lock = OpLocker('test', db.engine).try_lock() + self.assertEqual(True, lock.is_latest_version(), 'should be latest version') + + # update database version + with db.session_scope() as session: + new_lock = session.query(OptimisticLock).filter_by(name=lock.name).first() + new_lock.version = new_lock.version + 1 + session.commit() + self.assertEqual(False, lock.is_latest_version(), 'should not be latest version') + + +if __name__ == '__main__': + logging.basicConfig(stream=sys.stderr, level=logging.INFO) + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/pipeline.py b/web_console_v2/api/fedlearner_webconsole/composer/pipeline.py new file mode 100644 index 000000000..a11cbe7cc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/pipeline.py @@ -0,0 +1,114 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
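+
+# How this executor is typically driven (a sketch; the real call site is the
+# composer loop): poll RUNNING runners and submit the current queue entry to
+# the thread reaper, e.g.:
+#
+#   executor = PipelineExecutor(thread_reaper, db.engine, runner_fns)
+#   executor.run(runner_id)  # False if not RUNNING, already running, or pool is full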
+# + +import logging +from concurrent.futures import Future +from typing import Dict + +from sqlalchemy import func +from sqlalchemy.engine import Engine + +from fedlearner_webconsole.composer.context import PipelineContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus, SchedulerRunner +from fedlearner_webconsole.composer.op_locker import OpLocker +from fedlearner_webconsole.composer.thread_reaper import ThreadReaper +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto import composer_pb2 + + +class PipelineExecutor(object): + + def __init__(self, thread_reaper: ThreadReaper, db_engine: Engine, runner_fns: Dict[str, IRunnerV2]): + self.thread_reaper = thread_reaper + self.db_engine = db_engine + self._runner_fns = runner_fns + self._running_workers = {} + + def run(self, runner_id: int) -> bool: + """Starts runner by submitting it to the thread reaper.""" + with db.session_scope() as session: + runner: SchedulerRunner = session.query(SchedulerRunner).get(runner_id) + pipeline: composer_pb2.Pipeline = runner.get_pipeline() + if runner.status not in [RunnerStatus.RUNNING.value]: + return False + if self.thread_reaper.is_running(runner_id) or self.thread_reaper.is_full(): + return False + pipeline_context = PipelineContext.build(pipeline=pipeline, data=runner.get_context()) + current_runner_context = pipeline_context.get_current_runner_context() + runner_fn = self._runner_fns[current_runner_context.input.runner_type]() + return self.thread_reaper.submit( + runner_id=runner_id, + fn=runner_fn, + context=current_runner_context, + done_callback=self._runner_done_callback, + ) + + def _runner_done_callback(self, runner_id: int, fu: Future): + """Callback when one runner finishes. 
+
+        The callback only updates the status; other workers in the pipeline will be
+        triggered by the executor in the next round of checks."""
+        with db.session_scope() as session:
+            runner = session.query(SchedulerRunner).get(runner_id)
+            pipeline = runner.get_pipeline()
+            if pipeline.version != 2:
+                return
+            pipeline_context = PipelineContext.build(pipeline=pipeline, data=runner.get_context())
+            current_runner_context = pipeline_context.get_current_runner_context()
+            output = None
+            try:
+                status, output = fu.result()
+                # Defensively confirming the status
+                if status == RunnerStatus.RUNNING:
+                    return
+                pipeline_context.data.outputs[current_runner_context.index].MergeFrom(output)
+                if status == RunnerStatus.DONE:
+                    if current_runner_context.index == len(pipeline.queue) - 1:
+                        # the whole pipeline is done
+                        runner.status = RunnerStatus.DONE.value
+                        runner.end_at = func.now()
+                    else:
+                        # mark to run next
+                        pipeline_context.run_next()
+                elif status == RunnerStatus.FAILED:
+                    runner.status = RunnerStatus.FAILED.value
+                    runner.end_at = func.now()
+            except Exception as e:  # pylint: disable=broad-except
+                logging.exception(f'[PipelineExecutor] failed to run {runner.id}')
+                runner.status = RunnerStatus.FAILED.value
+                runner.end_at = func.now()
+                pipeline_context.data.outputs[current_runner_context.index].error_message = str(e)
+            runner.set_context(pipeline_context.data)
+
+            logging.info(f'[pipeline-executor] update runner, status: {runner.status}, '
+                         f'pipeline: {runner.pipeline}, '
+                         f'output: {output}, context: {runner.context}')
+            # Retry 3 times
+            for _ in range(3):
+                try:
+                    lock_name = f'update_running_runner_{runner_id}_lock'
+                    lock = OpLocker(lock_name, self.db_engine).try_lock()
+                    if lock.is_latest_version():
+                        if lock.update_version():
+                            session.commit()
+                            break
+                    else:
+                        logging.error(f'[composer] {lock_name} is outdated, ignoring updates to the database')
+                except Exception as e:  # pylint: disable=broad-except
+                    logging.error(f'[composer] failed to update running runner status, exception: {e}')
+            else:
+                # Failed 3 times
+                session.rollback()
diff --git a/web_console_v2/api/fedlearner_webconsole/composer/pipeline_test.py b/web_console_v2/api/fedlearner_webconsole/composer/pipeline_test.py
new file mode 100644
index 000000000..33c75cde6
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/composer/pipeline_test.py
@@ -0,0 +1,146 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from datetime import datetime +from unittest.mock import patch + +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.composer.models import SchedulerRunner, RunnerStatus +from fedlearner_webconsole.composer.pipeline import PipelineExecutor +from fedlearner_webconsole.composer.thread_reaper import ThreadReaper +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto import composer_pb2 +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, PipelineContextData, RunnerOutput +from testing.composer.common import TestRunner +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.fake_time_patcher import FakeTimePatcher + +_RUNNER_FNS = { + ItemType.TASK.value: TestRunner, +} + + +class PipelineExecutorTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.thread_reaper = ThreadReaper(worker_num=1) + self.executor = PipelineExecutor(self.thread_reaper, db.engine, _RUNNER_FNS) + + self.time_patcher = FakeTimePatcher() + self.time_patcher.start(datetime(2012, 1, 14, 12, 0, 5)) + + def tearDown(self): + self.time_patcher.stop() + self.thread_reaper.stop(wait=True) + super().tearDown() + + def test_run_completed(self): + runner = SchedulerRunner(item_id=123, status=RunnerStatus.RUNNING.value) + runner.set_pipeline( + composer_pb2.Pipeline(version=2, name='test pipeline', + queue=[RunnerInput(runner_type=ItemType.TASK.value)])) + with db.session_scope() as session: + session.add(runner) + session.commit() + self.executor.run(runner.id) + self.time_patcher.interrupt(60) + with db.session_scope() as session: + runner = session.query(SchedulerRunner).get(runner.id) + self.assertEqual(runner.status, RunnerStatus.DONE.value) + self.assertEqual(runner.get_context(), PipelineContextData(current_runner=0, outputs={0: RunnerOutput()})) + + def test_run_failed(self): + runner = SchedulerRunner(item_id=123, status=RunnerStatus.RUNNING.value) + runner.set_pipeline( + composer_pb2.Pipeline( + version=2, + name='test failed pipeline', + queue=[ + RunnerInput(runner_type=ItemType.TASK.value), + RunnerInput(runner_type=ItemType.TASK.value), + RunnerInput(runner_type=ItemType.TASK.value), + # Failed one + RunnerInput(runner_type=ItemType.TASK.value), + RunnerInput(runner_type=ItemType.TASK.value), + ])) + runner.set_context(composer_pb2.PipelineContextData(current_runner=3)) + with db.session_scope() as session: + session.add(runner) + session.commit() + self.executor.run(runner.id) + self.time_patcher.interrupt(60) + with db.session_scope() as session: + runner = session.query(SchedulerRunner).get(runner.id) + self.assertEqual(runner.status, RunnerStatus.FAILED.value) + self.assertEqual( + runner.get_context(), + PipelineContextData(current_runner=3, outputs={3: RunnerOutput(error_message='index is 3')})) + + @patch('testing.composer.common.TestRunner.run') + def test_run_exception(self, mock_run): + + def fake_run(*args, **kwargs): + raise RuntimeError('fake exception') + + mock_run.side_effect = fake_run + + runner = SchedulerRunner(item_id=666, status=RunnerStatus.RUNNING.value) + runner.set_pipeline( + composer_pb2.Pipeline( + version=2, + name='test failed pipeline', + queue=[ + # Exception one + RunnerInput(runner_type=ItemType.TASK.value), + ])) + runner.set_context(composer_pb2.PipelineContextData(current_runner=0)) + with db.session_scope() as session: + session.add(runner) + session.commit() + self.executor.run(runner.id) + self.time_patcher.interrupt(60) + with 
db.session_scope() as session: + runner = session.query(SchedulerRunner).get(runner.id) + self.assertEqual(runner.status, RunnerStatus.FAILED.value) + self.assertEqual( + runner.get_context(), + PipelineContextData(current_runner=0, outputs={0: RunnerOutput(error_message='fake exception')})) + + def test_run_second_runner(self): + runner = SchedulerRunner(item_id=123, status=RunnerStatus.RUNNING.value) + runner.set_pipeline( + composer_pb2.Pipeline(version=2, + name='test running pipeline', + queue=[ + RunnerInput(runner_type=ItemType.TASK.value), + RunnerInput(runner_type=ItemType.TASK.value), + RunnerInput(runner_type=ItemType.TASK.value), + ])) + runner.set_context(composer_pb2.PipelineContextData(current_runner=1)) + with db.session_scope() as session: + session.add(runner) + session.commit() + self.executor.run(runner.id) + self.time_patcher.interrupt(60) + with db.session_scope() as session: + runner = session.query(SchedulerRunner).get(runner.id) + self.assertEqual(runner.status, RunnerStatus.RUNNING.value) + self.assertEqual(runner.get_context(), PipelineContextData(current_runner=2, outputs={1: RunnerOutput()})) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/runner.py b/web_console_v2/api/fedlearner_webconsole/composer/runner.py index 46d87ec20..2807e7ab3 100644 --- a/web_console_v2/api/fedlearner_webconsole/composer/runner.py +++ b/web_console_v2/api/fedlearner_webconsole/composer/runner.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,78 +13,45 @@ # limitations under the License. 
# coding: utf-8 -import datetime import logging -import random import sys -import time -from typing import Tuple -from fedlearner_webconsole.composer.interface import IItem, IRunner, ItemType -from fedlearner_webconsole.composer.models import Context, RunnerStatus, \ - SchedulerRunner -from fedlearner_webconsole.dataset.data_pipeline import DataPipelineRunner -from fedlearner_webconsole.db import get_session +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.dataset.batch_stats import BatchStatsRunner +from fedlearner_webconsole.dataset.scheduler.dataset_long_period_scheduler import DatasetLongPeriodScheduler +from fedlearner_webconsole.dataset.scheduler.dataset_short_period_scheduler import DatasetShortPeriodScheduler +from fedlearner_webconsole.job.scheduler import JobScheduler +from fedlearner_webconsole.project.project_scheduler import ScheduleProjectRunner +from fedlearner_webconsole.serving.runners import ModelSignatureParser, QueryParticipantStatusRunner, UpdateModelRunner from fedlearner_webconsole.workflow.cronjob import WorkflowCronJob - - -class MemoryItem(IItem): - def __init__(self, task_id: int): - self.id = task_id - - def type(self) -> ItemType: - return ItemType.MEMORY - - def get_id(self) -> int: - return self.id - - -class MemoryRunner(IRunner): - def __init__(self, task_id: int): - """Runner Example - - Args: - task_id: required - """ - self.task_id = task_id - self._start_at = None - - def start(self, context: Context): - # NOTE: in this method, context.data can only be getter, - # don't modify context - data = context.data.get(str(self.task_id), 'EMPTY') - logging.info(f'[memory_runner] {self.task_id} started, data: {data}') - self._start_at = datetime.datetime.utcnow() - - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - time.sleep(2) - now = datetime.datetime.utcnow() - timeout = random.randint(0, 10) - # mock timeout - if self._start_at is not None and self._start_at + datetime.timedelta( - seconds=timeout) < now: - # kill runner - logging.info(f'[memory_runner] {self.task_id} is timeout, ' - f'start at: {self._start_at}') - return RunnerStatus.FAILED, {} - - # use `get_session` to query database - with get_session(context.db_engine) as session: - count = session.query(SchedulerRunner).count() - # write data to context - context.set_data(f'is_done_{self.task_id}', { - 'status': 'OK', - 'count': count - }) - return RunnerStatus.DONE, {} +from fedlearner_webconsole.workflow.workflow_scheduler import ScheduleWorkflowRunner +from fedlearner_webconsole.cleanup.cleaner_cronjob import CleanupCronJob +from fedlearner_webconsole.mmgr.cronjob import ModelTrainingCronJob +from fedlearner_webconsole.mmgr.scheduler import ModelJobSchedulerRunner, ModelJobGroupSchedulerRunner, \ + ModelJobGroupLongPeriodScheduler +from fedlearner_webconsole.tee.runners import TeeCreateRunner, TeeResourceCheckRunner def global_runner_fn(): # register runner_fn runner_fn = { - ItemType.MEMORY.value: MemoryRunner, ItemType.WORKFLOW_CRON_JOB.value: WorkflowCronJob, - ItemType.DATA_PIPELINE.value: DataPipelineRunner, + ItemType.BATCH_STATS.value: BatchStatsRunner, + ItemType.SERVING_SERVICE_PARSE_SIGNATURE.value: ModelSignatureParser, + ItemType.SERVING_SERVICE_QUERY_PARTICIPANT_STATUS.value: QueryParticipantStatusRunner, + ItemType.SERVING_SERVICE_UPDATE_MODEL.value: UpdateModelRunner, + ItemType.SCHEDULE_WORKFLOW.value: ScheduleWorkflowRunner, + ItemType.SCHEDULE_JOB.value: JobScheduler, + ItemType.CLEANUP_CRON_JOB.value: CleanupCronJob, + 
ItemType.MODEL_TRAINING_CRON_JOB.value: ModelTrainingCronJob, + ItemType.TEE_CREATE_RUNNER.value: TeeCreateRunner, + ItemType.TEE_RESOURCE_CHECK_RUNNER.value: TeeResourceCheckRunner, + ItemType.SCHEDULE_PROJECT.value: ScheduleProjectRunner, + ItemType.DATASET_LONG_PERIOD_SCHEDULER.value: DatasetLongPeriodScheduler, + ItemType.DATASET_SHORT_PERIOD_SCHEDULER.value: DatasetShortPeriodScheduler, + ItemType.SCHEDULE_MODEL_JOB.value: ModelJobSchedulerRunner, + ItemType.SCHEDULE_MODEL_JOB_GROUP.value: ModelJobGroupSchedulerRunner, + ItemType.SCHEDULE_LONG_PERIOD_MODEL_JOB_GROUP.value: ModelJobGroupLongPeriodScheduler, } for item in ItemType: if item.value in runner_fn or item == ItemType.TASK: diff --git a/web_console_v2/api/fedlearner_webconsole/composer/runner_cache.py b/web_console_v2/api/fedlearner_webconsole/composer/runner_cache.py deleted file mode 100644 index bd93e8bac..000000000 --- a/web_console_v2/api/fedlearner_webconsole/composer/runner_cache.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import logging -import threading - -from fedlearner_webconsole.composer.interface import IRunner - - -class RunnerCache(object): - def __init__(self, runner_fn: dict): - self._lock = threading.Lock() - self._cache = {} # id:name => obj - self.runner_fn = runner_fn - - def find_runner(self, runner_id: int, runner_name: str) -> IRunner: - """Find runner - - Args: - runner_id: id in runner table - runner_name: {item_type}_{item_id} - """ - with self._lock: - key = self.cache_key(runner_id, runner_name) - obj = self._cache.get(key, None) - if obj: - return obj - item_type, item_id = runner_name.rsplit('_', 1) - if item_type not in self.runner_fn: - logging.error( - f'failed to find item_type {item_type} in runner_fn, ' - f'please register it in global_runner_fn') - raise ValueError(f'unknown item_type {item_type} in runner') - obj = self.runner_fn[item_type](int(item_id)) - self._cache[key] = obj - return obj - - def del_runner(self, runner_id: int, runner_name: str): - """Delete runner - - Args: - runner_id: id in runner table - runner_name: {item_type}_{item_id} - """ - with self._lock: - key = self.cache_key(runner_id, runner_name) - del self._cache[key] - - @staticmethod - def cache_key(runner_id: int, runner_name: str) -> str: - return f'{runner_id}:{runner_name}' - - @property - def data(self) -> dict: - with self._lock: - return self._cache diff --git a/web_console_v2/api/fedlearner_webconsole/composer/strategy.py b/web_console_v2/api/fedlearner_webconsole/composer/strategy.py new file mode 100644 index 000000000..87173a36d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/composer/strategy.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from abc import ABC, abstractmethod
+
+from sqlalchemy import func
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.composer.models import SchedulerItem, SchedulerRunner, RunnerStatus
+
+
+class RunnerStrategy(ABC):
+
+    def __init__(self, session: Session):
+        self.session = session
+
+    @abstractmethod
+    def should_run(self, item: SchedulerItem) -> bool:
+        """Checks if the scheduler item should run or not."""
+
+
+class SingletonStrategy(RunnerStrategy):
+    """A strategy to make sure there is only one ongoing runner instance for a scheduler item.
+
+    1. For normal scheduler items, it behaves the same as the default strategy.
+    2. For cron jobs, there will be at most one ongoing scheduler runner per item.
+    """
+
+    def should_run(self, item: SchedulerItem) -> bool:
+        if not item.need_run():
+            return False
+
+        if item.cron_config:
+            ongoing_count = self.session.query(func.count(SchedulerRunner.id)).filter(
+                SchedulerRunner.item_id == item.id,
+                SchedulerRunner.status.in_([RunnerStatus.INIT.value, RunnerStatus.RUNNING.value])).scalar()
+            if ongoing_count > 0:
+                return False
+
+        return True
diff --git a/web_console_v2/api/fedlearner_webconsole/composer/strategy_test.py b/web_console_v2/api/fedlearner_webconsole/composer/strategy_test.py
new file mode 100644
index 000000000..03f65a786
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/composer/strategy_test.py
@@ -0,0 +1,78 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
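+
+# How a scheduling loop might consult the strategy (an illustrative sketch;
+# the surrounding loop is assumed, not part of this module):
+#
+#   strategy = SingletonStrategy(session)
+#   items = session.query(SchedulerItem).filter_by(status=ItemStatus.ON.value)
+#   for item in items:
+#       if strategy.should_run(item):
+#           ...  # create a SchedulerRunner for this item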
+# + +import unittest +from datetime import datetime +from unittest.mock import patch, Mock + +from fedlearner_webconsole.composer.models import SchedulerItem, ItemStatus, RunnerStatus, SchedulerRunner +from fedlearner_webconsole.composer.strategy import SingletonStrategy +from fedlearner_webconsole.db import db +from testing.no_web_server_test_case import NoWebServerTestCase + + +class SingletonStrategyTest(NoWebServerTestCase): + + @patch.object(SchedulerItem, 'need_run') + def test_should_run_normal_item(self, mock_need_run: Mock): + with db.session_scope() as session: + item = SchedulerItem(id=123, + name='test normal item', + status=ItemStatus.ON.value, + created_at=datetime(2021, 9, 1, 10, 10)) + strategy = SingletonStrategy(session) + mock_need_run.return_value = True + self.assertTrue(strategy.should_run(item)) + # No need to run + mock_need_run.return_value = False + self.assertFalse(strategy.should_run(item)) + + @patch.object(SchedulerItem, 'need_run') + def test_should_run_cron_item(self, mock_need_run: Mock): + item_id = 123123 + runner_id = 7644 + with db.session_scope() as session: + item = SchedulerItem( + id=item_id, + name='test cron item', + # Runs every 30 minutes + cron_config='*/30 * * * *', + created_at=datetime(2022, 1, 1, 10, 0)) + session.add(item) + session.commit() + with db.session_scope() as session: + strategy = SingletonStrategy(session) + mock_need_run.return_value = False + self.assertFalse(strategy.should_run(item)) + mock_need_run.return_value = True + self.assertTrue(strategy.should_run(item)) + runner = SchedulerRunner(id=runner_id, item_id=item_id, status=RunnerStatus.RUNNING.value) + session.add(runner) + session.commit() + with db.session_scope() as session: + # Already one running runner, so no new one will be generated. + item = session.query(SchedulerItem).get(item_id) + self.assertFalse(strategy.should_run(item)) + runner = session.query(SchedulerRunner).get(runner_id) + runner.status = RunnerStatus.DONE.value + session.commit() + with db.session_scope() as session: + # All runners are done, so a new one will be there. + item = session.query(SchedulerItem).get(item_id) + self.assertTrue(strategy.should_run(item)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper.py b/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper.py index e63a1ae69..b97510228 100644 --- a/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper.py +++ b/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,12 +18,14 @@
 import threading
 from concurrent.futures import Future
 from concurrent.futures.thread import ThreadPoolExecutor
+from typing import Callable, Optional
 
-from fedlearner_webconsole.composer.models import Context
-from fedlearner_webconsole.composer.interface import IRunner
+from fedlearner_webconsole.composer.context import RunnerContext
+from fedlearner_webconsole.composer.interface import IRunnerV2
 
 
 class ThreadReaper(object):
+
     def __init__(self, worker_num: int):
         """ThreadPool with battery
 
@@ -33,26 +35,51 @@ def __init__(self, worker_num: int):
         self.lock = threading.RLock()
         self.worker_num = worker_num
         self.running_worker_num = 0
+        self._running_workers = {}
         self._thread_pool = ThreadPoolExecutor(max_workers=worker_num)
 
-    def enqueue(self, name: str, fn: IRunner, context: Context) -> bool:
+    def is_running(self, runner_id: int) -> bool:
+        with self.lock:
+            return runner_id in self._running_workers
+
+    def submit(self,
+               runner_id: int,
+               fn: IRunnerV2,
+               context: RunnerContext,
+               done_callback: Optional[Callable[[int, Future], None]] = None) -> bool:
         if self.is_full():
             return False
-        logging.info(f'[thread_reaper] enqueue {name}')
+
+        def full_done_callback(fu: Future):
+            # The order matters, as we need to update the status at the end.
+            if done_callback:
+                done_callback(runner_id, fu)
+            self._track_status(runner_id, fu)
+
+        logging.info(f'[thread_reaper] enqueue {runner_id}')
         with self.lock:
+            if runner_id in self._running_workers:
+                logging.warning(f'[thread_reaper] {runner_id} already enqueued')
+                return False
             self.running_worker_num += 1
-            fu = self._thread_pool.submit(fn.start, context=context)
-            fu.add_done_callback(self._track_status)
+            self._running_workers[runner_id] = fn
+            fu = self._thread_pool.submit(fn.run, context=context)
+            fu.add_done_callback(full_done_callback)
         return True
 
-    def _track_status(self, fu: Future):
+    def _track_status(self, runner_id: int, fu: Future):
         with self.lock:
             self.running_worker_num -= 1
-            logging.info(f'this job is done, result: {fu.result()}')
+            # Safely removing
+            self._running_workers.pop(runner_id, None)
+            try:
+                logging.info(f'------Job {runner_id} is done------')
+                logging.info(f'result: {fu.result()}')
+            except Exception as e:  # pylint: disable=broad-except
+                logging.info(f'error: {str(e)}')
             if self.running_worker_num < 0:
-                logging.error(
-                    f'[thread_reaper] something wrong, should be non-negative, '
-                    f'val: f{self.running_worker_num}')
+                logging.error(f'[thread_reaper] something wrong, should be non-negative, '
+                              f'val: {self.running_worker_num}')
 
     def is_full(self) -> bool:
         with self.lock:
diff --git a/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper_test.py b/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper_test.py
new file mode 100644
index 000000000..7d2126a7c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/composer/thread_reaper_test.py
@@ -0,0 +1,106 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
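+
+# The submit contract exercised below, in brief (illustrative): runners are
+# keyed by runner_id, duplicate submissions are rejected, and done_callback
+# fires before the pool slot is released:
+#
+#   reaper = ThreadReaper(worker_num=1)
+#   reaper.submit(runner_id=1,
+#                 fn=TestRunner(),
+#                 context=RunnerContext(0, RunnerInput()),
+#                 done_callback=lambda rid, fu: logging.info(fu.result()))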
+ +# coding: utf-8 + +import logging +from concurrent.futures import Future + +import sys +import unittest + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.thread_reaper import ThreadReaper +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput +from testing.composer.common import TestRunner + +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.fake_time_patcher import FakeTimePatcher + + +class ThreadReaperTest(NoWebServerTestCase): + + class Config(NoWebServerTestCase.Config): + STORAGE_ROOT = '/tmp' + START_SCHEDULER = False + + def setUp(self): + super().setUp() + self.fake_time_patcher = FakeTimePatcher() + self.fake_time_patcher.start() + + def tearDown(self): + self.fake_time_patcher.stop() + return super().tearDown() + + def test_submit(self): + thread_reaper = ThreadReaper(worker_num=2) + runner = TestRunner() + submitted = thread_reaper.submit( + runner_id=123, + fn=runner, + context=RunnerContext(0, RunnerInput()), + ) + self.assertTrue(submitted) + self.assertTrue(thread_reaper.is_running(123)) + self.assertFalse(thread_reaper.is_full()) + # Submit again + submitted = thread_reaper.submit( + runner_id=123, + fn=runner, + context=RunnerContext(0, RunnerInput()), + ) + self.assertFalse(submitted) + self.assertFalse(thread_reaper.is_full()) + submitted = thread_reaper.submit( + runner_id=3333, + fn=runner, + context=RunnerContext(1, RunnerInput()), + ) + self.assertTrue(submitted) + self.assertTrue(thread_reaper.is_full()) + self.fake_time_patcher.interrupt(5) + self.assertFalse(thread_reaper.is_running(123)) + self.assertFalse(thread_reaper.is_full()) + thread_reaper.stop(wait=True) + + def test_submit_with_exception(self): + thread_reaper = ThreadReaper(worker_num=1) + error = None + runner_id = None + + def done_callback(rid: int, fu: Future): + nonlocal error, runner_id + try: + runner_id = rid + fu.result() + except RuntimeError as e: + error = str(e) + + runner = TestRunner(with_exception=True) + thread_reaper.submit(runner_id=123, + fn=runner, + context=RunnerContext(1, RunnerInput()), + done_callback=done_callback) + + self.fake_time_patcher.interrupt(5) + self.assertEqual(runner.context.index, 1) + self.assertEqual(runner_id, 123) + self.assertEqual(error, 'fake error') + thread_reaper.stop(wait=True) + + +if __name__ == '__main__': + logging.basicConfig(stream=sys.stderr, level=logging.INFO) + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/dataset/BUILD.bazel new file mode 100644 index 000000000..f3f575f95 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/BUILD.bazel @@ -0,0 +1,446 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "batch_stats_lib", + srcs = [ + "batch_stats.py", + ], + imports = ["../.."], + deps = [ + ":data_path_lib", + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:hooks_lib", + ], +) + +py_test( + name = "batch_stats_lib_test", + size = "small", + srcs = [ + "batch_stats_test.py", + ], + imports = ["../.."], + main = "batch_stats_test.py", + deps = [ + ":batch_stats_lib", + 
"//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "controllers_lib", + srcs = [ + "controllers.py", + ], + imports = ["../.."], + deps = [ + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:resource_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:transaction_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "controllers_lib_test", + size = "small", + srcs = [ + "controllers_test.py", + ], + imports = ["../.."], + main = "controllers_test.py", + deps = [ + ":controllers_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_library( + name = "delete_dependency_lib", + srcs = ["delete_dependency.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + ], +) + +py_test( + name = "delete_dependency_lib_test", + size = "small", + srcs = [ + "delete_dependency_test.py", + ], + imports = ["../.."], + main = "delete_dependency_test.py", + deps = [ + ":delete_dependency_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "common_lib", + srcs = [ + "consts.py", + "dataset_directory.py", + "meta_data.py", + "util.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "@common_python_dateutil//:pkg", + "@common_python_slugify//:pkg", + ], +) + +py_test( + name = "dataset_directory_test", + size = "small", + srcs = [ + "dataset_directory_test.py", + ], + imports = ["../.."], + main = "dataset_directory_test.py", + deps = [ + ":common_lib", + ], +) + +py_test( + name = "meta_data_test", + size = "small", + srcs = [ + "meta_data_test.py", + ], + 
data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "meta_data_test.py", + deps = [ + ":common_lib", + "//web_console_v2/api:envs_lib", + ], +) + +py_test( + name = "util_test", + size = "small", + srcs = [ + "util_test.py", + ], + imports = ["../.."], + main = "util_test.py", + deps = [ + ":common_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + visibility = ["//visibility:public"], + deps = [ + "common_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "medium", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "metrics_lib", + srcs = [ + "metrics.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + ], +) + +py_test( + name = "metrics_lib_test", + size = "small", + srcs = [ + "metrics_test.py", + ], + imports = ["../.."], + main = "metrics_test.py", + deps = [ + ":metrics_lib", + ], +) + +py_library( + name = "services_lib", + srcs = [ + "auth_service.py", + "services.py", + ], + imports = ["../.."], + deps = [ + ":common_lib", + ":delete_dependency_lib", + ":filter_funcs_lib", + ":metrics_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/cleanup:models_lib", + "//web_console_v2/api/fedlearner_webconsole/cleanup:services_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "services_lib_test", + size = "medium", + srcs = [ + "services_test.py", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + ], +) + +py_test( + name = "auth_service_test", + size = "small", + srcs = [ + "auth_service_test.py", + ], + imports = ["../.."], + main = "auth_service_test.py", + deps = [ + 
":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":common_lib", + ":controllers_lib", + ":filter_funcs_lib", + ":local_controllers_lib", + ":models_lib", + ":services_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:resource_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:sorting_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "large", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_library( + name = "local_controllers_lib", + srcs = ["local_controllers.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + 
"//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "local_controllers_lib_test", + size = "medium", + srcs = [ + "local_controllers_test.py", + ], + imports = ["../.."], + main = "local_controllers_test.py", + deps = [ + ":local_controllers_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "data_path_lib", + srcs = ["data_path.py"], + imports = ["../.."], + deps = [ + ":common_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + ], +) + +py_test( + name = "data_path_lib_test", + size = "small", + srcs = [ + "data_path_test.py", + ], + imports = ["../.."], + main = "data_path_test.py", + deps = [ + ":data_path_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "filter_funcs_lib", + srcs = ["filter_funcs.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "filter_funcs_lib_test", + size = "small", + srcs = ["filter_funcs_test.py"], + imports = ["../.."], + main = "filter_funcs_test.py", + deps = [ + ":filter_funcs_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/apis.py b/web_console_v2/api/fedlearner_webconsole/dataset/apis.py index 865f41a26..f195b2f50 100644 --- a/web_console_v2/api/fedlearner_webconsole/dataset/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/dataset/apis.py @@ -1,236 +1,2242 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+#
-# coding: utf-8
 # pylint: disable=raise-missing-from
-import os
+from datetime import timedelta
+import logging
+from typing import Any, Dict, Optional, List
+from urllib.parse import urlparse
-from datetime import datetime, timezone
 from http import HTTPStatus
+from flask_restful import Resource, Api
+from webargs.flaskparser import use_kwargs, use_args
+from marshmallow.exceptions import ValidationError
+from marshmallow import post_load, validate, fields
+from marshmallow.schema import Schema
+from google.protobuf.json_format import ParseDict, ParseError
+from envs import Envs
-from flask import current_app, request
-from flask_restful import Resource, Api, reqparse
-from slugify import slugify
-
-from fedlearner_webconsole.dataset.models import (Dataset, DatasetType,
-                                                  BatchState, DataBatch)
-from fedlearner_webconsole.dataset.services import DatasetService
-from fedlearner_webconsole.exceptions import (InvalidArgumentException,
-                                              NotFoundException)
-from fedlearner_webconsole.db import db_handler as db
-from fedlearner_webconsole.proto import dataset_pb2
-from fedlearner_webconsole.scheduler.scheduler import scheduler
-from fedlearner_webconsole.utils.decorators import jwt_required
+from fedlearner_webconsole.audit.decorators import emits_event
+from fedlearner_webconsole.composer.composer_service import ComposerService
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.dataset.controllers import DatasetJobController
+from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger
+from fedlearner_webconsole.dataset.job_configer.base_configer import set_variable_value_to_job_config
+from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController
+from fedlearner_webconsole.dataset.models import (DataBatch, DataSource, DataSourceType, Dataset,
+                                                  DatasetJobSchedulerState, ResourceState, DatasetJob, DatasetJobKind,
+                                                  DatasetJobStage, DatasetJobState, ImportType, StoreFormat,
+                                                  DatasetType, DatasetSchemaChecker, DatasetKindV2, DatasetFormat)
+from fedlearner_webconsole.dataset.services import (DatasetJobService, DatasetService, DataSourceService,
+                                                    DatasetJobStageService)
+from fedlearner_webconsole.dataset.util import get_export_dataset_name, add_default_url_scheme, is_streaming_folder, \
+    CronInterval
+from fedlearner_webconsole.dataset.auth_service import AuthService
+from fedlearner_webconsole.dataset.filter_funcs import dataset_auth_status_filter_op_in, dataset_format_filter_op_in, \
+    dataset_format_filter_op_equal, dataset_publish_frontend_filter_op_equal
+from fedlearner_webconsole.exceptions import InvalidArgumentException, MethodNotAllowedException, NoAccessException, \
+    NotFoundException
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs, TimeRange
+from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp
+from fedlearner_webconsole.proto.review_pb2 import TicketDetails, TicketType
+from fedlearner_webconsole.review.ticket_helper import get_ticket_helper
+from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient
+from fedlearner_webconsole.rpc.v2.resource_service_client import ResourceServiceClient
+from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus
+from fedlearner_webconsole.utils.decorators.pp_flask import admin_required, input_validator
+from fedlearner_webconsole.utils.domain_name import get_pure_domain_name
+from fedlearner_webconsole.auth.third_party_sso import credentials_required
 from fedlearner_webconsole.utils.file_manager import FileManager
+from fedlearner_webconsole.utils import filtering, sorting
+from fedlearner_webconsole.utils.flask_utils import FilterExpField, make_flask_response
+from fedlearner_webconsole.proto import dataset_pb2
+from fedlearner_webconsole.swagger.models import schema_manager
+from fedlearner_webconsole.participant.services import ParticipantService
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.rpc.client import RpcClient
+from fedlearner_webconsole.utils.paginate import paginate
+from fedlearner_webconsole.utils.file_tree import FileTreeBuilder
+from fedlearner_webconsole.workflow.models import WorkflowExternalState
+from fedlearner_webconsole.participant.models import Participant
+from fedlearner_webconsole.flag.models import Flag
+
+
+_DEFAULT_DATA_SOURCE_PREVIEW_FILE_NUM = 3
+
+
+def _path_authority_validator(path: str):
+    """Validate data_source path
+    this func is used to forbid access to the local filesystem
+    1. if path is not nfs, pass
+    2. if path is nfs and belongs to STORAGE_ROOT, pass
+    3. if path is nfs but doesn't belong to STORAGE_ROOT, raise ValidationError
+    """
+    path = path.strip()
+    authority_path = add_default_url_scheme(Envs.STORAGE_ROOT)
+    if not authority_path.endswith('/'):
+        authority_path += '/'
+    validate_path = add_default_url_scheme(path)
+    if _parse_data_source_url(validate_path).type != DataSourceType.FILE.value:
+        return
+    if not validate_path.startswith(authority_path):
+        raise ValidationError(f'no access to unauthorized path {validate_path}!')
+
+
+def _export_path_validator(path: str):
+    path = path.strip()
+    if len(path) == 0:
+        raise ValidationError('export path is empty!')
+    fm = FileManager()
+    if not fm.can_handle(path):
+        raise ValidationError('cannot handle export path!')
+    if not fm.isdir(path):
+        raise ValidationError('export path does not exist!')
+    _path_authority_validator(path)
+
+
+def _parse_data_source_url(data_source_url: str) -> dataset_pb2.DataSource:
+    data_source_url = data_source_url.strip()
+    data_source_url = add_default_url_scheme(data_source_url)
+    url_parser = urlparse(data_source_url)
+    data_source_type = url_parser.scheme
+    # source_type must be in DataSourceType
+    if data_source_type not in [o.value for o in DataSourceType]:
+        raise ValidationError(f'{data_source_type} is not a supported data_source type')
+    return dataset_pb2.DataSource(
+        type=data_source_type,
+        url=data_source_url,
+        is_user_upload=False,
+        is_user_export=False,
+    )
+
+
+def _validate_data_source(data_source_url: str, dataset_type: DatasetType):
+    fm = FileManager()
+    if not fm.can_handle(path=data_source_url):
+        raise InvalidArgumentException(f'invalid data_source_url: {data_source_url}')
+    if not fm.isdir(path=data_source_url):
+        raise InvalidArgumentException(f'cannot connect to data_source_url: {data_source_url}')
+    if dataset_type == DatasetType.STREAMING:
+        res, message = is_streaming_folder(data_source_url)
+        if not res:
+            raise InvalidArgumentException(message)
+
-_FORMAT_ERROR_MESSAGE = '{} is empty'
+class DatasetJobConfigParameter(Schema):
+    dataset_uuid = fields.Str(required=False)
+    dataset_id = fields.Integer(required=False)
+    variables = fields.List(fields.Dict())
+
+    @post_load
+    def make_dataset_job_config(self, item:
Dict[str, Any], **kwargs) -> dataset_pb2.DatasetJobConfig: + del kwargs # this variable is not needed for now -def _get_dataset_path(dataset_name): - root_dir = current_app.config.get('STORAGE_ROOT') - prefix = datetime.now().strftime('%Y%m%d_%H%M%S') - # Builds a path for dataset according to the dataset name - # Example: '/data/dataset/20210305_173312_test-dataset - return f'{root_dir}/dataset/{prefix}_{slugify(dataset_name)[:32]}' + try: + dataset_job_config = dataset_pb2.DatasetJobConfig() + return ParseDict(item, dataset_job_config) + except ParseError as err: + raise ValidationError(message='failed to convert dataset_job_config', + field_name='global_configs', + data=err.args) + + +class DatasetJobParameter(Schema): + global_configs = fields.Dict(required=True, keys=fields.Str(), values=fields.Nested(DatasetJobConfigParameter())) + dataset_job_kind = fields.Str(required=False, + validate=validate.OneOf([o.value for o in DatasetJobKind]), + load_default='') + + @post_load + def make_dataset_job(self, item: Dict[str, Any], **kwargs) -> dataset_pb2.DatasetJob: + del kwargs # this variable is not needed for now + + global_configs = item['global_configs'] + global_configs_pb = DatasetJobGlobalConfigs() + for domain_name, job_config in global_configs.items(): + global_configs_pb.global_configs[get_pure_domain_name(domain_name)].MergeFrom(job_config) + + return dataset_pb2.DatasetJob(kind=item['dataset_job_kind'], global_configs=global_configs_pb) + + +class DatasetJobVariablesParameter(Schema): + variables = fields.List(fields.Dict()) + + @post_load + def make_dataset_job_config(self, item: Dict[str, Any], **kwargs) -> dataset_pb2.DatasetJobConfig: + del kwargs # this variable is not needed for now + + try: + dataset_job_config = dataset_pb2.DatasetJobConfig() + return ParseDict(item, dataset_job_config) + except ParseError as err: + raise ValidationError(message='failed to convert dataset_job_config', + field_name='dataset_job_config', + data=err.args) class DatasetApi(Resource): - @jwt_required() - def get(self, dataset_id): + + @credentials_required + def get(self, dataset_id: int): + """Get dataset details + --- + tags: + - dataset + description: get details of dataset + parameters: + - in: path + required: true + name: dataset_id + schema: + type: integer + responses: + 200: + description: get details of dataset + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ with db.session_scope() as session: - dataset = session.query(Dataset).get(dataset_id) - if dataset is None: - raise NotFoundException( - f'Failed to find dataset: {dataset_id}') - return {'data': dataset.to_dict()} - - @jwt_required() - def patch(self, dataset_id: int): - parser = reqparse.RequestParser() - parser.add_argument('name', - type=str, - required=False, - help='dataset name') - parser.add_argument('comment', - type=str, - required=False, - help='dataset comment') - parser.add_argument('comment') - data = parser.parse_args() + dataset = DatasetService(session).get_dataset(dataset_id) + # TODO(liuhehan): this commit is a lazy update of dataset store_format, remove it after release 2.4 + session.commit() + return make_flask_response(dataset) + + @input_validator + @credentials_required + @emits_event() + @use_kwargs({'comment': fields.Str(required=False, load_default=None)}) + def patch(self, dataset_id: int, comment: Optional[str]): + """Change dataset info + --- + tags: + - dataset + description: change dataset info + parameters: + - in: path + required: true + 
name: dataset_id + schema: + type: integer + requestBody: + required: false + content: + application/json: + schema: + type: object + properties: + comment: + type: string + responses: + 200: + description: change dataset info + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ with db.session_scope() as session: dataset = session.query(Dataset).filter_by(id=dataset_id).first() if not dataset: - raise NotFoundException( - f'Failed to find dataset: {dataset_id}') - if data['name']: - dataset.name = data['name'] - if data['comment']: - dataset.comment = data['comment'] + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + if comment: + dataset.comment = comment session.commit() - return {'data': dataset.to_dict()}, HTTPStatus.OK + return make_flask_response(dataset.to_proto()) + + @credentials_required + @emits_event() + def delete(self, dataset_id: int): + """Delete dataset + --- + tags: + - dataset + description: delete dataset + parameters: + - in: path + required: true + name: dataset_id + schema: + type: integer + responses: + 204: + description: deleted dataset result + """ + with db.session_scope() as session: + # added an exclusive lock to this row + # ensure the state is modified correctly in a concurrency scenario. + dataset = session.query(Dataset).with_for_update().populate_existing().get(dataset_id) + if not dataset: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + DatasetService(session).cleanup_dataset(dataset) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) class DatasetPreviewApi(Resource): - def get(self, dataset_id: int): + + @credentials_required + @use_kwargs({ + 'batch_id': fields.Integer(required=True), + }, location='query') + def get(self, dataset_id: int, batch_id: int): + """Get dataset preview + --- + tags: + - dataset + description: get dataset preview + parameters: + - in: path + required: true + name: dataset_id + schema: + type: integer + - in: query + name: batch_id + schema: + type: integer + responses: + 200: + description: dataset preview info + content: + application/json: + schema: + type: object + properties: + dtypes: + type: array + items: + type: object + properties: + key: + type: string + value: + type: string + sample: + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + num_example: + type: integer + metrics: + type: object + images: + type: array + items: + type: object + properties: + created_at: + type: string + file_name: + type: string + name: + type: string + height: + type: string + width: + type: string + path: + type: string + """ if dataset_id <= 0: raise NotFoundException(f'Failed to find dataset: {dataset_id}') with db.session_scope() as session: - data = DatasetService(session).get_dataset_preview(dataset_id) - return {'data': data} + data = DatasetService(session).get_dataset_preview(dataset_id, batch_id) + return make_flask_response(data) + +class DatasetLedgerApi(Resource): -class DatasetMetricsApi(Resource): def get(self, dataset_id: int): - if dataset_id <= 0: - raise NotFoundException(f'Failed to find dataset: {dataset_id}') - name = request.args.get('name', None) - if not name: - raise InvalidArgumentException(f'required params name') + """Get dataset ledger + --- + tags: + - dataset + description: get + parameters: + - in: path + name: dataset_id + schema: + type: integer + responses: + 200: + description: get dataset ledger page + content: + 
application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetLedger' + """ + return make_flask_response(data={}, status=HTTPStatus.NO_CONTENT) + + +class DatasetExportApi(Resource): + + @credentials_required + @use_kwargs({ + 'export_path': fields.Str(required=True, validate=_export_path_validator), + 'batch_id': fields.Integer(required=False, load_default=None) + }) + def post(self, dataset_id: int, export_path: str, batch_id: Optional[int]): + """Export dataset + --- + tags: + - dataset + description: Export dataset + parameters: + - in: path + required: true + name: dataset_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + export_path: + type: string + required: true + batch_id: + type: integer + required: false + responses: + 201: + description: Export dataset + content: + application/json: + schema: + type: object + properties: + export_dataset_id: + type: integer + dataset_job_id: + type: integer + """ + export_path = _parse_data_source_url(export_path).url with db.session_scope() as session: - data = DatasetService(session).feature_metrics(name, dataset_id) - return {'data': data} + input_dataset: Dataset = session.query(Dataset).get(dataset_id) + if not input_dataset: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + export_index = session.query(DatasetJob).filter(DatasetJob.kind == DatasetJobKind.EXPORT).filter( + DatasetJob.input_dataset_id == dataset_id).count() + if batch_id: + data_batch = session.query(DataBatch).filter(DataBatch.dataset_id == dataset_id).filter( + DataBatch.id == batch_id).first() + if data_batch is None: + raise NotFoundException(f'Failed to find data_batch {batch_id} in dataset {dataset_id}') + data_batches = [data_batch] + export_dataset_name = get_export_dataset_name(index=export_index, + input_dataset_name=input_dataset.name, + input_data_batch_name=data_batch.batch_name) + else: + data_batches = input_dataset.data_batches + export_dataset_name = get_export_dataset_name(index=export_index, input_dataset_name=input_dataset.name) + dataset_job_config = dataset_pb2.DatasetJobConfig(dataset_uuid=input_dataset.uuid) + store_format = StoreFormat.UNKNOWN.value if input_dataset.store_format == StoreFormat.UNKNOWN \ + else StoreFormat.CSV.value + dataset_parameter = dataset_pb2.DatasetParameter(name=export_dataset_name, + type=input_dataset.dataset_type.value, + project_id=input_dataset.project.id, + kind=DatasetKindV2.EXPORTED.value, + format=DatasetFormat(input_dataset.dataset_format).name, + is_published=False, + store_format=store_format, + auth_status=AuthStatus.AUTHORIZED.name, + path=export_path) + output_dataset = DatasetService(session=session).create_dataset(dataset_parameter=dataset_parameter) + session.flush() + global_configs = DatasetJobGlobalConfigs() + pure_domain_name = SettingService.get_system_info().pure_domain_name + global_configs.global_configs[pure_domain_name].MergeFrom(dataset_job_config) + export_dataset_job = DatasetJobService(session).create_as_coordinator(project_id=input_dataset.project_id, + kind=DatasetJobKind.EXPORT, + output_dataset_id=output_dataset.id, + global_configs=global_configs) + session.flush() + for data_batch in reversed(data_batches): + # skip non-succeeded data_batch + if not data_batch.is_available(): + continue + DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage_as_coordinator( + dataset_job_id=export_dataset_job.id, + 
global_configs=export_dataset_job.get_global_configs(), + event_time=data_batch.event_time) + + session.commit() + return make_flask_response(data={ + 'export_dataset_id': output_dataset.id, + 'dataset_job_id': export_dataset_job.id + }, + status=HTTPStatus.OK) + + +class DatasetStateFixtApi(Resource): + + @credentials_required + @admin_required + @use_kwargs({ + 'force': + fields.Str(required=False, load_default=None, validate=validate.OneOf([o.value for o in DatasetJobState])) + }) + def post(self, dataset_id: int, force: str): + """fix dataset state + --- + tags: + - dataset + description: fix dataset state + parameters: + - in: path + required: true + name: dataset_id + schema: + type: integer + requestBody: + required: false + content: + application/json: + schema: + type: object + properties: + force: + type: array + items: + type: string + responses: + 200: + description: fix dataset state successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(dataset_id) + if not dataset: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + if force: + dataset.parent_dataset_job.state = DatasetJobState(force) + else: + workflow_state = dataset.parent_dataset_job.workflow.get_state_for_frontend() + # if workflow is completed, restart the batch stats task + if workflow_state == WorkflowExternalState.COMPLETED: + item_name = dataset.parent_dataset_job.get_context().batch_stats_item_name + runners = ComposerService(session).get_recent_runners(item_name, count=1) + # This is a hack to restart the composer runner, see details in job_scheduler.py + if len(runners) > 0: + runners[0].status = RunnerStatus.INIT.value + dataset.parent_dataset_job.state = DatasetJobState.RUNNING + elif workflow_state in (WorkflowExternalState.FAILED, WorkflowExternalState.STOPPED, + WorkflowExternalState.INVALID): + dataset.parent_dataset_job.state = DatasetJobState.FAILED + session.commit() + return make_flask_response(data=dataset.to_proto(), status=HTTPStatus.OK) + + +class DatasetParameter(Schema): + name = fields.Str(required=True) + dataset_type = fields.Str(required=False, + load_default=DatasetType.PSI.value, + validate=validate.OneOf([o.value for o in DatasetType])) + comment = fields.Str(required=False) + project_id = fields.Int(required=True) + kind = fields.Str(required=False, + load_default=DatasetKindV2.RAW.value, + validate=validate.OneOf([o.value for o in DatasetKindV2])) + dataset_format = fields.Str(required=True, validate=validate.OneOf([o.name for o in DatasetFormat])) + need_publish = fields.Bool(required=False, load_default=False) + value = fields.Int(required=False, load_default=0, validate=[validate.Range(min=100, max=10000)]) + schema_checkers = fields.List(fields.Str(validate=validate.OneOf([o.value for o in DatasetSchemaChecker]))) + is_published = fields.Bool(required=False, load_default=False) + import_type = fields.Str(required=False, + load_default=ImportType.COPY.value, + validate=validate.OneOf([o.value for o in ImportType])) + store_format = fields.Str(required=False, + load_default=StoreFormat.TFRECORDS.value, + validate=validate.OneOf([o.value for o in StoreFormat])) + + @post_load + def make_dataset_parameter(self, item: Dict[str, str], **kwargs) -> dataset_pb2.DatasetParameter: + return dataset_pb2.DatasetParameter(name=item.get('name'), + type=item.get('dataset_type'), + comment=item.get('comment'), + 
project_id=item.get('project_id'), + kind=item.get('kind'), + format=item.get('dataset_format'), + need_publish=item.get('need_publish'), + value=item.get('value'), + is_published=item.get('is_published'), + schema_checkers=item.get('schema_checkers'), + import_type=item.get('import_type'), + store_format=item.get('store_format')) class DatasetsApi(Resource): - @jwt_required() - def get(self): - parser = reqparse.RequestParser() - parser.add_argument('project', - type=int, - required=False, - help='project') - data = parser.parse_args() - with db.session_scope() as session: - datasets = DatasetService(session).get_datasets( - project_id=int(data['project'] or 0)) - return {'data': [d.to_dict() for d in datasets]} - - @jwt_required() - def post(self): - parser = reqparse.RequestParser() - parser.add_argument('name', - required=True, - type=str, - help=_FORMAT_ERROR_MESSAGE.format('name')) - parser.add_argument('dataset_type', - required=True, - type=DatasetType, - help=_FORMAT_ERROR_MESSAGE.format('dataset_type')) - parser.add_argument('comment', type=str) - parser.add_argument('project_id', - required=True, - type=int, - help=_FORMAT_ERROR_MESSAGE.format('project_id')) - body = parser.parse_args() - name = body.get('name') - dataset_type = body.get('dataset_type') - comment = body.get('comment') - project_id = body.get('project_id') + FILTER_FIELDS = { + 'name': + filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'project_id': + filtering.SupportedField(type=filtering.FieldType.NUMBER, ops={FilterOp.EQUAL: None}), + 'uuid': + filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.EQUAL: None}), + 'dataset_kind': + filtering.SupportedField(type=filtering.FieldType.STRING, ops={ + FilterOp.IN: None, + FilterOp.EQUAL: None + }), + 'dataset_format': + filtering.SupportedField(type=filtering.FieldType.STRING, + ops={ + FilterOp.IN: dataset_format_filter_op_in, + FilterOp.EQUAL: dataset_format_filter_op_equal + }), + 'is_published': + filtering.SupportedField(type=filtering.FieldType.BOOL, ops={FilterOp.EQUAL: None}), + 'dataset_type': + filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.EQUAL: None}), + 'publish_frontend_state': + filtering.SupportedField(type=filtering.FieldType.STRING, + ops={FilterOp.EQUAL: dataset_publish_frontend_filter_op_equal}), + 'auth_status': + filtering.SupportedField(type=filtering.FieldType.STRING, + ops={FilterOp.IN: dataset_auth_status_filter_op_in}), + } + + SORTER_FIELDS = ['created_at'] + def __init__(self): + self._filter_builder = filtering.FilterBuilder(model_class=Dataset, supported_fields=self.FILTER_FIELDS) + self._sorter_builder = sorting.SorterBuilder(model_class=Dataset, supported_fields=self.SORTER_FIELDS) + + @credentials_required + @use_kwargs( + { + 'page': + fields.Integer(required=False, load_default=1), + 'page_size': + fields.Integer(required=False, load_default=10), + 'dataset_job_kind': + fields.String(required=False, load_default=None), + 'state_frontend': + fields.List( + fields.String( + required=False, load_default=None, validate=validate.OneOf([o.value for o in ResourceState]))), + 'filter_exp': + FilterExpField(required=False, load_default=None, data_key='filter'), + 'sorter_exp': + fields.String(required=False, load_default=None, data_key='order_by'), + 'cron_interval': + fields.String( + required=False, load_default=None, validate=validate.OneOf([o.value for o in CronInterval])), + }, + location='query') + def get(self, + page: int, + page_size: int, + 
dataset_job_kind: Optional[str] = None, + state_frontend: Optional[List[str]] = None, + filter_exp: Optional[FilterExpression] = None, + sorter_exp: Optional[str] = None, + cron_interval: Optional[str] = None): + """Get datasets list + --- + tags: + - dataset + description: get datasets list + parameters: + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: dataset_job_kind + schema: + type: string + - in: query + name: state_frontend + schema: + type: array + collectionFormat: multi + items: + type: string + enum: [PENDING, PROCESSING, SUCCEEDED, FAILED] + - in: query + name: filter + schema: + type: string + - in: query + name: order_by + schema: + type: string + - in: query + name: cron_interval + schema: + type: string + responses: + 200: + description: get datasets list result + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetRef' + """ + if dataset_job_kind is not None: + try: + dataset_job_kind = DatasetJobKind(dataset_job_kind) + except TypeError as err: + raise InvalidArgumentException( + details=f'failed to find dataset dataset_job_kind {dataset_job_kind}') from err with db.session_scope() as session: + query = DatasetService(session).query_dataset_with_parent_job() + if dataset_job_kind: + query = query.filter(DatasetJob.kind == dataset_job_kind) + if filter_exp is not None: + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e try: - # Create dataset - dataset = Dataset( - name=name, - dataset_type=dataset_type, - comment=comment, - path=_get_dataset_path(name), - project_id=project_id, - ) - session.add(dataset) - # TODO: scan cronjob - session.commit() - return {'data': dataset.to_dict()} - except Exception as e: - session.rollback() - raise InvalidArgumentException(details=str(e)) + if sorter_exp is not None: + sorter_exp = sorting.parse_expression(sorter_exp) + else: + sorter_exp = sorting.SortExpression(field='created_at', is_asc=False) + query = self._sorter_builder.build_query(query, sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + # TODO(liuhehan): add state_frontend as custom_builder + if state_frontend is not None: + states = [] + for state in state_frontend: + states.append(ResourceState(state)) + query = DatasetService.filter_dataset_state(query, states) + # filter daily or hourly cron + if cron_interval: + if cron_interval == CronInterval.HOURS.value: + time_range = timedelta(hours=1) + else: + time_range = timedelta(days=1) + query = query.filter(DatasetJob.time_range == time_range) + pagination = paginate(query=query, page=page, page_size=page_size) + datasets = [] + for dataset in pagination.get_items(): + dataset_ref = dataset.to_ref() + dataset_ref.total_value = 0 + datasets.append(dataset_ref) + # TODO(liuhehan): this commit is a lazy update of dataset store_format, remove it after release 2.4 + session.commit() + return make_flask_response(data=datasets, page_meta=pagination.get_metadata()) + + @input_validator + @credentials_required + @emits_event() + @use_args(DatasetParameter()) + def post(self, dataset_parameter: dataset_pb2.DatasetParameter): + """Create dataset + --- + tags: + - dataset + description: Create dataset + requestBody: + required: true + content: + application/json: + schema: + $ref: 
'#/definitions/DatasetParameter' + responses: + 201: + description: Create dataset + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ + with db.session_scope() as session: + # processed dataset must be is_published + if DatasetKindV2(dataset_parameter.kind) == DatasetKindV2.PROCESSED and not dataset_parameter.is_published: + raise InvalidArgumentException('is_published must be true if dataset kind is PROCESSED') + if DatasetKindV2(dataset_parameter.kind) == DatasetKindV2.PROCESSED and ImportType( + dataset_parameter.import_type) != ImportType.COPY: + raise InvalidArgumentException('import type must be copy if dataset kind is PROCESSED') + if StoreFormat(dataset_parameter.store_format) == StoreFormat.CSV and DatasetKindV2( + dataset_parameter.kind) in [DatasetKindV2.RAW, DatasetKindV2.PROCESSED]: + raise InvalidArgumentException('csv store_type is not support if dataset kind is RAW or PROCESSED') + dataset_parameter.auth_status = AuthStatus.AUTHORIZED.name + dataset = DatasetService(session=session).create_dataset(dataset_parameter=dataset_parameter) + session.flush() + # create review ticket for processed_dataset + if DatasetKindV2(dataset_parameter.kind) == DatasetKindV2.PROCESSED: + ticket_helper = get_ticket_helper(session=session) + ticket_helper.create_ticket(TicketType.CREATE_PROCESSED_DATASET, TicketDetails(uuid=dataset.uuid)) + session.commit() + return make_flask_response(data=dataset.to_proto(), status=HTTPStatus.CREATED) + + +class ChildrenDatasetsApi(Resource): + + def get(self, dataset_id: int): + """Get children datasets list + --- + tags: + - dataset + description: Get children datasets list + parameters: + - in: path + name: dataset_id + schema: + type: integer + responses: + 200: + description: get children datasets list result + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetRef' + """ + with db.session_scope() as session: + query = DatasetService(session=session).query_dataset_with_parent_job() + query = query.filter(DatasetJob.input_dataset_id == dataset_id) + # exported dataset should not be shown in children datasets + query = query.filter(Dataset.dataset_kind != DatasetKindV2.EXPORTED) + return make_flask_response(data=[dataset.to_ref() for dataset in query.all()]) + + +class BatchParameter(Schema): + data_source_id = fields.Integer(required=True) + comment = fields.Str(required=False) + + @post_load + def make_batch_parameter(self, item: Dict[str, Any], **kwargs) -> dataset_pb2.BatchParameter: + data_source_id = item.get('data_source_id') + comment = item.get('comment') + + with db.session_scope() as session: + data_source = session.query(DataSource).get(data_source_id) + if data_source is None: + raise ValidationError(message=f'failed to find data_source {data_source_id}', + field_name='data_source_id') + + return dataset_pb2.BatchParameter(comment=comment, data_source_id=data_source_id) class BatchesApi(Resource): - @jwt_required() + + SORTER_FIELDS = ['created_at', 'updated_at'] + + def __init__(self): + self._sorter_builder = sorting.SorterBuilder(model_class=DataBatch, supported_fields=self.SORTER_FIELDS) + + @credentials_required + @use_kwargs( + { + 'page': fields.Integer(required=False, load_default=1), + 'page_size': fields.Integer(required=False, load_default=10), + 'sorter_exp': fields.String(required=False, load_default=None, data_key='order_by') + }, + location='query') + def get(self, dataset_id: int, page: int, page_size: 
int, sorter_exp: Optional[str]): + """List data batches + --- + tags: + - dataset + description: List data batches + parameters: + - in: path + name: dataset_id + schema: + type: integer + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: order_by + schema: + type: string + responses: + 200: + description: list of data batches + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.DataBatch' + """ + with db.session_scope() as session: + query = session.query(DataBatch).filter(DataBatch.dataset_id == dataset_id) + try: + if sorter_exp is not None: + sorter_exp = sorting.parse_expression(sorter_exp) + else: + # default sort is created_at desc + sorter_exp = sorting.SortExpression(field='created_at', is_asc=False) + query = self._sorter_builder.build_query(query, sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + pagination = paginate(query=query, page=page, page_size=page_size) + return make_flask_response(data=[data_batch.to_proto() for data_batch in pagination.get_items()], + page_meta=pagination.get_metadata()) + + +class BatchApi(Resource): + + @credentials_required + def get(self, dataset_id: int, data_batch_id: int): + """Get data batch by id + --- + tags: + - dataset + description: Get data batch by id + parameters: + - in: path + name: dataset_id + schema: + type: integer + - in: path + name: data_batch_id + schema: + type: integer + responses: + 200: + description: Get data batch by id + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DataBatch' + """ + with db.session_scope() as session: + batch: DataBatch = session.query(DataBatch).filter(DataBatch.dataset_id == dataset_id).filter( + DataBatch.id == data_batch_id).first() + if batch is None: + raise NotFoundException(f'failed to find batch {data_batch_id} in dataset {dataset_id}') + return make_flask_response(data=batch.to_proto()) + + +class BatchAnalyzeApi(Resource): + + @credentials_required + @use_kwargs({'dataset_job_config': fields.Nested(DatasetJobVariablesParameter())}) + def post(self, dataset_id: int, data_batch_id: int, dataset_job_config: dataset_pb2.DatasetJobConfig): + """Analyze data_batch by id + --- + tags: + - dataset + description: Analyze data_batch by id + parameters: + - in: path + name: dataset_id + schema: + type: integer + - in: path + name: data_batch_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + dataset_job_config: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJobConfig' + responses: + 200: + description: analyzer dataset job details + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJob' + """ + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(dataset_id) + if dataset is None: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + dataset_job_config.dataset_uuid = dataset.uuid + global_configs = DatasetJobGlobalConfigs() + pure_domain_name = SettingService.get_system_info().pure_domain_name + global_configs.global_configs[pure_domain_name].MergeFrom(dataset_job_config) + analyzer_dataset_job: DatasetJob = session.query(DatasetJob).filter( + DatasetJob.output_dataset_id == dataset_id).filter(DatasetJob.kind == DatasetJobKind.ANALYZER).first() + if analyzer_dataset_job 
is None: + analyzer_dataset_job = DatasetJobService(session).create_as_coordinator(project_id=dataset.project_id, + kind=DatasetJobKind.ANALYZER, + output_dataset_id=dataset_id, + global_configs=global_configs) + else: + previous_global_configs = analyzer_dataset_job.get_global_configs() + for variable in dataset_job_config.variables: + set_variable_value_to_job_config(previous_global_configs.global_configs[pure_domain_name], variable) + analyzer_dataset_job.set_global_configs(previous_global_configs) + session.flush() + DatasetJobStageService(session).create_dataset_job_stage_as_coordinator( + project_id=dataset.project_id, + dataset_job_id=analyzer_dataset_job.id, + output_data_batch_id=data_batch_id, + global_configs=analyzer_dataset_job.get_global_configs()) + dataset_job_details = analyzer_dataset_job.to_proto() + session.commit() + + return make_flask_response(data=dataset_job_details, status=HTTPStatus.OK) + + +class BatchMetricsApi(Resource): + + @credentials_required + @use_kwargs({ + 'name': fields.Str(required=True), + }, location='query') + def get(self, dataset_id: int, data_batch_id: int, name: str): + """Get data batch metrics info + --- + tags: + - dataset + description: get data batch metrics info + parameters: + - in: path + required: true + name: dataset_id + schema: + type: integer + - in: path + required: true + name: data_batch_id + schema: + type: integer + - in: query + required: true + name: name + schema: + type: string + responses: + 200: + description: get data batch metrics info + content: + application/json: + schema: + type: object + properties: + name: + type: string + metrics: + type: object + properties: + count: + type: string + max: + type: string + min: + type: string + mean: + type: string + stddev: + type: string + missing_count: + type: string + hist: + type: object + properties: + x: + type: array + items: + type: number + y: + type: array + items: + type: number + """ + # TODO(liuhehan): return dataset metrics in proto + with db.session_scope() as session: + data = DatasetService(session).feature_metrics(name, dataset_id, data_batch_id) + return make_flask_response(data) + + +class BatchRerunApi(Resource): + + @credentials_required + @use_kwargs({'dataset_job_parameter': fields.Nested(DatasetJobParameter())}) + def post(self, dataset_id: int, data_batch_id: int, dataset_job_parameter: dataset_pb2.DatasetJob): + """rerun data_batch by id + --- + tags: + - dataset + description: Rerun data_batch by id + parameters: + - in: path + name: dataset_id + schema: + type: integer + - in: path + name: data_batch_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + dataset_job_parameter: + $ref: '#/definitions/DatasetJobParameter' + responses: + 200: + description: dataset job stage details + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJobStage' + """ + global_configs = dataset_job_parameter.global_configs + + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(dataset_id) + if dataset is None: + raise InvalidArgumentException(f'failed to find dataset: {dataset_id}') + data_batch: DataBatch = session.query(DataBatch).filter(DataBatch.dataset_id == dataset_id).filter( + DataBatch.id == data_batch_id).first() + if data_batch is None: + raise InvalidArgumentException(f'failed to find data_batch: {data_batch_id}') + dataset_job: DatasetJob = dataset.parent_dataset_job + if dataset_job is None: + raise 
InvalidArgumentException(f'dataset_job is missing, output_dataset_id: {dataset_id}')
+            # get current global_configs
+            if dataset_job.is_coordinator():
+                current_global_configs = dataset_job.get_global_configs()
+            else:
+                participant: Participant = session.query(Participant).get(dataset_job.coordinator_id)
+                system_client = SystemServiceClient.from_participant(domain_name=participant.domain_name)
+                flag_resp = system_client.list_flags()
+                if not flag_resp.get(Flag.DATA_BATCH_RERUN_ENABLED.name):
+                    raise MethodNotAllowedException(
+                        f'participant {participant.pure_domain_name()} does not support rerunning data_batch, ' \
+                        'can only rerun a data_batch created as coordinator'
+                    )
+                client = RpcClient.from_project_and_participant(dataset_job.project.name, dataset_job.project.token,
+                                                                participant.domain_name)
+                response = client.get_dataset_job(uuid=dataset_job.uuid)
+                current_global_configs = response.dataset_job.global_configs
+            # set global_configs
+            for pure_domain_name in global_configs.global_configs:
+                for variable in global_configs.global_configs[pure_domain_name].variables:
+                    set_variable_value_to_job_config(current_global_configs.global_configs[pure_domain_name], variable)
+            # create dataset_job_stage
+            dataset_job_stage = DatasetJobStageService(session).create_dataset_job_stage_as_coordinator(
+                project_id=dataset.project_id,
+                dataset_job_id=dataset_job.id,
+                output_data_batch_id=data_batch_id,
+                global_configs=current_global_configs)
+            session.flush()
+            dataset_job_stage_details = dataset_job_stage.to_proto()
+            session.commit()
+
+        return make_flask_response(data=dataset_job_stage_details, status=HTTPStatus.OK)
+
+
+class DataSourceParameter(Schema):
+    name = fields.Str(required=True)
+    comment = fields.Str(required=False)
+    data_source_url = fields.Str(required=True, validate=_path_authority_validator)
+    is_user_upload = fields.Bool(required=False)
+    dataset_format = fields.Str(required=False,
+                                load_default=DatasetFormat.TABULAR.name,
+                                validate=validate.OneOf([o.name for o in DatasetFormat]))
+    store_format = fields.Str(required=False,
+                              load_default=StoreFormat.UNKNOWN.value,
+                              validate=validate.OneOf([o.value for o in StoreFormat]))
+    dataset_type = fields.Str(required=False,
+                              load_default=DatasetType.PSI.value,
+                              validate=validate.OneOf([o.value for o in DatasetType]))
+
+    @post_load
+    def make_data_source(self, item: Dict[str, str], **kwargs) -> dataset_pb2.DataSource:
+        del kwargs  # this variable is not needed for now
+        name = item.get('name')
+        comment = item.get('comment')
+        data_source_url = item.get('data_source_url')
+        is_user_upload = item.get('is_user_upload', False)
+        data_source = _parse_data_source_url(data_source_url)
+        data_source.name = name
+        data_source.dataset_format = item.get('dataset_format')
+        data_source.store_format = item.get('store_format')
+        data_source.dataset_type = item.get('dataset_type')
+        if is_user_upload:
+            data_source.is_user_upload = True
+        if comment:
+            data_source.comment = comment
+        return data_source
+
+
+class DataSourcesApi(Resource):
+
+    @credentials_required
+    @use_kwargs({'data_source': fields.Nested(DataSourceParameter()), 'project_id': fields.Integer(required=True)})
+    def post(self, data_source: dataset_pb2.DataSource, project_id: int):
+        """Create a data source
+        ---
+        tags:
+          - dataset
+        description: create a data source
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  data_source:
+                    type: object
+                    required: true
+                    properties:
+                      schema:
+                        $ref: '#/definitions/DataSourceParameter'
+ project_id: + type: integer + required: true + responses: + 201: + description: The data source is created + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DataSource' + 409: + description: A data source with the same name exists + 400: + description: | + A data source that webconsole cannot connect with + Probably, unexist data source or unauthorized to the data source + """ + + _validate_data_source(data_source.url, DatasetType(data_source.dataset_type)) + with db.session_scope() as session: + data_source.project_id = project_id + data_source = DataSourceService(session=session).create_data_source(data_source) + session.commit() + return make_flask_response(data=data_source.to_proto(), status=HTTPStatus.CREATED) + + @credentials_required + @use_kwargs({'project_id': fields.Integer(required=False, load_default=0, validate=validate.Range(min=0))}, + location='query') + def get(self, project_id: int): + """Get a list of data source + --- + tags: + - dataset + description: get a list of data source + parameters: + - in: query + name: project_id + schema: + type: integer + responses: + 200: + description: list of data source + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.DataSource' + """ + + with db.session_scope() as session: + data_sources = DataSourceService(session=session).get_data_sources(project_id) + return make_flask_response(data=data_sources) + + +class DataSourceApi(Resource): + + @credentials_required + def get(self, data_source_id: int): + """Get target data source by id + --- + tags: + - dataset + description: get target data source by id + parameters: + - in: path + name: data_source_id + schema: + type: integer + responses: + 200: + description: data source + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DataSource' + """ + + with db.session_scope() as session: + data_source: DataSource = session.query(DataSource).get(data_source_id) + if not data_source: + raise NotFoundException(message=f'cannot find data_source with id: {data_source_id}') + return make_flask_response(data=data_source.to_proto()) + + @credentials_required + def delete(self, data_source_id: int): + """Delete a data source + --- + tags: + - dataset + description: delete a data source + parameters: + - in: path + name: data_source_id + schema: + type: integer + responses: + 204: + description: deleted data source result + """ + + with db.session_scope() as session: + DataSourceService(session=session).delete_data_source(data_source_id) + session.commit() + return make_flask_response(data={}, status=HTTPStatus.NO_CONTENT) + + +class DataSourceTreeApi(Resource): + + @credentials_required + def get(self, data_source_id: int): + """Get the data source tree + --- + tags: + - dataset + description: get the data source tree + parameters: + - in: path + name: data_source_id + schema: + type: integer + responses: + 200: + description: the file tree of the data source + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.FileTreeNode' + """ + with db.session_scope() as session: + data_source: DataSource = session.query(DataSource).get(data_source_id) + # relative path is used in returned file tree + file_tree = FileTreeBuilder(data_source.path, relpath=True).build_with_root() + return make_flask_response(file_tree) + + +class DataSourceCheckConnectionApi(Resource): + + @credentials_required + @use_kwargs({ + 'data_source_url': 
+class DataSourceCheckConnectionApi(Resource): + + @credentials_required + @use_kwargs({ + 'data_source_url': + fields.Str(required=True, validate=_path_authority_validator), + 'file_num': + fields.Integer(required=False, load_default=_DEFAULT_DATA_SOURCE_PREVIEW_FILE_NUM), + 'dataset_type': + fields.Str(required=False, + load_default=DatasetType.PSI.value, + validate=validate.OneOf([o.value for o in DatasetType])) + }) + def post(self, data_source_url: str, file_num: int, dataset_type: str): + """Check data source connection status + --- + tags: + - dataset + description: check data source connection status + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + data_source_url: + type: string + required: true + file_num: + type: integer + required: false + dataset_type: + type: string + required: false + responses: + 200: + description: status details and file_names + content: + application/json: + schema: + type: object + properties: + extra_nums: + type: integer + file_names: + type: array + items: + type: string + """ + + data_source_url = _parse_data_source_url(data_source_url).url + _validate_data_source(data_source_url, DatasetType(dataset_type)) + file_names = FileManager().listdir(data_source_url) + return make_flask_response(data={ + 'file_names': file_names[:file_num], + 'extra_nums': max(len(file_names) - file_num, 0), + }) + +
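A matching sketch for the check-connection endpoint (host and HDFS path hypothetical; the request fields and the `file_names`/`extra_nums` response shape are exactly those declared above):

```python
import requests

BASE = 'http://localhost:1989/api/v2'  # hypothetical webconsole address

# POST /data_sources:check_connection validates the URL, lists the target
# directory, and returns at most file_num names plus the overflow count.
resp = requests.post(f'{BASE}/data_sources:check_connection',
                     json={
                         'data_source_url': 'hdfs:///home/20220801',  # hypothetical path
                         'file_num': 10,
                         'dataset_type': 'PSI',
                     })
payload = resp.json()['data']
print(payload['file_names'], payload['extra_nums'])
```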
+class ParticipantDatasetsApi(Resource): + + @credentials_required + @use_kwargs( + { + 'kind': + fields.Str(required=False, load_default=None), + 'uuid': + fields.Str(required=False, load_default=None), + 'participant_id': + fields.Integer(required=False, load_default=None), + 'cron_interval': + fields.String( + required=False, load_default=None, validate=validate.OneOf([o.value for o in CronInterval])), + }, + location='query') + def get( + self, + project_id: int, + kind: Optional[str], + uuid: Optional[str], + participant_id: Optional[int], + cron_interval: Optional[str], + ): + """Get list of participant datasets + --- + tags: + - dataset + description: get list of participant datasets + parameters: + - in: path + name: project_id + schema: + type: integer + - in: query + name: kind + schema: + type: string + - in: query + name: uuid + schema: + type: string + - in: query + name: participant_id + schema: + type: integer + - in: query + name: cron_interval + schema: + type: string + responses: + 200: + description: list of participant datasets + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.ParticipantDatasetRef' + """ + if kind is not None: + try: + DatasetKindV2(kind) + except ValueError as err: + raise InvalidArgumentException(details=f'failed to find dataset kind {kind}') from err + time_range = None + if cron_interval: + if cron_interval == CronInterval.HOURS.value: + time_range = TimeRange(hours=1) + else: + time_range = TimeRange(days=1) + + with db.session_scope() as session: + if participant_id is None: + participants = ParticipantService(session).get_platform_participants_by_project(project_id) + else: + participant = session.query(Participant).get(participant_id) + if participant is None: + raise NotFoundException(f'participant {participant_id} is not found') + participants = [participant] + project = session.query(Project).get(project_id) + data = [] + for participant in participants: + # check flag + system_client = SystemServiceClient.from_participant(domain_name=participant.domain_name) + flag_resp = system_client.list_flags() + # if the participant supports the list dataset rpc, use the new rpc + if flag_resp.get(Flag.LIST_DATASETS_RPC_ENABLED.name): + client = ResourceServiceClient.from_project_and_participant(participant.domain_name, project.name) + response = client.list_datasets(kind=DatasetKindV2(kind) if kind is not None else None, + uuid=uuid, + state=ResourceState.SUCCEEDED, + time_range=time_range) + else: + client = RpcClient.from_project_and_participant(project.name, project.token, + participant.domain_name) + response = client.list_participant_datasets(kind=kind, uuid=uuid) + datasets = response.participant_datasets + if uuid: + datasets = [d for d in datasets if d.uuid == uuid] + for dataset in datasets: + dataset.participant_id = participant.id + dataset.project_id = project_id + data.extend(datasets) + + return make_flask_response(data=data) + + +class DatasetPublishApi(Resource): + + @credentials_required + @use_kwargs({'value': fields.Int(required=False, load_default=0, validate=[validate.Range(min=100, max=10000)])}) + def post(self, dataset_id: int, value: int): + """Publish the dataset in workspace + --- + tags: + - dataset + description: Publish the dataset in workspace + parameters: + - in: path + name: dataset_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + value: + type: integer + required: true + responses: + 200: + description: published the dataset in workspace + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ + with db.session_scope() as session: + dataset = DatasetService(session=session).publish_dataset(dataset_id, value) + session.commit() + return make_flask_response(data=dataset.to_proto()) + + @credentials_required + def delete(self, dataset_id: int): + """Revoke dataset publish + --- + tags: + - dataset + description: Revoke dataset publish + parameters: + - in: path + name: dataset_id + schema: + type: integer + responses: + 204: + description: revoked publish dataset successfully + """ + with db.session_scope() as session: + DatasetService(session=session).withdraw_dataset(dataset_id) + session.commit() + return make_flask_response(data=None, status=HTTPStatus.NO_CONTENT) + + +class DatasetAuthorizeApi(Resource): + + @credentials_required def post(self, dataset_id: int): - parser = reqparse.RequestParser() - parser.add_argument('event_time', type=int) - parser.add_argument('files', - required=True, - type=list, - location='json', - help=_FORMAT_ERROR_MESSAGE.format('files')) - parser.add_argument('move', type=bool) - parser.add_argument('comment', type=str) - body = parser.parse_args() - event_time = body.get('event_time') - files = body.get('files') - move = body.get('move', False) - comment = body.get('comment') + """Authorize target dataset by id + --- + tags: + - dataset + description: authorize target dataset by id + parameters: + - in: path + name: dataset_id + schema: + type: integer + responses: + 200: + description: authorize target dataset by id + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ with db.session_scope() as session: - dataset = session.query(Dataset).filter_by(id=dataset_id).first() + dataset: Dataset = session.query(Dataset).get(dataset_id) if dataset is None: - raise NotFoundException( - f'Failed to find dataset: {dataset_id}') - if event_time is None and dataset.type == DatasetType.STREAMING: - raise InvalidArgumentException( - details='data_batch.event_time is empty') - # TODO: PSI dataset should not allow 
multi batches - - # Use current timestamp to fill when type is PSI - event_time = datetime.fromtimestamp( - event_time or datetime.utcnow().timestamp(), tz=timezone.utc) - batch_folder_name = event_time.strftime('%Y%m%d_%H%M%S') - batch_path = f'{dataset.path}/batch/{batch_folder_name}' - # Create batch - batch = DataBatch(dataset_id=dataset.id, - event_time=event_time, - comment=comment, - state=BatchState.NEW, - move=move, - path=batch_path) - batch_details = dataset_pb2.DataBatch() - for file_path in files: - file = batch_details.files.add() - file.source_path = file_path - file_name = file_path.split('/')[-1] - file.destination_path = f'{batch_path}/{file_name}' - batch.set_details(batch_details) - session.add(batch) - session.commit() - session.refresh(batch) - scheduler.wakeup(data_batch_ids=[batch.id]) - return {'data': batch.to_dict()} - - -class FilesApi(Resource): + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + # update local auth_status + dataset.auth_status = AuthStatus.AUTHORIZED + if dataset.participants_info is not None: + # update local auth_status cache + AuthService(session=session, dataset_job=dataset.parent_dataset_job).update_auth_status( + domain_name=SettingService.get_system_info().pure_domain_name, auth_status=AuthStatus.AUTHORIZED) + # update participants auth_status cache + DatasetJobController(session=session).inform_auth_status(dataset_job=dataset.parent_dataset_job, + auth_status=AuthStatus.AUTHORIZED) + session.commit() + return make_flask_response(data=dataset.to_proto()) + + @credentials_required + def delete(self, dataset_id: int): + """Revoke dataset authorization by id + --- + tags: + - dataset + description: revoke dataset authorization by id + parameters: + - in: path + name: dataset_id + schema: + type: integer + responses: + 200: + description: revoke dataset authorization by id successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(dataset_id) + if dataset is None: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + # update local auth_status + dataset.auth_status = AuthStatus.WITHDRAW + if dataset.participants_info is not None: + # update local auth_status cache + AuthService(session=session, dataset_job=dataset.parent_dataset_job).update_auth_status( + domain_name=SettingService.get_system_info().pure_domain_name, auth_status=AuthStatus.WITHDRAW) + # update participants auth_status cache + DatasetJobController(session=session).inform_auth_status(dataset_job=dataset.parent_dataset_job, + auth_status=AuthStatus.WITHDRAW) + session.commit() + return make_flask_response(data=dataset.to_proto()) + + +class DatasetFlushAuthStatusApi(Resource): + + @credentials_required + def post(self, dataset_id: int): + """flush dataset auth status cache by id + --- + tags: + - dataset + description: flush dataset auth status cache by id + parameters: + - in: path + name: dataset_id + schema: + type: integer + responses: + 200: + description: flush dataset auth status cache by id successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Dataset' + """ + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(dataset_id) + if dataset is None: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + if dataset.participants_info is not None: + 
DatasetJobController(session=session).update_auth_status_cache(dataset_job=dataset.parent_dataset_job) + session.commit() + return make_flask_response(data=dataset.to_proto()) + + +class TimeRangeParameter(Schema): + days = fields.Integer(required=False, load_default=0, validate=[validate.Range(min=0, max=1)]) + hours = fields.Integer(required=False, load_default=0, validate=[validate.Range(min=0, max=1)]) + + @post_load + def make_time_range(self, item: Dict[str, Any], **kwargs) -> dataset_pb2.TimeRange: + days = item['days'] + hours = item['hours'] + + return dataset_pb2.TimeRange(days=days, hours=hours) + +
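As a concrete illustration of how this schema behaves (a minimal sketch, relying only on the imports already present in this module):

```python
# @post_load converts the validated dict into a proto message, so loading
# query/body parameters through the schema yields a dataset_pb2.TimeRange.
time_range = TimeRangeParameter().load({'days': 1})
assert time_range == dataset_pb2.TimeRange(days=1, hours=0)  # hours falls back to load_default=0

# Values outside the 0..1 range fail the field validators, e.g.
# TimeRangeParameter().load({'hours': 2}) raises marshmallow's ValidationError.
```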
+class DatasetJobDefinitionApi(Resource): + + @credentials_required + def get(self, dataset_job_kind: str): + """Get variables of this dataset_job + --- + tags: + - dataset + description: Get variables of this dataset_job + parameters: + - in: path + name: dataset_job_kind + schema: + type: string + responses: + 200: + description: variables of this dataset_job + content: + application/json: + schema: + type: object + properties: + variables: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.Variable' + is_federated: + type: boolean + """ + # webargs doesn't support location=path for now + # reference: webargs/core.py:L285 + try: + dataset_job_kind = DatasetJobKind(dataset_job_kind) + except ValueError as err: + raise InvalidArgumentException(details=f'unknown dataset_job_kind {dataset_job_kind}') from err + with db.session_scope() as session: + configer = DatasetJobConfiger.from_kind(dataset_job_kind, session) + user_variables = configer.user_variables + is_federated = not DatasetJobService(session).is_local(dataset_job_kind) + return make_flask_response(data={'variables': user_variables, 'is_federated': is_federated}) + + +class DatasetJobsApi(Resource): + FILTER_FIELDS = { + 'name': filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'kind': filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.IN: None}), + 'input_dataset_id': filtering.SupportedField(type=filtering.FieldType.NUMBER, ops={FilterOp.EQUAL: None}), + 'coordinator_id': filtering.SupportedField(type=filtering.FieldType.NUMBER, ops={FilterOp.IN: None}), + 'state': filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.IN: None}), + } + + SORTER_FIELDS = ['created_at'] + + def __init__(self): + self._filter_builder = filtering.FilterBuilder(model_class=DatasetJob, supported_fields=self.FILTER_FIELDS) + self._sorter_builder = sorting.SorterBuilder(model_class=DatasetJob, supported_fields=self.SORTER_FIELDS) + + @credentials_required + @use_kwargs( + { + 'page': fields.Integer(required=False, load_default=1), + 'page_size': fields.Integer(required=False, load_default=10), + 'filter_exp': FilterExpField(required=False, load_default=None, data_key='filter'), + 'sorter_exp': fields.String(required=False, load_default=None, data_key='order_by'), + }, + location='query') + def get(self, + project_id: int, + page: int, + page_size: int, + filter_exp: Optional[FilterExpression] = None, + sorter_exp: Optional[str] = None): + """Get list of dataset_jobs + --- + tags: + - dataset + description: Get list of dataset_jobs + parameters: + - in: path + name: project_id + schema: + type: integer + - in: query + name: filter + schema: + type: string + - in: query + name: order_by + schema: + type: string + responses: + 200: + description: list of dataset_jobs + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJobRef' + """ + with db.session_scope() as session: + query = session.query(DatasetJob).filter(DatasetJob.project_id == project_id) + if filter_exp is not None: + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + try: + if sorter_exp is not None: + sorter_exp = sorting.parse_expression(sorter_exp) + else: + sorter_exp = sorting.SortExpression(field='created_at', is_asc=False) + query = self._sorter_builder.build_query(query, sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + pagination = paginate(query=query, page=page, page_size=page_size) + + return make_flask_response(data=[dataset_job.to_ref() for dataset_job in pagination.get_items()], + page_meta=pagination.get_metadata()) +
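The `filter` and `order_by` parameters take the webconsole's filtering/sorting expression syntax, the same syntax exercised by the tests later in this diff. A hedged client-side sketch, with host and project id hypothetical:

```python
import urllib.parse
import requests

BASE = 'http://localhost:1989/api/v2'  # hypothetical webconsole address

# Keep only RSA-PSI data join jobs whose name contains "join", newest first.
filter_exp = urllib.parse.quote('(and(kind:["RSA_PSI_DATA_JOIN"])(name~="join"))')
order_by = urllib.parse.quote('created_at desc')
resp = requests.get(f'{BASE}/projects/1/dataset_jobs'
                    f'?page=1&page_size=10&filter={filter_exp}&order_by={order_by}')
jobs = resp.json()['data']       # list of DatasetJobRef messages
meta = resp.json()['page_meta']  # current_page / page_size / total_pages / total_items
```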
+ @credentials_required + @use_kwargs({ + 'dataset_job_parameter': fields.Nested(DatasetJobParameter()), + 'output_dataset_id': fields.Integer(required=False, load_default=None), + 'time_range': fields.Nested(TimeRangeParameter(), required=False, load_default=dataset_pb2.TimeRange()) + }) + def post(self, project_id: int, dataset_job_parameter: dataset_pb2.DatasetJob, output_dataset_id: Optional[int], + time_range: dataset_pb2.TimeRange): + """Create new dataset job of the kind + --- + tags: + - dataset + description: Create new dataset job of the kind + parameters: + - in: path + name: project_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + dataset_job_parameter: + $ref: '#/definitions/DatasetJobParameter' + time_range: + $ref: '#/definitions/TimeRangeParameter' + responses: + 201: + description: Create new dataset job of the kind + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJob' + """ + dataset_job_kind = DatasetJobKind(dataset_job_parameter.kind) + if not Flag.OT_PSI_ENABLED.value and dataset_job_kind == DatasetJobKind.OT_PSI_DATA_JOIN: + raise NoAccessException(f'dataset job {dataset_job_parameter.kind} is not enabled') + if not Flag.HASH_DATA_JOIN_ENABLED.value and dataset_job_kind == DatasetJobKind.HASH_DATA_JOIN: + raise NoAccessException(f'dataset job {dataset_job_parameter.kind} is not enabled') + + global_configs = dataset_job_parameter.global_configs + + with db.session_scope() as session: + output_dataset = session.query(Dataset).get(output_dataset_id) + if not output_dataset: + raise InvalidArgumentException(f'failed to find dataset: {output_dataset_id}') + time_delta = None + if output_dataset.dataset_type == DatasetType.STREAMING: + if not (time_range.days > 0) ^ (time_range.hours > 0): + raise InvalidArgumentException('must specify cron by days or hours') + time_delta = timedelta(days=time_range.days, hours=time_range.hours) + dataset_job = DatasetJobService(session).create_as_coordinator(project_id=project_id, + kind=dataset_job_kind, + output_dataset_id=output_dataset.id, + global_configs=global_configs, + time_range=time_delta) + session.flush() + dataset_job_details = dataset_job.to_proto() + + # we set participants_info in the dataset_job api, as we need to get the participants from the dataset kind + participants = DatasetJobService(session=session).get_participants_need_distribute(dataset_job=dataset_job) + AuthService(session=session, + dataset_job=dataset_job).initialize_participants_info_as_coordinator(participants=participants) + # set need_create_stage to True for non-cron dataset_job; + # we do not create a stage here, as we must ensure that no stage is created before all participants have authorized + if not dataset_job.is_cron(): + context = dataset_job.get_context() + context.need_create_stage = True + dataset_job.set_context(context) + session.commit() + + return make_flask_response(dataset_job_details, status=HTTPStatus.CREATED) + + +class DatasetJobApi(Resource): + + @credentials_required + def get(self, project_id: int, dataset_job_id: int): + """Get detail of this dataset_job + --- + tags: + - dataset + description: Get detail of this dataset_job + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: dataset_job_id + schema: + type: integer + responses: + 200: + description: detail of this dataset_job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJob' + """ + with db.session_scope() as session: + # TODO(wangsen.0914): move this logic into the service layer + dataset_job: DatasetJob = session.query(DatasetJob).filter_by(project_id=project_id).filter_by( + id=dataset_job_id).first() + if dataset_job is None: + raise NotFoundException(f'failed to find datasetjob {dataset_job_id}') + dataset_job_pb = dataset_job.to_proto() + if not dataset_job.is_coordinator(): + participant = session.query(Participant).get(dataset_job.coordinator_id) + client = RpcClient.from_project_and_participant(dataset_job.project.name, dataset_job.project.token, + participant.domain_name) + response = client.get_dataset_job(uuid=dataset_job.uuid) + dataset_job_pb.global_configs.MergeFrom(response.dataset_job.global_configs) + dataset_job_pb.scheduler_state = response.dataset_job.scheduler_state + return make_flask_response(dataset_job_pb) + + @credentials_required + def delete(self, project_id: int, dataset_job_id: int): + """Delete dataset_job by id + --- + tags: + - dataset + description: Delete dataset_job by id + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: dataset_job_id + schema: + type: integer + responses: + 204: + description: delete dataset_job successfully + """ + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).filter_by(project_id=project_id).filter_by( + id=dataset_job_id).first() + if dataset_job is None: + message = f'Failed to delete dataset_job: {dataset_job_id}; reason: failed to find dataset_job' + logging.error(message) + raise NotFoundException(message) + DatasetJobService(session).delete_dataset_job(dataset_job=dataset_job) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class DatasetJobStopApi(Resource): + + @credentials_required + def post(self, project_id: int, dataset_job_id: int): + """Stop dataset_job by id + --- + tags: + - dataset + description: Stop dataset_job by id + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: dataset_job_id + schema: + type: integer + responses: + 200: + description: stop dataset_job successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJob' + """ + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).filter_by(project_id=project_id).filter_by( + id=dataset_job_id).first() + if dataset_job is None: + raise NotFoundException(f'failed to find datasetjob {dataset_job_id}') + 
DatasetJobController(session).stop(uuid=dataset_job.uuid) + session.commit() + return make_flask_response(data=dataset_job.to_proto()) + + +class DatasetJobStopSchedulerApi(Resource): + + @credentials_required + def post(self, project_id: int, dataset_job_id: int): + """Stop scheduler dataset_job by id + --- + tags: + - dataset + description: Stop scheduler dataset_job by id + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: dataset_job_id + schema: + type: integer + responses: + 200: + description: stop scheduler dataset_job successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJob' + """ + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).filter_by(project_id=project_id).filter_by( + id=dataset_job_id).first() + if dataset_job is None: + raise NotFoundException(f'failed to find datasetjob {dataset_job_id}') + if dataset_job.is_coordinator(): + DatasetJobService(session=session).stop_cron_scheduler(dataset_job=dataset_job) + dataset_job_pb = dataset_job.to_proto() + else: + participant = session.query(Participant).get(dataset_job.coordinator_id) + client = JobServiceClient.from_project_and_participant(participant.domain_name, + dataset_job.project.name) + client.update_dataset_job_scheduler_state(uuid=dataset_job.uuid, + scheduler_state=DatasetJobSchedulerState.STOPPED) + client = RpcClient.from_project_and_participant(dataset_job.project.name, dataset_job.project.token, + participant.domain_name) + response = client.get_dataset_job(uuid=dataset_job.uuid) + dataset_job_pb = dataset_job.to_proto() + dataset_job_pb.global_configs.MergeFrom(response.dataset_job.global_configs) + dataset_job_pb.scheduler_state = response.dataset_job.scheduler_state + session.commit() + return make_flask_response(data=dataset_job_pb) + + +class DatasetJobStagesApi(Resource): + + FILTER_FIELDS = { + 'state': filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.IN: None}), + } + + SORTER_FIELDS = ['created_at'] + def __init__(self): - self._file_manager = FileManager() + self._filter_builder = filtering.FilterBuilder(model_class=DatasetJobStage, supported_fields=self.FILTER_FIELDS) + self._sorter_builder = sorting.SorterBuilder(model_class=DatasetJobStage, supported_fields=self.SORTER_FIELDS) + + @credentials_required + @use_kwargs( + { + 'page': fields.Integer(required=False, load_default=1), + 'page_size': fields.Integer(required=False, load_default=10), + 'filter_exp': FilterExpField(required=False, load_default=None, data_key='filter'), + 'sorter_exp': fields.String(required=False, load_default=None, data_key='order_by') + }, + location='query') + def get(self, project_id: int, dataset_job_id: int, page: int, page_size: int, + filter_exp: Optional[FilterExpression], sorter_exp: Optional[str]): + """List dataset job stages + --- + tags: + - dataset + description: List dataset job stages + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: dataset_job_id + schema: + type: integer + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: filter + schema: + type: string + - in: query + name: order_by + schema: + type: string + responses: + 200: + description: list of dataset job stages + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJobStageRef' + """ + with 
db.session_scope() as session: + query = session.query(DatasetJobStage).filter(DatasetJobStage.project_id == project_id).filter( + DatasetJobStage.dataset_job_id == dataset_job_id) + if filter_exp is not None: + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + try: + if sorter_exp is not None: + sorter_exp = sorting.parse_expression(sorter_exp) + else: + # default sort is created_at desc + sorter_exp = sorting.SortExpression(field='created_at', is_asc=False) + query = self._sorter_builder.build_query(query, sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + pagination = paginate(query=query, page=page, page_size=page_size) + return make_flask_response( + data=[dataset_job_stage.to_ref() for dataset_job_stage in pagination.get_items()], + page_meta=pagination.get_metadata()) - @jwt_required() - def get(self): - # TODO: consider the security factor - if 'directory' in request.args: - directory = request.args['directory'] - else: - directory = os.path.join(current_app.config.get('STORAGE_ROOT'), - 'upload') - files = self._file_manager.ls(directory, recursive=True) - return {'data': [dict(file._asdict()) for file in files]} + +class DatasetJobStageApi(Resource): + + @credentials_required + def get(self, project_id: int, dataset_job_id: int, dataset_job_stage_id: int): + """Get details of given dataset job stage + --- + tags: + - dataset + description: Get details of given dataset job stage + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: dataset_job_id + schema: + type: integer + - in: path + name: dataset_job_stage_id + schema: + type: integer + responses: + 200: + description: dataset job stage details + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.DatasetJobStage' + """ + with db.session_scope() as session: + dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).filter( + DatasetJobStage.project_id == project_id).filter( + DatasetJobStage.dataset_job_id == dataset_job_id).filter( + DatasetJobStage.id == dataset_job_stage_id).first() + if not dataset_job_stage: + raise NotFoundException(f'Failed to find dataset job stage: {dataset_job_stage_id}') + dataset_job_stage_pb = dataset_job_stage.to_proto() + if not dataset_job_stage.is_coordinator(): + participant = session.query(Participant).get(dataset_job_stage.coordinator_id) + client = JobServiceClient.from_project_and_participant(participant.domain_name, + dataset_job_stage.project.name) + response = client.get_dataset_job_stage(dataset_job_stage_uuid=dataset_job_stage.uuid) + dataset_job_stage_pb.global_configs.MergeFrom(response.dataset_job_stage.global_configs) + + return make_flask_response(dataset_job_stage_pb) def initialize_dataset_apis(api: Api): api.add_resource(DatasetsApi, '/datasets') api.add_resource(DatasetApi, '/datasets/<int:dataset_id>') + api.add_resource(DatasetPublishApi, '/datasets/<int:dataset_id>:publish') + api.add_resource(DatasetAuthorizeApi, '/datasets/<int:dataset_id>:authorize') + api.add_resource(DatasetFlushAuthStatusApi, '/datasets/<int:dataset_id>:flush_auth_status') api.add_resource(BatchesApi, '/datasets/<int:dataset_id>/batches') + api.add_resource(BatchApi, '/datasets/<int:dataset_id>/batches/<int:data_batch_id>') + api.add_resource(ChildrenDatasetsApi, '/datasets/<int:dataset_id>/children_datasets') + api.add_resource(BatchAnalyzeApi, '/datasets/<int:dataset_id>/batches/<int:data_batch_id>:analyze') + api.add_resource(BatchMetricsApi, '/datasets/<int:dataset_id>/batches/<int:data_batch_id>/feature_metrics') + api.add_resource(BatchRerunApi, '/datasets/<int:dataset_id>/batches/<int:data_batch_id>:rerun') api.add_resource(DatasetPreviewApi, '/datasets/<int:dataset_id>/preview') - api.add_resource(DatasetMetricsApi, - '/datasets/<int:dataset_id>/feature_metrics') - api.add_resource(FilesApi, '/files') + api.add_resource(DatasetLedgerApi, '/datasets/<int:dataset_id>/ledger') + api.add_resource(DatasetExportApi, '/datasets/<int:dataset_id>:export') + api.add_resource(DatasetStateFixtApi, '/datasets/<int:dataset_id>:state_fix') + + api.add_resource(DataSourcesApi, '/data_sources') + api.add_resource(DataSourceApi, '/data_sources/<int:data_source_id>') + api.add_resource(DataSourceCheckConnectionApi, '/data_sources:check_connection') + api.add_resource(DataSourceTreeApi, '/data_sources/<int:data_source_id>/tree') + + api.add_resource(ParticipantDatasetsApi, '/project/<int:project_id>/participant_datasets') + + api.add_resource(DatasetJobDefinitionApi, '/dataset_job_definitions/<string:dataset_job_kind>') + api.add_resource(DatasetJobsApi, '/projects/<int:project_id>/dataset_jobs') + api.add_resource(DatasetJobApi, '/projects/<int:project_id>/dataset_jobs/<int:dataset_job_id>') + api.add_resource(DatasetJobStopApi, '/projects/<int:project_id>/dataset_jobs/<int:dataset_job_id>:stop') + api.add_resource(DatasetJobStopSchedulerApi, + '/projects/<int:project_id>/dataset_jobs/<int:dataset_job_id>:stop_scheduler') + + api.add_resource(DatasetJobStagesApi, + '/projects/<int:project_id>/dataset_jobs/<int:dataset_job_id>/dataset_job_stages') + api.add_resource( + DatasetJobStageApi, + '/projects/<int:project_id>/dataset_jobs/<int:dataset_job_id>/dataset_job_stages/<int:dataset_job_stage_id>') + + schema_manager.append(DataSourceParameter) + schema_manager.append(DatasetJobConfigParameter) + schema_manager.append(DatasetParameter) + schema_manager.append(BatchParameter) + schema_manager.append(DatasetJobParameter) + schema_manager.append(TimeRangeParameter)
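Note the custom-action route style registered above: sub-operations are modeled as `:verb` suffixes on the resource path (e.g. `POST /datasets/<id>:publish`) rather than as separate sub-resources. A hedged client-side sketch of a few of these actions; the host, ids, and payload values are hypothetical, but the routes and fields match the handlers and the tests below:

```python
import requests

BASE = 'http://localhost:1989/api/v2'  # hypothetical webconsole address

requests.post(f'{BASE}/datasets/1:publish', json={'value': 100})  # value must be in [100, 10000]
requests.post(f'{BASE}/datasets/1:authorize')                     # no body required
requests.post(f'{BASE}/datasets/1:export',
              json={'export_path': '/data/user_home/export_dataset', 'batch_id': 1})
requests.post(f'{BASE}/projects/1/dataset_jobs/2:stop')           # stop a running dataset job
```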
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/apis_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/apis_test.py new file mode 100644 index 000000000..9c5c84861 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/apis_test.py @@ -0,0 +1,3175 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import json +import os +import tempfile +from datetime import datetime, timedelta +from http import HTTPStatus +from pathlib import Path +import urllib +import unittest +from unittest.mock import patch, MagicMock, ANY, PropertyMock +from google.protobuf.struct_pb2 import Value + +from collections import namedtuple +from marshmallow.exceptions import ValidationError +from tensorflow.io import gfile + +from envs import Envs +from testing.common import BaseTestCase +from testing.dataset import FakeDatasetJobConfiger, FakeFederatedDatasetJobConfiger + +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.apis import _parse_data_source_url, _path_authority_validator +from fedlearner_webconsole.dataset.models import (Dataset, DatasetJob, DatasetJobKind, DatasetJobSchedulerState, + DatasetJobStage, ImportType, DatasetKindV2, DatasetSchemaChecker, + DatasetJobState, StoreFormat, DatasetType, ResourceState, DataBatch, + DatasetFormat, BatchState, DataSourceType, DataSource) +from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.participant.models import ProjectParticipant, Participant +from fedlearner_webconsole.proto import dataset_pb2, service_pb2, setting_pb2 +from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import ListDatasetsResponse +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.algorithm_pb2 import FileTreeNode + +FakeFileStatistics = namedtuple('FakeFileStatistics', ['length', 'mtime_nsec', 'is_directory']) + + +def fake_get_items(*args, **kwargs): + return {}, [] + + +def fake_export_task_result(*args, **kwargs): + return {} + + +def fake_isdir(*args, **kwargs): + path = kwargs.get('path') + return (path in [ + 'file:///data/test', 'hdfs:///home/', 'hdfs:///home/20220801', 'hdfs:///home/20220802', + 'hdfs:///home/20220803-15', 'hdfs:///home/2022080316' + ]) + + +class DatasetApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + STORAGE_ROOT = tempfile.gettempdir() + + def setUp(self): + super().setUp() + self._storage_root = Envs.STORAGE_ROOT + self._file_manager = FileManager() + with db.session_scope() as session: + project = Project(name='test-project') + session.add(project) + session.flush([project]) + participant = Participant(id=project.id, name='test_participant', domain_name='fake_domain_name') + session.add(participant) + project_participant = ProjectParticipant(project_id=project.id, participant_id=participant.id) + session.add(project_participant) + workflow = Workflow(state=WorkflowState.COMPLETED, name='workflow_generate_by_dataset_job') + session.add(workflow) + + session.commit() + + with db.session_scope() as session: + self.default_dataset1 = Dataset(name='default dataset1', + creator_username='test', + uuid='default dataset1 uuid', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + 
dataset_kind=DatasetKindV2.RAW, + created_at=datetime(2012, 1, 14, 12, 0, 5)) + meta_info = dataset_pb2.DatasetMetaInfo(value=100, + schema_checkers=[ + DatasetSchemaChecker.RAW_ID_CHECKER.value, + DatasetSchemaChecker.NUMERIC_COLUMNS_CHECKER.value + ]) + self.default_dataset1.set_meta_info(meta_info) + session.add(self.default_dataset1) + session.flush() + default_dataset_job_1 = DatasetJob(workflow_id=workflow.id, + uuid=resource_uuid(), + project_id=project.id, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=100, + output_dataset_id=self.default_dataset1.id, + state=DatasetJobState.FAILED) + session.add(default_dataset_job_1) + session.commit() + with db.session_scope() as session: + self.default_dataset2 = Dataset(name='default dataset2', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment2', + path=os.path.join(tempfile.gettempdir(), 'dataset/123'), + project_id=project.id, + dataset_kind=DatasetKindV2.PROCESSED, + dataset_format=DatasetFormat.TABULAR.value, + created_at=datetime(2012, 1, 14, 12, 0, 6)) + session.add(self.default_dataset2) + session.flush([self.default_dataset2]) + data_batch = DataBatch(event_time=datetime.now(), + comment='comment', + state=BatchState.NEW, + dataset_id=self.default_dataset2.id, + path='/data/dataset/123/batch_test_batch') + session.add(data_batch) + session.flush() + default_dataset_job_2 = DatasetJob(workflow_id=workflow.id, + uuid=resource_uuid(), + project_id=project.id, + kind=DatasetJobKind.DATA_ALIGNMENT, + input_dataset_id=100, + output_dataset_id=self.default_dataset2.id) + session.add(default_dataset_job_2) + default_dataset_job_3 = DatasetJob(workflow_id=workflow.id, + uuid=resource_uuid(), + project_id=project.id, + kind=DatasetJobKind.ANALYZER, + input_dataset_id=self.default_dataset2.id, + output_dataset_id=self.default_dataset2.id) + session.add(default_dataset_job_3) + session.commit() + + with db.session_scope() as session: + workflow = Workflow(id=100, state=WorkflowState.COMPLETED, name='fake_workflow') + dataset = Dataset(id=3, + name='dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='comment', + path=os.path.join(tempfile.gettempdir(), 'dataset/321'), + project_id=3, + created_at=datetime(2012, 1, 14, 12, 0, 7)) + session.add(workflow) + session.add(dataset) + session.flush() + default_dataset_job_4 = DatasetJob(workflow_id=workflow.id, + uuid=resource_uuid(), + project_id=project.id, + kind=DatasetJobKind.DATA_ALIGNMENT, + input_dataset_id=100, + output_dataset_id=dataset.id) + session.add(default_dataset_job_4) + session.commit() + + def test_get_dataset(self): + get_response = self.get_helper(f'/api/v2/datasets/{self.default_dataset1.id}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + get_response, + { + 'id': 1, + 'name': 'default dataset1', + 'creator_username': 'test', + 'comment': 'test comment1', + 'path': '/data/dataset/123', + 'deleted_at': 0, + 'dataset_kind': 'RAW', + 'is_published': False, + 'project_id': 1, + 'dataset_format': DatasetFormat.TABULAR.name, + 'num_feature': 0, + 'num_example': 0, + 'state_frontend': ResourceState.FAILED.value, + 'file_size': 0, + 'parent_dataset_job_id': 1, + 'data_source': ANY, + 'workflow_id': 1, + 'value': 100, + 'schema_checkers': ['RAW_ID_CHECKER', 'NUMERIC_COLUMNS_CHECKER'], + 'dataset_type': 'STREAMING', + 'import_type': 'COPY', + 'store_format': 'TFRECORDS', + 'analyzer_dataset_job_id': 0, + 'publish_frontend_state': 'UNPUBLISHED', + 
'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }, + ignore_fields=[ + 'created_at', + 'updated_at', + 'uuid', + ], + ) + + def test_get_internal_processed_dataset(self): + with db.session_scope() as session: + default_dataset = Dataset(id=10, + uuid=resource_uuid(), + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_kind=DatasetKindV2.INTERNAL_PROCESSED, + is_published=False, + auth_status=AuthStatus.AUTHORIZED) + session.add(default_dataset) + session.commit() + + get_response = self.get_helper('/api/v2/datasets/10') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + get_response, + { + 'id': 10, + 'project_id': 1, + 'name': 'default dataset', + 'path': '/data/dataset/123', + 'comment': 'test comment', + 'dataset_format': 'TABULAR', + 'state_frontend': 'SUCCEEDED', + 'dataset_kind': 'INTERNAL_PROCESSED', + 'workflow_id': 0, + 'data_source': '', + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'deleted_at': 0, + 'parent_dataset_job_id': 0, + 'analyzer_dataset_job_id': 0, + 'is_published': False, + 'value': 0, + 'schema_checkers': [], + 'creator_username': '', + 'import_type': 'COPY', + 'dataset_type': 'PSI', + 'store_format': 'TFRECORDS', + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'AUTHORIZED', + 'participants_info': { + 'participants_map': {} + }, + }, + ignore_fields=[ + 'uuid', + 'created_at', + 'updated_at', + ], + ) + + def test_get_dataset_not_found(self): + get_response = self.get_helper('/api/v2/datasets/10086') + self.assertEqual(get_response.status_code, HTTPStatus.NOT_FOUND) + + def test_get_datasets(self): + with db.session_scope() as session: + default_dataset_job_4 = session.query(DatasetJob).get(4) + default_dataset_job_4.kind = DatasetJobKind.ANALYZER + default_dataset_job_4.input_dataset_id = 3 + + default_data_source = DataSource(id=4, + name='default data_source', + creator_username='test', + uuid='default data_source uuid', + comment='test comment1', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.SOURCE, + created_at=datetime(2012, 1, 14, 12, 0, 1)) + session.add(default_data_source) + session.commit() + get_response = self.get_helper('/api/v2/datasets') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(datasets, [{ + 'id': 3, + 'project_id': 3, + 'comment': 'comment', + 'created_at': 1326542407, + 'creator_username': 'test', + 'data_source': ANY, + 'dataset_format': 'TABULAR', + 'dataset_kind': 'RAW', + 'dataset_type': 'STREAMING', + 'file_size': 0, + 'import_type': 'COPY', + 'is_published': False, + 'name': 'dataset', + 'num_example': 0, + 'path': ANY, + 'state_frontend': 'FAILED', + 'store_format': 'TFRECORDS', + 'total_value': 0, + 'uuid': '', + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }, { + 'id': 2, + 'project_id': 1, + 'name': 'default dataset2', + 'creator_username': 'test', + 'created_at': 1326542406, + 'path': ANY, + 'dataset_format': 'TABULAR', + 'comment': 'test comment2', + 'state_frontend': 'PENDING', + 'dataset_kind': 'PROCESSED', + 'data_source': ANY, + 'file_size': 0, + 'is_published': 
False, + 'num_example': 0, + 'uuid': '', + 'total_value': 0, + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + 'import_type': 'COPY', + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }, { + 'id': 1, + 'project_id': 1, + 'name': 'default dataset1', + 'creator_username': 'test', + 'created_at': 1326542405, + 'path': '/data/dataset/123', + 'dataset_format': 'TABULAR', + 'comment': 'test comment1', + 'state_frontend': 'FAILED', + 'uuid': 'default dataset1 uuid', + 'dataset_kind': 'RAW', + 'file_size': 0, + 'is_published': False, + 'num_example': 0, + 'data_source': ANY, + 'total_value': 0, + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + 'import_type': 'COPY', + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }, { + 'id': 4, + 'project_id': 1, + 'name': 'default data_source', + 'creator_username': 'test', + 'comment': 'test comment1', + 'created_at': 1326542401, + 'path': '/data/dataset/123', + 'data_source': ANY, + 'dataset_format': 'TABULAR', + 'dataset_kind': 'SOURCE', + 'dataset_type': 'PSI', + 'file_size': 0, + 'import_type': 'COPY', + 'is_published': False, + 'num_example': 0, + 'state_frontend': 'FAILED', + 'store_format': 'TFRECORDS', + 'total_value': 0, + 'uuid': 'default data_source uuid', + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }]) + self.assertEqual( + json.loads(get_response.data).get('page_meta'), { + 'current_page': 1, + 'page_size': 10, + 'total_pages': 1, + 'total_items': 4 + }) + + with db.session_scope() as session: + default_dataset_job_4 = session.query(DatasetJob).get(4) + default_dataset_job_4.kind = DatasetJobKind.DATA_ALIGNMENT + default_dataset_job_4.input_dataset_id = 100 + session.commit() + + # test sorter + sorter_param = urllib.parse.quote('created_at asc') + get_response = self.get_helper(f'/api/v2/datasets?order_by={sorter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 4) + self.assertEqual([dataset.get('id') for dataset in datasets], [4, 1, 2, 3]) + + fake_sorter_param = urllib.parse.quote('fake_time asc') + get_response = self.get_helper(f'/api/v2/datasets?order_by={fake_sorter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.BAD_REQUEST) + + # test filter + filter_param = urllib.parse.quote('(and(project_id=1)(name~="default"))') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 3) + self.assertEqual([dataset.get('name') for dataset in datasets], + ['default dataset2', 'default dataset1', 'default data_source']) + + filter_param = urllib.parse.quote('(and(project_id=1)(dataset_format:["TABULAR"])(dataset_kind:["RAW"]))') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('name'), 'default dataset1') + + filter_param = 
urllib.parse.quote('(dataset_format="IMAGE")') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 0) + + filter_param = urllib.parse.quote('(dataset_format="UNKOWN")') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.BAD_REQUEST) + + filter_param = urllib.parse.quote('(is_published=false)') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 4) + + # test state_frontend + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.state = DatasetJobState.SUCCEEDED + session.commit() + get_response = self.get_helper('/api/v2/datasets?state_frontend=SUCCEEDED') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('name'), 'default dataset1') + + def test_get_datasets_by_publish_frontend_state(self): + with db.session_scope() as session: + default_dataset_1 = session.query(Dataset).get(1) + default_dataset_1.ticket_status = None + default_dataset_1.is_published = False + default_dataset_2 = session.query(Dataset).get(2) + default_dataset_2.ticket_status = TicketStatus.PENDING + default_dataset_2.is_published = True + default_dataset_3 = session.query(Dataset).get(3) + default_dataset_3.ticket_status = TicketStatus.APPROVED + default_dataset_3.is_published = True + session.commit() + filter_param = urllib.parse.quote('(publish_frontend_state="UNPUBLISHED")') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('id'), 1) + + filter_param = urllib.parse.quote('(publish_frontend_state="TICKET_PENDING")') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('id'), 2) + + filter_param = urllib.parse.quote('(publish_frontend_state="PUBLISHED")') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('id'), 3) + + def test_get_datasets_by_auth_state(self): + with db.session_scope() as session: + default_dataset_1 = session.query(Dataset).get(1) + default_dataset_1.auth_status = AuthStatus.AUTHORIZED + default_dataset_2 = session.query(Dataset).get(2) + default_dataset_2.auth_status = AuthStatus.PENDING + default_dataset_3 = session.query(Dataset).get(3) + default_dataset_3.auth_status = AuthStatus.WITHDRAW + session.commit() + filter_param = urllib.parse.quote('(auth_status:["AUTHORIZED", "WITHDRAW"])') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 2) 
+ self.assertEqual([dataset.get('id') for dataset in datasets], [3, 1]) + + with db.session_scope() as session: + default_dataset_2 = session.query(Dataset).get(2) + default_dataset_2.auth_status = None + session.commit() + filter_param = urllib.parse.quote('(auth_status:["AUTHORIZED"])') + get_response = self.get_helper(f'/api/v2/datasets?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 2) + self.assertEqual([dataset.get('id') for dataset in datasets], [2, 1]) + + def test_get_internal_processed_datasets(self): + with db.session_scope() as session: + internal_processed_dataset = Dataset(id=10, + uuid=resource_uuid(), + name='internal_processed dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.INTERNAL_PROCESSED, + is_published=False, + auth_status=AuthStatus.AUTHORIZED) + session.add(internal_processed_dataset) + + dataset_job = session.query(DatasetJob).get(2) + dataset_job.state = DatasetJobState.SUCCEEDED + + session.commit() + + get_response = self.get_helper('/api/v2/datasets?state_frontend=SUCCEEDED') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 2) + self.assertCountEqual([dataset.get('name') for dataset in datasets], + ['default dataset2', 'internal_processed dataset']) + + filter_param = urllib.parse.quote('(dataset_kind:["PROCESSED"])') + get_response = self.get_helper(f'/api/v2/datasets?state_frontend=SUCCEEDED&filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('name'), 'default dataset2') + + get_response = self.get_helper('/api/v2/datasets?state_frontend=FAILED') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('name'), 'default dataset1') + + def test_get_datasets_by_time_range(self): + with db.session_scope() as session: + default_dataset_1 = session.query(Dataset).get(1) + default_dataset_1.parent_dataset_job.time_range = timedelta(days=1) + default_dataset_2 = session.query(Dataset).get(2) + default_dataset_2.parent_dataset_job.time_range = timedelta(hours=1) + session.commit() + get_response = self.get_helper('/api/v2/datasets?cron_interval=DAYS') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('id'), 1) + get_response = self.get_helper('/api/v2/datasets?cron_interval=HOURS') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 1) + self.assertEqual(datasets[0].get('id'), 2) + get_response = self.get_helper('/api/v2/datasets') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + datasets = self.get_response_data(get_response) + self.assertEqual(len(datasets), 3) + + def test_change_dataset_comment(self): + get_response = self.patch_helper(f'/api/v2/datasets/{self.default_dataset1.id}', data={'comment': 'test api'}) + self.assertEqual(get_response.status_code, HTTPStatus.OK) + with db.session_scope() as session: + dataset = 
session.query(Dataset).get(self.default_dataset1.id) + self.assertEqual(dataset.comment, 'test api') + + def test_preview_dataset(self): + with db.session_scope() as session: + tmp_path = tempfile.gettempdir() + self.batch_path = os.path.join(tmp_path, 'dataset/20211228_161352_train-ds/batch/20211228_081351') + self.default_databatch1 = DataBatch(name='20220101', + id=111, + event_time=datetime.now(), + comment='comment', + state=BatchState.NEW, + dataset_id=1, + path=self.batch_path) + session.add(self.default_databatch1) + session.commit() + with db.session_scope() as session: + tmp_path = tempfile.gettempdir() + self.batch_path = os.path.join(tmp_path, 'dataset/20211228_161352_train-ds/batch/20211228_081352') + self.default_databatch2 = DataBatch(name='20220102', + id=222, + event_time=datetime.now(), + comment='comment', + state=BatchState.NEW, + dataset_id=2, + path=self.batch_path) + session.add(self.default_databatch2) + session.commit() + meta_file = DatasetDirectory(dataset_path=self.default_dataset2.path).batch_meta_file(batch_name='20220101') + gfile.makedirs(meta_file.split('/_META')[0]) + meta_data = { + 'dtypes': [{ + 'key': 'f01', + 'value': 'bigint' + }], + 'sample': [ + [ + 1, + ], + [ + 0, + ], + ], + 'count': 0, + 'features': { + 'f01': { + 'count': '2', + 'mean': '0.0015716767309123998', + 'stddev': '0.03961485047808605', + 'min': '0', + 'max': '1', + 'missing_count': '0' + }, + }, + 'hist': { + 'f01': { + 'x': [ + 0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, + 1 + ], + 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] + }, + }, + } + with gfile.GFile(meta_file, 'w') as f: + f.write(json.dumps(meta_data)) + + response = self.get_helper('/api/v2/datasets/2/preview?batch_id=111') + self.assertEqual(response.status_code, 200) + preview_data = self.get_response_data(response) + golden_preview = { + 'dtypes': [{ + 'key': 'f01', + 'value': 'bigint' + }], + 'sample': [ + [1], + [0], + ], + 'num_example': 0, + 'metrics': { + 'f01': { + 'count': '2', + 'mean': '0.0015716767309123998', + 'stddev': '0.03961485047808605', + 'min': '0', + 'max': '1', + 'missing_count': '0' + }, + }, + } + self.assertEqual(preview_data, golden_preview, 'should has preview data') + + @patch('fedlearner_webconsole.dataset.services.get_dataset_path') + def test_post_raw_datasets(self, mock_get_dataset_path: MagicMock): + name = 'test_dataset' + dataset_path = os.path.join(self._storage_root, 'dataset/20200608_060606_test-post-dataset') + mock_get_dataset_path.return_value = dataset_path + dataset_type = DatasetType.PSI.value + comment = 'test comment' + create_response = self.post_helper('/api/v2/datasets', + data={ + 'name': name, + 'dataset_type': dataset_type, + 'comment': comment, + 'project_id': 1, + 'dataset_format': DatasetFormat.TABULAR.name, + 'kind': DatasetKindV2.RAW.value, + 'need_publish': True, + 'value': 100, + }) + self.assertEqual(create_response.status_code, HTTPStatus.CREATED) + + self.assertResponseDataEqual( + create_response, + { + 'id': ANY, + 'name': 'test_dataset', + 'creator_username': 'ada', + 'comment': comment, + 'path': dataset_path, + 'deleted_at': 0, + 'data_source': '', + 'project_id': 1, + 'dataset_kind': DatasetKindV2.RAW.name, + 'is_published': False, + 'dataset_format': DatasetFormat.TABULAR.name, + 'state_frontend': ResourceState.FAILED.value, + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'parent_dataset_job_id': 0, + 'workflow_id': 0, + 'value': 100, + 'schema_checkers': [], + 'dataset_type': 
DatasetType.PSI.value, + 'import_type': ImportType.COPY.value, + 'store_format': StoreFormat.TFRECORDS.value, + 'analyzer_dataset_job_id': 0, + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'AUTHORIZED', + 'participants_info': { + 'participants_map': {} + }, + }, + ignore_fields=['created_at', 'updated_at', 'uuid'], + ) + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).outerjoin( + Dataset, Dataset.id == DataBatch.dataset_id).filter(Dataset.name == name).first() + self.assertIsNone(data_batch) + + @patch('fedlearner_webconsole.dataset.services.get_dataset_path') + def test_post_processed_datasets(self, mock_get_dataset_path: MagicMock): + name = 'test_dataset' + dataset_path = os.path.join(self._storage_root, 'dataset/20200608_060606_test-post-dataset') + mock_get_dataset_path.return_value = dataset_path + dataset_type = DatasetType.PSI.value + comment = 'test comment' + + # test bad request + create_response = self.post_helper('/api/v2/datasets', + data={ + 'name': name, + 'dataset_type': dataset_type, + 'comment': comment, + 'project_id': 1, + 'dataset_format': DatasetFormat.TABULAR.name, + 'kind': DatasetKindV2.PROCESSED.value, + 'need_publish': True, + 'value': 100, + 'is_published': False + }) + self.assertEqual(create_response.status_code, HTTPStatus.BAD_REQUEST) + + # test pass + create_response = self.post_helper('/api/v2/datasets', + data={ + 'name': name, + 'dataset_type': dataset_type, + 'comment': comment, + 'project_id': 1, + 'dataset_format': DatasetFormat.TABULAR.name, + 'kind': DatasetKindV2.PROCESSED.value, + 'is_published': True, + 'value': 100, + }) + self.assertEqual(create_response.status_code, HTTPStatus.CREATED) + + self.assertResponseDataEqual( + create_response, + { + 'id': ANY, + 'name': 'test_dataset', + 'creator_username': 'ada', + 'comment': comment, + 'path': dataset_path, + 'deleted_at': 0, + 'data_source': '', + 'project_id': 1, + 'dataset_kind': DatasetKindV2.PROCESSED.name, + 'is_published': True, + 'dataset_format': DatasetFormat.TABULAR.name, + 'state_frontend': ResourceState.FAILED.value, + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'parent_dataset_job_id': 0, + 'workflow_id': 0, + 'value': 100, + 'schema_checkers': [], + 'dataset_type': DatasetType.PSI.value, + 'import_type': ImportType.COPY.value, + 'store_format': StoreFormat.TFRECORDS.value, + 'analyzer_dataset_job_id': 0, + 'publish_frontend_state': 'PUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'AUTHORIZED', + 'participants_info': { + 'participants_map': {} + }, + }, + ignore_fields=['created_at', 'updated_at', 'uuid'], + ) + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).outerjoin( + Dataset, Dataset.id == DataBatch.dataset_id).filter(Dataset.name == name).first() + self.assertIsNone(data_batch) + dataset = session.query(Dataset).filter(Dataset.name == name).first() + self.assertEqual(dataset.ticket_status, TicketStatus.APPROVED) + + @patch('fedlearner_webconsole.dataset.services.get_dataset_path') + def test_post_datasets_with_checkers(self, mock_get_dataset_path: MagicMock): + dataset_path = os.path.join(self._storage_root, 'dataset/20200608_060606_test-post-dataset') + mock_get_dataset_path.return_value = dataset_path + create_response = self.post_helper('/api/v2/datasets', + data={ + 'name': 'fake_dataset', + 'comment': 'comment', + 'project_id': 1, + 'dataset_format': DatasetFormat.TABULAR.name, + 'kind': 
DatasetKindV2.RAW.value, + 'schema_checkers': ['RAW_ID_CHECKER', 'NUMERIC_COLUMNS_CHECKER'] + }) + self.assertEqual(create_response.status_code, HTTPStatus.CREATED) + + self.assertResponseDataEqual( + create_response, + { + 'id': 4, + 'name': 'fake_dataset', + 'creator_username': 'ada', + 'comment': 'comment', + 'path': dataset_path, + 'deleted_at': 0, + 'data_source': '', + 'project_id': 1, + 'dataset_kind': DatasetKindV2.RAW.name, + 'is_published': False, + 'dataset_format': DatasetFormat.TABULAR.name, + 'state_frontend': ResourceState.FAILED.value, + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'parent_dataset_job_id': 0, + 'workflow_id': 0, + 'value': 0, + 'schema_checkers': ['RAW_ID_CHECKER', 'NUMERIC_COLUMNS_CHECKER'], + 'dataset_type': 'PSI', + 'import_type': 'COPY', + 'store_format': 'TFRECORDS', + 'analyzer_dataset_job_id': 0, + 'publish_frontend_state': 'UNPUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'AUTHORIZED', + 'participants_info': { + 'participants_map': {} + }, + }, + ignore_fields=['created_at', 'updated_at', 'uuid'], + ) + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(4) + meta_info = dataset.get_meta_info() + self.assertEqual(list(meta_info.schema_checkers), ['RAW_ID_CHECKER', 'NUMERIC_COLUMNS_CHECKER']) + + def _fake_schema_check_test_data(self): + # schema check test + self.dataset_dir = tempfile.mkdtemp() + self.dataset_csv = Path(self.dataset_dir).joinpath('test.csv') + self.dataset_json = Path(self.dataset_dir).joinpath('validation_jsonschema.json') + self.error_dir = Path(self.dataset_dir).joinpath('error') + self.error_dir.mkdir() + self.error_json = self.error_dir.joinpath('schema_error.json') + + with db.session_scope() as session: + self.schema_check_dataset = Dataset(name='schema_check_dataset', + dataset_type=DatasetType.STREAMING, + comment='schema check dataset', + path=str(self.dataset_dir), + project_id=1) + session.add(self.schema_check_dataset) + session.flush() + self.schema_check_batch = DataBatch(dataset_id=self.schema_check_dataset.id, + event_time=datetime(2021, 10, 28, 16, 37, 37), + comment='schema check batch') + session.add(self.schema_check_batch) + session.commit() + + def __del__(self): + # delete the dataset path created by the test_post_*_datasets cases above + dataset_path = os.path.join(self._storage_root, 'dataset/20200608_060606_test-post-dataset') + if self._file_manager.isdir(dataset_path): + self._file_manager.remove(dataset_path) + + +class DatasetExportApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test_project') + session.add(project) + session.flush([project]) + + dataset = Dataset(id=1, + name='test_dataset', + dataset_type=DatasetType.PSI, + uuid='dataset uuid', + comment='comment', + path='/data/dataset/321', + project_id=project.id, + dataset_format=DatasetFormat.NONE_STRUCTURED.value, + store_format=StoreFormat.UNKNOWN, + dataset_kind=DatasetKindV2.PROCESSED) + data_batch = DataBatch(id=1, name='0', comment='comment', dataset_id=1, path='/data/dataset/321/batch/0') + session.add_all([dataset, data_batch]) + + streaming_dataset = Dataset(id=2, + name='test_streaming_dataset', + dataset_type=DatasetType.STREAMING, + uuid='streaming dataset uuid', + comment='comment', + path='/data/dataset/streaming_dataset', + project_id=project.id, + dataset_format=DatasetFormat.TABULAR.value, + 
store_format=StoreFormat.TFRECORDS, + dataset_kind=DatasetKindV2.PROCESSED) + streaming_data_batch_1 = DataBatch(id=2, + name='20220101', + comment='comment', + dataset_id=2, + path='/data/dataset/321/batch/20220101', + event_time=datetime(2022, 1, 1)) + streaming_data_batch_2 = DataBatch(id=3, + name='20220102', + comment='comment', + dataset_id=2, + path='/data/dataset/321/batch/20220102', + event_time=datetime(2022, 1, 2)) + session.add_all([streaming_dataset, streaming_data_batch_1, streaming_data_batch_2]) + + session.commit() + + @patch('fedlearner_webconsole.dataset.apis.SettingService.get_system_info', + lambda: setting_pb2.SystemInfo(pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.apis.Envs.STORAGE_ROOT', '/data') + @patch('fedlearner_webconsole.utils.file_manager.FileManager.isdir', lambda *args: True) + @patch('fedlearner_webconsole.dataset.models.DataBatch.is_available', lambda _: True) + def test_export_dataset_none_streaming(self): + export_path = '/data/user_home/export_dataset' + resp = self.post_helper('/api/v2/datasets/1:export', { + 'export_path': export_path, + 'batch_id': 1, + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + resp_data = self.get_response_data(resp) + self.assertEqual(resp_data, {'export_dataset_id': 3, 'dataset_job_id': 1}) + export_dataset_name = 'export-test_dataset-0-0' + with db.session_scope() as session: + export_dataset: Dataset = session.query(Dataset).filter(Dataset.name == export_dataset_name).first() + self.assertEqual(export_dataset.dataset_kind, DatasetKindV2.EXPORTED) + self.assertEqual(export_dataset.store_format, StoreFormat.UNKNOWN) + self.assertEqual(export_dataset.dataset_type, DatasetType.PSI) + self.assertEqual(export_dataset.path, 'file://' + export_path) + self.assertFalse(export_dataset.is_published) + batch = export_dataset.get_single_batch() + self.assertEqual(batch.batch_name, '0') + self.assertEqual(batch.path, 'file://' + export_path + '/batch/0') + dataset_job: DatasetJob = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.kind, DatasetJobKind.EXPORT) + self.assertEqual( + dataset_job.get_global_configs(), + dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'test_domain': dataset_pb2.DatasetJobConfig(dataset_uuid='dataset uuid')})) + dataset_job_stages = dataset_job.dataset_job_stages + self.assertEqual(len(dataset_job_stages), 1) + self.assertEqual(dataset_job_stages[0].data_batch_id, batch.id) + + @patch('fedlearner_webconsole.dataset.apis.SettingService.get_system_info', + lambda: setting_pb2.SystemInfo(pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.apis.Envs.STORAGE_ROOT', '/data') + @patch('fedlearner_webconsole.utils.file_manager.FileManager.isdir', lambda *args: True) + @patch('fedlearner_webconsole.dataset.models.DataBatch.is_available', lambda _: True) + def test_export_dataset_streaming(self): + export_path = '/data/user_home/export_dataset' + export_path_with_space = ' ' + export_path + ' ' + resp = self.post_helper('/api/v2/datasets/2:export', {'export_path': export_path_with_space}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + resp_data = self.get_response_data(resp) + self.assertEqual(resp_data, {'export_dataset_id': 3, 'dataset_job_id': 1}) + export_dataset_name = 'export-test_streaming_dataset-0' + with db.session_scope() as session: + export_dataset: Dataset = session.query(Dataset).filter(Dataset.name == export_dataset_name).first() + self.assertEqual(export_dataset.dataset_kind, DatasetKindV2.EXPORTED) + 
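+ # Note: unlike the none-streaming export above (UNKNOWN in, UNKNOWN out), a TABULAR/TFRECORDS dataset is expected to come out of an export as CSV, which is what the assertions below check.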
self.assertEqual(export_dataset.store_format, StoreFormat.CSV) + self.assertEqual(export_dataset.dataset_type, DatasetType.STREAMING) + self.assertEqual(export_dataset.path, 'file://' + export_path) + self.assertFalse(export_dataset.is_published) + batches = export_dataset.data_batches + self.assertEqual(len(batches), 2) + self.assertEqual(batches[0].batch_name, '20220102') + self.assertEqual(batches[0].path, 'file://' + export_path + '/batch/20220102') + self.assertEqual(batches[0].event_time, datetime(2022, 1, 2)) + self.assertEqual(batches[1].batch_name, '20220101') + self.assertEqual(batches[1].path, 'file://' + export_path + '/batch/20220101') + self.assertEqual(batches[1].event_time, datetime(2022, 1, 1)) + dataset_job: DatasetJob = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.kind, DatasetJobKind.EXPORT) + self.assertEqual( + dataset_job.get_global_configs(), + dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'test_domain': dataset_pb2.DatasetJobConfig( + dataset_uuid='streaming dataset uuid')})) + dataset_job_stages = dataset_job.dataset_job_stages + self.assertEqual(len(dataset_job_stages), 2) + + +class BatchesApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test_project') + session.add(project) + session.flush([project]) + + dataset = Dataset(name='test_dataset', + dataset_type=DatasetType.PSI, + uuid=resource_uuid(), + comment='comment', + path='/data/dataset/321', + project_id=project.id, + dataset_format=DatasetFormat.TABULAR.value, + dataset_kind=DatasetKindV2.RAW) + session.add(dataset) + + data_source = DataSource(name='test_datasource', + uuid=resource_uuid(), + path='/upload/', + project_id=project.id, + dataset_kind=DatasetKindV2.SOURCE) + session.add(data_source) + + session.commit() + self._project_id = project.id + self._dataset_id = dataset.id + self._data_source_id = data_source.id + self._data_source_uuid = data_source.uuid + + def test_get_data_batches(self): + with db.session_scope() as session: + data_batch_1 = DataBatch(dataset_id=self._dataset_id, + name='20220101', + event_time=datetime(2022, 1, 1), + created_at=datetime(2022, 1, 1, 0, 0, 0), + comment='batch_1', + path='/data/dataset/123/batch/20220101') + session.add(data_batch_1) + data_batch_2 = DataBatch(dataset_id=self._dataset_id, + name='20220102', + event_time=datetime(2022, 1, 2), + created_at=datetime(2022, 1, 2, 0, 0, 0), + comment='batch_2', + path='/data/dataset/123/batch/20220102') + session.add(data_batch_2) + session.commit() + sorter_param = urllib.parse.quote('created_at asc') + response = self.get_helper( + f'/api/v2/datasets/{self._dataset_id}/batches?page=1&page_size=5&order_by={sorter_param}') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + response, + [{ + 'id': 1, + 'dataset_id': 1, + 'comment': 'batch_1', + 'created_at': to_timestamp(datetime(2022, 1, 1, 0, 0, 0)), + 'event_time': to_timestamp(datetime(2022, 1, 1)), + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'name': '20220101', + 'path': '/data/dataset/123/batch/20220101', + 'state': 'FAILED', + 'latest_parent_dataset_job_stage_id': 0, + 'latest_analyzer_dataset_job_stage_id': 0, + }, { + 'id': 2, + 'dataset_id': 1, + 'comment': 'batch_2', + 'created_at': to_timestamp(datetime(2022, 1, 2, 0, 0, 0)), + 'event_time': to_timestamp(datetime(2022, 1, 2)), + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 
'name': '20220102', + 'path': '/data/dataset/123/batch/20220102', + 'state': 'FAILED', + 'latest_parent_dataset_job_stage_id': 0, + 'latest_analyzer_dataset_job_stage_id': 0, + }], + ignore_fields=['updated_at'], + ) + self.assertEqual( + json.loads(response.data).get('page_meta'), { + 'current_page': 1, + 'page_size': 5, + 'total_pages': 1, + 'total_items': 2 + }) + + +class BatchApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test_project') + session.add(project) + session.flush([project]) + + dataset = Dataset(name='test_dataset', + dataset_type=DatasetType.STREAMING, + uuid=resource_uuid(), + comment='comment', + path='/data/dataset/123', + project_id=project.id, + dataset_format=DatasetFormat.TABULAR.value, + dataset_kind=DatasetKindV2.RAW) + session.add(dataset) + session.flush() + + data_batch = DataBatch(dataset_id=dataset.id, + name='20220101', + event_time=datetime(2022, 1, 1), + created_at=datetime(2022, 1, 1, 0, 0, 0), + comment='batch_1', + path='/data/dataset/123/batch/20220101') + session.add(data_batch) + session.flush() + + session.commit() + self._project_id = project.id + self._dataset_id = dataset.id + self._data_batch_id = data_batch.id + + def test_get_data_batch(self): + response = self.get_helper(f'/api/v2/datasets/{self._dataset_id}/batches/{self._data_batch_id}') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + response, + { + 'id': self._data_batch_id, + 'dataset_id': self._dataset_id, + 'comment': 'batch_1', + 'created_at': to_timestamp(datetime(2022, 1, 1, 0, 0, 0)), + 'event_time': to_timestamp(datetime(2022, 1, 1)), + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'name': '20220101', + 'path': '/data/dataset/123/batch/20220101', + 'state': 'FAILED', + 'latest_parent_dataset_job_stage_id': 0, + 'latest_analyzer_dataset_job_stage_id': 0, + }, + ignore_fields=['updated_at'], + ) + + +class BatchMetricsApiTest(BaseTestCase): + + def test_get_batch_metrics(self): + with db.session_scope() as session: + default_dataset = Dataset(id=1, + name='dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path=os.path.join(tempfile.gettempdir(), 'dataset/123'), + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED, + dataset_format=DatasetFormat.TABULAR.value, + created_at=datetime(2012, 1, 14, 12, 0, 6)) + session.add(default_dataset) + default_databatch = DataBatch(name='20220101', + id=111, + event_time=datetime(2022, 1, 1), + comment='comment', + state=BatchState.NEW, + dataset_id=1, + path='/data/test/batch/20220101') + session.add(default_databatch) + session.commit() + meta_file = DatasetDirectory(dataset_path=default_dataset.path).batch_meta_file(batch_name='20220101') + gfile.makedirs(meta_file.split('/_META')[0]) + meta_data = { + 'dtypes': [{ + 'key': 'f01', + 'value': 'bigint' + }], + 'sample': [ + [ + 1, + ], + [ + 0, + ], + ], + 'count': 0, + 'features': { + 'f01': { + 'count': '2', + 'mean': '0.0015716767309123998', + 'stddev': '0.03961485047808605', + 'min': '0', + 'max': '1', + 'missing_count': '0' + }, + }, + 'hist': { + 'f01': { + 'x': [ + 0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, + 1 + ], + 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] + }, + }, + } + with gfile.GFile(meta_file, 'w') as f: + f.write(json.dumps(meta_data)) + + feat_name = 'f01' + feature_response = 
self.get_helper(f'/api/v2/datasets/1/batches/111/feature_metrics?name={feat_name}') + self.assertEqual(feature_response.status_code, 200) + feature_data = self.get_response_data(feature_response) + golden_feature = { + 'name': feat_name, + 'metrics': { + 'count': '2', + 'mean': '0.0015716767309123998', + 'stddev': '0.03961485047808605', + 'min': '0', + 'max': '1', + 'missing_count': '0' + }, + 'hist': { + 'x': [ + 0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1 + ], + 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] + }, + } + self.assertEqual(feature_data, golden_feature, 'should have feature data') + + +class BatchesAnalyzeApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.dataset.apis.get_pure_domain_name', lambda _: 'test_domain') + @patch('fedlearner_webconsole.dataset.apis.DatasetJobService.create_as_coordinator') + @patch('fedlearner_webconsole.dataset.apis.DatasetJobStageService.create_dataset_job_stage_as_coordinator') + def test_analyze_data_batch(self, create_dataset_job_stage_as_coordinator: MagicMock, + mock_create_as_coordinator: MagicMock): + with db.session_scope() as session: + project = Project(id=1, name='test-project') + session.add(project) + dataset = Dataset(id=1, + name='default dataset', + uuid='dataset_uuid', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment2', + path='data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED, + dataset_format=DatasetFormat.TABULAR.value) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220101', + comment='comment', + event_time=datetime(2022, 1, 1), + dataset_id=1, + path='/data/dataset/123/batch/20220101') + session.add(data_batch) + session.commit() + mock_dataset_job = DatasetJob(id=1, + name='analyzer_dataset_job', + uuid='123', + project_id=1, + output_dataset_id=1, + input_dataset_id=1, + kind=DatasetJobKind.ANALYZER, + coordinator_id=0, + state=DatasetJobState.PENDING, + created_at=datetime(2022, 1, 1), + updated_at=datetime(2022, 1, 1), + creator_username='test user') + mock_dataset_job.set_global_configs(global_configs=dataset_pb2.DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + dataset_pb2.DatasetJobConfig(dataset_uuid='u123', + variables=[ + Variable(name='name1', value='value1'), + Variable(name='name2', value='value2'), + Variable(name='name3', value='value3') + ]) + })) + mock_create_as_coordinator.return_value = mock_dataset_job + response = self.post_helper( + '/api/v2/datasets/1/batches/1:analyze', { + 'dataset_job_config': { + 'variables': [{ + 'name': 'name1', + 'value': 'value1', + }, { + 'name': 'name2', + 'value': 'value2', + }, { + 'name': 'name3', + 'value': 'value3', + }] + } + }) + self.assertEqual(response.status_code, HTTPStatus.OK) + self.maxDiff = None + self.assertResponseDataEqual( + response, { + 'name': 'analyzer_dataset_job', + 'uuid': '123', + 'project_id': 1, + 'kind': 'ANALYZER', + 'state': 'PENDING', + 'created_at': to_timestamp(datetime(2022, 1, 1)), + 'updated_at': to_timestamp(datetime(2022, 1, 1)), + 'result_dataset_uuid': '', + 'result_dataset_name': '', + 'is_ready': False, + 'input_data_batch_num_example': 0, + 'output_data_batch_num_example': 0, + 'id': 1, + 'coordinator_id': 0, + 'workflow_id': 0, + 'finished_at': 0, + 'started_at': 0, + 'has_stages': False, + 'creator_username': 'test user', + 'scheduler_state': '', + 'global_configs': ANY, + 'time_range': { + 'days': 0, + 'hours': 0, + }, + 'scheduler_message': '', + }) + 
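+ # Besides creating the ANALYZER dataset_job as coordinator, the handler is expected to create exactly one job stage for the analyzed batch, carrying the job-level global_configs; the call check below verifies that.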
create_dataset_job_stage_as_coordinator.assert_called_once_with( + project_id=1, + dataset_job_id=1, + output_data_batch_id=1, + global_configs=dataset_pb2.DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + dataset_pb2.DatasetJobConfig(dataset_uuid='u123', + variables=[ + Variable(name='name1', value='value1'), + Variable(name='name2', value='value2'), + Variable(name='name3', value='value3') + ]) + })) + + +class BatchRerunApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.dataset.apis.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.dataset.apis.DatasetJobStageService.create_dataset_job_stage_as_coordinator') + def test_rerun_batch(self, mock_create_dataset_job_stage_as_coordinator: MagicMock, mock_list_flags: MagicMock): + with db.session_scope() as session: + project = Project(id=1, name='test-project') + session.add(project) + dataset = Dataset(id=1, + name='default dataset', + uuid='dataset_uuid', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment2', + path='data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED, + dataset_format=DatasetFormat.TABULAR.value) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220101', + comment='comment', + event_time=datetime(2022, 1, 1), + dataset_id=1, + path='/data/dataset/123/batch/20220101') + session.add(data_batch) + dataset_job = DatasetJob(id=1, + name='default dataset_job', + uuid='u123', + project_id=1, + output_dataset_id=1, + input_dataset_id=1, + kind=DatasetJobKind.OT_PSI_DATA_JOIN, + coordinator_id=0, + state=DatasetJobState.PENDING, + created_at=datetime(2022, 1, 1), + updated_at=datetime(2022, 1, 1), + creator_username='test user') + dataset_job.set_global_configs(global_configs=dataset_pb2.DatasetJobGlobalConfigs( + global_configs={ + 'coordinator_domain': + dataset_pb2.DatasetJobConfig(dataset_uuid='u123', + variables=[ + Variable(name='name1', + typed_value=Value(string_value='value1-1'), + value_type=Variable.ValueType.STRING), + Variable(name='name2', + typed_value=Value(string_value='value1-2'), + value_type=Variable.ValueType.STRING), + Variable(name='name3', + typed_value=Value(string_value='value1-3'), + value_type=Variable.ValueType.STRING), + ]), + 'participant_domain': + dataset_pb2.DatasetJobConfig(dataset_uuid='u123', + variables=[ + Variable(name='name1', + typed_value=Value(string_value='value1-1'), + value_type=Variable.ValueType.STRING), + Variable(name='name2', + typed_value=Value(string_value='value1-2'), + value_type=Variable.ValueType.STRING), + Variable(name='name3', + typed_value=Value(string_value='value1-3'), + value_type=Variable.ValueType.STRING), + ]), + })) + session.add(dataset_job) + participant = Participant(id=1, name='participant_1', domain_name='fl-fake_domain_name_1.com') + session.add(participant) + session.commit() + + mock_create_dataset_job_stage_as_coordinator.return_value = DatasetJobStage(id=1, + name='mock stage', + uuid='fake stage uuid', + state=DatasetJobState.PENDING, + dataset_job_id=1, + data_batch_id=1, + coordinator_id=1, + created_at=datetime(2022, 1, 1), + updated_at=datetime(2022, 1, 1)) + rerun_config = { + 'dataset_job_parameter': { + 'global_configs': { + 'fl-coordinator_domain.com': { + 'variables': [{ + 'name': 'name1', + 'typed_value': 'value2-1', + 'value_type': 'STRING', + }, { + 'name': 'name2', + 'typed_value': 'value2-2', + 'value_type': 'STRING', + }, { + 'name': 'name3', + 'typed_value': 'value2-3', + 'value_type': 'STRING', + }] + }, + 
'fl-participant_domain.com': { + 'variables': [{ + 'name': 'name1', + 'typed_value': 'value2-1', + 'value_type': 'STRING', + }] + }, + }, + }, + } + response = self.post_helper('/api/v2/datasets/1/batches/1:rerun', rerun_config) + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_create_dataset_job_stage_as_coordinator.assert_called_once_with( + project_id=1, + dataset_job_id=1, + output_data_batch_id=1, + global_configs=dataset_pb2.DatasetJobGlobalConfigs( + global_configs={ + 'coordinator_domain': + dataset_pb2.DatasetJobConfig(dataset_uuid='u123', + variables=[ + Variable(name='name1', + typed_value=Value(string_value='value2-1'), + value_type=Variable.ValueType.STRING), + Variable(name='name2', + typed_value=Value(string_value='value2-2'), + value_type=Variable.ValueType.STRING), + Variable(name='name3', + typed_value=Value(string_value='value2-3'), + value_type=Variable.ValueType.STRING), + ]), + 'participant_domain': + dataset_pb2.DatasetJobConfig(dataset_uuid='u123', + variables=[ + Variable(name='name1', + typed_value=Value(string_value='value2-1'), + value_type=Variable.ValueType.STRING), + Variable(name='name2', + typed_value=Value(string_value='value1-2'), + value_type=Variable.ValueType.STRING), + Variable(name='name3', + typed_value=Value(string_value='value1-3'), + value_type=Variable.ValueType.STRING), + ]), + })) + + # test the case where the participant does not support rerun + mock_list_flags.return_value = {'data_batch_rerun_enabled': False} + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.coordinator_id = 1 + session.commit() + response = self.post_helper('/api/v2/datasets/1/batches/1:rerun', rerun_config) + self.assertEqual(response.status_code, HTTPStatus.METHOD_NOT_ALLOWED) + + +class DataSourcesApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + with db.session_scope() as session: + default_project = Project(id=1, name='default_project') + datasource_1 = DataSource(id=100, + uuid=resource_uuid(), + name='datasource_1', + creator_username='test', + path='hdfs:///data/fake_path_1', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + is_published=False, + store_format=StoreFormat.TFRECORDS, + dataset_format=DatasetFormat.IMAGE.value, + dataset_type=DatasetType.STREAMING) + datasource_1.set_meta_info(meta=dataset_pb2.DatasetMetaInfo(datasource_type=DataSourceType.HDFS.value)) + datasource_2 = DataSource(id=101, + uuid=resource_uuid(), + name='datasource_2', + creator_username='test', + path='hdfs:///data/fake_path_2', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 6), + is_published=False, + store_format=StoreFormat.CSV, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.PSI) + datasource_2.set_meta_info(meta=dataset_pb2.DatasetMetaInfo(datasource_type=DataSourceType.HDFS.value)) + session.add(default_project) + session.add(datasource_1) + session.add(datasource_2) + session.commit() + + def test_parse_data_source_url(self): + url = 'hdfs:///home/test' + data_source = dataset_pb2.DataSource(type=DataSourceType.HDFS.value, + url='hdfs:///home/test', + is_user_upload=False) + self.assertEqual(_parse_data_source_url(url), data_source) + + url = '/data/test' + data_source = dataset_pb2.DataSource(type=DataSourceType.FILE.value, + url='file:///data/test', + is_user_upload=False) + self.assertEqual(_parse_data_source_url(url), data_source) + 
url = 'hdfs/' + with self.assertRaises(ValidationError): + _parse_data_source_url(url) + + @patch('fedlearner_webconsole.utils.file_manager.FileManager.listdir') + @patch('fedlearner_webconsole.utils.file_manager.FileManager.isdir', fake_isdir) + @patch('fedlearner_webconsole.dataset.apis.Envs.STORAGE_ROOT', new_callable=PropertyMock) + def test_data_source_check_connection(self, mock_storage_root: MagicMock, mock_listdir: MagicMock): + mock_storage_root.return_value = 'hdfs:///home/' + mock_listdir.return_value = ['_SUCCESS', 'test.csv'] + resp = self.post_helper('/api/v2/data_sources:check_connection', { + 'data_source_url': 'hdfs:///home/', + 'file_num': 1, + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, { + 'file_names': ['_SUCCESS',], + 'extra_nums': 1, + }) + + mock_storage_root.reset_mock() + mock_storage_root.return_value = 'file:///data' + resp = self.post_helper('/api/v2/data_sources:check_connection', {'data_source_url': 'file:///data/test'}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, { + 'file_names': ['_SUCCESS', 'test.csv'], + 'extra_nums': 0, + }) + + resp = self.post_helper('/api/v2/data_sources:check_connection', {'data_source_url': 'file:///data/fake_path'}) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + resp = self.post_helper('/api/v2/data_sources:check_connection', {}) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + self.assertIn('required', resp.json.get('details').get('json').get('data_source_url')[0]) + + resp = self.post_helper('/api/v2/data_sources:check_connection', {'data_source_url': 'hdfs:/home/'}) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual(resp.json.get('details'), 'invalid data_source_url: hdfs:/home/') + + mock_listdir.reset_mock() + mock_listdir.return_value = ['20220801', '20220802'] + resp = self.post_helper('/api/v2/data_sources:check_connection', { + 'data_source_url': 'hdfs:///home/', + 'dataset_type': 'STREAMING' + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, { + 'file_names': ['20220801', '20220802'], + 'extra_nums': 0, + }) + + mock_listdir.reset_mock() + mock_listdir.return_value = ['20220803-15', '2022080316'] + resp = self.post_helper('/api/v2/data_sources:check_connection', { + 'data_source_url': 'hdfs:///home/', + 'dataset_type': 'STREAMING' + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual(resp.json.get('details'), 'illegal dir format: 2022080316') + + @patch('fedlearner_webconsole.dataset.apis.Envs.STORAGE_ROOT', '2022080316') + @patch('fedlearner_webconsole.dataset.apis._validate_data_source', lambda *args: None) + def test_post_data_source(self): + resp = self.post_helper( + '/api/v2/data_sources', { + 'data_source': { + 'name': 'test', + 'comment': 'test comment', + 'data_source_url': 'hdfs:///home/fake_path', + }, + 'project_id': 1 + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + self.assertResponseDataEqual(resp, { + 'type': DataSourceType.HDFS.value, + 'url': 'hdfs:///home/fake_path', + 'name': 'test', + 'creator_username': 'ada', + 'project_id': 1, + 'is_user_upload': False, + 'is_user_export': False, + 'dataset_format': 'TABULAR', + 'store_format': 'UNKNOWN', + 'dataset_type': 'PSI', + 'comment': 'test comment', + 
}, + ignore_fields=['created_at', 'id', 'uuid']) + + resp_upload = self.post_helper('/api/v2/data_sources', { + 'data_source': { + 'name': 'test', + 'data_source_url': '/home/fake_path', + 'is_user_upload': True, + }, + 'project_id': 1 + }) + self.assertEqual(resp_upload.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual( + resp_upload.json.get('details'), + {'json': { + 'data_source': { + 'data_source_url': ['no access to unauchority path file:///home/fake_path!'] + } + }}) + + resp_upload_hdfs = self.post_helper( + '/api/v2/data_sources', { + 'data_source': { + 'name': 'test', + 'data_source_url': 'hdfs:///home/fake_path', + 'is_user_upload': True, + 'dataset_format': 'TABULAR', + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + }, + 'project_id': 1 + }) + self.assertEqual(resp_upload_hdfs.status_code, HTTPStatus.CREATED) + self.assertResponseDataEqual(resp_upload_hdfs, { + 'type': DataSourceType.HDFS.value, + 'url': 'hdfs:///home/fake_path', + 'name': 'test', + 'creator_username': 'ada', + 'project_id': 1, + 'is_user_upload': True, + 'is_user_export': False, + 'dataset_format': 'TABULAR', + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + 'comment': '', + }, + ignore_fields=['created_at', 'id', 'uuid']) + + resp_err = self.post_helper('/api/v2/data_sources', { + 'data_source': { + 'name': 'test', + 'data_source_url': 'fake:///home/fake_path', + }, + 'project_id': 1 + }) + self.assertEqual(resp_err.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual(resp_err.json.get('details'), + {'json': { + 'data_source': { + 'data_source_url': ['fake is not a supported data_source type'] + } + }}) + + def test_delete_data_source(self): + resp = self.delete_helper('/api/v2/data_sources/100') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + dataset = session.query(DataSource).get(100) + self.assertIsNone(dataset) + + def test_get_data_sources(self): + expected_result = [{ + 'id': 101, + 'uuid': ANY, + 'name': 'datasource_2', + 'comment': '', + 'creator_username': 'test', + 'url': 'hdfs:///data/fake_path_2', + 'type': DataSourceType.HDFS.value, + 'project_id': 1, + 'created_at': to_timestamp(datetime(2012, 1, 14, 12, 0, 6)), + 'is_user_upload': False, + 'is_user_export': False, + 'dataset_format': 'TABULAR', + 'store_format': 'CSV', + 'dataset_type': 'PSI', + }, { + 'id': 100, + 'uuid': ANY, + 'name': 'datasource_1', + 'comment': '', + 'creator_username': 'test', + 'url': 'hdfs:///data/fake_path_1', + 'type': DataSourceType.HDFS.value, + 'project_id': 1, + 'created_at': to_timestamp(datetime(2012, 1, 14, 12, 0, 5)), + 'is_user_upload': False, + 'is_user_export': False, + 'dataset_format': 'IMAGE', + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + }] + resp = self.get_helper('/api/v2/data_sources') + self.assertResponseDataEqual(resp, expected_result) + + resp = self.get_helper('/api/v2/data_sources?project_id=1') + self.assertResponseDataEqual(resp, expected_result) + + resp = self.get_helper('/api/v2/data_sources?project_id=10') + self.assertResponseDataEqual(resp, []) + + def test_get_data_source(self): + resp = self.get_helper('/api/v2/data_sources/100') + self.assertEqual(resp.status_code, 200) + self.assertResponseDataEqual( + resp, { + 'id': 100, + 'uuid': ANY, + 'name': 'datasource_1', + 'comment': '', + 'creator_username': 'test', + 'url': 'hdfs:///data/fake_path_1', + 'type': DataSourceType.HDFS.value, + 'project_id': 1, + 'created_at': to_timestamp(datetime(2012, 1, 14, 12, 0, 5)), + 
'is_user_upload': False, + 'is_user_export': False, + 'dataset_format': 'IMAGE', + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + }) + resp = self.get_helper('/api/v2/data_sources/1') + self.assertEqual(resp.status_code, 404) + + @patch('fedlearner_webconsole.dataset.apis.Envs.STORAGE_ROOT', '/data') + def test_path_authority_validator(self): + _path_authority_validator('/data/test') + _path_authority_validator('hdfs:///home') + _path_authority_validator('file:///data/test') + with self.assertRaises(ValidationError): + _path_authority_validator('fake') + with self.assertRaises(ValidationError): + _path_authority_validator('/fake') + with self.assertRaises(ValidationError): + _path_authority_validator('file:///fake') + + +class DataSourceTreeApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.utils.file_tree.FileTreeBuilder.build_with_root') + def test_get_tree(self, mock_build_with_root: MagicMock): + mock_build_with_root.return_value = FileTreeNode(filename='20221101', + path='20221101', + is_directory=True, + size=1024, + mtime=0, + files=[ + FileTreeNode(filename='test.csv', + path='20221101/test.csv', + is_directory=False, + size=1024, + mtime=0), + ]) + with db.session_scope() as session: + data_source = DataSource(id=100, + uuid=resource_uuid(), + name='datasource_1', + creator_username='test', + path='hdfs:///data/fake_path_1', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + is_published=False, + store_format=StoreFormat.TFRECORDS, + dataset_format=DatasetFormat.IMAGE.value, + dataset_type=DatasetType.STREAMING) + session.add(data_source) + session.commit() + resp = self.get_helper('/api/v2/data_sources/100/tree') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + resp, { + 'filename': + '20221101', + 'path': + '20221101', + 'is_directory': + True, + 'size': + 1024, + 'mtime': + 0, + 'files': [{ + 'filename': 'test.csv', + 'path': '20221101/test.csv', + 'is_directory': False, + 'size': 1024, + 'mtime': 0, + 'files': [], + }], + }) + + +class ParticipantDatasetApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=10, name='test-project') + participant_1 = Participant(id=10, name='participant_1', domain_name='fake_domain_name_1') + project_participant_1 = ProjectParticipant(project_id=project.id, participant_id=participant_1.id) + + session.add(project) + session.add(participant_1) + session.add(project_participant_1) + session.commit() + + @patch('fedlearner_webconsole.dataset.apis.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.list_datasets') + @patch('fedlearner_webconsole.rpc.client.RpcClient.list_participant_datasets') + def test_get_participant(self, mock_list_participant_datasets: MagicMock, mock_list_datasets: MagicMock, + mock_list_flags: MagicMock): + dataref_1 = dataset_pb2.ParticipantDatasetRef(uuid='1', + name='fake_dataset_1', + format=DatasetFormat.TABULAR.name, + file_size=1000, + dataset_kind=DatasetKindV2.RAW.name, + dataset_type=DatasetType.PSI.name, + auth_status='PENDING') + dataref_2 = dataset_pb2.ParticipantDatasetRef(uuid='2', + name='fake_dataset_2', + format=DatasetFormat.TABULAR.name, + file_size=1000, + dataset_kind=DatasetKindV2.PROCESSED.name, + dataset_type=DatasetType.PSI.name, + auth_status='PENDING') + mock_return = 
service_pb2.ListParticipantDatasetsResponse(participant_datasets=[dataref_1, dataref_2]) + mock_list_participant_datasets.return_value = mock_return + mock_list_flags.return_value = {'list_datasets_rpc_enabled': False} + + # test no filter + resp = self.get_helper('/api/v2/project/10/participant_datasets') + self.assertEqual(resp.status_code, 200) + expect_data = [{ + 'uuid': '1', + 'project_id': 10, + 'name': 'fake_dataset_1', + 'participant_id': 10, + 'format': DatasetFormat.TABULAR.name, + 'file_size': 1000, + 'updated_at': 0, + 'value': 0, + 'dataset_kind': 'RAW', + 'dataset_type': 'PSI', + 'auth_status': 'PENDING', + }, { + 'uuid': '2', + 'project_id': 10, + 'name': 'fake_dataset_2', + 'participant_id': 10, + 'format': DatasetFormat.TABULAR.name, + 'file_size': 1000, + 'updated_at': 0, + 'value': 0, + 'dataset_kind': 'PROCESSED', + 'dataset_type': 'PSI', + 'auth_status': 'PENDING', + }] + resp_data = self.get_response_data(resp) + self.assertCountEqual(resp_data, expect_data) + mock_list_participant_datasets.assert_called_once_with(kind=None, uuid=None) + mock_list_datasets.assert_not_called() + mock_list_participant_datasets.reset_mock() + + # test filter uuid + mock_list_participant_datasets.return_value = mock_return + resp = self.get_helper('/api/v2/project/10/participant_datasets?uuid=1') + self.assertEqual(resp.status_code, 200) + expect_data = [{ + 'uuid': '1', + 'project_id': 10, + 'name': 'fake_dataset_1', + 'participant_id': 10, + 'format': DatasetFormat.TABULAR.name, + 'file_size': 1000, + 'updated_at': 0, + 'value': 0, + 'dataset_kind': 'RAW', + 'dataset_type': 'PSI', + 'auth_status': 'PENDING', + }] + self.assertResponseDataEqual(resp, expect_data) + mock_list_participant_datasets.assert_called_once_with(kind=None, uuid='1') + mock_list_participant_datasets.reset_mock() + + # test illegal kind + mock_list_participant_datasets.return_value = service_pb2.ListParticipantDatasetsResponse() + resp = self.get_helper('/api/v2/project/10/participant_datasets?kind=unkown') + self.assertEqual(resp.status_code, 400) + mock_list_participant_datasets.assert_not_called() + + # test filter kind + resp = self.get_helper('/api/v2/project/10/participant_datasets?kind=raw') + self.assertEqual(resp.status_code, 200) + mock_list_participant_datasets.assert_called_once_with(kind='raw', uuid=None) + + # test filter participant_id + mock_list_participant_datasets.reset_mock() + mock_list_participant_datasets.return_value = mock_return + resp = self.get_helper('/api/v2/project/10/participant_datasets?participant_id=10') + self.assertEqual(resp.status_code, 200) + self.assertEqual(len(self.get_response_data(resp)), 2) + mock_list_participant_datasets.assert_called_once_with(kind=None, uuid=None) + + # test filter participant_id not found + mock_list_participant_datasets.reset_mock() + mock_list_participant_datasets.return_value = mock_return + resp = self.get_helper('/api/v2/project/10/participant_datasets?participant_id=100') + self.assertEqual(resp.status_code, 404) + mock_list_participant_datasets.assert_not_called() + + # test list_datasets_rpc + mock_list_participant_datasets.reset_mock() + mock_list_datasets.reset_mock() + mock_list_flags.reset_mock() + + mock_list_datasets.return_value = mock_return + mock_list_flags.return_value = {'list_datasets_rpc_enabled': True} + resp = self.get_helper('/api/v2/project/10/participant_datasets?uuid=1&kind=raw') + self.assertEqual(resp.status_code, 200) + self.assertResponseDataEqual(resp, expect_data) + 
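+ # With the list_datasets_rpc_enabled flag on, the handler should switch from the legacy RpcClient.list_participant_datasets call to the v2 ResourceServiceClient.list_datasets RPC, as asserted below.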
mock_list_datasets.assert_called_once_with(kind=DatasetKindV2.RAW, + uuid='1', + state=ResourceState.SUCCEEDED, + time_range=None) + mock_list_participant_datasets.assert_not_called() + + # test filter cron + mock_list_datasets.reset_mock() + mock_list_datasets.return_value = mock_return + resp = self.get_helper('/api/v2/project/10/participant_datasets?cron_interval=DAYS') + self.assertEqual(resp.status_code, 200) + mock_list_datasets.assert_called_once_with(kind=None, + uuid=None, + state=ResourceState.SUCCEEDED, + time_range=dataset_pb2.TimeRange(days=1)) + + +class PublishDatasetApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def test_publish_dataset(self): + with db.session_scope() as session: + published_dataset = Dataset(id=10, + uuid='uuid', + name='published_dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + session.add(published_dataset) + dataset_job = DatasetJob(workflow_id=0, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=1, + output_dataset_id=10, + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job) + session.commit() + resp = self.post_helper('/api/v2/datasets/10:publish', {}) + self.assertEqual(resp.status_code, 200) + self.assertResponseDataEqual( + resp, + { + 'id': 10, + 'uuid': 'uuid', + 'is_published': True, + 'name': 'published_dataset', + 'creator_username': 'test', + 'path': '/data/dataset/123', + 'comment': 'test comment', + 'project_id': 1, + 'dataset_kind': 'RAW', + 'dataset_format': 'TABULAR', + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'state_frontend': 'SUCCEEDED', + 'parent_dataset_job_id': 1, + 'workflow_id': 0, + 'value': 0, + 'schema_checkers': [], + 'dataset_type': 'STREAMING', + 'import_type': 'COPY', + 'store_format': 'TFRECORDS', + 'analyzer_dataset_job_id': 0, + 'publish_frontend_state': 'PUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }, + ignore_fields=['created_at', 'updated_at', 'deleted_at', 'data_source'], + ) + + @patch('fedlearner_webconsole.dataset.services.DatasetService.withdraw_dataset') + def test_revoke_published_dataset(self, fake_withdraw_dataset: MagicMock): + resp = self.delete_helper('/api/v2/datasets/11:publish') + self.assertEqual(resp.status_code, 204) + fake_withdraw_dataset.assert_called_once_with(11) + + +class DatasetAuthorizeApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + + with db.session_scope() as session: + dataset = Dataset(id=10, + uuid='uuid', + name='default dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + auth_status=AuthStatus.PENDING) + dataset.set_participants_info(participants_info=ParticipantsInfo( + participants_map={'test_domain': ParticipantInfo(auth_status='PENDING')})) + session.add(dataset) + dataset_job = DatasetJob(workflow_id=0, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=1, + output_dataset_id=10, + state=DatasetJobState.SUCCEEDED) + 
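+ # Parent dataset_job for dataset 10; the authorize/revoke handlers below are expected to propagate auth changes to participants through this job (see the mocked DatasetJobController.inform_auth_status).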
session.add(dataset_job) + session.commit() + + @patch('fedlearner_webconsole.dataset.apis.SettingService.get_system_info', + lambda: setting_pb2.SystemInfo(pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.apis.DatasetJobController.inform_auth_status') + def test_authorize_dataset(self, mock_inform_auth_status: MagicMock): + resp = self.post_helper('/api/v2/datasets/10:authorize') + self.assertEqual(resp.status_code, 200) + self.assertEqual(self.get_response_data(resp).get('local_auth_status'), 'AUTHORIZED') + self.assertEqual( + self.get_response_data(resp).get('participants_info'), { + 'participants_map': { + 'test_domain': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '', + } + } + }) + mock_inform_auth_status.assert_called_once_with(dataset_job=ANY, auth_status=AuthStatus.AUTHORIZED) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + dataset.get_participants_info(), + ParticipantsInfo(participants_map={'test_domain': ParticipantInfo(auth_status='AUTHORIZED')})) + + @patch('fedlearner_webconsole.dataset.apis.SettingService.get_system_info', + lambda: setting_pb2.SystemInfo(pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.apis.DatasetJobController.inform_auth_status') + def test_revoke_authorized_dataset(self, mock_inform_auth_status: MagicMock): + resp = self.delete_helper('/api/v2/datasets/10:authorize') + self.assertEqual(resp.status_code, 200) + self.assertEqual(self.get_response_data(resp).get('local_auth_status'), 'WITHDRAW') + self.assertEqual( + self.get_response_data(resp).get('participants_info'), { + 'participants_map': { + 'test_domain': { + 'auth_status': 'WITHDRAW', + 'name': '', + 'role': '', + 'state': '', + 'type': '', + } + } + }) + mock_inform_auth_status.assert_called_once_with(dataset_job=ANY, auth_status=AuthStatus.WITHDRAW) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.auth_status, AuthStatus.WITHDRAW) + self.assertEqual( + dataset.get_participants_info(), + ParticipantsInfo(participants_map={'test_domain': ParticipantInfo(auth_status='WITHDRAW')})) + + +class DatasetFlushAuthStatusApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + + with db.session_scope() as session: + project = Project(id=1, name='test_project') + session.add(project) + dataset = Dataset(id=10, + uuid='uuid', + name='default dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + auth_status=AuthStatus.AUTHORIZED) + participants_info = ParticipantsInfo( + participants_map={ + 'coordinator-domain-name': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'participant-domain-name': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + dataset.set_participants_info(participants_info=participants_info) + session.add(dataset) + dataset_job = DatasetJob(workflow_id=0, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=1, + output_dataset_id=10, + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job) + session.commit() + + @patch('fedlearner_webconsole.dataset.controllers.SystemServiceClient.list_flags') + 
@patch('fedlearner_webconsole.dataset.controllers.ResourceServiceClient.list_datasets') + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobService.get_participants_need_distribute') + def test_flush_auth_status(self, mock_get_participants_need_distribute: MagicMock, mock_list_datasets: MagicMock, + mock_list_flags: MagicMock): + participant = Participant(id=1, name='test_participant', domain_name='fl-participant-domain-name.com') + mock_get_participants_need_distribute.return_value = [participant] + mock_list_datasets.return_value = ListDatasetsResponse( + participant_datasets=[dataset_pb2.ParticipantDatasetRef(auth_status=AuthStatus.AUTHORIZED.name)]) + mock_list_flags.return_value = {'list_datasets_rpc_enabled': True} + + resp = self.post_helper('/api/v2/datasets/10:flush_auth_status') + self.assertEqual(resp.status_code, 200) + self.assertEqual( + self.get_response_data(resp).get('participants_info'), { + 'participants_map': { + 'coordinator-domain-name': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '', + }, + 'participant-domain-name': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '', + } + } + }) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual( + dataset.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'coordinator-domain-name': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'participant-domain-name': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + + +class DatasetStateFixApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def test_dataset_state_fix(self): + self.signin_as_admin() + with db.session_scope() as session: + dataset = Dataset(id=10, + uuid='uuid', + name='dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + created_at=datetime(2022, 1, 1, 12, 0, 0), + updated_at=datetime(2022, 1, 1, 12, 0, 0)) + session.add(dataset) + workflow = Workflow(id=11, state=WorkflowState.FAILED, name='fake_workflow') + session.add(workflow) + dataset_job = DatasetJob(workflow_id=11, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=1, + output_dataset_id=10, + state=DatasetJobState.RUNNING) + session.add(dataset_job) + + session.commit() + resp = self.post_helper('/api/v2/datasets/10:state_fix', {}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.parent_dataset_job.state, DatasetJobState.FAILED) + + with db.session_scope() as session: + workflow = session.query(Workflow).get(11) + workflow.state = WorkflowState.COMPLETED + session.commit() + resp = self.post_helper('/api/v2/datasets/10:state_fix', {}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.parent_dataset_job.state, DatasetJobState.RUNNING) + + resp = self.post_helper('/api/v2/datasets/10:state_fix', {'force': 'SUCCEEDED'}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.parent_dataset_job.state, DatasetJobState.SUCCEEDED) + + +class DatasetJobsApiTest(BaseTestCase): + + class 
Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + + with db.session_scope() as session: + project = Project(name='test_project') + session.add(project) + session.flush([project]) + + input_dataset = Dataset(id=1, + uuid=resource_uuid(), + is_published=False, + name='input_dataset', + path='/data/dataset/test_123', + project_id=project.id, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.PSI, + dataset_kind=DatasetKindV2.RAW) + session.add(input_dataset) + streaming_dataset = Dataset(id=2, + uuid=resource_uuid(), + is_published=False, + name='streaming_dataset', + path='/data/dataset/test_123', + project_id=project.id, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.RAW) + session.add(streaming_dataset) + + session.commit() + self._project_id = project.id + self._input_dataset_uuid = input_dataset.uuid + + @patch('fedlearner_webconsole.dataset.apis.DatasetJobConfiger.from_kind', + lambda *args: FakeDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.utils.domain_name.get_pure_domain_name', lambda _: 'test_domain') + @patch('fedlearner_webconsole.dataset.apis.SettingService.get_system_info', + lambda: setting_pb2.SystemInfo(pure_domain_name='test_domain', domain_name='test_domain.fedlearner.net')) + @patch('fedlearner_webconsole.dataset.services.DatasetJobService.get_participants_need_distribute') + @patch('fedlearner_webconsole.dataset.apis.DatasetJobService.create_as_coordinator') + def test_post_dataset_job(self, mock_create_as_coordinator: MagicMock, + mock_get_participants_need_distribute: MagicMock): + mock_get_participants_need_distribute.return_value = [ + Participant(id=1, name='test_participant_1', domain_name='fl-test-domain-name-1.com'), + Participant(id=2, name='test_participant_2', domain_name='fl-test-domain-name-2.com') + ] + + dataset_job = DatasetJob(uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5), + time_range=timedelta(days=1)) + dataset_job.input_dataset = Dataset(uuid=resource_uuid(), + name='test_dataset', + dataset_format=DatasetFormat.IMAGE.value) + output_dataset = Dataset(id=2, + uuid=resource_uuid(), + is_published=False, + name='streaming_dataset', + path='/data/dataset/test_123', + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.RAW, + auth_status=AuthStatus.AUTHORIZED) + property_mock = PropertyMock(return_value=output_dataset) + DatasetJob.output_dataset = property_mock + global_configs = dataset_pb2.DatasetJobGlobalConfigs() + global_configs.global_configs['test'].MergeFrom(dataset_pb2.DatasetJobConfig()) + dataset_job.set_global_configs(global_configs) + + # test with error output_dataset_id + mock_create_as_coordinator.reset_mock() + resp = self.post_helper( + '/api/v2/projects/1/dataset_jobs', { + 'dataset_job_parameter': { + 'global_configs': { + 'test_domain.fedlearner.net': { + 'dataset_uuid': self._input_dataset_uuid, + 'variables': [] + }, + }, + 'dataset_job_kind': 'RSA_PSI_DATA_JOIN', + }, + 'output_dataset_id': 100, + }) + + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + mock_create_as_coordinator.assert_not_called() + + # test cron dataset_job + mock_create_as_coordinator.return_value = dataset_job + + resp = self.post_helper( + '/api/v2/projects/1/dataset_jobs', { + 
'dataset_job_parameter': { + 'global_configs': { + 'test_domain.fedlearner.net': { + 'dataset_uuid': self._input_dataset_uuid, + 'variables': [] + }, + }, + 'dataset_job_kind': 'RSA_PSI_DATA_JOIN', + }, + 'output_dataset_id': 2, + 'time_range': { + 'days': 1, + } + }) + + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + global_config = dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'test_domain': dataset_pb2.DatasetJobConfig(dataset_uuid=self._input_dataset_uuid)}) + mock_create_as_coordinator.assert_called_with(project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + output_dataset_id=2, + global_configs=global_config, + time_range=timedelta(days=1)) + self.assertFalse(dataset_job.get_context().need_create_stage) + + # test non-cron dataset_job + mock_create_as_coordinator.reset_mock() + dataset_job.time_range = None + mock_create_as_coordinator.return_value = dataset_job + + resp = self.post_helper( + '/api/v2/projects/1/dataset_jobs', { + 'dataset_job_parameter': { + 'global_configs': { + 'test_domain.fedlearner.net': { + 'dataset_uuid': self._input_dataset_uuid, + 'variables': [] + }, + }, + 'dataset_job_kind': 'RSA_PSI_DATA_JOIN', + }, + 'output_dataset_id': 2, + 'time_range': { + 'hours': 1, + } + }) + + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + global_config = dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'test_domain': dataset_pb2.DatasetJobConfig(dataset_uuid=self._input_dataset_uuid)}) + mock_create_as_coordinator.assert_called_with(project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + output_dataset_id=2, + global_configs=global_config, + time_range=timedelta(hours=1)) + self.assertEqual( + dataset_job.output_dataset.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test_domain': ParticipantInfo(auth_status='AUTHORIZED'), + 'test-domain-name-1': ParticipantInfo(auth_status='PENDING'), + 'test-domain-name-2': ParticipantInfo(auth_status='PENDING'), + })) + self.assertTrue(dataset_job.get_context().need_create_stage) + + def test_get_dataset_jobs(self): + with db.session_scope() as session: + output_dataset_1 = Dataset(id=4, + uuid=resource_uuid(), + is_published=False, + name='output_dataset_1', + path='/data/dataset/test_123', + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.PSI, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(output_dataset_1) + output_dataset_2 = Dataset(id=5, + uuid=resource_uuid(), + is_published=False, + name='output_dataset_2', + path='/data/dataset/test_123', + project_id=2, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.PSI, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(output_dataset_2) + dataset_job_1 = DatasetJob(uuid='test-uuid-1', + name='test_dataset_job_1', + kind=DatasetJobKind.DATA_ALIGNMENT, + project_id=1, + workflow_id=1, + input_dataset_id=1, + output_dataset_id=4, + coordinator_id=1, + state=DatasetJobState.PENDING, + created_at=datetime(2012, 1, 14, 12, 0, 5), + creator_username='test user 1') + session.add(dataset_job_1) + + dataset_job_2 = DatasetJob(uuid='test-uuid-2', + name='test_dataset_job_2', + kind=DatasetJobKind.IMPORT_SOURCE, + project_id=1, + workflow_id=1, + input_dataset_id=2, + output_dataset_id=4, + coordinator_id=0, + state=DatasetJobState.SUCCEEDED, + created_at=datetime(2012, 1, 14, 12, 0, 6), + creator_username='test user 2') + session.add(dataset_job_2) + dataset_job_3 = DatasetJob(uuid='test-uuid-3', + name='test_dataset_job_3', + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + 
project_id=2, + workflow_id=1, + input_dataset_id=3, + output_dataset_id=5, + coordinator_id=0, + state=DatasetJobState.PENDING, + created_at=datetime(2012, 1, 14, 12, 0, 7), + creator_username='test user 3') + session.add(dataset_job_3) + dataset_job_4 = DatasetJob(uuid='test-another-uuid-4', + name='test_another_dataset_job_4', + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + project_id=1, + workflow_id=1, + input_dataset_id=3, + output_dataset_id=4, + coordinator_id=0, + state=DatasetJobState.SUCCEEDED, + created_at=datetime(2012, 1, 14, 12, 0, 8), + creator_username='test user 4') + session.add(dataset_job_4) + session.commit() + + get_response = self.get_helper('/api/v2/projects/2/dataset_jobs') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + dataset_jobs = self.get_response_data(get_response) + self.assertEqual(len(dataset_jobs), 1) + self.assertEqual(dataset_jobs, [{ + 'id': 3, + 'name': 'test_dataset_job_3', + 'uuid': 'test-uuid-3', + 'kind': DatasetJobKind.RSA_PSI_DATA_JOIN.name, + 'project_id': 2, + 'result_dataset_id': 5, + 'result_dataset_name': 'output_dataset_2', + 'state': DatasetJobState.PENDING.name, + 'coordinator_id': 0, + 'created_at': ANY, + 'has_stages': False, + 'creator_username': 'test user 3', + }]) + + fake_sorter_param = urllib.parse.quote('fake_time asc') + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs?order_by={fake_sorter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.BAD_REQUEST) + + sorter_param = urllib.parse.quote('created_at desc') + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs?order_by={sorter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + dataset_jobs = self.get_response_data(get_response) + self.assertEqual([dataset_job.get('id') for dataset_job in dataset_jobs], [4, 2, 1]) + + filter_param = urllib.parse.quote('(and(state:["SUCCEEDED"])(name~="test_dataset"))') + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + dataset_jobs = self.get_response_data(get_response) + self.assertEqual([dataset_job.get('id') for dataset_job in dataset_jobs], [2]) + + filter_param = urllib.parse.quote('(kind:["DATA_ALIGNMENT", "IMPORT_SOURCE"])') + sorter_param = urllib.parse.quote('created_at asc') + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs?filter={filter_param}&order_by={sorter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + dataset_jobs = self.get_response_data(get_response) + self.assertEqual([dataset_job.get('id') for dataset_job in dataset_jobs], [1, 2]) + + filter_param = urllib.parse.quote('(coordinator_id:[0])') + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + dataset_jobs = self.get_response_data(get_response) + self.assertEqual([dataset_job.get('id') for dataset_job in dataset_jobs], [4, 2]) + + filter_param = urllib.parse.quote('(input_dataset_id=1)') + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs?filter={filter_param}') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + dataset_jobs = self.get_response_data(get_response) + self.assertEqual([dataset_job.get('id') for dataset_job in dataset_jobs], [1]) + + +class DatasetJobDefinitionApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def test_get_wrong_dataset_job_definitions(self): + resp = 
self.get_helper('/api/v2/dataset_job_definitions/test') + self.assertEqual(resp.status_code, 400) + + @patch('fedlearner_webconsole.dataset.apis.DatasetJobKind', lambda _: 'fake_handler') + @patch('fedlearner_webconsole.dataset.apis.DatasetJobConfiger.from_kind', + lambda *args: FakeDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.dataset.services.DatasetJobService.is_local', lambda *args: True) + def test_get_dataset_job_definitions(self): + resp = self.get_helper('/api/v2/dataset_job_definitions/fake_handler') + self.assertEqual(resp.status_code, 200) + self.assertResponseDataEqual( + resp, { + 'variables': [{ + 'name': 'hello', + 'value_type': 'NUMBER', + 'typed_value': 1.0, + 'value': '', + 'tag': '', + 'access_mode': 'UNSPECIFIED', + 'widget_schema': '' + }, { + 'name': 'hello_from_job', + 'value_type': 'NUMBER', + 'typed_value': 3.0, + 'tag': '', + 'value': '', + 'access_mode': 'UNSPECIFIED', + 'widget_schema': '' + }], + 'is_federated': False, + }) + + @patch('fedlearner_webconsole.dataset.apis.DatasetJobKind', lambda _: 'fake_federated_handler') + @patch('fedlearner_webconsole.dataset.apis.DatasetJobConfiger.from_kind', + lambda *args: FakeFederatedDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.dataset.services.DatasetJobService.is_local', lambda *args: False) + def test_get_federated_dataset_job_definitions(self): + resp = self.get_helper('/api/v2/dataset_job_definitions/FAKE_HANDLER') + self.assertEqual(resp.status_code, 200) + self.assertResponseDataEqual( + resp, { + 'variables': [{ + 'name': 'hello', + 'value_type': 'NUMBER', + 'tag': '', + 'typed_value': 1.0, + 'value': '', + 'access_mode': 'UNSPECIFIED', + 'widget_schema': '' + }, { + 'name': 'hello_from_job', + 'value_type': 'NUMBER', + 'typed_value': 3.0, + 'tag': '', + 'value': '', + 'access_mode': 'UNSPECIFIED', + 'widget_schema': '' + }], + 'is_federated': True, + }) + + +class DatasetJobApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.dataset.apis.RpcClient.get_dataset_job') + def test_get_datasetjob(self, mock_get_dataset_job: MagicMock): + get_response = self.get_helper('/api/v2/projects/1/dataset_jobs/123') + self.assertEqual(get_response.status_code, 404) + + with db.session_scope() as session: + participant = Participant(name='test', domain_name='test_domain') + session.add(participant) + project = Project(name='test-project') + session.add(project) + session.flush([project, participant]) + project_participant = ProjectParticipant(project_id=project.id, participant_id=participant.id) + session.add(project_participant) + + output_dataset = Dataset(uuid='output_uuid', name='output_dataset') + session.add(output_dataset) + session.flush([output_dataset]) + coordinator_dataset_job = DatasetJob(uuid='u12345', + name='coordinator_dataset_job', + project_id=project.id, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=output_dataset.id, + creator_username='test user', + time_range=timedelta(hours=1)) + coordinator_dataset_job.set_global_configs(global_configs=dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'our': dataset_pb2.DatasetJobConfig(dataset_uuid='u123')})) + context = dataset_pb2.DatasetJobContext(input_data_batch_num_example=1000, + output_data_batch_num_example=500) + coordinator_dataset_job.set_context(context) + session.add(coordinator_dataset_job) + mock_get_dataset_job.return_value = service_pb2.GetDatasetJobResponse(dataset_job=dataset_pb2.DatasetJob( + global_configs=coordinator_dataset_job.get_global_configs(), + 
scheduler_state=DatasetJobSchedulerState.STOPPED.name, + )) + + participant_dataset_job = DatasetJob( + uuid='u54321', + name='participant_dataset_job', + project_id=project.id, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=output_dataset.id, + coordinator_id=participant.id, + creator_username='test user', + time_range=timedelta(days=1), + ) + session.add(participant_dataset_job) + session.commit() + coordinator_dataset_job_id = coordinator_dataset_job.id + participant_dataset_job_id = participant_dataset_job.id + + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs/{coordinator_dataset_job_id}') + self.assertEqual(get_response.status_code, 200) + self.assertResponseDataEqual( + get_response, + { + 'id': 1, + 'uuid': 'u12345', + 'name': 'coordinator_dataset_job', + 'project_id': 1, + 'kind': 'RSA_PSI_DATA_JOIN', + 'state': 'PENDING', + 'result_dataset_uuid': 'output_uuid', + 'result_dataset_name': 'output_dataset', + 'global_configs': { + 'global_configs': { + 'our': { + 'dataset_uuid': 'u123', + 'variables': [] + } + } + }, + 'input_data_batch_num_example': 1000, + 'output_data_batch_num_example': 500, + 'coordinator_id': 0, + 'workflow_id': 0, + 'is_ready': False, + 'started_at': 0, + 'finished_at': 0, + 'has_stages': False, + 'creator_username': 'test user', + 'scheduler_state': 'PENDING', + 'time_range': { + 'days': 0, + 'hours': 1, + }, + 'scheduler_message': '', + }, + ignore_fields=['created_at', 'updated_at'], + ) + + get_response = self.get_helper(f'/api/v2/projects/1/dataset_jobs/{participant_dataset_job_id}') + self.assertEqual(get_response.status_code, 200) + self.assertResponseDataEqual( + get_response, + { + 'id': 2, + 'uuid': 'u54321', + 'name': 'participant_dataset_job', + 'project_id': 1, + 'kind': 'RSA_PSI_DATA_JOIN', + 'state': 'PENDING', + 'result_dataset_uuid': 'output_uuid', + 'result_dataset_name': 'output_dataset', + 'global_configs': { + 'global_configs': { + 'our': { + 'dataset_uuid': 'u123', + 'variables': [] + } + } + }, + 'input_data_batch_num_example': 0, + 'output_data_batch_num_example': 0, + 'coordinator_id': 1, + 'workflow_id': 0, + 'is_ready': False, + 'started_at': 0, + 'finished_at': 0, + 'has_stages': False, + 'creator_username': 'test user', + 'scheduler_state': 'STOPPED', + 'time_range': { + 'days': 1, + 'hours': 0, + }, + 'scheduler_message': '', + }, + ignore_fields=['created_at', 'updated_at'], + ) + mock_get_dataset_job.assert_called_once_with(uuid='u54321') + + def test_delete_dataset_job(self): + # no dataset + response = self.delete_helper('/api/v2/projects/1/dataset_jobs/1') + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + # delete successfully + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='test-uuid', + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.FAILED, + project_id=1, + workflow_id=1, + input_dataset_id=1, + output_dataset_id=2, + coordinator_id=0) + session.add(dataset_job) + session.commit() + response = self.delete_helper('/api/v2/projects/1/dataset_jobs/1') + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + dataset = session.query(DatasetJob).execution_options(include_deleted=True).get(1) + self.assertIsNotNone(dataset.deleted_at) + + +class DatasetJobStopApiTest(BaseTestCase): + + def test_no_dataset_job(self): + response = self.post_helper('/api/v2/projects/1/dataset_jobs/1:stop') + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + 
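The `time_range` handling asserted above round-trips through the API as separate day/hour fields: the request accepts `{'days': ...}` or `{'hours': ...}`, the backend stores a `timedelta`, and the detail endpoint renders it back out. A minimal sketch of that conversion (the helper names are hypothetical, not part of this codebase):

```python
from datetime import timedelta

def time_range_to_payload(time_range: timedelta) -> dict:
    # Mirror the shape the tests above assert: whole days plus leftover hours.
    return {'days': time_range.days, 'hours': time_range.seconds // 3600}

def payload_to_time_range(payload: dict) -> timedelta:
    return timedelta(days=payload.get('days', 0), hours=payload.get('hours', 0))

assert time_range_to_payload(timedelta(hours=1)) == {'days': 0, 'hours': 1}
assert time_range_to_payload(timedelta(days=1)) == {'days': 1, 'hours': 0}
assert payload_to_time_range({'days': 1}) == timedelta(days=1)
```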
@patch('fedlearner_webconsole.dataset.apis.DatasetJobController.stop') + def test_stop_dataset_job(self, mock_stop: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='u54321', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=0, + coordinator_id=0, + ) + session.add(dataset_job) + session.commit() + response = self.post_helper('/api/v2/projects/1/dataset_jobs/1:stop') + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_stop.assert_called_once_with(uuid='u54321') + + +class DatasetJobStopSchedulerApiTest(BaseTestCase): + + def test_stop_scheduler_dataset_job(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='u54321', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=0, + coordinator_id=0, + scheduler_state=DatasetJobSchedulerState.RUNNABLE, + time_range=timedelta(days=1)) + session.add(dataset_job) + session.commit() + response = self.post_helper('/api/v2/projects/1/dataset_jobs/1:stop_scheduler') + self.assertEqual(response.status_code, HTTPStatus.OK) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.STOPPED) + + +class DatasetJobStagesApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='dataset_job uuid', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=0, + coordinator_id=0, + ) + session.add(dataset_job) + dataset_job_stage_1 = DatasetJobStage(id=1, + uuid='uuid_1', + name='default dataset job stage 1', + project_id=1, + workflow_id=1, + created_at=datetime(2022, 1, 1, 0, 0, 0), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 15), + state=DatasetJobState.PENDING) + session.add(dataset_job_stage_1) + dataset_job_stage_2 = DatasetJobStage(id=2, + uuid='uuid_2', + name='default dataset job stage 2', + project_id=1, + workflow_id=2, + created_at=datetime(2022, 1, 2, 0, 0, 0), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 15), + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job_stage_2) + dataset_job_stage_3 = DatasetJobStage(id=3, + uuid='uuid_3', + name='default dataset job stage 3', + project_id=1, + workflow_id=1, + created_at=datetime(2022, 1, 3, 0, 0, 0), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 15), + state=DatasetJobState.STOPPED) + session.add(dataset_job_stage_3) + session.commit() + + def test_get_dataset_job_stages(self): + response = self.get_helper('/api/v2/projects/1/dataset_jobs/1/dataset_job_stages') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(response, [{ + 'created_at': ANY, + 'dataset_job_id': 1, + 'id': 3, + 'kind': DatasetJobKind.RSA_PSI_DATA_JOIN.name, + 'name': 'default dataset job stage 3', + 'output_data_batch_id': 1, + 'project_id': 1, + 'state': DatasetJobState.STOPPED.name + }, { + 'created_at': ANY, + 'dataset_job_id': 1, + 'id': 2, + 'kind': DatasetJobKind.RSA_PSI_DATA_JOIN.name, + 'name': 'default dataset job stage 2', + 'output_data_batch_id': 1, + 'project_id': 1, + 'state': DatasetJobState.SUCCEEDED.name + }, { + 'created_at': ANY, + 'dataset_job_id': 1, + 'id': 1, + 'kind': DatasetJobKind.RSA_PSI_DATA_JOIN.name, + 'name': 'default dataset job stage 1', + 'output_data_batch_id': 1, + 'project_id': 
1, + 'state': DatasetJobState.PENDING.name + }]) + filter_param = urllib.parse.quote('(state:["STOPPED", "SUCCEEDED"])') + sorter_param = urllib.parse.quote('created_at asc') + response = self.get_helper(f'/api/v2/projects/1/dataset_jobs/1/dataset_job_stages?\ + page=1&page_size=5&filter={filter_param}&order_by={sorter_param}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual([job_stage.get('id') for job_stage in data], [2, 3]) + self.assertEqual( + json.loads(response.data).get('page_meta'), { + 'current_page': 1, + 'page_size': 5, + 'total_pages': 1, + 'total_items': 2 + }) + + +class DatasetJobStageApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='dataset_job uuid', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=0, + coordinator_id=0, + ) + session.add(dataset_job) + dataset_job_stage_1 = DatasetJobStage(id=1, + uuid='uuid_1', + name='default dataset job stage 1', + project_id=1, + workflow_id=1, + created_at=datetime(2022, 1, 1, 0, 0, 0), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 15), + state=DatasetJobState.PENDING, + coordinator_id=0) + session.add(dataset_job_stage_1) + session.commit() + + def test_get_dataset_job_stage(self): + response = self.get_helper('/api/v2/projects/1/dataset_jobs/1/dataset_job_stages/1') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + response, { + 'id': 1, + 'uuid': 'uuid_1', + 'workflow_id': 1, + 'dataset_job_id': 1, + 'dataset_job_uuid': 'dataset_job uuid', + 'event_time': to_timestamp(datetime(2022, 1, 15)), + 'is_ready': False, + 'kind': 'RSA_PSI_DATA_JOIN', + 'name': 'default dataset job stage 1', + 'output_data_batch_id': 1, + 'project_id': 1, + 'state': 'PENDING', + 'created_at': ANY, + 'updated_at': ANY, + 'started_at': 0, + 'finished_at': 0, + 'input_data_batch_num_example': 0, + 'output_data_batch_num_example': 0, + 'scheduler_message': '', + }) + response = self.get_helper('/api/v2/projects/2/dataset_jobs/2/dataset_job_stages/2') + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + +class ChildrenbDatasetsApiTest(BaseTestCase): + + def test_get_sub_dataset_api(self): + with db.session_scope() as session: + parent_dataset = Dataset(id=1, + uuid='parent_dataset uuid', + name='parent_dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value) + session.add(parent_dataset) + dataset_job = DatasetJob(workflow_id=0, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=1, + output_dataset_id=2, + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job) + analyzer_dataset_job = DatasetJob(workflow_id=0, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.ANALYZER, + input_dataset_id=1, + output_dataset_id=1, + state=DatasetJobState.SUCCEEDED) + session.add(analyzer_dataset_job) + child_dataset = Dataset(id=2, + uuid='child_dataset uuid', + name='child_dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + dataset_kind=DatasetKindV2.PROCESSED, + store_format=StoreFormat.TFRECORDS) + 
session.add(child_dataset) + export_dataset_job = DatasetJob(workflow_id=0, + uuid=resource_uuid(), + project_id=1, + kind=DatasetJobKind.EXPORT, + input_dataset_id=1, + output_dataset_id=3, + state=DatasetJobState.SUCCEEDED) + session.add(export_dataset_job) + export_dataset = Dataset(id=3, + uuid='export_dataset uuid', + name='export_dataset', + creator_username='test', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + dataset_kind=DatasetKindV2.EXPORTED, + store_format=StoreFormat.CSV) + session.add(export_dataset) + session.commit() + response = self.get_helper('/api/v2/datasets/1/children_datasets') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(response, [{ + 'id': 2, + 'project_id': 1, + 'name': 'child_dataset', + 'creator_username': 'test', + 'created_at': ANY, + 'path': '/data/dataset/123', + 'dataset_format': 'TABULAR', + 'comment': 'test comment', + 'state_frontend': 'SUCCEEDED', + 'dataset_kind': 'PROCESSED', + 'data_source': ANY, + 'file_size': 0, + 'is_published': True, + 'num_example': 0, + 'uuid': 'child_dataset uuid', + 'total_value': 0, + 'store_format': 'TFRECORDS', + 'dataset_type': 'STREAMING', + 'import_type': 'COPY', + 'publish_frontend_state': 'PUBLISHED', + 'auth_frontend_state': 'AUTH_APPROVED', + 'local_auth_status': 'PENDING', + 'participants_info': { + 'participants_map': {} + }, + }]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/auth_service.py b/web_console_v2/api/fedlearner_webconsole/dataset/auth_service.py new file mode 100644 index 000000000..5a4aaeafd --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/auth_service.py @@ -0,0 +1,63 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import List +from sqlalchemy.orm import Session + +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo + + +class AuthService(object): + + def __init__(self, session: Session, dataset_job: DatasetJob): + self._session = session + self._dataset_job = dataset_job + self._output_dataset: Dataset = dataset_job.output_dataset + + def initialize_participants_info_as_coordinator(self, participants: List[Participant]): + participants_info = ParticipantsInfo() + for participant in participants: + # default auth status is pending + participant_info = ParticipantInfo(auth_status=AuthStatus.PENDING.value) + participants_info.participants_map[participant.pure_domain_name()].CopyFrom(participant_info) + + coordinator_domain_name = SettingService.get_system_info().pure_domain_name + coordinator_info = ParticipantInfo(auth_status=self._output_dataset.auth_status.name) + participants_info.participants_map[coordinator_domain_name].CopyFrom(coordinator_info) + + self._output_dataset.set_participants_info(participants_info=participants_info) + + def initialize_participants_info_as_participant(self, participants_info: ParticipantsInfo): + self._output_dataset.set_participants_info(participants_info=participants_info) + + def update_auth_status(self, domain_name: str, auth_status: AuthStatus): + participants_info = self._output_dataset.get_participants_info() + participants_info.participants_map[domain_name].auth_status = auth_status.name + self._output_dataset.set_participants_info(participants_info=participants_info) + + def check_local_authorized(self) -> bool: + if not Flag.DATASET_AUTH_STATUS_CHECK_ENABLED.value: + return True + return self._output_dataset.auth_status == AuthStatus.AUTHORIZED + + def check_participants_authorized(self) -> bool: + if not Flag.DATASET_AUTH_STATUS_CHECK_ENABLED.value: + return True + return self._output_dataset.is_all_participants_authorized() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/auth_service_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/auth_service_test.py new file mode 100644 index 000000000..cca03bee5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/auth_service_test.py @@ -0,0 +1,145 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
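The `AuthService` above is bookkeeping over a per-domain map: the coordinator seeds every participant as `PENDING`, records its own dataset's status under its own domain, and the later checks reduce to scans over that map, gated by the `DATASET_AUTH_STATUS_CHECK_ENABLED` flag. A minimal sketch using the same protos (the domain names are made up):

```python
from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo

info = ParticipantsInfo()
# Coordinator view: participants start PENDING; the coordinator's own slot
# mirrors the output dataset's auth_status.
info.participants_map['partner-domain'].CopyFrom(ParticipantInfo(auth_status='PENDING'))
info.participants_map['coordinator-domain'].CopyFrom(ParticipantInfo(auth_status='AUTHORIZED'))

# check_participants_authorized() behaves roughly like this scan when the
# flag is on, and short-circuits to True when it is off.
flag_enabled = True
all_authorized = (not flag_enabled) or all(
    p.auth_status == 'AUTHORIZED' for p in info.participants_map.values())
assert not all_authorized
```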
+# + +import unittest +from unittest.mock import MagicMock, PropertyMock, patch + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.auth_service import AuthService +from fedlearner_webconsole.dataset.models import (Dataset, DatasetKindV2, ImportType, DatasetType, DatasetJob, + DatasetJobKind, DatasetJobState) +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.flag.models import _Flag +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from testing.no_web_server_test_case import NoWebServerTestCase + + +class AuthServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset = Dataset(id=1, + uuid='dataset uuid', + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED, + is_published=True, + import_type=ImportType.COPY, + auth_status=AuthStatus.AUTHORIZED) + session.add(dataset) + dataset_job = DatasetJob(uuid='dataset_job uuid', + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + project_id=1, + workflow_id=0, + input_dataset_id=0, + output_dataset_id=1, + coordinator_id=0) + session.add(dataset_job) + session.commit() + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_initialize_participants_info_as_coordinator(self, mock_system_info: MagicMock): + mock_system_info.return_value = SystemInfo(pure_domain_name='test-domain-name-coordinator', name='coordinator') + particiapnt_1 = Participant(id=1, name='test_participant_1', domain_name='fl-test-domain-name-1.com') + particiapnt_2 = Participant(id=2, name='test_participant_2', domain_name='fl-test-domain-name-2.com') + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + AuthService(session=session, dataset_job=dataset_job).initialize_participants_info_as_coordinator( + participants=[particiapnt_1, particiapnt_2]) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test-domain-name-1'].auth_status, + AuthStatus.PENDING.value) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test-domain-name-2'].auth_status, + AuthStatus.PENDING.value) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test-domain-name-coordinator']. + auth_status, AuthStatus.AUTHORIZED.value) + + def test_initialize_participants_info_as_participant(self): + participants_info = ParticipantsInfo( + participants_map={ + 'test-domain-name-coordinator': ParticipantInfo(auth_status='AUTHORIZED'), + 'test-domain-name-1': ParticipantInfo(auth_status='PENDING') + }) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + AuthService(session=session, dataset_job=dataset_job).initialize_participants_info_as_participant( + participants_info=participants_info) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test-domain-name-1'].auth_status, + AuthStatus.PENDING.value) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test-domain-name-coordinator']. 
+ auth_status, AuthStatus.AUTHORIZED.value) + + def test_update_auth_status(self): + participants_info = ParticipantsInfo( + participants_map={ + 'test-domain-name-coordinator': ParticipantInfo(auth_status='AUTHORIZED'), + 'test-domain-name-1': ParticipantInfo(auth_status='PENDING') + }) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.output_dataset.set_participants_info(participants_info) + AuthService(session=session, dataset_job=dataset_job).update_auth_status(domain_name='test-domain-name-1', + auth_status=AuthStatus.AUTHORIZED) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test-domain-name-1'].auth_status, + AuthStatus.AUTHORIZED.value) + + @patch('fedlearner_webconsole.flag.models.Flag.DATASET_AUTH_STATUS_CHECK_ENABLED', new_callable=PropertyMock) + def test_check_local_authorized(self, mock_dataset_auth_status_check_enabled: MagicMock): + with db.session_scope() as session: + mock_dataset_auth_status_check_enabled.return_value = _Flag('dataset_auth_status_check_enabled', True) + dataset_job = session.query(DatasetJob).get(1) + auth_service = AuthService(session=session, dataset_job=dataset_job) + self.assertTrue(auth_service.check_local_authorized()) + dataset_job.output_dataset.auth_status = AuthStatus.WITHDRAW + self.assertFalse(auth_service.check_local_authorized()) + + mock_dataset_auth_status_check_enabled.reset_mock() + mock_dataset_auth_status_check_enabled.return_value = _Flag('dataset_auth_status_check_enabled', False) + self.assertTrue(auth_service.check_local_authorized()) + + @patch('fedlearner_webconsole.flag.models.Flag.DATASET_AUTH_STATUS_CHECK_ENABLED', new_callable=PropertyMock) + def test_check_participants_authorized(self, mock_dataset_auth_status_check_enabled: MagicMock): + participants_info = ParticipantsInfo( + participants_map={ + 'test-domain-name-coordinator': ParticipantInfo(auth_status='AUTHORIZED'), + 'test-domain-name-1': ParticipantInfo(auth_status='PENDING') + }) + with db.session_scope() as session: + mock_dataset_auth_status_check_enabled.return_value = _Flag('dataset_auth_status_check_enabled', True) + dataset_job = session.query(DatasetJob).get(1) + auth_service = AuthService(session=session, dataset_job=dataset_job) + self.assertTrue(auth_service.check_participants_authorized()) + dataset_job.output_dataset.set_participants_info(participants_info) + self.assertFalse(auth_service.check_participants_authorized()) + + mock_dataset_auth_status_check_enabled.reset_mock() + mock_dataset_auth_status_check_enabled.return_value = _Flag('dataset_auth_status_check_enabled', False) + self.assertTrue(auth_service.check_participants_authorized()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/batch_stats.py b/web_console_v2/api/fedlearner_webconsole/dataset/batch_stats.py new file mode 100644 index 000000000..58199e882 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/batch_stats.py @@ -0,0 +1,108 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Tuple +from envs import Envs +import enum +from multiprocessing import get_context, Queue +import queue + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput +from fedlearner_webconsole.dataset.models import DataBatch, BatchState +from fedlearner_webconsole.dataset.services import DataReader +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.utils.hooks import pre_start_hook + +_BATCH_STATS_LOG = 'batch stats' + + +class BatchStatsItemState(enum.Enum): + SUCCESSED = 'SUCCESSED' + FAILED = 'FAILED' + + +def batch_stats_sub_process(batch_id: int, q: Queue): + # as we need connect to db in sub process, we should pre-set environment in hook + # TODO(wangsen.0914): support start process in a unify func + pre_start_hook() + with db.session_scope() as session: + batch: DataBatch = session.query(DataBatch).get(batch_id) + batch_path = get_batch_data_path(batch) + batch_name = batch.batch_name + dataset_path = batch.dataset.path + meta = DataReader(dataset_path).metadata(batch_name=batch_name) + batch_num_feature = meta.num_feature + batch_num_example = meta.num_example + batch_file_size = FileOperator().getsize(batch_path) + q.put([batch_num_feature, batch_num_example, batch_file_size]) + + +class BatchStatsRunner(IRunnerV2): + + def _set_batch_stats(self, batch_id: int): + try: + context = get_context('spawn') + internal_queue = context.Queue() + # The memory will not release after batch stats, so a new process is initialized to do that. 
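+ # A 'spawn' child starts a fresh interpreter, so the OS reclaims all of the
+ # stats memory once it exits; the Queue below hands
+ # [num_feature, num_example, file_size] back to the parent.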
+ batch_stats_process = context.Process(target=batch_stats_sub_process, + kwargs={ + 'batch_id': batch_id, + 'q': internal_queue, + }, + daemon=True) + batch_stats_process.start() + try: + # wait 10 min as some customer hdfs system may cause long time to read + batch_num_feature, batch_num_example, batch_file_size = internal_queue.get(timeout=600) + except queue.Empty as e: + batch_stats_process.terminate() + raise RuntimeError('run batch_stats_sub_process failed') from e + finally: + batch_stats_process.join() + batch_stats_process.close() + internal_queue.close() + with db.session_scope() as session: + batch = session.query(DataBatch).get(batch_id) + batch.num_feature = batch_num_feature + batch.num_example = batch_num_example + batch.file_size = batch_file_size + logging.info(f'[{_BATCH_STATS_LOG}]: total batch data size is {batch.file_size}') + batch.state = BatchState.SUCCESS + session.commit() + logging.info(f'[{_BATCH_STATS_LOG}]: finish batch stats task') + except Exception: # pylint: disable=broad-except + with db.session_scope() as session: + batch = session.query(DataBatch).get(batch_id) + batch.state = BatchState.FAILED + session.commit() + raise + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + logging.info(f'[{_BATCH_STATS_LOG}]: start batch stats task') + try: + batch_id = context.input.batch_stats_input.batch_id + logging.info(f'[{_BATCH_STATS_LOG}]: collect raw dataset stats info, batch id: {batch_id}') + self._set_batch_stats(batch_id) + return RunnerStatus.DONE, RunnerOutput() + except Exception as e: # pylint: disable=broad-except + error_message = str(e) + logging.error(f'[{_BATCH_STATS_LOG}] err: {error_message}, envs: {Envs.__dict__}') + return RunnerStatus.FAILED, RunnerOutput(error_message=error_message) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/batch_stats_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/batch_stats_test.py new file mode 100644 index 000000000..7e718fa07 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/batch_stats_test.py @@ -0,0 +1,99 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
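The `_set_batch_stats` flow above is a reusable pattern: do the heavy read in a spawned child, hand the result back over a `Queue`, and guard the parent with a timeout plus `terminate`/`join`/`close` cleanup. A self-contained sketch of just that skeleton (the worker payload is a stand-in):

```python
import queue
from multiprocessing import get_context

def _work(q):
    # Stand-in for the real stats job: num_feature, num_example, file_size.
    q.put([10, 666, 789123])

def run_in_subprocess(timeout_sec: float = 600.0):
    ctx = get_context('spawn')
    q = ctx.Queue()
    p = ctx.Process(target=_work, args=(q,), daemon=True)
    p.start()
    try:
        # Raises queue.Empty if the child produces nothing in time.
        return q.get(timeout=timeout_sec)
    except queue.Empty as e:
        p.terminate()
        raise RuntimeError('subprocess produced no result') from e
    finally:
        p.join()
        p.close()

print(run_in_subprocess())  # -> [10, 666, 789123]
```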
+# + +import unittest +from datetime import datetime +from unittest.mock import patch, MagicMock +from multiprocessing import Queue + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.dataset.batch_stats import BatchStatsRunner, batch_stats_sub_process +from fedlearner_webconsole.dataset.models import DataBatch, Dataset, BatchState, DatasetType +from fedlearner_webconsole.dataset.services import DataReader +from fedlearner_webconsole.db import db, turn_db_timezone_to_utc +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, BatchStatsInput +from testing.no_web_server_test_case import NoWebServerTestCase + + +def fake_batch_stats_sub_process(batch_id: int, q: Queue): + q.put([10, 666, 789123]) + + +class BatchStatsRunnerTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.dataset.batch_stats.batch_stats_sub_process', fake_batch_stats_sub_process) + def test_run_for_batch(self): + with db.session_scope() as session: + dataset = Dataset(id=1, name='test_dataset', path='/test_dataset', dataset_type=DatasetType.PSI) + session.add(dataset) + batch = DataBatch(id=2, + name='0', + dataset_id=dataset.id, + path='/test_dataset/1/batch/0', + event_time=datetime(2021, 10, 28, 16, 37, 37)) + session.add(batch) + session.commit() + + runner = BatchStatsRunner() + + runner_input = RunnerInput(batch_stats_input=BatchStatsInput(batch_id=2)) + context = RunnerContext(index=0, input=runner_input) + + # Succeeded case + status, _ = runner.run(context) + self.assertEqual(status, RunnerStatus.DONE) + with db.session_scope() as session: + batch = session.query(DataBatch).get(2) + self.assertEqual(batch.state, BatchState.SUCCESS) + self.assertEqual(batch.num_feature, 10) + self.assertEqual(batch.num_example, 666) + self.assertEqual(batch.file_size, 789123) + + @patch('fedlearner_webconsole.dataset.batch_stats.FileOperator.getsize') + @patch('fedlearner_webconsole.dataset.batch_stats.DataReader') + @patch('fedlearner_webconsole.dataset.services.DataReader.metadata') + @patch('fedlearner_webconsole.utils.hooks.get_database_uri') + def test_batch_stats_sub_process(self, mock_get_database_uri: MagicMock, mock_metadata: MagicMock, + mock_data_reader: MagicMock, mock_getsize: MagicMock): + with db.session_scope() as session: + dataset = Dataset(id=1, name='test_dataset', path='/test_dataset', dataset_type=DatasetType.PSI) + session.add(dataset) + batch = DataBatch(id=2, + name='0', + dataset_id=dataset.id, + path='/test_dataset/1/batch/0', + event_time=datetime(2021, 10, 28, 16, 37, 37)) + session.add(batch) + session.commit() + mock_metadata_res = MagicMock() + mock_metadata_res.num_feature = 10 + mock_metadata_res.num_example = 666 + mock_getsize.return_value = 789123 + mock_get_database_uri.return_value = turn_db_timezone_to_utc(self.__class__.Config.SQLALCHEMY_DATABASE_URI) + + mock_data_reader.return_value = DataReader('/test_dataset') + mock_metadata.return_value = mock_metadata_res + + queue = Queue() + batch_stats_sub_process(batch_id=2, q=queue) + batch_num_feature, batch_num_example, batch_file_size = queue.get() + self.assertEqual(batch_num_feature, 10) + self.assertEqual(batch_num_example, 666) + self.assertEqual(batch_file_size, 789123) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/consts.py b/web_console_v2/api/fedlearner_webconsole/dataset/consts.py new file mode 100644 index 000000000..6c9cd5019 --- /dev/null 
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/consts.py @@ -0,0 +1,26 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PLACEHOLDER = 'PLACEHOLDER'
+
+# EN: "No folder matching the required format was found under the data source;
+# please make sure folders are named in the {PLACEHOLDER} format."
+CRON_SCHEDULER_FOLDER_NOT_READY_ERROR_MESSAGE = f'数据源下未找到满足格式要求的文件夹,请确认文件夹以{PLACEHOLDER}格式命名'
+# EN: "Check of the {PLACEHOLDER} folder failed; please make sure the data source
+# contains a folder named in the {PLACEHOLDER} format with a _SUCCESS file inside."
+CRON_SCHEDULER_CERTAIN_FOLDER_NOT_READY_ERROR_MESSAGE = \
+    f'{PLACEHOLDER}文件夹检查失败,请确认数据源下存在以{PLACEHOLDER}格式命名的文件夹,且文件夹下有_SUCCESS文件'
+# EN: "No data batch matching the required format was found; please make sure the
+# input dataset has a batch named in the {PLACEHOLDER} format."
+CRON_SCHEDULER_BATCH_NOT_READY_ERROR_MESSAGE = f'未找到满足格式要求的数据批次,请确保输入数据集有{PLACEHOLDER}格式命名的数据批次'
+# EN: "Check of data batch {PLACEHOLDER} failed; please verify the batch's naming format and state."
+CRON_SCHEDULER_CERTAIN_BATCH_NOT_READY_ERROR_MESSAGE = f'数据批次{PLACEHOLDER}检查失败,请确认该批次命名格式及状态'
+# EN: "Successfully launched the processing task for batch {PLACEHOLDER}."
+CRON_SCHEDULER_SUCCEEDED_MESSAGE = f'已成功发起{PLACEHOLDER}批次处理任务'
+
+ERROR_BATCH_SIZE = -1
+# Base64 for the vendor name ('volcengine').
+MANUFACTURER = 'dm9sY2VuZ2luZQ==' diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/controllers.py b/web_console_v2/api/fedlearner_webconsole/dataset/controllers.py new file mode 100644 index 000000000..253b17ccd --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/controllers.py @@ -0,0 +1,210 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
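The two non-obvious constants in consts.py above are straightforward to exercise; a short sketch (the `str.replace` substitution mechanism is an assumption on my part, and `'20220101'` is a made-up batch name):

```python
import base64

from fedlearner_webconsole.dataset.consts import (CRON_SCHEDULER_SUCCEEDED_MESSAGE, MANUFACTURER, PLACEHOLDER)

# The cron-scheduler messages embed the literal word PLACEHOLDER; callers
# presumably swap in the concrete folder/batch name before surfacing them.
message = CRON_SCHEDULER_SUCCEEDED_MESSAGE.replace(PLACEHOLDER, '20220101')

# MANUFACTURER is plain base64 for the vendor name.
assert base64.b64decode(MANUFACTURER).decode() == 'volcengine'
```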
+# + +import grpc +import logging +from sqlalchemy.orm import Session + +from fedlearner_webconsole.proto.two_pc_pb2 import LaunchDatasetJobData, LaunchDatasetJobStageData, \ + StopDatasetJobData, StopDatasetJobStageData, TransactionData, TwoPcType +from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient +from fedlearner_webconsole.rpc.v2.resource_service_client import ResourceServiceClient +from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.workflow import fill_variables +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobStage, DatasetJobState +from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger +from fedlearner_webconsole.dataset.services import DatasetJobService +from fedlearner_webconsole.dataset.auth_service import AuthService +from fedlearner_webconsole.exceptions import InvalidArgumentException, InternalException +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.workflow.workflow_controller import create_ready_workflow +from fedlearner_webconsole.two_pc.transaction_manager import TransactionManager +from fedlearner_webconsole.flag.models import Flag + + +class DatasetJobController: + + def __init__(self, session: Session): + self._session = session + + def _transfer_state(self, uuid: str, target_state: DatasetJobState): + dataset_job = self._session.query(DatasetJob).filter_by(uuid=uuid).first() + + participants = DatasetJobService(session=self._session).get_participants_need_distribute(dataset_job) + if target_state == DatasetJobState.RUNNING: + data = LaunchDatasetJobData(dataset_job_uuid=dataset_job.uuid) + two_pc_type = TwoPcType.LAUNCH_DATASET_JOB + transaction_data = TransactionData(launch_dataset_job_data=data) + elif target_state == DatasetJobState.STOPPED: + data = StopDatasetJobData(dataset_job_uuid=dataset_job.uuid) + two_pc_type = TwoPcType.STOP_DATASET_JOB + transaction_data = TransactionData(stop_dataset_job_data=data) + else: + raise InternalException(f'cannot transfer dataset_job state to {target_state.name} by two_pc') + + tm = TransactionManager(project_name=dataset_job.project.name, + project_token=dataset_job.project.token, + participants=[participant.domain_name for participant in participants], + two_pc_type=two_pc_type) + successed, message = tm.run(data=transaction_data) + if not successed: + err_msg = f'error when try to transfer dataset_job state to {target_state.name} by 2PC, ' \ + f'dataset_job_id: {dataset_job.id}, message: {message}' + logging.error(err_msg) + raise InternalException(err_msg) + + def start(self, uuid: str): + self._transfer_state(uuid=uuid, target_state=DatasetJobState.RUNNING) + + def stop(self, uuid: str): + self._transfer_state(uuid=uuid, target_state=DatasetJobState.STOPPED) + + # stop all related dataset_job_stage + dataset_job_stage_ids = self._session.query(DatasetJobStage.id).outerjoin( + DatasetJob, DatasetJobStage.dataset_job_id == DatasetJob.id).filter(DatasetJob.uuid == uuid).all() + for dataset_job_stage_id, *_ in dataset_job_stage_ids: + # check each dataset_job_stage, stop by 2pc if is not finished. + # we don't recheck job_stage state as TransactionManager will check dataset_job_stage state in new session. 
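+ # Only stages still in a non-terminal state are stopped; finished ones
+ # (presumably SUCCEEDED/FAILED/STOPPED) are skipped below.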
+ dataset_job_stage = self._session.query(DatasetJobStage).get(dataset_job_stage_id) + if not dataset_job_stage.is_finished(): + DatasetJobStageController(self._session).stop(uuid=dataset_job_stage.uuid) + + def inform_auth_status(self, dataset_job: DatasetJob, auth_status: AuthStatus): + participants = DatasetJobService(self._session).get_participants_need_distribute(dataset_job) + for participant in participants: + client = ResourceServiceClient.from_project_and_participant(domain_name=participant.domain_name, + project_name=dataset_job.project.name) + try: + client.inform_dataset(dataset_uuid=dataset_job.output_dataset.uuid, auth_status=auth_status) + except grpc.RpcError as err: + logging.warning( + f'[dataset_job_controller]: failed to inform particiapnt {participant.name} dataset auth_status, '\ + f'dataset name: {dataset_job.output_dataset.name}, exception: {err}' + ) + + def update_auth_status_cache(self, dataset_job: DatasetJob): + participants = DatasetJobService(self._session).get_participants_need_distribute(dataset_job) + for participant in participants: + try: + # check flag + client = SystemServiceClient.from_participant(domain_name=participant.domain_name) + resp = client.list_flags() + # if participant not supports list dataset rpc, just set AUTHORIZED + if not resp.get(Flag.LIST_DATASETS_RPC_ENABLED.name): + AuthService(self._session, + dataset_job=dataset_job).update_auth_status(domain_name=participant.pure_domain_name(), + auth_status=AuthStatus.AUTHORIZED) + continue + client = ResourceServiceClient.from_project_and_participant(domain_name=participant.domain_name, + project_name=dataset_job.project.name) + resp = client.list_datasets(uuid=dataset_job.output_dataset.uuid) + if len(resp.participant_datasets) == 0 or not resp.participant_datasets[0].auth_status: + logging.warning( + '[dataset_job_controller]: update auth_status cache failed as dataset not found, ' \ + f'or auth_status is None, particiapnt name: {participant.name}, ' \ + f'dataset name: {dataset_job.output_dataset.name}' + ) + continue + participant_auth_status = AuthStatus[resp.participant_datasets[0].auth_status] + AuthService(self._session, + dataset_job=dataset_job).update_auth_status(domain_name=participant.pure_domain_name(), + auth_status=participant_auth_status) + except grpc.RpcError as err: + logging.warning( + '[dataset_job_controller]: failed to update dataset auth_status_cache, ' \ + f'particiapnt name: {participant.name}, ' \ + f'dataset name: {dataset_job.output_dataset.name}, exception: {err}' + ) + + +class DatasetJobStageController: + + def __init__(self, session: Session): + self._session = session + + def create_ready_workflow(self, dataset_job_stage: DatasetJobStage) -> Workflow: + dataset_job: DatasetJob = dataset_job_stage.dataset_job + if not dataset_job_stage.is_coordinator(): + coordinator = self._session.query(Participant).get(dataset_job_stage.coordinator_id) + if coordinator is None: + raise InvalidArgumentException(f'failed to find participant {dataset_job_stage.coordinator_id}') + try: + client = JobServiceClient.from_project_and_participant(coordinator.domain_name, + dataset_job_stage.project.name) + pulled_dataset_job_stage = client.get_dataset_job_stage(dataset_job_stage_uuid=dataset_job_stage.uuid) + except grpc.RpcError as err: + logging.error(f'failed to call GetDatasetJobStage with status code {err.code()}, \ + and details {err.details()}') + raise + config = pulled_dataset_job_stage.dataset_job_stage.workflow_definition + global_configs = 
pulled_dataset_job_stage.dataset_job_stage.global_configs + + else: + # TODO(liuhehan): refactor to use rpc get config + config = DatasetJobConfiger.from_kind(dataset_job.kind, self._session).get_config() + global_configs = dataset_job_stage.get_global_configs() + + result_dataset = self._session.query(Dataset).get(dataset_job.output_dataset_id) + global_configs = DatasetJobConfiger.from_kind(dataset_job.kind, self._session).config_local_variables( + global_configs, result_dataset.uuid, dataset_job_stage.event_time) + + domain_name = SettingService.get_system_info().pure_domain_name + filled_config = fill_variables(config=config, variables=global_configs.global_configs[domain_name].variables) + workflow = create_ready_workflow( + session=self._session, + name=f'{dataset_job.kind.value}-{dataset_job_stage.uuid}', + config=filled_config, + project_id=dataset_job_stage.project_id, + uuid=dataset_job_stage.uuid, + ) + self._session.flush() + dataset_job_stage.workflow_id = workflow.id + + return workflow + + def _transfer_state(self, uuid: str, target_state: DatasetJobState): + dataset_job_stage: DatasetJobStage = self._session.query(DatasetJobStage).filter_by(uuid=uuid).first() + + assert target_state in [DatasetJobState.RUNNING, DatasetJobState.STOPPED] + if target_state == DatasetJobState.RUNNING: + data = LaunchDatasetJobStageData(dataset_job_stage_uuid=uuid) + two_pc_type = TwoPcType.LAUNCH_DATASET_JOB_STAGE + transaction_data = TransactionData(launch_dataset_job_stage_data=data) + else: + data = StopDatasetJobStageData(dataset_job_stage_uuid=uuid) + two_pc_type = TwoPcType.STOP_DATASET_JOB_STAGE + transaction_data = TransactionData(stop_dataset_job_stage_data=data) + + participants = DatasetJobService(session=self._session).get_participants_need_distribute( + dataset_job_stage.dataset_job) + tm = TransactionManager(project_name=dataset_job_stage.project.name, + project_token=dataset_job_stage.project.token, + participants=[participant.domain_name for participant in participants], + two_pc_type=two_pc_type) + succeeded, message = tm.run(data=transaction_data) + if not succeeded: + err_msg = f'error when try to transfer dataset_job_stage state to {target_state.name} by 2PC, ' \ + f'dataset_job_stage_id: {dataset_job_stage.id}, message: {message}' + logging.error(err_msg) + raise InternalException(err_msg) + + def start(self, uuid: str): + self._transfer_state(uuid=uuid, target_state=DatasetJobState.RUNNING) + + def stop(self, uuid: str): + self._transfer_state(uuid=uuid, target_state=DatasetJobState.STOPPED) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/controllers_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/controllers_test.py new file mode 100644 index 000000000..d60969f5c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/controllers_test.py @@ -0,0 +1,473 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
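Both controllers above funnel state changes through the same two-phase-commit dispatch: map the target state to a `TwoPcType` plus a `TransactionData` payload, run it via `TransactionManager` across every participant that needs the job, and raise `InternalException` when `tm.run` reports failure. A condensed sketch of the job-level mapping, assuming only the protos shown above (the stage-level mapping is symmetric); note proto enums are plain ints at runtime:

```python
from typing import Tuple

from fedlearner_webconsole.proto.two_pc_pb2 import (LaunchDatasetJobData, StopDatasetJobData,
                                                    TransactionData, TwoPcType)

def build_job_transaction(dataset_job_uuid: str, target_state: str) -> Tuple[int, TransactionData]:
    # Mirrors DatasetJobController._transfer_state: only RUNNING/STOPPED are
    # reachable through 2PC; anything else is rejected up front.
    if target_state == 'RUNNING':
        return TwoPcType.LAUNCH_DATASET_JOB, TransactionData(
            launch_dataset_job_data=LaunchDatasetJobData(dataset_job_uuid=dataset_job_uuid))
    if target_state == 'STOPPED':
        return TwoPcType.STOP_DATASET_JOB, TransactionData(
            stop_dataset_job_data=StopDatasetJobData(dataset_job_uuid=dataset_job_uuid))
    raise ValueError(f'cannot transfer dataset_job state to {target_state} by two_pc')
```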
+# + +# pylint: disable=protected-access +from datetime import datetime +import unittest +from unittest.mock import MagicMock, patch +from google.protobuf.struct_pb2 import Value + +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.dataset import FakeDatasetJobConfiger +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.utils.const import SYSTEM_WORKFLOW_CREATOR_USERNAME +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.controllers import DatasetJobController, DatasetJobStageController +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobStage, DatasetJobState +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto import dataset_pb2, service_pb2 +from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2 +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.two_pc_pb2 import LaunchDatasetJobData, LaunchDatasetJobStageData, \ + StopDatasetJobData, StopDatasetJobStageData, TransactionData, TwoPcType +from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import ListDatasetsResponse + + +def get_dataset_job_pb(*args, **kwargs) -> service_pb2.GetDatasetJobResponse: + dataset_job = dataset_pb2.DatasetJob(uuid='u1234') + global_configs = dataset_pb2.DatasetJobGlobalConfigs() + global_configs.global_configs['test_domain'].MergeFrom( + dataset_pb2.DatasetJobConfig(dataset_uuid=resource_uuid(), + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + dataset_job.global_configs.MergeFrom(global_configs) + return service_pb2.GetDatasetJobResponse(dataset_job=dataset_job) + + +class DatasetJobControllerTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _PARTICIPANT_ID = 1 + _OUTPUT_DATASET_ID = 1 + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test-project') + participant = Participant(id=self._PARTICIPANT_ID, name='participant_1', domain_name='fake_domain_name_1') + project_participant = ProjectParticipant(project_id=self._PROJECT_ID, participant_id=self._PARTICIPANT_ID) + session.add(project) + session.add(participant) + session.add(project_participant) + output_dataset = Dataset(id=self._OUTPUT_DATASET_ID, + name='test_output_dataset', + uuid=resource_uuid(), + path='/data/dataset/test_dataset') + session.add(output_dataset) + session.commit() + + @patch('fedlearner_webconsole.dataset.services.DatasetJobService.need_distribute') + @patch('fedlearner_webconsole.dataset.controllers.TransactionManager') + def test_transfer_state(self, mock_transaction_manager: MagicMock, mock_need_distribute: MagicMock): + dataset_job_id = 10 + workflow_id = 11 + with db.session_scope() as session: + uuid = resource_uuid() + workflow = Workflow(id=workflow_id, uuid=uuid) + dataset_job = DatasetJob(id=dataset_job_id, 
+ uuid=uuid, + kind=DatasetJobKind.IMPORT_SOURCE, + project_id=self._PROJECT_ID, + workflow_id=workflow_id, + input_dataset_id=1, + output_dataset_id=2) + session.add(workflow) + session.add(dataset_job) + session.commit() + + mock_need_distribute.return_value = False + mock_run = MagicMock(return_value=(True, '')) + mock_transaction_manager.return_value = MagicMock(run=mock_run) + + # test illegal target state + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(dataset_job_id) + with self.assertRaises(InternalException): + DatasetJobController(session)._transfer_state(uuid=dataset_job.uuid, + target_state=DatasetJobState.SUCCEEDED) + mock_transaction_manager.assert_not_called() + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(dataset_job_id) + DatasetJobController(session)._transfer_state(uuid=dataset_job.uuid, target_state=DatasetJobState.RUNNING) + data = LaunchDatasetJobData(dataset_job_uuid=dataset_job.uuid) + mock_run.assert_called_with(data=TransactionData(launch_dataset_job_data=data)) + mock_transaction_manager.assert_called_with(project_name='test-project', + project_token=None, + two_pc_type=TwoPcType.LAUNCH_DATASET_JOB, + participants=[]) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(dataset_job_id) + DatasetJobController(session)._transfer_state(uuid=dataset_job.uuid, target_state=DatasetJobState.STOPPED) + data = StopDatasetJobData(dataset_job_uuid=dataset_job.uuid) + mock_run.assert_called_with(data=TransactionData(stop_dataset_job_data=data)) + mock_transaction_manager.assert_called_with(project_name='test-project', + project_token=None, + two_pc_type=TwoPcType.STOP_DATASET_JOB, + participants=[]) + + mock_need_distribute.return_value = True + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(dataset_job_id) + DatasetJobController(session)._transfer_state(uuid=dataset_job.uuid, target_state=DatasetJobState.RUNNING) + data = LaunchDatasetJobData(dataset_job_uuid=dataset_job.uuid) + mock_run.assert_called_with(data=TransactionData(launch_dataset_job_data=data)) + mock_transaction_manager.assert_called_with(project_name='test-project', + project_token=None, + two_pc_type=TwoPcType.LAUNCH_DATASET_JOB, + participants=['fake_domain_name_1']) + + mock_run = MagicMock(return_value=(False, '')) + mock_transaction_manager.return_value = MagicMock(run=mock_run) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(dataset_job_id) + with self.assertRaises(InternalException): + DatasetJobController(session)._transfer_state(uuid=dataset_job.uuid, + target_state=DatasetJobState.RUNNING) + + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobController._transfer_state') + def test_start(self, mock_transfer_state: MagicMock): + with db.session_scope() as session: + DatasetJobController(session).start(uuid=1) + mock_transfer_state.assert_called_once_with(uuid=1, target_state=DatasetJobState.RUNNING) + + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobController._transfer_state') + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobStageController.stop') + def test_stop(self, mock_dataset_job_stage_stop: MagicMock, mock_transfer_state: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='u54321', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=0, + coordinator_id=0, + ) + session.add(dataset_job) + 
dataset_job_stage_1 = DatasetJobStage(id=1, + uuid='job_stage uuid_1', + name='default dataset job stage 1', + project_id=1, + workflow_id=1, + created_at=datetime(2022, 1, 1, 0, 0, 0), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 1), + state=DatasetJobState.PENDING) + session.add(dataset_job_stage_1) + dataset_job_stage_2 = DatasetJobStage(id=2, + uuid='job_stage uuid_2', + name='default dataset job stage 2', + project_id=1, + workflow_id=1, + created_at=datetime(2022, 1, 2, 0, 0, 0), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 2), + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job_stage_2) + session.commit() + with db.session_scope() as session: + DatasetJobController(session).stop(uuid='u54321') + mock_transfer_state.assert_called_once_with(uuid='u54321', target_state=DatasetJobState.STOPPED) + mock_dataset_job_stage_stop.assert_called_once_with(uuid='job_stage uuid_1') + + @patch('fedlearner_webconsole.dataset.controllers.ResourceServiceClient.inform_dataset') + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobService.get_participants_need_distribute') + def test_inform_auth_status(self, mock_get_participants_need_distribute: MagicMock, mock_inform_dataset: MagicMock): + particiapnt = Participant(id=1, name='test_participant', domain_name='fl-test-domain-name.com') + mock_get_participants_need_distribute.return_value = [particiapnt] + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='u54321', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=self._OUTPUT_DATASET_ID, + coordinator_id=0, + ) + session.add(dataset_job) + session.flush() + DatasetJobController(session=session).inform_auth_status(dataset_job=dataset_job, + auth_status=AuthStatus.AUTHORIZED) + mock_inform_dataset.assert_called_once_with(dataset_uuid=dataset_job.output_dataset.uuid, + auth_status=AuthStatus.AUTHORIZED) + + @patch('fedlearner_webconsole.dataset.controllers.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.dataset.controllers.ResourceServiceClient.list_datasets') + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobService.get_participants_need_distribute') + def test_update_auth_status_cache(self, mock_get_participants_need_distribute: MagicMock, + mock_list_datasets: MagicMock, mock_list_flags: MagicMock): + particiapnt = Participant(id=1, name='test_participant', domain_name='fl-test-domain-name.com') + mock_get_participants_need_distribute.return_value = [particiapnt] + mock_list_datasets.return_value = ListDatasetsResponse( + participant_datasets=[dataset_pb2.ParticipantDatasetRef(auth_status=AuthStatus.AUTHORIZED.name)]) + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='u54321', + project_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=123, + output_dataset_id=self._OUTPUT_DATASET_ID, + coordinator_id=0, + ) + session.add(dataset_job) + dataset: Dataset = session.query(Dataset).get(self._OUTPUT_DATASET_ID) + participants_info = ParticipantsInfo( + participants_map={'test-domain-name': ParticipantInfo(auth_status=AuthStatus.PENDING.name)}) + dataset.set_participants_info(participants_info=participants_info) + session.flush() + mock_list_flags.return_value = {'list_datasets_rpc_enabled': False} + DatasetJobController(session=session).update_auth_status_cache(dataset_job=dataset_job) + mock_list_datasets.assert_not_called() + mock_list_flags.reset_mock() + mock_list_flags.return_value 
= {'list_datasets_rpc_enabled': True} + DatasetJobController(session=session).update_auth_status_cache(dataset_job=dataset_job) + mock_list_datasets.assert_called_once_with(uuid=dataset_job.output_dataset.uuid) + self.assertEqual(dataset.get_participants_info().participants_map['test-domain-name'].auth_status, + AuthStatus.AUTHORIZED.name) + + +def get_dataset_job_stage_pb(*args, **kwargs) -> job_service_pb2.GetDatasetJobStageResponse: + dataset_job_stage = dataset_pb2.DatasetJobStage(uuid='dataset_job_stage uuid') + global_configs = dataset_pb2.DatasetJobGlobalConfigs() + global_configs.global_configs['test_domain'].MergeFrom( + dataset_pb2.DatasetJobConfig(dataset_uuid=resource_uuid(), + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + dataset_job_stage.global_configs.MergeFrom(global_configs) + return job_service_pb2.GetDatasetJobStageResponse(dataset_job_stage=dataset_job_stage) + + +class DatasetJobStageControllerTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _PARTICIPANT_ID = 1 + _OUTPUT_DATASET_ID = 1 + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test-project') + participant = Participant(id=self._PARTICIPANT_ID, name='participant_1', domain_name='fake_domain_name_1') + project_participant = ProjectParticipant(project_id=self._PROJECT_ID, participant_id=self._PARTICIPANT_ID) + session.add(project) + session.add(participant) + session.add(project_participant) + output_dataset = Dataset(id=self._OUTPUT_DATASET_ID, + name='test_output_dataset', + uuid='output_dataset uuid', + path='/data/dataset/test_dataset') + session.add(output_dataset) + session.commit() + + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobConfiger.from_kind', + lambda *args: FakeDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.dataset.controllers.SettingService.get_system_info', + lambda: SystemInfo(name='test', domain_name='test_domain.fedlearner.net')) + def test_create_ready_workflow_coordinator(self): + + with db.session_scope() as session: + + dataset_job = DatasetJob( + id=1, + uuid='dataset_job uuid', + kind=DatasetJobKind.IMPORT_SOURCE, + coordinator_id=0, + project_id=self._PROJECT_ID, + input_dataset_id=0, + output_dataset_id=self._OUTPUT_DATASET_ID, + ) + session.add(dataset_job) + uuid = resource_uuid() + dataset_job_stage = DatasetJobStage( + id=1, + uuid=uuid, + project_id=self._PROJECT_ID, + dataset_job_id=1, + data_batch_id=1, + coordinator_id=0, + ) + session.add(dataset_job_stage) + + global_configs = dataset_pb2.DatasetJobGlobalConfigs() + global_configs.global_configs['test_domain'].MergeFrom( + dataset_pb2.DatasetJobConfig(dataset_uuid=resource_uuid(), + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + dataset_job_stage.set_global_configs(global_configs) + session.flush() + + wf = DatasetJobStageController(session).create_ready_workflow(dataset_job_stage) + self.assertEqual(wf.uuid, uuid) + self.assertEqual(wf.creator, SYSTEM_WORKFLOW_CREATOR_USERNAME) + + @patch('fedlearner_webconsole.dataset.controllers.JobServiceClient.get_dataset_job_stage') + 
@patch('fedlearner_webconsole.dataset.job_configer.import_source_configer.ImportSourceConfiger.'\ + 'config_local_variables') + @patch('fedlearner_webconsole.dataset.controllers.SettingService.get_system_info', + lambda: SystemInfo(name='test', domain_name='test_domain.fedlearner.net')) + def test_create_ready_workflow_participant(self, mock_config_local_variables: MagicMock, + mock_get_dataset_job_stage: MagicMock): + get_dataset_job_stage_response = get_dataset_job_stage_pb() + mock_get_dataset_job_stage.return_value = get_dataset_job_stage_response + mock_config_local_variables.return_value = get_dataset_job_stage_response.dataset_job_stage.global_configs + with db.session_scope() as session: + dataset_job = DatasetJob( + id=1, + uuid='dataset_job uuid', + kind=DatasetJobKind.IMPORT_SOURCE, + coordinator_id=0, + project_id=self._PROJECT_ID, + input_dataset_id=0, + output_dataset_id=self._OUTPUT_DATASET_ID, + ) + session.add(dataset_job) + uuid = resource_uuid() + dataset_job_stage = DatasetJobStage( + id=1, + uuid=uuid, + project_id=self._PROJECT_ID, + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 1), + coordinator_id=self._PARTICIPANT_ID, + ) + session.add(dataset_job_stage) + session.flush() + + wf = DatasetJobStageController(session).create_ready_workflow(dataset_job_stage) + self.assertEqual(wf.uuid, uuid) + self.assertEqual(wf.creator, SYSTEM_WORKFLOW_CREATOR_USERNAME) + mock_config_local_variables.assert_called_once_with( + get_dataset_job_stage_response.dataset_job_stage.global_configs, 'output_dataset uuid', + datetime(2022, 1, 1)) + + @patch('fedlearner_webconsole.dataset.services.DatasetJobService.get_participants_need_distribute') + @patch('fedlearner_webconsole.dataset.controllers.TransactionManager') + def test_transfer_state(self, mock_transaction_manager: MagicMock, + mock_get_participants_need_distribute: MagicMock): + dataset_job_id = 10 + dataset_job_stage_id = 11 + workflow_id = 12 + with db.session_scope() as session: + uuid = resource_uuid() + workflow = Workflow(id=workflow_id, uuid=uuid) + dataset_job = DatasetJob(id=dataset_job_id, + uuid=resource_uuid(), + kind=DatasetJobKind.IMPORT_SOURCE, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2) + dataset_job_stage = DatasetJobStage(id=dataset_job_stage_id, + name='stage_1', + uuid=uuid, + dataset_job_id=dataset_job_id, + workflow_id=workflow_id, + project_id=self._PROJECT_ID, + data_batch_id=1, + state=DatasetJobState.PENDING) + session.add(workflow) + session.add(dataset_job) + session.add(dataset_job_stage) + session.commit() + + mock_get_participants_need_distribute.return_value = [] + mock_run = MagicMock(return_value=(True, '')) + mock_transaction_manager.return_value = MagicMock(run=mock_run) + + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + DatasetJobStageController(session)._transfer_state(uuid=dataset_job_stage.uuid, + target_state=DatasetJobState.RUNNING) + data = LaunchDatasetJobStageData(dataset_job_stage_uuid=dataset_job_stage.uuid) + mock_run.assert_called_with(data=TransactionData(launch_dataset_job_stage_data=data)) + mock_transaction_manager.assert_called_with(project_name='test-project', + project_token=None, + two_pc_type=TwoPcType.LAUNCH_DATASET_JOB_STAGE, + participants=[]) + + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + DatasetJobStageController(session)._transfer_state(uuid=dataset_job_stage.uuid, + 
target_state=DatasetJobState.STOPPED) + data = StopDatasetJobStageData(dataset_job_stage_uuid=dataset_job_stage.uuid) + mock_run.assert_called_with(data=TransactionData(stop_dataset_job_stage_data=data)) + mock_transaction_manager.assert_called_with(project_name='test-project', + project_token=None, + two_pc_type=TwoPcType.STOP_DATASET_JOB_STAGE, + participants=[]) + + mock_get_participants_need_distribute.return_value = [Participant(domain_name='fake_domain_name_1')] + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + DatasetJobStageController(session)._transfer_state(uuid=dataset_job_stage.uuid, + target_state=DatasetJobState.RUNNING) + data = LaunchDatasetJobStageData(dataset_job_stage_uuid=dataset_job_stage.uuid) + mock_run.assert_called_with(data=TransactionData(launch_dataset_job_stage_data=data)) + mock_transaction_manager.assert_called_with(project_name='test-project', + project_token=None, + two_pc_type=TwoPcType.LAUNCH_DATASET_JOB_STAGE, + participants=['fake_domain_name_1']) + + mock_run = MagicMock(return_value=(False, '')) + mock_transaction_manager.return_value = MagicMock(run=mock_run) + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + with self.assertRaises(InternalException): + DatasetJobStageController(session)._transfer_state(uuid=dataset_job_stage.uuid, + target_state=DatasetJobState.RUNNING) + + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobStageController._transfer_state') + def test_start(self, mock_transfer_state: MagicMock): + with db.session_scope() as session: + DatasetJobStageController(session).start(uuid=1) + mock_transfer_state.assert_called_once_with(uuid=1, target_state=DatasetJobState.RUNNING) + + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobStageController._transfer_state') + def test_stop(self, mock_transfer_state: MagicMock): + with db.session_scope() as session: + DatasetJobStageController(session).stop(uuid=1) + mock_transfer_state.assert_called_once_with(uuid=1, target_state=DatasetJobState.STOPPED) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/data_path.py b/web_console_v2/api/fedlearner_webconsole/dataset/data_path.py new file mode 100644 index 000000000..f7dc0ebba --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/data_path.py @@ -0,0 +1,26 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
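The `_transfer_state` tests above pin down the controller's two-phase-commit flow without showing the implementation. As a reading aid, here is a minimal sketch of that flow reconstructed purely from the test assertions; the function shape, the session lookup, and the error wording are assumptions, not the actual code in `dataset/controllers.py` (imports are omitted since the exact module paths are not shown in this diff; the names are exactly those used by the tests):

```python
# Hypothetical reconstruction from the test expectations above, not the real implementation.
def transfer_state(session, uuid: str, target_state: DatasetJobState):
    stage = session.query(DatasetJobStage).filter_by(uuid=uuid).first()
    if target_state == DatasetJobState.RUNNING:
        two_pc_type = TwoPcType.LAUNCH_DATASET_JOB_STAGE
        data = TransactionData(
            launch_dataset_job_stage_data=LaunchDatasetJobStageData(dataset_job_stage_uuid=uuid))
    else:  # DatasetJobState.STOPPED
        two_pc_type = TwoPcType.STOP_DATASET_JOB_STAGE
        data = TransactionData(
            stop_dataset_job_stage_data=StopDatasetJobStageData(dataset_job_stage_uuid=uuid))
    # Participants come from DatasetJobService.get_participants_need_distribute,
    # and only their domain names are handed to the transaction manager.
    participants = DatasetJobService(session).get_participants_need_distribute(stage.dataset_job)
    tm = TransactionManager(project_name=stage.project.name,
                            project_token=None,
                            two_pc_type=two_pc_type,
                            participants=[p.domain_name for p in participants])
    succeeded, message = tm.run(data=data)
    if not succeeded:
        raise InternalException(message)
```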
+# + + from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory + from fedlearner_webconsole.dataset.models import DataBatch, ImportType + from fedlearner_webconsole.utils.file_manager import FileManager + + + # this helper lives outside the DataBatch model because it needs to read from the file system + def get_batch_data_path(data_batch: DataBatch) -> str: + if data_batch.dataset.import_type == ImportType.NO_COPY: + source_batch_path = DatasetDirectory(data_batch.dataset.path).source_batch_path_file(data_batch.batch_name) + return FileManager().read(source_batch_path) + return data_batch.path diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/data_path_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/data_path_test.py new file mode 100644 index 000000000..954ede9d2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/data_path_test.py @@ -0,0 +1,77 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime +import unittest +from unittest.mock import MagicMock, patch + +from testing.no_web_server_test_case import NoWebServerTestCase + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, DatasetKindV2, ImportType, DatasetType, DataBatch +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.utils.resource_name import resource_uuid + + +class DataPathTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset = Dataset(id=1, + uuid=resource_uuid(), + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_kind=DatasetKindV2.RAW, + is_published=False, + import_type=ImportType.NO_COPY) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220701', + dataset_id=1, + path='/data/test/batch/20220701', + event_time=datetime.strptime('20220701', '%Y%m%d'), + file_size=100, + num_example=10, + num_feature=3, + latest_parent_dataset_job_stage_id=1) + session.add(data_batch) + session.commit() + + @patch('fedlearner_webconsole.dataset.data_path.FileManager.read') + def test_get_batch_data_path(self, mock_read: MagicMock): + source_path = '/data/data_source/batch_1' + mock_read.return_value = source_path + # test get data_path when import_type is NO_COPY + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).get(1) + self.assertEqual(get_batch_data_path(data_batch), source_path) + mock_read.assert_called_once_with('/data/dataset/123/batch/20220701/source_batch_path') + # test get data_path when import_type is COPY + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(1) + dataset.import_type = ImportType.COPY + session.commit() + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).get(1) +
self.assertEqual(get_batch_data_path(data_batch), data_batch.path) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/data_pipeline.py b/web_console_v2/api/fedlearner_webconsole/dataset/data_pipeline.py deleted file mode 100644 index 70b6d4588..000000000 --- a/web_console_v2/api/fedlearner_webconsole/dataset/data_pipeline.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import io -import logging -import os -import tarfile -import traceback - -from enum import Enum -from copy import deepcopy -from typing import Tuple, Optional, List -from uuid import uuid4 - -from envs import Envs - -from fedlearner_webconsole.composer.interface import IItem, IRunner, ItemType -from fedlearner_webconsole.composer.models import Context, RunnerStatus -from fedlearner_webconsole.sparkapp.service import SparkAppService -from fedlearner_webconsole.sparkapp.schema import SparkAppConfig - - -class DataPipelineType(Enum): - ANALYZER = 'analyzer' - CONVERTER = 'converter' - TRANSFORMER = 'transformer' - - -class DataPipelineItem(IItem): - def __init__(self, task_id: int): - self.id = task_id - - def type(self) -> ItemType: - return ItemType.DATA_PIPELINE - - def get_id(self) -> int: - return self.id - - -class DataPipelineRunner(IRunner): - TYPE_PARAMS_MAPPER = { - DataPipelineType.ANALYZER: { - 'files_dir': 'fedlearner_webconsole/dataset/sparkapp/pipeline', - 'main_application': 'pipeline/analyzer.py', - }, - DataPipelineType.CONVERTER: { - 'files_dir': 'fedlearner_webconsole/dataset/sparkapp/pipeline', - 'main_application': 'pipeline/converter.py', - }, - DataPipelineType.TRANSFORMER: { - 'files_dir': 'fedlearner_webconsole/dataset/sparkapp/pipeline', - 'main_application': 'pipeline/transformer.py', - } - } - - SPARKAPP_STATE_TO_RUNNER_STATUS = { - '': RunnerStatus.RUNNING, - 'SUBMITTED': RunnerStatus.RUNNING, - 'PENDING_RERUN': RunnerStatus.RUNNING, - 'RUNNING': RunnerStatus.RUNNING, - 'COMPLETED': RunnerStatus.DONE, - 'SUCCEEDING': RunnerStatus.DONE, - 'FAILED': RunnerStatus.FAILED, - 'SUBMISSION_FAILED': RunnerStatus.FAILED, - 'INVALIDATING': RunnerStatus.FAILED, - 'FAILING': RunnerStatus.FAILED, - 'UNKNOWN': RunnerStatus.FAILED - } - - def __init__(self, task_id: int) -> None: - self.task_id = task_id - self.task_type = None - self.files_dir = None - self.files_path = None - self.main_application = None - self.command = [] - self.sparkapp_name = None - self.args = {} - self.started = False - self.error_msg = False - - self.spark_service = SparkAppService() - - def start(self, context: Context): - try: - self.started = True - self.args = deepcopy(context.data.get(str(self.task_id), {})) - self.task_type = DataPipelineType(self.args.pop('task_type')) - name = self.args.pop('sparkapp_name') - job_id = uuid4().hex - self.sparkapp_name = f'pipe-{self.task_type.value}-{job_id}-{name}' - - params = 
self.__class__.TYPE_PARAMS_MAPPER[self.task_type] - self.files_dir = os.path.join(Envs.BASE_DIR, params['files_dir']) - self.files_path = Envs.SPARKAPP_FILES_PATH - self.main_application = params['main_application'] - self.command = self.args.pop('input') - - files = None - if self.files_path is None: - files_obj = io.BytesIO() - with tarfile.open(fileobj=files_obj, mode='w') as f: - f.add(self.files_dir) - files = files_obj.getvalue() - - config = { - 'name': self.sparkapp_name, - 'files': files, - 'files_path': self.files_path, - 'image_url': Envs.SPARKAPP_IMAGE_URL, - 'volumes': gen_sparkapp_volumes(Envs.SPARKAPP_VOLUMES), - 'driver_config': { - 'cores': - 1, - 'memory': - '4g', - 'volume_mounts': - gen_sparkapp_volume_mounts(Envs.SPARKAPP_VOLUME_MOUNTS), - }, - 'executor_config': { - 'cores': - 2, - 'memory': - '4g', - 'instances': - 1, - 'volume_mounts': - gen_sparkapp_volume_mounts(Envs.SPARKAPP_VOLUME_MOUNTS), - }, - 'main_application': f'${{prefix}}/{self.main_application}', - 'command': self.command, - } - config_dict = SparkAppConfig.from_dict(config) - resp = self.spark_service.submit_sparkapp(config=config_dict) - logging.info( - f'created spark app, name: {name}, ' - f'config: {config_dict.__dict__}, resp: {resp.__dict__}') - except Exception as e: # pylint: disable=broad-except - self.error_msg = f'[composer] failed to run this item, err: {e}, \ - trace: {traceback.format_exc()}' - - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - if self.error_msg: - context.set_data(f'failed_{self.task_id}', - {'error': self.error_msg}) - return RunnerStatus.FAILED, {} - if not self.started: - return RunnerStatus.RUNNING, {} - resp = self.spark_service.get_sparkapp_info(self.sparkapp_name) - logging.info(f'sparkapp resp: {resp.__dict__}') - if not resp.state: - return RunnerStatus.RUNNING, {} - return self.__class__.SPARKAPP_STATE_TO_RUNNER_STATUS.get( - resp.state, RunnerStatus.FAILED), resp.to_dict() - - -def gen_sparkapp_volumes(value: str) -> Optional[List[dict]]: - if value != 'data': - return None - # TODO: better to read from conf - return [{ - 'name': 'data', - 'persistentVolumeClaim': { - 'claimName': 'pvc-fedlearner-default' - } - }] - - -def gen_sparkapp_volume_mounts(value: str) -> Optional[List[dict]]: - if value != 'data': - return None - # TODO: better to read from conf - return [{'name': 'data', 'mountPath': '/data'}] diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/dataset_directory.py b/web_console_v2/api/fedlearner_webconsole/dataset/dataset_directory.py new file mode 100644 index 000000000..544b2151f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/dataset_directory.py @@ -0,0 +1,101 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + # copied from fedlearner_web_console_v2/web_console_v2/inspection/dataset_directory.py + + import os + + + class DatasetDirectory(object): + """ + Dataset directory structure + | + |--- batch ---- batch_name_1 --- real data files + | | + | |- batch_name_2 --- real data files + | | + | |- batch_name_3 --- real data files + | + |--- meta --- batch_name_1 --- thumbnails (only for image) --- preview image (.png) + | | | + | | |- _META + | | + | |- batch_name_2 --- thumbnails (only for image) --- preview image (.png) + | | | + | | |- _META + | | + | |- batch_name_3 --- thumbnails (only for image) --- preview image (.png) + | | | + | | |- _META + | + |--- errors --- batch_name_1 --- error message files (.csv) + | | + | |- batch_name_2 --- error message files (.csv) + | | + | |- batch_name_3 --- error message files (.csv) + | + |--- side_output --- batch_name_1 --- intermediate data + | | + | |- batch_name_2 --- intermediate data + | | + | |- batch_name_3 --- intermediate data + | + |--- _META (now moved to meta/batch_name, to be deleted in the future) + | + |--- schema.json + + """ + _BATCH_DIR = 'batch' + _META_DIR = 'meta' + _ERRORS_DIR = 'errors' + _SIDE_OUTPUT_DIR = 'side_output' + _THUMBNAILS_DIR = 'thumbnails' + _META_FILE = '_META' + _SCHEMA_FILE = 'schema.json' + _SOURCE_BATCH_PATH_FILE = 'source_batch_path' + + def __init__(self, dataset_path: str): + self._dataset_path = dataset_path + + @property + def dataset_path(self) -> str: + return self._dataset_path + + def batch_path(self, batch_name: str) -> str: + return os.path.join(self._dataset_path, self._BATCH_DIR, batch_name) + + def errors_path(self, batch_name: str) -> str: + return os.path.join(self._dataset_path, self._ERRORS_DIR, batch_name) + + def thumbnails_path(self, batch_name: str) -> str: + return os.path.join(self._dataset_path, self._META_DIR, batch_name, self._THUMBNAILS_DIR) + + def side_output_path(self, batch_name: str) -> str: + return os.path.join(self._dataset_path, self._SIDE_OUTPUT_DIR, batch_name) + + def source_batch_path_file(self, batch_name: str) -> str: + return os.path.join(self.batch_path(batch_name), self._SOURCE_BATCH_PATH_FILE) + + def batch_meta_file(self, batch_name: str) -> str: + return os.path.join(self._dataset_path, self._META_DIR, batch_name, self._META_FILE) + + @property + def schema_file(self) -> str: + return os.path.join(self._dataset_path, self._SCHEMA_FILE) + + # TODO(liuhehan): remove it in future + @property + def meta_file(self) -> str: + return os.path.join(self._dataset_path, self._META_FILE) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/dataset_directory_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/dataset_directory_test.py new file mode 100644 index 000000000..a1c3a88df --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/dataset_directory_test.py @@ -0,0 +1,66 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
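Since `DatasetDirectory` is pure path arithmetic, its behavior is easiest to see from a short sketch (the dataset path below is illustrative; the expected values follow directly from the class above):

```python
from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory

dataset_dir = DatasetDirectory(dataset_path='/data/dataset/my_dataset')
# Batch data lives under batch/<batch_name>.
assert dataset_dir.batch_path('20220701') == '/data/dataset/my_dataset/batch/20220701'
# Per-batch metadata lives under meta/<batch_name>/_META.
assert dataset_dir.batch_meta_file('20220701') == '/data/dataset/my_dataset/meta/20220701/_META'
# NO_COPY datasets store a pointer file inside the batch directory.
assert dataset_dir.source_batch_path_file('20220701') == \
    '/data/dataset/my_dataset/batch/20220701/source_batch_path'
assert dataset_dir.schema_file == '/data/dataset/my_dataset/schema.json'
```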
+# + + # copied from fedlearner_web_console_v2/web_console_v2/inspection/dataset_directory_test.py + + import unittest + + from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory + + + class UtilTest(unittest.TestCase): + _DATASET_PATH = '/fakepath/test_dataset' + _BATCH_NAME = 'test_batch_name' + + def setUp(self) -> None: + super().setUp() + self._dataset_dir = DatasetDirectory(dataset_path=self._DATASET_PATH) + + def test_dataset_path(self): + self.assertEqual(self._dataset_dir.dataset_path, self._DATASET_PATH) + + def test_batch_path(self): + self.assertEqual(self._dataset_dir.batch_path(self._BATCH_NAME), + f'{self._DATASET_PATH}/batch/{self._BATCH_NAME}') + + def test_errors_path(self): + self.assertEqual(self._dataset_dir.errors_path(self._BATCH_NAME), + f'{self._DATASET_PATH}/errors/{self._BATCH_NAME}') + + def test_thumbnails_path(self): + self.assertEqual(self._dataset_dir.thumbnails_path(self._BATCH_NAME), + f'{self._DATASET_PATH}/meta/{self._BATCH_NAME}/thumbnails') + + def test_batch_meta_file(self): + self.assertEqual(self._dataset_dir.batch_meta_file(self._BATCH_NAME), + f'{self._DATASET_PATH}/meta/{self._BATCH_NAME}/_META') + + def test_side_output_path(self): + self.assertEqual(self._dataset_dir.side_output_path(self._BATCH_NAME), + f'{self._DATASET_PATH}/side_output/{self._BATCH_NAME}') + + def test_schema_file(self): + self.assertEqual(self._dataset_dir.schema_file, f'{self._DATASET_PATH}/schema.json') + + def test_meta_file(self): + self.assertEqual(self._dataset_dir.meta_file, f'{self._DATASET_PATH}/_META') + + def test_source_batch_path_file(self): + self.assertEqual(self._dataset_dir.source_batch_path_file(self._BATCH_NAME), + f'{self._DATASET_PATH}/batch/{self._BATCH_NAME}/source_batch_path') + + + if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/delete_dependency.py b/web_console_v2/api/fedlearner_webconsole/dataset/delete_dependency.py new file mode 100644 index 000000000..499f6be64 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/delete_dependency.py @@ -0,0 +1,72 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +from sqlalchemy import or_ +from fedlearner_webconsole.mmgr.models import ModelJob +from fedlearner_webconsole.workflow.models import WorkflowExternalState +from fedlearner_webconsole.dataset.models import Dataset, DatasetJobSchedulerState, ResourceState, DatasetJob +from typing import List, Tuple + + +class DatasetDeleteDependency(object): + + def __init__(self, session) -> None: + self._session = session + self._check_pipeline = [self._check_model_jobs, self._check_dataset, self._check_dataset_jobs] + + def is_deletable(self, dataset: Dataset) -> Tuple[bool, List[str]]: + # warning: No lock on modelJob table + # TODO(wangzeju): Ensure correct check results when concurrently modifying modelJob + is_deletable, msg = True, [] + for check_func in self._check_pipeline: + result = check_func(dataset=dataset) + is_deletable, msg = is_deletable & result[0], msg + result[1] + return is_deletable, msg + + def _check_model_jobs(self, dataset: Dataset) -> Tuple[bool, List[str]]: + dataset_id = dataset.id + is_deletable, msg = True, [] + model_jobs: List[ModelJob] = self._session.query(ModelJob).filter_by(dataset_id=dataset_id).all() + for model_job in model_jobs: + state = model_job.state + if state not in [ + WorkflowExternalState.COMPLETED, WorkflowExternalState.FAILED, WorkflowExternalState.STOPPED, + WorkflowExternalState.INVALID + ]: + is_deletable, msg = False, msg + [f'The Model Job: {model_job.name} is using this dataset'] + return is_deletable, msg + + def _check_dataset_jobs(self, dataset: Dataset) -> Tuple[bool, List[str]]: + is_deletable, msg = True, [] + dataset_jobs = self._session.query(DatasetJob).filter(DatasetJob.input_dataset_id == dataset.id).all() + for dataset_job in dataset_jobs: + if not dataset_job.is_finished(): + is_deletable, msg = False, msg + [ + f'dependent dataset_job is not finished, dataset_job_id: {dataset_job.id}' + ] + cron_dataset_jobs = self._session.query(DatasetJob).filter( + or_(DatasetJob.input_dataset_id == dataset.id, DatasetJob.output_dataset_id == dataset.id)).filter( + DatasetJob.scheduler_state == DatasetJobSchedulerState.RUNNABLE).all() + if cron_dataset_jobs: + is_deletable, msg = False, msg + [ + 'dependent cron dataset_job is still runnable, plz stop scheduler first! ' \ + f'dataset_jobs_id: {[cron_dataset_job.id for cron_dataset_job in cron_dataset_jobs]}' + ] + return is_deletable, msg + + def _check_dataset(self, dataset: Dataset) -> Tuple[bool, List[str]]: + if not dataset.get_frontend_state() in [ResourceState.SUCCEEDED, ResourceState.FAILED]: + return False, [f'The dataset {dataset.name} is being processed'] + return True, [] diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/delete_dependency_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/delete_dependency_test.py new file mode 100644 index 000000000..808a5cf1e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/delete_dependency_test.py @@ -0,0 +1,211 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
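Note that `is_deletable` deliberately runs every check in `_check_pipeline` and concatenates the messages, so a caller can surface all blocking dependencies in one pass instead of failing fast. A hypothetical call site (the handler function and the exception type here are illustrative, not part of this diff):

```python
from fedlearner_webconsole.dataset.delete_dependency import DatasetDeleteDependency
from fedlearner_webconsole.dataset.models import Dataset
from fedlearner_webconsole.db import db


def delete_dataset(dataset_id: int):
    # Illustrative sketch: verify all dependencies before deleting.
    with db.session_scope() as session:
        dataset = session.query(Dataset).get(dataset_id)
        is_deletable, messages = DatasetDeleteDependency(session).is_deletable(dataset)
        if not is_deletable:
            # Report every blocker at once, e.g. running model jobs and cron dataset jobs.
            raise ValueError('; '.join(messages))
        session.delete(dataset)
        session.commit()
```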
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, PropertyMock +from datetime import datetime + +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.workflow.models import WorkflowExternalState +from fedlearner_webconsole.mmgr.models import ModelJob +from fedlearner_webconsole.dataset.delete_dependency import DatasetDeleteDependency +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobState, DatasetType, \ + DatasetJobSchedulerState +from fedlearner_webconsole.db import db +from testing.no_web_server_test_case import NoWebServerTestCase + + +class DatasetDeleteDependencyTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + self.default_dataset1 = Dataset(name='default dataset1', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5)) + session.add(self.default_dataset1) + self.default_dataset2 = Dataset(name='default dataset2', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5)) + session.add(self.default_dataset2) + self.default_dataset3 = Dataset(name='default dataset3', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5)) + session.add(self.default_dataset3) + self.default_dataset4 = Dataset(name='default dataset4', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5)) + session.add(self.default_dataset4) + self.default_dataset5 = Dataset(name='default dataset5', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5)) + session.add(self.default_dataset5) + session.commit() + with db.session_scope() as session: + parent_dataset_job1 = DatasetJob(id=1, + uuid='parent_dataset_job_uuid_1', + project_id=1, + input_dataset_id=100, + output_dataset_id=1, + state=DatasetJobState.RUNNING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(parent_dataset_job1) + child_dataset_job1 = DatasetJob(id=2, + uuid='child_dataset_job_uuid_1', + project_id=1, + input_dataset_id=1, + output_dataset_id=100, + state=DatasetJobState.FAILED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(child_dataset_job1) + parent_dataset_job2 = DatasetJob(id=3, + uuid='parent_dataset_job_uuid_2', + project_id=1, + input_dataset_id=100, + output_dataset_id=2, + state=DatasetJobState.SUCCEEDED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(parent_dataset_job2) + child_dataset_job2 = DatasetJob(id=4, + uuid='child_dataset_job_uuid_2', + project_id=1, + input_dataset_id=2, + output_dataset_id=100, + state=DatasetJobState.PENDING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(child_dataset_job2) + self.default_job1 = Job(name='test-train-job-1', + state=JobState.WAITING, + job_type=JobType.NN_MODEL_TRANINING, + workflow_id=1, + project_id=1) + session.add(self.default_job1) + self.default_model_job1 = ModelJob(id=1, + name='test-nn-job-1', + job_name=self.default_job1.name, + dataset_id=3) + session.add(self.default_model_job1) + self.default_job2 = 
Job(name='test-train-job-2', + state=JobState.COMPLETED, + job_type=JobType.NN_MODEL_TRANINING, + workflow_id=1, + project_id=1) + session.add(self.default_job2) + self.default_model_job2 = ModelJob(id=2, + name='test-nn-job-2', + job_name=self.default_job2.name, + dataset_id=4) + session.add(self.default_model_job2) + parent_dataset_job4 = DatasetJob(id=5, + uuid='parent_dataset_job_uuid_4', + project_id=1, + input_dataset_id=100, + output_dataset_id=4, + state=DatasetJobState.SUCCEEDED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(parent_dataset_job4) + parent_dataset_job5 = DatasetJob(id=6, + uuid='parent_dataset_job_uuid_5', + project_id=1, + input_dataset_id=100, + output_dataset_id=5, + state=DatasetJobState.SUCCEEDED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(parent_dataset_job5) + child_dataset_job5 = DatasetJob(id=7, + uuid='child_dataset_job_uuid_5', + project_id=1, + input_dataset_id=5, + output_dataset_id=100, + state=DatasetJobState.FAILED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(child_dataset_job5) + session.commit() + + def test_is_deletable(self): + # TODO(wangzeju): Not covering all branches + with db.session_scope() as session: + dataset_delete_dependency = DatasetDeleteDependency(session) + # test deleting a dataset that is not finished + dataset1 = session.query(Dataset).get(1) + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset1) + self.assertFalse(is_deletable) + + # test dataset with running dependent dataset_job + dataset2 = session.query(Dataset).get(2) + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset2) + self.assertFalse(is_deletable) + + # test dataset with runnable cron dataset_job + dataset_job = session.query(DatasetJob).get(4) + dataset_job.state = DatasetJobState.SUCCEEDED + dataset_job.scheduler_state = DatasetJobSchedulerState.RUNNABLE + session.flush() + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset2) + self.assertFalse(is_deletable) + self.assertEqual( + msg[0], 'dependent cron dataset_job is still runnable, plz stop scheduler first!
dataset_jobs_id: [4]') + + # test the dataset is being used by a model job + dataset3 = session.query(Dataset).get(3) + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset3) + self.assertFalse(is_deletable) + + # test a dataset whose model jobs are all in terminal states + dataset4 = session.query(Dataset).get(4) + with patch('fedlearner_webconsole.mmgr.models.ModelJob.state', new_callable=PropertyMock) as mock_state: + mock_state.return_value = WorkflowExternalState.COMPLETED + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset4) + self.assertTrue(is_deletable) + + with patch('fedlearner_webconsole.mmgr.models.ModelJob.state', new_callable=PropertyMock) as mock_state: + mock_state.return_value = WorkflowExternalState.STOPPED + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset4) + self.assertTrue(is_deletable) + + with patch('fedlearner_webconsole.mmgr.models.ModelJob.state', new_callable=PropertyMock) as mock_state: + mock_state.return_value = WorkflowExternalState.INVALID + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset4) + self.assertTrue(is_deletable) + + with patch('fedlearner_webconsole.mmgr.models.ModelJob.state', new_callable=PropertyMock) as mock_state: + mock_state.return_value = WorkflowExternalState.RUNNING + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset4) + self.assertFalse(is_deletable) + + # test a deletable dataset + dataset5 = session.query(Dataset).get(5) + is_deletable, msg = dataset_delete_dependency.is_deletable(dataset5) + self.assertTrue(is_deletable) + + + if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/filter_funcs.py b/web_console_v2/api/fedlearner_webconsole/dataset/filter_funcs.py new file mode 100644 index 000000000..4747bf4c7 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/filter_funcs.py @@ -0,0 +1,56 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + + from sqlalchemy import and_, or_ + + from fedlearner_webconsole.dataset.models import Dataset, DatasetFormat, PublishFrontendState + from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus + from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus + from fedlearner_webconsole.proto.filtering_pb2 import SimpleExpression + + + def dataset_format_filter_op_in(simple_exp: SimpleExpression): + filter_list = [] + dataset_format_str_list = [dataset_format.name for dataset_format in DatasetFormat] + for dataset_format in simple_exp.list_value.string_list: + if dataset_format not in dataset_format_str_list: + raise ValueError(f'dataset_format does not have type {dataset_format}') + filter_list.append(DatasetFormat[dataset_format].value) + return Dataset.dataset_format.in_(filter_list) + + + def dataset_format_filter_op_equal(simple_exp: SimpleExpression): + for dataset_format in DatasetFormat: + if simple_exp.string_value == dataset_format.name: + return Dataset.dataset_format == dataset_format.value + raise ValueError(f'dataset_format does not have type {simple_exp.string_value}') + + + def dataset_publish_frontend_filter_op_equal(simple_exp: SimpleExpression): + if simple_exp.string_value == PublishFrontendState.PUBLISHED.name: + return and_(Dataset.is_published.is_(True), Dataset.ticket_status == TicketStatus.APPROVED) + if simple_exp.string_value == PublishFrontendState.TICKET_PENDING.name: + return and_(Dataset.is_published.is_(True), Dataset.ticket_status == TicketStatus.PENDING) + if simple_exp.string_value == PublishFrontendState.TICKET_DECLINED.name: + return and_(Dataset.is_published.is_(True), Dataset.ticket_status == TicketStatus.DECLINED) + return Dataset.is_published.is_(False) + + + def dataset_auth_status_filter_op_in(simple_exp: SimpleExpression): + filter_list = [AuthStatus[auth_status] for auth_status in simple_exp.list_value.string_list] + filter_exp = Dataset.auth_status.in_(filter_list) + if AuthStatus.AUTHORIZED in filter_list: + filter_exp = or_(Dataset.auth_status.is_(None), filter_exp) + return filter_exp diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/filter_funcs_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/filter_funcs_test.py new file mode 100644 index 000000000..8f7a03e31 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/filter_funcs_test.py @@ -0,0 +1,94 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
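One detail worth calling out in `dataset_auth_status_filter_op_in`: when `AUTHORIZED` is among the requested statuses, rows whose `auth_status` column is NULL are also matched. A plausible reading is backward compatibility for datasets created before the auth feature existed, though that is an inference from the code, not a documented guarantee. The tests below verify the generated SQL; a condensed view:

```python
# Condensed from the tests that follow; the expressions mirror their inputs.
from fedlearner_webconsole.proto.filtering_pb2 import SimpleExpression

exp = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['AUTHORIZED']))
# -> WHERE datasets_v2.auth_status IS NULL OR datasets_v2.auth_status IN ('AUTHORIZED')

exp = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['PENDING', 'WITHDRAW']))
# -> WHERE datasets_v2.auth_status IN ('PENDING', 'WITHDRAW')
```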
+# + + import unittest + + from testing.no_web_server_test_case import NoWebServerTestCase + from fedlearner_webconsole.db import db + from fedlearner_webconsole.dataset.filter_funcs import dataset_format_filter_op_equal, dataset_format_filter_op_in, \ + dataset_publish_frontend_filter_op_equal, dataset_auth_status_filter_op_in + from fedlearner_webconsole.dataset.models import Dataset + from fedlearner_webconsole.proto.filtering_pb2 import SimpleExpression + + + class FilterFuncsTest(NoWebServerTestCase): + + def test_dataset_format_filter_op_in(self): + # test pass + expression = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['TABULAR', 'IMAGE'])) + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_format_filter_op_in(expression)) + self.assertTrue('WHERE datasets_v2.dataset_format IN (0, 1)' in self.generate_mysql_statement(query)) + # test raise + with self.assertRaises(ValueError): + expression = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['FAKE'])) + dataset_format_filter_op_in(expression) + + def test_dataset_format_filter_op_equal(self): + # test pass + expression = SimpleExpression(string_value='TABULAR') + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_format_filter_op_equal(expression)) + self.assertTrue('WHERE datasets_v2.dataset_format = 0' in self.generate_mysql_statement(query)) + # test raise + with self.assertRaises(ValueError): + expression = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['FAKE'])) + dataset_format_filter_op_equal(expression) + + def test_dataset_publish_frontend_filter_op_equal(self): + # test published + expression = SimpleExpression(string_value='PUBLISHED') + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_publish_frontend_filter_op_equal(expression)) + self.assertTrue('WHERE datasets_v2.is_published IS true AND datasets_v2.ticket_status = \'APPROVED\'' in + self.generate_mysql_statement(query)) + + # test unpublished + expression = SimpleExpression(string_value='UNPUBLISHED') + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_publish_frontend_filter_op_equal(expression)) + self.assertTrue('WHERE datasets_v2.is_published IS false' in self.generate_mysql_statement(query)) + + # test ticket_pending + expression = SimpleExpression(string_value='TICKET_PENDING') + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_publish_frontend_filter_op_equal(expression)) + self.assertTrue('WHERE datasets_v2.is_published IS true AND datasets_v2.ticket_status = \'PENDING\'' in + self.generate_mysql_statement(query)) + + # test ticket declined + expression = SimpleExpression(string_value='TICKET_DECLINED') + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_publish_frontend_filter_op_equal(expression)) + self.assertTrue('WHERE datasets_v2.is_published IS true AND datasets_v2.ticket_status = \'DECLINED\'' in + self.generate_mysql_statement(query)) + + def test_dataset_auth_status_filter_op_in(self): + # test authorized + expression = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['AUTHORIZED'])) + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_auth_status_filter_op_in(expression)) + self.assertTrue('WHERE datasets_v2.auth_status IS NULL OR datasets_v2.auth_status IN (\'AUTHORIZED\')' in + self.generate_mysql_statement(query)) + # 
test others + expression = SimpleExpression(list_value=SimpleExpression.ListValue(string_list=['PENDING', 'WITHDRAW'])) + with db.session_scope() as session: + query = session.query(Dataset).filter(dataset_auth_status_filter_op_in(expression)) + self.assertTrue( + 'WHERE datasets_v2.auth_status IN (\'PENDING\', \'WITHDRAW\')' in self.generate_mysql_statement(query)) + + + if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/import_handler.py b/web_console_v2/api/fedlearner_webconsole/dataset/import_handler.py deleted file mode 100644 index 38a800c07..000000000 --- a/web_console_v2/api/fedlearner_webconsole/dataset/import_handler.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import logging -import threading -import os -from concurrent.futures.thread import ThreadPoolExecutor -from datetime import timedelta, datetime - -from fedlearner_webconsole.dataset.models import DataBatch, BatchState -from fedlearner_webconsole.db import db -from fedlearner_webconsole.utils.file_manager import FileManager -from fedlearner_webconsole.proto import dataset_pb2 - - -class ImportHandler(object): - def __init__(self): - self._executor = ThreadPoolExecutor(max_workers=os.cpu_count() * 3) - self._file_manager = FileManager() - self._pending_imports = set() - self._running_imports = set() - self._import_lock = threading.Lock() - self._app = None - - def __del__(self): - self._executor.shutdown() - - def init(self, app): - self._app = app - - def schedule_to_handle(self, dataset_batch_ids): - if isinstance(dataset_batch_ids, int): - dataset_batch_ids = [dataset_batch_ids] - self._pending_imports.update(dataset_batch_ids) - - def _copy_file(self, - source_path, - destination_path, - move=False, - num_retry=3): - logging.info('%s from %s to %s', 'moving' if move else 'copying', - source_path, destination_path) - # Creates parent folders if needed - parent_folder = os.path.dirname(destination_path) - self._file_manager.mkdir(parent_folder) - success = False - error_message = '' - for _ in range(num_retry): - try: - if move: - self._file_manager.move(source_path, destination_path) - else: - self._file_manager.copy(source_path, destination_path) - success = True - break - except Exception as e: # pylint: disable=broad-except - logging.error( - 'Error occurred when importing file from %s to %s', - source_path, destination_path) - error_message = str(e) - file = dataset_pb2.File(source_path=source_path, - destination_path=destination_path) - if not success: - file.error_message = error_message - file.state = dataset_pb2.File.State.FAILED - else: - file.size = self._file_manager.ls(destination_path)[0].size - file.state = dataset_pb2.File.State.COMPLETED - return file - - def _import_batch(self, batch_id): - self._import_lock.acquire() - if batch_id in self._running_imports: - return - self._running_imports.add(batch_id) - self._import_lock.release() -
- # Pushes app context to make db session work - self._app.app_context().push() - - logging.info('Importing batch %d', batch_id) - batch = DataBatch.query.get(batch_id) - batch.state = BatchState.IMPORTING - db.session.commit() - db.session.refresh(batch) - details = batch.get_details() - - for file in details.files: - if file.state == dataset_pb2.File.State.UNSPECIFIED: - # Recovers the state - try: - destination_existed = len( - self._file_manager.ls(file.destination_path)) > 0 - except Exception: # pylint: disable=broad-except - destination_existed = False - if destination_existed: - file.state = dataset_pb2.File.State.COMPLETED - continue - # Moves/Copies - file.MergeFrom( - self._copy_file(source_path=file.source_path, - destination_path=file.destination_path, - move=batch.move)) - - batch.set_details(details) - db.session.commit() - - self._import_lock.acquire() - self._running_imports.remove(batch_id) - self._import_lock.release() - - def handle(self, pull=False): - """Handles all the batches in the queue or all batches which - should be imported.""" - batches_to_run = self._pending_imports - self._pending_imports = set() - if pull: - # TODO: should separate pull logic to a cron job, - # otherwise there will be a race condition that two handlers - # are trying to move the same batch - one_hour_ago = datetime.utcnow() - timedelta(hours=1) - pulled_batches = db.session.query(DataBatch.id).filter( - (DataBatch.state == BatchState.NEW) | - (DataBatch.state == BatchState.IMPORTING))\ - .filter(DataBatch.updated_at < one_hour_ago)\ - .all() - pulled_ids = [bid for bid, in pulled_batches] - batches_to_run.update(pulled_ids) - - for batch in batches_to_run: - self._executor.submit(self._import_batch, batch) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/BUILD.bazel new file mode 100644 index 000000000..aef156e80 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/BUILD.bazel @@ -0,0 +1,107 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "job_configer", + srcs = [ + "analyzer_configer.py", + "base_configer.py", + "data_alignment_configer.py", + "dataset_job_configer.py", + "export_configer.py", + "hash_data_join_configer.py", + "import_source_configer.py", + "light_client_ot_psi_data_join_configer.py", + "light_client_rsa_psi_data_join_configer.py", + "ot_psi_data_join_configer.py", + "rsa_psi_data_join_configer.py", + ], + data = [ + "//web_console_v2/api/fedlearner_webconsole/sys_preset_templates", + ], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:common_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:data_path_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:schema_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:utils_lib", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_rsa//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "dataset_job_configer_test", + size = "medium", + srcs = [ + "dataset_job_configer_test.py", + ], + imports = ["../../.."], + main = "dataset_job_configer_test.py", + deps = [ + ":job_configer", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "light_client_ot_psi_data_join_configer_test", + size = "small", + srcs = [ + "light_client_ot_psi_data_join_configer_test.py", + ], + imports = ["../../.."], + main = "light_client_ot_psi_data_join_configer_test.py", + deps = [ + ":job_configer", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "export_configer_test", + size = "small", + srcs = [ + "export_configer_test.py", + ], + imports = ["../../.."], + main = "export_configer_test.py", + deps = [ + ":job_configer", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/analyzer_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/analyzer_configer.py new file mode 100644 index 000000000..ff5a83d83 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/analyzer_configer.py @@ -0,0 +1,79 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + + from datetime import datetime + + from typing import List, Optional + + from fedlearner_webconsole.dataset.models import Dataset, DatasetFormat + from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory + from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config, filter_user_variables + from fedlearner_webconsole.exceptions import InvalidArgumentException + from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs + from fedlearner_webconsole.utils.workflow import zip_workflow_variables + from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService + from fedlearner_webconsole.workflow_template.utils import make_variable + from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + + class AnalyzerConfiger(BaseConfiger): + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-analyzer') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + # return variables whose tag is RESOURCE_ALLOCATION or INPUT_PARAM + return filter_user_variables(list(zip_workflow_variables(self.get_config()))) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + input_dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, input_batch_path_variable) + data_type_variable = make_variable(name='data_type', + typed_value=DatasetFormat(input_dataset.dataset_format).name.lower()) + set_variable_value_to_job_config(job_config, data_type_variable) + + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + data_batch = self._get_data_batch(dataset, event_time) + batch_name = data_batch.name or data_batch.batch_name + dataset_path = dataset.path + thumbnail_path = DatasetDirectory(dataset_path).thumbnails_path(batch_name) + thumbnail_path_variable = make_variable(name='thumbnail_path', typed_value=thumbnail_path) + set_variable_value_to_job_config(job_config, thumbnail_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/base_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/base_configer.py new file mode 100644 index
000000000..f4e763bcc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/base_configer.py @@ -0,0 +1,107 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +from datetime import datetime + +from typing import List, Optional +from sqlalchemy.orm import Session + +from fedlearner_webconsole.dataset.models import Dataset, DataBatch, DatasetType +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto import dataset_pb2 +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 +from fedlearner_webconsole.utils.pp_datetime import to_timestamp + + +def get_my_pure_domain_name() -> str: + """Get pure domain name of our side + + Returns: + str: pure domain name + """ + return SettingService.get_system_info().pure_domain_name + + +def set_variable_value_to_job_config(job_config: dataset_pb2.DatasetJobConfig, target_variable: common_pb2.Variable): + for variable in job_config.variables: + if variable.name == target_variable.name and variable.value_type == target_variable.value_type: + variable.typed_value.CopyFrom(target_variable.typed_value) + break + else: + job_config.variables.append(target_variable) + + +def filter_user_variables(variables: List[common_pb2.Variable]) -> List[common_pb2.Variable]: + user_variables = [] + for variable in variables: + if variable.tag in ['RESOURCE_ALLOCATION', 'INPUT_PARAM']: + user_variables.append(variable) + return user_variables + + +class BaseConfiger(metaclass=abc.ABCMeta): + """Base interface for configuring dataset_job global_configs for different job kinds + Routines: + user_variables: + Usage: Get the list of variables that users can configure themselves. + When: [Coordinator] API user gets the dataset_job definitions. + auto_config_variables: + Usage: Auto-configure the variables the real job needs, without exposing them to users. + When: [Coordinator] API Layer that creates the dataset_job resource. + config_local_variables: + Usage: Configure local variables that are specific to each participant.
+ When: [Participant] The DatasetJob scheduler of each participant + """ + + def __init__(self, session: Session): + self._session = session + + @abc.abstractmethod + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + """Get workflow_definition of this dataset_job_kind + + Returns: + workflow_definition_pb2.WorkflowDefinition: workflow definition according to given kind + """ + + @property + @abc.abstractmethod + def user_variables(self) -> List[common_pb2.Variable]: + pass + + @abc.abstractmethod + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + pass + + @abc.abstractmethod + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + pass + + def _get_data_batch(self, dataset: Dataset, event_time: Optional[datetime] = None) -> DataBatch: + if dataset.dataset_type == DatasetType.PSI: + return dataset.get_single_batch() + data_batch: DataBatch = self._session.query(DataBatch).filter(DataBatch.dataset_id == dataset.id).filter( + DataBatch.event_time == event_time).first() + if data_batch is None: + raise InvalidArgumentException( + details=f'failed to find data_batch, event_time: {to_timestamp(event_time)}, \ + dataset id: {dataset.id}') + return data_batch diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/data_alignment_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/data_alignment_configer.py new file mode 100644 index 000000000..277162225 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/data_alignment_configer.py @@ -0,0 +1,121 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
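For reference, `set_variable_value_to_job_config` in base_configer.py above implements upsert semantics: the `for ... else` appends the target variable only when the loop finishes without a `break`, i.e. when no existing variable matches on both name and value_type; otherwise the matching variable's typed_value is overwritten in place. A minimal sketch of that behavior (illustrative only; it assumes the `fedlearner_webconsole` package is importable, and the proto messages are the real ones introduced in this patch):

```python
# Illustrative sketch: exercises the upsert rule of set_variable_value_to_job_config.
from google.protobuf.struct_pb2 import Value

from fedlearner_webconsole.proto import common_pb2, dataset_pb2
from fedlearner_webconsole.dataset.job_configer.base_configer import set_variable_value_to_job_config

job_config = dataset_pb2.DatasetJobConfig(dataset_uuid='u123')
job_config.variables.append(
    common_pb2.Variable(name='data_type',
                        typed_value=Value(string_value='tabular'),
                        value_type=common_pb2.Variable.ValueType.STRING))

# Same name and value_type: the existing variable is updated in place, not duplicated.
set_variable_value_to_job_config(
    job_config,
    common_pb2.Variable(name='data_type',
                        typed_value=Value(string_value='image'),
                        value_type=common_pb2.Variable.ValueType.STRING))
assert len(job_config.variables) == 1
assert job_config.variables[0].typed_value.string_value == 'image'

# Unseen name: the variable is appended instead.
set_variable_value_to_job_config(
    job_config,
    common_pb2.Variable(name='file_format',
                        typed_value=Value(string_value='tfrecords'),
                        value_type=common_pb2.Variable.ValueType.STRING))
assert len(job_config.variables) == 2
```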
+# + +from datetime import datetime +import json +import os + +from typing import List, Optional + +from fedlearner_webconsole.dataset.models import Dataset, DatasetFormat +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config +from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto import dataset_pb2 +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.schema import spark_schema_to_json_schema +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class DataAlignmentConfiger(BaseConfiger): + USER_VARIABLES_NAME_SET = { + 'driver_cores', + 'driver_mem', + 'executor_cores', + 'executor_mem', + } + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-alignment-task') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + real_user_variables = [] + for variable in zip_workflow_variables(self.get_config()): + if variable.name in self.USER_VARIABLES_NAME_SET: + real_user_variables.append(variable) + + return real_user_variables + + def auto_config_variables( + self, global_configs: dataset_pb2.DatasetJobGlobalConfigs) -> dataset_pb2.DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + dataset_path = dataset.path + + spark_schema = FileManager().read(os.path.join(dataset_path, 'schema.json')) + json_schema_str = json.dumps(spark_schema_to_json_schema(json.loads(spark_schema))) + for job_config in global_configs.global_configs.values(): + json_schema_variable = make_variable(name='json_schema', typed_value=json_schema_str) + set_variable_value_to_job_config(job_config, json_schema_variable) + data_type_variable = make_variable(name='data_type', + typed_value=DatasetFormat(dataset.dataset_format).name.lower()) + set_variable_value_to_job_config(job_config, data_type_variable) + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + output_dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset 
{result_dataset_uuid}') + + input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + input_batch_path = get_batch_data_path(input_batch) + output_dataset_path = output_dataset.path + output_batch_path = output_batch.path + output_batch_name = output_batch.batch_name + thumbnail_path = DatasetDirectory(dataset_path=output_dataset_path).thumbnails_path( + batch_name=output_batch_name) + + input_dataset_path_variable = make_variable(name='input_dataset_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, input_dataset_path_variable) + + input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_batch_path) + set_variable_value_to_job_config(job_config, input_batch_path_variable) + + output_dataset_path_variable = make_variable(name='output_dataset_path', typed_value=output_dataset_path) + set_variable_value_to_job_config(job_config, output_dataset_path_variable) + + output_batch_path_variable = make_variable(name='output_batch_path', typed_value=output_batch_path) + set_variable_value_to_job_config(job_config, output_batch_path_variable) + + thumbnail_path_variable = make_variable(name='thumbnail_path', typed_value=thumbnail_path) + set_variable_value_to_job_config(job_config, thumbnail_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/dataset_job_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/dataset_job_configer.py new file mode 100644 index 000000000..e43082e56 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/dataset_job_configer.py @@ -0,0 +1,54 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
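The path variables that `config_local_variables` wires up in data_alignment_configer.py above all follow a single directory convention owned by `DatasetDirectory`; the unit tests later in this patch pin the convention down. The hypothetical helper below exists only to document that layout (the real logic lives in `fedlearner_webconsole.dataset.dataset_directory` and `fedlearner_webconsole.dataset.data_path`, which are not shown in this diff):

```python
# Hypothetical documentation helper; mirrors the layout the unit tests below assert.
import os
from typing import Dict


def batch_layout(dataset_path: str, batch_name: str) -> Dict[str, str]:
    return {
        'output_dataset_path': dataset_path,
        # Each data batch lives under batch/<batch_name>; for STREAMING datasets
        # batch_name is the batch's event time formatted as YYYYMMDD.
        'output_batch_path': os.path.join(dataset_path, 'batch', batch_name),
        # Thumbnails rendered for previews live under meta/<batch_name>/thumbnails.
        'thumbnail_path': os.path.join(dataset_path, 'meta', batch_name, 'thumbnails'),
    }


assert batch_layout('/data/dataset/test_output_dataset', '20220101') == {
    'output_dataset_path': '/data/dataset/test_output_dataset',
    'output_batch_path': '/data/dataset/test_output_dataset/batch/20220101',
    'thumbnail_path': '/data/dataset/test_output_dataset/meta/20220101/thumbnails',
}
```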
+# + +import abc +from typing import Union +from sqlalchemy.orm import Session + +from fedlearner_webconsole.dataset.models import DatasetJobKind +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger +from fedlearner_webconsole.dataset.job_configer.import_source_configer import ImportSourceConfiger +from fedlearner_webconsole.dataset.job_configer.data_alignment_configer import DataAlignmentConfiger +from fedlearner_webconsole.dataset.job_configer.rsa_psi_data_join_configer import RsaPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.export_configer import ExportConfiger +from fedlearner_webconsole.dataset.job_configer.light_client_rsa_psi_data_join_configer import \ + LightClientRsaPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.ot_psi_data_join_configer import OtPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.light_client_ot_psi_data_join_configer import \ + LightClientOtPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.hash_data_join_configer import HashDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.analyzer_configer import AnalyzerConfiger + + +class DatasetJobConfiger(metaclass=abc.ABCMeta): + + @classmethod + def from_kind(cls, kind: Union[DatasetJobKind, str], session: Session) -> BaseConfiger: + handlers_mapper = { + DatasetJobKind.IMPORT_SOURCE: ImportSourceConfiger, + DatasetJobKind.DATA_ALIGNMENT: DataAlignmentConfiger, + DatasetJobKind.RSA_PSI_DATA_JOIN: RsaPsiDataJoinConfiger, + DatasetJobKind.EXPORT: ExportConfiger, + DatasetJobKind.LIGHT_CLIENT_RSA_PSI_DATA_JOIN: LightClientRsaPsiDataJoinConfiger, + DatasetJobKind.OT_PSI_DATA_JOIN: OtPsiDataJoinConfiger, + DatasetJobKind.LIGHT_CLIENT_OT_PSI_DATA_JOIN: LightClientOtPsiDataJoinConfiger, + DatasetJobKind.HASH_DATA_JOIN: HashDataJoinConfiger, + DatasetJobKind.ANALYZER: AnalyzerConfiger, + } + + if isinstance(kind, str): + kind = DatasetJobKind(kind) + + return handlers_mapper[kind](session) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/dataset_job_configer_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/dataset_job_configer_test.py new file mode 100644 index 000000000..2911590d9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/dataset_job_configer_test.py @@ -0,0 +1,864 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
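`DatasetJobConfiger.from_kind` above is the factory callers go through: it accepts either a `DatasetJobKind` member or its string value and returns a configer bound to the given SQLAlchemy session. A minimal usage sketch (illustrative only; 'my_domain' stands in for this party's pure domain name, and both uuids are placeholders for datasets that must already exist):

```python
# Illustrative usage of the from_kind factory, mirroring how the tests below drive configers.
from fedlearner_webconsole.db import db
from fedlearner_webconsole.dataset.models import DatasetJobKind
from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger
from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobConfig, DatasetJobGlobalConfigs

with db.session_scope() as session:
    configer = DatasetJobConfiger.from_kind(DatasetJobKind.ANALYZER, session)
    global_configs = DatasetJobGlobalConfigs(
        global_configs={'my_domain': DatasetJobConfig(dataset_uuid='input-dataset-uuid')})
    # Coordinator side: fill in the variables users never see.
    global_configs = configer.auto_config_variables(global_configs)
    # Participant side: fill in the paths that are local to this party.
    global_configs = configer.config_local_variables(global_configs, result_dataset_uuid='output-dataset-uuid')
```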
+# + +# TODO(liuhehan): split this UT into multiple files +# pylint: disable=protected-access +from datetime import datetime, timedelta +import json +import os +import unittest +from unittest.mock import patch + +from google.protobuf.struct_pb2 import Value + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobConfig, DatasetJobGlobalConfigs +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.db import db +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.dataset.models import DataBatch, DataSource, Dataset, DatasetJob, DatasetJobKind, \ + DatasetJobState, DatasetKindV2, DatasetMetaInfo, ImportType, DatasetType +from fedlearner_webconsole.dataset.job_configer.import_source_configer import ImportSourceConfiger +from fedlearner_webconsole.dataset.job_configer.data_alignment_configer import DataAlignmentConfiger +from fedlearner_webconsole.dataset.job_configer.rsa_psi_data_join_configer import RsaPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.light_client_rsa_psi_data_join_configer import \ + LightClientRsaPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.ot_psi_data_join_configer import OtPsiDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.hash_data_join_configer import HashDataJoinConfiger +from fedlearner_webconsole.dataset.job_configer.analyzer_configer import AnalyzerConfiger + + +def fake_spark_schema(*args) -> str: + del args + + return json.dumps({ + 'type': + 'struct', + 'fields': [{ + 'name': 'raw_id', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'f01', + 'type': 'float', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'image', + 'type': 'binary', + 'nullable': True, + 'metadata': {} + }] + }) + + +class DatasetJobConfigersTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.maxDiff = None + with db.session_scope() as session: + _insert_or_update_templates(session) + + test_project = Project(name='test_project') + session.add(test_project) + session.flush([test_project]) + + data_source = DataSource(id=1, + name='test_data_source', + uuid=resource_uuid(), + path='/data/some_data_source/') + session.add(data_source) + + test_input_dataset = Dataset(id=2, + name='test_input_dataset', + uuid=resource_uuid(), + is_published=False, + project_id=test_project.id, + path='/data/dataset/test_input_dataset', + dataset_kind=DatasetKindV2.RAW) + session.add(test_input_dataset) + session.flush([test_input_dataset]) + + test_input_data_batch = DataBatch(dataset_id=test_input_dataset.id, + path=os.path.join(test_input_dataset.path, 'batch/test_input_data_batch')) + session.add(test_input_data_batch) + + test_output_dataset = Dataset(id=3, + name='test_output_dataset', + uuid=resource_uuid(), + is_published=True, + project_id=test_project.id, + path='/data/dataset/test_output_dataset', + dataset_kind=DatasetKindV2.PROCESSED) + session.add(test_output_dataset) + session.flush([test_output_dataset]) + + test_output_data_batch = DataBatch(dataset_id=test_output_dataset.id, + path=os.path.join(test_output_dataset.path,
'batch/test_output_data_batch')) + session.add(test_output_data_batch) + + test_input_streaming_dataset = Dataset(id=4, + name='test_input_dataset', + uuid=resource_uuid(), + is_published=False, + project_id=test_project.id, + path='/data/dataset/test_input_dataset', + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.RAW) + session.add(test_input_streaming_dataset) + session.flush() + + test_input_streaming_data_batch = DataBatch(dataset_id=test_input_streaming_dataset.id, + event_time=datetime(2022, 1, 1), + path=os.path.join(test_input_dataset.path, 'batch/20220101')) + session.add(test_input_streaming_data_batch) + + test_output_streaming_dataset = Dataset(id=5, + name='test_output_dataset', + uuid=resource_uuid(), + is_published=True, + project_id=test_project.id, + path='/data/dataset/test_output_dataset', + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(test_output_streaming_dataset) + session.flush() + + test_output_streaming_data_batch = DataBatch(dataset_id=test_output_streaming_dataset.id, + event_time=datetime(2022, 1, 1), + path=os.path.join(test_output_dataset.path, 'batch/20220101')) + session.add(test_output_streaming_data_batch) + + self._data_source_uuid = data_source.uuid + self._input_dataset_uuid = test_input_dataset.uuid + self._output_dataset_uuid = test_output_dataset.uuid + self._input_streaming_dataset_uuid = test_input_streaming_dataset.uuid + self._output_streaming_dataset_uuid = test_output_streaming_dataset.uuid + + session.commit() + + def test_get_data_batch(self): + # test PSI dataset + with db.session_scope() as session: + dataset = session.query(Dataset).filter(Dataset.uuid == self._output_dataset_uuid).first() + data_batch = ImportSourceConfiger(session)._get_data_batch(dataset=dataset) + self.assertEqual(data_batch.path, '/data/dataset/test_output_dataset/batch/test_output_data_batch') + + # test STREAMING dataset + with db.session_scope() as session: + dataset = session.query(Dataset).filter(Dataset.uuid == self._output_streaming_dataset_uuid).first() + data_batch = ImportSourceConfiger(session)._get_data_batch(dataset=dataset, event_time=datetime(2022, 1, 1)) + self.assertEqual(data_batch.path, '/data/dataset/test_output_dataset/batch/20220101') + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_import_source(self): + with db.session_scope() as session: + # This check is meant to flag changes to the preset template + config = ImportSourceConfiger(session).get_config() + self.assertEqual(len(config.variables), 22) + + with db.session_scope() as session: + global_configs = ImportSourceConfiger(session).auto_config_variables(global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._data_source_uuid)})) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='file_format', + value='tfrecords', + typed_value=Value(string_value='tfrecords'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='data_type', + value='tabular', + typed_value=Value(string_value='tabular'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + with db.session_scope() as session: + global_configs = ImportSourceConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._data_source_uuid)}),
result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_batch_path', + value='/data/some_data_source/', + typed_value=Value(string_value='/data/some_data_source/'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable( + name='batch_path', + value='/data/dataset/test_output_dataset/batch/test_output_data_batch', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='thumbnail_path', + value='/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails', + typed_value=Value( + string_value='/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='checkers', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='import_type', + value='COPY', + typed_value=Value(string_value='COPY'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + with db.session_scope() as session: + output_dataset: Dataset = session.query(Dataset).filter(Dataset.uuid == self._output_dataset_uuid).first() + output_dataset.set_meta_info(DatasetMetaInfo(schema_checkers=['RAW_ID_CHECKER', 'NUMERIC_COLUMNS_CHECKER'])) + output_dataset.import_type = ImportType.NO_COPY + session.commit() + + with db.session_scope() as session: + global_configs = ImportSourceConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._data_source_uuid)}), + result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_batch_path', + value='/data/some_data_source/', + typed_value=Value(string_value='/data/some_data_source/'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable( + name='batch_path', + value='/data/dataset/test_output_dataset/batch/test_output_data_batch', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='thumbnail_path', + value='/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails', + typed_value=Value( + string_value='/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='checkers', + value='RAW_ID_CHECKER,NUMERIC_COLUMNS_CHECKER', + typed_value=Value(string_value='RAW_ID_CHECKER,NUMERIC_COLUMNS_CHECKER'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='import_type', + value='NO_COPY', + typed_value=Value(string_value='NO_COPY'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + 
value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='skip_analyzer', + value='true', + typed_value=Value(string_value='true'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + with db.session_scope() as session: + dataset_job_streaming = DatasetJob(id=1, + uuid='dataset_job streaming', + project_id=1, + input_dataset_id=1, + output_dataset_id=5, + kind=DatasetJobKind.IMPORT_SOURCE, + state=DatasetJobState.PENDING, + time_range=timedelta(days=1)) + session.add(dataset_job_streaming) + session.commit() + + # test with event_time + with db.session_scope() as session: + global_configs = ImportSourceConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._data_source_uuid)}), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_batch_path', + value='/data/some_data_source/20220101', + typed_value=Value(string_value='/data/some_data_source/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='batch_path', + value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='thumbnail_path', + value='/data/dataset/test_output_dataset/meta/20220101/thumbnails', + typed_value=Value(string_value='/data/dataset/test_output_dataset/meta/20220101/thumbnails'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='checkers', + value='', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='import_type', + value='COPY', + typed_value=Value(string_value='COPY'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.job_configer.data_alignment_configer.FileManager.read', fake_spark_schema) + def test_data_alignment(self): + with db.session_scope() as session: + # This check is meant to flag changes to the preset template + config = DataAlignmentConfiger(session).get_config() + variables = zip_workflow_variables(config) + self.assertEqual(len(list(variables)), 18) + + with db.session_scope() as session: + self.assertEqual(len(DataAlignmentConfiger(session).user_variables), 4) + + with db.session_scope() as session: + global_configs = DataAlignmentConfiger(session).auto_config_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + })) + for job_config in global_configs.global_configs.values(): + variables = job_config.variables + json_schema_variable = [v for v in variables if v.name == 'json_schema'][0] +
self.assertEqual(json_schema_variable.value_type, common_pb2.Variable.ValueType.STRING) + data_type_variable = [v for v in variables if v.name == 'data_type'][0] + self.assertEqual(data_type_variable.typed_value.string_value, 'tabular') + + with db.session_scope() as session: + global_configs = DataAlignmentConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_dataset_uuid) + variables = global_configs.global_configs['test_domain'].variables + input_batch_path_variable = [v for v in variables if v.name == 'input_batch_path'][0] + self.assertEqual(input_batch_path_variable.typed_value.string_value, + '/data/dataset/test_input_dataset/batch/test_input_data_batch') + thumbnail_path_variable = [v for v in variables if v.name == 'thumbnail_path'][0] + self.assertEqual(thumbnail_path_variable.typed_value.string_value, + '/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails') + + variables = global_configs.global_configs['test_domain_2'].variables + self.assertListEqual([v for v in variables if v.name == 'input_batch_path'], []) + self.assertListEqual([v for v in variables if v.name == 'thumbnail_path'], []) + + # test with event_time + with db.session_scope() as session: + global_configs = DataAlignmentConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_dataset_path', + value='/data/dataset/test_input_dataset', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='input_batch_path', + value='/data/dataset/test_input_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_path', + value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='thumbnail_path', + value='/data/dataset/test_output_dataset/meta/20220101/thumbnails', + typed_value=Value(string_value='/data/dataset/test_output_dataset/meta/20220101/thumbnails'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_rsa_psi_data_join(self): + with db.session_scope() as session: + # This check is meant to flag changes to the preset template + config = RsaPsiDataJoinConfiger(session).get_config() + variables =
zip_workflow_variables(config) + self.assertEqual(len(list(variables)), 20) + + with db.session_scope() as session: + self.assertEqual(len(RsaPsiDataJoinConfiger(session).user_variables), 9) + + with db.session_scope() as session: + global_configs = RsaPsiDataJoinConfiger(session).auto_config_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + })) + + for pure_domain_name, job_config in global_configs.global_configs.items(): + if pure_domain_name == 'test_domain': + for var in job_config.variables: + if var.name == 'role': + self.assertEqual(var.typed_value.string_value, 'Leader') + elif var.name == 'rsa_key_pem': + self.assertIn('-----BEGIN RSA PRIVATE KEY-----', var.typed_value.string_value) + else: + for var in job_config.variables: + if var.name == 'role': + self.assertEqual(var.typed_value.string_value, 'Follower') + elif var.name == 'rsa_key_pem': + self.assertIn('-----BEGIN RSA PUBLIC KEY-----', var.typed_value.string_value) + + for var in job_config.variables: + if var.name == 'rsa_key_path': + self.assertEqual(var.typed_value.string_value, '') + + with db.session_scope() as session: + global_configs = RsaPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_dataset_uuid) + + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='dataset', + value='/data/dataset/test_input_dataset/batch/test_input_data_batch', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/test_input_data_batch'), + value_type=Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=Variable.ValueType.STRING), + Variable( + name='output_batch_path', + value='/data/dataset/test_output_dataset/batch/test_output_data_batch', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=Variable.ValueType.STRING), + ]) + + # test with event_time + with db.session_scope() as session: + global_configs = RsaPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='dataset', + value='/data/dataset/test_input_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_path', + 
value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_light_client_rsa_psi_data_join(self): + with db.session_scope() as session: + global_configs = LightClientRsaPsiDataJoinConfiger(session).auto_config_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid)})) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), []) + + with db.session_scope() as session: + global_configs = LightClientRsaPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + DatasetJobConfig(dataset_uuid=self._input_dataset_uuid, + variables=[ + common_pb2.Variable(name='input_dataset_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='input_batch_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_dataset_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_batch_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + }), + result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + common_pb2.Variable(name='input_dataset_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable( + name='input_batch_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/test_input_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_dataset_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable( + name='output_batch_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + # test with event_time + with db.session_scope() as session: + global_configs = LightClientRsaPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_dataset_path', + value='/data/dataset/test_input_dataset', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + 
value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='input_batch_path', + value='/data/dataset/test_input_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_path', + value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_ot_psi_data_join_configer(self): + with db.session_scope() as session: + global_configs = OtPsiDataJoinConfiger(session).auto_config_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + DatasetJobConfig(dataset_uuid=self._input_dataset_uuid, + variables=[ + common_pb2.Variable(name='role', + value_type=common_pb2.Variable.ValueType.STRING), + ]), + 'test_domain_2': + DatasetJobConfig(dataset_uuid='u12345', + variables=[ + common_pb2.Variable(name='role', + value_type=common_pb2.Variable.ValueType.STRING), + ]) + })) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + common_pb2.Variable(name='role', + typed_value=Value(string_value='server'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + self.assertEqual(list(global_configs.global_configs['test_domain_2'].variables), [ + common_pb2.Variable(name='role', + typed_value=Value(string_value='client'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + with db.session_scope() as session: + global_configs = OtPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + DatasetJobConfig(dataset_uuid=self._input_dataset_uuid, + variables=[ + common_pb2.Variable(name='input_dataset_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='input_batch_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_dataset_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_batch_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + }), + result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + common_pb2.Variable(name='input_dataset_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable( + name='input_batch_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/test_input_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_dataset_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + 
value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable( + name='output_batch_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + # test with event_time + with db.session_scope() as session: + global_configs = OtPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_dataset_path', + value='/data/dataset/test_input_dataset', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='input_batch_path', + value='/data/dataset/test_input_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_path', + value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_hash_data_join_configer(self): + with db.session_scope() as session: + global_configs = HashDataJoinConfiger(session).auto_config_variables(global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + DatasetJobConfig(dataset_uuid=self._input_dataset_uuid, + variables=[ + common_pb2.Variable(name='role', + value_type=common_pb2.Variable.ValueType.STRING), + ]), + 'test_domain_2': + DatasetJobConfig(dataset_uuid='u12345', + variables=[ + common_pb2.Variable(name='role', + value_type=common_pb2.Variable.ValueType.STRING), + ]) + })) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + common_pb2.Variable(name='role', + typed_value=Value(string_value='server'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + self.assertEqual(list(global_configs.global_configs['test_domain_2'].variables), [ + common_pb2.Variable(name='role', + typed_value=Value(string_value='client'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + with db.session_scope() as session: + global_configs = HashDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + DatasetJobConfig(dataset_uuid=self._input_dataset_uuid, + variables=[ + common_pb2.Variable(name='input_dataset_path', + typed_value=Value(string_value=''), + 
value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='input_batch_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_dataset_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_batch_path', + typed_value=Value(string_value=''), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + }), + result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + common_pb2.Variable(name='input_dataset_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable( + name='input_batch_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/test_input_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_dataset_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable( + name='output_batch_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + # test with event_time + with db.session_scope() as session: + global_configs = HashDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_dataset_path', + value='/data/dataset/test_input_dataset', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='input_batch_path', + value='/data/dataset/test_input_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_path', + value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_analyzer_configer(self): + with db.session_scope() as session: + # This check is meant to flag changes to the preset template + config = AnalyzerConfiger(session).get_config() + self.assertEqual(len(config.variables), 14) + + with db.session_scope() as session:
global_configs = AnalyzerConfiger(session).auto_config_variables(global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._data_source_uuid)})) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + common_pb2.Variable(name='input_batch_path', + value='/data/some_data_source/', + typed_value=Value(string_value='/data/some_data_source/'), + value_type=common_pb2.Variable.ValueType.STRING), + common_pb2.Variable(name='data_type', + value='tabular', + typed_value=Value(string_value='tabular'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + with db.session_scope() as session: + global_configs = AnalyzerConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._data_source_uuid)}), + result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='thumbnail_path', + value='/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails', + typed_value=Value( + string_value='/data/dataset/test_output_dataset/meta/test_output_data_batch/thumbnails'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + # test with event_time + with db.session_scope() as session: + global_configs = AnalyzerConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid)}), + result_dataset_uuid=self._output_streaming_dataset_uuid, + event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='thumbnail_path', + value='/data/dataset/test_output_dataset/meta/20220101/thumbnails', + typed_value=Value(string_value='/data/dataset/test_output_dataset/meta/20220101/thumbnails'), + value_type=common_pb2.Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=common_pb2.Variable.ValueType.STRING), + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/export_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/export_configer.py new file mode 100644 index 000000000..95900715c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/export_configer.py @@ -0,0 +1,75 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from datetime import datetime + +from typing import List, Optional + +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, filter_user_variables, \ + get_my_pure_domain_name, set_variable_value_to_job_config +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class ExportConfiger(BaseConfiger): + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-export-dataset') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + # return variables whose tag is RESOURCE_ALLOCATION or INPUT_PARAM + return filter_user_variables(list(zip_workflow_variables(self.get_config()))) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + output_dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + + dataset_path_variable = make_variable(name='dataset_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, dataset_path_variable) + + file_format_variable = make_variable(name='file_format', typed_value=input_dataset.store_format.name.lower()) + set_variable_value_to_job_config(job_config, file_format_variable) + + batch_name_variable = make_variable(name='batch_name', typed_value=input_batch.batch_name) + set_variable_value_to_job_config(job_config, batch_name_variable) + + export_path_variable = make_variable(name='export_path', typed_value=output_batch.path) + set_variable_value_to_job_config(job_config, export_path_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/export_configer_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/export_configer_test.py new file mode 100644 index 000000000..99727fbc9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/export_configer_test.py @@ -0,0 +1,183 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime +import os +import unittest +from unittest.mock import patch +from google.protobuf.struct_pb2 import Value + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetKindV2, DatasetType +from fedlearner_webconsole.dataset.job_configer.export_configer import ExportConfiger +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobConfig, DatasetJobGlobalConfigs +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo + + +class ExportConfigersTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + + test_project = Project(name='test_project') + session.add(test_project) + session.flush([test_project]) + + test_input_dataset = Dataset(id=2, + name='test_input_dataset', + uuid=resource_uuid(), + is_published=False, + project_id=test_project.id, + path='/data/dataset/test_input_dataset', + dataset_kind=DatasetKindV2.RAW) + session.add(test_input_dataset) + session.flush([test_input_dataset]) + + test_input_data_batch = DataBatch(dataset_id=test_input_dataset.id, + path=os.path.join(test_input_dataset.path, 'batch/0')) + session.add(test_input_data_batch) + + test_output_dataset = Dataset(id=3, + name='test_output_dataset', + uuid=resource_uuid(), + is_published=True, + project_id=test_project.id, + path='/data/dataset/test_output_dataset', + dataset_kind=DatasetKindV2.PROCESSED) + session.add(test_output_dataset) + session.flush([test_output_dataset]) + + test_output_data_batch = DataBatch(dataset_id=test_output_dataset.id, + path=os.path.join(test_output_dataset.path, 'batch/0')) + session.add(test_output_data_batch) + + test_input_streaming_dataset = Dataset(id=4, + name='test_input_dataset', + uuid=resource_uuid(), + is_published=False, + project_id=test_project.id, + path='/data/dataset/test_input_dataset', + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.RAW) + session.add(test_input_streaming_dataset) + session.flush() + + test_input_streaming_data_batch = DataBatch(dataset_id=test_input_streaming_dataset.id, + event_time=datetime(2022, 1, 1), + path=os.path.join(test_input_dataset.path, 'batch/20220101')) + session.add(test_input_streaming_data_batch) + + test_output_streaming_dataset = Dataset(id=5, + name='test_output_dataset', + uuid=resource_uuid(), + is_published=True, + project_id=test_project.id, + path='/data/dataset/test_output_dataset', + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(test_output_streaming_dataset) + session.flush() + + test_output_streaming_data_batch = 
DataBatch(dataset_id=test_output_streaming_dataset.id,
+                                                         event_time=datetime(2022, 1, 1),
+                                                         path=os.path.join(test_output_dataset.path, 'batch/20220101'))
+            session.add(test_output_streaming_data_batch)
+
+            self._input_dataset_uuid = test_input_dataset.uuid
+            self._output_dataset_uuid = test_output_dataset.uuid
+            self._input_streaming_dataset_uuid = test_input_streaming_dataset.uuid
+            self._output_streaming_dataset_uuid = test_output_streaming_dataset.uuid
+
+            session.commit()
+
+    @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info',
+           lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain'))
+    def test_export(self):
+        with db.session_scope() as session:
+            # This assertion exists to flag any change to the preset template
+            config = ExportConfiger(session).get_config()
+            variables = zip_workflow_variables(config)
+            self.assertEqual(len(list(variables)), 13)
+
+        with db.session_scope() as session:
+            self.assertEqual(len(ExportConfiger(session).user_variables), 8)
+
+        with db.session_scope() as session:
+            resp = ExportConfiger(session).auto_config_variables(global_configs=DatasetJobGlobalConfigs(
+                global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid)}))
+            self.assertEqual(list(resp.global_configs['test_domain'].variables), [])
+
+        # test non-streaming dataset
+        with db.session_scope() as session:
+            global_configs = ExportConfiger(session).config_local_variables(
+                global_configs=DatasetJobGlobalConfigs(
+                    global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid)}),
+                result_dataset_uuid=self._output_dataset_uuid)
+            self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [
+                Variable(name='dataset_path',
+                         value='/data/dataset/test_input_dataset',
+                         typed_value=Value(string_value='/data/dataset/test_input_dataset'),
+                         value_type=Variable.ValueType.STRING),
+                Variable(name='file_format',
+                         value='tfrecords',
+                         typed_value=Value(string_value='tfrecords'),
+                         value_type=Variable.ValueType.STRING),
+                Variable(name='batch_name',
+                         value='0',
+                         typed_value=Value(string_value='0'),
+                         value_type=Variable.ValueType.STRING),
+                Variable(name='export_path',
+                         value='/data/dataset/test_output_dataset/batch/0',
+                         typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/0'),
+                         value_type=Variable.ValueType.STRING),
+            ])
+
+        # test streaming dataset
+        with db.session_scope() as session:
+            global_configs = ExportConfiger(session).config_local_variables(
+                global_configs=DatasetJobGlobalConfigs(
+                    global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid)}),
+                result_dataset_uuid=self._output_streaming_dataset_uuid,
+                event_time=datetime(2022, 1, 1))
+            self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [
+                Variable(name='dataset_path',
+                         value='/data/dataset/test_input_dataset',
+                         typed_value=Value(string_value='/data/dataset/test_input_dataset'),
+                         value_type=Variable.ValueType.STRING),
+                Variable(name='file_format',
+                         value='tfrecords',
+                         typed_value=Value(string_value='tfrecords'),
+                         value_type=Variable.ValueType.STRING),
+                Variable(name='batch_name',
+                         value='20220101',
+                         typed_value=Value(string_value='20220101'),
+                         value_type=Variable.ValueType.STRING),
+                Variable(name='export_path',
+                         value='/data/dataset/test_output_dataset/batch/20220101',
+                         typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'),
+                         value_type=Variable.ValueType.STRING),
+            ])
+
+
+if __name__ == '__main__':
+    
unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/hash_data_join_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/hash_data_join_configer.py new file mode 100644 index 000000000..90d40684e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/hash_data_join_configer.py @@ -0,0 +1,95 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime + +from typing import List, Optional + +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class HashDataJoinConfiger(BaseConfiger): + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService( + self._session).get_workflow_template(name='sys-preset-hash-data-join-analyzer') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + # return all variables and frontend will filter them by tag + return list(zip_workflow_variables(self.get_config())) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + for pure_domain_name, job_config in global_configs.global_configs.items(): + if pure_domain_name == my_domain_name: + role_variable = make_variable(name='role', typed_value='server') + else: + role_variable = make_variable(name='role', typed_value='client') + set_variable_value_to_job_config(job_config, role_variable) + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + 
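+        # The output dataset is resolved by uuid, mirroring the input lookup
+        # above; a miss raises InvalidArgumentException because both uuids come
+        # from the caller's request rather than from internal state.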
output_dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + + input_batch_path = get_batch_data_path(input_batch) + output_batch_path = output_batch.path + + input_dataset_path_variable = make_variable(name='input_dataset_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, input_dataset_path_variable) + + input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_batch_path) + set_variable_value_to_job_config(job_config, input_batch_path_variable) + + output_dataset_path_variable = make_variable(name='output_dataset_path', typed_value=output_dataset.path) + set_variable_value_to_job_config(job_config, output_dataset_path_variable) + + output_batch_path_variable = make_variable(name='output_batch_path', typed_value=output_batch_path) + set_variable_value_to_job_config(job_config, output_batch_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch.batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/import_source_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/import_source_configer.py new file mode 100644 index 000000000..78a3e9610 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/import_source_configer.py @@ -0,0 +1,122 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from datetime import datetime
+import os
+
+from typing import List, Optional
+
+from fedlearner_webconsole.dataset.models import (Dataset, DatasetFormat, DatasetKindV2, ImportType, StoreFormat,
+                                                  DatasetType)
+from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \
+    set_variable_value_to_job_config, filter_user_variables
+from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory
+from fedlearner_webconsole.dataset.util import parse_event_time_to_daily_folder_name, \
+    parse_event_time_to_hourly_folder_name
+from fedlearner_webconsole.exceptions import InvalidArgumentException
+from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs
+from fedlearner_webconsole.utils.workflow import zip_workflow_variables
+from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService
+from fedlearner_webconsole.workflow_template.utils import make_variable
+from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2
+
+
+class ImportSourceConfiger(BaseConfiger):
+
+    def get_config(self) -> workflow_definition_pb2.WorkflowDefinition:
+        template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-converter-analyzer')
+        return template.get_config()
+
+    @property
+    def user_variables(self) -> List[common_pb2.Variable]:
+        # return variables whose tag is RESOURCE_ALLOCATION or INPUT_PARAM
+        return filter_user_variables(list(zip_workflow_variables(self.get_config())))
+
+    def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs:
+        my_domain_name = get_my_pure_domain_name()
+        job_config = global_configs.global_configs[my_domain_name]
+        input_dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first()
+        if input_dataset is None:
+            raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}')
+
+        if input_dataset.store_format is None:
+            if input_dataset.dataset_kind == DatasetKindV2.SOURCE:
+                raise InvalidArgumentException(details=f'data_source {input_dataset.name} is too old and has no store_format, \
+                    please create a new data_source')
+            input_dataset.store_format = StoreFormat.TFRECORDS
+        file_format_variable = make_variable(name='file_format', typed_value=input_dataset.store_format.name.lower())
+        set_variable_value_to_job_config(job_config, file_format_variable)
+        data_type_variable = make_variable(name='data_type',
+                                           typed_value=DatasetFormat(input_dataset.dataset_format).name.lower())
+        set_variable_value_to_job_config(job_config, data_type_variable)
+
+        return global_configs
+
+    def config_local_variables(self,
+                               global_configs: DatasetJobGlobalConfigs,
+                               result_dataset_uuid: str,
+                               event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs:
+        my_domain_name = get_my_pure_domain_name()
+        job_config = global_configs.global_configs[my_domain_name]
+        input_dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first()
+        if input_dataset is None:
+            raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}')
+        output_dataset: Dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first()
+        if output_dataset is None:
+            raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}')
+
+        if output_dataset.dataset_type == DatasetType.PSI:
+            input_batch_path = input_dataset.path
+        else:
+            if output_dataset.parent_dataset_job.is_hourly_cron():
+                folder_name = parse_event_time_to_hourly_folder_name(event_time)
+            else:
+                folder_name = parse_event_time_to_daily_folder_name(event_time)
+            input_batch_path = os.path.join(input_dataset.path, folder_name)
+        output_data_batch = self._get_data_batch(output_dataset, event_time)
+        output_batch_path = output_data_batch.path
+        output_batch_name = output_data_batch.batch_name
+        output_dataset_path = output_dataset.path
+        thumbnail_path = DatasetDirectory(dataset_path=output_dataset_path).thumbnails_path(
+            batch_name=output_batch_name)
+        schema_checkers = list(output_dataset.get_meta_info().schema_checkers)
+
+        # Note: the following variables' names should match those in template `sys-preset-converter-analyzer`
+        input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_batch_path)
+        set_variable_value_to_job_config(job_config, input_batch_path_variable)
+
+        dataset_path_variable = make_variable(name='dataset_path', typed_value=output_dataset_path)
+        set_variable_value_to_job_config(job_config, dataset_path_variable)
+
+        batch_path_variable = make_variable(name='batch_path', typed_value=output_batch_path)
+        set_variable_value_to_job_config(job_config, batch_path_variable)
+
+        thumbnail_path_variable = make_variable(name='thumbnail_path', typed_value=thumbnail_path)
+        set_variable_value_to_job_config(job_config, thumbnail_path_variable)
+
+        schema_checkers_variable = make_variable(name='checkers', typed_value=','.join(schema_checkers))
+        set_variable_value_to_job_config(job_config, schema_checkers_variable)
+
+        import_type_variable = make_variable(name='import_type', typed_value=output_dataset.import_type.name)
+        set_variable_value_to_job_config(job_config, import_type_variable)
+
+        output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch_name)
+        set_variable_value_to_job_config(job_config, output_batch_name_variable)
+
+        if output_dataset.import_type == ImportType.NO_COPY:
+            skip_analyzer_variable = make_variable(name='skip_analyzer', typed_value='true')
+            set_variable_value_to_job_config(job_config, skip_analyzer_variable)
+
+        return global_configs
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_ot_psi_data_join_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_ot_psi_data_join_configer.py
new file mode 100644
index 000000000..e1c8715a4
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_ot_psi_data_join_configer.py
@@ -0,0 +1,82 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +from datetime import datetime + +from typing import List, Optional + +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class LightClientOtPsiDataJoinConfiger(BaseConfiger): + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-light-ot-data-join') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + # return all variables and frontend will filter them by tag + return list(zip_workflow_variables(self.get_config())) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + output_dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + + input_batch_path = get_batch_data_path(input_batch) + output_batch_path = output_batch.path + + input_dataset_path_variable = make_variable(name='input_dataset_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, input_dataset_path_variable) + + input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_batch_path) + set_variable_value_to_job_config(job_config, input_batch_path_variable) + + output_dataset_path_variable = make_variable(name='output_dataset_path', typed_value=output_dataset.path) + set_variable_value_to_job_config(job_config, output_dataset_path_variable) + + output_batch_path_variable = make_variable(name='output_batch_path', typed_value=output_batch_path) + set_variable_value_to_job_config(job_config, output_batch_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch.batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_ot_psi_data_join_configer_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_ot_psi_data_join_configer_test.py new file mode 100644 index 
000000000..24c622f54 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_ot_psi_data_join_configer_test.py @@ -0,0 +1,201 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime +import os +import unittest +from unittest.mock import patch + +from google.protobuf.struct_pb2 import Value + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobConfig, DatasetJobGlobalConfigs +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.db import db +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetKindV2, DatasetType +from fedlearner_webconsole.dataset.job_configer.light_client_ot_psi_data_join_configer import \ + LightClientOtPsiDataJoinConfiger + + +class LightClientOtPsiDataJoinConfigerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + + test_project = Project(name='test_project') + session.add(test_project) + session.flush([test_project]) + + test_input_dataset = Dataset(id=2, + name='test_input_dataset', + uuid=resource_uuid(), + is_published=False, + project_id=test_project.id, + path='/data/dataset/test_input_dataset', + dataset_kind=DatasetKindV2.RAW) + session.add(test_input_dataset) + session.flush([test_input_dataset]) + + test_input_data_batch = DataBatch(dataset_id=test_input_dataset.id, + path=os.path.join(test_input_dataset.path, 'batch/test_input_data_batch')) + session.add(test_input_data_batch) + + test_output_dataset = Dataset(id=3, + name='test_output_dataset', + uuid=resource_uuid(), + is_published=True, + project_id=test_project.id, + path='/data/dataset/test_output_dataset', + dataset_kind=DatasetKindV2.PROCESSED) + session.add(test_output_dataset) + session.flush([test_output_dataset]) + + test_output_data_batch = DataBatch(dataset_id=test_output_dataset.id, + path=os.path.join(test_output_dataset.path, + 'batch/test_output_data_batch')) + session.add(test_output_data_batch) + + test_input_streaming_dataset = Dataset(id=4, + name='test_input_dataset', + uuid=resource_uuid(), + is_published=False, + project_id=test_project.id, + path='/data/dataset/test_input_dataset', + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.RAW) + session.add(test_input_streaming_dataset) + session.flush() + + test_input_streaming_data_batch = DataBatch(dataset_id=test_input_streaming_dataset.id, + event_time=datetime(2022, 1, 1), + path=os.path.join(test_input_dataset.path, 'batch/20220101')) + session.add(test_input_streaming_data_batch) + + 
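+            # The output-side fixtures below mirror the input side: a PROCESSED
+            # STREAMING dataset whose batch carries the same 2022-01-01
+            # event_time, so config_local_variables can pair the input batch
+            # with its output batch in the streaming test case.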
test_output_streaming_dataset = Dataset(id=5, + name='test_output_dataset', + uuid=resource_uuid(), + is_published=True, + project_id=test_project.id, + path='/data/dataset/test_output_dataset', + dataset_type=DatasetType.STREAMING, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(test_output_streaming_dataset) + session.flush() + + test_output_streaming_data_batch = DataBatch(dataset_id=test_output_streaming_dataset.id, + event_time=datetime(2022, 1, 1), + path=os.path.join(test_output_dataset.path, 'batch/20220101')) + session.add(test_output_streaming_data_batch) + + self._input_dataset_uuid = test_input_dataset.uuid + self._output_dataset_uuid = test_output_dataset.uuid + self._input_streaming_dataset_uuid = test_input_streaming_dataset.uuid + self._output_streaming_dataset_uuid = test_output_streaming_dataset.uuid + + session.commit() + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + def test_light_client_ot_psi_data_join(self): + + with db.session_scope() as session: + global_configs = LightClientOtPsiDataJoinConfiger(session).auto_config_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid=self._input_dataset_uuid)})) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), []) + + with db.session_scope() as session: + global_configs = LightClientOtPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': + DatasetJobConfig(dataset_uuid=self._input_dataset_uuid, + variables=[ + Variable(name='input_dataset_path', + typed_value=Value(string_value=''), + value_type=Variable.ValueType.STRING), + Variable(name='input_batch_path', + typed_value=Value(string_value=''), + value_type=Variable.ValueType.STRING), + Variable(name='output_dataset_path', + typed_value=Value(string_value=''), + value_type=Variable.ValueType.STRING), + Variable(name='output_batch_path', + typed_value=Value(string_value=''), + value_type=Variable.ValueType.STRING), + ]) + }), + result_dataset_uuid=self._output_dataset_uuid) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_dataset_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=Variable.ValueType.STRING), + Variable(name='input_batch_path', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/test_input_data_batch'), + value_type=Variable.ValueType.STRING), + Variable(name='output_dataset_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=Variable.ValueType.STRING), + Variable( + name='output_batch_path', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/test_output_data_batch'), + value_type=Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='test_output_data_batch', + typed_value=Value(string_value='test_output_data_batch'), + value_type=Variable.ValueType.STRING), + ]) + # test with event_time + with db.session_scope() as session: + global_configs = LightClientOtPsiDataJoinConfiger(session).config_local_variables( + global_configs=DatasetJobGlobalConfigs( + global_configs={ + 'test_domain': DatasetJobConfig(dataset_uuid=self._input_streaming_dataset_uuid), + 'test_domain_2': DatasetJobConfig(dataset_uuid='u12345') + }), + result_dataset_uuid=self._output_streaming_dataset_uuid, + 
event_time=datetime(2022, 1, 1)) + self.assertEqual(list(global_configs.global_configs['test_domain'].variables), [ + Variable(name='input_dataset_path', + value='/data/dataset/test_input_dataset', + typed_value=Value(string_value='/data/dataset/test_input_dataset'), + value_type=Variable.ValueType.STRING), + Variable(name='input_batch_path', + value='/data/dataset/test_input_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_input_dataset/batch/20220101'), + value_type=Variable.ValueType.STRING), + Variable(name='output_dataset_path', + value='/data/dataset/test_output_dataset', + typed_value=Value(string_value='/data/dataset/test_output_dataset'), + value_type=Variable.ValueType.STRING), + Variable(name='output_batch_path', + value='/data/dataset/test_output_dataset/batch/20220101', + typed_value=Value(string_value='/data/dataset/test_output_dataset/batch/20220101'), + value_type=Variable.ValueType.STRING), + Variable(name='output_batch_name', + value='20220101', + typed_value=Value(string_value='20220101'), + value_type=Variable.ValueType.STRING), + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_rsa_psi_data_join_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_rsa_psi_data_join_configer.py new file mode 100644 index 000000000..75f623505 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/light_client_rsa_psi_data_join_configer.py @@ -0,0 +1,82 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from datetime import datetime + +from typing import List, Optional + +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class LightClientRsaPsiDataJoinConfiger(BaseConfiger): + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-light-psi-data-join') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + # return all variables and frontend will filter them by tag + return list(zip_workflow_variables(self.get_config())) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + output_dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + + input_batch_path = get_batch_data_path(input_batch) + output_batch_path = output_batch.path + + input_dataset_path_variable = make_variable(name='input_dataset_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, input_dataset_path_variable) + + input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_batch_path) + set_variable_value_to_job_config(job_config, input_batch_path_variable) + + output_dataset_path_variable = make_variable(name='output_dataset_path', typed_value=output_dataset.path) + set_variable_value_to_job_config(job_config, output_dataset_path_variable) + + output_batch_path_variable = make_variable(name='output_batch_path', typed_value=output_batch_path) + set_variable_value_to_job_config(job_config, output_batch_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch.batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/ot_psi_data_join_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/ot_psi_data_join_configer.py new file mode 100644 index 000000000..eef1dfdc4 --- 
/dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/ot_psi_data_join_configer.py @@ -0,0 +1,94 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime + +from typing import List, Optional +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class OtPsiDataJoinConfiger(BaseConfiger): + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService(self._session).get_workflow_template(name='sys-preset-ot-psi-analyzer') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + # return all variables and frontend will filter them by tag + return list(zip_workflow_variables(self.get_config())) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + for pure_domain_name, job_config in global_configs.global_configs.items(): + role_variable = make_variable(name='role', typed_value='server') + if pure_domain_name != my_domain_name: + role_variable.typed_value.CopyFrom(Value(string_value='client')) + set_variable_value_to_job_config(job_config, role_variable) + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + output_dataset = self._session.query(Dataset).filter(Dataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + 
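+        # _get_data_batch resolves the concrete batch for this run; judging by
+        # the configer tests, the optional event_time selects the matching
+        # batch for STREAMING datasets, while PSI datasets fall back to their
+        # single batch.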
input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + + input_batch_path = get_batch_data_path(input_batch) + output_batch_path = output_batch.path + + input_dataset_path_variable = make_variable(name='input_dataset_path', typed_value=input_dataset.path) + set_variable_value_to_job_config(job_config, input_dataset_path_variable) + + input_batch_path_variable = make_variable(name='input_batch_path', typed_value=input_batch_path) + set_variable_value_to_job_config(job_config, input_batch_path_variable) + + output_dataset_path_variable = make_variable(name='output_dataset_path', typed_value=output_dataset.path) + set_variable_value_to_job_config(job_config, output_dataset_path_variable) + + output_batch_path_variable = make_variable(name='output_batch_path', typed_value=output_batch_path) + set_variable_value_to_job_config(job_config, output_batch_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch.batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/rsa_psi_data_join_configer.py b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/rsa_psi_data_join_configer.py new file mode 100644 index 000000000..c694b780e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/job_configer/rsa_psi_data_join_configer.py @@ -0,0 +1,134 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime + +from typing import Tuple, List, Optional +import rsa + +from fedlearner_webconsole.dataset.models import Dataset, ProcessedDataset +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger, get_my_pure_domain_name, \ + set_variable_value_to_job_config +from fedlearner_webconsole.dataset.data_path import get_batch_data_path +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.utils.workflow import zip_workflow_variables +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto import common_pb2, workflow_definition_pb2 + + +class RsaPsiDataJoinConfiger(BaseConfiger): + USER_VARIABLES_NAME_SET = { + 'fedlearner_image_version', + 'num_partitions', + 'raw_worker_cpu', + 'raw_worker_mem', + 'batch_size', + 'psi_worker_cpu', + 'psi_worker_mem', + 'master_cpu', + 'master_mem', + } + + @staticmethod + def _generate_rsa_key_pair(length: int = 1024) -> Tuple[str, str]: + """generate rsa key pair in pem format + + Args: + length (int, optional): bits for generate private key. Defaults to 1024. 
+ + Returns: + Tuple[str, str]: PublicKey PEM, PrivateKey PEM + """ + # Note that we generate rsa keys in current thread, which will slow down api response. + # DONT USE POOLSIZE ARGUMENT!!!! + # TODO(wangsen.0914): optimize this using async job_scheduler to call this func. + public_key, private_key = rsa.newkeys(length) + return public_key.save_pkcs1(format='PEM').decode(), private_key.save_pkcs1(format='PEM').decode() + + def get_config(self) -> workflow_definition_pb2.WorkflowDefinition: + template = WorkflowTemplateService( + self._session).get_workflow_template(name='sys-preset-psi-data-join-analyzer') + return template.get_config() + + @property + def user_variables(self) -> List[common_pb2.Variable]: + real_user_variables = [] + for variable in zip_workflow_variables(self.get_config()): + if variable.name in self.USER_VARIABLES_NAME_SET: + real_user_variables.append(variable) + + return real_user_variables + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + + public_key_pem, private_key_pem = self._generate_rsa_key_pair() + for pure_domain_name, job_config in global_configs.global_configs.items(): + if pure_domain_name == my_domain_name: + role_variable = make_variable(name='role', typed_value='Leader') + set_variable_value_to_job_config(job_config, role_variable) + rsa_key_pem_variable = make_variable(name='rsa_key_pem', typed_value=private_key_pem) + set_variable_value_to_job_config(job_config, rsa_key_pem_variable) + + else: + role_variable = make_variable(name='role', typed_value='Follower') + set_variable_value_to_job_config(job_config, role_variable) + rsa_key_pem_variable = make_variable(name='rsa_key_pem', typed_value=public_key_pem) + set_variable_value_to_job_config(job_config, rsa_key_pem_variable) + + rsa_key_path_variable = make_variable(name='rsa_key_path', typed_value='') + set_variable_value_to_job_config(job_config, rsa_key_path_variable) + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + + my_domain_name = get_my_pure_domain_name() + job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {job_config.dataset_uuid}') + output_dataset = self._session.query(ProcessedDataset).filter( + ProcessedDataset.uuid == result_dataset_uuid).first() + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {result_dataset_uuid}') + + input_batch = self._get_data_batch(input_dataset, event_time) + output_batch = self._get_data_batch(output_dataset, event_time) + + input_batch_path = get_batch_data_path(input_batch) + + dataset_variable = make_variable(name='dataset', typed_value=input_batch_path) + set_variable_value_to_job_config(job_config, dataset_variable) + + output_dataset_path_variable = make_variable(name='output_dataset_path', typed_value=output_dataset.path) + set_variable_value_to_job_config(job_config, 
output_dataset_path_variable) + + output_batch_path_variable = make_variable(name='output_batch_path', typed_value=output_batch.path) + set_variable_value_to_job_config(job_config, output_batch_path_variable) + + output_batch_name_variable = make_variable(name='output_batch_name', typed_value=output_batch.batch_name) + set_variable_value_to_job_config(job_config, output_batch_name_variable) + + return global_configs diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/local_controllers.py b/web_console_v2/api/fedlearner_webconsole/dataset/local_controllers.py new file mode 100644 index 000000000..6e755db49 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/local_controllers.py @@ -0,0 +1,251 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from datetime import datetime +import logging +from typing import Optional +from sqlalchemy.orm import Session + +from fedlearner_webconsole.dataset.models import DataBatch, DatasetJob, DatasetJobStage, DatasetJobState, \ + DatasetType +from fedlearner_webconsole.dataset.services import BatchService, DatasetJobStageService, DatasetService +from fedlearner_webconsole.proto.dataset_pb2 import BatchParameter, DatasetJobGlobalConfigs, CronType +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.workflow.workflow_controller import start_workflow_locally, stop_workflow_locally + + +class DatasetJobStageLocalController(object): + + def __init__(self, session: Session): + self._session = session + self._dataset_job_stage_service = DatasetJobStageService(session) + + def start(self, dataset_job_stage: DatasetJobStage): + """start dataset job stage task locally + + 1. start related workflow locally + 2. set dataset job stage's state to RUNNING + """ + start_workflow_locally(self._session, dataset_job_stage.workflow) + self._dataset_job_stage_service.start_dataset_job_stage(dataset_job_stage=dataset_job_stage) + logging.info('[dataset_job_stage_local_controller]: start successfully') + + def stop(self, dataset_job_stage: DatasetJobStage): + """stop dataset job stage task locally + + 1. stop related workflow locally + 2. set dataset job stage's state to STOPPED + """ + if dataset_job_stage.workflow is not None: + stop_workflow_locally(self._session, dataset_job_stage.workflow) + else: + logging.info(f'workflow not found, just skip! 
workflow id: {dataset_job_stage.workflow_id}')
+        self._dataset_job_stage_service.finish_dataset_job_stage(dataset_job_stage=dataset_job_stage,
+                                                                 finish_state=DatasetJobState.STOPPED)
+        logging.info('[dataset_job_stage_local_controller]: stop successfully')
+
+    # TODO(liuhehan): delete in the near future after we use as_coordinator func
+    def create_data_batch_and_job_stage(self,
+                                        dataset_job_id: int,
+                                        event_time: Optional[datetime] = None,
+                                        uuid: Optional[str] = None,
+                                        name: Optional[str] = None) -> Optional[DatasetJobStage]:
+        """create data_batch and job_stage locally
+
+        UseCase 1: create new data_batch and new job_stage with given uuid and name:
+            only called as role of participants, uuid and name are given by coordinator
+            will create both data_batch and job_stage
+
+            Parameters:
+                dataset_job_id(int): dataset_job id
+                event_time(datetime): optional; only works in STREAMING dataset_job,
+                    event_time of current data_batch and job_stage
+                uuid(str): uuid of dataset_job_stage
+                name(str): name of dataset_job_stage
+
+            Returns:
+                dataset_job_stage(DatasetJobStage): the dataset_job_stage created by this function
+
+        UseCase 2: create new data_batch and new job_stage for PSI/STREAMING dataset_job:
+            only called as role of coordinator
+            will create both data_batch and job_stage
+
+            Parameters:
+                dataset_job_id(int): dataset_job id
+                event_time(datetime): optional; only works in STREAMING dataset_job,
+                    event_time of current data_batch and job_stage
+
+            Returns:
+                dataset_job_stage(DatasetJobStage): the dataset_job_stage created by this function
+
+        UseCase 3: rerun data_batch:
+            called to create a new job_stage when a data_batch failed
+            will create only the dataset_job_stage if the target data_batch is found
+
+            Parameters:
+                dataset_job_id(int): dataset_job id
+                event_time(datetime): optional; only works in STREAMING dataset_job,
+                    event_time of current data_batch and job_stage
+
+            Returns:
+                dataset_job_stage(DatasetJobStage): the dataset_job_stage created by this function
+        """
+        dataset_job: DatasetJob = self._session.query(DatasetJob).get(dataset_job_id)
+        if dataset_job.output_dataset.dataset_type == DatasetType.STREAMING:
+            data_batch = self._session.query(DataBatch).filter(
+                DataBatch.dataset_id == dataset_job.output_dataset_id).filter(
+                    DataBatch.event_time == event_time).first()
+        else:
+            data_batch = self._session.query(DataBatch).filter(
+                DataBatch.dataset_id == dataset_job.output_dataset_id).first()
+        # create data_batch if it does not exist:
+        if data_batch is None:
+            batch_parameter = BatchParameter(dataset_id=dataset_job.output_dataset_id)
+            if event_time:
+                batch_parameter.event_time = to_timestamp(event_time)
+            data_batch = BatchService(self._session).create_batch(batch_parameter=batch_parameter)
+            self._session.flush()
+        dataset_job_stage = None
+        if uuid:
+            dataset_job_stage = self._session.query(DatasetJobStage).filter(DatasetJobStage.uuid == uuid).first()
+        # for idempotency, skip if dataset_job_stage already exists:
+        if dataset_job_stage is None:
+            dataset_job_stage = self._dataset_job_stage_service.create_dataset_job_stage(
+                project_id=dataset_job.project_id,
+                dataset_job_id=dataset_job_id,
+                output_data_batch_id=data_batch.id,
+                uuid=uuid,
+                name=name)
+        return dataset_job_stage
+
+    def create_data_batch_and_job_stage_as_coordinator(
+            self,
+            dataset_job_id: int,
+            global_configs: DatasetJobGlobalConfigs,
+            event_time: Optional[datetime] = None) -> Optional[DatasetJobStage]:
+        """create data_batch and job_stage locally as coordinator
+
+        UseCase 1: create new data_batch and new job_stage for PSI/STREAMING
dataset: + only called as role of coordinator + will create both data_batch and job_stage + + Parameters: + dataset_job_id(int): dataset_job id + global_configs(global_configs): configs of all participants for this dataset_job_stage + event_time(datetime): optional; only works in STREAMING dataset, + event_time of current data_batch and job_stage + + Returns: + dataset_job_stage(DatasetJobStage): dataset_job_stage which created in func + + UseCase 2: rerun data_batch: + called to create a new job_stage when data_batch failed, + will create only dataset_job_stage if find target data_batch + + Parameters: + dataset_job_id(int): dataset_job id + global_configs(global_configs): configs of all participants for this dataset_job_stage + event_time(datetime): optional; only works in STREAMING dataset, + event_time of current data_batch and job_stage + + Returns: + dataset_job_stage(DatasetJobStage): dataset_job_stage which created in func + """ + dataset_job: DatasetJob = self._session.query(DatasetJob).get(dataset_job_id) + data_batch = DatasetService(session=self._session).get_data_batch(dataset=dataset_job.output_dataset, + event_time=event_time) + # create data_batch if not exist: + if data_batch is None: + batch_parameter = BatchParameter(dataset_id=dataset_job.output_dataset_id) + if event_time: + batch_parameter.event_time = to_timestamp(event_time) + if dataset_job.is_daily_cron(): + batch_parameter.cron_type = CronType.DAILY + elif dataset_job.is_hourly_cron(): + batch_parameter.cron_type = CronType.HOURLY + data_batch = BatchService(self._session).create_batch(batch_parameter=batch_parameter) + self._session.flush() + dataset_job_stage = self._dataset_job_stage_service.create_dataset_job_stage_as_coordinator( + project_id=dataset_job.project_id, + dataset_job_id=dataset_job_id, + output_data_batch_id=data_batch.id, + global_configs=global_configs) + return dataset_job_stage + + def create_data_batch_and_job_stage_as_participant(self, + dataset_job_id: int, + coordinator_id: int, + uuid: str, + name: str, + event_time: Optional[datetime] = None + ) -> Optional[DatasetJobStage]: + """create data_batch and job_stage locally as participant + + UseCase 1: create new data_batch and new job_stage with given uuid and name: + only called as role of participants, uuid and name are given by coordinator. + will create both data_batch and job_stage + + Parameters: + dataset_job_id(int): dataset_job id + coordinator_id(int): id of coordinator + uuid(str): uuid of dataset_job_stage + name(str): name of dataset_job_stage + event_time(datetime): optional; only works in STREAMING dataset, + event_time of current data_batch and job_stage + + Returns: + dataset_job_stage(DatasetJobStage): dataset_job_stage which created in func + + UseCase 2: rerun data_batch: + only called as role of participants, uuid and name are given by coordinator. 
+ aim to create a new job_stage when data_batch failed, + will create only dataset_job_stage if find target data_batch + + Parameters: + dataset_job_id(int): dataset_job id + coordinator_id(int): id of coordinator + uuid(str): uuid of dataset_job_stage + name(str): name of dataset_job_stage + event_time(datetime): optional; only works in STREAMING dataset, + event_time of current data_batch and job_stage + + Returns: + dataset_job_stage(DatasetJobStage): dataset_job_stage which created in func + """ + dataset_job: DatasetJob = self._session.query(DatasetJob).get(dataset_job_id) + data_batch = DatasetService(session=self._session).get_data_batch(dataset=dataset_job.output_dataset, + event_time=event_time) + # create data_batch if not exist: + if data_batch is None: + batch_parameter = BatchParameter(dataset_id=dataset_job.output_dataset_id) + if event_time: + batch_parameter.event_time = to_timestamp(event_time) + if dataset_job.is_daily_cron(): + batch_parameter.cron_type = CronType.DAILY + elif dataset_job.is_hourly_cron(): + batch_parameter.cron_type = CronType.HOURLY + data_batch = BatchService(self._session).create_batch(batch_parameter=batch_parameter) + self._session.flush() + dataset_job_stage = self._session.query(DatasetJobStage).filter(DatasetJobStage.uuid == uuid).first() + # for idempotent, skip if dataset_job_stage exists: + if dataset_job_stage is None: + dataset_job_stage = self._dataset_job_stage_service.create_dataset_job_stage_as_participant( + project_id=dataset_job.project_id, + dataset_job_id=dataset_job_id, + output_data_batch_id=data_batch.id, + uuid=uuid, + name=name, + coordinator_id=coordinator_id) + return dataset_job_stage diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/local_controllers_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/local_controllers_test.py new file mode 100644 index 000000000..b88782542 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/local_controllers_test.py @@ -0,0 +1,526 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from datetime import datetime, timedelta, timezone
+import unittest
+from unittest.mock import patch, MagicMock
+
+from testing.common import NoWebServerTestCase
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.proto import dataset_pb2
+from fedlearner_webconsole.workflow.models import Workflow
+from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController
+from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetJob, DatasetJobKind, DatasetJobStage, \
+    DatasetJobState, DatasetKindV2, DatasetType
+from fedlearner_webconsole.utils.resource_name import resource_uuid
+
+
+class DatasetJobStageLocalControllerTest(NoWebServerTestCase):
+
+    @patch('fedlearner_webconsole.dataset.local_controllers.start_workflow_locally')
+    @patch('fedlearner_webconsole.dataset.local_controllers.DatasetJobStageService.start_dataset_job_stage')
+    def test_start(self, mock_start_dataset_job_stage: MagicMock, mock_start_workflow_locally: MagicMock):
+        with db.session_scope() as session:
+            dataset_job_stage = DatasetJobStage(uuid=resource_uuid(),
+                                                project_id=1,
+                                                workflow_id=1,
+                                                dataset_job_id=1,
+                                                data_batch_id=1)
+            DatasetJobStageLocalController(session=session).start(dataset_job_stage=dataset_job_stage)
+            mock_start_workflow_locally.assert_called_once()
+            mock_start_dataset_job_stage.assert_called_once_with(dataset_job_stage=dataset_job_stage)
+
+    @patch('fedlearner_webconsole.dataset.local_controllers.stop_workflow_locally')
+    @patch('fedlearner_webconsole.dataset.local_controllers.DatasetJobStageService.finish_dataset_job_stage')
+    def test_stop(self, mock_finish_dataset_job_stage: MagicMock, mock_stop_workflow_locally: MagicMock):
+        with db.session_scope() as session:
+            dataset_job_stage = DatasetJobStage(uuid=resource_uuid(),
+                                                project_id=1,
+                                                workflow_id=1,
+                                                dataset_job_id=1,
+                                                data_batch_id=1)
+            session.add(dataset_job_stage)
+
+            dataset_job_stage_local_controller = DatasetJobStageLocalController(session=session)
+            # test no workflow
+            dataset_job_stage_local_controller.stop(dataset_job_stage=dataset_job_stage)
+            mock_stop_workflow_locally.assert_not_called()
+            mock_finish_dataset_job_stage.assert_called_once_with(dataset_job_stage=dataset_job_stage,
+                                                                  finish_state=DatasetJobState.STOPPED)
+
+            # test has workflow
+            mock_stop_workflow_locally.reset_mock()
+            mock_finish_dataset_job_stage.reset_mock()
+            workflow = Workflow(id=1)
+            session.add(workflow)
+            session.flush()
+            dataset_job_stage_local_controller.stop(dataset_job_stage=dataset_job_stage)
+            mock_stop_workflow_locally.assert_called_once()
+            mock_finish_dataset_job_stage.assert_called_once_with(dataset_job_stage=dataset_job_stage,
+                                                                  finish_state=DatasetJobState.STOPPED)
+
+    def test_create_data_batch_and_job_stage(self):
+        with db.session_scope() as session:
+            dataset_job = DatasetJob(id=1,
+                                     uuid='dataset_job',
+                                     project_id=1,
+                                     input_dataset_id=1,
+                                     output_dataset_id=2,
+                                     kind=DatasetJobKind.RSA_PSI_DATA_JOIN,
+                                     state=DatasetJobState.PENDING)
+            dataset_job.set_global_configs(
+                dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()}))
+            session.add(dataset_job)
+            output_dataset = Dataset(id=2,
+                                     uuid='output_dataset uuid',
+                                     name='output_dataset',
+                                     dataset_type=DatasetType.PSI,
+                                     comment='test comment',
+                                     path='/data/dataset/123',
+                                     project_id=1,
+                                     dataset_kind=DatasetKindV2.PROCESSED)
+            session.add(output_dataset)
+            session.commit()
+
+        # test PSI
+        with db.session_scope() as session:
+            dataset_job_stage = 
DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage( + dataset_job_id=1) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/0') + + # test PSI has batch + with db.session_scope() as session: + data_batch = DataBatch(id=1, name='test_data_batch', dataset_id=2, path='/data/test/batch/0') + session.add(data_batch) + session.flush() + dataset_job_stage = DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage( + dataset_job_id=1) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.data_batch.name, 'test_data_batch') + self.assertEqual(dataset_job_stage.data_batch.path, '/data/test/batch/0') + + # test PSI has batch and stage + with db.session_scope() as session: + data_batch = DataBatch(id=1, name='test_data_batch', dataset_id=2, path='/data/test/batch/0') + session.add(data_batch) + dataset_job_stage = DatasetJobStage(id=100, + name='test_dataset_job', + uuid='test_dataset_job uuid', + project_id=1, + workflow_id=0, + dataset_job_id=1, + data_batch_id=1) + session.add(dataset_job_stage) + session.flush() + dataset_job_stage = DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage( + dataset_job_id=1, uuid='test_dataset_job uuid', name='test_dataset_job') + self.assertEqual(dataset_job_stage.id, 100) + + with db.session_scope() as session: + dataset = session.query(Dataset).get(2) + dataset.dataset_type = DatasetType.STREAMING + session.commit() + + # test STREAMING + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage( + dataset_job_id=1, event_time=datetime(2022, 1, 1)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1).replace(tzinfo=timezone.utc)) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/20220101') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1)) + + # test STREAMING has batch + with db.session_scope() as session: + data_batch_1 = DataBatch(id=1, + name='test_data_batch 1', + dataset_id=2, + path='/data/test/batch/20220101', + event_time=datetime(2022, 1, 1)) + session.add(data_batch_1) + data_batch_2 = DataBatch(id=2, + name='test_data_batch 2', + dataset_id=2, + path='/data/test/batch/20220102', + event_time=datetime(2022, 1, 2)) + session.add(data_batch_2) + session.flush() + dataset_job_stage = DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage( + dataset_job_id=1, event_time=datetime(2022, 1, 1)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1)) + self.assertEqual(dataset_job_stage.data_batch.name, 'test_data_batch 1') + self.assertEqual(dataset_job_stage.data_batch.path, '/data/test/batch/20220101') + self.assertEqual(dataset_job_stage.data_batch.event_time, 
datetime(2022, 1, 1)) + + # test STREAMING has batch and stage + with db.session_scope() as session: + data_batch_1 = DataBatch(id=1, + name='test_data_batch 1', + dataset_id=2, + path='/data/test/batch/20220101', + event_time=datetime(2022, 1, 1)) + session.add(data_batch_1) + data_batch_2 = DataBatch(id=2, + name='test_data_batch 2', + dataset_id=2, + path='/data/test/batch/20220102', + event_time=datetime(2022, 1, 2)) + session.add(data_batch_2) + dataset_job_stage = DatasetJobStage(id=100, + name='test_dataset_job', + uuid='test_dataset_job uuid', + project_id=1, + workflow_id=0, + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 2)) + session.add(dataset_job_stage) + session.flush() + dataset_job_stage = DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage( + dataset_job_id=1, + event_time=datetime(2022, 1, 1), + uuid='test_dataset_job uuid', + name='test_dataset_job') + self.assertEqual(dataset_job_stage.id, 100) + + def test_create_data_batch_and_job_stage_as_coordinator(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING) + session.add(dataset_job) + output_dataset = Dataset(id=2, + uuid='output_dataset uuid', + name='output_dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(output_dataset) + session.commit() + + global_configs = dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()}) + # test PSI + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_coordinator(dataset_job_id=1, + global_configs=global_configs) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/0') + self.assertEqual(dataset_job_stage.get_global_configs(), global_configs) + self.assertTrue(dataset_job_stage.is_coordinator()) + + # test PSI has batch + with db.session_scope() as session: + data_batch = DataBatch(id=1, name='test_data_batch', dataset_id=2, path='/data/test/batch/0') + session.add(data_batch) + session.flush() + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_coordinator(dataset_job_id=1, + global_configs=global_configs) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.data_batch.name, 'test_data_batch') + self.assertEqual(dataset_job_stage.data_batch.path, '/data/test/batch/0') + self.assertEqual(dataset_job_stage.get_global_configs(), global_configs) + self.assertTrue(dataset_job_stage.is_coordinator()) + + with db.session_scope() as session: + dataset = session.query(Dataset).get(2) + dataset.dataset_type = DatasetType.STREAMING + session.commit() + + # test STREAMING + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_coordinator(dataset_job_id=1, + 
global_configs=global_configs, + event_time=datetime(2022, 1, 1)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1).replace(tzinfo=timezone.utc)) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/20220101') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1)) + self.assertEqual(dataset_job_stage.get_global_configs(), global_configs) + self.assertTrue(dataset_job_stage.is_coordinator()) + + # test STREAMING has batch + with db.session_scope() as session: + data_batch_1 = DataBatch(id=1, + name='test_data_batch 1', + dataset_id=2, + path='/data/test/batch/20220101', + event_time=datetime(2022, 1, 1)) + session.add(data_batch_1) + data_batch_2 = DataBatch(id=2, + name='test_data_batch 2', + dataset_id=2, + path='/data/test/batch/20220102', + event_time=datetime(2022, 1, 2)) + session.add(data_batch_2) + session.flush() + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_coordinator(dataset_job_id=1, + global_configs=global_configs, + event_time=datetime(2022, 1, 1)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1)) + self.assertEqual(dataset_job_stage.data_batch.name, 'test_data_batch 1') + self.assertEqual(dataset_job_stage.data_batch.path, '/data/test/batch/20220101') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1)) + self.assertEqual(dataset_job_stage.get_global_configs(), global_configs) + self.assertTrue(dataset_job_stage.is_coordinator()) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.time_range = timedelta(hours=1) + session.commit() + + # test STREAMING in hourly level + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_coordinator(dataset_job_id=1, + global_configs=global_configs, + event_time=datetime(2022, 1, 1, 8)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1, 8).replace(tzinfo=timezone.utc)) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/20220101-08') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1, 8)) + self.assertEqual(dataset_job_stage.get_global_configs(), global_configs) + self.assertTrue(dataset_job_stage.is_coordinator()) + + def test_create_data_batch_and_job_stage_as_participant(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING) + session.add(dataset_job) + output_dataset = Dataset(id=2, + uuid='output_dataset uuid', + name='output_dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED) + session.add(output_dataset) + session.commit() + + # test PSI + with db.session_scope() as 
session: + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + uuid='test_dataset_job uuid', + name='test_dataset_job') + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/0') + self.assertIsNone(dataset_job_stage.global_configs) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + + # test PSI has batch + with db.session_scope() as session: + data_batch = DataBatch(id=1, name='test_data_batch', dataset_id=2, path='/data/test/batch/0') + session.add(data_batch) + session.flush() + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + uuid='test_dataset_job uuid', + name='test_dataset_job') + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.data_batch.name, 'test_data_batch') + self.assertEqual(dataset_job_stage.data_batch.path, '/data/test/batch/0') + self.assertIsNone(dataset_job_stage.global_configs) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + + # test PSI has batch and stage + with db.session_scope() as session: + data_batch = DataBatch(id=1, name='test_data_batch', dataset_id=2, path='/data/test/batch/0') + session.add(data_batch) + dataset_job_stage = DatasetJobStage(id=100, + name='test_dataset_job', + uuid='test_dataset_job uuid', + project_id=1, + workflow_id=0, + dataset_job_id=1, + data_batch_id=1, + coordinator_id=1) + session.add(dataset_job_stage) + session.flush() + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + uuid='test_dataset_job uuid', + name='test_dataset_job') + self.assertEqual(dataset_job_stage.id, 100) + + with db.session_scope() as session: + dataset = session.query(Dataset).get(2) + dataset.dataset_type = DatasetType.STREAMING + session.commit() + + # test STREAMING + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + uuid='test_dataset_job uuid', + name='test_dataset_job', + event_time=datetime(2022, 1, 1)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1).replace(tzinfo=timezone.utc)) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/20220101') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1)) + self.assertIsNone(dataset_job_stage.global_configs) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + + # test STREAMING has batch + with db.session_scope() as session: + data_batch_1 = DataBatch(id=1, + name='test_data_batch 1', + dataset_id=2, + path='/data/test/batch/20220101', + event_time=datetime(2022, 1, 1)) + session.add(data_batch_1) + data_batch_2 = DataBatch(id=2, + name='test_data_batch 2', 
+ dataset_id=2, + path='/data/test/batch/20220102', + event_time=datetime(2022, 1, 2)) + session.add(data_batch_2) + session.flush() + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + uuid='test_dataset_job uuid', + name='test_dataset_job', + event_time=datetime(2022, 1, 1)) + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1)) + self.assertEqual(dataset_job_stage.data_batch.name, 'test_data_batch 1') + self.assertEqual(dataset_job_stage.data_batch.path, '/data/test/batch/20220101') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1)) + self.assertIsNone(dataset_job_stage.global_configs) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + + # test STREAMING has batch and stage + with db.session_scope() as session: + data_batch_1 = DataBatch(id=1, + name='test_data_batch 1', + dataset_id=2, + path='/data/test/batch/20220101', + event_time=datetime(2022, 1, 1)) + session.add(data_batch_1) + data_batch_2 = DataBatch(id=2, + name='test_data_batch 2', + dataset_id=2, + path='/data/test/batch/20220102', + event_time=datetime(2022, 1, 2)) + session.add(data_batch_2) + dataset_job_stage = DatasetJobStage(id=100, + name='test_dataset_job', + uuid='test_dataset_job uuid', + project_id=1, + workflow_id=0, + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2022, 1, 2), + coordinator_id=1) + session.add(dataset_job_stage) + session.flush() + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + event_time=datetime(2022, 1, 1), + uuid='test_dataset_job uuid', + name='test_dataset_job') + self.assertEqual(dataset_job_stage.id, 100) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.time_range = timedelta(hours=1) + session.commit() + + # test STREAMING in hourly level + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageLocalController( + session=session).create_data_batch_and_job_stage_as_participant(dataset_job_id=1, + coordinator_id=1, + event_time=datetime(2022, 1, 1, 8), + uuid='test_dataset_job uuid', + name='test_dataset_job') + session.flush() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1, 8).replace(tzinfo=timezone.utc)) + self.assertEqual(dataset_job_stage.data_batch.dataset_id, 2) + self.assertEqual(dataset_job_stage.data_batch.path, '/data/dataset/123/batch/20220101-08') + self.assertEqual(dataset_job_stage.data_batch.event_time, datetime(2022, 1, 1, 8)) + self.assertIsNone(dataset_job_stage.global_configs) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/meta_data.py b/web_console_v2/api/fedlearner_webconsole/dataset/meta_data.py new file mode 100644 index 000000000..fcdd31113 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/meta_data.py @@ -0,0 +1,207 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import logging
+import dateutil.parser
+from typing import List, Optional, Dict, Any
+
+
+class MetaData(object):
+ _DTYPES = 'dtypes'
+ _SAMPLE = 'sample'
+ _FEATURES = 'features'
+ _HIST = 'hist'
+ _COUNT = 'count'
+
+ def __init__(self, metadata: Optional[dict] = None):
+ self.metadata = metadata or {}
+
+ @property
+ def dtypes(self) -> List[Any]:
+ return self.metadata.get(self._DTYPES, [])
+
+ @property
+ def sample(self) -> List[Any]:
+ return self.metadata.get(self._SAMPLE, [])
+
+ @property
+ def metrics(self) -> Dict[str, Dict[Any, Any]]:
+ return self.metadata.get(self._FEATURES, {})
+
+ @property
+ def hist(self) -> Dict[str, Dict[Any, Any]]:
+ return self.metadata.get(self._HIST, {})
+
+ @property
+ def num_feature(self) -> int:
+ return len(self.dtypes)
+
+ @property
+ def num_example(self) -> int:
+ return self.metadata.get(self._COUNT, 0)
+
+ def get_metrics_by_name(self, name: str) -> Dict[Any, Any]:
+ return self.metrics.get(name, {})
+
+ def get_hist_by_name(self, name: str) -> Dict[Any, Any]:
+ return self.hist.get(name, {})
+
+ def get_preview(self) -> dict:
+ """ get the preview data
+ Returns:
+ preview dict format:
+ {
+ 'dtypes': [
+ {'key': 'f01', 'value': 'bigint'}
+ ],
+ 'sample': [
+ [1],
+ [0],
+ ],
+ 'num_example': 1000,
+ 'metrics': {
+ 'f01': {
+ 'count': '2',
+ 'mean': '0.0015716767309123998',
+ 'stddev': '0.03961485047808605',
+ 'min': '0',
+ 'max': '1',
+ 'missing_count': '0'
+ }
+ },
+ 'hist': {
+ 'x': [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5,
+ 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1],
+ 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19]
+ }
+ }
+ """
+ preview = {}
+ preview['dtypes'] = self.dtypes
+ preview['sample'] = self.sample
+ preview['num_example'] = self.num_example
+ preview['metrics'] = self.metrics
+ return preview
+
+
+class ImageMetaData(MetaData):
+ _LABEL_COUNT = 'label_count'
+ _THUMBNAIL_EXTENSION = '.png'
+
+ def __init__(self, thumbnail_dir_path: str, metadata: Optional[dict] = None):
+ super().__init__(metadata=metadata)
+ self.thumbnail_dir_path = thumbnail_dir_path
+
+ @property
+ def label_count(self) -> List[Any]:
+ return self.metadata.get(self._LABEL_COUNT, [])
+
+ def _get_column_idx(self, col_name: str):
+ col_idx = -1
+ for index, col_map in enumerate(self.dtypes):
+ if col_map['key'] == col_name:
+ col_idx = index
+ break
+ if col_idx < 0:
+ logging.warning(f'can\'t find the {col_name} column in dtypes:{self.dtypes}')
+ return col_idx
+
+ def _get_thumbnail_file_name(self, file_name: str) -> str:
+ thumbnail_file_name = file_name.split('.')[0] + self._THUMBNAIL_EXTENSION
+ return thumbnail_file_name
+
+ def get_preview(self) -> dict:
+ """ get the preview data
+ Returns:
+ preview dict format:
+ {
+ "dtypes": [
+ { "key": "file_name", "value": "string" },
+ { "key": "width", "value": "int" },
+ { "key": "height", "value": "int" },
+ { "key": "nChannels", "value": "int" },
+ { "key": "mode", "value": "int" },
+ { "key": "name", "value": "string" },
+ { "key": "created_at", "value": "string" },
+ { "key": "caption", "value": "string" },
+ { "key": "label", "value": "string" }
+ ],
+ "label_count":
[ + { + "label": "B", + "count": 1 + }, + ], + "count": 50, + "sample": [ + [ + "000000050576.jpg", + 640, + 480, + 3, + 16, + "000000050576.jpg", + "2021-08-30T16:52:15.501516", + "A tow truck loading a bank security truck by a building.", + "B" + ], + ... + ], + "features": { + "file_name": { + "count": "50", + "mean": null, + "stddev": null, + "min": "000000005756.jpg", + "max": "000000562222.jpg", + "missing_count": "0" + }, + ... + }, + "hist": { + "width": { + "x": [ 333.0, 363.7, 394.4, 425.1, 455.8, 486.5, 517.2, 547.9, 578.6, 609.3, 640.0 ], + "y": [ 1, 1, 4, 3, 4, 0, 0, 0, 36 ] + }, + ... + } + } + """ + preview = super().get_preview() + display_name_idx = self._get_column_idx('name') + file_name_idx = self._get_column_idx('file_name') + height_idx = self._get_column_idx('height') + width_idx = self._get_column_idx('width') + created_at_idx = self._get_column_idx('created_at') + label_idx = self._get_column_idx('label') + images = [] + for sample in self.sample: + sample[created_at_idx] = dateutil.parser.isoparse(sample[created_at_idx]).strftime('%Y-%m-%d') + image = { + 'name': sample[display_name_idx], + 'file_name': sample[file_name_idx], + 'width': sample[width_idx], + 'height': sample[height_idx], + 'created_at': sample[created_at_idx], + # TODO(wangzeju): hard code for the classification task, need to support more image follow up tasks. + 'annotation': { + 'label': sample[label_idx] + }, + 'path': os.path.join(self.thumbnail_dir_path, self._get_thumbnail_file_name(sample[file_name_idx])) + } + images.append(image) + preview['images'] = images + return preview diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/meta_data_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/meta_data_test.py new file mode 100644 index 000000000..9d863086f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/meta_data_test.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +import json +from envs import Envs +from fedlearner_webconsole.dataset.meta_data import ImageMetaData + + +class ImageMetaDataTest(unittest.TestCase): + + def setUp(self): + super().setUp() + self.maxDiff = None + with open(f'{Envs.BASE_DIR}/testing/test_data/image_meta.json', mode='r', encoding='utf-8') as f: + self.image_data = json.load(f) + with open(f'{Envs.BASE_DIR}/testing/test_data/expected_image_preview.json', mode='r', encoding='utf-8') as f: + self.expected_image_preview = json.load(f) + self.thumbnail_dir_path = '/fake_dir/' + + def test_image_preview(self): + image_meta = ImageMetaData(self.thumbnail_dir_path, self.image_data) + image_preview = image_meta.get_preview() + self.assertDictEqual(self.expected_image_preview, image_preview) + + def test_empty_meta(self): + image_meta = ImageMetaData(self.thumbnail_dir_path, None) + image_preview = image_meta.get_preview() + expected_response = {'dtypes': [], 'sample': [], 'num_example': 0, 'metrics': {}, 'images': []} + self.assertDictEqual(expected_response, image_preview) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/metrics.py b/web_console_v2/api/fedlearner_webconsole/dataset/metrics.py new file mode 100644 index 000000000..77e10f06c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/metrics.py @@ -0,0 +1,39 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.dataset.models import DatasetJobKind, DatasetJobState + + +def emit_dataset_job_submission_store(uuid: str, kind: DatasetJobKind, coordinator_id: int): + emit_store(name='dataset.job.submission', + value=1, + tags={ + 'uuid': uuid, + 'kind': kind.name, + 'coordinator_id': str(coordinator_id), + }) + + +def emit_dataset_job_duration_store(duration: int, uuid: str, kind: DatasetJobKind, coordinator_id: int, + state: DatasetJobState): + emit_store(name='dataset.job.duration', + value=duration, + tags={ + 'uuid': uuid, + 'kind': kind.name, + 'coordinator_id': str(coordinator_id), + 'state': state.name + }) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/metrics_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/metrics_test.py new file mode 100644 index 000000000..3df3b1a6c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/metrics_test.py @@ -0,0 +1,45 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest + +from fedlearner_webconsole.dataset.metrics import emit_dataset_job_submission_store, emit_dataset_job_duration_store +from fedlearner_webconsole.dataset.models import DatasetJobKind, DatasetJobState + + +class MetricsTest(unittest.TestCase): + + def test_emit_dataset_job_submission_store(self): + with self.assertLogs() as cm: + emit_dataset_job_submission_store('uuit-test', DatasetJobKind.IMPORT_SOURCE, 0) + logs = [r.msg for r in cm.records] + self.assertEqual(logs, [ + '[Metric][Store] dataset.job.submission: 1, tags={\'uuid\': \'uuit-test\', ' \ + '\'kind\': \'IMPORT_SOURCE\', \'coordinator_id\': \'0\'}', + ]) + + def test_emit_dataset_job_duration_store(self): + with self.assertLogs() as cm: + emit_dataset_job_duration_store(1000, 'uuit-test', DatasetJobKind.RSA_PSI_DATA_JOIN, 1, + DatasetJobState.SUCCEEDED) + logs = [r.msg for r in cm.records] + self.assertEqual(logs, [ + '[Metric][Store] dataset.job.duration: 1000, tags={\'uuid\': \'uuit-test\', ' \ + '\'kind\': \'RSA_PSI_DATA_JOIN\', \'coordinator_id\': \'1\', \'state\': \'SUCCEEDED\'}', + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/models.py b/web_console_v2/api/fedlearner_webconsole/dataset/models.py index 981a00f04..c85c73d10 100644 --- a/web_console_v2/api/fedlearner_webconsole/dataset/models.py +++ b/web_console_v2/api/fedlearner_webconsole/dataset/models.py @@ -1,29 +1,40 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-
-# coding: utf-8
+#
 import enum
+import os
+from typing import Optional
+
 from sqlalchemy.sql import func
 from sqlalchemy import UniqueConstraint
-from fedlearner_webconsole.db import db
-from fedlearner_webconsole.utils.mixins import to_dict_mixin
+from google.protobuf import text_format
+from fedlearner_webconsole.dataset.consts import ERROR_BATCH_SIZE
 from fedlearner_webconsole.proto import dataset_pb2
+from fedlearner_webconsole.proto.dataset_pb2 import (DatasetJobGlobalConfigs, DatasetRef, DatasetMetaInfo,
+ DatasetJobContext, DatasetJobStageContext, TimeRange)
+from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus
+from fedlearner_webconsole.utils.base_model.review_ticket_and_auth_model import ReviewTicketAndAuthModel
+from fedlearner_webconsole.utils.pp_datetime import to_timestamp
+from fedlearner_webconsole.db import db, default_table_args
+from fedlearner_webconsole.utils.base_model.softdelete_model import SoftDeleteModel
+from fedlearner_webconsole.workflow.models import WorkflowExternalState
 class DatasetType(enum.Enum):
- PSI = 'PSI'
+ PSI = 'PSI' # use PSI as the non-streaming dataset type
 STREAMING = 'STREAMING'
@@ -32,88 +43,149 @@ class BatchState(enum.Enum):
 SUCCESS = 'SUCCESS'
 FAILED = 'FAILED'
 IMPORTING = 'IMPORTING'
+ UNKNOWN = 'UNKNOWN'
-@to_dict_mixin(
- extras={
- 'data_batches':
- lambda dataset:
- [data_batch.to_dict() for data_batch in dataset.data_batches]
- })
-class Dataset(db.Model):
- __tablename__ = 'datasets_v2'
- __table_args__ = ({
- 'comment': 'This is webconsole dataset table',
- 'mysql_engine': 'innodb',
- 'mysql_charset': 'utf8mb4',
- })
-
- id = db.Column(db.Integer,
- primary_key=True,
- autoincrement=True,
- comment='id')
- name = db.Column(db.String(255), nullable=False, comment='dataset name')
- dataset_type = db.Column(db.Enum(DatasetType, native_enum=False),
- nullable=False,
- comment='data type')
- path = db.Column(db.String(512), comment='dataset path')
- comment = db.Column('cmt',
- db.Text(),
- key='comment',
- comment='comment of dataset')
- created_at = db.Column(db.DateTime(timezone=True),
- server_default=func.now(),
- comment='created time')
- updated_at = db.Column(db.DateTime(timezone=True),
- server_default=func.now(),
- onupdate=func.now(),
- comment='updated time')
- deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted time')
- project_id = db.Column(db.Integer, default=0, comment='project_id')
+# used to represent dataset and data_batch frontend state
+class ResourceState(enum.Enum):
+ PENDING = 'PENDING'
+ PROCESSING = 'PROCESSING'
+ SUCCEEDED = 'SUCCEEDED'
+ FAILED = 'FAILED'
+
+
+class PublishFrontendState(enum.Enum):
+ UNPUBLISHED = 'UNPUBLISHED'
+ TICKET_PENDING = 'TICKET_PENDING'
+ TICKET_DECLINED = 'TICKET_DECLINED'
+ PUBLISHED = 'PUBLISHED'
+
+
+class DatasetFormat(enum.Enum):
+ TABULAR = 0
+ IMAGE = 1
+ NONE_STRUCTURED = 2
+
+
+class ImportType(enum.Enum):
+ COPY = 'COPY'
+ NO_COPY = 'NO_COPY'
+
+
+class DatasetKindV2(enum.Enum):
+ RAW = 'raw'
+ PROCESSED = 'processed'
+ SOURCE = 'source'
+ EXPORTED = 'exported'
+ INTERNAL_PROCESSED = 'internal_processed' # dataset generated by an internal module, like model or tee
+
+
+class DatasetSchemaChecker(enum.Enum):
+ RAW_ID_CHECKER = 'RAW_ID_CHECKER'
+ NUMERIC_COLUMNS_CHECKER = 'NUMERIC_COLUMNS_CHECKER'
+
+
+class DatasetJobKind(enum.Enum):
+ RSA_PSI_DATA_JOIN = 'RSA_PSI_DATA_JOIN'
+ LIGHT_CLIENT_RSA_PSI_DATA_JOIN = 'LIGHT_CLIENT_RSA_PSI_DATA_JOIN'
+
OT_PSI_DATA_JOIN = 'OT_PSI_DATA_JOIN' + LIGHT_CLIENT_OT_PSI_DATA_JOIN = 'LIGHT_CLIENT_OT_PSI_DATA_JOIN' + HASH_DATA_JOIN = 'HASH_DATA_JOIN' + DATA_JOIN = 'DATA_JOIN' + DATA_ALIGNMENT = 'DATA_ALIGNMENT' + IMPORT_SOURCE = 'IMPORT_SOURCE' + EXPORT = 'EXPORT' + ANALYZER = 'ANALYZER' + + +# micro dataset_job's input/output dataset is the same one +MICRO_DATASET_JOB = [DatasetJobKind.ANALYZER] + +LOCAL_DATASET_JOBS = [ + DatasetJobKind.IMPORT_SOURCE, + DatasetJobKind.ANALYZER, + DatasetJobKind.EXPORT, + DatasetJobKind.LIGHT_CLIENT_OT_PSI_DATA_JOIN, + DatasetJobKind.LIGHT_CLIENT_RSA_PSI_DATA_JOIN, +] + + +class DatasetJobState(enum.Enum): + PENDING = 'PENDING' + RUNNING = 'RUNNING' + SUCCEEDED = 'SUCCEEDED' + FAILED = 'FAILED' + STOPPED = 'STOPPED' + + +class StoreFormat(enum.Enum): + UNKNOWN = 'UNKNOWN' + CSV = 'CSV' + TFRECORDS = 'TFRECORDS' + + +class DatasetJobSchedulerState(enum.Enum): + PENDING = 'PENDING' + RUNNABLE = 'RUNNABLE' + STOPPED = 'STOPPED' + - data_batches = db.relationship( - 'DataBatch', primaryjoin='foreign(DataBatch.dataset_id) == Dataset.id') - project = db.relationship( - 'Project', primaryjoin='foreign(Dataset.project_id) == Project.id') +DATASET_STATE_CONVERT_MAP_V2 = { + DatasetJobState.PENDING: ResourceState.PENDING, + DatasetJobState.RUNNING: ResourceState.PROCESSING, + DatasetJobState.SUCCEEDED: ResourceState.SUCCEEDED, + DatasetJobState.FAILED: ResourceState.FAILED, + DatasetJobState.STOPPED: ResourceState.FAILED, +} + + +class DataSourceType(enum.Enum): + # hdfs datasource path, e.g. hdfs:///home/xxx + HDFS = 'hdfs' + # nfs datasource path, e.g. file:///data/xxx + FILE = 'file' + + +SOURCE_IS_DELETED = 'deleted' +WORKFLOW_STATUS_STATE_MAPPER = { + WorkflowExternalState.COMPLETED: ResourceState.SUCCEEDED, + WorkflowExternalState.FAILED: ResourceState.FAILED, + WorkflowExternalState.STOPPED: ResourceState.FAILED, + WorkflowExternalState.INVALID: ResourceState.FAILED, +} +DATASET_JOB_FINISHED_STATE = [DatasetJobState.SUCCEEDED, DatasetJobState.FAILED, DatasetJobState.STOPPED] -@to_dict_mixin(extras={'details': (lambda batch: batch.get_details())}) class DataBatch(db.Model): __tablename__ = 'data_batches_v2' - __table_args__ = ( - UniqueConstraint('event_time', - 'dataset_id', - name='uniq_event_time_dataset_id'), - { - 'comment': 'This is webconsole dataset table', - 'mysql_engine': 'innodb', - 'mysql_charset': 'utf8mb4', - }, - ) - id = db.Column(db.Integer, - primary_key=True, - autoincrement=True, - comment='id') - event_time = db.Column(db.TIMESTAMP(timezone=True), - nullable=False, - comment='event_time') + __table_args__ = (UniqueConstraint('event_time', 'dataset_id', name='uniq_event_time_dataset_id'), + default_table_args('This is webconsole dataset table')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + name = db.Column(db.String(255), nullable=True, comment='data_batch name') + event_time = db.Column(db.TIMESTAMP(timezone=True), nullable=True, comment='event_time') dataset_id = db.Column(db.Integer, nullable=False, comment='dataset_id') path = db.Column(db.String(512), comment='path') - state = db.Column(db.Enum(BatchState, native_enum=False), + # TODO(wangsen.0914): gonna to deprecate + state = db.Column(db.Enum(BatchState, native_enum=False, create_constraint=False), default=BatchState.NEW, comment='state') + # move column will be deprecated after dataset refactor move = db.Column(db.Boolean, default=False, comment='move') # Serialized proto of DatasetBatch - details = db.Column(db.LargeBinary(), comment='details') - 
file_size = db.Column(db.Integer, default=0, comment='file_size') - num_imported_file = db.Column(db.Integer, - default=0, - comment='num_imported_file') - num_file = db.Column(db.Integer, default=0, comment='num_file') + file_size = db.Column(db.BigInteger, default=0, comment='file_size in bytes') + num_example = db.Column(db.BigInteger, default=0, comment='num_example') + num_feature = db.Column(db.BigInteger, default=0, comment='num_feature') + meta_info = db.Column(db.Text(16777215), comment='dataset meta info') comment = db.Column('cmt', db.Text(), key='comment', comment='comment') - created_at = db.Column(db.DateTime(timezone=True), - server_default=func.now(), - comment='created_at') + latest_parent_dataset_job_stage_id = db.Column(db.Integer, + nullable=False, + server_default=db.text('0'), + comment='latest parent dataset_job_stage id') + latest_analyzer_dataset_job_stage_id = db.Column(db.Integer, + nullable=False, + server_default=db.text('0'), + comment='latest analyzer dataset_job_stage id') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created_at') updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), server_onupdate=func.now(), @@ -125,30 +197,605 @@ class DataBatch(db.Model): 'foreign(DataBatch.dataset_id)', back_populates='data_batches') - def set_details(self, proto): - self.num_file = len(proto.files) - num_imported_file = 0 - num_failed_file = 0 + latest_parent_dataset_job_stage = db.relationship( + 'DatasetJobStage', + primaryjoin='DatasetJobStage.id == foreign(DataBatch.latest_parent_dataset_job_stage_id)', + # To disable the warning of back_populates + overlaps='data_batch') + + @property + def batch_name(self): + return self.name or os.path.basename(os.path.abspath(self.path)) + + def get_frontend_state(self) -> ResourceState: + # use dataset_job state to replace dataset_job_stage state when dataset_job_stage not support + if self.latest_parent_dataset_job_stage is None: + return self.dataset.get_frontend_state() + return DATASET_STATE_CONVERT_MAP_V2.get(self.latest_parent_dataset_job_stage.state) + + def is_available(self) -> bool: + return self.get_frontend_state() == ResourceState.SUCCEEDED + + def to_proto(self) -> dataset_pb2.DataBatch: + proto = dataset_pb2.DataBatch(id=self.id, + name=self.batch_name, + dataset_id=self.dataset_id, + path=self.path, + file_size=self.file_size, + num_example=self.num_example, + num_feature=self.num_feature, + comment=self.comment, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + event_time=to_timestamp(self.event_time) if self.event_time else 0, + latest_parent_dataset_job_stage_id=self.latest_parent_dataset_job_stage_id, + latest_analyzer_dataset_job_stage_id=self.latest_analyzer_dataset_job_stage_id) + proto.state = self.get_frontend_state().name + return proto + + +class Dataset(SoftDeleteModel, ReviewTicketAndAuthModel, db.Model): + __tablename__ = 'datasets_v2' + __table_args__ = (default_table_args('This is webconsole dataset table')) + + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + uuid = db.Column(db.String(255), nullable=True, comment='dataset uuid') + is_published = db.Column(db.Boolean, default=False, comment='dataset is published or not') + name = db.Column(db.String(255), nullable=False, comment='dataset name') + creator_username = db.Column(db.String(255), default='', comment='creator username') + dataset_type = db.Column(db.Enum(DatasetType, native_enum=False, 
create_constraint=False), + default=DatasetType.PSI, + nullable=False, + comment='data type') + path = db.Column(db.String(512), comment='dataset path') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment of dataset') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created time') + updated_at = db.Column(db.DateTime(timezone=True), + server_default=func.now(), + onupdate=func.now(), + comment='updated time') + project_id = db.Column(db.Integer, default=0, comment='project_id') + # New version of dataset kind + dataset_kind = db.Column(db.Enum(DatasetKindV2, native_enum=False, length=32, create_constraint=False), + default=DatasetKindV2.RAW, + comment='new version of dataset kind, choices [raw, processed, ...]') + # DatasetFormat enum + dataset_format = db.Column(db.Integer, default=0, comment='dataset format') + # StoreFormat + store_format = db.Column(db.Enum(StoreFormat, native_enum=False, length=32, create_constraint=False), + default=StoreFormat.TFRECORDS, + comment='dataset store format, like CSV, TFRECORDS, ...') + meta_info = db.Column(db.Text(16777215), comment='dataset meta info') + + import_type = db.Column(db.Enum(ImportType, length=64, native_enum=False, create_constraint=False), + server_default=ImportType.COPY.name, + comment='import type') + + data_batches = db.relationship('DataBatch', + primaryjoin='foreign(DataBatch.dataset_id) == Dataset.id', + order_by='desc(DataBatch.id)') + project = db.relationship('Project', primaryjoin='foreign(Dataset.project_id) == Project.id') + + # dataset only has one main dataset_job as parent_dataset_job, but could have many micro dataset_job + @property + def parent_dataset_job(self): + return None if not db.object_session(self) else db.object_session(self).query(DatasetJob).filter( + DatasetJob.output_dataset_id == self.id).filter( + DatasetJob.kind.not_in(MICRO_DATASET_JOB)).execution_options(include_deleted=True).first() + + # dataset only has one analyzer dataset_job + def analyzer_dataset_job(self): + return None if not db.object_session(self) else db.object_session(self).query(DatasetJob).filter( + DatasetJob.output_dataset_id == self.id).filter( + DatasetJob.kind == DatasetJobKind.ANALYZER).execution_options(include_deleted=True).first() + + # single table inheritance + # Ref: https://docs.sqlalchemy.org/en/14/orm/inheritance.html + __mapper_args__ = {'polymorphic_identity': DatasetKindV2.RAW, 'polymorphic_on': dataset_kind} + + def get_frontend_state(self) -> ResourceState: + # if parent_dataset_job failed to generate, dataset state is failed + if self.parent_dataset_job is None: + return ResourceState.FAILED + return DATASET_STATE_CONVERT_MAP_V2.get(self.parent_dataset_job.state) + + def get_file_size(self) -> int: file_size = 0 - # Aggregates stats - for file in proto.files: - if file.state == dataset_pb2.File.State.COMPLETED: - num_imported_file += 1 - file_size += file.size - elif file.state == dataset_pb2.File.State.FAILED: - num_failed_file += 1 - if num_imported_file + num_failed_file == self.num_file: - if num_failed_file > 0: - self.state = BatchState.FAILED - else: - self.state = BatchState.SUCCESS - self.num_imported_file = num_imported_file - self.file_size = file_size - self.details = proto.SerializeToString() - - def get_details(self): - if self.details is None: + for batch in self.data_batches: + if not batch.file_size or batch.file_size == ERROR_BATCH_SIZE: + continue + file_size += batch.file_size + return file_size + + def get_num_example(self) -> int: + 
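+ # num_example may be unset (None) on some batches; missing values count as 0 when summing.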
return sum([batch.num_example or 0 for batch in self.data_batches]) + + def get_num_feature(self) -> int: + if len(self.data_batches) != 0: + # num_feature is decided by the first data_batch + return self.data_batches[0].num_feature + return 0 + + # TODO(hangweiqiang): remove data_source after adapting fedlearner to dataset path + def get_data_source(self) -> Optional[str]: + if self.parent_dataset_job is not None: + dataset_job_stage = db.object_session(self).query(DatasetJobStage).filter_by( + dataset_job_id=self.parent_dataset_job.id).first() + if dataset_job_stage is not None: + return f'{dataset_job_stage.uuid}-psi-data-join-job' + if self.parent_dataset_job.workflow is not None: + return f'{self.parent_dataset_job.workflow.uuid}-psi-data-join-job' + return None + + @property + def publish_frontend_state(self) -> PublishFrontendState: + if not self.is_published: + return PublishFrontendState.UNPUBLISHED + if self.ticket_status == TicketStatus.APPROVED: + return PublishFrontendState.PUBLISHED + if self.ticket_status == TicketStatus.DECLINED: + return PublishFrontendState.TICKET_DECLINED + return PublishFrontendState.TICKET_PENDING + + def to_ref(self) -> DatasetRef: + # TODO(liuhehan): this is a lazy update of dataset store_format, remove it after release 2.4 + if self.dataset_kind in [DatasetKindV2.RAW, DatasetKindV2.PROCESSED] and self.store_format is None: + self.store_format = StoreFormat.TFRECORDS + # TODO(liuhehan): this is a lazy update for auth status, remove after release 2.4 + if self.auth_status is None: + self.auth_status = AuthStatus.AUTHORIZED + return DatasetRef(id=self.id, + uuid=self.uuid, + project_id=self.project_id, + name=self.name, + created_at=to_timestamp(self.created_at), + state_frontend=self.get_frontend_state().name, + path=self.path, + is_published=self.is_published, + dataset_format=DatasetFormat(self.dataset_format).name, + comment=self.comment, + dataset_kind=self.dataset_kind.name, + file_size=self.get_file_size(), + num_example=self.get_num_example(), + data_source=self.get_data_source(), + creator_username=self.creator_username, + dataset_type=self.dataset_type.name, + store_format=self.store_format.name if self.store_format else '', + import_type=self.import_type.name, + publish_frontend_state=self.publish_frontend_state.name, + auth_frontend_state=self.auth_frontend_state.name, + local_auth_status=self.auth_status.name, + participants_info=self.get_participants_info()) + + def to_proto(self) -> dataset_pb2.Dataset: + # TODO(liuhehan): this is a lazy update of dataset store_format, remove it after release 2.4 + if self.dataset_kind in [DatasetKindV2.RAW, DatasetKindV2.PROCESSED] and self.store_format is None: + self.store_format = StoreFormat.TFRECORDS + # TODO(liuhehan): this is a lazy update for auth status, remove after release 2.4 + if self.auth_status is None: + self.auth_status = AuthStatus.AUTHORIZED + meta_data = self.get_meta_info() + analyzer_dataset_job = self.analyzer_dataset_job() + # use newest data_batch updated_at time as dataset updated_at time if has data_batch + updated_at = self.data_batches[0].updated_at if self.data_batches else self.updated_at + return dataset_pb2.Dataset( + id=self.id, + uuid=self.uuid, + is_published=self.is_published, + project_id=self.project_id, + name=self.name, + workflow_id=self.parent_dataset_job.workflow_id if self.parent_dataset_job is not None else 0, + path=self.path, + created_at=to_timestamp(self.created_at), + data_source=self.get_data_source(), + file_size=self.get_file_size(), + 
num_example=self.get_num_example(),
+ comment=self.comment,
+ num_feature=self.get_num_feature(),
+ updated_at=to_timestamp(updated_at),
+ deleted_at=to_timestamp(self.deleted_at) if self.deleted_at else None,
+ parent_dataset_job_id=self.parent_dataset_job.id if self.parent_dataset_job is not None else 0,
+ dataset_format=DatasetFormat(self.dataset_format).name,
+ analyzer_dataset_job_id=analyzer_dataset_job.id if analyzer_dataset_job is not None else 0,
+ state_frontend=self.get_frontend_state().name,
+ dataset_kind=self.dataset_kind.name,
+ value=meta_data.value,
+ schema_checkers=meta_data.schema_checkers,
+ creator_username=self.creator_username,
+ import_type=self.import_type.name,
+ dataset_type=self.dataset_type.name,
+ store_format=self.store_format.name if self.store_format else '',
+ publish_frontend_state=self.publish_frontend_state.name,
+ auth_frontend_state=self.auth_frontend_state.name,
+ local_auth_status=self.auth_status.name,
+ participants_info=self.get_participants_info())
+
+ def is_tabular(self) -> bool:
+ return self.dataset_format == DatasetFormat.TABULAR.value
+
+ def is_image(self) -> bool:
+ return self.dataset_format == DatasetFormat.IMAGE.value
+
+ def set_meta_info(self, meta: DatasetMetaInfo):
+ if meta is None:
+ meta = DatasetMetaInfo()
+ self.meta_info = text_format.MessageToString(meta)
+
+ def get_meta_info(self) -> DatasetMetaInfo:
+ meta = DatasetMetaInfo()
+ if self.meta_info is not None:
+ meta = text_format.Parse(self.meta_info, DatasetMetaInfo())
+ return meta
+
+ def get_single_batch(self) -> DataBatch:
+ """Get the single batch of this dataset
+
+ Returns:
+ DataBatch: the corresponding data batch
+
+ Raises:
+ TypeError: when there's no data batch or more than one data batch
+ """
+ if not self.data_batches:
+ raise TypeError(f'there is no data_batch for this dataset {self.id}')
+ if len(self.data_batches) != 1:
+ raise TypeError(f'there is more than one data_batch for this dataset {self.id}')
+ return self.data_batches[0]
+
+
+class DataSource(Dataset):
+
+ __mapper_args__ = {'polymorphic_identity': DatasetKindV2.SOURCE}
+
+ def to_proto(self) -> dataset_pb2.DataSource:
+ meta_info = self.get_meta_info()
+ return dataset_pb2.DataSource(
+ id=self.id,
+ comment=self.comment,
+ uuid=self.uuid,
+ name=self.name,
+ type=meta_info.datasource_type,
+ url=self.path,
+ created_at=to_timestamp(self.created_at),
+ project_id=self.project_id,
+ is_user_upload=meta_info.is_user_upload,
+ is_user_export=meta_info.is_user_export,
+ creator_username=self.creator_username,
+ dataset_format=DatasetFormat(self.dataset_format).name,
+ store_format=self.store_format.name if self.store_format else '',
+ dataset_type=self.dataset_type.name,
+ )
+
+
+class ProcessedDataset(Dataset):
+
+ __mapper_args__ = {'polymorphic_identity': DatasetKindV2.PROCESSED}
+
+
+class ExportedDataset(Dataset):
+
+ __mapper_args__ = {'polymorphic_identity': DatasetKindV2.EXPORTED}
+
+
+class InternalProcessedDataset(Dataset):
+
+ __mapper_args__ = {'polymorphic_identity': DatasetKindV2.INTERNAL_PROCESSED}
+
+ def get_frontend_state(self) -> ResourceState:
+ # we just hack the internal_processed dataset state to succeeded for now
+ return ResourceState.SUCCEEDED
+
+
+class DatasetJob(SoftDeleteModel, db.Model):
+ """ DatasetJob is the abstraction of a basic action inside the dataset module.
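+ Each DatasetJob links one input dataset (or data source) to one output dataset and records which participant
+ coordinates it (coordinator_id is 0 on the coordinator itself); the use cases below are illustrative records.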
+
+ UseCase 1: An import job from a data source to a dataset
+ {
+ "id": 1,
+ "uuid": u456,
+ "input_dataset_id": 5,
+ "output_dataset_id": 4,
+ "kind": "import_datasource",
+ "global_configs": map,
+ "workflow_id": 6,
+ "coordinator_id": 0,
+ }
+
+ UseCase 2: A data join job between participants
+ coordinator:
+ {
+ "id": 1,
+ "uuid": u456,
+ "input_dataset_id": 2,
+ "output_dataset_id": 4,
+ "kind": "rsa_psi_data_join",
+ "global_configs": map,
+ "coordinator_id": 0,
+ "workflow_id": 6,
+ }
+
+ participant:
+ {
+ "id": 1,
+ "uuid": u456,
+ "input_dataset_id": 4,
+ "output_dataset_id": 7,
+ "kind": "rsa_psi_data_join",
+ "global_configs": "", # pulled from the coordinator
+ "coordinator_id": 1,
+ "workflow_id": 7,
+ }
+ """
+ __tablename__ = 'dataset_jobs_v2'
+ __table_args__ = (UniqueConstraint('uuid', name='uniq_dataset_job_uuid'), default_table_args('dataset_jobs_v2'))
+
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id of dataset job')
+ uuid = db.Column(db.String(255), nullable=False, comment='dataset job uuid')
+ name = db.Column(db.String(255), nullable=True, comment='dataset job name')
+
+ # state is updated to stay the same as the newest dataset_job_stage
+ state = db.Column(db.Enum(DatasetJobState, length=64, native_enum=False, create_constraint=False),
+ nullable=False,
+ default=DatasetJobState.PENDING,
+ comment='dataset job state')
+ project_id = db.Column(db.Integer, nullable=False, comment='project id')
+
+ # If multiple dataset/datasource input is supported, the following two columns will be deprecated.
+ # Instead, a new table will be introduced.
+ input_dataset_id = db.Column(db.Integer, nullable=False, comment='input dataset id')
+ output_dataset_id = db.Column(db.Integer, nullable=False, comment='output dataset id')
+
+ kind = db.Column(db.Enum(DatasetJobKind, length=128, native_enum=False, create_constraint=False),
+ nullable=False,
+ comment='dataset job kind')
+ # If batch update mode is supported, this column will be deprecated.
+ # Instead, a new table called DatasetStage and a new Column called Context will be introduced.
+ workflow_id = db.Column(db.Integer, nullable=True, default=0, comment='relating workflow id')
+ context = db.Column(db.Text(), nullable=True, default=None, comment='context info of dataset job')
+
+ global_configs = db.Column(
+ db.Text(), comment='global configs of this job including related participants only appear in coordinator')
+ coordinator_id = db.Column(db.Integer, nullable=False, default=0, comment='participant id of this job coordinator')
+
+ created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created time')
+ updated_at = db.Column(db.DateTime(timezone=True),
+ server_default=func.now(),
+ onupdate=func.now(),
+ comment='updated time')
+ started_at = db.Column(db.DateTime(timezone=True), comment='started_at')
+ finished_at = db.Column(db.DateTime(timezone=True), comment='finished_at')
+
+ # cron_job will use time_range to infer event_time for the next data_batch
+ time_range = db.Column(db.Interval(native=False), nullable=True, comment='time_range to create new job_stage')
+ # cron_job will read event_time to get the current data_batch,
+ # and update it to event_time + time_range when the next data_batch is created
+ event_time = db.Column(db.DateTime(timezone=True), nullable=True, comment='event_time for current data_batch')
+
+ # scheduler_state will be filtered and changed by job_scheduler_v2
+ scheduler_state = db.Column(db.Enum(DatasetJobSchedulerState, length=64, native_enum=False,
+ create_constraint=False),
+ nullable=True,
+ default=DatasetJobSchedulerState.PENDING,
+ comment='dataset job scheduler state')
+
+ creator_username = db.Column(db.String(255), nullable=True, comment='creator username')
+
+ workflow = db.relationship('Workflow', primaryjoin='foreign(DatasetJob.workflow_id) == Workflow.id')
+ project = db.relationship('Project', primaryjoin='foreign(DatasetJob.project_id) == Project.id')
+ input_dataset = db.relationship('Dataset', primaryjoin='foreign(DatasetJob.input_dataset_id) == Dataset.id')
+
+ @property
+ def output_dataset(self):
+ return None if not db.object_session(self) else db.object_session(self).query(Dataset).filter(
+ Dataset.id == self.output_dataset_id).execution_options(include_deleted=True).first()
+
+ dataset_job_stages = db.relationship(
+ 'DatasetJobStage',
+ order_by='desc(DatasetJobStage.created_at)',
+ primaryjoin='DatasetJob.id == foreign(DatasetJobStage.dataset_job_id)',
+ # To disable the warning of back_populates
+ overlaps='dataset_job')
+
+ def get_global_configs(self) -> Optional[DatasetJobGlobalConfigs]:
+ # For a participant, global_configs is empty text.
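+ # global_configs is stored as protobuf text format (see set_global_configs), so empty text is treated as absent.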
+ if self.global_configs is None or len(self.global_configs) == 0: return None - proto = dataset_pb2.DataBatch() - proto.ParseFromString(self.details) + return text_format.Parse(self.global_configs, DatasetJobGlobalConfigs()) + + def set_global_configs(self, global_configs: DatasetJobGlobalConfigs): + self.global_configs = text_format.MessageToString(global_configs) + + def get_context(self) -> DatasetJobContext: + context_pb = DatasetJobContext() + if self.context: + context_pb = text_format.Parse(self.context, context_pb) + return context_pb + + def set_context(self, context: DatasetJobContext): + self.context = text_format.MessageToString(context) + + def set_scheduler_message(self, scheduler_message: str): + context_pb = self.get_context() + context_pb.scheduler_message = scheduler_message + self.set_context(context=context_pb) + + @property + def time_range_pb(self) -> TimeRange: + time_range_pb = TimeRange() + if self.is_daily_cron(): + time_range_pb.days = self.time_range.days + elif self.is_hourly_cron(): + # convert seconds to hours + time_range_pb.hours = int(self.time_range.seconds / 3600) + return time_range_pb + + def to_proto(self) -> dataset_pb2.DatasetJob: + context = self.get_context() + proto = dataset_pb2.DatasetJob( + id=self.id, + uuid=self.uuid, + name=self.name, + project_id=self.project_id, + workflow_id=self.workflow_id, + coordinator_id=self.coordinator_id, + kind=self.kind.value, + state=self.state.name, + global_configs=self.get_global_configs(), + input_data_batch_num_example=context.input_data_batch_num_example, + output_data_batch_num_example=context.output_data_batch_num_example, + has_stages=context.has_stages, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + started_at=to_timestamp(self.started_at) if self.started_at else 0, + finished_at=to_timestamp(self.finished_at) if self.finished_at else 0, + creator_username=self.creator_username, + scheduler_state=self.scheduler_state.name if self.scheduler_state else '', + time_range=self.time_range_pb, + scheduler_message=context.scheduler_message, + ) + if self.output_dataset: + proto.result_dataset_uuid = self.output_dataset.uuid + proto.result_dataset_name = self.output_dataset.name + if self.workflow_id: + proto.is_ready = True return proto + + def to_ref(self) -> dataset_pb2.DatasetJobRef: + return dataset_pb2.DatasetJobRef( + uuid=self.uuid, + id=self.id, + name=self.name, + coordinator_id=self.coordinator_id, + project_id=self.project_id, + kind=self.kind.name, + result_dataset_id=self.output_dataset_id, + result_dataset_name=self.output_dataset.name if self.output_dataset else '', + state=self.state.name, + created_at=to_timestamp(self.created_at), + has_stages=self.get_context().has_stages, + creator_username=self.creator_username, + ) + + def is_coordinator(self) -> bool: + return self.coordinator_id == 0 + + def is_finished(self) -> bool: + return self.state in DATASET_JOB_FINISHED_STATE + + def is_cron(self) -> bool: + return self.time_range is not None + + def is_daily_cron(self) -> bool: + if self.time_range is None: + return False + return self.time_range.days > 0 + + def is_hourly_cron(self) -> bool: + if self.time_range is None: + return False + # hourly time_range is less than one day + return self.time_range.days == 0 + + +class DatasetJobStage(SoftDeleteModel, db.Model): + __tablename__ = 'dataset_job_stages_v2' + __table_args__ = (UniqueConstraint('uuid', + name='uniq_dataset_job_stage_uuid'), default_table_args('dataset_job_stages_v2')) + + id = 
+    uuid = db.Column(db.String(255), nullable=False, comment='dataset job stage uuid')
+    name = db.Column(db.String(255), nullable=True, comment='dataset job stage name')
+    state = db.Column(db.Enum(DatasetJobState, length=64, native_enum=False, create_constraint=False),
+                      nullable=False,
+                      default=DatasetJobState.PENDING,
+                      comment='dataset job stage state')
+    project_id = db.Column(db.Integer, nullable=False, comment='project id')
+    workflow_id = db.Column(db.Integer, nullable=True, default=0, comment='related workflow id')
+    dataset_job_id = db.Column(db.Integer, nullable=False, comment='dataset_job id')
+    data_batch_id = db.Column(db.Integer, nullable=False, comment='data_batch id')
+    event_time = db.Column(db.DateTime(timezone=True), nullable=True, comment='event_time of data upload')
+    # copy dataset_job.global_configs into the job_stage when the stage is created, if this side is the coordinator
+    global_configs = db.Column(
+        db.Text(), comment='global configs of this stage including related participants, only present on the coordinator')
+    created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created time')
+    updated_at = db.Column(db.DateTime(timezone=True),
+                           server_default=func.now(),
+                           onupdate=func.now(),
+                           comment='updated time')
+    started_at = db.Column(db.DateTime(timezone=True), comment='started_at')
+    finished_at = db.Column(db.DateTime(timezone=True), comment='finished_at')
+
+    # the dataset_job coordinator might be different from the dataset_job_stage coordinator
+    coordinator_id = db.Column(db.Integer,
+                               nullable=False,
+                               server_default=db.text('0'),
+                               comment='participant id of this dataset_job_stage, 0 if it is coordinator')
+
+    context = db.Column(db.Text(), nullable=True, default=None, comment='context info of dataset job stage')
+
+    workflow = db.relationship('Workflow', primaryjoin='foreign(DatasetJobStage.workflow_id) == Workflow.id')
+    project = db.relationship('Project', primaryjoin='foreign(DatasetJobStage.project_id) == Project.id')
+    dataset_job = db.relationship('DatasetJob', primaryjoin='foreign(DatasetJobStage.dataset_job_id) == DatasetJob.id')
+    data_batch = db.relationship('DataBatch', primaryjoin='foreign(DatasetJobStage.data_batch_id) == DataBatch.id')
+
+    def get_global_configs(self) -> Optional[DatasetJobGlobalConfigs]:
+        # For participants, global_configs is empty text.
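+        # Same convention as DatasetJob: an unset or empty value decodes to None.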
+ if self.global_configs is None or len(self.global_configs) == 0: + return None + return text_format.Parse(self.global_configs, DatasetJobGlobalConfigs()) + + def set_global_configs(self, global_configs: DatasetJobGlobalConfigs): + self.global_configs = text_format.MessageToString(global_configs) + + def is_finished(self) -> bool: + return self.state in DATASET_JOB_FINISHED_STATE + + def to_ref(self) -> dataset_pb2.DatasetJobStageRef: + return dataset_pb2.DatasetJobStageRef(id=self.id, + name=self.name, + dataset_job_id=self.dataset_job_id, + output_data_batch_id=self.data_batch_id, + project_id=self.project_id, + state=self.state.name, + created_at=to_timestamp(self.created_at), + kind=self.dataset_job.kind.name if self.dataset_job else '') + + def to_proto(self) -> dataset_pb2.DatasetJobStage: + context = self.get_context() + return dataset_pb2.DatasetJobStage(id=self.id, + name=self.name, + uuid=self.uuid, + dataset_job_id=self.dataset_job_id, + output_data_batch_id=self.data_batch_id, + workflow_id=self.workflow_id, + project_id=self.project_id, + state=self.state.name, + event_time=to_timestamp(self.event_time) if self.event_time else 0, + global_configs=self.get_global_configs(), + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + started_at=to_timestamp(self.started_at) if self.started_at else 0, + finished_at=to_timestamp(self.finished_at) if self.finished_at else 0, + dataset_job_uuid=self.dataset_job.uuid if self.dataset_job else None, + is_ready=self.workflow is not None, + kind=self.dataset_job.kind.name if self.dataset_job else '', + input_data_batch_num_example=context.input_data_batch_num_example, + output_data_batch_num_example=context.output_data_batch_num_example, + scheduler_message=context.scheduler_message) + + def get_context(self) -> DatasetJobStageContext: + context_pb = DatasetJobStageContext() + if self.context: + context_pb = text_format.Parse(self.context, context_pb) + return context_pb + + def set_context(self, context: DatasetJobStageContext): + self.context = text_format.MessageToString(context) + + def set_scheduler_message(self, scheduler_message: str): + context_pb = self.get_context() + context_pb.scheduler_message = scheduler_message + self.set_context(context=context_pb) + + def is_coordinator(self) -> bool: + return self.coordinator_id == 0 diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/models_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/models_test.py new file mode 100644 index 000000000..53406258d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/models_test.py @@ -0,0 +1,711 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from datetime import datetime, timedelta +import time +import unittest +from unittest.mock import MagicMock, PropertyMock, patch +from testing.common import NoWebServerTestCase + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import (DATASET_STATE_CONVERT_MAP_V2, Dataset, DataSource, DatasetFormat, + DatasetJobSchedulerState, DatasetJobStage, DatasetKindV2, ImportType, + PublishFrontendState, ResourceState, StoreFormat, DatasetType, + DatasetJob, DataSourceType, DatasetJobKind, DatasetJobState, + DataBatch) +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto import dataset_pb2, project_pb2 +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobStageContext, DatasetMetaInfo, DatasetJobConfig, \ + DatasetJobGlobalConfigs, TimeRange +from google.protobuf.struct_pb2 import Value + + +class DataBatchTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset = Dataset(id=1, + uuid=resource_uuid(), + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_kind=DatasetKindV2.RAW, + is_published=False, + import_type=ImportType.NO_COPY) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220701', + dataset_id=1, + path='/data/test/batch/20220701', + event_time=datetime.strptime('20220701', '%Y%m%d'), + file_size=100, + num_example=10, + num_feature=3, + latest_parent_dataset_job_stage_id=1) + session.add(data_batch) + session.commit() + + def test_to_proto(self): + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).get(1) + self.assertPartiallyEqual( + to_dict(data_batch.to_proto()), + { + 'id': 1, + 'name': '20220701', + 'dataset_id': 1, + 'path': '/data/test/batch/20220701', + 'event_time': to_timestamp(datetime.strptime('20220701', '%Y%m%d')), + 'file_size': 100, + 'num_example': 10, + 'num_feature': 3, + 'comment': '', + 'state': 'FAILED', + 'latest_parent_dataset_job_stage_id': 1, + 'latest_analyzer_dataset_job_stage_id': 0, + }, + ignore_fields=['created_at', 'updated_at'], + ) + + def test_is_available(self): + with db.session_scope() as session: + job_stage = DatasetJobStage(id=1, + uuid='job stage uuid', + name='default dataset job stage', + project_id=1, + workflow_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2012, 1, 15), + state=DatasetJobState.PENDING, + coordinator_id=0) + session.add(job_stage) + session.commit() + with db.session_scope() as session: + data_batch = session.query(DataBatch).get(1) + self.assertFalse(data_batch.is_available()) + job_stage = session.query(DatasetJobStage).get(1) + job_stage.state = DatasetJobState.SUCCEEDED + session.flush() + self.assertTrue(data_batch.is_available()) + + +class DatasetTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + default_dataset = Dataset(id=10, + uuid=resource_uuid(), + name='default 
dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_kind=DatasetKindV2.RAW, + is_published=False) + session.add(default_dataset) + session.commit() + + def test_dataset_meta_info(self): + meta_info = DatasetMetaInfo(value=100) + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + dataset.set_meta_info(meta_info) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + meta_info_current = dataset.get_meta_info() + self.assertEqual(meta_info_current, meta_info) + + def test_get_frontend_state(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job', + project_id=1, + input_dataset_id=1, + output_dataset_id=10, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + for state, front_state in DATASET_STATE_CONVERT_MAP_V2.items(): + dataset.parent_dataset_job.state = state + self.assertEqual(dataset.get_frontend_state(), front_state) + + def test_get_single_batch(self): + # test no batch + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + with self.assertRaises(TypeError) as cm: + dataset.get_single_batch() + self.assertEqual(cm.exception.args[0], 'there is no data_batch for this dataset 10') + + # test one batch + first_event_time = datetime(year=2000, month=1, day=1) + with db.session_scope() as session: + batch = DataBatch(dataset_id=10, event_time=first_event_time) + session.add(batch) + session.commit() + + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + batch = dataset.get_single_batch() + self.assertEqual(batch.event_time, first_event_time) + + # test two batch + second_event_time = datetime(year=2000, month=1, day=2) + with db.session_scope() as session: + batch = DataBatch(dataset_id=10, event_time=second_event_time) + session.add(batch) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + with self.assertRaises(TypeError) as cm: + dataset.get_single_batch() + self.assertEqual(cm.exception.args[0], 'there is more than one data_batch for this dataset 10') + + def test_to_proto(self): + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(10) + dataset.auth_status = AuthStatus.AUTHORIZED + dataset.set_participants_info(participants_info) + dataset_proto = dataset.to_proto() + self.assertPartiallyEqual( + to_dict(dataset_proto), + { + 'id': 10, + 'project_id': 1, + 'name': 'default dataset', + 'path': '/data/dataset/123', + 'comment': 'test comment', + 'dataset_format': 'TABULAR', + 'state_frontend': 'FAILED', + 'dataset_kind': 'RAW', + 'workflow_id': 0, + 'data_source': '', + 'file_size': 0, + 'num_example': 0, + 'num_feature': 0, + 'deleted_at': 0, + 'parent_dataset_job_id': 0, + 'analyzer_dataset_job_id': 0, + 'is_published': False, + 'value': 0, + 'schema_checkers': [], + 'creator_username': '', + 'import_type': 'COPY', + 'dataset_type': 'PSI', + 'store_format': 'TFRECORDS', + 'publish_frontend_state': 'UNPUBLISHED', + 
'auth_frontend_state': 'AUTH_PENDING', + 'local_auth_status': 'AUTHORIZED', + 'participants_info': { + 'participants_map': { + 'test_1': { + 'auth_status': 'PENDING', + 'name': '', + 'role': '', + 'state': '', + 'type': '', + }, + 'test_2': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '', + }, + } + }, + }, + ignore_fields=['uuid', 'created_at', 'updated_at'], + ) + + def test_parent_dataset_job(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job', + project_id=1, + input_dataset_id=1, + output_dataset_id=10, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job) + micro_dataset_job = DatasetJob(id=2, + uuid='micro_dataset_job', + project_id=1, + input_dataset_id=10, + output_dataset_id=10, + kind=DatasetJobKind.ANALYZER, + state=DatasetJobState.SUCCEEDED) + session.add(micro_dataset_job) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.parent_dataset_job.id, 1) + + def test_publish_frontend_state(self): + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.publish_frontend_state, PublishFrontendState.UNPUBLISHED) + dataset.is_published = True + dataset.ticket_status = TicketStatus.APPROVED + self.assertEqual(dataset.publish_frontend_state, PublishFrontendState.PUBLISHED) + dataset.ticket_status = TicketStatus.PENDING + self.assertEqual(dataset.publish_frontend_state, PublishFrontendState.TICKET_PENDING) + dataset.ticket_status = TicketStatus.DECLINED + self.assertEqual(dataset.publish_frontend_state, PublishFrontendState.TICKET_DECLINED) + + def test_updated_at(self): + with db.session_scope() as session: + data_batch = DataBatch(id=1, + name='20220701', + dataset_id=10, + path='/data/test/batch/20220701', + event_time=datetime(2022, 7, 1), + file_size=100, + num_example=10, + num_feature=3) + session.add(data_batch) + session.commit() + # make sure two batch have different updated_at time + time.sleep(1) + with db.session_scope() as session: + data_batch = DataBatch(id=2, + name='20220702', + dataset_id=10, + path='/data/test/batch/20220702', + event_time=datetime(2022, 7, 2), + file_size=100, + num_example=10, + num_feature=3) + session.add(data_batch) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + data_batch = session.query(DataBatch).get(2) + self.assertEqual(dataset.to_proto().updated_at, to_timestamp(data_batch.updated_at)) + + +class DataSourceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + default_datasource = DataSource(id=10, + uuid=resource_uuid(), + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + is_published=False, + creator_username='xiaohang', + store_format=StoreFormat.CSV, + dataset_format=DatasetFormat.TABULAR.value) + default_datasource.set_meta_info( + meta=DatasetMetaInfo(datasource_type=DataSourceType.HDFS.value, is_user_upload=False)) + session.add(default_datasource) + session.commit() + + def test_to_data_source(self): + with db.session_scope() as session: + dataset = session.query(DataSource).get(10) + data_source = dataset_pb2.DataSource(id=dataset.id, + uuid=dataset.uuid, + name=dataset.name, + type=DataSourceType.HDFS.value, + url=dataset.path, + 
created_at=to_timestamp(dataset.created_at), + project_id=dataset.project_id, + is_user_upload=False, + creator_username='xiaohang', + dataset_format='TABULAR', + store_format='CSV', + dataset_type='PSI', + comment='test comment') + self.assertEqual(dataset.to_proto(), data_source) + + +class InternalProcessedDatasetTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + default_dataset = Dataset(id=10, + uuid=resource_uuid(), + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_kind=DatasetKindV2.INTERNAL_PROCESSED, + is_published=False, + auth_status=AuthStatus.AUTHORIZED) + session.add(default_dataset) + session.commit() + + def test_get_frontend_state(self): + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(10) + self.assertEqual(dataset.get_frontend_state(), ResourceState.SUCCEEDED) + + +class DatasetJobTest(NoWebServerTestCase): + + def test_get_set_global_configs(self): + dataset_job = DatasetJob() + global_configs = DatasetJobGlobalConfigs() + global_configs.global_configs['test'].MergeFrom( + DatasetJobConfig(dataset_uuid=resource_uuid(), + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + dataset_job.set_global_configs(global_configs) + new_global_configs = dataset_job.get_global_configs() + self.assertEqual(new_global_configs, global_configs) + + @patch('fedlearner_webconsole.dataset.models.DatasetJob.output_dataset', new_callable=PropertyMock) + def test_to_proto(self, mock_output_dataset: MagicMock): + uuid = resource_uuid() + current_time = now() + dataset_job = DatasetJob(id=1, + name='test_dataset_job', + coordinator_id=2, + uuid=uuid, + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + created_at=current_time, + updated_at=current_time, + creator_username='test user', + scheduler_state=DatasetJobSchedulerState.PENDING) + global_configs = DatasetJobGlobalConfigs() + global_configs.global_configs['test'].MergeFrom( + DatasetJobConfig(dataset_uuid=resource_uuid(), + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + dataset_job.set_global_configs(global_configs) + dataset_job.set_scheduler_message(scheduler_message='调度信息 🐵') + + mock_output_dataset.return_value = None + dataset_job_pb = dataset_pb2.DatasetJob(id=1, + name='test_dataset_job', + coordinator_id=2, + uuid=uuid, + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT.value, + state=DatasetJobState.PENDING.value, + global_configs=global_configs, + created_at=to_timestamp(current_time), + updated_at=to_timestamp(current_time), + creator_username='test user', + scheduler_state=DatasetJobSchedulerState.PENDING.name, + time_range=TimeRange(), + scheduler_message='调度信息 🐵') + + self.assertEqual(dataset_job.to_proto(), dataset_job_pb) + + dataset_job.workflow = Workflow(id=1, uuid='workflow_uuid') + mock_output_dataset.return_value = Dataset(id=1, name='test_dataset', uuid='dataset_uuid') + dataset_job.workflow_id = 1 + dataset_job.output_dataset_id = 1 + dataset_job_pb.is_ready = True + 
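+            # the expected proto now reflects the attached workflow: workflow_id set and is_ready True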
dataset_job_pb.workflow_id = 1 + dataset_job_pb.result_dataset_name = 'test_dataset' + dataset_job_pb.result_dataset_uuid = 'dataset_uuid' + self.assertEqual(dataset_job.to_proto(), dataset_job_pb) + + context = dataset_job.get_context() + context.input_data_batch_num_example = 1000 + context.output_data_batch_num_example = 500 + dataset_job.set_context(context) + dataset_job_pb.input_data_batch_num_example = 1000 + dataset_job_pb.output_data_batch_num_example = 500 + self.assertEqual(dataset_job.to_proto(), dataset_job_pb) + + @patch('fedlearner_webconsole.dataset.models.DatasetJob.output_dataset', new_callable=PropertyMock) + def test_to_ref(self, mock_output_dataset: MagicMock): + uuid = resource_uuid() + output_dataset = Dataset(name='test_output_dataset', id=1) + dataset_job = DatasetJob(id=1, + name='test_dataset_job', + coordinator_id=2, + uuid=uuid, + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + output_dataset_id=1, + created_at=now(), + creator_username='test user') + mock_output_dataset.return_value = output_dataset + self.assertPartiallyEqual(to_dict(dataset_job.to_ref()), { + 'id': 1, + 'name': 'test_dataset_job', + 'coordinator_id': 2, + 'uuid': uuid, + 'project_id': 1, + 'kind': DatasetJobKind.DATA_ALIGNMENT.name, + 'state': DatasetJobState.PENDING.name, + 'result_dataset_id': 1, + 'result_dataset_name': 'test_output_dataset', + 'has_stages': False, + 'creator_username': 'test user', + }, + ignore_fields=['created_at']) + + def test_is_finished(self): + dataset_job = DatasetJob(uuid='uuid', + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + input_dataset_id=1, + output_dataset_id=2, + created_at=now()) + self.assertFalse(dataset_job.is_finished()) + dataset_job.state = DatasetJobState.RUNNING + self.assertFalse(dataset_job.is_finished()) + dataset_job.state = DatasetJobState.SUCCEEDED + self.assertTrue(dataset_job.is_finished()) + dataset_job.state = DatasetJobState.FAILED + self.assertTrue(dataset_job.is_finished()) + + def test_is_cron(self): + dataset_job = DatasetJob(id=1, + name='test_dataset_job', + coordinator_id=2, + uuid='uuid', + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING) + self.assertFalse(dataset_job.is_cron()) + dataset_job.time_range = timedelta(days=1) + self.assertTrue(dataset_job.is_cron()) + + def test_is_daily_cron(self): + dataset_job = DatasetJob(id=1, + name='test_dataset_job', + coordinator_id=2, + uuid='uuid', + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING) + self.assertFalse(dataset_job.is_daily_cron()) + dataset_job.time_range = timedelta(days=1) + self.assertTrue(dataset_job.is_daily_cron()) + dataset_job.time_range = timedelta(hours=1) + self.assertFalse(dataset_job.is_daily_cron()) + + def test_is_hourly_cron(self): + dataset_job = DatasetJob(id=1, + name='test_dataset_job', + coordinator_id=2, + uuid='uuid', + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING) + self.assertFalse(dataset_job.is_hourly_cron()) + dataset_job.time_range = timedelta(days=1) + self.assertFalse(dataset_job.is_hourly_cron()) + dataset_job.time_range = timedelta(hours=1) + self.assertTrue(dataset_job.is_hourly_cron()) + + def test_set_scheduler_message(self): + scheduler_message = '调度信息 🦻' + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + name='test_dataset_job', + input_dataset_id=1, + output_dataset_id=2, + coordinator_id=0, + uuid='uuid', + project_id=1, 
+ kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING) + dataset_job.set_scheduler_message(scheduler_message=scheduler_message) + session.add(dataset_job) + session.commit() + + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.get_context().scheduler_message, scheduler_message) + + +class DatasetJobStageTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset_job = DatasetJob(uuid='dataset_job uuid', + project_id=1, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + input_dataset_id=1, + output_dataset_id=2, + created_at=now()) + session.add(dataset_job) + job_stage = DatasetJobStage(id=1, + uuid='uuid_1', + name='default dataset job stage', + project_id=1, + workflow_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2012, 1, 15), + state=DatasetJobState.PENDING, + coordinator_id=0) + session.add(job_stage) + session.commit() + + def test_get_set_global_configs(self): + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + global_configs = DatasetJobGlobalConfigs() + global_configs.global_configs['test'].MergeFrom( + DatasetJobConfig(dataset_uuid=resource_uuid(), + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + job_stage.set_global_configs(global_configs) + new_global_configs = job_stage.get_global_configs() + self.assertEqual(new_global_configs, global_configs) + + def test_to_ref(self): + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + job_stage_ref = job_stage.to_ref() + self.assertEqual( + to_dict(job_stage_ref), { + 'id': 1, + 'name': 'default dataset job stage', + 'dataset_job_id': 1, + 'output_data_batch_id': 1, + 'project_id': 1, + 'state': DatasetJobState.PENDING.name, + 'created_at': to_timestamp(datetime(2012, 1, 14, 12, 0, 5)), + 'kind': DatasetJobKind.DATA_ALIGNMENT.name, + }) + + def test_to_proto(self): + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + context = DatasetJobStageContext(batch_stats_item_name='batch_stats_item_1', + input_data_batch_num_example=100, + output_data_batch_num_example=50, + scheduler_message='错误信息 ✖️') + job_stage.set_context(context=context) + job_stage_proto = job_stage.to_proto() + self.assertPartiallyEqual( + to_dict(job_stage_proto), + { + 'id': 1, + 'name': 'default dataset job stage', + 'uuid': 'uuid_1', + 'dataset_job_id': 1, + 'output_data_batch_id': 1, + 'workflow_id': 1, + 'project_id': 1, + 'state': DatasetJobState.PENDING.name, + 'event_time': to_timestamp(datetime(2012, 1, 15)), + 'created_at': to_timestamp(datetime(2012, 1, 14, 12, 0, 5)), + 'dataset_job_uuid': 'dataset_job uuid', + 'started_at': 0, + 'finished_at': 0, + 'is_ready': False, + 'kind': DatasetJobKind.DATA_ALIGNMENT.name, + 'input_data_batch_num_example': 100, + 'output_data_batch_num_example': 50, + 'scheduler_message': '错误信息 ✖️', + }, + ignore_fields=['updated_at'], + ) + + def test_set_and_get_context(self): + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + empty_context = job_stage.get_context() + 
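+            # an unset context column decodes to a default (empty) proto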
self.assertEqual(empty_context, DatasetJobStageContext()) + context = DatasetJobStageContext(batch_stats_item_name='batch_stats_item_1', + input_data_batch_num_example=100, + output_data_batch_num_example=50) + job_stage.set_context(context=context) + text_context = 'batch_stats_item_name: "batch_stats_item_1"\n' \ + 'input_data_batch_num_example: 100\noutput_data_batch_num_example: 50\n' + self.assertEqual(job_stage.context, text_context) + target_context = job_stage.get_context() + self.assertEqual(target_context, context) + + def test_set_scheduler_message(self): + scheduler_message = '错误信息 ❌' + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + empty_context = job_stage.get_context() + self.assertEqual(empty_context, DatasetJobStageContext()) + job_stage.set_scheduler_message(scheduler_message=scheduler_message) + session.commit() + + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + self.assertEqual(job_stage.get_context().scheduler_message, scheduler_message) + + def test_is_coordinator(self): + with db.session_scope() as session: + job_stage: DatasetJobStage = session.query(DatasetJobStage).get(1) + self.assertTrue(job_stage.is_coordinator()) + job_stage.coordinator_id = 1 + self.assertFalse(job_stage.is_coordinator()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/BUILD.bazel new file mode 100644 index 000000000..672d3773a --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/BUILD.bazel @@ -0,0 +1,250 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "scheduler", + srcs = [ + "dataset_long_period_scheduler.py", + "dataset_short_period_scheduler.py", + ], + imports = ["../../.."], + deps = [ + ":consts_lib", + ":executors_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:local_controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "dataset_long_period_scheduler_test", + size = "small", + srcs = [ + "dataset_long_period_scheduler_test.py", + ], + imports = ["../../.."], + main = "dataset_long_period_scheduler_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + ":scheduler", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "dataset_short_period_scheduler_test", + size = "small", + srcs = [ + 
"dataset_short_period_scheduler_test.py", + ], + imports = ["../../.."], + main = "dataset_short_period_scheduler_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + ":scheduler", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "base_executor_lib", + srcs = [ + "base_executor.py", + ], + imports = ["../../.."], + deps = [ + ":consts_lib", + ], +) + +py_library( + name = "executors_lib", + srcs = [ + "chained_executor.py", + "cron_dataset_job_executor.py", + "dataset_job_executor.py", + "pending_dataset_job_stage_executor.py", + "running_dataset_job_stage_executor.py", + "update_auth_status_executor.py", + ], + imports = ["../../.."], + deps = [ + ":base_executor_lib", + ":consts_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/cleanup:models_lib", + "//web_console_v2/api/fedlearner_webconsole/cleanup:services_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:common_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:local_controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:resource_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "chained_executor_test", + size = "small", + srcs = [ + "chained_executor_test.py", + ], + imports = ["../../.."], + main = "chained_executor_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + "//web_console_v2/api/testing:fake_lib", + ], +) + +py_test( + name = "cron_dataset_job_executor_test", + size = "small", + srcs = [ + "cron_dataset_job_executor_test.py", + ], + imports = ["../../.."], + main = "cron_dataset_job_executor_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "update_auth_status_executor_test", + size = "small", + srcs = [ + "update_auth_status_executor_test.py", + ], + imports = ["../../.."], + main = "update_auth_status_executor_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "dataset_job_executor_test", + size = "medium", + srcs = [ + "dataset_job_executor_test.py", + ], + imports = ["../../.."], + main = "dataset_job_executor_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "pending_dataset_job_stage_executor_test", + size = "small", + srcs = [ + "pending_dataset_job_stage_executor_test.py", + ], + imports = ["../../.."], + main = "pending_dataset_job_stage_executor_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "running_dataset_job_stage_executor_test", + size = "medium", + srcs = [ + "running_dataset_job_stage_executor_test.py", + ], + imports = ["../../.."], + main = "running_dataset_job_stage_executor_test.py", + deps = [ + ":consts_lib", + ":executors_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "consts_lib", + srcs = [ + "consts.py", + ], + imports = ["../../.."], +) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/base_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/base_executor.py new file mode 100644 index 000000000..7f5f3c9ad --- 
/dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/base_executor.py
@@ -0,0 +1,40 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+from typing import List
+
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult
+
+
+class BaseExecutor(metaclass=abc.ABCMeta):
+
+    @abc.abstractmethod
+    def get_item_ids(self) -> List[int]:
+        """Gets the ids of all items this executor should process.
+
+        Returns:
+            List[int]: ids of all items
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def run_item(self, item_id: int) -> ExecutorResult:
+        """Processes the item with the given id.
+
+        Returns:
+            ExecutorResult
+        """
+        raise NotImplementedError()
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/chained_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/chained_executor.py
new file mode 100644
index 000000000..56335b3e7
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/chained_executor.py
@@ -0,0 +1,57 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorType, ExecutorResult
+from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor
+from fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor import CronDatasetJobExecutor
+from fedlearner_webconsole.dataset.scheduler.update_auth_status_executor import UpdateAuthStatusExecutor
+from fedlearner_webconsole.dataset.scheduler.dataset_job_executor import DatasetJobExecutor
+from fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor import PendingDatasetJobStageExecutor
+from fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor import RunningDatasetJobStageExecutor
+from fedlearner_webconsole.proto.composer_pb2 import ExecutorResults
+
+
+def _get_executor(executor_type: ExecutorType) -> BaseExecutor:
+    executor_map = {
+        ExecutorType.CRON_DATASET_JOB: CronDatasetJobExecutor,
+        ExecutorType.UPDATE_AUTH_STATUS: UpdateAuthStatusExecutor,
+        ExecutorType.DATASET_JOB: DatasetJobExecutor,
+        ExecutorType.PENDING_DATASET_JOB_STAGE: PendingDatasetJobStageExecutor,
+        ExecutorType.RUNNING_DATASET_JOB_STAGE: RunningDatasetJobStageExecutor,
+    }
+    return executor_map.get(executor_type)()
+
+
+def run_executor(executor_type: ExecutorType) -> ExecutorResults:
+    executor = _get_executor(executor_type=executor_type)
+    item_ids = executor.get_item_ids()
+    succeeded_items = []
+    failed_items = []
+    skip_items = []
+    for item_id in item_ids:
+        try:
+            executor_result = executor.run_item(item_id=item_id)
+            if executor_result == ExecutorResult.SUCCEEDED:
+                succeeded_items.append(item_id)
+            elif executor_result == ExecutorResult.FAILED:
+                failed_items.append(item_id)
+            else:
+                skip_items.append(item_id)
+        except Exception:  # pylint: disable=broad-except
+            logging.exception(f'[Dataset ChainedExecutor] failed to run {item_id}, executor_type: {executor_type.name}')
+            failed_items.append(item_id)
+    return ExecutorResults(succeeded_item_ids=succeeded_items, failed_item_ids=failed_items, skip_item_ids=skip_items)
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/chained_executor_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/chained_executor_test.py
new file mode 100644
index 000000000..1dfd5ff90
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/chained_executor_test.py
@@ -0,0 +1,36 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+from unittest.mock import MagicMock, patch
+from fedlearner_webconsole.dataset.scheduler.chained_executor import run_executor
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorType
+from testing.dataset import FakeExecutor
+
+
+class ChainedExecutorTest(unittest.TestCase):
+
+    @patch('fedlearner_webconsole.dataset.scheduler.chained_executor._get_executor')
+    def test_run_executor(self, mock_get_executor: MagicMock):
+        mock_get_executor.return_value = FakeExecutor()
+        executor_results = run_executor(executor_type=ExecutorType.CRON_DATASET_JOB)
+        self.assertEqual(executor_results.succeeded_item_ids, [1])
+        self.assertEqual(executor_results.failed_item_ids, [2, 4])
+        self.assertEqual(executor_results.skip_item_ids, [3])
+        mock_get_executor.assert_called_once_with(executor_type=ExecutorType.CRON_DATASET_JOB)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/consts.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/consts.py
new file mode 100644
index 000000000..278f93852
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/consts.py
@@ -0,0 +1,30 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import enum
+
+
+class ExecutorType(enum.Enum):
+    CRON_DATASET_JOB = 'CRON_DATASET_JOB'
+    UPDATE_AUTH_STATUS = 'UPDATE_AUTH_STATUS'
+    PENDING_DATASET_JOB_STAGE = 'PENDING_DATASET_JOB_STAGE'
+    RUNNING_DATASET_JOB_STAGE = 'RUNNING_DATASET_JOB_STAGE'
+    DATASET_JOB = 'DATASET_JOB'
+
+
+class ExecutorResult(enum.Enum):
+    SUCCEEDED = 'SUCCEEDED'
+    FAILED = 'FAILED'
+    SKIP = 'SKIP'
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/cron_dataset_job_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/cron_dataset_job_executor.py
new file mode 100644
index 000000000..9f941dc46
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/cron_dataset_job_executor.py
@@ -0,0 +1,140 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import List, Optional
+from datetime import datetime, timezone
+import logging
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult
+from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetJob, DatasetJobSchedulerState, \
+    DatasetKindV2
+from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController
+from fedlearner_webconsole.dataset.util import get_oldest_daily_folder_time, parse_event_time_to_daily_folder_name, \
+    check_batch_folder_ready, get_oldest_hourly_folder_time, parse_event_time_to_hourly_folder_name, \
+    get_hourly_folder_not_ready_err_msg, get_daily_folder_not_ready_err_msg, get_hourly_batch_not_ready_err_msg, \
+    get_daily_batch_not_ready_err_msg, get_certain_folder_not_ready_err_msg, get_certain_batch_not_ready_err_msg, \
+    get_cron_succeeded_msg
+from fedlearner_webconsole.dataset.auth_service import AuthService
+from fedlearner_webconsole.utils.pp_datetime import now
+
+
+class CronDatasetJobExecutor(BaseExecutor):
+
+    def _get_next_event_time(self, session: Session, runnable_dataset_job: DatasetJob) -> Optional[datetime]:
+        """Gets the next event_time.
+
+        1. If the current event_time is None, the next event_time is the oldest event_time of the input_dataset
+        2. Otherwise, the next event_time is event_time + time_range
+        """
+        if runnable_dataset_job.event_time is None:
+            input_dataset: Dataset = runnable_dataset_job.input_dataset
+            if input_dataset.dataset_kind == DatasetKindV2.SOURCE:
+                if runnable_dataset_job.is_hourly_cron():
+                    return get_oldest_hourly_folder_time(input_dataset.path)
+                return get_oldest_daily_folder_time(input_dataset.path)
+            oldest_data_batch = session.query(DataBatch).filter(
+                DataBatch.dataset_id == runnable_dataset_job.input_dataset_id).order_by(
+                    DataBatch.event_time.asc()).first()
+            return oldest_data_batch.event_time if oldest_data_batch else None
+        return runnable_dataset_job.event_time + runnable_dataset_job.time_range
+
+    def _should_run(self, session: Session, runnable_dataset_job: DatasetJob, next_event_time: datetime) -> bool:
+        """Checks dependencies to decide whether the next stage should be created.
+
+        1. if input_dataset is a data_source, check that the batch folder and its _SUCCESS file exist
+        2. if input_dataset is a dataset, check that the data_batch exists and its state is SUCCEEDED
+        """
+        input_dataset: Dataset = runnable_dataset_job.input_dataset
+        if input_dataset.dataset_kind == DatasetKindV2.SOURCE:
+            if runnable_dataset_job.is_hourly_cron():
+                batch_name = parse_event_time_to_hourly_folder_name(next_event_time)
+            else:
+                batch_name = parse_event_time_to_daily_folder_name(next_event_time)
+            return check_batch_folder_ready(folder=input_dataset.path, batch_name=batch_name)
+        data_batch: DataBatch = session.query(DataBatch).filter(
+            DataBatch.dataset_id == runnable_dataset_job.input_dataset_id).filter(
+                DataBatch.event_time == next_event_time).first()
+        if data_batch is None:
+            return False
+        return data_batch.is_available()
+
+    def get_item_ids(self) -> List[int]:
+        with db.session_scope() as session:
+            runnable_dataset_job_ids = session.query(
+                DatasetJob.id).filter(DatasetJob.scheduler_state == DatasetJobSchedulerState.RUNNABLE).all()
+            return [runnable_dataset_job_id for runnable_dataset_job_id, *_ in runnable_dataset_job_ids]
+
+    def run_item(self, item_id: int) -> ExecutorResult:
+        with db.session_scope() as session:
+            # we set isolation_level to SERIALIZABLE to make sure state won't be changed within this session
+            session.connection(execution_options={'isolation_level': 'SERIALIZABLE'})
+            runnable_dataset_job: DatasetJob = session.query(DatasetJob).get(item_id)
+            if runnable_dataset_job.scheduler_state != DatasetJobSchedulerState.RUNNABLE:
+                logging.warning('dataset_job scheduler_state is not runnable, ' \
+                    f'dataset_job id: {item_id}')
+                return ExecutorResult.SKIP
+            # check authorization
+            if not AuthService(session=session, dataset_job=runnable_dataset_job).check_participants_authorized():
+                message = '[cron_dataset_job_executor] still waiting for participants to authorize, ' \
+                    f'dataset_job_id: {item_id}'
+                logging.warning(message)
+                return ExecutorResult.SKIP
+
+            next_event_time = self._get_next_event_time(session=session, runnable_dataset_job=runnable_dataset_job)
+            if next_event_time is None:
+                if runnable_dataset_job.input_dataset.dataset_kind == DatasetKindV2.SOURCE:
+                    logging.warning(f'input_dataset has no matching streaming folder, dataset_job id: {item_id}')
+                    err_msg = get_hourly_folder_not_ready_err_msg() if runnable_dataset_job.is_hourly_cron() \
+                        else get_daily_folder_not_ready_err_msg()
+                    runnable_dataset_job.set_scheduler_message(scheduler_message=err_msg)
+                else:
+                    logging.warning(f'input_dataset has no matching batch, dataset_job id: {item_id}')
+                    err_msg = get_hourly_batch_not_ready_err_msg() if runnable_dataset_job.is_hourly_cron() \
+                        else get_daily_batch_not_ready_err_msg()
+                    runnable_dataset_job.set_scheduler_message(scheduler_message=err_msg)
+                session.commit()
+                return ExecutorResult.SKIP
+            # if next_event_time is 20220801, we won't schedule it until 2022-08-01 00:00:00
+            if next_event_time.replace(tzinfo=timezone.utc) > now():
+                return ExecutorResult.SKIP
+            next_batch_name = parse_event_time_to_hourly_folder_name(event_time=next_event_time) \
+                if runnable_dataset_job.is_hourly_cron() \
+                else parse_event_time_to_daily_folder_name(event_time=next_event_time)
+            if not self._should_run(
+                    session=session, runnable_dataset_job=runnable_dataset_job, next_event_time=next_event_time):
+                if runnable_dataset_job.input_dataset.dataset_kind == DatasetKindV2.SOURCE:
+                    runnable_dataset_job.set_scheduler_message(scheduler_message=get_certain_folder_not_ready_err_msg(
+                        folder_name=next_batch_name))
+                else:
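+                    # dataset input: report the specific batch (rather than a folder) as not ready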
+                    runnable_dataset_job.set_scheduler_message(scheduler_message=get_certain_batch_not_ready_err_msg(
+                        batch_name=next_batch_name))
+                logging.info(
+                    f'[cron_dataset_job_executor] dataset job {item_id} should not run yet, ' \
+                    f'next_event_time: {next_event_time.strftime("%Y%m%d")}'
+                )
+                session.commit()
+                return ExecutorResult.SKIP
+            DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage_as_coordinator(
+                dataset_job_id=item_id,
+                global_configs=runnable_dataset_job.get_global_configs(),
+                event_time=next_event_time)
+            runnable_dataset_job.event_time = next_event_time
+            runnable_dataset_job.set_scheduler_message(scheduler_message=get_cron_succeeded_msg(
+                batch_name=next_batch_name))
+            session.commit()
+            return ExecutorResult.SUCCEEDED
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/cron_dataset_job_executor_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/cron_dataset_job_executor_test.py
new file mode 100644
index 000000000..c95ca9f1e
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/cron_dataset_job_executor_test.py
@@ -0,0 +1,365 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# pylint: disable=protected-access
+import unittest
+from unittest.mock import patch, MagicMock
+from datetime import datetime, timedelta
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.dataset.models import DataBatch, ResourceState, Dataset, DatasetJob, \
+    DatasetJobKind, DatasetJobSchedulerState, DatasetJobStage, DatasetJobState, DatasetKindV2, DatasetType
+from fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor import CronDatasetJobExecutor
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult
+from fedlearner_webconsole.proto import dataset_pb2
+from testing.no_web_server_test_case import NoWebServerTestCase
+
+
+class CronDatasetJobExecutorTest(NoWebServerTestCase):
+    _PROJECT_ID = 1
+    _WORKFLOW_ID = 1
+    _INPUT_DATASET_ID = 1
+    _OUTPUT_DATASET_ID = 2
+
+    def test_get_item_ids(self):
+        with db.session_scope() as session:
+            dataset_job_1 = DatasetJob(id=1,
+                                       uuid='dataset_job_1',
+                                       project_id=self._PROJECT_ID,
+                                       input_dataset_id=self._INPUT_DATASET_ID,
+                                       output_dataset_id=self._OUTPUT_DATASET_ID,
+                                       kind=DatasetJobKind.RSA_PSI_DATA_JOIN,
+                                       state=DatasetJobState.PENDING,
+                                       coordinator_id=0,
+                                       workflow_id=self._WORKFLOW_ID,
+                                       scheduler_state=DatasetJobSchedulerState.RUNNABLE,
+                                       time_range=timedelta(days=1))
+            dataset_job_1.set_global_configs(
+                dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()}))
+            session.add(dataset_job_1)
+            dataset_job_2 = DatasetJob(id=2,
+                                       uuid='dataset_job_2',
+                                       project_id=self._PROJECT_ID,
+                                       input_dataset_id=self._INPUT_DATASET_ID,
+                                       output_dataset_id=3,
+                                       kind=DatasetJobKind.RSA_PSI_DATA_JOIN,
+                                       state=DatasetJobState.PENDING,
+                                       coordinator_id=0,
+                                       workflow_id=self._WORKFLOW_ID,
+ 
scheduler_state=DatasetJobSchedulerState.STOPPED, + time_range=timedelta(days=1)) + dataset_job_2.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_2) + dataset_job_3 = DatasetJob(id=3, + uuid='dataset_job_3', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=4, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.PENDING, + time_range=timedelta(days=1)) + dataset_job_3.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_3) + session.commit() + cron_dataset_job_executor = CronDatasetJobExecutor() + self.assertEqual(cron_dataset_job_executor.get_item_ids(), [1]) + + + @patch( + 'fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.CronDatasetJobExecutor.'\ + '_get_next_event_time' + ) + @patch('fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.CronDatasetJobExecutor._should_run') + def test_run_item(self, mock_should_run: MagicMock, mock_get_next_event_time: MagicMock): + with db.session_scope() as session: + dataset_job_1 = DatasetJob(id=1, + uuid='dataset_job_1', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._OUTPUT_DATASET_ID, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.RUNNABLE, + time_range=timedelta(days=1)) + dataset_job_1.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_1) + dataset_job_2 = DatasetJob(id=2, + uuid='dataset_job_2', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=3, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.STOPPED, + time_range=timedelta(days=1)) + dataset_job_2.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_2) + input_dataset = Dataset(id=self._INPUT_DATASET_ID, + uuid='dataset_uuid', + name='default dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(input_dataset) + output_dataset = Dataset(id=self._OUTPUT_DATASET_ID, + uuid='dataset_uuid', + name='default dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(output_dataset) + session.commit() + + cron_dataset_job_executor = CronDatasetJobExecutor() + + # test next_event_time bigger than now + mock_should_run.return_value = True + mock_get_next_event_time.return_value = datetime(2100, 1, 1) + executor_result = cron_dataset_job_executor.run_item(item_id=1) + self.assertEqual(executor_result, ExecutorResult.SKIP) + mock_get_next_event_time.assert_called_once() + + # test should_run is false + mock_should_run.reset_mock() + mock_get_next_event_time.reset_mock() + mock_should_run.return_value = False + 
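+        # when _should_run is False the executor skips and records a batch-not-ready scheduler message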
mock_get_next_event_time.return_value = datetime(2022, 1, 1)
+        executor_result = cron_dataset_job_executor.run_item(item_id=1)
+        self.assertEqual(executor_result, ExecutorResult.SKIP)
+        mock_get_next_event_time.assert_called_once()
+        with db.session_scope() as session:
+            dataset_job = session.query(DatasetJob).get(1)
+            # message: 'data batch 20220101 check failed, please confirm the batch naming format and status'
+            self.assertEqual(dataset_job.get_context().scheduler_message, '数据批次20220101检查失败,请确认该批次命名格式及状态')
+
+        # test should_run is True
+        mock_should_run.reset_mock()
+        mock_get_next_event_time.reset_mock()
+        mock_should_run.return_value = True
+        mock_get_next_event_time.return_value = datetime(2022, 1, 1)
+        executor_result = cron_dataset_job_executor.run_item(item_id=1)
+        self.assertEqual(executor_result, ExecutorResult.SUCCEEDED)
+        mock_get_next_event_time.assert_called_once()
+
+        with db.session_scope() as session:
+            data_batch = session.query(DataBatch).get(1)
+            self.assertEqual(data_batch.event_time, datetime(2022, 1, 1))
+            dataset_job_stage = session.query(DatasetJobStage).get(1)
+            self.assertEqual(dataset_job_stage.event_time, datetime(2022, 1, 1))
+            dataset_job = session.query(DatasetJob).get(1)
+            self.assertEqual(dataset_job.event_time, datetime(2022, 1, 1))
+            # message: 'successfully launched the processing task for batch 20220101'
+            self.assertEqual(dataset_job.get_context().scheduler_message, '已成功发起20220101批次处理任务')
+
+    @patch('fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.get_oldest_hourly_folder_time')
+    @patch('fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.get_oldest_daily_folder_time')
+    def test_get_next_event_time(self, mock_get_oldest_daily_folder_time: MagicMock,
+                                 mock_get_oldest_hourly_folder_time: MagicMock):
+        with db.session_scope() as session:
+            dataset_job = DatasetJob(id=1,
+                                     uuid='dataset_job_1',
+                                     project_id=self._PROJECT_ID,
+                                     input_dataset_id=self._INPUT_DATASET_ID,
+                                     output_dataset_id=self._OUTPUT_DATASET_ID,
+                                     kind=DatasetJobKind.RSA_PSI_DATA_JOIN,
+                                     state=DatasetJobState.PENDING,
+                                     coordinator_id=0,
+                                     workflow_id=self._WORKFLOW_ID,
+                                     scheduler_state=DatasetJobSchedulerState.RUNNABLE,
+                                     time_range=timedelta(days=1))
+            session.add(dataset_job)
+            input_dataset = Dataset(id=self._INPUT_DATASET_ID,
+                                    uuid='dataset_uuid',
+                                    name='default dataset',
+                                    dataset_type=DatasetType.STREAMING,
+                                    comment='test comment',
+                                    path='/data/dataset/123',
+                                    project_id=self._PROJECT_ID,
+                                    dataset_kind=DatasetKindV2.SOURCE,
+                                    is_published=True)
+            session.add(input_dataset)
+            session.commit()
+        cron_dataset_job_executor = CronDatasetJobExecutor()
+        with db.session_scope() as session:
+            # test input_dataset is source
+            mock_get_oldest_daily_folder_time.return_value = datetime(2022, 8, 1)
+            dataset_job = session.query(DatasetJob).get(1)
+            next_event_time = cron_dataset_job_executor._get_next_event_time(session=session,
+                                                                             runnable_dataset_job=dataset_job)
+            mock_get_oldest_daily_folder_time.assert_called_once_with('/data/dataset/123')
+            self.assertEqual(next_event_time, datetime(2022, 8, 1))
+
+            # test input_dataset is not source but has no batch
+            mock_get_oldest_daily_folder_time.reset_mock()
+            dataset_job.input_dataset.dataset_kind = DatasetKindV2.RAW
+            next_event_time = cron_dataset_job_executor._get_next_event_time(session=session,
+                                                                             runnable_dataset_job=dataset_job)
+            mock_get_oldest_daily_folder_time.assert_not_called()
+            self.assertIsNone(next_event_time)
+
+            # test input_dataset is not source and has batch
+            data_batch_1 = DataBatch(id=1,
+                                     name='20220801',
+                                     dataset_id=self._INPUT_DATASET_ID,
+                                     path='/data/test/batch/20220801',
+                                     event_time=datetime(2022, 8, 1))
+            session.add(data_batch_1)
+            data_batch_2 =
DataBatch(id=2, + name='20220802', + dataset_id=self._INPUT_DATASET_ID, + path='/data/test/batch/20220802', + event_time=datetime(2022, 8, 2)) + session.add(data_batch_2) + session.flush() + next_event_time = cron_dataset_job_executor._get_next_event_time(session=session, + runnable_dataset_job=dataset_job) + self.assertEqual(next_event_time, datetime(2022, 8, 1)) + + # test dataset_job already has event_time + dataset_job.event_time = datetime(2022, 8, 1) + next_event_time = cron_dataset_job_executor._get_next_event_time(session=session, + runnable_dataset_job=dataset_job) + self.assertEqual(next_event_time, datetime(2022, 8, 2)) + + # test hourly level + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.time_range = timedelta(hours=1) + session.commit() + with db.session_scope() as session: + # test input_dataset is source + mock_get_oldest_hourly_folder_time.return_value = datetime(2022, 8, 1, 8) + dataset_job = session.query(DatasetJob).get(1) + next_event_time = cron_dataset_job_executor._get_next_event_time(session=session, + runnable_dataset_job=dataset_job) + mock_get_oldest_hourly_folder_time.assert_called_once_with('/data/dataset/123') + self.assertEqual(next_event_time, datetime(2022, 8, 1, 8)) + + # test input_dataset is not source and has batch + data_batch_1 = DataBatch(id=1, + name='20220801', + dataset_id=self._INPUT_DATASET_ID, + path='/data/test/batch/20220801', + event_time=datetime(2022, 8, 1, 8)) + session.add(data_batch_1) + data_batch_2 = DataBatch(id=2, + name='20220802', + dataset_id=self._INPUT_DATASET_ID, + path='/data/test/batch/20220802', + event_time=datetime(2022, 8, 1, 9)) + session.add(data_batch_2) + session.flush() + next_event_time = cron_dataset_job_executor._get_next_event_time(session=session, + runnable_dataset_job=dataset_job) + self.assertEqual(next_event_time, datetime(2022, 8, 1, 8)) + + # test dataset_job already has event_time + dataset_job.event_time = datetime(2022, 8, 1, 8) + next_event_time = cron_dataset_job_executor._get_next_event_time(session=session, + runnable_dataset_job=dataset_job) + self.assertEqual(next_event_time, datetime(2022, 8, 1, 9)) + + @patch('fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.check_batch_folder_ready') + @patch('fedlearner_webconsole.dataset.models.DataBatch.get_frontend_state') + def test_should_run(self, mock_get_frontend_state: MagicMock, mock_check_batch_folder_ready: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job uuid', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._OUTPUT_DATASET_ID, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=0, + scheduler_state=DatasetJobSchedulerState.RUNNABLE, + time_range=timedelta(days=1)) + dataset_job.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job) + input_dataset = Dataset(id=self._INPUT_DATASET_ID, + uuid='dataset_uuid', + name='default dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(input_dataset) + data_batch = DataBatch(id=1, + name='20220701', + dataset_id=self._INPUT_DATASET_ID, + path='/data/test/batch/20220701', + event_time=datetime.strptime('20220701', '%Y%m%d'), + file_size=100, + 
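+                                   # extra batch metadata; readiness itself is decided by the mocked
+                                   # get_frontend_state / check_batch_folder_ready below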
num_example=10, + num_feature=3, + latest_parent_dataset_job_stage_id=1) + session.add(data_batch) + session.commit() + + cron_dataset_job_executor = CronDatasetJobExecutor() + + # test dataset + with db.session_scope() as session: + # test no data_batch + dataset_job = session.query(DatasetJob).get(1) + mock_get_frontend_state.return_value = ResourceState.SUCCEEDED + self.assertFalse(cron_dataset_job_executor._should_run(session, dataset_job, datetime(2022, 7, 2))) + mock_get_frontend_state.reset_mock() + # test data_batch frontend state not succeeded + mock_get_frontend_state.return_value = ResourceState.FAILED + self.assertFalse(cron_dataset_job_executor._should_run(session, dataset_job, datetime(2022, 7, 1))) + mock_get_frontend_state.reset_mock() + # test should run + mock_get_frontend_state.return_value = ResourceState.SUCCEEDED + self.assertTrue(cron_dataset_job_executor._should_run(session, dataset_job, datetime(2022, 7, 1))) + mock_check_batch_folder_ready.assert_not_called() + mock_get_frontend_state.reset_mock() + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.input_dataset.dataset_kind = DatasetKindV2.SOURCE + session.flush() + # test streaming_folder not ready + mock_check_batch_folder_ready.return_value = False + self.assertFalse(cron_dataset_job_executor._should_run(session, dataset_job, datetime(2022, 7, 1))) + mock_check_batch_folder_ready.assert_called_once_with(folder='/data/dataset/123', batch_name='20220701') + mock_check_batch_folder_ready.reset_mock() + # test should run + mock_check_batch_folder_ready.return_value = True + self.assertTrue(cron_dataset_job_executor._should_run(session, dataset_job, datetime(2022, 7, 1))) + mock_check_batch_folder_ready.assert_called_once_with(folder='/data/dataset/123', batch_name='20220701') + mock_get_frontend_state.assert_not_called() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_job_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_job_executor.py new file mode 100644 index 000000000..f568a4da2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_job_executor.py @@ -0,0 +1,102 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import logging
+from typing import List
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult
+from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobSchedulerState
+from fedlearner_webconsole.dataset.services import DatasetJobService
+from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger
+from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController
+from fedlearner_webconsole.dataset.auth_service import AuthService
+from fedlearner_webconsole.proto import dataset_pb2
+from fedlearner_webconsole.rpc.client import RpcClient
+from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus
+from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+from fedlearner_webconsole.flag.models import Flag
+
+
+class DatasetJobExecutor(BaseExecutor):
+
+    def get_item_ids(self) -> List[int]:
+        with db.session_scope() as session:
+            pending_dataset_job_ids = session.query(
+                DatasetJob.id).filter(DatasetJob.scheduler_state == DatasetJobSchedulerState.PENDING).all()
+            return [pending_dataset_job_id for pending_dataset_job_id, *_ in pending_dataset_job_ids]
+
+    def run_item(self, item_id: int) -> ExecutorResult:
+        with db.session_scope() as session:
+            dataset_job: DatasetJob = session.query(DatasetJob).get(item_id)
+            # TODO(liuhehan): remove this func after removing the DATASET_JOB_STAGE_ENABLED flag
+            if not dataset_job.get_context().has_stages:
+                dataset_job.scheduler_state = DatasetJobSchedulerState.STOPPED
+                session.commit()
+                return ExecutorResult.SKIP
+            if dataset_job.scheduler_state != DatasetJobSchedulerState.PENDING:
+                return ExecutorResult.SKIP
+            if dataset_job.output_dataset.ticket_status != TicketStatus.APPROVED:
+                return ExecutorResult.SKIP
+            if dataset_job.is_coordinator():
+                # create participant dataset_job
+                participants = DatasetJobService(session).get_participants_need_distribute(dataset_job)
+                for participant in participants:
+                    client = RpcClient.from_project_and_participant(dataset_job.project.name,
+                                                                    dataset_job.project.token,
+                                                                    participant.domain_name)
+                    dataset_job_parameter = dataset_job.to_proto()
+                    dataset_job_parameter.workflow_definition.MergeFrom(
+                        DatasetJobConfiger.from_kind(dataset_job.kind, session).get_config())
+                    dataset_parameter = dataset_pb2.Dataset(
+                        participants_info=dataset_job.output_dataset.get_participants_info())
+                    client.create_dataset_job(dataset_job=dataset_job_parameter,
+                                              ticket_uuid=dataset_job.output_dataset.ticket_uuid,
+                                              dataset=dataset_parameter)
+                    # check flags: if the participant does not check auth status, just mark it authorized
+                    system_client = SystemServiceClient.from_participant(domain_name=participant.domain_name)
+                    flag_resp = system_client.list_flags()
+                    if not flag_resp.get(Flag.DATASET_AUTH_STATUS_CHECK_ENABLED.name):
+                        AuthService(session=session, dataset_job=dataset_job).update_auth_status(
+                            domain_name=participant.pure_domain_name(), auth_status=AuthStatus.AUTHORIZED)
+            else:
+                # on the participant side the scheduler state is always set to STOPPED;
+                # a participant never creates the data_batch and dataset_job_stage itself
+                dataset_job.scheduler_state = DatasetJobSchedulerState.STOPPED
+            session.commit()
+        with db.session_scope() as session:
+            # we set isolation_level to SERIALIZABLE to make sure state won't be changed within this session
+            session.connection(execution_options={'isolation_level': 'SERIALIZABLE'})
+            dataset_job: DatasetJob = session.query(DatasetJob).get(item_id)
+            if dataset_job.scheduler_state != DatasetJobSchedulerState.PENDING:
+                return ExecutorResult.SKIP
+            if dataset_job.is_cron():
+                # if dataset_job is cron, we set scheduler state to runnable,
+                # and it will be scheduled again by cron_dataset_job_executor
+                dataset_job.scheduler_state = DatasetJobSchedulerState.RUNNABLE
+            else:
+                if dataset_job.get_context().need_create_stage:
+                    # check authorization
+                    if not AuthService(session=session, dataset_job=dataset_job).check_participants_authorized():
+                        message = '[dataset_job_executor] still waiting for participants authorized, ' \
+                                  f'dataset_job_id: {item_id}'
+                        logging.warning(message)
+                        return ExecutorResult.SKIP
+                    DatasetJobStageLocalController(session).create_data_batch_and_job_stage_as_coordinator(
+                        dataset_job_id=dataset_job.id, global_configs=dataset_job.get_global_configs())
+                dataset_job.scheduler_state = DatasetJobSchedulerState.STOPPED
+            session.commit()
+            return ExecutorResult.SUCCEEDED
diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_job_executor_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_job_executor_test.py
new file mode 100644
index 000000000..6c0d91ed8
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_job_executor_test.py
@@ -0,0 +1,264 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from unittest.mock import PropertyMock, patch, MagicMock +from datetime import timedelta + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobSchedulerState, \ + DatasetJobStage, DatasetJobState, DatasetKindV2, DatasetType +from fedlearner_webconsole.dataset.scheduler.dataset_job_executor import DatasetJobExecutor +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult +from fedlearner_webconsole.dataset.job_configer.ot_psi_data_join_configer import OtPsiDataJoinConfiger +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.flag.models import _Flag +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.proto import dataset_pb2, project_pb2 +from testing.no_web_server_test_case import NoWebServerTestCase + + +class DatasetJobExecutorTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _PARTICIPANT_ID = 1 + _WORKFLOW_ID = 1 + _INPUT_DATASET_ID = 1 + _OUTPUT_DATASET_ID = 2 + _DATASET_JOB_ID = 1 + + def test_get_item_ids(self): + with db.session_scope() as session: + dataset_job_1 = DatasetJob(id=1, + uuid='dataset_job_1', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._OUTPUT_DATASET_ID, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.RUNNABLE, + time_range=timedelta(days=1)) + dataset_job_1.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_1) + dataset_job_2 = DatasetJob(id=2, + uuid='dataset_job_2', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=3, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.STOPPED, + time_range=timedelta(days=1)) + dataset_job_2.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_2) + dataset_job_3 = DatasetJob(id=3, + uuid='dataset_job_3', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=4, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.PENDING, + time_range=timedelta(days=1)) + dataset_job_3.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job_3) + session.commit() + executor = DatasetJobExecutor() + self.assertEqual(executor.get_item_ids(), [3]) + + @patch('fedlearner_webconsole.flag.models.Flag.DATASET_AUTH_STATUS_CHECK_ENABLED', new_callable=PropertyMock) + @patch('fedlearner_webconsole.dataset.scheduler.dataset_job_executor.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.dataset.local_controllers.DatasetJobStageLocalController.'\ + 'create_data_batch_and_job_stage_as_coordinator') + 
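+    # note: unittest.mock injects patched mocks bottom-up, so the argument order of
+    # test_run_item below is the reverse of these @patch decorators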
@patch('fedlearner_webconsole.dataset.scheduler.dataset_job_executor.RpcClient.create_dataset_job') + def test_run_item(self, mock_create_dataset_job: MagicMock, + mock_create_data_batch_and_job_stage_as_coordinator: MagicMock, mock_list_flags: MagicMock, + mock_dataset_auth_status_check_enabled: MagicMock): + with db.session_scope() as session: + # pylint: disable=protected-access + _insert_or_update_templates(session) + dataset_job_1 = DatasetJob(id=self._PARTICIPANT_ID, + uuid='dataset_job_1', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._OUTPUT_DATASET_ID, + kind=DatasetJobKind.OT_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=self._WORKFLOW_ID, + scheduler_state=DatasetJobSchedulerState.RUNNABLE, + time_range=timedelta(days=1)) + dataset_job_1.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'test_participant_2': dataset_pb2.DatasetJobConfig()})) + dataset_job_1.set_context(dataset_pb2.DatasetJobContext(has_stages=True)) + session.add(dataset_job_1) + input_dataset = Dataset(id=self._INPUT_DATASET_ID, + uuid='dataset_uuid', + name='default dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(input_dataset) + output_dataset = Dataset(id=self._OUTPUT_DATASET_ID, + uuid='dataset_uuid', + name='default dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.RAW, + is_published=True, + ticket_status=TicketStatus.APPROVED) + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_participant_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_participant_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + output_dataset.set_participants_info(participants_info=participants_info) + session.add(output_dataset) + project = Project(id=self._PROJECT_ID, name='test-project') + session.add(project) + participant = Participant(id=self._PARTICIPANT_ID, + name='participant_1', + domain_name='fl-test_participant_2.com') + project_participant = ProjectParticipant(project_id=self._PROJECT_ID, participant_id=self._PARTICIPANT_ID) + session.add_all([participant, project_participant]) + session.commit() + + mock_list_flags.return_value = {'dataset_auth_status_check_enabled': True} + mock_dataset_auth_status_check_enabled.return_value = _Flag('dataset_auth_status_check_enabled', False) + executor = DatasetJobExecutor() + # test not pending + executor_result = executor.run_item(self._DATASET_JOB_ID) + self.assertEqual(executor_result, ExecutorResult.SKIP) + + # test not approved + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.scheduler_state = DatasetJobSchedulerState.PENDING + dataset_job.output_dataset.ticket_status = TicketStatus.PENDING + session.commit() + executor_result = executor.run_item(self._DATASET_JOB_ID) + self.assertEqual(executor_result, ExecutorResult.SKIP) + + # test not coordinator + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.coordinator_id = 1 + dataset_job.output_dataset.ticket_status = TicketStatus.APPROVED + session.commit() + executor_result = 
executor.run_item(self._DATASET_JOB_ID) + self.assertEqual(executor_result, ExecutorResult.SKIP) + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.STOPPED) + mock_create_dataset_job.assert_not_called() + + # test streaming dataset_job and check flag True + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.coordinator_id = 0 + dataset_job.scheduler_state = DatasetJobSchedulerState.PENDING + session.commit() + session.flush() + dataset_job_parameter = dataset_job.to_proto() + dataset_job_parameter.workflow_definition.MergeFrom(OtPsiDataJoinConfiger(session).get_config()) + executor_result = executor.run_item(self._DATASET_JOB_ID) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.RUNNABLE) + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_participant_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_participant_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + mock_create_dataset_job.assert_called_once_with( + dataset_job=dataset_job_parameter, + ticket_uuid=None, + dataset=dataset_pb2.Dataset(participants_info=participants_info)) + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test_participant_2'].auth_status, + AuthStatus.PENDING.name) + + mock_create_dataset_job.reset_mock() + # test psi dataset_job need_create_batch + mock_create_data_batch_and_job_stage_as_coordinator.return_value = DatasetJobStage( + uuid='mock_stage', + project_id=self._PROJECT_ID, + coordinator_id=0, + dataset_job_id=self._DATASET_JOB_ID, + data_batch_id=0) + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.time_range = None + dataset_job.scheduler_state = DatasetJobSchedulerState.PENDING + dataset_job.output_dataset.dataset_type = DatasetType.PSI + context = dataset_job.get_context() + context.need_create_stage = True + dataset_job.set_context(context) + session.commit() + executor_result = executor.run_item(self._DATASET_JOB_ID) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.STOPPED) + mock_create_dataset_job.assert_called_once() + mock_create_data_batch_and_job_stage_as_coordinator.assert_called_once_with( + dataset_job_id=self._DATASET_JOB_ID, + global_configs=dataset_pb2.DatasetJobGlobalConfigs( + global_configs={'test_participant_2': dataset_pb2.DatasetJobConfig()}), + ) + + mock_create_dataset_job.reset_mock() + mock_create_data_batch_and_job_stage_as_coordinator.reset_mock() + mock_dataset_auth_status_check_enabled.reset_mock() + # test check auth_status_failed + mock_dataset_auth_status_check_enabled.return_value = _Flag('dataset_auth_status_check_enabled', True) + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.time_range = None + dataset_job.scheduler_state = DatasetJobSchedulerState.PENDING + 
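+            # reset to PENDING so run_item walks the participant auth-check branch this time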
dataset_job.output_dataset.dataset_type = DatasetType.PSI + session.commit() + executor_result = executor.run_item(self._DATASET_JOB_ID) + self.assertEqual(executor_result, ExecutorResult.SKIP) + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.PENDING) + mock_create_dataset_job.assert_called_once() + self.assertEqual( + dataset_job.output_dataset.get_participants_info().participants_map['test_participant_2'].auth_status, + AuthStatus.PENDING.name) + mock_create_data_batch_and_job_stage_as_coordinator.assert_not_called() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_long_period_scheduler.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_long_period_scheduler.py new file mode 100644 index 000000000..d201bfb19 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_long_period_scheduler.py @@ -0,0 +1,39 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Tuple + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.dataset.scheduler.chained_executor import run_executor +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorType +from fedlearner_webconsole.proto.composer_pb2 import DatasetSchedulerOutput, RunnerOutput + + +class DatasetLongPeriodScheduler(IRunnerV2): + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + runner_output = RunnerOutput(dataset_scheduler_output=DatasetSchedulerOutput()) + + executor_result = run_executor(executor_type=ExecutorType.CRON_DATASET_JOB) + runner_output.dataset_scheduler_output.executor_outputs[ExecutorType.CRON_DATASET_JOB.value].MergeFrom( + executor_result) + + executor_result = run_executor(executor_type=ExecutorType.UPDATE_AUTH_STATUS) + runner_output.dataset_scheduler_output.executor_outputs[ExecutorType.UPDATE_AUTH_STATUS.value].MergeFrom( + executor_result) + + return RunnerStatus.DONE, runner_output diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_long_period_scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_long_period_scheduler_test.py new file mode 100644 index 000000000..e6117e3a8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_long_period_scheduler_test.py @@ -0,0 +1,55 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import MagicMock, patch + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult, ExecutorType +from fedlearner_webconsole.dataset.scheduler.dataset_long_period_scheduler import DatasetLongPeriodScheduler +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import DatasetSchedulerOutput, ExecutorResults, RunnerInput, RunnerOutput + + +class DatasetLongPeriodSchedulerTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.CronDatasetJobExecutor.get_item_ids') + @patch('fedlearner_webconsole.dataset.scheduler.update_auth_status_executor.UpdateAuthStatusExecutor.get_item_ids') + @patch('fedlearner_webconsole.dataset.scheduler.cron_dataset_job_executor.CronDatasetJobExecutor.run_item') + @patch('fedlearner_webconsole.dataset.scheduler.update_auth_status_executor.UpdateAuthStatusExecutor.run_item') + def test_run(self, mock_update_auth_status_run_item: MagicMock, mock_cron_dataset_job_run_item: MagicMock, + mock_update_auth_status_get_item_ids: MagicMock, mock_cron_dataset_job_get_item_ids: MagicMock): + mock_cron_dataset_job_get_item_ids.return_value = [1, 2, 3, 4] + mock_cron_dataset_job_run_item.side_effect = [ + ExecutorResult.SUCCEEDED, ExecutorResult.SUCCEEDED, ExecutorResult.FAILED, ExecutorResult.SKIP + ] + mock_update_auth_status_get_item_ids.return_value = [1, 2] + mock_update_auth_status_run_item.side_effect = [ExecutorResult.FAILED, ExecutorResult.FAILED] + dataset_long_period_scheduler = DatasetLongPeriodScheduler() + status, runner_output = dataset_long_period_scheduler.run(context=RunnerContext(0, RunnerInput())) + self.assertEqual(status, RunnerStatus.DONE) + expected_runner_output = RunnerOutput(dataset_scheduler_output=DatasetSchedulerOutput( + executor_outputs={ + ExecutorType.CRON_DATASET_JOB.value: + ExecutorResults(succeeded_item_ids=[1, 2], failed_item_ids=[3], skip_item_ids=[4]), + ExecutorType.UPDATE_AUTH_STATUS.value: + ExecutorResults(failed_item_ids=[1, 2]), + })) + self.assertEqual(runner_output, expected_runner_output) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_short_period_scheduler.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_short_period_scheduler.py new file mode 100644 index 000000000..54724efa4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_short_period_scheduler.py @@ -0,0 +1,43 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Tuple + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.dataset.scheduler.chained_executor import run_executor +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorType +from fedlearner_webconsole.proto.composer_pb2 import DatasetSchedulerOutput, RunnerOutput + + +class DatasetShortPeriodScheduler(IRunnerV2): + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + runner_output = RunnerOutput(dataset_scheduler_output=DatasetSchedulerOutput()) + + executor_result = run_executor(executor_type=ExecutorType.DATASET_JOB) + runner_output.dataset_scheduler_output.executor_outputs[ExecutorType.DATASET_JOB.value].MergeFrom( + executor_result) + + executor_result = run_executor(executor_type=ExecutorType.PENDING_DATASET_JOB_STAGE) + runner_output.dataset_scheduler_output.executor_outputs[ExecutorType.PENDING_DATASET_JOB_STAGE.value].MergeFrom( + executor_result) + + executor_result = run_executor(executor_type=ExecutorType.RUNNING_DATASET_JOB_STAGE) + runner_output.dataset_scheduler_output.executor_outputs[ExecutorType.RUNNING_DATASET_JOB_STAGE.value].MergeFrom( + executor_result) + + return RunnerStatus.DONE, runner_output diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_short_period_scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_short_period_scheduler_test.py new file mode 100644 index 000000000..f2d49cc4b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/dataset_short_period_scheduler_test.py @@ -0,0 +1,67 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import MagicMock, patch + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult, ExecutorType +from fedlearner_webconsole.dataset.scheduler.dataset_short_period_scheduler import DatasetShortPeriodScheduler +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import DatasetSchedulerOutput, ExecutorResults, RunnerInput, RunnerOutput + + +class DatasetShortPeriodSchedulerTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.dataset.scheduler.dataset_job_executor.DatasetJobExecutor.get_item_ids') + @patch('fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.'\ + 'PendingDatasetJobStageExecutor.get_item_ids') + @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.'\ + 'RunningDatasetJobStageExecutor.get_item_ids') + @patch('fedlearner_webconsole.dataset.scheduler.dataset_job_executor.DatasetJobExecutor.run_item') + @patch('fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.'\ + 'PendingDatasetJobStageExecutor.run_item') + @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.'\ + 'RunningDatasetJobStageExecutor.run_item') + def test_run(self, mock_running_dataset_job_stage_run_item: MagicMock, + mock_pending_dataset_job_stage_run_item: MagicMock, mock_dataset_job_run_item: MagicMock, + mock_running_dataset_job_stage_get_item_ids: MagicMock, + mock_pending_dataset_job_stage_get_item_ids: MagicMock, mock_dataset_job_get_item_ids: MagicMock): + mock_running_dataset_job_stage_get_item_ids.return_value = [1, 2, 3, 4] + mock_running_dataset_job_stage_run_item.side_effect = [ + ExecutorResult.SUCCEEDED, ExecutorResult.SUCCEEDED, ExecutorResult.FAILED, ExecutorResult.SKIP + ] + mock_pending_dataset_job_stage_get_item_ids.return_value = [1, 2] + mock_pending_dataset_job_stage_run_item.side_effect = [ExecutorResult.FAILED, ExecutorResult.FAILED] + mock_dataset_job_get_item_ids.return_value = [1, 2] + mock_dataset_job_run_item.side_effect = [ExecutorResult.SUCCEEDED, ExecutorResult.SKIP] + dataset_short_period_scheduler = DatasetShortPeriodScheduler() + status, runner_output = dataset_short_period_scheduler.run(context=RunnerContext(0, RunnerInput())) + self.assertEqual(status, RunnerStatus.DONE) + expected_runner_output = RunnerOutput(dataset_scheduler_output=DatasetSchedulerOutput( + executor_outputs={ + ExecutorType.RUNNING_DATASET_JOB_STAGE.value: + ExecutorResults(succeeded_item_ids=[1, 2], failed_item_ids=[3], skip_item_ids=[4]), + ExecutorType.PENDING_DATASET_JOB_STAGE.value: + ExecutorResults(failed_item_ids=[1, 2]), + ExecutorType.DATASET_JOB.value: + ExecutorResults(succeeded_item_ids=[1], skip_item_ids=[2]), + })) + self.assertEqual(runner_output, expected_runner_output) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/pending_dataset_job_stage_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/pending_dataset_job_stage_executor.py new file mode 100644 index 000000000..d5cf91922 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/pending_dataset_job_stage_executor.py @@ -0,0 +1,118 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from typing import List
+import grpc
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.dataset.controllers import DatasetJobStageController
+from fedlearner_webconsole.dataset.services import DatasetJobService
+from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult
+from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobSchedulerState, DatasetJobStage, DatasetJobState
+from fedlearner_webconsole.exceptions import InternalException
+from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient
+
+
+class PendingDatasetJobStageExecutor(BaseExecutor):
+
+    def _process_pending_dataset_job_stage(self, session: Session,
+                                           dataset_job_stage: DatasetJobStage) -> ExecutorResult:
+        """Schedules a pending dataset job stage; same logic as _process_pending_dataset_job.
+
+        1. If this side is not the coordinator, return directly.
+        2. Check whether every participant is ready; create any missing participant stage and return.
+        3. Try to start the dataset_job_stage.
+        """
+        if not dataset_job_stage.is_coordinator():
+            return ExecutorResult.SUCCEEDED
+        # create participant dataset_job_stage
+        dataset_job: DatasetJob = dataset_job_stage.dataset_job
+        participants = DatasetJobService(session).get_participants_need_distribute(dataset_job)
+        # is_peer_ready is True only if every participant that needs distribution has already
+        # created the dataset_job_stage and its related workflow
+        is_peer_ready = True
+        for participant in participants:
+            client = JobServiceClient.from_project_and_participant(domain_name=participant.domain_name,
+                                                                   project_name=dataset_job_stage.project.name)
+            try:
+                response = client.get_dataset_job_stage(dataset_job_stage_uuid=dataset_job_stage.uuid)
+                if response.dataset_job_stage.is_ready:
+                    logging.info(
+                        '[pending dataset_job_stage executor]: participant dataset_job_stage is ready, ' \
+                        f'participant name: {participant.name}'
+                    )
+                else:
+                    is_peer_ready = False
+            except grpc.RpcError as err:
+                if err.code() != grpc.StatusCode.NOT_FOUND:
+                    raise InternalException(
+                        details=f'failed to call GetDatasetJobStage with status code {err.code()} ' \
+                                f'and details {err.details()}'
+                    ) from err
+                # participant has no dataset_job_stage
+                logging.info(
+                    f'[pending dataset_job_stage executor]: dataset_job_stage in participant {participant.name} ' \
+                    'not found, start to create')
+                is_peer_ready = False
+                client.create_dataset_job_stage(dataset_job_uuid=dataset_job.uuid,
+                                                dataset_job_stage_uuid=dataset_job_stage.uuid,
+                                                name=dataset_job_stage.name,
+                                                event_time=dataset_job_stage.event_time)
+        if not is_peer_ready:
+            return ExecutorResult.SKIP
+        # start dataset_job
+        try:
+            DatasetJobStageController(session=session).start(uuid=dataset_job_stage.uuid)
+            logging.info(
+                '[pending dataset_job_stage executor]: start dataset_job_stage successfully, ' \
+                f'dataset_job_stage_id: {dataset_job_stage.id}'
+            )
+        except
InternalException as e: + logging.error( + f'[pending dataset_job_stage executor]: start dataset_job_stage {dataset_job_stage.id} failed, ' \ + f'exception: {e}' + ) + # reset dataset_job_stage state to PENDING, + # in order to make sure it will be scheduled to start again next time + dataset_job_stage.state = DatasetJobState.PENDING + return ExecutorResult.FAILED + return ExecutorResult.SUCCEEDED + + def get_item_ids(self) -> List[int]: + with db.session_scope() as session: + pending_dataset_job_stage_ids = session.query(DatasetJobStage.id).outerjoin( + DatasetJob, DatasetJob.id == DatasetJobStage.dataset_job_id).filter( + DatasetJobStage.state == DatasetJobState.PENDING).filter( + DatasetJob.scheduler_state != DatasetJobSchedulerState.PENDING).all() + return [pending_dataset_job_stage_id for pending_dataset_job_stage_id, *_ in pending_dataset_job_stage_ids] + + def run_item(self, item_id: int) -> ExecutorResult: + with db.session_scope() as session: + # we set isolation_level to SERIALIZABLE to make sure state won't be changed within this session + session.connection(execution_options={'isolation_level': 'SERIALIZABLE'}) + dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).get(item_id) + if dataset_job_stage.state != DatasetJobState.PENDING: + return ExecutorResult.SKIP + if not dataset_job_stage.workflow: + DatasetJobStageController(session=session).create_ready_workflow(dataset_job_stage) + session.commit() + with db.session_scope() as session: + dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).get(item_id) + executor_result = self._process_pending_dataset_job_stage(session, dataset_job_stage) + session.commit() + return executor_result diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/pending_dataset_job_stage_executor_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/pending_dataset_job_stage_executor_test.py new file mode 100644 index 000000000..a0f4b2125 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/pending_dataset_job_stage_executor_test.py @@ -0,0 +1,269 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import patch, MagicMock +from datetime import datetime, timedelta +import grpc + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult +from fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor import PendingDatasetJobStageExecutor +from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetJob, DatasetJobKind, \ + DatasetJobSchedulerState, DatasetJobStage, DatasetJobState, DatasetKindV2, DatasetType +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.participant.models import ProjectParticipant, Participant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.proto import dataset_pb2 +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import GetDatasetJobStageResponse +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ProcessDatasetJobStagesTest(NoWebServerTestCase): + + _PROJECT_ID = 1 + _JOB_ID = 1 + _WORKFLOW_ID = 1 + _PARTICIPANT_ID = 1 + _INPUT_DATASET_ID = 1 + _STREAMING_OUTPUT_DATASET_ID = 2 + _PSI_OUTPUT_DATASET_ID = 3 + _DATA_BATCH_NO_EVENT_TIME_ID = 1 + _DATA_BATCH_WITH_EVENT_TIME_ID = 2 + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + project = Project(id=self._PROJECT_ID, name='test-project') + workflow = Workflow(id=self._WORKFLOW_ID, + project_id=self._PROJECT_ID, + state=WorkflowState.READY, + uuid='workflow_uuid') + job = Job(id=self._JOB_ID, + state=JobState.NEW, + job_type=JobType.PSI_DATA_JOIN, + workflow_id=self._WORKFLOW_ID, + project_id=1) + session.add_all([project, workflow, job]) + input_dataset = Dataset(id=self._INPUT_DATASET_ID, + name='input dataset', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + created_at=datetime(2012, 1, 14, 12, 0, 6), + dataset_kind=DatasetKindV2.RAW) + streaming_output_dataset = Dataset(id=self._STREAMING_OUTPUT_DATASET_ID, + name='streaming output dataset', + uuid='streaming output_dataset uuid', + path='/data/dataset/321', + project_id=self._PROJECT_ID, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.PROCESSED, + dataset_type=DatasetType.STREAMING) + batch_with_event_time = DataBatch(id=self._DATA_BATCH_WITH_EVENT_TIME_ID, + path='/data/dataset/321/batch/20220101', + dataset_id=self._STREAMING_OUTPUT_DATASET_ID, + event_time=datetime(2022, 1, 1)) + psi_output_dataset = Dataset(id=self._PSI_OUTPUT_DATASET_ID, + name='psi output dataset', + uuid='psi output_dataset uuid', + path='/data/dataset/321', + project_id=self._PROJECT_ID, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.PROCESSED, + dataset_type=DatasetType.PSI) + batch_no_event_time = DataBatch(id=self._DATA_BATCH_NO_EVENT_TIME_ID, + path='/data/dataset/321/batch/0', + dataset_id=self._PSI_OUTPUT_DATASET_ID) + session.add_all([ + input_dataset, streaming_output_dataset, batch_with_event_time, psi_output_dataset, batch_no_event_time + ]) + participant = Participant(id=self._PARTICIPANT_ID, name='participant_1', domain_name='fake_domain_name_1') + project_participant = ProjectParticipant(project_id=self._PROJECT_ID, participant_id=self._PARTICIPANT_ID) + session.add_all([participant, project_participant]) + session.commit() + + def 
_insert_psi_dataset_job_and_stage(self, state: DatasetJobState, job_id: int): + with db.session_scope() as session: + psi_dataset_job = DatasetJob(id=job_id, + uuid=f'psi dataset_job uuid {state.name}', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._PSI_OUTPUT_DATASET_ID, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=state, + scheduler_state=DatasetJobSchedulerState.STOPPED, + coordinator_id=0, + workflow_id=0) + psi_dataset_job_stage = DatasetJobStage(id=job_id, + uuid=f'psi dataset_job_stage uuid {state.name}', + name='psi dataset job stage', + project_id=self._PROJECT_ID, + workflow_id=self._WORKFLOW_ID, + dataset_job_id=job_id, + data_batch_id=self._DATA_BATCH_NO_EVENT_TIME_ID, + state=state) + session.add_all([psi_dataset_job, psi_dataset_job_stage]) + session.commit() + + def _insert_streaming_dataset_job_and_stage(self, state: DatasetJobState, job_id: int): + with db.session_scope() as session: + streaming_dataset_job = DatasetJob(id=job_id, + uuid=f'streaming dataset_job uuid {state.name}', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._STREAMING_OUTPUT_DATASET_ID, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=state, + scheduler_state=DatasetJobSchedulerState.STOPPED, + coordinator_id=0, + workflow_id=0, + time_range=timedelta(days=1)) + streaming_dataset_job_stage = DatasetJobStage(id=job_id, + uuid=f'streaming dataset_job_stage uuid {state.name}', + name='streaming dataset job stage', + project_id=self._PROJECT_ID, + workflow_id=self._WORKFLOW_ID, + dataset_job_id=job_id, + data_batch_id=self._DATA_BATCH_WITH_EVENT_TIME_ID, + state=state, + event_time=datetime(2022, 1, 1)) + session.add_all([streaming_dataset_job, streaming_dataset_job_stage]) + session.commit() + + def test_get_item_ids(self): + dataset_job_stage_pending_id = 1 + dataset_job_stage_running_id = 2 + dataset_job_stage_succeeded_id = 3 + self._insert_psi_dataset_job_and_stage(DatasetJobState.PENDING, dataset_job_stage_pending_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.SUCCEEDED, dataset_job_stage_succeeded_id) + executor = PendingDatasetJobStageExecutor() + processed_dataset_job_stage_ids = executor.get_item_ids() + self.assertEqual(processed_dataset_job_stage_ids, [dataset_job_stage_pending_id]) + + @patch( + 'fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.PendingDatasetJobStageExecutor.' 
\ + '_process_pending_dataset_job_stage' + ) + @patch('fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.DatasetJobStageController.'\ + 'create_ready_workflow') + def test_run_item(self, mock_create_ready_workflow: MagicMock, mock_process_pending_dataset_job_stage: MagicMock): + dataset_job_stage_pending_id = 1 + dataset_job_stage_running_id = 2 + self._insert_psi_dataset_job_and_stage(DatasetJobState.PENDING, dataset_job_stage_pending_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_id) + executor = PendingDatasetJobStageExecutor() + + # test not pending + executor_result = executor.run_item([dataset_job_stage_running_id]) + self.assertEqual(executor_result, ExecutorResult.SKIP) + mock_create_ready_workflow.assert_not_called() + mock_process_pending_dataset_job_stage.assert_not_called() + + # test succeeded + mock_process_pending_dataset_job_stage.return_value = ExecutorResult.SUCCEEDED + executor_result = executor.run_item([dataset_job_stage_pending_id]) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + mock_create_ready_workflow.assert_not_called() + mock_process_pending_dataset_job_stage.assert_called_once() + self.assertEqual(mock_process_pending_dataset_job_stage.call_args[0][1].id, dataset_job_stage_pending_id) + + # test no workflow and process failed + with db.session_scope() as session: + dataset_job_stage_pending = session.query(DatasetJobStage).get(dataset_job_stage_pending_id) + dataset_job_stage_pending.workflow_id = 0 + session.commit() + mock_process_pending_dataset_job_stage.reset_mock() + mock_process_pending_dataset_job_stage.return_value = ExecutorResult.FAILED + executor_result = executor.run_item([dataset_job_stage_pending_id]) + self.assertEqual(executor_result, ExecutorResult.FAILED) + mock_create_ready_workflow.assert_called_once() + mock_process_pending_dataset_job_stage.assert_called_once() + self.assertEqual(mock_process_pending_dataset_job_stage.call_args[0][1].id, dataset_job_stage_pending_id) + + @patch('fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.JobServiceClient.'\ + 'get_dataset_job_stage') + @patch('fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.JobServiceClient.'\ + 'create_dataset_job_stage') + @patch('fedlearner_webconsole.dataset.scheduler.pending_dataset_job_stage_executor.DatasetJobStageController.'\ + 'start') + def test_process_pending_dataset_job_stage(self, mock_start: MagicMock, mock_create_dataset_job_stage: MagicMock, + mock_get_dataset_job_stage: MagicMock): + dataset_job_stage_pending_id = 1 + self._insert_streaming_dataset_job_and_stage(DatasetJobState.PENDING, dataset_job_stage_pending_id) + executor = PendingDatasetJobStageExecutor() + + # test not coordinator + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_pending_id) + dataset_job_stage.coordinator_id = 1 + # pylint: disable=protected-access + executor_result = executor._process_pending_dataset_job_stage(session=session, + dataset_job_stage=dataset_job_stage) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + mock_get_dataset_job_stage.assert_not_called() + + # test not_ready + mock_get_dataset_job_stage.return_value = GetDatasetJobStageResponse( + dataset_job_stage=dataset_pb2.DatasetJobStage(is_ready=False)) + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_pending_id) + # pylint: 
disable=protected-access + executor_result = executor._process_pending_dataset_job_stage(session=session, + dataset_job_stage=dataset_job_stage) + self.assertEqual(executor_result, ExecutorResult.SKIP) + mock_create_dataset_job_stage.assert_not_called() + mock_start.assert_not_called() + + mock_get_dataset_job_stage.reset_mock() + + # test ready and start + mock_get_dataset_job_stage.return_value = GetDatasetJobStageResponse( + dataset_job_stage=dataset_pb2.DatasetJobStage(is_ready=True)) + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_pending_id) + # pylint: disable=protected-access + executor_result = executor._process_pending_dataset_job_stage(session=session, + dataset_job_stage=dataset_job_stage) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + mock_create_dataset_job_stage.assert_not_called() + mock_start.assert_called_once_with(uuid='streaming dataset_job_stage uuid PENDING') + + mock_get_dataset_job_stage.reset_mock() + mock_start.reset_mock() + + # test get_dataset_job_stage raise + e = grpc.RpcError() + e.code = lambda: grpc.StatusCode.NOT_FOUND + mock_get_dataset_job_stage.side_effect = e + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_pending_id) + # pylint: disable=protected-access + executor_result = executor._process_pending_dataset_job_stage(session=session, + dataset_job_stage=dataset_job_stage) + self.assertEqual(executor_result, ExecutorResult.SKIP) + mock_create_dataset_job_stage.assert_called_once_with( + dataset_job_uuid='streaming dataset_job uuid PENDING', + dataset_job_stage_uuid='streaming dataset_job_stage uuid PENDING', + name='streaming dataset job stage', + event_time=datetime(2022, 1, 1)) + mock_start.assert_not_called() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/running_dataset_job_stage_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/running_dataset_job_stage_executor.py new file mode 100644 index 000000000..be894d5c1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/running_dataset_job_stage_executor.py @@ -0,0 +1,203 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from datetime import timedelta
+import logging
+import os
+from typing import List
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.cleanup.models import ResourceType
+from fedlearner_webconsole.cleanup.services import CleanupService
+from fedlearner_webconsole.composer.composer_service import ComposerService
+from fedlearner_webconsole.composer.interface import ItemType
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory
+from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobStage, \
+    DatasetJobSchedulerState, DatasetJobState, DataBatch, DatasetKindV2, DatasetType, ResourceState
+from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor
+from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult
+from fedlearner_webconsole.dataset.services import DatasetJobService, DatasetJobStageService, DatasetService
+from fedlearner_webconsole.dataset.consts import ERROR_BATCH_SIZE
+from fedlearner_webconsole.rpc.v2.resource_service_client import ResourceServiceClient
+from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient
+from fedlearner_webconsole.rpc.client import RpcClient
+from fedlearner_webconsole.utils.pp_datetime import to_timestamp, now
+from fedlearner_webconsole.utils.workflow import build_job_name
+from fedlearner_webconsole.workflow.models import Workflow, WorkflowExternalState
+from fedlearner_webconsole.flag.models import Flag
+from fedlearner_webconsole.proto.cleanup_pb2 import CleanupParameter, CleanupPayload
+from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, BatchStatsInput
+
+SIDE_OUTPUT_CLEANUP_DEFAULT_DELAY = timedelta(days=1)
+
+
+class RunningDatasetJobStageExecutor(BaseExecutor):
+
+    def _process_running_dataset_job_stage(self, session: Session,
+                                           dataset_job_stage: DatasetJobStage) -> ExecutorResult:
+        """Schedules a running dataset job stage, same logic as _process_running_dataset_job.
+
+        1. If the related workflow is completed
+          - 1.1 Checks the batch stats item if it has been triggered.
+            - 1.1.1 If the runner fails, then set the batch file_size to ERROR_BATCH_SIZE
+                    and still mark the job_stage as succeeded.
+            - 1.1.2 If the runner completes, then mark the job_stage as succeeded.
+          - 1.2 Triggers the batch stats item if it does not exist.
+        2. If the related workflow is failed, then mark the job_stage as failed.
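+        3. Otherwise (the workflow or the batch stats runner is still running), leaves the
+           job_stage untouched and returns SKIP so it is retried on the next schedule.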
+ """ + dataset_job_stage_service = DatasetJobStageService(session=session) + workflow_state = dataset_job_stage.workflow.get_state_for_frontend() + if workflow_state == WorkflowExternalState.COMPLETED: + if not self._need_batch_stats(dataset_job_stage.dataset_job.kind): + dataset_job_stage_service.finish_dataset_job_stage(dataset_job_stage=dataset_job_stage, + finish_state=DatasetJobState.SUCCEEDED) + return ExecutorResult.SUCCEEDED + item_name = dataset_job_stage.get_context().batch_stats_item_name + executor_result = ExecutorResult.SKIP + if item_name: + runners = ComposerService(session).get_recent_runners(item_name, count=1) + if len(runners) > 0: + if runners[0].status == RunnerStatus.DONE.value: + self._set_data_batch_num_example(session, dataset_job_stage) + dataset_job_stage_service.finish_dataset_job_stage(dataset_job_stage=dataset_job_stage, + finish_state=DatasetJobState.SUCCEEDED) + executor_result = ExecutorResult.SUCCEEDED + elif runners[0].status == RunnerStatus.FAILED.value: + batch = session.query(DataBatch).get(dataset_job_stage.data_batch_id) + # set file size to illegal value to let frontend know batch stats failed + batch.file_size = ERROR_BATCH_SIZE + session.flush() + self._set_data_batch_num_example(session, dataset_job_stage) + dataset_job_stage_service.finish_dataset_job_stage(dataset_job_stage=dataset_job_stage, + finish_state=DatasetJobState.SUCCEEDED) + executor_result = ExecutorResult.SUCCEEDED + else: + item_name = f'batch_stats_{dataset_job_stage.data_batch.id}_{dataset_job_stage.id}' + runner_input = RunnerInput(batch_stats_input=BatchStatsInput(batch_id=dataset_job_stage.data_batch.id)) + ComposerService(session).collect_v2(name=item_name, items=[(ItemType.BATCH_STATS, runner_input)]) + context = dataset_job_stage.get_context() + context.batch_stats_item_name = item_name + dataset_job_stage.set_context(context) + return executor_result + if workflow_state in (WorkflowExternalState.FAILED, WorkflowExternalState.STOPPED, + WorkflowExternalState.INVALID): + dataset_job_stage_service.finish_dataset_job_stage(dataset_job_stage=dataset_job_stage, + finish_state=DatasetJobState.FAILED) + return ExecutorResult.SUCCEEDED + return ExecutorResult.SKIP + + def _process_succeeded_dataset_job_stage(self, session: Session, dataset_job_stage: DatasetJobStage): + """Schedules when running dataset job stage succeeded, same logic as _process_succeeded_dataset_job. + + 1. publish output_dataset if needed + 2. create transaction for participants + 3. delete side_output data + """ + output_dataset: Dataset = dataset_job_stage.dataset_job.output_dataset + meta_info = output_dataset.get_meta_info() + if meta_info.need_publish: + DatasetService(session).publish_dataset(dataset_id=output_dataset.id, value=meta_info.value) + logging.info(f'[dataset_job_scheduler] auto publish dataset {output_dataset.id}') + # set need_publish to false after publish + meta_info.need_publish = False + output_dataset.set_meta_info(meta_info) + self._delete_side_output(session=session, dataset_job_stage=dataset_job_stage) + + def _process_failed_dataset_job_stage(self, session: Session, dataset_job_stage: DatasetJobStage): + """Schedules when running dataset job stage failed. + + 1. 
+        1. delete side_output data
+        """
+        self._delete_side_output(session=session, dataset_job_stage=dataset_job_stage)
+
+    def _need_batch_stats(self, dataset_job_kind: DatasetJobKind) -> bool:
+        # batch sample info is now generated by the analyzer spark task,
+        # so data stats must run again after the analyzer for these kinds
+        return dataset_job_kind in [
+            DatasetJobKind.RSA_PSI_DATA_JOIN, DatasetJobKind.LIGHT_CLIENT_RSA_PSI_DATA_JOIN,
+            DatasetJobKind.OT_PSI_DATA_JOIN, DatasetJobKind.LIGHT_CLIENT_OT_PSI_DATA_JOIN,
+            DatasetJobKind.HASH_DATA_JOIN, DatasetJobKind.DATA_JOIN, DatasetJobKind.DATA_ALIGNMENT,
+            DatasetJobKind.IMPORT_SOURCE, DatasetJobKind.ANALYZER
+        ]
+
+    def _get_single_batch_num_example(self, dataset: Dataset) -> int:
+        try:
+            return dataset.get_single_batch().num_example
+        except TypeError as e:
+            logging.info(f'single data_batch not found, err: {e}')
+            return 0
+
+    def _set_data_batch_num_example(self, session: Session, dataset_job_stage: DatasetJobStage):
+        input_dataset: Dataset = dataset_job_stage.dataset_job.input_dataset
+        if input_dataset.dataset_type == DatasetType.PSI:
+            input_data_batch_num_example = self._get_single_batch_num_example(input_dataset)
+        else:
+            # TODO(liuhehan): add filter input data_batch by time_range
+            input_data_batch = session.query(DataBatch).filter(DataBatch.dataset_id == input_dataset.id).filter(
+                DataBatch.event_time == dataset_job_stage.event_time).first()
+            input_data_batch_num_example = input_data_batch.num_example if input_data_batch else 0
+        output_data_batch_num_example = dataset_job_stage.data_batch.num_example if dataset_job_stage.data_batch else 0
+        context = dataset_job_stage.get_context()
+        context.input_data_batch_num_example = input_data_batch_num_example
+        context.output_data_batch_num_example = output_data_batch_num_example
+        dataset_job_stage.set_context(context)
+
+    def _delete_side_output(self, session: Session, dataset_job_stage: DatasetJobStage):
+        output_dataset: Dataset = dataset_job_stage.dataset_job.output_dataset
+        if output_dataset.dataset_kind not in [DatasetKindV2.RAW, DatasetKindV2.PROCESSED]:
+            return
+        batch_name = dataset_job_stage.data_batch.batch_name
+        paths = [DatasetDirectory(output_dataset.path).side_output_path(batch_name)]
+        # hack to get rsa_psi side_output:
+        # raw_data_path: raw_data_job side_output
+        # psi_data_join_path: psi_data_join_job side_output; we only delete the psi_output folder,
+        # as the data_block folder is still used by model training
+        if dataset_job_stage.dataset_job.kind == DatasetJobKind.RSA_PSI_DATA_JOIN:
+            workflow: Workflow = dataset_job_stage.workflow
+            raw_data_folder = build_job_name(workflow.uuid, 'raw-data-job')
+            raw_data_path = os.path.join(workflow.project.get_storage_root_path(None), 'raw_data', raw_data_folder)
+            psi_data_join_folder_name = build_job_name(workflow.uuid, 'psi-data-join-job')
+            psi_data_join_path = os.path.join(workflow.project.get_storage_root_path(None), 'data_source',
+                                              psi_data_join_folder_name, 'psi_output')
+            paths.extend([raw_data_path, psi_data_join_path])
+        target_start_at = to_timestamp(now() + SIDE_OUTPUT_CLEANUP_DEFAULT_DELAY)
+        cleanup_param = CleanupParameter(resource_id=dataset_job_stage.id,
+                                         resource_type=ResourceType.DATASET_JOB_STAGE.name,
+                                         payload=CleanupPayload(paths=paths),
+                                         target_start_at=target_start_at)
+        CleanupService(session).create_cleanup(cleanup_parmeter=cleanup_param)
+
+    def get_item_ids(self) -> List[int]:
+        with db.session_scope() as session:
+            running_dataset_job_stage_ids = session.query(DatasetJobStage.id).outerjoin(
+                DatasetJob, DatasetJob.id ==
DatasetJobStage.dataset_job_id).filter( + DatasetJobStage.state == DatasetJobState.RUNNING).filter( + DatasetJob.scheduler_state != DatasetJobSchedulerState.PENDING).all() + return [running_dataset_job_stage_id for running_dataset_job_stage_id, *_ in running_dataset_job_stage_ids] + + def run_item(self, item_id: int) -> ExecutorResult: + with db.session_scope() as session: + # we set isolation_level to SERIALIZABLE to make sure state won't be changed within this session + session.connection(execution_options={'isolation_level': 'SERIALIZABLE'}) + dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).get(item_id) + if dataset_job_stage.state != DatasetJobState.RUNNING: + return ExecutorResult.SKIP + executor_result = self._process_running_dataset_job_stage(session, dataset_job_stage) + if dataset_job_stage.state == DatasetJobState.SUCCEEDED: + self._process_succeeded_dataset_job_stage(session, dataset_job_stage) + elif dataset_job_stage.state == DatasetJobState.FAILED: + self._process_failed_dataset_job_stage(session, dataset_job_stage) + session.commit() + return executor_result diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/running_dataset_job_stage_executor_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/running_dataset_job_stage_executor_test.py new file mode 100644 index 000000000..81b8c7547 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/running_dataset_job_stage_executor_test.py @@ -0,0 +1,420 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import patch, MagicMock +from datetime import datetime, timedelta, timezone +from sqlalchemy.orm import Session + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult +from fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor import RunningDatasetJobStageExecutor +from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetFormat, DatasetJob, DatasetJobKind, \ + DatasetJobSchedulerState, DatasetJobStage, DatasetJobState, DatasetKindV2, DatasetType, ResourceState +from fedlearner_webconsole.dataset.consts import ERROR_BATCH_SIZE +from fedlearner_webconsole.flag.models import _Flag +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.composer.models import RunnerStatus, SchedulerRunner +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.workflow.models import Workflow, WorkflowExternalState, WorkflowState +from fedlearner_webconsole.participant.models import ProjectParticipant, Participant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.proto import dataset_pb2, service_pb2 +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupParameter, CleanupPayload +from fedlearner_webconsole.proto.composer_pb2 import BatchStatsInput, RunnerInput +from testing.no_web_server_test_case import NoWebServerTestCase + + +def mock_process_pending_dataset_job_stage(self, session: Session, + dataset_job_stage: DatasetJobStage) -> ExecutorResult: + if dataset_job_stage.id == 1: + return ExecutorResult.SKIP + if dataset_job_stage.id == 2: + dataset_job_stage.state = DatasetJobState.SUCCEEDED + return ExecutorResult.SUCCEEDED + dataset_job_stage.state = DatasetJobState.FAILED + return ExecutorResult.SUCCEEDED + + +class ProcessDatasetJobStagesTest(NoWebServerTestCase): + + _PROJECT_ID = 1 + _JOB_ID = 1 + _WORKFLOW_ID = 1 + _PARTICIPANT_ID = 1 + _INPUT_DATASET_ID = 1 + _STREAMING_OUTPUT_DATASET_ID = 2 + _PSI_OUTPUT_DATASET_ID = 3 + _DATA_BATCH_NO_EVENT_TIME_ID = 1 + _DATA_BATCH_WITH_EVENT_TIME_ID = 2 + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + project = Project(id=self._PROJECT_ID, name='test-project') + workflow = Workflow(id=self._WORKFLOW_ID, + project_id=self._PROJECT_ID, + state=WorkflowState.READY, + uuid='workflow_uuid') + job = Job(id=self._JOB_ID, + state=JobState.NEW, + job_type=JobType.PSI_DATA_JOIN, + workflow_id=self._WORKFLOW_ID, + project_id=1) + session.add_all([project, workflow, job]) + input_dataset = Dataset(id=self._INPUT_DATASET_ID, + name='input dataset', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + created_at=datetime(2012, 1, 14, 12, 0, 6), + dataset_kind=DatasetKindV2.RAW) + streaming_output_dataset = Dataset(id=self._STREAMING_OUTPUT_DATASET_ID, + name='streaming output dataset', + uuid='streaming output_dataset uuid', + path='/data/dataset/321', + project_id=self._PROJECT_ID, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.PROCESSED, + dataset_type=DatasetType.STREAMING) + batch_with_event_time = DataBatch(id=self._DATA_BATCH_WITH_EVENT_TIME_ID, + path='/data/dataset/321/batch/20220101', + dataset_id=self._STREAMING_OUTPUT_DATASET_ID, + event_time=datetime(2022, 1, 1)) + 
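+            # The PSI dataset below holds a single batch with no event_time; its
+            # batch folder is the fixed name `0`.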
psi_output_dataset = Dataset(id=self._PSI_OUTPUT_DATASET_ID, + name='psi output dataset', + uuid='psi output_dataset uuid', + path='/data/dataset/321', + project_id=self._PROJECT_ID, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.PROCESSED, + dataset_type=DatasetType.PSI) + batch_no_event_time = DataBatch(id=self._DATA_BATCH_NO_EVENT_TIME_ID, + path='/data/dataset/321/batch/0', + dataset_id=self._PSI_OUTPUT_DATASET_ID) + session.add_all([ + input_dataset, streaming_output_dataset, batch_with_event_time, psi_output_dataset, batch_no_event_time + ]) + participant = Participant(id=self._PARTICIPANT_ID, + name='participant_1', + domain_name='fl-fake_domain_name_1.com') + project_participant = ProjectParticipant(project_id=self._PROJECT_ID, participant_id=self._PARTICIPANT_ID) + session.add_all([participant, project_participant]) + session.commit() + + def _insert_psi_dataset_job_and_stage(self, state: DatasetJobState, job_id: int): + with db.session_scope() as session: + psi_dataset_job = DatasetJob(id=job_id, + uuid=f'psi dataset_job uuid {job_id}', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._PSI_OUTPUT_DATASET_ID, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=state, + scheduler_state=DatasetJobSchedulerState.STOPPED, + coordinator_id=0, + workflow_id=0) + psi_dataset_job_stage = DatasetJobStage(id=job_id, + uuid=f'psi dataset_job_stage uuid {job_id}', + name='psi dataset job stage', + project_id=self._PROJECT_ID, + workflow_id=self._WORKFLOW_ID, + dataset_job_id=job_id, + data_batch_id=self._DATA_BATCH_NO_EVENT_TIME_ID, + state=state) + session.add_all([psi_dataset_job, psi_dataset_job_stage]) + session.commit() + + def _insert_streaming_dataset_job_and_stage(self, state: DatasetJobState, job_id: int): + with db.session_scope() as session: + streaming_dataset_job = DatasetJob(id=job_id, + uuid=f'streaming dataset_job uuid {job_id}', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._STREAMING_OUTPUT_DATASET_ID, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=state, + scheduler_state=DatasetJobSchedulerState.STOPPED, + coordinator_id=0, + workflow_id=0, + time_range=timedelta(days=1)) + streaming_dataset_job_stage = DatasetJobStage(id=job_id, + uuid=f'streaming dataset_job_stage uuid {job_id}', + name='streaming dataset job stage', + project_id=self._PROJECT_ID, + workflow_id=self._WORKFLOW_ID, + dataset_job_id=job_id, + data_batch_id=self._DATA_BATCH_WITH_EVENT_TIME_ID, + state=state, + event_time=datetime(2022, 1, 1)) + session.add_all([streaming_dataset_job, streaming_dataset_job_stage]) + session.commit() + + def test_get_item_ids(self): + dataset_job_stage_pending_id = 1 + dataset_job_stage_running_id = 2 + dataset_job_stage_succeeded_id = 3 + self._insert_psi_dataset_job_and_stage(DatasetJobState.PENDING, dataset_job_stage_pending_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.SUCCEEDED, dataset_job_stage_succeeded_id) + executor = RunningDatasetJobStageExecutor() + processed_dataset_job_stage_ids = executor.get_item_ids() + self.assertEqual(processed_dataset_job_stage_ids, [dataset_job_stage_running_id]) + + @patch( + 'fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.RunningDatasetJobStageExecutor' \ + '._process_running_dataset_job_stage', mock_process_pending_dataset_job_stage + ) + @patch( + 
'fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.RunningDatasetJobStageExecutor' \ + '._process_succeeded_dataset_job_stage' + ) + @patch( + 'fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.RunningDatasetJobStageExecutor' \ + '._process_failed_dataset_job_stage' + ) + def test_run_item_not_running(self, mock_process_failed_dataset_job_stage: MagicMock, + mock_process_succeeded_dataset_job_stage: MagicMock): + dataset_job_stage_running_1_id = 1 + dataset_job_stage_running_2_id = 2 + dataset_job_stage_running_3_id = 3 + dataset_job_stage_pending_id = 4 + self._insert_psi_dataset_job_and_stage(DatasetJobState.PENDING, dataset_job_stage_pending_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_1_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_2_id) + self._insert_psi_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_3_id) + executor = RunningDatasetJobStageExecutor() + + # test not running + executor_result = executor.run_item([dataset_job_stage_pending_id]) + self.assertEqual(executor_result, ExecutorResult.SKIP) + mock_process_succeeded_dataset_job_stage.assert_not_called() + mock_process_failed_dataset_job_stage.assert_not_called() + + # test skip + executor_result = executor.run_item([dataset_job_stage_running_1_id]) + self.assertEqual(executor_result, ExecutorResult.SKIP) + mock_process_succeeded_dataset_job_stage.assert_not_called() + mock_process_failed_dataset_job_stage.assert_not_called() + + # test succeeded + executor_result = executor.run_item([dataset_job_stage_running_2_id]) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + mock_process_succeeded_dataset_job_stage.assert_called_once() + mock_process_failed_dataset_job_stage.assert_not_called() + mock_process_succeeded_dataset_job_stage.reset_mock() + + # test failed + executor_result = executor.run_item([dataset_job_stage_running_3_id]) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + mock_process_succeeded_dataset_job_stage.assert_not_called() + mock_process_failed_dataset_job_stage.assert_called_once() + + @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.ComposerService.'\ + 'collect_v2') + @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.'\ + 'RunningDatasetJobStageExecutor._need_batch_stats') + @patch('fedlearner_webconsole.workflow.models.Workflow.get_state_for_frontend') + @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.ComposerService.'\ + 'get_recent_runners') + def test_process_running_dataset_job_stage(self, mock_get_recent_runners: MagicMock, + mock_get_state_for_frontend: MagicMock, mock_need_batch_stats: MagicMock, + mock_collect_v2: MagicMock): + dataset_job_stage_running_id = 1 + self._insert_streaming_dataset_job_and_stage(DatasetJobState.RUNNING, dataset_job_stage_running_id) + executor = RunningDatasetJobStageExecutor() + + # test workflow failed + mock_get_state_for_frontend.return_value = WorkflowExternalState.FAILED + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_running_id) + # pylint: disable=protected-access + executor._process_running_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage) + self.assertEqual(dataset_job_stage.state, DatasetJobState.FAILED) + + mock_get_recent_runners.reset_mock() + 
mock_need_batch_stats.reset_mock() + mock_get_state_for_frontend.reset_mock() + + # test no need batch stats + mock_need_batch_stats.return_value = False + mock_get_state_for_frontend.return_value = WorkflowExternalState.COMPLETED + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_running_id) + # pylint: disable=protected-access + executor._process_running_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage) + self.assertEqual(dataset_job_stage.state, DatasetJobState.SUCCEEDED) + + mock_get_recent_runners.reset_mock() + mock_need_batch_stats.reset_mock() + mock_get_state_for_frontend.reset_mock() + + # test need batch stats and runner done + mock_need_batch_stats.return_value = True + mock_get_state_for_frontend.return_value = WorkflowExternalState.COMPLETED + mock_get_recent_runners.return_value = [SchedulerRunner(status=RunnerStatus.DONE.value)] + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_running_id) + dataset_job_stage.set_context(dataset_pb2.DatasetJobStageContext(batch_stats_item_name='123')) + # pylint: disable=protected-access + executor._process_running_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage) + self.assertEqual(dataset_job_stage.state, DatasetJobState.SUCCEEDED) + + mock_get_recent_runners.reset_mock() + mock_need_batch_stats.reset_mock() + mock_get_state_for_frontend.reset_mock() + + # test need batch stats and runner failed + mock_need_batch_stats.return_value = True + mock_get_state_for_frontend.return_value = WorkflowExternalState.COMPLETED + mock_get_recent_runners.return_value = [SchedulerRunner(status=RunnerStatus.FAILED.value)] + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_running_id) + dataset_job_stage.set_context(dataset_pb2.DatasetJobStageContext(batch_stats_item_name='123')) + # pylint: disable=protected-access + executor._process_running_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage) + self.assertEqual(dataset_job_stage.state, DatasetJobState.SUCCEEDED) + mock_get_recent_runners.assert_called_once_with('123', count=1) + batch = session.query(DataBatch).get(self._DATA_BATCH_WITH_EVENT_TIME_ID) + self.assertEqual(batch.file_size, ERROR_BATCH_SIZE) + + mock_get_recent_runners.reset_mock() + mock_need_batch_stats.reset_mock() + mock_get_state_for_frontend.reset_mock() + + # test need batch stats and runner running + mock_need_batch_stats.return_value = True + mock_get_state_for_frontend.return_value = WorkflowExternalState.COMPLETED + mock_get_recent_runners.return_value = [SchedulerRunner(status=RunnerStatus.RUNNING.value)] + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_running_id) + dataset_job_stage.set_context(dataset_pb2.DatasetJobStageContext(batch_stats_item_name='123')) + # pylint: disable=protected-access + executor._process_running_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage) + self.assertEqual(dataset_job_stage.state, DatasetJobState.RUNNING) + + mock_get_recent_runners.reset_mock() + mock_need_batch_stats.reset_mock() + mock_get_state_for_frontend.reset_mock() + + # test no runner + mock_need_batch_stats.return_value = True + mock_get_state_for_frontend.return_value = WorkflowExternalState.COMPLETED + mock_get_recent_runners.return_value = [] + with db.session_scope() as session: + dataset_job_stage 
= session.query(DatasetJobStage).get(dataset_job_stage_running_id)
+            # pylint: disable=protected-access
+            executor._process_running_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage)
+            self.assertEqual(dataset_job_stage.state, DatasetJobState.RUNNING)
+            runner_input = RunnerInput(batch_stats_input=BatchStatsInput(batch_id=self._DATA_BATCH_WITH_EVENT_TIME_ID))
+            mock_collect_v2.assert_called_once_with(
+                name=f'batch_stats_{self._DATA_BATCH_WITH_EVENT_TIME_ID}_{dataset_job_stage_running_id}',
+                items=[(ItemType.BATCH_STATS, runner_input)])
+
+    # NOTE: @patch decorators are applied bottom-up, so the mock of the bottom-most
+    # patch (_delete_side_output) is injected as the first argument.
+    @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.'\
+           'RunningDatasetJobStageExecutor._create_transaction')
+    @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.'\
+           'RunningDatasetJobStageExecutor._delete_side_output')
+    def test_process_succeeded_dataset_job_stage(self, mock_delete_side_output: MagicMock,
+                                                 mock_create_transaction: MagicMock):
+        dataset_job_stage_succeeded_id = 1
+        self._insert_streaming_dataset_job_and_stage(DatasetJobState.SUCCEEDED, dataset_job_stage_succeeded_id)
+        executor = RunningDatasetJobStageExecutor()
+
+        # test no need publish
+        with db.session_scope() as session:
+            dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_succeeded_id)
+            dataset_job_stage.dataset_job.output_dataset.dataset_kind = DatasetKindV2.RAW
+            # pylint: disable=protected-access
+            executor._process_succeeded_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage)
+            self.assertFalse(dataset_job_stage.dataset_job.output_dataset.is_published)
+            mock_create_transaction.assert_called_once()
+            mock_delete_side_output.assert_called_once()
+
+        mock_create_transaction.reset_mock()
+        mock_delete_side_output.reset_mock()
+
+        # test need publish
+        with db.session_scope() as session:
+            dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_succeeded_id)
+            dataset_job_stage.dataset_job.output_dataset.dataset_kind = DatasetKindV2.RAW
+            dataset_job_stage.dataset_job.output_dataset.set_meta_info(dataset_pb2.DatasetMetaInfo(need_publish=True))
+            # pylint: disable=protected-access
+            executor._process_succeeded_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage)
+            self.assertTrue(dataset_job_stage.dataset_job.output_dataset.is_published)
+            self.assertFalse(dataset_job_stage.dataset_job.output_dataset.get_meta_info().need_publish)
+            mock_create_transaction.assert_called_once()
+            mock_delete_side_output.assert_called_once()
+
+    @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.'\
+           'RunningDatasetJobStageExecutor._delete_side_output')
+    def test_process_failed_dataset_job_stage(self, mock_delete_side_output: MagicMock):
+        dataset_job_stage_failed_id = 1
+        self._insert_streaming_dataset_job_and_stage(DatasetJobState.FAILED, dataset_job_stage_failed_id)
+        executor = RunningDatasetJobStageExecutor()
+
+        # test failed dataset_job_stage
+        with db.session_scope() as session:
+            dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_failed_id)
+            # pylint: disable=protected-access
+            executor._process_failed_dataset_job_stage(session=session, dataset_job_stage=dataset_job_stage)
+            mock_delete_side_output.assert_called_once()
+
+    @patch('fedlearner_webconsole.project.models.Project.get_storage_root_path')
+    @patch('fedlearner_webconsole.cleanup.services.CleanupService.create_cleanup')
+    @patch('fedlearner_webconsole.dataset.scheduler.running_dataset_job_stage_executor.now',
lambda: datetime(2022, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)) + def test_delete_side_output(self, cleanup_mock: MagicMock, mock_get_storage_root_path: MagicMock): + dataset_job_stage_succeeded_id = 1 + self._insert_streaming_dataset_job_and_stage(DatasetJobState.SUCCEEDED, dataset_job_stage_succeeded_id) + executor = RunningDatasetJobStageExecutor() + mock_get_storage_root_path.return_value = '/data' + + # test normal dataset_job + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_succeeded_id) + # pylint: disable=protected-access + executor._delete_side_output(session=session, dataset_job_stage=dataset_job_stage) + payload = CleanupPayload(paths=['/data/dataset/321/side_output/20220101']) + cleanup_parmeter = CleanupParameter(resource_id=dataset_job_stage.id, + resource_type='DATASET_JOB_STAGE', + payload=payload, + target_start_at=to_timestamp( + datetime(2022, 1, 2, 0, 0, 0, 0, tzinfo=timezone.utc))) + cleanup_mock.assert_called_with(cleanup_parmeter=cleanup_parmeter) + # test rsa_psi dataset_job + cleanup_mock.reset_mock() + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_succeeded_id) + dataset_job_stage.dataset_job.kind = DatasetJobKind.RSA_PSI_DATA_JOIN + # pylint: disable=protected-access + executor._delete_side_output(session=session, dataset_job_stage=dataset_job_stage) + payload = CleanupPayload(paths=[ + '/data/dataset/321/side_output/20220101', + '/data/raw_data/workflow_uuid-raw-data-job', + '/data/data_source/workflow_uuid-psi-data-join-job/psi_output', + ]) + cleanup_parmeter = CleanupParameter(resource_id=dataset_job_stage.id, + resource_type='DATASET_JOB_STAGE', + payload=payload, + target_start_at=to_timestamp( + datetime(2022, 1, 2, 0, 0, 0, 0, tzinfo=timezone.utc))) + cleanup_mock.assert_called_with(cleanup_parmeter=cleanup_parmeter) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/update_auth_status_executor.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/update_auth_status_executor.py new file mode 100644 index 000000000..e5e27374c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/update_auth_status_executor.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import List +from sqlalchemy import or_ + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.controllers import DatasetJobController +from fedlearner_webconsole.dataset.services import DatasetService +from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult +from fedlearner_webconsole.dataset.models import DATASET_JOB_FINISHED_STATE, Dataset, DatasetJob, \ + DatasetJobSchedulerState + + +class UpdateAuthStatusExecutor(BaseExecutor): + + def get_item_ids(self) -> List[int]: + with db.session_scope() as session: + datasets = DatasetService(session=session).query_dataset_with_parent_job().filter( + Dataset.participants_info.isnot(None)).filter( + or_(DatasetJob.state.not_in(DATASET_JOB_FINISHED_STATE), + DatasetJob.scheduler_state != DatasetJobSchedulerState.STOPPED)).all() + return [dataset.id for dataset in datasets] + + def run_item(self, item_id: int) -> ExecutorResult: + with db.session_scope() as session: + dataset: Dataset = session.query(Dataset).get(item_id) + # if all participants cache are authorized, just skip + if dataset.is_all_participants_authorized(): + return ExecutorResult.SKIP + DatasetJobController(session=session).update_auth_status_cache(dataset_job=dataset.parent_dataset_job) + session.commit() + return ExecutorResult.SUCCEEDED diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/update_auth_status_executor_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/update_auth_status_executor_test.py new file mode 100644 index 000000000..e5088e886 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/scheduler/update_auth_status_executor_test.py @@ -0,0 +1,110 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import patch, MagicMock + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobSchedulerState, \ + DatasetJobState, DatasetKindV2, DatasetType +from fedlearner_webconsole.dataset.scheduler.update_auth_status_executor import UpdateAuthStatusExecutor +from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus + + +class UpdateAuthStatusExecutorTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _WORKFLOW_ID = 1 + _INPUT_DATASET_ID = 1 + _OUTPUT_DATASET_ID = 2 + _OUTPUT_DATASET_2_ID = 3 + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + dataset_job_1 = DatasetJob(id=1, + uuid='dataset_job_1 uuid', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._OUTPUT_DATASET_ID, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.SUCCEEDED, + coordinator_id=0, + workflow_id=0, + scheduler_state=DatasetJobSchedulerState.STOPPED) + session.add(dataset_job_1) + output_dataset_1 = Dataset(id=self._OUTPUT_DATASET_ID, + uuid='dataset_1 uuid', + name='default dataset_1', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.PROCESSED, + is_published=True) + participants_info = ParticipantsInfo( + participants_map={ + 'coordinator-domain-name': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'participant-domain-name': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + output_dataset_1.set_participants_info(participants_info=participants_info) + session.add(output_dataset_1) + dataset_job_2 = DatasetJob(id=2, + uuid='dataset_job_2 uuid', + project_id=self._PROJECT_ID, + input_dataset_id=self._INPUT_DATASET_ID, + output_dataset_id=self._OUTPUT_DATASET_2_ID, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=0, + workflow_id=0, + scheduler_state=DatasetJobSchedulerState.STOPPED) + session.add(dataset_job_2) + output_dataset_2 = Dataset(id=self._OUTPUT_DATASET_2_ID, + uuid='dataset_2 uuid', + name='default dataset_1', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=self._PROJECT_ID, + dataset_kind=DatasetKindV2.PROCESSED, + is_published=True) + participants_info = ParticipantsInfo( + participants_map={ + 'coordinator-domain-name': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'participant-domain-name': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + output_dataset_2.set_participants_info(participants_info=participants_info) + session.add(output_dataset_2) + session.commit() + + def test_get_item_ids(self): + update_auth_status_executor = UpdateAuthStatusExecutor() + self.assertEqual(update_auth_status_executor.get_item_ids(), [3]) + + @patch('fedlearner_webconsole.dataset.controllers.DatasetJobController.update_auth_status_cache') + def test_run_item(self, mock_update_auth_status_cache: MagicMock): + + update_auth_status_executor = UpdateAuthStatusExecutor() + with db.session_scope() as session: + executor_result = update_auth_status_executor.run_item(3) + self.assertEqual(executor_result, ExecutorResult.SUCCEEDED) + 
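+            # dataset 3 still has a participant in AuthStatus.PENDING, so the executor
+            # must delegate a cache refresh to DatasetJobController.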
mock_update_auth_status_cache.assert_called_once() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/services.py b/web_console_v2/api/fedlearner_webconsole/dataset/services.py index a21b7cbeb..561bc4ed7 100644 --- a/web_console_v2/api/fedlearner_webconsole/dataset/services.py +++ b/web_console_v2/api/fedlearner_webconsole/dataset/services.py @@ -1,124 +1,717 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -# coding: utf-8 import json import logging -from typing import List - -from sqlalchemy.orm import Session +import os +from datetime import datetime, timedelta +from typing import List, Optional, Tuple, Union +from sqlalchemy import and_, or_ +from sqlalchemy.orm import Session, joinedload, Query -from fedlearner_webconsole.dataset.models import Dataset -from fedlearner_webconsole.dataset.sparkapp.pipeline.util import \ - dataset_meta_path, dataset_features_path, dataset_hist_path -from fedlearner_webconsole.exceptions import NotFoundException +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.flask_utils import get_current_user +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.workflow import fill_variables +from fedlearner_webconsole.utils.pp_datetime import from_timestamp, to_timestamp, now from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.exceptions import (InvalidArgumentException, NotFoundException, MethodNotAllowedException, + ResourceConflictException) +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.dataset.models import (DATASET_JOB_FINISHED_STATE, DATASET_STATE_CONVERT_MAP_V2, + LOCAL_DATASET_JOBS, MICRO_DATASET_JOB, DatasetFormat, ResourceState, + DatasetJobKind, DatasetJobStage, DatasetJobState, DatasetKindV2, + StoreFormat, DatasetType, Dataset, ImportType, DataBatch, DatasetJob, + DataSource, ProcessedDataset, DatasetJobSchedulerState) +from fedlearner_webconsole.dataset.meta_data import MetaData, ImageMetaData +from fedlearner_webconsole.dataset.delete_dependency import DatasetDeleteDependency +from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory +from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger +from fedlearner_webconsole.dataset.filter_funcs import dataset_format_filter_op_equal, dataset_format_filter_op_in +from fedlearner_webconsole.dataset.util import get_dataset_path, parse_event_time_to_daily_folder_name, \ + parse_event_time_to_hourly_folder_name +from 
fedlearner_webconsole.dataset.metrics import emit_dataset_job_submission_store, emit_dataset_job_duration_store +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupParameter, CleanupPayload +from fedlearner_webconsole.proto.dataset_pb2 import CronType, DatasetJobGlobalConfigs +from fedlearner_webconsole.proto import dataset_pb2 +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp +from fedlearner_webconsole.proto.review_pb2 import TicketDetails, TicketType +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.cleanup.models import ResourceType +from fedlearner_webconsole.cleanup.services import CleanupService + + +class DataReader(object): + + def __init__(self, dataset_path: str): + self._path = dataset_path + self._dataset_directory = DatasetDirectory(dataset_path=dataset_path) + self._file_manager = FileManager() + + # meta is generated from sparkapp/pipeline/analyzer.py + def metadata(self, batch_name: str) -> MetaData: + meta_path = self._dataset_directory.batch_meta_file(batch_name=batch_name) + try: + return MetaData(json.loads(self._file_manager.read(meta_path))) + except Exception as e: # pylint: disable=broad-except + logging.info(f'failed to read meta file, path: {meta_path}, err: {e}') + return MetaData() + + def image_metadata(self, thumbnail_dir_path: str, batch_name: str) -> ImageMetaData: + meta_path = self._dataset_directory.batch_meta_file(batch_name=batch_name) + try: + return ImageMetaData(thumbnail_dir_path, json.loads(self._file_manager.read(meta_path))) + except Exception as e: # pylint: disable=broad-except + logging.info(f'failed to read meta file, path: {meta_path}, err: {e}') + return ImageMetaData(thumbnail_dir_path) class DatasetService(object): + + DATASET_CLEANUP_DEFAULT_DELAY = timedelta(days=7) + PUBLISHED_DATASET_FILTER_FIELDS = { + 'uuid': + SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}), + 'kind': + SupportedField(type=FieldType.STRING, ops={ + FilterOp.IN: None, + FilterOp.EQUAL: None + }), + 'dataset_format': + SupportedField(type=FieldType.STRING, + ops={ + FilterOp.IN: dataset_format_filter_op_in, + FilterOp.EQUAL: dataset_format_filter_op_equal + }), + } + def __init__(self, session: Session): self._session = session self._file_manager = FileManager() + self._published_dataset_filter_builder = FilterBuilder(model_class=Dataset, + supported_fields=self.PUBLISHED_DATASET_FILTER_FIELDS) - def get_dataset_preview(self, dataset_id: int = 0) -> dict: - dataset = self._session.query(Dataset).filter( - Dataset.id == dataset_id).first() + @staticmethod + def filter_dataset_state(query: Query, frontend_states: List[ResourceState]) -> Query: + if len(frontend_states) == 0: + return query + dataset_job_states = [] + for k, v in DATASET_STATE_CONVERT_MAP_V2.items(): + if v in frontend_states: + dataset_job_states.append(k) + state_filter = DatasetJob.state.in_(dataset_job_states) + # internal_processed dataset is now hack to succeeded, + # so here we add all internal_processed dataset when filter succeeded dataset + if ResourceState.SUCCEEDED in frontend_states: + state_filter = or_(state_filter, Dataset.dataset_kind == DatasetKindV2.INTERNAL_PROCESSED) + return query.filter(state_filter) + + def query_dataset_with_parent_job(self) -> Query: + return self._session.query(Dataset).outerjoin( + DatasetJob, and_(DatasetJob.output_dataset_id == Dataset.id, DatasetJob.input_dataset_id != 
Dataset.id))
+
+    def create_dataset(self, dataset_parameter: dataset_pb2.DatasetParameter) -> Dataset:
+        # check project existence
+        project = self._session.query(Project).get(dataset_parameter.project_id)
+        if project is None:
+            raise NotFoundException(message=f'cannot find project with id: {dataset_parameter.project_id}')
+
+        # Create dataset
+        dataset = Dataset(
+            name=dataset_parameter.name,
+            uuid=dataset_parameter.uuid or resource_uuid(),
+            is_published=dataset_parameter.is_published,
+            dataset_type=DatasetType(dataset_parameter.type),
+            comment=dataset_parameter.comment,
+            project_id=dataset_parameter.project_id,
+            dataset_kind=DatasetKindV2(dataset_parameter.kind),
+            dataset_format=DatasetFormat[dataset_parameter.format].value,
+            # set participant dataset creator_username to empty if dataset is created by coordinator
+            # TODO(liuhehan): set participant dataset creator_username to the username who authorizes it
+            creator_username=get_current_user().username if get_current_user() else '',
+        )
+        if dataset_parameter.path and dataset.dataset_kind in [
+                DatasetKindV2.EXPORTED, DatasetKindV2.INTERNAL_PROCESSED
+        ]:
+            dataset.path = dataset_parameter.path
+        else:
+            dataset.path = get_dataset_path(dataset_name=dataset.name, uuid=dataset.uuid)
+        if dataset_parameter.import_type:
+            dataset.import_type = ImportType(dataset_parameter.import_type)
+        if dataset_parameter.store_format:
+            dataset.store_format = StoreFormat(dataset_parameter.store_format)
+        if dataset_parameter.auth_status:
+            dataset.auth_status = AuthStatus[dataset_parameter.auth_status]
+        if dataset_parameter.creator_username:
+            dataset.creator_username = dataset_parameter.creator_username
+        elif get_current_user():
+            dataset.creator_username = get_current_user().username
+        meta_info = dataset_pb2.DatasetMetaInfo(need_publish=dataset_parameter.need_publish,
+                                                value=dataset_parameter.value,
+                                                schema_checkers=dataset_parameter.schema_checkers)
+        dataset.set_meta_info(meta_info)
+        self._session.add(dataset)
+        return dataset
+
+    def get_dataset(self, dataset_id: int = 0) -> dataset_pb2.Dataset:
+        dataset = self._session.query(Dataset).with_polymorphic([ProcessedDataset,
+                                                                 Dataset]).filter(Dataset.id == dataset_id).first()
+        if not dataset:
+            raise NotFoundException(f'Failed to find dataset: {dataset_id}')
+        return dataset.to_proto()
+
+    def get_dataset_preview(self, dataset_id: int, batch_id: int) -> dict:
+        batch = self._session.query(DataBatch).get(batch_id)
+        if batch is None:
+            raise NotFoundException(f'Failed to find data batch: {batch_id}')
+        dataset = self._session.query(Dataset).filter(Dataset.id == dataset_id).first()
         if not dataset:
             raise NotFoundException(f'Failed to find dataset: {dataset_id}')
-        dataset_path = dataset.path
-        # meta is generated from sparkapp/pipeline/analyzer.py
-        meta_path = dataset_meta_path(dataset_path)
-        # data format:
-        # {
-        #     'dtypes': {
-        #         'f01': 'bigint'
-        #     },
-        #     'samples': [
-        #         [1],
-        #         [0],
-        #     ],
-        #     'metrics': {
-        #         'f01': {
-        #             'count': '2',
-        #             'mean': '0.0015716767309123998',
-        #             'stddev': '0.03961485047808605',
-        #             'min': '0',
-        #             'max': '1',
-        #             'missing_count': '0'
-        #         }
-        #     }
-        # }
+        reader = DataReader(dataset.path)
+        if dataset.is_image():
+            thumbnail_dir_path = DatasetDirectory(dataset_path=dataset.path).thumbnails_path(
+                batch_name=batch.batch_name)
+            meta = reader.image_metadata(thumbnail_dir_path=thumbnail_dir_path, batch_name=batch.batch_name)
+        else:
+            meta = reader.metadata(batch_name=batch.batch_name)
+        return meta.get_preview()
+
+    def feature_metrics(self,
name: str, dataset_id: int, data_batch_id: int) -> dict: + dataset = self._session.query(Dataset).get(dataset_id) + if dataset is None: + raise NotFoundException(f'Failed to find dataset: {dataset_id}') + batch = self._session.query(DataBatch).get(data_batch_id) + if batch is None: + raise NotFoundException(f'Failed to find data batch: {data_batch_id}') + meta = DataReader(dataset.path).metadata(batch_name=batch.batch_name) val = {} - try: - val = json.loads(self._file_manager.read(meta_path)) - except Exception as e: # pylint: disable=broad-except - logging.info( - f'failed to read meta file, path: {meta_path}, err: {e}') - return {} - # feature is generated from sparkapp/pipeline/analyzer.py - feature_path = dataset_features_path(dataset_path) - try: - val['metrics'] = json.loads(self._file_manager.read(feature_path)) - except Exception as e: # pylint: disable=broad-except - logging.info( - f'failed to read feature file, path: {feature_path}, err: {e}') + val['name'] = name + val['metrics'] = meta.get_metrics_by_name(name) + val['hist'] = meta.get_hist_by_name(name) return val - def feature_metrics(self, name: str, dataset_id: int = 0) -> dict: - dataset = self._session.query(Dataset).filter( - Dataset.id == dataset_id).first() + def get_published_datasets(self, + project_id: int, + kind: Optional[DatasetJobKind] = None, + uuid: Optional[str] = None, + state: Optional[ResourceState] = None, + filter_exp: Optional[FilterExpression] = None, + time_range: Optional[timedelta] = None) -> List[dataset_pb2.ParticipantDatasetRef]: + query = self.query_dataset_with_parent_job() + query = query.options(joinedload(Dataset.data_batches)) + query = query.filter(Dataset.project_id == project_id) + query = query.filter(Dataset.is_published.is_(True)) + if kind is not None: + query = query.filter(Dataset.dataset_kind == kind) + if uuid is not None: + query = query.filter(Dataset.uuid == uuid) + if state is not None: + query = self.filter_dataset_state(query, frontend_states=[state]) + if filter_exp is not None: + query = self._published_dataset_filter_builder.build_query(query, filter_exp) + if time_range: + query = query.filter(DatasetJob.time_range == time_range) + query = query.order_by(Dataset.id.desc()) + datasets_ref = [] + for dataset in query.all(): + meta_info = dataset.get_meta_info() + dataset_ref = dataset_pb2.ParticipantDatasetRef( + uuid=dataset.uuid, + name=dataset.name, + format=DatasetFormat(dataset.dataset_format).name, + file_size=dataset.get_file_size(), + updated_at=to_timestamp(dataset.updated_at), + value=meta_info.value, + dataset_kind=dataset.dataset_kind.name, + dataset_type=dataset.dataset_type.name, + auth_status=dataset.auth_status.name if dataset.auth_status else '') + datasets_ref.append(dataset_ref) + return datasets_ref + + def publish_dataset(self, dataset_id: int, value: int = 0) -> Dataset: + dataset: Dataset = self._session.query(Dataset).get(dataset_id) if not dataset: raise NotFoundException(f'Failed to find dataset: {dataset_id}') - dataset_path = dataset.path - feature_path = dataset_features_path(dataset_path) - # data format: - # { - # 'name': 'f01', - # 'metrics': { - # 'count': '2', - # 'mean': '0.0015716767309123998', - # 'stddev': '0.03961485047808605', - # 'min': '0', - # 'max': '1', - # 'missing_count': '0' - # }, - # 'hist': { - # 'x': [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, - # 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], - # 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] - # } - # } - val = {} + if dataset.dataset_kind != DatasetKindV2.RAW: + 
            raise MethodNotAllowedException(
+                f'{dataset.dataset_kind.value} dataset cannot be published, dataset_id: {dataset.id}')
+        dataset.is_published = True
+        meta_info = dataset.get_meta_info()
+        meta_info.value = value
+        dataset.set_meta_info(meta_info)
+        # TODO(liuhehan): a hack to add uuid for old dataset when publish, remove in the future
+        if dataset.uuid is None:
+            dataset.uuid = resource_uuid()
+
+        # create review ticket
+        if dataset.ticket_uuid is None:
+            ticket_helper = get_ticket_helper(session=self._session)
+            ticket_helper.create_ticket(TicketType.PUBLISH_DATASET, TicketDetails(uuid=dataset.uuid))
+
+        return dataset
+
+    def withdraw_dataset(self, dataset_id: int):
+        dataset = self._session.query(Dataset).get(dataset_id)
+        if not dataset:
+            raise NotFoundException(f'Failed to find dataset: {dataset_id}')
+        dataset.is_published = False
+
+        # reset ticket
+        dataset.ticket_uuid = None
+        dataset.ticket_status = None
+
+    def cleanup_dataset(self, dataset: Dataset, delay_time: Optional[timedelta] = None) -> None:
+        """Registers the dataset and its underlying files with the cleanup module.
+
+        Args:
+            dataset: dataset row to delete; the caller should hold an exclusive lock on it
+            delay_time: how long to wait before the cleanup task may start
+
+        Raises:
+            ResourceConflictException: if the `dataset` can not be deleted
+        """
+        if not delay_time:
+            delay_time = self.DATASET_CLEANUP_DEFAULT_DELAY
+        target_start_at = to_timestamp(now() + delay_time)
+        is_deletable, error_msgs = DatasetDeleteDependency(self._session).is_deletable(dataset)
+        if not is_deletable:
+            error = {dataset.id: error_msgs}
+            raise ResourceConflictException(f'{error}')
+        logging.info(f'will mark the dataset:{dataset.id} as deleted')
+        payload = CleanupPayload(paths=[dataset.path])
+        dataset_cleanup_param = CleanupParameter(resource_id=dataset.id,
+                                                 resource_type=ResourceType.DATASET.name,
+                                                 payload=payload,
+                                                 target_start_at=target_start_at)
+        CleanupService(self._session).create_cleanup(cleanup_parmeter=dataset_cleanup_param)
+        dataset.deleted_at = now()
+        logging.info(f'Has registered a cleanup for dataset:{dataset.id}')
+
+    def get_data_batch(self, dataset: Dataset, event_time: Optional[datetime] = None) -> Optional[DataBatch]:
+        if dataset.dataset_type == DatasetType.PSI:
+            return self._session.query(DataBatch).filter(DataBatch.dataset_id == dataset.id).first()
+        return self._session.query(DataBatch).filter(DataBatch.dataset_id == dataset.id).filter(
+            DataBatch.event_time == event_time).first()
+
+
+class DataSourceService(object):
+
+    def __init__(self, session: Session):
+        self._session = session
+
+    def create_data_source(self, data_source_parameter: dataset_pb2.DataSource) -> DataSource:
+        # check project existence
+        project = self._session.query(Project).get(data_source_parameter.project_id)
+        if project is None:
+            raise NotFoundException(message=f'cannot find project with id: {data_source_parameter.project_id}')
+
+        data_source = DataSource(
+            name=data_source_parameter.name,
+            comment=data_source_parameter.comment,
+            uuid=resource_uuid(),
+            is_published=False,
+            path=data_source_parameter.url,
+            project_id=data_source_parameter.project_id,
+            creator_username=get_current_user().username,
+            dataset_format=DatasetFormat[data_source_parameter.dataset_format].value,
+            store_format=StoreFormat(data_source_parameter.store_format),
+            dataset_type=DatasetType(data_source_parameter.dataset_type),
+        )
+        meta_info = dataset_pb2.DatasetMetaInfo(datasource_type=data_source_parameter.type,
+                                                is_user_upload=data_source_parameter.is_user_upload,
+                                                is_user_export=data_source_parameter.is_user_export)
+        data_source.set_meta_info(meta_info)
+        self._session.add(data_source)
+        return data_source
+
+    def get_data_sources(self, project_id: int) -> List[dataset_pb2.DataSource]:
+        data_sources = self._session.query(DataSource).order_by(Dataset.created_at.desc())
+        if project_id > 0:
+            data_sources = data_sources.filter_by(project_id=project_id)
+        data_source_ref = []
+        for data_source in data_sources.all():
+            # ignore user-upload and user-export data_sources
+            meta_info = data_source.get_meta_info()
+            if not meta_info.is_user_upload and not meta_info.is_user_export:
+                data_source_ref.append(data_source.to_proto())
+        return data_source_ref
+
+    def delete_data_source(self, data_source_id: int):
+        data_source = self._session.query(DataSource).get(data_source_id)
+        if not data_source:
+            raise NotFoundException(message=f'cannot find data_source with id: {data_source_id}')
+        dataset_jobs = self._session.query(DatasetJob).filter_by(input_dataset_id=data_source.id).all()
+        for dataset_job in dataset_jobs:
+            if not dataset_job.is_finished():
+                message = f'data_source {data_source.name} is still being processed by dataset_job {dataset_job.id}'
+                logging.error(message)
+                raise ResourceConflictException(message=message)
+
+        data_source.deleted_at = now()
+
+
+class BatchService(object):
+
+    def __init__(self, session: Session):
+        self._session = session
+
+    def create_batch(self, batch_parameter: dataset_pb2.BatchParameter) -> DataBatch:
+        dataset: Dataset = self._session.query(Dataset).filter_by(id=batch_parameter.dataset_id).first()
+        if dataset is None:
+            message = f'Failed to find dataset: {batch_parameter.dataset_id}'
+            logging.error(message)
+            raise NotFoundException(message=message)
+        if dataset.dataset_type == DatasetType.PSI:
+            # There should be exactly one batch for a dataset in PSI mode,
+            # so the naming convention of the batch path is `{dataset_path}/batch/0`.
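+            # (A STREAMING dataset instead gets one batch per event_time; its folder
+            # name is derived from the event time below, e.g. `20220101` for the
+            # daily batch of 2022-01-01.)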
+class BatchService(object): + + def __init__(self, session: Session): + self._session = session + + def create_batch(self, batch_parameter: dataset_pb2.BatchParameter) -> DataBatch: + dataset: Dataset = self._session.query(Dataset).filter_by(id=batch_parameter.dataset_id).first() + if dataset is None: + message = f'Failed to find dataset: {batch_parameter.dataset_id}' + logging.error(message) + raise NotFoundException(message=message) + if dataset.dataset_type == DatasetType.PSI: + # A PSI dataset has exactly one batch, + # so the batch is always named `{dataset_path}/batch/0`. + if len(dataset.data_batches) != 0: + raise InvalidArgumentException(details='a PSI dataset can only have one batch') + batch_folder_name = '0' + event_time = None + elif dataset.dataset_type == DatasetType.STREAMING: + if batch_parameter.event_time == 0: + raise InvalidArgumentException( + details='event time should be specified when creating a batch of a streaming dataset') + event_time = from_timestamp(batch_parameter.event_time) + if batch_parameter.cron_type == CronType.DAILY: + batch_folder_name = parse_event_time_to_daily_folder_name(event_time=event_time) + elif batch_parameter.cron_type == CronType.HOURLY: + batch_folder_name = parse_event_time_to_hourly_folder_name(event_time=event_time) + else: + # old data may not have cron_type; default to daily + batch_folder_name = parse_event_time_to_daily_folder_name(event_time=event_time) + batch_parameter.path = os.path.join(dataset.path, 'batch', batch_folder_name) + # Create batch + batch = DataBatch(dataset_id=dataset.id, + event_time=event_time, + comment=batch_parameter.comment, + path=batch_parameter.path, + name=batch_folder_name) + self._session.add(batch) + + return batch + + def get_next_batch(self, data_batch: DataBatch) -> Optional[DataBatch]: + parent_dataset_job_stage: DatasetJobStage = data_batch.latest_parent_dataset_job_stage + if not parent_dataset_job_stage: + logging.warning(f'parent_dataset_job_stage not found, data_batch id: {data_batch.id}') + return None + parent_dataset_job: DatasetJob = parent_dataset_job_stage.dataset_job + if not parent_dataset_job: + logging.warning(f'parent_dataset_job not found, data_batch id: {data_batch.id}') + return None + if not parent_dataset_job.is_cron(): + logging.warning(f'data_batch {data_batch.id} belongs to a non-cron dataset_job, has no next batch') + return None + next_time = data_batch.event_time + parent_dataset_job.time_range + return self._session.query(DataBatch).filter(DataBatch.dataset_id == data_batch.dataset_id).filter( + DataBatch.event_time == next_time).first() + +
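For reference, the folder-name helpers called by `create_batch` boil down to a strftime convention that the unit tests later in this diff also assert: daily batches live in `batch/YYYYMMDD`, hourly batches in `batch/YYYYMMDD-HH`, and a PSI dataset's single batch in `batch/0`. A self-contained sketch of that convention (not the project's actual helpers):

```python
import os
from datetime import datetime

def batch_folder_name(dataset_type: str, event_time: datetime = None, hourly: bool = False) -> str:
    """Mirrors the naming convention used by BatchService.create_batch."""
    if dataset_type == 'PSI':
        return '0'  # a PSI dataset has exactly one batch
    # STREAMING: daily folders by default, hourly when the cron type says so
    return event_time.strftime('%Y%m%d-%H' if hourly else '%Y%m%d')

# prints /data/dataset/demo/batch/20220101
print(os.path.join('/data/dataset/demo', 'batch', batch_folder_name('STREAMING', datetime(2022, 1, 1))))
```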
+class DatasetJobService(object): + + def __init__(self, session: Session): + self._session = session + + def is_local(self, dataset_job_kind: DatasetJobKind) -> bool: + return dataset_job_kind in LOCAL_DATASET_JOBS + + def need_distribute(self, dataset_job: DatasetJob) -> bool: + # coordinator_id != 0 means this side is a participant, + # and a dataset_job needs to be distributed when it has participants + if dataset_job.coordinator_id != 0: + return True + return not self.is_local(dataset_job.kind) + + # filter the participants to which the dataset_job needs to be distributed + def get_participants_need_distribute(self, dataset_job: DatasetJob) -> List: + participants = [] + if self.need_distribute(dataset_job): + participants = ParticipantService(self._session).get_platform_participants_by_project( + dataset_job.project_id) + return participants + + def create_as_coordinator(self, + project_id: int, + kind: DatasetJobKind, + output_dataset_id: int, + global_configs: DatasetJobGlobalConfigs, + time_range: Optional[timedelta] = None) -> DatasetJob: + my_domain_name = SettingService.get_system_info().pure_domain_name + input_dataset_uuid = global_configs.global_configs[my_domain_name].dataset_uuid + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == input_dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(f'failed to find dataset {input_dataset_uuid}') + output_dataset = self._session.query(Dataset).get(output_dataset_id) + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset id {output_dataset_id}') + configer = DatasetJobConfiger.from_kind(kind, self._session) + config = configer.get_config() try: - feature_data = json.loads(self._file_manager.read(feature_path)) - val['name'] = name - val['metrics'] = feature_data.get(name, {}) - except Exception as e: # pylint: disable=broad-except - logging.info( - f'failed to read feature file, path: {feature_path}, err: {e}') - # hist is generated from sparkapp/pipeline/analyzer.py - hist_path = dataset_hist_path(dataset_path) + global_configs = configer.auto_config_variables(global_configs) + fill_variables(config, global_configs.global_configs[my_domain_name].variables, dry_run=True) + except TypeError as err: + raise InvalidArgumentException(details=err.args) from err + + dataset_job = DatasetJob() + dataset_job.uuid = resource_uuid() + dataset_job.project_id = project_id + dataset_job.coordinator_id = 0 + dataset_job.input_dataset_id = input_dataset.id + dataset_job.output_dataset_id = output_dataset_id + dataset_job.name = output_dataset.name + dataset_job.kind = kind + dataset_job.time_range = time_range + dataset_job.set_global_configs(global_configs) + dataset_job.set_context(dataset_pb2.DatasetJobContext(has_stages=True)) + current_user = get_current_user() + if current_user is not None: + dataset_job.creator_username = current_user.username + + self._session.add(dataset_job) + + emit_dataset_job_submission_store(uuid=dataset_job.uuid, kind=dataset_job.kind, coordinator_id=0) + + return dataset_job + + def create_as_participant(self, + project_id: int, + kind: DatasetJobKind, + global_configs: DatasetJobGlobalConfigs, + config: WorkflowDefinition, + output_dataset_id: int, + coordinator_id: int, + uuid: str, + creator_username: str, + time_range: Optional[timedelta] = None) -> DatasetJob: + my_domain_name = SettingService.get_system_info().pure_domain_name + my_dataset_job_config = global_configs.global_configs[my_domain_name] + + input_dataset = self._session.query(Dataset).filter(Dataset.uuid == my_dataset_job_config.dataset_uuid).first() + if input_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset {my_dataset_job_config.dataset_uuid}') + output_dataset = self._session.query(Dataset).get(output_dataset_id) + if output_dataset is None: + raise InvalidArgumentException(details=f'failed to find dataset id {output_dataset_id}') try: - hist_data = json.loads(self._file_manager.read(hist_path)) - val['hist'] = hist_data.get(name, {}) - except Exception as e: # pylint: disable=broad-except - logging.info( - f'failed to read hist file, path: {hist_path}, err: {e}') - return val + fill_variables(config, my_dataset_job_config.variables, dry_run=True) + except TypeError as err: + raise InvalidArgumentException(details=err.args) from err - def get_datasets(self, project_id: int = 0) -> List[Dataset]: - q = self._session.query(Dataset).order_by(Dataset.created_at.desc()) - if project_id > 0: - q = q.filter(Dataset.project_id == project_id) - return q.all() + dataset_job = DatasetJob() + dataset_job.uuid = uuid + dataset_job.project_id = project_id + dataset_job.input_dataset_id = input_dataset.id + dataset_job.output_dataset_id = output_dataset_id + dataset_job.name = output_dataset.name + dataset_job.coordinator_id = coordinator_id + dataset_job.kind = kind + dataset_job.time_range = time_range + dataset_job.creator_username = creator_username + dataset_job.set_context(dataset_pb2.DatasetJobContext(has_stages=True)) + + self._session.add(dataset_job) + + emit_dataset_job_submission_store(uuid=dataset_job.uuid, + kind=dataset_job.kind, + coordinator_id=dataset_job.coordinator_id) + + return dataset_job + +
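A condensed caller-side sketch of `create_as_coordinator`, modeled on the unit test later in this diff; `session` is assumed to be an open SQLAlchemy session, the uuids/ids are hypothetical placeholders, and the `'test_domain'` key must match this party's own pure domain name, since the service looks up its config by `SettingService.get_system_info().pure_domain_name`:

```python
# Sketch only: assumes an open session and existing input/output datasets.
from fedlearner_webconsole.proto import dataset_pb2
from fedlearner_webconsole.dataset.models import DatasetJobKind
from fedlearner_webconsole.dataset.services import DatasetJobService

input_dataset_uuid = 'u_input_123'  # hypothetical existing input dataset uuid
output_dataset_id = 2               # hypothetical existing output dataset id

global_configs = dataset_pb2.DatasetJobGlobalConfigs()
global_configs.global_configs['test_domain'].MergeFrom(
    dataset_pb2.DatasetJobConfig(dataset_uuid=input_dataset_uuid))

dataset_job = DatasetJobService(session).create_as_coordinator(
    project_id=1,
    kind=DatasetJobKind.IMPORT_SOURCE,
    output_dataset_id=output_dataset_id,
    global_configs=global_configs)
session.commit()
```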
+ def start_dataset_job(self, dataset_job: DatasetJob): + dataset_job.state = DatasetJobState.RUNNING + dataset_job.started_at = now() + + def finish_dataset_job(self, dataset_job: DatasetJob, finish_state: DatasetJobState): + if finish_state not in DATASET_JOB_FINISHED_STATE: + raise ValueError(f'got invalid finish state: [{finish_state}] when trying to finish dataset_job') + dataset_job.state = finish_state + dataset_job.finished_at = now() + duration = to_timestamp(dataset_job.finished_at) - to_timestamp(dataset_job.created_at) + emit_dataset_job_duration_store(duration=duration, + uuid=dataset_job.uuid, + kind=dataset_job.kind, + coordinator_id=dataset_job.coordinator_id, + state=finish_state) + + def start_cron_scheduler(self, dataset_job: DatasetJob): + if not dataset_job.is_cron(): + logging.warning(f'[dataset_job_service]: failed to start scheduling a non-cron dataset_job {dataset_job.id}') + return + dataset_job.scheduler_state = DatasetJobSchedulerState.RUNNABLE + + def stop_cron_scheduler(self, dataset_job: DatasetJob): + if not dataset_job.is_cron(): + logging.warning(f'[dataset_job_service]: failed to stop scheduling a non-cron dataset_job {dataset_job.id}') + return + dataset_job.scheduler_state = DatasetJobSchedulerState.STOPPED + + def delete_dataset_job(self, dataset_job: DatasetJob): + if not dataset_job.is_finished(): + message = f'Failed to delete dataset_job: {dataset_job.id}; ' \ + f'reason: dataset_job state is {dataset_job.state.name}' + logging.error(message) + raise ResourceConflictException(message) + dataset_job.deleted_at = now() + +
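The duration metric emitted by `finish_dataset_job` is just the difference of the two epoch timestamps, so a job created at 11:00 and finished at 12:00 reports 3600 seconds (the `test_finish` case below checks exactly this). A simplified stand-in for `to_timestamp` to illustrate the arithmetic (the real helper lives in `utils.pp_datetime`):

```python
from datetime import datetime, timezone

def to_timestamp(dt: datetime) -> int:
    # Simplified stand-in for utils.pp_datetime.to_timestamp; treats naive values as UTC.
    return int(dt.replace(tzinfo=timezone.utc).timestamp())

created_at = datetime(2022, 1, 1, 11, 0, 0)
finished_at = datetime(2022, 1, 1, 12, 0, 0)
print(to_timestamp(finished_at) - to_timestamp(created_at))  # 3600
```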
+class DatasetJobStageService(object): + + def __init__(self, session: Session): + self._session = session + + # TODO(liuhehan): delete in the near future after we use as_coordinator func + def create_dataset_job_stage(self, + project_id: int, + dataset_job_id: int, + output_data_batch_id: int, + uuid: Optional[str] = None, + name: Optional[str] = None): + dataset_job: DatasetJob = self._session.query(DatasetJob).get(dataset_job_id) + if dataset_job is None: + raise InvalidArgumentException(details=f'failed to find dataset_job, id: {dataset_job_id}') + output_data_batch: DataBatch = self._session.query(DataBatch).get(output_data_batch_id) + if output_data_batch is None: + raise InvalidArgumentException(details=f'failed to find output_data_batch, id: {output_data_batch_id}') + + dataset_job_stages: List[DatasetJobStage] = self._session.query(DatasetJobStage).filter( + DatasetJobStage.data_batch_id == output_data_batch_id).filter( + DatasetJobStage.dataset_job_id == dataset_job_id).order_by(DatasetJobStage.created_at.desc()).all() + index = len(dataset_job_stages) + if index != 0 and not dataset_job_stages[0].is_finished(): + raise InvalidArgumentException( + details=f'newest dataset_job_stage is still running, id: {dataset_job_stages[0].id}') + + dataset_job_stage = DatasetJobStage() + dataset_job_stage.uuid = uuid or resource_uuid() + dataset_job_stage.name = name or f'{output_data_batch.name}-stage{index}' + dataset_job_stage.event_time = output_data_batch.event_time + dataset_job_stage.dataset_job_id = dataset_job_id + dataset_job_stage.data_batch_id = output_data_batch_id + dataset_job_stage.project_id = project_id + if dataset_job.coordinator_id == 0: + dataset_job_stage.set_global_configs(dataset_job.get_global_configs()) + self._session.add(dataset_job_stage) + + self._session.flush() + if dataset_job.kind not in MICRO_DATASET_JOB: + output_data_batch.latest_parent_dataset_job_stage_id = dataset_job_stage.id + elif dataset_job.kind == DatasetJobKind.ANALYZER: + output_data_batch.latest_analyzer_dataset_job_stage_id = dataset_job_stage.id + + dataset_job.state = DatasetJobState.PENDING + + return dataset_job_stage + +
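Stage names encode a retry counter: the first stage of batch `20220101` is `20220101-stage0`, and each later stage (only allowed once the newest one has finished) increments the suffix, which is how the tests below arrive at `20220101-stage1`. The naming rule in isolation:

```python
def next_stage_name(batch_name: str, existing_stage_count: int) -> str:
    """Stages of a batch are numbered by creation order: <batch>-stage<N>."""
    return f'{batch_name}-stage{existing_stage_count}'

print(next_stage_name('20220101', 0))  # 20220101-stage0 (first run)
print(next_stage_name('20220101', 1))  # 20220101-stage1 (first retry)
```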
+ def create_dataset_job_stage_as_coordinator(self, project_id: int, dataset_job_id: int, output_data_batch_id: int, + global_configs: DatasetJobGlobalConfigs): + dataset_job: DatasetJob = self._session.query(DatasetJob).get(dataset_job_id) + if dataset_job is None: + raise InvalidArgumentException(details=f'failed to find dataset_job, id: {dataset_job_id}') + output_data_batch: DataBatch = self._session.query(DataBatch).get(output_data_batch_id) + if output_data_batch is None: + raise InvalidArgumentException(details=f'failed to find output_data_batch, id: {output_data_batch_id}') + + dataset_job_stages: List[DatasetJobStage] = self._session.query(DatasetJobStage).filter( + DatasetJobStage.data_batch_id == output_data_batch_id).filter( + DatasetJobStage.dataset_job_id == dataset_job_id).order_by(DatasetJobStage.id.desc()).all() + index = len(dataset_job_stages) + if index != 0 and not dataset_job_stages[0].is_finished(): + raise InvalidArgumentException( + details=f'newest dataset_job_stage is still running, id: {dataset_job_stages[0].id}') + + dataset_job_stage = DatasetJobStage() + dataset_job_stage.uuid = resource_uuid() + dataset_job_stage.name = f'{output_data_batch.name}-stage{index}' + dataset_job_stage.event_time = output_data_batch.event_time + dataset_job_stage.dataset_job_id = dataset_job_id + dataset_job_stage.data_batch_id = output_data_batch_id + dataset_job_stage.project_id = project_id + dataset_job_stage.coordinator_id = 0 + dataset_job_stage.set_global_configs(global_configs) + self._session.add(dataset_job_stage) + + self._session.flush() + if dataset_job.kind not in MICRO_DATASET_JOB: + output_data_batch.latest_parent_dataset_job_stage_id = dataset_job_stage.id + elif dataset_job.kind == DatasetJobKind.ANALYZER: + output_data_batch.latest_analyzer_dataset_job_stage_id = dataset_job_stage.id + + dataset_job.state = DatasetJobState.PENDING + + return dataset_job_stage + + def create_dataset_job_stage_as_participant(self, project_id: int, dataset_job_id: int, output_data_batch_id: int, + uuid: str, name: str, coordinator_id: int): + dataset_job: DatasetJob = self._session.query(DatasetJob).get(dataset_job_id) + if dataset_job is None: + raise InvalidArgumentException(details=f'failed to find dataset_job, id: {dataset_job_id}') + output_data_batch: DataBatch = self._session.query(DataBatch).get(output_data_batch_id) + if output_data_batch is None: + raise InvalidArgumentException(details=f'failed to find output_data_batch, id: {output_data_batch_id}') + + dataset_job_stages: List[DatasetJobStage] = self._session.query(DatasetJobStage).filter( + DatasetJobStage.data_batch_id == output_data_batch_id).filter( + DatasetJobStage.dataset_job_id == dataset_job_id).order_by(DatasetJobStage.id.desc()).all() + index = len(dataset_job_stages) + if index != 0 and not dataset_job_stages[0].is_finished(): + raise InvalidArgumentException( + details=f'newest dataset_job_stage is still running, id: {dataset_job_stages[0].id}') + + dataset_job_stage = DatasetJobStage() + dataset_job_stage.uuid = uuid + dataset_job_stage.name = name + dataset_job_stage.event_time = output_data_batch.event_time + dataset_job_stage.dataset_job_id = dataset_job_id + dataset_job_stage.data_batch_id = output_data_batch_id + dataset_job_stage.project_id = project_id + dataset_job_stage.coordinator_id = coordinator_id + self._session.add(dataset_job_stage) + + self._session.flush() + if dataset_job.kind not in MICRO_DATASET_JOB: + output_data_batch.latest_parent_dataset_job_stage_id = dataset_job_stage.id + elif dataset_job.kind == DatasetJobKind.ANALYZER: + output_data_batch.latest_analyzer_dataset_job_stage_id = dataset_job_stage.id + + dataset_job.state = DatasetJobState.PENDING + + return dataset_job_stage + + def start_dataset_job_stage(self, dataset_job_stage: DatasetJobStage): + dataset_job_stage.state = DatasetJobState.RUNNING + dataset_job_stage.started_at = now() + + newest_job_stage_id, *_ = self._session.query( + DatasetJobStage.id).filter(DatasetJobStage.dataset_job_id == dataset_job_stage.dataset_job_id).order_by( + DatasetJobStage.created_at.desc()).first() + if newest_job_stage_id == dataset_job_stage.id: + dataset_job_stage.dataset_job.state = DatasetJobState.RUNNING + + def finish_dataset_job_stage(self, dataset_job_stage: DatasetJobStage, finish_state: DatasetJobState): + if finish_state not in DATASET_JOB_FINISHED_STATE: + raise ValueError(f'got invalid finish state: [{finish_state}] when trying to finish dataset_job_stage') + dataset_job_stage.state = finish_state + dataset_job_stage.finished_at = now() + + newest_job_stage_id, *_ = self._session.query( + DatasetJobStage.id).filter(DatasetJobStage.dataset_job_id == dataset_job_stage.dataset_job_id).order_by( + DatasetJobStage.created_at.desc()).first() + if newest_job_stage_id == dataset_job_stage.id: + dataset_job_stage.dataset_job.state = finish_state diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/services_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/services_test.py new file mode 100644 index 000000000..4b92d69bc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/services_test.py @@ -0,0 +1,1398 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +import json +import tempfile +import unittest +from unittest.mock import patch, MagicMock, ANY +from datetime import datetime, timedelta, timezone +from google.protobuf.struct_pb2 import Value +from fedlearner_webconsole.dataset.dataset_directory import DatasetDirectory +from pathlib import Path + +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.dataset.meta_data import ImageMetaData, MetaData +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterExpressionKind, SimpleExpression +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition, WorkflowDefinition +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto import dataset_pb2 +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException, ResourceConflictException +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.dataset.models import (DataSourceType, DatasetFormat, DatasetJob, DatasetJobKind, + DatasetJobStage, DataBatch, DatasetKindV2, DatasetType, Dataset, + DatasetJobState, DataSource, ImportType, ResourceState, StoreFormat, + DatasetJobSchedulerState) +from fedlearner_webconsole.dataset.services import (BatchService, DataReader, DataSourceService, DatasetJobService, + DatasetJobStageService, DatasetService) +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.pp_datetime import to_timestamp, now +from fedlearner_webconsole.proto.cleanup_pb2 import CleanupParameter, CleanupPayload +from testing.common import NoWebServerTestCase +from testing.dataset import FakeDatasetJobConfiger +from testing.fake_time_patcher import FakeTimePatcher + + +class DatasetServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.source_dir = tempfile.mkdtemp() + self._set_common_dataset() + + def _set_common_dataset(self): + with db.session_scope() as session: + dataset = Dataset( + name='default dataset1', + uuid='default uuid', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path=str(self.source_dir), + project_id=1, + dataset_kind=DatasetKindV2.RAW, + ) + session.add(dataset) + session.commit() +
+ @patch('envs.Envs.STORAGE_ROOT', '/data') + def test_create_dataset(self): + dataset_para = dataset_pb2.DatasetParameter(name='test', + uuid='fake_uuid', + type=DatasetType.PSI.value, + comment='this is a comment', + project_id=1, + kind=DatasetKindV2.RAW.value, + format=DatasetFormat.IMAGE.name, + is_published=False, + import_type=ImportType.NO_COPY.value, + store_format=StoreFormat.CSV.value) + + with db.session_scope() as session: + with self.assertRaises(NotFoundException): + DatasetService(session).create_dataset(dataset_parameter=dataset_para) + session.commit() + + with db.session_scope() as session: + project = Project() + session.add(project) + session.commit() + dataset_para.project_id = project.id + + with db.session_scope() as session: + DatasetService(session).create_dataset(dataset_parameter=dataset_para) + session.commit() + + with db.session_scope() as session: + dataset = session.query(Dataset).filter(Dataset.name == 'test').one() + self.assertEqual(dataset.comment, 'this is a comment') + self.assertEqual(dataset.path, 'file:///data/dataset/fake_uuid_test') + self.assertEqual(dataset.is_published, False) + self.assertEqual(dataset.import_type, ImportType.NO_COPY) + self.assertEqual(dataset.store_format, StoreFormat.CSV) + + dataset_para_published = dataset_pb2.DatasetParameter(name='test_published', + uuid='fake_uuid_published', + type=DatasetType.PSI.value, + comment='this is a comment', + project_id=1, + kind=DatasetKindV2.PROCESSED.value, + format=DatasetFormat.IMAGE.name, + is_published=True, + creator_username='fakeuser') + + with db.session_scope() as session: + DatasetService(session).create_dataset(dataset_parameter=dataset_para_published) + session.commit() + + with db.session_scope() as session: + dataset = session.query(Dataset).filter(Dataset.name == 'test_published').one() + self.assertEqual(dataset.comment, 'this is a comment') + self.assertEqual(dataset.path, 'file:///data/dataset/fake_uuid_published_test-published') + self.assertEqual(dataset.is_published, True) + self.assertEqual(dataset.creator_username, 'fakeuser') + + def test_publish_dataset(self): + with db.session_scope() as session: + unpublished_dataset = Dataset(id=11, + uuid='123', + name='none_published_dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value) + no_uuid_dataset = Dataset(id=12, + name='none_published_dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=False, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value) + session.add(unpublished_dataset) + session.add(no_uuid_dataset) + session.commit() + # test unpublish to publish + with db.session_scope() as session: + dataset = DatasetService(session=session).publish_dataset(dataset_id=11, value=100) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(11) + self.assertTrue(dataset.is_published) + self.assertEqual(dataset.get_meta_info().value, 100) + self.assertIsNotNone(dataset.ticket_uuid) + self.assertEqual(dataset.ticket_status, TicketStatus.APPROVED) + # test publish to publish + with db.session_scope() as session: + dataset = DatasetService(session=session).publish_dataset(dataset_id=11) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(11) + self.assertTrue(dataset.is_published) + self.assertIsNotNone(dataset.ticket_uuid) + # test unknown dataset + with db.session_scope() as session: + with self.assertRaises(NotFoundException): + DatasetService(session=session).publish_dataset(dataset_id=100) + # test no uuid dataset + with db.session_scope() as session: + dataset = DatasetService(session=session).publish_dataset(dataset_id=12) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(12) + self.assertIsNotNone(dataset.uuid) + self.assertIsNotNone(dataset.ticket_uuid) + self.assertEqual(dataset.ticket_status, TicketStatus.APPROVED) + + def test_withdraw_dataset(self): + with db.session_scope() as session: + published_dataset = Dataset(id=10, + uuid='123', + name='published_dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value, + ticket_uuid='ticket_uuid', + ticket_status=TicketStatus.APPROVED) + session.add(published_dataset) + session.commit() + # test publish to unpublish + with
db.session_scope() as session: + DatasetService(session=session).withdraw_dataset(dataset_id=10) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertFalse(dataset.is_published) + self.assertIsNone(dataset.ticket_uuid) + self.assertIsNone(dataset.ticket_status) + # test unpublish to unpublish + with db.session_scope() as session: + DatasetService(session=session).withdraw_dataset(dataset_id=10) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + self.assertFalse(dataset.is_published) + # test unknown dataset + with db.session_scope() as session: + with self.assertRaises(NotFoundException): + DatasetService(session=session).publish_dataset(dataset_id=100) + + def test_get_published_datasets(self): + update_time = datetime(2012, 1, 14, 12, 0, 5) + with db.session_scope() as session: + dataset1 = Dataset( + id=10, + uuid='1', + name='dataset_1', + dataset_type=DatasetType.PSI, + project_id=1, + path='/data/dataset_1/', + is_published=True, + dataset_format=DatasetFormat.TABULAR.value, + updated_at=update_time, + dataset_kind=DatasetKindV2.RAW, + ) + dataset_job1 = DatasetJob(id=10, + uuid=resource_uuid(), + input_dataset_id=0, + output_dataset_id=dataset1.id, + kind=DatasetJobKind.IMPORT_SOURCE, + project_id=1, + state=DatasetJobState.SUCCEEDED, + deleted_at=datetime(2022, 1, 1)) + session.add_all([dataset1, dataset_job1]) + dataset2 = Dataset( + id=11, + uuid='2', + name='dataset_2', + dataset_type=DatasetType.PSI, + project_id=1, + path='/data/dataset_2/', + is_published=True, + dataset_format=DatasetFormat.TABULAR.value, + updated_at=update_time, + dataset_kind=DatasetKindV2.PROCESSED, + ) + dataset_job2 = DatasetJob(id=11, + uuid=resource_uuid(), + input_dataset_id=0, + output_dataset_id=dataset2.id, + kind=DatasetJobKind.IMPORT_SOURCE, + project_id=1, + state=DatasetJobState.SUCCEEDED, + time_range=timedelta(days=1)) + session.add_all([dataset2, dataset_job2]) + data_source = DataSource( + id=12, + uuid='3', + name='dataset_3', + dataset_type=DatasetType.PSI, + project_id=1, + path='/data/dataset_2/', + is_published=True, + dataset_format=DatasetFormat.TABULAR.value, + updated_at=update_time, + ) + session.add(data_source) + dataset4 = Dataset( + id=13, + uuid='4', + name='dataset_4', + dataset_type=DatasetType.PSI, + project_id=1, + path='/data/dataset_4/', + is_published=True, + dataset_format=DatasetFormat.TABULAR.value, + updated_at=update_time, + ) + dataset_job4 = DatasetJob(id=13, + uuid=resource_uuid(), + input_dataset_id=0, + output_dataset_id=dataset4.id, + kind=DatasetJobKind.IMPORT_SOURCE, + project_id=1, + state=DatasetJobState.STOPPED) + session.add_all([dataset4, dataset_job4]) + session.commit() + dataref_1 = dataset_pb2.ParticipantDatasetRef(uuid='1', + name='dataset_1', + format=DatasetFormat.TABULAR.name, + file_size=0, + updated_at=to_timestamp(update_time), + dataset_kind=DatasetKindV2.RAW.name, + dataset_type=DatasetType.PSI.name, + auth_status='PENDING') + dataref_2 = dataset_pb2.ParticipantDatasetRef(uuid='2', + name='dataset_2', + format=DatasetFormat.TABULAR.name, + file_size=0, + updated_at=to_timestamp(update_time), + dataset_kind=DatasetKindV2.PROCESSED.name, + dataset_type=DatasetType.PSI.name, + auth_status='PENDING') + with db.session_scope() as session: + dataset_service = DatasetService(session=session) + self.assertEqual(dataset_service.get_published_datasets(project_id=1, state=ResourceState.SUCCEEDED), + [dataref_2, dataref_1]) + 
self.assertEqual( + dataset_service.get_published_datasets(project_id=1, + kind=DatasetKindV2.RAW, + state=ResourceState.SUCCEEDED), [dataref_1]) + self.assertEqual(dataset_service.get_published_datasets(project_id=1, uuid='2'), [dataref_2]) + filter_exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='uuid', string_value='2')) + self.assertEqual(dataset_service.get_published_datasets(project_id=1, filter_exp=filter_exp), [dataref_2]) + self.assertEqual( + dataset_service.get_published_datasets(project_id=1, + state=ResourceState.SUCCEEDED, + time_range=timedelta(days=1)), [dataref_2]) + + @patch('fedlearner_webconsole.cleanup.services.CleanupService.create_cleanup') + def test_cleanup_dataset(self, cleanup_mock: MagicMock): + # create a test dateset + with db.session_scope() as session: + published_dataset = Dataset(id=333, + uuid='123', + name='published_dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value) + session.add(published_dataset) + session.commit() + # test cleanup failed case + cleanup_mock.reset_mock() + cleanup_mock.side_effect = Exception('fake-exception') + with db.session_scope() as session: + dataset = session.query(Dataset).with_for_update().populate_existing().get(333) + service = DatasetService(session) + self.assertRaises(Exception, service.cleanup_dataset, dataset) + # test cleanup dataset success + cleanup_mock.side_effect = None + cleanup_mock.return_value = None + fake_time = datetime(2022, 4, 14, 0, 0, 0, 0, tzinfo=timezone.utc) + time_patcher = FakeTimePatcher() + time_patcher.start(fake_time) + with db.session_scope() as session: + dataset = session.query(Dataset).with_for_update().populate_existing().get(333) + service = DatasetService(session) + service.cleanup_dataset(dataset) + session.commit() + expected_target_start_at = to_timestamp(fake_time + DatasetService.DATASET_CLEANUP_DEFAULT_DELAY) + expected_payload = CleanupPayload(paths=['/data/dataset/123']) + expected_cleanup_param = CleanupParameter(resource_id=333, + resource_type='DATASET', + target_start_at=expected_target_start_at, + payload=expected_payload) + cleanup_mock.assert_called_with(cleanup_parmeter=expected_cleanup_param) + with db.session_scope() as session: + self.assertRaises(NotFoundException, service.get_dataset, 333) + time_patcher.stop() + + def test_query_dataset_with_parent_job(self): + with db.session_scope() as session: + query = DatasetService(session).query_dataset_with_parent_job() + statement = self.generate_mysql_statement(query) + expected_statement = 'FROM datasets_v2 LEFT OUTER JOIN dataset_jobs_v2 ' \ + 'ON dataset_jobs_v2.output_dataset_id = datasets_v2.id ' \ + 'AND dataset_jobs_v2.input_dataset_id != datasets_v2.id' + self.assertTrue(expected_statement in statement) + + @patch('fedlearner_webconsole.dataset.services.DataReader.metadata') + @patch('fedlearner_webconsole.dataset.services.DataReader.image_metadata') + def test_get_dataset_preview(self, mock_image_metadata: MagicMock, mock_metadata: MagicMock): + mock_metadata.return_value = MetaData() + mock_image_metadata.return_value = ImageMetaData( + thumbnail_dir_path='/data/dataset/123/meta/20220101/thumbnails') + with db.session_scope() as session: + dataset = Dataset(id=10, + uuid='dataset uuid', + name='dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + 
dataset_format=DatasetFormat.TABULAR.value) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220101', + path='/data/dataset/123/batch/20220101', + dataset_id=10, + event_time=datetime(year=2022, month=1, day=1)) + session.add(data_batch) + session.commit() + with db.session_scope() as session: + dataset_service = DatasetService(session=session) + dataset_service.get_dataset_preview(dataset_id=10, batch_id=1) + mock_metadata.assert_called_once_with(batch_name='20220101') + dataset = session.query(Dataset).get(10) + dataset.dataset_format = DatasetFormat.IMAGE.value + session.flush() + dataset_service.get_dataset_preview(dataset_id=10, batch_id=1) + mock_image_metadata.assert_called_once_with(batch_name='20220101', + thumbnail_dir_path='/data/dataset/123/meta/20220101/thumbnails') + + @patch('fedlearner_webconsole.dataset.services.DataReader.metadata') + def test_feature_metrics(self, mock_metadata: MagicMock): + mock_metadata.return_value = MetaData() + with db.session_scope() as session: + dataset = Dataset(id=10, + uuid='dataset uuid', + name='dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220101', + path='/data/dataset/123/batch/20220101', + dataset_id=10, + event_time=datetime(year=2022, month=1, day=1)) + session.add(data_batch) + session.commit() + with db.session_scope() as session: + dataset_service = DatasetService(session=session) + val = dataset_service.feature_metrics(name='raw_id', dataset_id=10, data_batch_id=1) + expected_val = { + 'name': 'raw_id', + 'metrics': {}, + 'hist': {}, + } + self.assertEqual(val, expected_val) + mock_metadata.assert_called_once_with(batch_name='20220101') + + def test_get_data_batch(self): + with db.session_scope() as session: + dataset = Dataset(id=10, + uuid='dataset uuid', + name='dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + is_published=True, + project_id=1, + dataset_format=DatasetFormat.TABULAR.value) + session.add(dataset) + data_batch = DataBatch(id=1, + name='20220101', + path='/data/dataset/123/batch/20220101', + dataset_id=10, + event_time=datetime(year=2022, month=1, day=1)) + session.add(data_batch) + session.commit() + with db.session_scope() as session: + dataset = session.query(Dataset).get(10) + dataset_service = DatasetService(session=session) + data_batch = dataset_service.get_data_batch(dataset=dataset, event_time=datetime(2022, 1, 1)) + self.assertEqual(data_batch.id, 1) + data_batch = dataset_service.get_data_batch(dataset=dataset, event_time=datetime(2022, 1, 2)) + self.assertIsNone(data_batch) + dataset.dataset_type = DatasetType.PSI + data_batch = dataset_service.get_data_batch(dataset=dataset) + self.assertEqual(data_batch.id, 1) + + +class DataSourceServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.source_dir = tempfile.mkdtemp() + self._set_default_project() + self._set_common_dataset() + + def _set_default_project(self): + with db.session_scope() as session: + project = Project(id=1, name='default_project') + session.add(project) + session.commit() + + def _set_common_dataset(self): + with db.session_scope() as session: + dataset = Dataset( + name='default dataset1', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path=str(self.source_dir), + project_id=1, + ) + session.add(dataset) + session.commit() 
+ + @patch('fedlearner_webconsole.dataset.services.get_current_user', lambda: User(id=1, username='xiaohang')) + def test_create_data_source(self): + data_source_parameter = dataset_pb2.DataSource(name='default data_source', + url='hdfs:///fack_url', + project_id=1, + type=DataSourceType.HDFS.value, + is_user_upload=False, + dataset_format=DatasetFormat.TABULAR.name, + store_format=StoreFormat.CSV.value, + dataset_type=DatasetType.PSI.value) + with db.session_scope() as session: + data_source = DataSourceService(session=session).create_data_source( + data_source_parameter=data_source_parameter) + session.commit() + with db.session_scope() as session: + data_source = session.query(DataSource).filter_by(name='default data_source').first() + self.assertEqual(data_source.name, data_source_parameter.name) + self.assertEqual(data_source.path, data_source_parameter.url) + self.assertEqual(data_source.project_id, data_source_parameter.project_id) + self.assertEqual(data_source.creator_username, 'xiaohang') + self.assertEqual(data_source.get_meta_info().datasource_type, data_source_parameter.type) + self.assertEqual(data_source.get_meta_info().is_user_upload, data_source_parameter.is_user_upload) + self.assertEqual(data_source.dataset_format, DatasetFormat.TABULAR.value) + self.assertEqual(data_source.store_format, StoreFormat.CSV) + self.assertEqual(data_source.dataset_type, DatasetType.PSI) + self.assertIsNotNone(data_source.id) + self.assertIsNotNone(data_source.created_at) + + def test_get_data_sources(self): + with db.session_scope() as session: + datasource_1 = DataSource(id=100, + uuid='data_source_1_uuid', + name='datasource_1', + path='hdfs:///data/fake_path_1', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + is_published=False, + store_format=StoreFormat.TFRECORDS, + dataset_format=DatasetFormat.IMAGE.value, + dataset_type=DatasetType.STREAMING) + datasource_1.set_meta_info(meta=dataset_pb2.DatasetMetaInfo( + datasource_type=DataSourceType.HDFS.value, is_user_upload=False, is_user_export=False)) + datasource_2 = DataSource(id=101, + uuid='data_source_2_uuid', + name='datasource_2', + path='file:///data/fake_path_2', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 6), + is_published=False, + store_format=StoreFormat.CSV, + dataset_format=DatasetFormat.TABULAR.value, + dataset_type=DatasetType.PSI) + datasource_2.set_meta_info(meta=dataset_pb2.DatasetMetaInfo( + datasource_type=DataSourceType.FILE.value, is_user_upload=False, is_user_export=False)) + datasource_3 = DataSource(id=102, + uuid='data_source_3_uuid', + name='datasource_3', + path='/upload/fake_path_3', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 7), + is_published=False) + datasource_3.set_meta_info(meta=dataset_pb2.DatasetMetaInfo( + datasource_type=DataSourceType.FILE.value, is_user_upload=True, is_user_export=False)) + datasource_4 = DataSource(id=103, + uuid='data_source_4_uuid', + name='datasource_4', + path='/upload/fake_path_4', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 8), + is_published=False) + datasource_4.set_meta_info(meta=dataset_pb2.DatasetMetaInfo( + datasource_type=DataSourceType.FILE.value, is_user_upload=True, is_user_export=True)) + session.add(datasource_1) + session.add(datasource_2) + session.add(datasource_3) + session.add(datasource_4) + session.commit() + with db.session_scope() as session: + expected_datasources = [ + dataset_pb2.DataSource(id=101, + uuid='data_source_2_uuid', + name='datasource_2', + url='file:///data/fake_path_2', + 
project_id=1, + created_at=to_timestamp(datetime(2012, 1, 14, 12, 0, 6)), + type=DataSourceType.FILE.value, + is_user_upload=False, + dataset_format='TABULAR', + store_format='CSV', + dataset_type='PSI'), + dataset_pb2.DataSource(id=100, + uuid='data_source_1_uuid', + name='datasource_1', + url='hdfs:///data/fake_path_1', + project_id=1, + created_at=to_timestamp(datetime(2012, 1, 14, 12, 0, 5)), + type=DataSourceType.HDFS.value, + is_user_upload=False, + dataset_format='IMAGE', + store_format='TFRECORDS', + dataset_type='STREAMING') + ] + data_sources = DataSourceService(session=session).get_data_sources(project_id=1) + self.assertEqual(data_sources, expected_datasources) + + def test_delete_data_source(self): + with db.session_scope() as session: + datasource = DataSource(id=100, + uuid=resource_uuid(), + name='datasource', + path='hdfs:///data/fake_path', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + is_published=False) + session.add(datasource) + session.commit() + with db.session_scope() as session: + DataSourceService(session=session).delete_data_source(data_source_id=100) + session.commit() + with db.session_scope() as session: + dataset = session.query(DataSource).execution_options(include_deleted=True).get(100) + self.assertIsNotNone(dataset.deleted_at) + with self.assertRaises(NotFoundException): + DataSourceService(session=session).delete_data_source(data_source_id=102) + + with db.session_scope() as session: + datasource = DataSource(id=101, + uuid=resource_uuid(), + name='datasource', + path='hdfs:///data/fake_path', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 5), + is_published=False) + session.add(datasource) + dataset_job = DatasetJob(id=10, + uuid='test-uuid', + kind=DatasetJobKind.DATA_ALIGNMENT, + project_id=1, + workflow_id=1, + input_dataset_id=datasource.id, + output_dataset_id=2, + coordinator_id=1, + state=DatasetJobState.RUNNING) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + with self.assertRaises(ResourceConflictException): + DataSourceService(session=session).delete_data_source(data_source_id=101) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(10) + dataset_job.state = DatasetJobState.SUCCEEDED + session.commit() + with db.session_scope() as session: + DataSourceService(session=session).delete_data_source(data_source_id=101) + session.commit() + with db.session_scope() as session: + dataset = session.query(DataSource).execution_options(include_deleted=True).get(101) + self.assertIsNotNone(dataset.deleted_at) + + +class BatchServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test-project') + session.add(project) + session.commit() + + def test_create_orphan_batch(self): + batch_para = dataset_pb2.BatchParameter(comment='this is a comment for batch', + path='/data/dataset/test/batch/batch_1') + + with db.session_scope() as session: + with self.assertRaises(NotFoundException): + BatchService(session).create_batch(batch_parameter=batch_para) + session.commit() + + @patch('envs.Envs.STORAGE_ROOT', '/data') + def test_create_psi_batch(self): + batch_para = dataset_pb2.BatchParameter() + with db.session_scope() as session: + dataset_para = dataset_pb2.DatasetParameter(name='test', + uuid='fake_uuid', + type=DatasetType.PSI.value, + comment='this is a comment', + project_id=1, + path='/data/dataset/test/', + kind=DatasetKindV2.EXPORTED.value, + format=DatasetFormat.IMAGE.name) + 
dataset = DatasetService(session).create_dataset(dataset_parameter=dataset_para) + session.commit() + batch_para.dataset_id = dataset.id + + with db.session_scope() as session: + batch = BatchService(session).create_batch(batch_parameter=batch_para) + session.commit() + self.assertEqual(batch.dataset_id, batch_para.dataset_id) + self.assertIsNone(batch.event_time) + self.assertEqual(batch.path, '/data/dataset/test/batch/0') + self.assertEqual(batch.name, '0') + + with db.session_scope() as session: + with self.assertRaises(InvalidArgumentException): + batch = BatchService(session).create_batch(batch_parameter=batch_para) + session.commit() + + @patch('envs.Envs.STORAGE_ROOT', '/data') + def test_create_streaming_batch(self): + batch_para = dataset_pb2.BatchParameter() + with db.session_scope() as session: + dataset_para = dataset_pb2.DatasetParameter(name='test', + uuid='fake_uuid', + type=DatasetType.STREAMING.value, + comment='this is a comment', + project_id=1, + kind=DatasetKindV2.RAW.value, + format=DatasetFormat.IMAGE.name) + dataset = DatasetService(session).create_dataset(dataset_parameter=dataset_para) + session.commit() + batch_para.dataset_id = dataset.id + + with db.session_scope() as session: + with self.assertRaises(InvalidArgumentException): + batch_para.event_time = 0 + batch = BatchService(session).create_batch(batch_parameter=batch_para) + session.commit() + + with db.session_scope() as session: + event_time = now() + batch_para.event_time = to_timestamp(event_time) + batch = BatchService(session).create_batch(batch_parameter=batch_para) + session.flush() + self.assertEqual(batch.dataset_id, batch_para.dataset_id) + self.assertEqual(batch.event_time, event_time.replace(microsecond=0)) + self.assertEqual(batch.path, f'file:///data/dataset/fake_uuid_test/batch/{event_time.strftime("%Y%m%d")}') + self.assertEqual(batch.name, event_time.strftime('%Y%m%d')) + + with db.session_scope() as session: + event_time = now() + batch_para.event_time = to_timestamp(event_time) + batch_para.cron_type = dataset_pb2.CronType.HOURLY + batch = BatchService(session).create_batch(batch_parameter=batch_para) + session.flush() + self.assertEqual(batch.dataset_id, batch_para.dataset_id) + self.assertEqual(batch.event_time, event_time.replace(microsecond=0)) + self.assertEqual(batch.path, + f'file:///data/dataset/fake_uuid_test/batch/{event_time.strftime("%Y%m%d-%H")}') + self.assertEqual(batch.name, event_time.strftime('%Y%m%d-%H')) + + def test_get_next_batch(self): + with db.session_scope() as session: + dataset = Dataset(id=1, + name='output dataset', + uuid=resource_uuid(), + dataset_type=DatasetType.STREAMING, + comment='test comment2', + path='/data/dataset/123', + project_id=1) + session.add(dataset) + + data_batch = DataBatch(id=1, + name='20220101-08', + dataset_id=1, + event_time=datetime(year=2000, month=1, day=1, hour=8), + latest_parent_dataset_job_stage_id=1) + session.add(data_batch) + session.commit() + + # test no stage + with db.session_scope() as session: + data_batch = session.query(DataBatch).get(1) + self.assertIsNone(BatchService(session).get_next_batch(data_batch=data_batch), None) + + # test no dataset_job + with db.session_scope() as session: + dataset_job_stage = DatasetJobStage(name='20220101-stage0', + dataset_job_id=1, + data_batch_id=1, + project_id=1, + event_time=datetime(year=2000, month=1, day=1), + uuid=resource_uuid()) + session.add(dataset_job_stage) + session.commit() + with db.session_scope() as session: + data_batch = session.query(DataBatch).get(1) + 
self.assertIsNone(BatchService(session).get_next_batch(data_batch=data_batch), None) + + # test no next batch + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='test-uuid', + kind=DatasetJobKind.IMPORT_SOURCE, + state=DatasetJobState.SUCCEEDED, + project_id=1, + workflow_id=0, + input_dataset_id=0, + output_dataset_id=1, + coordinator_id=0, + time_range=timedelta(hours=1)) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + data_batch = session.query(DataBatch).get(1) + self.assertIsNone(BatchService(session).get_next_batch(data_batch=data_batch), None) + + # test get next batch + with db.session_scope() as session: + data_batch = DataBatch(id=2, + name='20220102', + dataset_id=1, + event_time=datetime(year=2000, month=1, day=1, hour=9)) + session.add(data_batch) + session.commit() + with db.session_scope() as session: + data_batch = session.query(DataBatch).get(1) + next_data_batch = session.query(DataBatch).get(2) + self.assertEqual(BatchService(session).get_next_batch(data_batch=data_batch), next_data_batch) + + +class DatasetJobServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test-project') + session.add(project) + session.flush([project]) + + input_dataset = DataSource(name='input dataset', + uuid=resource_uuid(), + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=project.id) + session.add(input_dataset) + + output_dataset = Dataset(name='output dataset', + uuid=resource_uuid(), + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=project.id) + session.add(output_dataset) + + session.commit() + self.project_id = project.id + self.input_dataset_id = input_dataset.id + self.output_dataset_id = output_dataset.id + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.services.DatasetJobConfiger.from_kind', + lambda *args: FakeDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.dataset.services.get_current_user', lambda: User(id=1, username='test user')) + @patch('fedlearner_webconsole.dataset.services.emit_dataset_job_submission_store') + def test_create_dataset_job_as_coordinator(self, mock_emit_dataset_job_submission_store: MagicMock): + with db.session_scope() as session: + input_dataset = session.query(DataSource).get(self.input_dataset_id) + global_configs = dataset_pb2.DatasetJobGlobalConfigs() + global_configs.global_configs['test_domain'].MergeFrom( + dataset_pb2.DatasetJobConfig(dataset_uuid=input_dataset.uuid, + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + dataset_job = DatasetJobService(session).create_as_coordinator(project_id=self.project_id, + kind=DatasetJobKind.IMPORT_SOURCE, + output_dataset_id=self.output_dataset_id, + global_configs=global_configs, + time_range=timedelta(days=1)) + session.commit() + self.assertEqual(len(dataset_job.get_global_configs().global_configs['test_domain'].variables), 2) + self.assertEqual(dataset_job.name, 'output dataset') + self.assertTrue(dataset_job.get_context().has_stages) + self.assertEqual(dataset_job.time_range, timedelta(days=1)) + 
self.assertEqual(dataset_job.creator_username, 'test user') + mock_emit_dataset_job_submission_store.assert_called_once_with(uuid=ANY, + kind=DatasetJobKind.IMPORT_SOURCE, + coordinator_id=0) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(name='hahaha', domain_name='fl-test_domain.com', pure_domain_name='test_domain')) + @patch('fedlearner_webconsole.dataset.services.emit_dataset_job_submission_store') + def test_create_dataset_job_as_participant(self, mock_emit_dataset_job_submission_store: MagicMock): + with db.session_scope() as session: + input_dataset = session.query(Dataset).get(self.input_dataset_id) + global_configs = dataset_pb2.DatasetJobGlobalConfigs() + global_configs.global_configs['test_domain'].MergeFrom( + dataset_pb2.DatasetJobConfig(dataset_uuid=input_dataset.uuid, + variables=[ + Variable(name='hello', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1)), + Variable(name='test', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='test_value')), + ])) + config = WorkflowDefinition(variables=[ + Variable(name='hello', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=1)) + ], + job_definitions=[ + JobDefinition(variables=[ + Variable(name='hello_from_job', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=3)) + ]) + ]) + + dataset_job = DatasetJobService(session).create_as_participant(project_id=self.project_id, + kind=DatasetJobKind.IMPORT_SOURCE, + config=config, + output_dataset_id=self.output_dataset_id, + coordinator_id=1, + uuid='u12345', + global_configs=global_configs, + creator_username='test user') + session.commit() + self.assertTrue(dataset_job.get_context().has_stages) + mock_emit_dataset_job_submission_store.assert_called_once_with(uuid='u12345', + kind=DatasetJobKind.IMPORT_SOURCE, + coordinator_id=1) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).filter(DatasetJob.uuid == 'u12345').first() + self.assertIsNone(dataset_job.global_configs) + self.assertEqual(dataset_job.output_dataset_id, self.output_dataset_id) + self.assertEqual(dataset_job.kind, DatasetJobKind.IMPORT_SOURCE) + self.assertEqual(dataset_job.project_id, self.project_id) + self.assertEqual(dataset_job.coordinator_id, 1) + self.assertEqual(dataset_job.name, 'output dataset') + self.assertEqual(dataset_job.creator_username, 'test user') + self.assertIsNone(dataset_job.time_range) + + # test with time_range + mock_emit_dataset_job_submission_store.reset_mock() + with db.session_scope() as session: + dataset_job = DatasetJobService(session).create_as_participant(project_id=self.project_id, + kind=DatasetJobKind.IMPORT_SOURCE, + config=config, + output_dataset_id=self.output_dataset_id, + coordinator_id=1, + uuid='u12345 with time_range', + global_configs=global_configs, + creator_username='test user', + time_range=timedelta(days=1)) + session.commit() + self.assertTrue(dataset_job.get_context().has_stages) + mock_emit_dataset_job_submission_store.assert_called_once_with(uuid='u12345 with time_range', + kind=DatasetJobKind.IMPORT_SOURCE, + coordinator_id=1) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).filter(DatasetJob.uuid == 'u12345 with time_range').first() + self.assertEqual(dataset_job.time_range, timedelta(days=1)) + + def test_is_local(self): + with db.session_scope() as session: + service = DatasetJobService(session=session) + 
self.assertTrue(service.is_local(dataset_job_kind=DatasetJobKind.IMPORT_SOURCE)) + self.assertFalse(service.is_local(dataset_job_kind=DatasetJobKind.DATA_ALIGNMENT)) + + def test_need_distribute(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='test-uuid', + kind=DatasetJobKind.EXPORT, + project_id=1, + workflow_id=1, + input_dataset_id=1, + output_dataset_id=2, + coordinator_id=1) + service = DatasetJobService(session=session) + self.assertTrue(service.need_distribute(dataset_job=dataset_job)) + dataset_job.coordinator_id = 0 + self.assertFalse(service.need_distribute(dataset_job=dataset_job)) + dataset_job.kind = DatasetJobKind.OT_PSI_DATA_JOIN + self.assertTrue(service.need_distribute(dataset_job=dataset_job)) + + @patch('fedlearner_webconsole.dataset.services.DatasetJobService.need_distribute') + @patch('fedlearner_webconsole.dataset.services.ParticipantService.get_platform_participants_by_project') + def test_get_participants_need_distribute(self, mock_get_platform_participants_by_project: MagicMock, + mock_need_distribute: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='test-uuid', + kind=DatasetJobKind.DATA_ALIGNMENT, + project_id=1, + workflow_id=1, + input_dataset_id=1, + output_dataset_id=2, + coordinator_id=1) + service = DatasetJobService(session=session) + + # test no need to distribute + mock_need_distribute.return_value = False + self.assertEqual(service.get_participants_need_distribute(dataset_job), []) + + # test no platform participant + mock_need_distribute.return_value = True + mock_get_platform_participants_by_project.return_value = [] + self.assertEqual(service.get_participants_need_distribute(dataset_job), []) + + # test get platform participants + mock_need_distribute.return_value = True + mock_get_platform_participants_by_project.return_value = ['participants1', 'participants2'] + self.assertEqual(service.get_participants_need_distribute(dataset_job), ['participants1', 'participants2']) + + @patch('fedlearner_webconsole.dataset.services.now', lambda: datetime(2022, 1, 1, 12, 0, 0)) + def test_start(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + coordinator_id=2, + uuid='uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING) + DatasetJobService(session).start_dataset_job(dataset_job) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.state, DatasetJobState.RUNNING) + self.assertEqual(dataset_job.started_at, datetime(2022, 1, 1, 12, 0, 0)) + + @patch('fedlearner_webconsole.dataset.services.now', lambda: datetime(2022, 1, 1, 12, 0, 0)) + @patch('fedlearner_webconsole.dataset.services.emit_dataset_job_duration_store') + def test_finish(self, mock_emit_dataset_job_duration_store: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + coordinator_id=2, + uuid='uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + created_at=datetime(2022, 1, 1, 11, 0, 0)) + dataset_job_service = DatasetJobService(session) + with self.assertRaises(ValueError): + dataset_job_service.finish_dataset_job(dataset_job=dataset_job, finish_state=DatasetJobState.RUNNING) + self.assertEqual(dataset_job.state, DatasetJobState.PENDING) + self.assertIsNone(dataset_job.finished_at) +
mock_emit_dataset_job_duration_store.assert_not_called() + dataset_job_service.finish_dataset_job(dataset_job=dataset_job, finish_state=DatasetJobState.SUCCEEDED) + mock_emit_dataset_job_duration_store.assert_called_once_with(duration=3600, + uuid='uuid', + kind=DatasetJobKind.DATA_ALIGNMENT, + coordinator_id=2, + state=DatasetJobState.SUCCEEDED) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.state, DatasetJobState.SUCCEEDED) + self.assertEqual(dataset_job.finished_at, datetime(2022, 1, 1, 12, 0, 0)) + + def test_start_cron_scheduler(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + coordinator_id=2, + uuid='uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.OT_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + created_at=datetime(2022, 1, 1, 11, 0, 0), + scheduler_state=DatasetJobSchedulerState.STOPPED) + DatasetJobService(session=session).start_cron_scheduler(dataset_job=dataset_job) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.STOPPED) + dataset_job.time_range = timedelta(days=1) + DatasetJobService(session=session).start_cron_scheduler(dataset_job=dataset_job) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.RUNNABLE) + + def test_stop_cron_scheduler(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + coordinator_id=2, + uuid='uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.OT_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + created_at=datetime(2022, 1, 1, 11, 0, 0), + scheduler_state=DatasetJobSchedulerState.RUNNABLE) + DatasetJobService(session=session).stop_cron_scheduler(dataset_job=dataset_job) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.RUNNABLE) + dataset_job.time_range = timedelta(days=1) + DatasetJobService(session=session).stop_cron_scheduler(dataset_job=dataset_job) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.STOPPED) + + def test_delete_dataset_job(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='test-uuid', + kind=DatasetJobKind.DATA_ALIGNMENT, + state=DatasetJobState.PENDING, + project_id=1, + workflow_id=1, + input_dataset_id=1, + output_dataset_id=2, + coordinator_id=1) + + # test dataset_job is not finished: + with self.assertRaises(ResourceConflictException): + DatasetJobService(session).delete_dataset_job(dataset_job=dataset_job) + self.assertIsNone(dataset_job.deleted_at) + + # test stop dataset_job successfully + dataset_job.state = DatasetJobState.SUCCEEDED + DatasetJobService(session).delete_dataset_job(dataset_job=dataset_job) + self.assertIsNotNone(dataset_job.deleted_at) + + +class DatasetJobStageServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='test-project') + session.add(project) + session.flush([project]) + + input_dataset = DataSource(name='input dataset', + uuid=resource_uuid(), + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/data_source/123', + project_id=project.id) + session.add(input_dataset) + + output_dataset = Dataset(name='output dataset', + uuid=resource_uuid(), + dataset_type=DatasetType.STREAMING, + comment='test comment2', + path='/data/dataset/123', + project_id=project.id) + session.add(output_dataset) + session.flush() + + 
dataset_job = DatasetJob(uuid='test-uuid', + kind=DatasetJobKind.IMPORT_SOURCE, + state=DatasetJobState.SUCCEEDED, + project_id=project.id, + workflow_id=0, + input_dataset_id=input_dataset.id, + output_dataset_id=output_dataset.id, + coordinator_id=0) + dataset_job.set_global_configs( + dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()})) + session.add(dataset_job) + + output_data_batch = DataBatch(name='20220101', + dataset_id=output_dataset.id, + event_time=datetime(year=2000, month=1, day=1)) + session.add(output_data_batch) + session.flush() + + dataset_job_stage = DatasetJobStage(name='20220101-stage0', + dataset_job_id=dataset_job.id, + data_batch_id=output_data_batch.id, + project_id=project.id, + event_time=datetime(year=2000, month=1, day=1), + uuid=resource_uuid()) + session.add(dataset_job_stage) + + session.commit() + self.project_id = project.id + self.input_dataset_id = input_dataset.id + self.output_dataset_id = output_dataset.id + self.dataset_job_id = dataset_job.id + self.output_data_batch_id = output_data_batch.id + self.dataset_job_stage_id = dataset_job_stage.id + + def test_create_dataset_job_stage(self): + with db.session_scope() as session: + with self.assertRaises(InvalidArgumentException): + dataset_job_stage = DatasetJobStageService(session).create_dataset_job_stage( + project_id=self.project_id, + dataset_job_id=self.dataset_job_id, + output_data_batch_id=self.output_data_batch_id) + dataset_job_stage = session.query(DatasetJobStage).get(self.dataset_job_stage_id) + dataset_job_stage.state = DatasetJobState.SUCCEEDED + session.commit() + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageService(session).create_dataset_job_stage( + project_id=self.project_id, + dataset_job_id=self.dataset_job_id, + output_data_batch_id=self.output_data_batch_id) + session.commit() + dataset_job_stage_id = dataset_job_stage.id + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + self.assertEqual(dataset_job_stage.name, '20220101-stage1') + self.assertEqual(dataset_job_stage.event_time, datetime(year=2000, month=1, day=1)) + self.assertEqual(dataset_job_stage.dataset_job_id, self.dataset_job_id) + self.assertEqual(dataset_job_stage.data_batch_id, self.output_data_batch_id) + self.assertEqual(dataset_job_stage.project_id, self.project_id) + dataset_job = session.query(DatasetJob).get(self.dataset_job_id) + self.assertEqual(dataset_job.state, DatasetJobState.PENDING) + data_batch = session.query(DataBatch).get(self.output_data_batch_id) + self.assertEqual(data_batch.latest_parent_dataset_job_stage_id, 2) + + def test_create_dataset_job_stage_as_coordinator(self): + global_configs = dataset_pb2.DatasetJobGlobalConfigs(global_configs={'test': dataset_pb2.DatasetJobConfig()}) + with db.session_scope() as session: + with self.assertRaises(InvalidArgumentException): + dataset_job_stage = DatasetJobStageService(session).create_dataset_job_stage_as_coordinator( + project_id=self.project_id, + dataset_job_id=self.dataset_job_id, + output_data_batch_id=self.output_data_batch_id, + global_configs=global_configs) + dataset_job_stage = session.query(DatasetJobStage).get(self.dataset_job_stage_id) + dataset_job_stage.state = DatasetJobState.SUCCEEDED + session.commit() + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageService(session).create_dataset_job_stage_as_coordinator( + project_id=self.project_id, + dataset_job_id=self.dataset_job_id, + 
output_data_batch_id=self.output_data_batch_id, + global_configs=global_configs) + session.commit() + dataset_job_stage_id = dataset_job_stage.id + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + self.assertEqual(dataset_job_stage.name, '20220101-stage1') + self.assertEqual(dataset_job_stage.event_time, datetime(year=2000, month=1, day=1)) + self.assertEqual(dataset_job_stage.dataset_job_id, self.dataset_job_id) + self.assertEqual(dataset_job_stage.data_batch_id, self.output_data_batch_id) + self.assertEqual(dataset_job_stage.project_id, self.project_id) + self.assertEqual(dataset_job_stage.get_global_configs(), global_configs) + self.assertTrue(dataset_job_stage.is_coordinator()) + dataset_job = session.query(DatasetJob).get(self.dataset_job_id) + self.assertEqual(dataset_job.state, DatasetJobState.PENDING) + data_batch = session.query(DataBatch).get(self.output_data_batch_id) + self.assertEqual(data_batch.latest_parent_dataset_job_stage_id, 2) + + def test_create_dataset_job_stage_as_participant(self): + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(self.dataset_job_stage_id) + dataset_job_stage.state = DatasetJobState.SUCCEEDED + session.commit() + with db.session_scope() as session: + dataset_job_stage = DatasetJobStageService(session).create_dataset_job_stage_as_participant( + project_id=self.project_id, + dataset_job_id=self.dataset_job_id, + output_data_batch_id=self.output_data_batch_id, + uuid='test dataset_job_stage uuid', + name='test dataset_job_stage', + coordinator_id=1) + session.commit() + dataset_job_stage_id = dataset_job_stage.id + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(dataset_job_stage_id) + self.assertEqual(dataset_job_stage.name, 'test dataset_job_stage') + self.assertEqual(dataset_job_stage.uuid, 'test dataset_job_stage uuid') + self.assertEqual(dataset_job_stage.coordinator_id, 1) + self.assertEqual(dataset_job_stage.event_time, datetime(year=2000, month=1, day=1)) + self.assertEqual(dataset_job_stage.dataset_job_id, self.dataset_job_id) + self.assertEqual(dataset_job_stage.data_batch_id, self.output_data_batch_id) + self.assertEqual(dataset_job_stage.project_id, self.project_id) + dataset_job = session.query(DatasetJob).get(self.dataset_job_id) + self.assertEqual(dataset_job.state, DatasetJobState.PENDING) + data_batch = session.query(DataBatch).get(self.output_data_batch_id) + self.assertEqual(data_batch.latest_parent_dataset_job_stage_id, 2) + + def test_start_dataset_job_stage(self): + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(self.dataset_job_stage_id) + DatasetJobStageService(session).start_dataset_job_stage(dataset_job_stage) + self.assertEqual(dataset_job_stage.state, DatasetJobState.RUNNING) + self.assertEqual(dataset_job_stage.dataset_job.state, DatasetJobState.RUNNING) + + def test_finish_dataset_job_stage(self): + with db.session_scope() as session: + dataset_job_stage = session.query(DatasetJobStage).get(self.dataset_job_stage_id) + DatasetJobStageService(session).finish_dataset_job_stage(dataset_job_stage, DatasetJobState.STOPPED) + self.assertEqual(dataset_job_stage.state, DatasetJobState.STOPPED) + self.assertEqual(dataset_job_stage.dataset_job.state, DatasetJobState.STOPPED) + + with self.assertRaises(ValueError): + DatasetJobStageService(session).finish_dataset_job_stage(dataset_job_stage, DatasetJobState.RUNNING) + + +class 
DataReaderTest(unittest.TestCase): + + def test_metadata(self): + with tempfile.TemporaryDirectory() as temp_dir: + dataset_path = f'{temp_dir}/dataset' + batch_name = '20220101' + meta_file = DatasetDirectory(dataset_path=dataset_path).batch_meta_file(batch_name=batch_name) + + # test no meta + reader = DataReader(dataset_path=dataset_path).metadata(batch_name=batch_name) + self.assertEqual(reader.metadata, {}) + + # test get meta + meta_info = { + 'dtypes': [{ + 'key': 'f01', + 'value': 'bigint' + }], + 'sample': [ + [1], + [0], + ], + 'count': 1000, + 'metrics': { + 'f01': { + 'count': '2', + 'mean': '0.0015716767309123998', + 'stddev': '0.03961485047808605', + 'min': '0', + 'max': '1', + 'missing_count': '0' + } + }, + 'hist': { + 'x': [ + 0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, + 1 + ], + 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] + } + } + Path(meta_file.split('/_META')[0]).mkdir(parents=True) + with open(meta_file, 'w', encoding='utf-8') as f: + f.write(json.dumps(meta_info)) + reader = DataReader(dataset_path=dataset_path).metadata(batch_name=batch_name) + self.assertEqual(reader.metadata, meta_info) + + def test_image_metadata(self): + with tempfile.TemporaryDirectory() as temp_dir: + dataset_path = f'{temp_dir}/dataset' + batch_name = '20220101' + dataset_directory = DatasetDirectory(dataset_path=dataset_path) + meta_file = dataset_directory.batch_meta_file(batch_name=batch_name) + thumbnail_dir_path = dataset_directory.thumbnails_path(batch_name=batch_name) + + # test no meta + reader = DataReader(dataset_path=dataset_path).image_metadata(thumbnail_dir_path=thumbnail_dir_path, + batch_name=batch_name) + self.assertEqual(reader.metadata, {}) + self.assertEqual(reader.thumbnail_dir_path, thumbnail_dir_path) + + # test get meta + meta_info = { + 'dtypes': [{ + 'key': 'f01', + 'value': 'bigint' + }], + 'sample': [ + [1], + [0], + ], + 'count': 1000, + 'metrics': { + 'f01': { + 'count': '2', + 'mean': '0.0015716767309123998', + 'stddev': '0.03961485047808605', + 'min': '0', + 'max': '1', + 'missing_count': '0' + } + }, + 'hist': { + 'x': [ + 0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, + 1 + ], + 'y': [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] + } + } + Path(meta_file.split('/_META')[0]).mkdir(parents=True) + with open(meta_file, 'w', encoding='utf-8') as f: + f.write(json.dumps(meta_info)) + reader = DataReader(dataset_path=dataset_path).image_metadata(thumbnail_dir_path=thumbnail_dir_path, + batch_name=batch_name) + self.assertEqual(reader.metadata, meta_info) + self.assertEqual(reader.thumbnail_dir_path, thumbnail_dir_path) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/analyzer.py b/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/analyzer.py deleted file mode 100644 index 5759b334e..000000000 --- a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/analyzer.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import os -import sys -import json -import logging - -import fsspec -import pandas - -from pyspark.sql import SparkSession -from pyspark.sql.functions import col, lit, sum -from util import dataset_features_path, dataset_meta_path, dataset_hist_path - - -def analyze(dataset_path: str, wildcard: str): - # for example: - # dataset_path: /data/fl_v2_fish_fooding/dataset/20210527_221741_pipeline/ - # wildcard: rds/** - spark = SparkSession.builder.getOrCreate() - files = os.path.join(dataset_path, wildcard) - logging.info(f'### loading df..., input files path: {files}') - df = spark.read.format('tfrecords').load(files) - # df_stats - df_missing = df.select(*(sum(col(c).isNull().cast('int')).alias(c) - for c in df.columns)).withColumn( - 'summary', lit('missing_count')) - df_stats = df.describe().unionByName(df_missing) - df_stats = df_stats.toPandas().set_index('summary').transpose() - features_path = dataset_features_path(dataset_path) - logging.info(f'### writing features, features path is {features_path}') - content = json.dumps(df_stats.to_dict(orient='index')) - with fsspec.open(features_path, mode='w') as f: - f.write(content) - # meta - meta = {} - # dtypes - logging.info('### loading dtypes...') - dtypes = {} - for d in df.dtypes: - k, v = d # (feature, type) - dtypes[k] = v - meta['dtypes'] = dtypes - # sample count - logging.info('### loading count...') - meta['count'] = df.count() - # sample - logging.info('### loading sample...') - meta['sample'] = df.head(20) - # meta - meta_path = dataset_meta_path(dataset_path) - logging.info(f'### writing meta, path is {meta_path}') - with fsspec.open(meta_path, mode='w') as f: - f.write(json.dumps(meta)) - # feature histogram - logging.info('### loading hist...') - hist = {} - for c in df.columns: - # TODO: histogram is too slow and needs optimization - x, y = df.select(c).rdd.flatMap(lambda x: x).histogram(10) - hist[c] = {'x': x, 'y': y} - hist_path = dataset_hist_path(dataset_path) - logging.info(f'### writing hist, path is {hist_path}') - with fsspec.open(hist_path, mode='w') as f: - f.write(json.dumps(hist)) - - spark.stop() - - -if __name__ == '__main__': - logging.basicConfig(level=logging.INFO) - if len(sys.argv) != 3: - logging.error( - f'spark-submit {sys.argv[0]} [dataset_path] [file_wildcard]') - sys.exit(-1) - - dataset_path, wildcard = sys.argv[1], sys.argv[2] - analyze(dataset_path, wildcard) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/converter.py b/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/converter.py deleted file mode 100644 index 248210d4d..000000000 --- a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/converter.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import sys -import os -import logging - -from pyspark.sql import SparkSession -from util import dataset_rds_path - - -def convert(dataset_path: str, wildcard: str): - # for example: - # dataset_path: /data/fl_v2_fish_fooding/dataset/20210527_221741_pipeline/ - # wildcard: batch/**/*.csv - files = os.path.join(dataset_path, wildcard) - logging.info(f'### input files path: {files}') - spark = SparkSession.builder.getOrCreate() - if wildcard.endswith('*.csv'): - df = spark.read.format('csv').option('header', 'true').option( - 'inferSchema', 'true').load(files) - elif wildcard.endswith('*.rd') or wildcard.endswith('*.tfrecords'): - df = spark.read.format('tfrecords').load(files) - else: - logging.error(f'### no valid file wildcard, wildcard: {wildcard}') - return - - df.printSchema() - save_path = dataset_rds_path(dataset_path) - logging.info(f'### saving to {save_path}, in tfrecords') - df.write.format('tfrecords').save(save_path, mode='overwrite') - spark.stop() - - -if __name__ == '__main__': - logging.basicConfig(level=logging.INFO) - if len(sys.argv) != 3: - logging.error( - f'spark-submit {sys.argv[0]} [dataset_path] [file_wildcard]') - sys.exit(-1) - - dataset_path, wildcard = sys.argv[1], sys.argv[2] - convert(dataset_path, wildcard) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/transformer.py b/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/transformer.py deleted file mode 100644 index 4c6620de0..000000000 --- a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/transformer.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import os -import json -import sys -import logging - -from pyspark.sql import SparkSession -from util import dataset_transformer_path - - -def transform(dataset_path: str, wildcard: str, conf: str): - # for example: - # dataset_path: /data/fl_v2_fish_fooding/dataset/20210527_221741_pipeline/ - # wildcard: rds/** or data_block/**/*.data - # conf: {"f00001": 0.0, "f00002": 1.0} - spark = SparkSession.builder.getOrCreate() - files = os.path.join(dataset_path, wildcard) - conf_dict = json.loads(conf) - logging.info(f'### input files path: {files}, config: {conf_dict}') - df = spark.read.format('tfrecords').load(files) - filled_df = df.fillna(conf_dict) - save_path = dataset_transformer_path(dataset_path) - logging.info(f'### saving to {save_path}') - filled_df.write.format('tfrecords').save(save_path, mode='overwrite') - spark.stop() - - -if __name__ == '__main__': - logging.basicConfig(level=logging.INFO) - if len(sys.argv) != 4: - logging.error( - f'spark-submit {sys.argv[0]} [dataset_path] [wildcard] [config]') - sys.exit(-1) - - dataset_path, wildcard, conf = sys.argv[1], sys.argv[2], sys.argv[3] - transform(dataset_path, wildcard, conf) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/util.py b/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/util.py deleted file mode 100644 index 14085e93e..000000000 --- a/web_console_v2/api/fedlearner_webconsole/dataset/sparkapp/pipeline/util.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os - - -def dataset_rds_path(dataset_path: str) -> str: - return os.path.join(dataset_path, 'rds/') - - -def dataset_features_path(dataset_path: str) -> str: - return os.path.join(dataset_path, '_FEATURES') - - -def dataset_meta_path(dataset_path: str) -> str: - return os.path.join(dataset_path, '_META') - - -def dataset_hist_path(dataset_path: str) -> str: - return os.path.join(dataset_path, '_HIST') - - -def dataset_transformer_path(dataset_path: str) -> str: - return os.path.join(dataset_path, 'fe/') diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/util.py b/web_console_v2/api/fedlearner_webconsole/dataset/util.py new file mode 100644 index 000000000..f3dcf1047 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/util.py @@ -0,0 +1,182 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# Functions from web_console_v2/inspection/util.py which are used by webconsole +import os +from typing import Optional, Tuple +from envs import Envs +from slugify import slugify +from urllib.parse import urlparse +from datetime import datetime +import enum + +from fedlearner_webconsole.dataset.consts import PLACEHOLDER, CRON_SCHEDULER_BATCH_NOT_READY_ERROR_MESSAGE, \ + CRON_SCHEDULER_CERTAIN_BATCH_NOT_READY_ERROR_MESSAGE, CRON_SCHEDULER_CERTAIN_FOLDER_NOT_READY_ERROR_MESSAGE, \ + CRON_SCHEDULER_FOLDER_NOT_READY_ERROR_MESSAGE, CRON_SCHEDULER_SUCCEEDED_MESSAGE +from fedlearner_webconsole.utils.file_manager import FileManager + +DEFAULT_SCHEME_TYPE = 'file' + + +class CronInterval(enum.Enum): + DAYS = 'DAYS' + HOURS = 'HOURS' + + +def get_dataset_path(dataset_name: str, uuid: str): + root_dir = add_default_url_scheme(Envs.STORAGE_ROOT) + # Builds a path for dataset according to the dataset name + # Example: '/data/dataset/xxxxxxxxxxxxx_test-dataset' + return f'{root_dir}/dataset/{uuid}_{slugify(dataset_name)[:32]}' + + +def get_export_dataset_name(index: int, input_dataset_name: str, input_data_batch_name: Optional[str] = None): + if input_data_batch_name: + return f'export-{input_dataset_name}-{input_data_batch_name}-{index}' + return f'export-{input_dataset_name}-{index}' + + +def add_default_url_scheme(url: str) -> str: + url_parser = urlparse(url) + data_source_type = url_parser.scheme + # set default source_type if no source_type found + if data_source_type == '' and url.startswith('/'): + url = f'{DEFAULT_SCHEME_TYPE}://{url}' + return url + + +def _is_daily(file_name: str) -> bool: + # YYYYMMDD format, like '20220701' + # the format must be YYYYMMDD, but a time without zero padding like '202271' could still be recognized by strptime, + # so we force the length of file_name to be 8 + # ref: https://docs.python.org/3.6/library/datetime.html#strftime-strptime-behavior + if len(file_name) != 8: + return False + try: + datetime.strptime(file_name, '%Y%m%d') + return True + except ValueError: + return False + + +def _is_hourly(file_name: str) -> bool: + # YYYYMMDD-HH format, like '20220701-01' + # the format must be YYYYMMDD-HH, but a time without zero padding like '202271-1' could still be recognized by strptime, + # so we force the length of file_name to be 11 + # ref: https://docs.python.org/3.6/library/datetime.html#strftime-strptime-behavior + if len(file_name) != 11: + return False + try: + datetime.strptime(file_name, '%Y%m%d-%H') + return True + except ValueError: + return False + + +def is_streaming_folder(folder: str) -> Tuple[bool, str]: + fm = FileManager() + file_names = fm.listdir(folder) + if len(file_names) == 0: + return False, f'streaming data_path should contain folders in the correct format, but path {folder} is empty' + for file_name in file_names: + if not fm.isdir(path=os.path.join(folder, file_name)): + return False, f'data_source_url can only contain dirs as subpaths, {file_name} is not a dir' + if not _is_daily(file_name) and not _is_hourly(file_name): + return False, f'illegal dir format: {file_name}' + return True, '' + + +def get_oldest_daily_folder_time(folder: str) -> Optional[datetime]: + fm = FileManager() + folder_names = fm.listdir(folder) + oldest_folder_time = None + for folder_name in folder_names: + if not fm.isdir(path=os.path.join(folder, folder_name)): + continue + if _is_daily(folder_name): + folder_time = datetime.strptime(folder_name, '%Y%m%d') + if oldest_folder_time is None: + oldest_folder_time = folder_time + else: + oldest_folder_time = min(oldest_folder_time, folder_time) + return oldest_folder_time + + +def get_oldest_hourly_folder_time(folder: str) -> Optional[datetime]: + fm = FileManager() + folder_names = fm.listdir(folder) + oldest_folder_time = None + for folder_name in folder_names: + if not fm.isdir(path=os.path.join(folder, folder_name)): + continue + if _is_hourly(folder_name): + folder_time = datetime.strptime(folder_name, '%Y%m%d-%H') + if oldest_folder_time is None: + oldest_folder_time = folder_time + else: + oldest_folder_time = min(oldest_folder_time, folder_time) + return oldest_folder_time + + +def parse_event_time_to_daily_folder_name(event_time: datetime) -> str: + return event_time.strftime('%Y%m%d') + + +def parse_event_time_to_hourly_folder_name(event_time: datetime) -> str: + return event_time.strftime('%Y%m%d-%H') + + +def check_batch_folder_ready(folder: str, batch_name: str) -> bool: + batch_path = os.path.join(folder, batch_name) + file_manager = FileManager() + if not file_manager.isdir(batch_path): + return False + # TODO(liuhehan): add is_file func to file_manager and check is_file here + if not file_manager.exists(os.path.join(batch_path, '_SUCCESS')): + return False + return True + + +# ==================================== +# scheduler message funcs +# ==================================== + + +def get_daily_folder_not_ready_err_msg() -> str: + return CRON_SCHEDULER_FOLDER_NOT_READY_ERROR_MESSAGE.replace(PLACEHOLDER, 'YYYYMMDD') + + +def get_hourly_folder_not_ready_err_msg() -> str: + return CRON_SCHEDULER_FOLDER_NOT_READY_ERROR_MESSAGE.replace(PLACEHOLDER, 'YYYYMMDD-HH') + + +def get_certain_folder_not_ready_err_msg(folder_name: str) -> str: + return CRON_SCHEDULER_CERTAIN_FOLDER_NOT_READY_ERROR_MESSAGE.replace(PLACEHOLDER, folder_name) + + +def get_daily_batch_not_ready_err_msg() -> str: + return CRON_SCHEDULER_BATCH_NOT_READY_ERROR_MESSAGE.replace(PLACEHOLDER, 'YYYYMMDD') + + +def get_hourly_batch_not_ready_err_msg() -> str: + return CRON_SCHEDULER_BATCH_NOT_READY_ERROR_MESSAGE.replace(PLACEHOLDER, 'YYYYMMDD-HH') + + +def get_certain_batch_not_ready_err_msg(batch_name: str) -> str: + return CRON_SCHEDULER_CERTAIN_BATCH_NOT_READY_ERROR_MESSAGE.replace(PLACEHOLDER, batch_name) + + +def get_cron_succeeded_msg(batch_name: str) -> str: + return CRON_SCHEDULER_SUCCEEDED_MESSAGE.replace(PLACEHOLDER, batch_name) diff --git a/web_console_v2/api/fedlearner_webconsole/dataset/util_test.py b/web_console_v2/api/fedlearner_webconsole/dataset/util_test.py new file mode 100644 index 000000000..9726403f4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/dataset/util_test.py @@ -0,0 +1,190 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
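The fixed-length guards in `_is_daily` and `_is_hourly` above matter because `strptime` tolerates missing zero padding. A standalone sketch demonstrating the pitfall (illustrative code, not part of the patch; the helper name here is hypothetical):

```python
from datetime import datetime

# strptime accepts non-zero-padded fields, so '202271' parses even though it
# is not in the intended YYYYMMDD shape; hence the explicit length check.
assert datetime.strptime('202271', '%Y%m%d') == datetime(2022, 7, 1)
assert datetime.strptime('20220701', '%Y%m%d') == datetime(2022, 7, 1)

def looks_like_daily_folder(name: str) -> bool:
    # mirrors _is_daily above: exactly 8 characters and parseable as YYYYMMDD
    if len(name) != 8:
        return False
    try:
        datetime.strptime(name, '%Y%m%d')
        return True
    except ValueError:
        return False

assert looks_like_daily_folder('20220701')
assert not looks_like_daily_folder('202271')    # too short, even though strptime accepts it
assert not looks_like_daily_folder('2022x711')  # right length, but not a date
```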
+# + +import os +import tempfile +import unittest +from unittest.mock import patch +from datetime import datetime +import fsspec + +from fedlearner_webconsole.dataset.util import get_oldest_daily_folder_time, get_oldest_hourly_folder_time, \ + check_batch_folder_ready, get_dataset_path, add_default_url_scheme, _is_daily, _is_hourly, \ + is_streaming_folder, parse_event_time_to_daily_folder_name, parse_event_time_to_hourly_folder_name, \ + get_export_dataset_name, get_certain_batch_not_ready_err_msg, get_certain_folder_not_ready_err_msg, \ + get_cron_succeeded_msg, get_daily_batch_not_ready_err_msg, get_daily_folder_not_ready_err_msg, \ + get_hourly_batch_not_ready_err_msg, get_hourly_folder_not_ready_err_msg + + +class UtilsTest(unittest.TestCase): + + @patch('envs.Envs.STORAGE_ROOT', '/test') + def test_get_dataset_path(self): + res = get_dataset_path('fake_dataset', 'fake_uuid') + self.assertEqual(res, 'file:///test/dataset/fake_uuid_fake-dataset') + + def test_get_export_dataset_name(self): + self.assertEqual(get_export_dataset_name(index=0, input_dataset_name='fake_dataset'), 'export-fake_dataset-0') + self.assertEqual( + get_export_dataset_name(index=0, input_dataset_name='fake_dataset', input_data_batch_name='20220101'), + 'export-fake_dataset-20220101-0') + + def test_add_default_url_scheme(self): + path = add_default_url_scheme('') + self.assertEqual(path, '') + + path = add_default_url_scheme('/') + self.assertEqual(path, 'file:///') + + path = add_default_url_scheme('/test/123') + self.assertEqual(path, 'file:///test/123') + + path = add_default_url_scheme('test/123') + self.assertEqual(path, 'test/123') + + path = add_default_url_scheme('hdfs:///test/123') + self.assertEqual(path, 'hdfs:///test/123') + + def test_is_daily(self): + self.assertTrue(_is_daily('20220701')) + self.assertFalse(_is_daily('2022711')) + self.assertFalse(_is_daily('20221711')) + self.assertFalse(_is_daily('2022x711')) + + def test_is_hourly(self): + self.assertTrue(_is_hourly('20220701-01')) + self.assertFalse(_is_hourly('20220711')) + self.assertFalse(_is_hourly('20220701-1')) + self.assertFalse(_is_hourly('20220701-25')) + + def test_is_streaming_folder(self): + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220701')) + fs.mkdirs(os.path.join(test_path, '20220702')) + fs.mkdirs(os.path.join(test_path, '20220703')) + res, _ = is_streaming_folder(test_path) + self.assertTrue(res) + + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220701-01')) + fs.mkdirs(os.path.join(test_path, '20220701-02')) + fs.mkdirs(os.path.join(test_path, '20220701-03')) + res, _ = is_streaming_folder(test_path) + self.assertTrue(res) + + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(test_path) + res, _ = is_streaming_folder(test_path) + self.assertFalse(res) + + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20221331-25')) + res, _ = is_streaming_folder(test_path) + self.assertFalse(res) + + def test_get_oldest_daily_folder_time(self): + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = 
fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220701')) + fs.mkdirs(os.path.join(test_path, '20220702')) + fs.mkdirs(os.path.join(test_path, '20220703')) + event_time = get_oldest_daily_folder_time(test_path) + self.assertEqual(event_time, datetime(2022, 7, 1)) + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220701-01')) + event_time = get_oldest_daily_folder_time(test_path) + self.assertIsNone(event_time) + + def test_get_oldest_hourly_folder_time(self): + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220701-01')) + fs.mkdirs(os.path.join(test_path, '20220701-02')) + fs.mkdirs(os.path.join(test_path, '20220701-03')) + event_time = get_oldest_hourly_folder_time(test_path) + self.assertEqual(event_time, datetime(2022, 7, 1, 1)) + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220701')) + event_time = get_oldest_hourly_folder_time(test_path) + self.assertIsNone(event_time) + + def test_parse_event_time_to_daily_folder_name(self): + self.assertEqual(parse_event_time_to_daily_folder_name(datetime(2022, 1, 1)), '20220101') + + def test_parse_event_time_to_hourly_folder_name(self): + self.assertEqual(parse_event_time_to_hourly_folder_name(datetime(2022, 1, 1, 1)), '20220101-01') + + def test_check_batch_folder_ready(self): + # test no batch_path + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + self.assertFalse(check_batch_folder_ready(folder=test_path, batch_name='20220101')) + + # test no _SUCCESS file + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220101')) + self.assertFalse(check_batch_folder_ready(folder=test_path, batch_name='20220101')) + + # test ready + with tempfile.TemporaryDirectory() as tmp_dir: + test_path = os.path.join(tmp_dir, 'test') + fs, _ = fsspec.core.url_to_fs(test_path) + fs.mkdirs(os.path.join(test_path, '20220101')) + fs.touch(os.path.join(test_path, '20220101', '_SUCCESS')) + self.assertTrue(check_batch_folder_ready(folder=test_path, batch_name='20220101')) + + def test_get_daily_folder_not_ready_err_msg(self): + self.assertEqual(get_daily_folder_not_ready_err_msg(), '数据源下未找到满足格式要求的文件夹,请确认文件夹以YYYYMMDD格式命名') + + def test_get_hourly_folder_not_ready_err_msg(self): + self.assertEqual(get_hourly_folder_not_ready_err_msg(), '数据源下未找到满足格式要求的文件夹,请确认文件夹以YYYYMMDD-HH格式命名') + + def test_get_daily_batch_not_ready_err_msg(self): + self.assertEqual(get_daily_batch_not_ready_err_msg(), '未找到满足格式要求的数据批次,请确保输入数据集有YYYYMMDD格式命名的数据批次') + + def test_get_hourly_batch_not_ready_err_msg(self): + self.assertEqual(get_hourly_batch_not_ready_err_msg(), '未找到满足格式要求的数据批次,请确保输入数据集有YYYYMMDD-HH格式命名的数据批次') + + def test_get_certain_folder_not_ready_err_msg(self): + self.assertEqual(get_certain_folder_not_ready_err_msg(folder_name='20220101-08'), + '20220101-08文件夹检查失败,请确认数据源下存在以20220101-08格式命名的文件夹,且文件夹下有_SUCCESS文件') + + def test_get_certain_batch_not_ready_err_msg(self): + self.assertEqual(get_certain_batch_not_ready_err_msg(batch_name='20220101-08'), + '数据批次20220101-08检查失败,请确认该批次命名格式及状态') + + 
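The assertions above pin down the message helpers, which substitute a folder or batch name into fixed templates. A minimal sketch of that pattern with assumed constants (the real PLACEHOLDER token and templates live in fedlearner_webconsole/dataset/consts.py and may differ):

```python
# Hypothetical stand-ins for the real constants in dataset/consts.py.
PLACEHOLDER = '{placeholder}'  # assumption: the actual token may differ
CRON_SCHEDULER_SUCCEEDED_MESSAGE = f'已成功发起{PLACEHOLDER}批次处理任务'

def get_cron_succeeded_msg(batch_name: str) -> str:
    # substitute the batch name into the fixed template
    return CRON_SCHEDULER_SUCCEEDED_MESSAGE.replace(PLACEHOLDER, batch_name)

# matches the expectation asserted by the test below
assert get_cron_succeeded_msg('20220101-08') == '已成功发起20220101-08批次处理任务'
```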
def test_get_cron_succeeded_msg(self): + self.assertEqual(get_cron_succeeded_msg(batch_name='20220101-08'), '已成功发起20220101-08批次处理任务') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/db.py b/web_console_v2/api/fedlearner_webconsole/db.py index b40ff033b..edca200ca 100644 --- a/web_console_v2/api/fedlearner_webconsole/db.py +++ b/web_console_v2/api/fedlearner_webconsole/db.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,29 +15,28 @@ # coding: utf-8 import os from contextlib import contextmanager -from typing import ContextManager, Callable - +from typing import ContextManager +from pymysql.constants.CLIENT import FOUND_ROWS import sqlalchemy as sa - +from sqlalchemy import orm, event, null from sqlalchemy.engine import Engine, create_engine -from sqlalchemy.ext.declarative.api import DeclarativeMeta, declarative_base -from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm import sessionmaker, DeclarativeMeta, declarative_base from sqlalchemy.orm.session import Session -from flask_sqlalchemy import SQLAlchemy from envs import Envs +from fedlearner_webconsole.utils.base_model.softdelete_model import SoftDeleteModel + BASE_DIR = Envs.BASE_DIR # Explicitly set autocommit and autoflush # Disables autocommit to make developers to commit manually # Enables autoflush to make changes visible in the same session # Disable expire_on_commit to make it possible that object can detach -SESSION_OPTIONS = { - 'autocommit': False, - 'autoflush': True, - 'expire_on_commit': False -} -ENGINE_OPTIONS = {} +SESSION_OPTIONS = {'autocommit': False, 'autoflush': True, 'expire_on_commit': False} +# Add the FOUND_ROWS flag so that UPDATE statements return the number of matched rows rather than changed rows. +# When using SQLAlchemy, this flag must be set so that UPDATE rowcount validation behaves correctly. 
+MYSQL_OPTIONS = {'connect_args': {'client_flag': FOUND_ROWS}} +SQLITE_OPTIONS = {} def default_table_args(comment: str) -> dict: @@ -48,7 +47,22 @@ } -def _turn_db_timezone_to_utc(original_uri: str) -> str: +# an option is added to all SELECT statements that limits queries against SoftDeleteModel subclasses to rows with deleted_at == null +# global WHERE/ON criteria, e.g.: https://docs.sqlalchemy.org/en/14/_modules/examples/extending_query/filter_public.html +# normal ORM execution won't return soft-deleted data, e.g.: session.query(A).get(1) +# using execution options can return the soft-deleted data, e.g.: session.query(A).execution_options(include_deleted=True).get(1) +@event.listens_for(Session, 'do_orm_execute') +def _add_filtering_criteria(execute_state): + if (not execute_state.is_column_load and not execute_state.execution_options.get('include_deleted', False)): + execute_state.statement = execute_state.statement.options( + orm.with_loader_criteria( + SoftDeleteModel, + lambda cls: cls.deleted_at == null(), + include_aliases=True, + )) + + +def turn_db_timezone_to_utc(original_uri: str) -> str: """ string operator that make any db into utc timezone Args: @@ -101,17 +115,15 @@ Returns: str: database uri with utc timezone """ - uri = '' - if 'SQLALCHEMY_DATABASE_URI' in os.environ: - uri = os.getenv('SQLALCHEMY_DATABASE_URI') - else: - uri = 'sqlite:///{}?check_same_thread=False'.format( - os.path.join(BASE_DIR, 'app.db')) - return _turn_db_timezone_to_utc(uri) + uri = Envs.SQLALCHEMY_DATABASE_URI + if not uri: + db_path = os.path.join(BASE_DIR, 'app.db') + uri = f'sqlite:///{db_path}?check_same_thread=False' + return turn_db_timezone_to_utc(uri) -def get_engine(database_uri: str) -> Engine: - """get engine according to database uri +def _get_engine(database_uri: str) -> Engine: + """Gets engine according to database uri. Args: database_uri (str): database uri used for create engine @@ -119,7 +131,12 @@ Returns: Engine: engine used for managing connections """ - return create_engine(database_uri, **ENGINE_OPTIONS) + engine_options = {} + if database_uri.startswith('mysql'): + engine_options = MYSQL_OPTIONS + elif database_uri.startswith('sqlite'): + engine_options = SQLITE_OPTIONS + return create_engine(database_uri, **engine_options) @@ -133,8 +150,8 @@ """ try: session: Session = sessionmaker(bind=db_engine, **SESSION_OPTIONS)() - except Exception: - raise Exception('unknown db engine') + except Exception as e: - raise Exception('unknown db engine') from e wait + raise Exception('unknown db engine') from e try: yield session @@ -145,40 +162,12 @@ session.close() -def make_session_context() -> Callable[[], ContextManager[Session]]: - """A functional closure that will store engine - Call it n times if you want to n connection pools - - Returns: - Callable[[], Callable[[], ContextManager[Session]]] - a function that return a contextmanager - - - Examples: - # First initialize a connection pool, - # when you want to a new connetion pool - session_context = make_session_context() - ... - # You use it multiple times as follows. 
- with session_context() as session: - session.query(SomeMapperClass).filter_by(id=1).one() - """ - engine = None - - def wrapper_get_session(): - nonlocal engine - if engine is None: - engine = get_engine(get_database_uri()) - return get_session(engine) - - return wrapper_get_session - - class DBHandler(object): + def __init__(self) -> None: super().__init__() - self.engine: Engine = get_engine(get_database_uri()) + self.engine: Engine = _get_engine(get_database_uri()) self.Model: DeclarativeMeta = declarative_base(bind=self.engine) for module in sa, sa.orm: for key in module.__all__: @@ -193,7 +182,7 @@ def metadata(self) -> DeclarativeMeta: return self.Model.metadata def rebind(self, database_uri: str): - self.engine = get_engine(database_uri) + self.engine = _get_engine(database_uri) self.Model = declarative_base(bind=self.engine, metadata=self.metadata) def create_all(self): @@ -203,9 +192,6 @@ def drop_all(self): return self.metadata.drop_all() -# now db_handler and db are alive at the same time -# db will be replaced by db_handler in the near future -db_handler = DBHandler() -db = SQLAlchemy(session_options=SESSION_OPTIONS, - engine_options=ENGINE_OPTIONS, - metadata=db_handler.metadata) +# db is now a single DBHandler instance; it replaces both the old db_handler and the flask_sqlalchemy db +db = DBHandler() diff --git a/web_console_v2/api/fedlearner_webconsole/db_test.py b/web_console_v2/api/fedlearner_webconsole/db_test.py new file mode 100644 index 000000000..c1eb82deb --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/db_test.py @@ -0,0 +1,61 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
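The `do_orm_execute` listener added to db.py above follows SQLAlchemy 1.4's global-criteria recipe. A self-contained sketch of the same mechanism with a toy model (the `Article` model and in-memory engine are illustrative; the real listener keys off `SoftDeleteModel`, and SQLAlchemy 1.4+ is assumed):

```python
import sqlalchemy as sa
from sqlalchemy import event, null, orm
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Article(Base):  # hypothetical stand-in for a SoftDeleteModel subclass
    __tablename__ = 'article'
    id = sa.Column(sa.Integer, primary_key=True)
    deleted_at = sa.Column(sa.DateTime, nullable=True)

@event.listens_for(Session, 'do_orm_execute')
def _hide_soft_deleted(execute_state):
    # same shape as the listener in db.py: skip column loads and per-query opt-outs
    if not execute_state.is_column_load and not execute_state.execution_options.get('include_deleted', False):
        execute_state.statement = execute_state.statement.options(
            orm.with_loader_criteria(Article, lambda cls: cls.deleted_at == null(), include_aliases=True))

engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Article(id=1), Article(id=2, deleted_at=sa.func.now())])
    session.commit()
    # soft-deleted rows are invisible to ordinary queries...
    assert [a.id for a in session.execute(sa.select(Article)).scalars()] == [1]
    # ...but can be opted back in per query via execution options
    rows = session.execute(sa.select(Article), execution_options={'include_deleted': True}).scalars().all()
    assert len(rows) == 2
```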
+ +# coding: utf-8 +import unittest +from unittest.mock import patch + +from fedlearner_webconsole.db import get_database_uri, turn_db_timezone_to_utc + + +class EngineSessionTest(unittest.TestCase): + + def test_turn_db_timezone_to_utc(self): + sqlite_uri = 'sqlite:///app.db' + self.assertEqual(turn_db_timezone_to_utc(sqlite_uri), 'sqlite:///app.db') + + mysql_uri_naive = 'mysql+pymysql://root:root@localhost:33600/fedlearner' + self.assertEqual( + turn_db_timezone_to_utc(mysql_uri_naive), + 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\'') + + mysql_uri_with_init_command = 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=HELLO' + self.assertEqual( + turn_db_timezone_to_utc(mysql_uri_with_init_command), + 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\';HELLO' + ) + + mysql_uri_with_other_args = 'mysql+pymysql://root:root@localhost:33600/fedlearner?charset=utf8mb4' + self.assertEqual( + turn_db_timezone_to_utc(mysql_uri_with_other_args), + 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\'&&charset=utf8mb4' # pylint: disable=line-too-long + ) + + mysql_uri_with_set_time_zone = 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B08:00\'' # pylint: disable=line-too-long + self.assertEqual( + turn_db_timezone_to_utc(mysql_uri_with_set_time_zone), + 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\'') + + def test_get_database_uri(self): + # test with environmental variable + with patch('fedlearner_webconsole.db.Envs.SQLALCHEMY_DATABASE_URI', + 'mysql+pymysql://root:root@localhost:33600/fedlearner'): + self.assertTrue(get_database_uri().startswith('mysql+pymysql://root:root@localhost:33600/fedlearner')) + + # test with fallback options + self.assertTrue(get_database_uri().startswith('sqlite:///')) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/debug/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/debug/BUILD.bazel new file mode 100644 index 000000000..5eb4742b0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/debug/BUILD.bazel @@ -0,0 +1,37 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_cache_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:tfrecords_reader_lib", + "@common_flask_restful//:pkg", + "@common_pyyaml//:pkg", + "@common_tensorflow//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/debug/__init__.py 
b/web_console_v2/api/fedlearner_webconsole/debug/__init__.py index 3e28547fe..c13b80f8f 100644 --- a/web_console_v2/api/fedlearner_webconsole/debug/__init__.py +++ b/web_console_v2/api/fedlearner_webconsole/debug/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/web_console_v2/api/fedlearner_webconsole/debug/apis.py b/web_console_v2/api/fedlearner_webconsole/debug/apis.py index 4c74e9a80..ab58f220f 100644 --- a/web_console_v2/api/fedlearner_webconsole/debug/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/debug/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,80 +13,154 @@ # limitations under the License. # coding: utf-8 +import datetime import json -from flask_restful import Resource, Api, request +import tensorflow as tf +import yaml +from flask_restful import Resource, Api, request, reqparse -from fedlearner_webconsole.composer.composer import composer -from fedlearner_webconsole.composer.runner import MemoryItem -from fedlearner_webconsole.dataset.data_pipeline import DataPipelineItem, \ - DataPipelineType +from fedlearner_webconsole.composer.composer_service import ComposerService +from fedlearner_webconsole.composer.models import SchedulerRunner, \ + SchedulerItem +from fedlearner_webconsole.utils.tfrecords_reader import tf_record_reader +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.k8s.k8s_cache import k8s_cache +from fedlearner_webconsole.k8s.k8s_client import k8s_client +from fedlearner_webconsole.db import db -class ComposerApi(Resource): +class DebugComposerApi(Resource): + def get(self, name): - interval = request.args.get('interval', -1) + cron_config = request.args.get('cron_config') finish = request.args.get('finish', 0) - if int(finish) == 1: - composer.finish(name) - else: - composer.collect( - name, - [MemoryItem(1), MemoryItem(2)], - { # meta data - 1: { - 'input': 'fs://data/memory_1', - }, - 2: { - 'input': 'fs://data/memory_2', - } - }, - interval=int(interval), - ) - return {'data': {'name': name}} - - -class DataPipelineApi(Resource): - def get(self, name: str): - # '/data/fl_v2_fish_fooding/dataset/20210527_221741_pipeline' - input_dir = request.args.get('input_dir', None) - if not input_dir: - return {'msg': 'no input dir'} - if 'pipe' in name: - composer.collect( - name, - [DataPipelineItem(1), DataPipelineItem(2)], - { # meta data - 1: { # convertor - 'sparkapp_name': '1', - 'task_type': DataPipelineType.CONVERTER.value, - 'input': [input_dir, 'batch/**/*.csv'], - }, - 2: { # analyzer - 'sparkapp_name': '2', - 'task_type': DataPipelineType.ANALYZER.value, - 'input': [input_dir, 'rds/**'], - }, - }, - ) - elif 'fe' in name: - composer.collect( - name, - [DataPipelineItem(1)], - { # meta data - 1: { # transformer - 'sparkapp_name': '1', - 'task_type': DataPipelineType.TRANSFORMER.value, - 'input': [input_dir, 'rds/**', json.dumps({ - 'f00000': 1.0, - 'f00010': 0.0, - })], - }, - }, - ) - return {'data': {'name': name}} + with db.session_scope() as session: + service = ComposerService(session) + if int(finish) == 1: + 
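# mark the composer item as finished; the change is committed below +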
service.finish(name) + session.commit() + return {'data': {'name': name}} + + +class DebugSparkAppApi(Resource): + + def post(self, name: str): + data = yaml.load(f""" +apiVersion: "sparkoperator.k8s.io/v1beta2" +kind: SparkApplication +metadata: + name: {name} + namespace: default +spec: + type: Python + pythonVersion: "3" + mode: cluster + image: "registry.cn-beijing.aliyuncs.com/fedlearner/spark-tfrecord:latest" + imagePullPolicy: Always + volumes: + - name: data + persistentVolumeClaim: + claimName: pvc-fedlearner-default + mainApplicationFile: local:///data/sparkapp_test/tyt_test/schema_check.py + arguments: + - /data/sparkapp_test/tyt_test/test.csv + - /data/sparkapp_test/tyt_test/schema.json + sparkVersion: "3.0.0" + restartPolicy: + type: OnFailure + onFailureRetries: 3 + onFailureRetryInterval: 10 + onSubmissionFailureRetries: 5 + onSubmissionFailureRetryInterval: 20 + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + labels: + version: 3.0.0 + serviceAccount: spark + volumeMounts: + - name: data + mountPath: /data + executor: + cores: 1 + instances: 1 + memory: "512m" + labels: + version: 3.0.0 + volumeMounts: + - name: data + mountPath: /data +""", + Loader=None) + data = k8s_client.create_sparkapplication(data) + return {'data': data} + + +class DebugK8sCacheApi(Resource): + + def get(self): + + def default(o): + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + return str(o) + + return {'data': json.dumps(k8s_cache.inspect(), default=default)} + + +class DebugTfRecordApi(Resource): + + def get(self): + path = request.args.get('path', None) + + if path is None or not tf.io.gfile.exists(path): + raise InvalidArgumentException('path is not found') + + lines = request.args.get('lines', 25, int) + tf_matrix = tf_record_reader(path, lines, matrix_view=True) + + return {'data': tf_matrix} + + +class DebugSchedulerItemsApi(Resource): + + def get(self): + parser = reqparse.RequestParser() + parser.add_argument('status', type=int, location='args', required=False, choices=[0, 1]) + parser.add_argument('id', type=int, location='args', required=False) + data = parser.parse_args() + with db.session_scope() as session: + items = session.query(SchedulerItem) + if data['status'] is not None: + items = items.filter_by(status=data['status']) + if data['id'] is not None: + runners = session.query(SchedulerRunner).filter_by(item_id=data['id']).order_by( + SchedulerRunner.updated_at.desc()).limit(10).all() + return {'data': [runner.to_dict() for runner in runners]} + items = items.order_by(SchedulerItem.created_at.desc()).all() + return {'data': [item.to_dict() for item in items]} + + +class DebugSchedulerRunnersApi(Resource): + + def get(self): + parser = reqparse.RequestParser() + parser.add_argument('status', type=int, location='args', required=False, choices=[0, 1, 2, 3]) + data = parser.parse_args() + with db.session_scope() as session: + runners = session.query(SchedulerRunner) + if data['status'] is not None: + runners = runners.filter_by(status=data['status']) + runners = runners.order_by(SchedulerRunner.updated_at.desc()).all() + return {'data': [runner.to_dict() for runner in runners]} def initialize_debug_apis(api: Api): - api.add_resource(ComposerApi, '/debug/composer/') - api.add_resource(DataPipelineApi, '/debug/pipeline/') + api.add_resource(DebugComposerApi, '/debug/composer/') + api.add_resource(DebugSparkAppApi, '/debug/sparkapp/') + api.add_resource(DebugK8sCacheApi, '/debug/k8scache/') + api.add_resource(DebugTfRecordApi, '/debug/tfrecord') 
+ api.add_resource(DebugSchedulerItemsApi, '/debug/scheduler_items') + api.add_resource(DebugSchedulerRunnersApi, '/debug/scheduler_runners') diff --git a/web_console_v2/api/fedlearner_webconsole/debug/apis_test.py b/web_console_v2/api/fedlearner_webconsole/debug/apis_test.py new file mode 100644 index 000000000..8b0b33e51 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/debug/apis_test.py @@ -0,0 +1,83 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +import unittest +from http import HTTPStatus + +from testing.common import BaseTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.models import (SchedulerItem, SchedulerRunner, ItemStatus, RunnerStatus) + + +class DebugSchedulerApiTest(BaseTestCase): + + _ITEM_ON_ID = 123 + _PRESET_SCHEDULER_ITEM = [ + 'test_item_off', + 'test_item_on', + 'workflow_scheduler_v2', + 'job_scheduler_v2', + 'cleanup_cron_job', + 'dataset_short_period_scheduler', + 'dataset_long_period_scheduler', + 'project_scheduler_v2', + 'tee_create_runner', + 'tee_resource_check_runner', + 'model_job_scheduler_runner', + 'model_job_group_scheduler_runner', + 'model_job_group_long_period_scheduler_runner', + ] + + def setUp(self): + super().setUp() + scheduler_item_on = SchedulerItem(id=self._ITEM_ON_ID, name='test_item_on', status=ItemStatus.ON.value) + scheduler_item_off = SchedulerItem(name='test_item_off', status=ItemStatus.OFF.value) + with db.session_scope() as session: + session.add_all([scheduler_item_on, scheduler_item_off]) + session.commit() + scheduler_runner_init = SchedulerRunner(id=0, item_id=self._ITEM_ON_ID, status=RunnerStatus.INIT.value) + scheduler_runner_running_1 = SchedulerRunner(id=1, item_id=self._ITEM_ON_ID, status=RunnerStatus.RUNNING.value) + scheduler_runner_running_2 = SchedulerRunner(id=2, item_id=self._ITEM_ON_ID, status=RunnerStatus.RUNNING.value) + + with db.session_scope() as session: + session.add_all([scheduler_runner_init, scheduler_runner_running_1, scheduler_runner_running_2]) + session.commit() + + def test_get_scheduler_item(self): + # test get all scheduler item + data = self.get_response_data(self.get_helper('/api/v2/debug/scheduler_items')) + # there exists a preset scheduler item + self.assertCountEqual([d['name'] for d in data], self._PRESET_SCHEDULER_ITEM) + # test get scheduler item with status + data = self.get_response_data(self.get_helper('/api/v2/debug/scheduler_items?status=0')) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'test_item_off') + # test get recent scheduler runners + data = self.get_response_data(self.get_helper(f'/api/v2/debug/scheduler_items?id={self._ITEM_ON_ID}')) + self.assertEqual(len(data), 3) + self.assertEqual(data[0]['status'], RunnerStatus.INIT.value) + + def test_get_scheduler_runner(self): + # test get running runners + response = self.get_helper('/api/v2/debug/scheduler_runners?status=1') + self.assertEqual(response.status_code, HTTPStatus.OK) + 
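# setUp seeded exactly two runners in RUNNING state (ids 1 and 2) +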
data = self.get_response_data(response) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['id'], 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/e2e/BUILD.bazel new file mode 100644 index 000000000..27174d95c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/e2e/BUILD.bazel @@ -0,0 +1,74 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_pyyaml//:pkg", + ], +) + +py_library( + name = "controllers_lib", + srcs = ["controllers.py"], + imports = ["../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_kubernetes//:pkg", + ], +) + +py_test( + name = "controllers_test", + size = "small", + srcs = [ + "controllers_test.py", + ], + imports = ["../.."], + deps = [ + ":controllers_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":controllers_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_flasgger//:pkg", + "@common_flask_restful//:pkg", + "@common_kubernetes//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_test", + size = "medium", + srcs = ["apis_test.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/__init__.py b/web_console_v2/api/fedlearner_webconsole/e2e/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/apis.py b/web_console_v2/api/fedlearner_webconsole/e2e/apis.py new file mode 100644 index 000000000..404c276ac --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/e2e/apis.py @@ -0,0 +1,145 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from flask_restful import Api, Resource
+from kubernetes.client import ApiException
+from marshmallow import validate, post_load
+from webargs.flaskparser import use_args
+from flasgger import Schema, fields
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.e2e.controllers import ROLES_MAPPING, get_job, get_job_logs, initiate_all_tests
+from fedlearner_webconsole.exceptions import NotFoundException, InvalidArgumentException
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.proto.e2e_pb2 import InitiateE2eJobsParameter
+from fedlearner_webconsole.swagger.models import schema_manager
+from fedlearner_webconsole.utils.flask_utils import make_flask_response
+from fedlearner_webconsole.auth.third_party_sso import credentials_required
+from fedlearner_webconsole.utils.decorators.pp_flask import admin_required
+
+
+class E2eJobsApi(Resource):
+
+    @credentials_required
+    @admin_required
+    def get(self, job_name: str):
+        """Get an existing job
+        ---
+        tags:
+          - e2e
+        description: get a job
+        parameters:
+        - in: path
+          name: job_name
+          required: true
+          schema:
+            type: string
+          description: The name of the job
+        responses:
+          200:
+            description: The corresponding job
+            content:
+              application/json:
+                schema:
+                  type: object
+                  properties:
+                    job_name:
+                      type: string
+                      description: The name of the job
+                    status:
+                      type: object
+                      description: The status of the job
+                    log:
+                      type: array
+                      items:
+                        type: string
+                      description: The log of the job; if the job is still active, an empty list is returned
+          404:
+            description: The job is not found
+        """
+        try:
+            status = get_job(job_name)['status']
+        except ApiException as e:
+            raise NotFoundException(f'failed to find job {job_name}') from e
+        # if the job is still active, do not query for logs
+        log = get_job_logs(job_name) if 'active' not in status else []
+        return make_flask_response(data={'job_name': job_name, 'status': status, 'log': log})
+
+
+# The swagger tooling strips a trailing "Schema" from the class name, so this schema is
+# referenced as "#/definitions/InitiateE2eJobsParameter"
+class InitiateE2eJobsParameterSchema(Schema):
+    role = fields.String(required=True, validate=validate.OneOf(ROLES_MAPPING.keys()))
+    name_prefix = fields.String(required=True, validate=validate.Length(min=5))
+    project_name = fields.String(required=True, validate=validate.Length(min=1))
+    e2e_image_uri = fields.String(required=True, validate=lambda x: 'fedlearner_e2e:' in x)
+    fedlearner_image_uri = fields.String(required=True, validate=lambda x: 'fedlearner:' in x)
+    platform_endpoint = fields.String(required=False,
+                                      load_default='http://fedlearner-fedlearner-web-console-v2-http:1989',
+                                      validate=validate.URL(require_tld=False))
+
+    @post_load
+    def make_initiate_e2e_jobs_parameter(self, data, **kwargs) -> InitiateE2eJobsParameter:
+        del kwargs
+        return InitiateE2eJobsParameter(**data)
+
+
+class InitiateE2eJobsApi(Resource):
+
+    @credentials_required
+    @admin_required
+    @use_args(InitiateE2eJobsParameterSchema(), location='json')
+    def post(self, params: InitiateE2eJobsParameter):
+        """Initiate a series of E2E jobs
+        ---
+        tags:
+          - e2e
+        description: initiate a series of E2E jobs
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/InitiateE2eJobsParameter'
+        responses:
+          201:
+            description: Jobs are launched and job names are returned
+            content:
application/json: + schema: + type: array + items: + type: object + properties: + job_type: + type: string + job_name: + type: string + """ + with db.session_scope() as session: + project = session.query(Project).filter(Project.name == params.project_name).first() + if project is None: + raise InvalidArgumentException(f'failed to find project with name={params.project_name}') + try: + jobs = initiate_all_tests(params) + except ValueError as e: + raise InvalidArgumentException(str(e)) from e + return make_flask_response(jobs) + + +def initialize_e2e_apis(api: Api): + api.add_resource(E2eJobsApi, '/e2e_jobs/') + api.add_resource(InitiateE2eJobsApi, '/e2e_jobs:initiate') + schema_manager.append(InitiateE2eJobsParameterSchema) diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/apis_test.py b/web_console_v2/api/fedlearner_webconsole/e2e/apis_test.py new file mode 100644 index 000000000..0eaff12d2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/e2e/apis_test.py @@ -0,0 +1,110 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from json import loads +from unittest import main +from unittest.mock import patch + +from testing.common import BaseTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project + + +class InitiateE2eJobsApiTest(BaseTestCase): + + def test_post(self): + self.signin_as_admin() + response = self.post_helper( + '/api/v2/e2e_jobs:initiate', { + 'role': 'some_role', + 'name_prefix': 'test', + 'project_name': '', + 'e2e_image_uri': 'invalid', + 'fedlearner_image_uri': 'invalid', + 'platform_endpoint': 'invalid' + }) + self.assert400(response) + error_details = loads(response.data)['details']['json'] + self.assertRegex(error_details['role'][0], 'coordinator, participant') + self.assertRegex(error_details['name_prefix'][0], 'minimum length 5') + self.assertRegex(error_details['project_name'][0], 'minimum length 1') + self.assertRegex(error_details['e2e_image_uri'][0], 'Invalid value.') + self.assertRegex(error_details['fedlearner_image_uri'][0], 'Invalid value.') + self.assertRegex(error_details['platform_endpoint'][0], 'Not a valid URL') + + response = self.post_helper( + '/api/v2/e2e_jobs:initiate', { + 'role': 'coordinator', + 'name_prefix': 'test_me', + 'project_name': 'project', + 'e2e_image_uri': 'fedlearner_e2e:hey', + 'fedlearner_image_uri': 'fedlearner:hey', + 'platform_endpoint': 'hey-hello:80/index.html' + }) + self.assert400(response) + error_details = loads(response.data)['details']['json'] + self.assertIsNone(error_details.get('role')) + self.assertIsNone(error_details.get('name_prefix')) + self.assertIsNone(error_details.get('project_name')) + self.assertIsNone(error_details.get('e2e_image_uri')) + self.assertIsNone(error_details.get('fedlearner_image_uri')) + self.assertRegex(error_details['platform_endpoint'][0], 'Not a valid URL') + + response = self.post_helper( + '/api/v2/e2e_jobs:initiate', { + 'role': 'coordinator', + 
'name_prefix': 'test_me', + 'project_name': 'project', + 'e2e_image_uri': 'fedlearner_e2e:hey', + 'fedlearner_image_uri': 'fedlearner:hey', + 'platform_endpoint': 'http://hey-hello:80/index.html' + }) + self.assert400(response) + error_details = loads(response.data)['details'] + self.assertRegex(error_details, 'failed to find project') + + with db.session_scope() as session: + session.add(Project(id=1000, name='project')) + session.commit() + + response = self.post_helper( + '/api/v2/e2e_jobs:initiate', { + 'role': 'coordinator', + 'name_prefix': 'test_me', + 'project_name': 'project', + 'e2e_image_uri': 'fedlearner_e2e:hey', + 'fedlearner_image_uri': 'fedlearner:hey', + 'platform_endpoint': 'http://hey-hello:80/index.html' + }) + self.assert400(response) + error_details = loads(response.data)['details'] + self.assertRegex(error_details, r'job with job_name=[\w-]* exists') + + with patch('fedlearner_webconsole.e2e.apis.initiate_all_tests') as mock_initiate_all_tests: + mock_initiate_all_tests.return_value = [{}] + response = self.post_helper( + '/api/v2/e2e_jobs:initiate', { + 'role': 'coordinator', + 'name_prefix': 'test_me', + 'project_name': 'project', + 'e2e_image_uri': 'fedlearner_e2e:hey', + 'fedlearner_image_uri': 'fedlearner:hey', + 'platform_endpoint': 'http://hey-hello:80/index.html' + }) + self.assert200(response) + + +if __name__ == '__main__': + main() diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/controllers.py b/web_console_v2/api/fedlearner_webconsole/e2e/controllers.py new file mode 100644 index 000000000..99f0e12a1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/e2e/controllers.py @@ -0,0 +1,78 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
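The `InitiateE2eJobsParameterSchema` above combines marshmallow validation with `@post_load`, so the handler receives a typed message instead of a raw dict. Below is a stand-alone sketch of the same pattern, using a plain dataclass in place of the generated `InitiateE2eJobsParameter` proto so it runs without the generated code:

```python
# Stand-alone sketch of the "validate, then convert to a typed object" pattern
# used by InitiateE2eJobsParameterSchema. JobParams is a dataclass stand-in
# for the protobuf message, so this sketch has no generated-code dependency.
from dataclasses import dataclass

from marshmallow import Schema, fields, post_load, validate


@dataclass
class JobParams:
    role: str
    name_prefix: str


class JobParamsSchema(Schema):
    role = fields.String(required=True, validate=validate.OneOf(['coordinator', 'participant']))
    name_prefix = fields.String(required=True, validate=validate.Length(min=5))

    @post_load
    def to_params(self, data, **kwargs) -> JobParams:
        del kwargs  # unused, but required by marshmallow's hook signature
        return JobParams(**data)


# Valid payloads come back as JobParams; invalid ones raise ValidationError with
# per-field messages, which webargs turns into the 400 "details" seen in the tests.
params = JobParamsSchema().load({'role': 'coordinator', 'name_prefix': 'test_me'})
assert params == JobParams(role='coordinator', name_prefix='test_me')
```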
+# + +from typing import Dict, List, Tuple + +from kubernetes.client import ApiException + +from envs import Envs +from fedlearner_webconsole.e2e.utils import e2e_job_to_dict +from fedlearner_webconsole.proto.e2e_pb2 import E2eJob, InitiateE2eJobsParameter +from fedlearner_webconsole.k8s.k8s_client import k8s_client + +COORDINATOR_TESTS = { + 'fed-workflow': 'scripts/auto_e2e/fed_workflow/test_coordinator.py', + 'vertical-dataset-model-serving': 'scripts/auto_e2e/vertical_dataset_model_serving/test_coordinator.py' +} + +PARTICIPANT_TESTS = { + 'fed-workflow': 'scripts/auto_e2e/fed_workflow/test_participant.py', + 'vertical-dataset-model-serving': 'scripts/auto_e2e/vertical_dataset_model_serving/test_participant.py' +} + +ROLES_MAPPING: Dict[str, Dict] = {'coordinator': COORDINATOR_TESTS, 'participant': PARTICIPANT_TESTS} + + +def start_job(e2e_job: E2eJob): + try: + get_job(e2e_job.job_name) + raise ValueError(f'failed to start {e2e_job.job_name}; job with job_name={e2e_job.job_name} exists') + except ApiException: + pass + k8s_client.create_app(e2e_job_to_dict(e2e_job), group='batch', version='v1', plural='jobs') + + +def get_job(job_name: str) -> dict: + return k8s_client.crds.get_namespaced_custom_object(group='batch', + version='v1', + namespace=Envs.K8S_NAMESPACE, + plural='jobs', + name=job_name) + + +def get_job_logs(job_name: str) -> List[str]: + return k8s_client.get_pod_log(job_name, Envs.K8S_NAMESPACE, 30) + + +def generate_job_list(params: InitiateE2eJobsParameter) -> List[Tuple[str, E2eJob]]: + jobs = [] + fed_jobs = ROLES_MAPPING[params.role] + for job_type, script_path in fed_jobs.items(): + jobs.append((job_type, + E2eJob(project_name=params.project_name, + script_path=script_path, + fedlearner_image_uri=params.fedlearner_image_uri, + e2e_image_uri=params.e2e_image_uri, + job_name=f'auto-e2e-{params.name_prefix}-{job_type}', + platform_endpoint=params.platform_endpoint, + name_prefix=f'auto-e2e-{params.name_prefix}'))) + return jobs + + +def initiate_all_tests(params: InitiateE2eJobsParameter) -> List[Dict[str, str]]: + jobs = generate_job_list(params) + for _, job in jobs: + start_job(job) + return [{'job_type': job_type, 'job_name': job.job_name} for job_type, job in jobs] diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/controllers_test.py b/web_console_v2/api/fedlearner_webconsole/e2e/controllers_test.py new file mode 100644 index 000000000..52ed19c07 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/e2e/controllers_test.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
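`start_job` in controllers.py above treats a 404 from the probe as the happy path: a job may only be created when fetching it fails. The following is an illustrative reduction of that control flow; `ApiError` and the in-memory `_JOBS` dict stand in for `kubernetes.client.ApiException` and the cluster, and are not the real APIs:

```python
class ApiError(Exception):
    """Stands in for kubernetes.client.ApiException."""


_JOBS = {}  # fake cluster state


def get_job(name: str) -> dict:
    if name not in _JOBS:
        raise ApiError(f'{name} not found')
    return _JOBS[name]


def start_job(name: str, spec: dict):
    try:
        get_job(name)
        # The probe succeeded, so a job with this name already exists.
        raise ValueError(f'failed to start {name}; job with job_name={name} exists')
    except ApiError:
        pass  # not found is the expected case: safe to create
    _JOBS[name] = spec


start_job('auto-e2e-test-fed-workflow', {'image': 'fedlearner_e2e:hey'})
try:
    start_job('auto-e2e-test-fed-workflow', {'image': 'fedlearner_e2e:hey'})
except ValueError as e:
    print(e)  # a second launch with the same name is rejected
```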
+# + +import unittest +from unittest.mock import Mock, patch, call + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.e2e.controllers import initiate_all_tests +from fedlearner_webconsole.proto.e2e_pb2 import E2eJob, InitiateE2eJobsParameter + + +class E2eControllerTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.e2e.controllers.start_job') + def test_initiate_participant_tests(self, start_job_mock: Mock): + start_job_mock.return_value = None + self.assertRaises(KeyError, initiate_all_tests, + InitiateE2eJobsParameter(role='invalid_role', platform_endpoint='some_uri.com')) + jobs = initiate_all_tests( + InitiateE2eJobsParameter(role='participant', + name_prefix='test', + project_name='hello', + e2e_image_uri='some_image', + fedlearner_image_uri='some_image', + platform_endpoint='some_uri.com')) + self.assertEqual([{ + 'job_name': 'auto-e2e-test-fed-workflow', + 'job_type': 'fed-workflow' + }, { + 'job_name': 'auto-e2e-test-vertical-dataset-model-serving', + 'job_type': 'vertical-dataset-model-serving' + }], jobs) + + self.assertEqual([ + call( + E2eJob(project_name='hello', + script_path='scripts/auto_e2e/fed_workflow/test_participant.py', + fedlearner_image_uri='some_image', + e2e_image_uri='some_image', + job_name='auto-e2e-test-fed-workflow', + platform_endpoint='some_uri.com', + name_prefix='auto-e2e-test')), + call( + E2eJob(project_name='hello', + script_path='scripts/auto_e2e/vertical_dataset_model_serving/test_participant.py', + fedlearner_image_uri='some_image', + e2e_image_uri='some_image', + job_name='auto-e2e-test-vertical-dataset-model-serving', + platform_endpoint='some_uri.com', + name_prefix='auto-e2e-test')), + ], start_job_mock.call_args_list) + + @patch('fedlearner_webconsole.e2e.controllers.start_job') + def test_initiate_coordinator_tests(self, start_job_mock: Mock): + start_job_mock.return_value = None + jobs = initiate_all_tests( + InitiateE2eJobsParameter(role='coordinator', + name_prefix='test', + project_name='hello', + e2e_image_uri='some_image', + fedlearner_image_uri='some_image', + platform_endpoint='some_uri.com')) + self.assertEqual([{ + 'job_name': 'auto-e2e-test-fed-workflow', + 'job_type': 'fed-workflow' + }, { + 'job_name': 'auto-e2e-test-vertical-dataset-model-serving', + 'job_type': 'vertical-dataset-model-serving' + }], jobs) + + self.assertEqual([ + call( + E2eJob(project_name='hello', + script_path='scripts/auto_e2e/fed_workflow/test_coordinator.py', + fedlearner_image_uri='some_image', + e2e_image_uri='some_image', + job_name='auto-e2e-test-fed-workflow', + platform_endpoint='some_uri.com', + name_prefix='auto-e2e-test')), + call( + E2eJob(project_name='hello', + script_path='scripts/auto_e2e/vertical_dataset_model_serving/test_coordinator.py', + fedlearner_image_uri='some_image', + e2e_image_uri='some_image', + job_name='auto-e2e-test-vertical-dataset-model-serving', + platform_endpoint='some_uri.com', + name_prefix='auto-e2e-test')), + ], start_job_mock.call_args_list) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/e2e/utils.py b/web_console_v2/api/fedlearner_webconsole/e2e/utils.py new file mode 100644 index 000000000..afc15b884 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/e2e/utils.py @@ -0,0 +1,70 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from string import Template + +import yaml + +from fedlearner_webconsole.proto.e2e_pb2 import E2eJob + + +def e2e_job_model_to_yaml(job: E2eJob) -> str: + return _E2E_JOB_TEMPLATE.substitute( + job_name=job.job_name, + name_prefix=job.name_prefix, + e2e_image_uri=job.e2e_image_uri, + project_name=job.project_name, + platform_endpoint=job.platform_endpoint, + fedlearner_image_uri=job.fedlearner_image_uri, + script_path=job.script_path, + ) + + +def e2e_job_to_dict(job: E2eJob) -> dict: + return yaml.load(e2e_job_model_to_yaml(job), Loader=yaml.Loader) + + +_E2E_JOB_TEMPLATE = Template("""apiVersion: batch/v1 +kind: Job +metadata: + name: $job_name + labels: + owner: wangsen.0914 + psm: data.aml.fl +spec: + template: + spec: + containers: + - name: $job_name + image: $e2e_image_uri + env: + - name: PYTHONPATH + value: /app + - name: PROJECT_NAME + value: $project_name + - name: PLATFORM_ENDPOINT + value: $platform_endpoint + - name: FEDLEARNER_IMAGE + value: $fedlearner_image_uri + - name: NAME_PREFIX + value: $name_prefix + command: + - python + - $script_path + imagePullSecrets: + - name: regcred + restartPolicy: Never + backoffLimit: 0 +""") diff --git a/web_console_v2/api/fedlearner_webconsole/exceptions.py b/web_console_v2/api/fedlearner_webconsole/exceptions.py index 3de880de0..1d1695639 100644 --- a/web_console_v2/api/fedlearner_webconsole/exceptions.py +++ b/web_console_v2/api/fedlearner_webconsole/exceptions.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ class WebConsoleApiException(Exception): + def __init__(self, status_code, error_code, message, details=None): Exception.__init__(self) self.status_code = status_code @@ -40,40 +41,52 @@ def to_dict(self): class InvalidArgumentException(WebConsoleApiException): + + def __init__(self, details): + WebConsoleApiException.__init__(self, HTTPStatus.BAD_REQUEST, 400, 'Invalid argument or payload.', details) + + +class NetworkException(WebConsoleApiException): + def __init__(self, details): - WebConsoleApiException.__init__(self, HTTPStatus.BAD_REQUEST, 400, - 'Invalid argument or payload.', details) + WebConsoleApiException.__init__(self, HTTPStatus.BAD_REQUEST, 400, 'Network exception', details) class NotFoundException(WebConsoleApiException): + def __init__(self, message=None): - WebConsoleApiException.__init__( - self, HTTPStatus.NOT_FOUND, 404, - message if message else 'Resource not found.') + WebConsoleApiException.__init__(self, HTTPStatus.NOT_FOUND, 404, message if message else 'Resource not found.') class UnauthorizedException(WebConsoleApiException): + def __init__(self, message): - WebConsoleApiException.__init__(self, HTTPStatus.UNAUTHORIZED, - 401, message) + WebConsoleApiException.__init__(self, HTTPStatus.UNAUTHORIZED, 401, message) class NoAccessException(WebConsoleApiException): + def __init__(self, message): - WebConsoleApiException.__init__(self, HTTPStatus.FORBIDDEN, - 403, message) + WebConsoleApiException.__init__(self, HTTPStatus.FORBIDDEN, 403, message) + + +class MethodNotAllowedException(WebConsoleApiException): + + def __init__(self, message): + WebConsoleApiException.__init__(self, HTTPStatus.METHOD_NOT_ALLOWED, 405, message) class ResourceConflictException(WebConsoleApiException): + def __init__(self, message): WebConsoleApiException.__init__(self, HTTPStatus.CONFLICT, 409, message) class InternalException(WebConsoleApiException): + def __init__(self, details=None): - WebConsoleApiException.__init__( - self, HTTPStatus.INTERNAL_SERVER_ERROR, 500, - 'Internal Error met when handling the request', details) + WebConsoleApiException.__init__(self, HTTPStatus.INTERNAL_SERVER_ERROR, 500, + 'Internal Error met when handling the request', details) def make_response(exception: WebConsoleApiException): diff --git a/web_console_v2/api/fedlearner_webconsole/exceptions_test.py b/web_console_v2/api/fedlearner_webconsole/exceptions_test.py new file mode 100644 index 000000000..ba866b29b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/exceptions_test.py @@ -0,0 +1,53 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import unittest + +from http import HTTPStatus +from fedlearner_webconsole.exceptions import (InvalidArgumentException, NotFoundException) + + +class ExceptionsTest(unittest.TestCase): + + def test_invalid_argument_exception(self): + """Checks if the information of the exception is correct.""" + exception = InvalidArgumentException(['123', 'df']) + self.assertEqual(exception.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual(exception.to_dict(), { + 'code': 400, + 'message': 'Invalid argument or payload.', + 'details': [ + '123', + 'df', + ] + }) + + def test_not_found_exception(self): + exception1 = NotFoundException('User A not found.') + self.assertEqual(exception1.status_code, HTTPStatus.NOT_FOUND) + self.assertEqual(exception1.to_dict(), { + 'code': 404, + 'message': 'User A not found.', + }) + exception2 = NotFoundException() + self.assertEqual(exception2.status_code, HTTPStatus.NOT_FOUND) + self.assertEqual(exception2.to_dict(), { + 'code': 404, + 'message': 'Resource not found.', + }) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/file/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/file/BUILD.bazel new file mode 100644 index 000000000..34f8b8f8b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/file/BUILD.bazel @@ -0,0 +1,38 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask//:pkg", + "@common_flask_restful//:pkg", + "@common_tensorflow//:pkg", + "@common_webargs//:pkg", + "@common_werkzeug//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/testing:common_lib", + "@common_werkzeug//:pkg", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/file/__init__.py b/web_console_v2/api/fedlearner_webconsole/file/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/file/apis.py b/web_console_v2/api/fedlearner_webconsole/file/apis.py new file mode 100644 index 000000000..034192294 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/file/apis.py @@ -0,0 +1,290 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
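All of the exception classes in the exceptions.py diff above reduce to one `(status_code, code, message, details)` shape, which is what lets a single `make_response` serialize any of them. Here is a self-contained sketch of that contract; it mirrors the payloads asserted in exceptions_test.py rather than the exact implementation:

```python
from http import HTTPStatus


class ApiError(Exception):
    """Mirrors the payload contract of WebConsoleApiException (a sketch, not the real class)."""

    def __init__(self, status_code, code, message, details=None):
        super().__init__(message)
        self.status_code = status_code
        self.code = code
        self.message = message
        self.details = details

    def to_dict(self):
        dct = {'code': self.code, 'message': self.message}
        if self.details is not None:
            dct['details'] = self.details
        return dct


err = ApiError(HTTPStatus.BAD_REQUEST, 400, 'Invalid argument or payload.', details=['123', 'df'])
assert err.to_dict() == {'code': 400, 'message': 'Invalid argument or payload.', 'details': ['123', 'df']}
```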
+
+# coding: utf-8
+import enum
+import os
+import tempfile
+import logging
+from datetime import datetime
+from typing import List, Optional
+from urllib.parse import unquote
+from werkzeug.formparser import FileStorage
+from werkzeug.utils import secure_filename
+from io import BytesIO
+from envs import Envs
+from flask import send_file
+from flask_restful import Resource, Api
+from google.protobuf.json_format import MessageToDict
+from webargs.flaskparser import use_kwargs
+from webargs import fields
+from tensorflow.io import gfile
+
+from fedlearner_webconsole.proto import common_pb2
+from fedlearner_webconsole.auth.third_party_sso import credentials_required
+from fedlearner_webconsole.exceptions import (InvalidArgumentException, NoAccessException, NotFoundException)
+from fedlearner_webconsole.utils.file_manager import FileManager
+from fedlearner_webconsole.utils.file_operator import FileOperator
+
+
+class FileType(enum.Enum):
+    FILE = 'file'
+    DATASET = 'dataset'
+
+
+# Files with these extensions will be displayed directly.
+DISPLAYABLE_EXTENTION = ['.txt', '.py']
+UPLOAD_FILE_PATH = f'upload_{FileType.FILE.value}'
+IMAGE_EXTENSION = ('.png', '.jpg', '.jpeg', '.tiff', '.bmp', '.gif')
+FILE_WHITELIST = (Envs.STORAGE_ROOT, 'hdfs://')
+
+
+def _is_path_accessible(path):
+    return path.startswith(FILE_WHITELIST)
+
+
+def _is_image_extension(filename):
+    return filename.lower().endswith(IMAGE_EXTENSION)
+
+
+class FileApi(Resource):
+
+    def __init__(self):
+        self._file_manager = FileManager()
+
+    @credentials_required
+    @use_kwargs({'path': fields.String(required=True, help='the filepath that you want to read')}, location='query')
+    def get(self, path: str):
+        """Get file content by filepath
+        ---
+        tags:
+          - file
+        description: >
+          Get file content by filepath.
+          Note that this API isn't designed for binary content.
+        parameters:
+        - in: query
+          name: path
+          schema:
+            type: string
+        responses:
+          200:
+            description: content of the specified path
+            content:
+              application/json:
+                schema:
+                  type: string
+        """
+        filepath = path
+        if not _is_path_accessible(filepath):
+            raise NoAccessException('access to this file or directory is not allowed')
+        content = self._file_manager.read(filepath)
+        return {'data': content}
+
+
+class FilesApi(Resource):
+
+    def __init__(self):
+        self._storage_root = Envs.STORAGE_ROOT
+        self._file_manager = FileManager()
+        self._file_operator = FileOperator()
+        self._file_dir = os.path.join(self._storage_root, UPLOAD_FILE_PATH)
+        self._file_manager.mkdir(self._file_dir)
+        # keep aligned with the original upload directory
+        self._dataset_dir = os.path.join(self._storage_root, 'upload')
+        self._file_manager.mkdir(self._dataset_dir)
+
+    @credentials_required
+    @use_kwargs({'directory': fields.String(required=False, load_default=None)}, location='query')
+    def get(self, directory: Optional[str]):
+        """Get files and directories under some directory
+        ---
+        tags:
+          - file
+        description: Get files and directories under some directory
+        parameters:
+        - in: query
+          name: directory
+          schema:
+            type: string
+        responses:
+          200:
+            description: files and directories
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    type: object
+                    properties:
+                      path:
+                        type: string
+                      size:
+                        type: integer
+                      mtime:
+                        type: integer
+                      is_directory:
+                        type: boolean
+        """
+        if directory is None:
+            directory = os.path.join(self._storage_root, 'upload')
+        if not _is_path_accessible(directory):
+            raise NoAccessException('access to this file or directory is not allowed')
+        if not self._file_manager.isdir(directory):
+            raise NotFoundException('directory does not exist')
+        files = self._file_manager.ls(directory, include_directory=True)
+        return {'data': [dict(file._asdict()) for file in files]}
+
+    @credentials_required
+    @use_kwargs(
+        {
+            'kind':
+                fields.String(required=False, load_default=FileType.FILE.value, help='file type'),
+            'id':
+                fields.String(required=False,
+                              load_default='',
+                              help='id to locate the file upload location. 
' + 'For example, use jobs/job_id for algorithm ' + 'upload for a certain job.'), + 'extract': + fields.String( + required=False, load_default='False', help='If it is necessary to ' + 'extract the uploaded file.'), + }, + location='form') + @use_kwargs({'file': fields.List(fields.Field(required=True))}, location='files') + def post(self, kind: str, id: str, extract: str, file: List[FileStorage]): # pylint: disable=redefined-builtin + """Post one or a set of files for upload + --- + tags: + - file + description: Post one or a set of files for upload + parameters: + - in: form + name: kind + schema: + type: string + - in: form + name: id + schema: + type: string + - in: form + name: extract + schema: + type: string + - in: form + name: file + schema: + type: array + items: + type: string + format: binary + description: list of files in binary format + responses: + 200: + description: information of uploaded files + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.UploadedFiles' + """ + location_id = id + extract = bool(extract.lower() == 'true') + + upload_files = file + if extract: + if len(upload_files) != 1: + raise InvalidArgumentException('Extraction only allows 1 file each time.') + + # file root dir: {storage_root}/upload_file/{location_id}/ + root_dir = os.path.join(self._file_dir, location_id) + if kind == FileType.DATASET.value: + # TODO: clean dataset regularly + location_id = datetime.utcnow().strftime('%Y%m%d_%H%M%S%f') + root_dir = os.path.join(self._dataset_dir, location_id) + + response = common_pb2.UploadedFiles() + # file root dir: {storage_root}/upload_file/{location_id}/{datetime}/ + self._file_manager.mkdir(root_dir) + for upload_file in upload_files: + file_content: bytes = upload_file.read() + if extract: + secure_tarfile_name = secure_filename(os.path.basename(upload_file.filename)) + target_dir_path = os.path.join(root_dir, secure_tarfile_name.split('.')[0]) + self._file_manager.mkdir(target_dir_path) + logging.info(f'target_dir_path:{target_dir_path}') + extension = '.' + secure_tarfile_name.split('.')[-1] + with tempfile.NamedTemporaryFile(suffix=extension) as f: + f.write(file_content) + self._file_operator.extract_to(f.name, target_dir_path) + response.uploaded_files.append( + common_pb2.UploadedFile(display_file_name=secure_tarfile_name, + internal_path=target_dir_path, + internal_directory=target_dir_path)) + else: + # copy the file to the target destination. 
+                secure_file_name = secure_filename(os.path.basename(upload_file.filename))
+                response.uploaded_files.append(
+                    self._save_secured_file(root_dir,
+                                            display_name=secure_file_name,
+                                            secure_file_name=secure_file_name,
+                                            content=file_content))
+        return {'data': MessageToDict(response, preserving_proto_field_name=True)}
+
+    def _save_secured_file(self, root_folder: str, display_name: str, secure_file_name: str,
+                           content: bytes) -> common_pb2.UploadedFile:
+        """Saves the file via the file manager and returns its metadata for the UI."""
+        self._file_manager.write(os.path.join(root_folder, secure_file_name), content)
+        return common_pb2.UploadedFile(display_file_name=display_name,
+                                       internal_path=os.path.join(root_folder, secure_file_name),
+                                       internal_directory=root_folder)
+
+
+class ImageApi(Resource):
+
+    def __init__(self):
+        self._file_manager = FileManager()
+
+    @use_kwargs({'name': fields.String(required=True, help='image name that you want')}, location='query')
+    def get(self, name: str):
+        """Get image content by image path
+        ---
+        tags:
+          - file
+        description: Get image content by image path
+        parameters:
+        - in: query
+          name: name
+          schema:
+            type: string
+          description: file path of image
+        responses:
+          200:
+            description: the image content in binary
+            content:
+              image/jpeg:
+                type: string
+                format: binary
+        """
+        if not _is_path_accessible(name):
+            raise NoAccessException('access to this file or directory is not allowed')
+        if not _is_image_extension(name):
+            raise InvalidArgumentException('the requested file is not an image')
+        content = gfile.GFile(unquote(name), 'rb').read()
+        return send_file(BytesIO(content), mimetype='image/jpeg')
+
+
+def initialize_files_apis(api: Api):
+    api.add_resource(FilesApi, '/files')
+    api.add_resource(FileApi, '/file')
+    api.add_resource(ImageApi, '/image')
diff --git a/web_console_v2/api/fedlearner_webconsole/file/apis_test.py b/web_console_v2/api/fedlearner_webconsole/file/apis_test.py
new file mode 100644
index 000000000..d4c6f274b
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/file/apis_test.py
@@ -0,0 +1,170 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
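A detail worth noting in `_is_path_accessible` above: `str.startswith` accepts a tuple of prefixes, which is how one check covers both the local `STORAGE_ROOT` and the `hdfs://` scheme. A small sketch of the same guard, with made-up paths:

```python
# The prefixes below are examples; the real whitelist is built from
# Envs.STORAGE_ROOT plus the 'hdfs://' scheme.
FILE_WHITELIST = ('/data/fedlearner', 'hdfs://')


def is_path_accessible(path: str) -> bool:
    # startswith() with a tuple returns True if ANY prefix matches.
    return path.startswith(FILE_WHITELIST)


assert is_path_accessible('/data/fedlearner/upload/f1.txt')
assert is_path_accessible('hdfs://ns1/upload/f1.txt')
assert not is_path_accessible('/etc/passwd')
```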
+# + +import tarfile +import os +import shutil +import tempfile +import unittest +from io import BytesIO +from unittest.mock import patch + +from envs import Envs +from http import HTTPStatus +from pathlib import Path +from collections import namedtuple + +from werkzeug.utils import secure_filename +from werkzeug.datastructures import FileStorage +from testing.common import BaseTestCase + +from fedlearner_webconsole.file.apis import UPLOAD_FILE_PATH +from fedlearner_webconsole.utils.file_manager import FileManager + +BASE_DIR = Envs.BASE_DIR +FakeFileStatistics = namedtuple('FakeFileStatistics', ['length', 'mtime_nsec', 'is_directory']) + +_FAKE_STORAGE_ROOT = str(tempfile.gettempdir()) + + +@patch('fedlearner_webconsole.file.apis.Envs.STORAGE_ROOT', _FAKE_STORAGE_ROOT) +@patch('fedlearner_webconsole.file.apis.FILE_WHITELIST', (_FAKE_STORAGE_ROOT)) +class FilesApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + + self._file_manager = FileManager() + self._tempdir = os.path.join(_FAKE_STORAGE_ROOT, 'upload') + os.makedirs(self._tempdir, exist_ok=True) + subdir = Path(self._tempdir).joinpath('s') + subdir.mkdir(exist_ok=True) + Path(self._tempdir).joinpath('f1.txt').write_text('f1', encoding='utf-8') + Path(self._tempdir).joinpath('f2.txt').write_text('f2f2', encoding='utf-8') + subdir.joinpath('s3.txt').write_text('s3s3s3', encoding='utf-8') + + def tearDown(self): + # Remove the directory after the test + shutil.rmtree(self._tempdir) + + def _get_temp_path(self, file_path: str = None) -> str: + return str(Path(self._tempdir, file_path or '').absolute()) + + def test_get_storage_root(self): + get_response = self.get_helper('/api/v2/files') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + self.assertEqual(len(self.get_response_data(get_response)), 3) + + def test_get_specified_illegal_directory(self): + get_response = self.get_helper('/api/v2/files?directory=/var/log') + self.assertEqual(get_response.status_code, HTTPStatus.FORBIDDEN) + + def test_get_not_exist_directory(self): + fake_dir = os.path.join(_FAKE_STORAGE_ROOT, 'fake_dir') + get_response = self.get_helper(f'/api/v2/files?directory={fake_dir}') + self.assertEqual(get_response.status_code, HTTPStatus.NOT_FOUND) + + def test_upload_files(self): + data = {} + data['file'] = [(BytesIO(b'abcdef'), os.path.join(BASE_DIR, 'test.jpg')), + (BytesIO(b'aaabbb'), os.path.join(BASE_DIR, 'test.txt'))] + data['id'] = 'jobs/123' + upload_response = self.client.post('/api/v2/files', + data=data, + content_type='multipart/form-data', + headers=self._get_headers()) + self.assertEqual(upload_response.status_code, HTTPStatus.OK) + uploaded_files = self.get_response_data(upload_response) + self.assertEqual( + { + 'uploaded_files': [{ + 'display_file_name': + 'test.jpg', + 'internal_path': + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123', secure_filename('test.jpg')), + 'internal_directory': + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123'), + }, { + 'display_file_name': + 'test.txt', + 'internal_path': + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123', secure_filename('test.txt')), + 'internal_directory': + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123'), + }], + }, uploaded_files) + + # Check the saved files. 
+ self.assertEqual( + 'abcdef', + self._file_manager.read( + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123', secure_filename('test.jpg')))) + self.assertEqual( + 'aaabbb', + self._file_manager.read( + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123', secure_filename('test.txt')))) + + # Delete the saved files + self._file_manager.remove( + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123', secure_filename('test.jpg'))) + self._file_manager.remove( + os.path.join(_FAKE_STORAGE_ROOT, UPLOAD_FILE_PATH, 'jobs/123', secure_filename('test.txt'))) + + +@patch('fedlearner_webconsole.file.apis.Envs.STORAGE_ROOT', _FAKE_STORAGE_ROOT) +@patch('fedlearner_webconsole.file.apis.FILE_WHITELIST', (_FAKE_STORAGE_ROOT)) +class FileApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + + self._tempdir = _FAKE_STORAGE_ROOT + os.makedirs(self._tempdir, exist_ok=True) + Path(self._tempdir).joinpath('exists.txt').write_text('Hello World', encoding='utf-8') + + def test_get_file_content_api(self): + get_response = self.get_helper(f'/api/v2/file?path={self._tempdir}/exists.txt') + self.assertEqual(self.get_response_data(get_response), 'Hello World') + + get_response = self.get_helper('/api/v2/file?path=/system/fd.txt') + self.assertEqual(get_response.status_code, HTTPStatus.FORBIDDEN) + + +@patch('fedlearner_webconsole.file.apis.Envs.STORAGE_ROOT', _FAKE_STORAGE_ROOT) +@patch('fedlearner_webconsole.file.apis.FILE_WHITELIST', (_FAKE_STORAGE_ROOT)) +class ImageApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + self.signout_helper() + + self._tempdir = _FAKE_STORAGE_ROOT + os.makedirs(self._tempdir, exist_ok=True) + Path(self._tempdir).joinpath('fake_image.jpg').write_bytes(b'This is a image') + + def test_get_image_content_api(self): + get_response = self.get_helper(f'/api/v2/image?name={self._tempdir}/fake_image.jpg') + self.assertEqual(get_response.data, b'This is a image') + self.assertEqual(get_response.mimetype, 'image/jpeg') + + get_response = self.get_helper(f'/api/v2/image?name={self._tempdir}/fd.txt') + self.assertEqual(get_response.status_code, HTTPStatus.BAD_REQUEST) + + get_response = self.get_helper('/api/v2/image?name=/system/fd.txt') + self.assertEqual(get_response.status_code, HTTPStatus.FORBIDDEN) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/flag/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/flag/BUILD.bazel new file mode 100644 index 000000000..bb5f070c4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/flag/BUILD.bazel @@ -0,0 +1,46 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "@common_flask_restful//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = ["//web_console_v2/api:envs_lib"], +) + +py_test( + name = "models_lib_test", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/flag/__init__.py 
b/web_console_v2/api/fedlearner_webconsole/flag/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/flag/apis.py b/web_console_v2/api/fedlearner_webconsole/flag/apis.py new file mode 100644 index 000000000..017c3e47e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/flag/apis.py @@ -0,0 +1,47 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +from http import HTTPStatus + +from flask_restful import Resource, Api +from fedlearner_webconsole.flag.models import get_flags + + +class FlagsApi(Resource): + + def get(self): + """Get flags + --- + tags: + - flag + responses: + 200: + description: Flags are returned + content: + application/json: + schema: + type: object + additionalProperties: true + example: + FLAG_1: string_value + FLAG_2: true + FLAG_3: 1 + """ + return {'data': get_flags()}, HTTPStatus.OK + + +def initialize_flags_apis(api: Api): + api.add_resource(FlagsApi, '/flags') diff --git a/web_console_v2/api/fedlearner_webconsole/flag/apis_test.py b/web_console_v2/api/fedlearner_webconsole/flag/apis_test.py new file mode 100644 index 000000000..7a6d81567 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/flag/apis_test.py @@ -0,0 +1,35 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +from unittest.mock import patch +from testing.common import BaseTestCase +import unittest + + +class FlagsApisTest(BaseTestCase): + + @patch('fedlearner_webconsole.flag.apis.get_flags') + def test_get_flags(self, get_flags): + get_flags.return_value = {'first_flag': False, 'second_flag': 0} + response = self.get_helper('/api/v2/flags') + flags = self.get_response_data(response) + + self.assertEqual(False, flags.get('first_flag')) + self.assertEqual(0, flags.get('second_flag')) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/flag/models.py b/web_console_v2/api/fedlearner_webconsole/flag/models.py new file mode 100644 index 000000000..d0aabf467 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/flag/models.py @@ -0,0 +1,84 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# coding: utf-8
+import inspect
+import json
+import logging
+from envs import Envs
+
+
+class _Flag(object):
+    FLAGS_DICT = json.loads(Envs.FLAGS)
+
+    def __init__(self, name: str, fallback_value):
+        self.name = name
+        self.value = fallback_value
+        self._merge()
+
+    def _merge(self):
+        """Merges the fallback value with the one set via env"""
+        value_from_env = self.FLAGS_DICT.get(self.name)
+
+        # update the value of a flag only if the env value exists and is of the correct type
+        if value_from_env is not None:
+            if isinstance(value_from_env, type(self.value)):
+                self.value = value_from_env
+                logging.info(f'Setting flag {self.name} to {self.value}.')
+
+            else:
+                logging.warning(f"""
+                Flag {self.name} is set with the wrong type; falling back to {self.value}.
+                Expected: {type(self.value)}; Got: {type(value_from_env)}
+                """)
+
+
+class Flag(object):
+    WORKSPACE_ENABLED = _Flag('workspace_enabled', False)
+    USER_MANAGEMENT_ENABLED = _Flag('user_management_enabled', True)
+    PRESET_TEMPLATE_EDIT_ENABLED = _Flag('preset_template_edit_enabled', False)
+    BCS_SUPPORT_ENABLED = _Flag('bcs_support_enabled', False)
+    TRUSTED_COMPUTING_ENABLED = _Flag('trusted_computing_enabled', True)
+    TEE_MACHINE_DEPLOYED = _Flag('tee_machine_deployed', False)
+    DASHBOARD_ENABLED = _Flag('dashboard_enabled', False)
+    OT_PSI_ENABLED = _Flag('ot_psi_enabled', True)
+    DATASET_STATE_FIX_ENABLED = _Flag('dataset_state_fix_enabled', False)
+    HASH_DATA_JOIN_ENABLED = _Flag('hash_data_join_enabled', False)
+    HELP_DOC_URL = _Flag('help_doc_url', '')
+    MODEL_JOB_GLOBAL_CONFIG_ENABLED = _Flag('model_job_global_config_enabled', False)
+    REVIEW_CENTER_CONFIGURATION = _Flag('review_center_configuration', '{}')
+    # show dataset with auth status but auto authority
+    DATASET_AUTH_STATUS_ENABLED = _Flag('dataset_auth_status_enabled', True)
+    # decide whether to check auth status when creating a dataset_job
+    DATASET_AUTH_STATUS_CHECK_ENABLED = _Flag('dataset_auth_status_check_enabled', False)
+    # set to true after this rpc func is implemented
+    LIST_DATASETS_RPC_ENABLED = _Flag('list_datasets_rpc_enabled', True)
+    # set to true after this rpc func is implemented
+    PENDING_PROJECT_ENABLED = _Flag('pending_project_enabled', True)
+    DATA_BATCH_RERUN_ENABLED = _Flag('data_batch_rerun_enabled', True)
+
+
+def get_flags() -> dict:
+    """Constructs a dictionary of all flags"""
+    dct = {}
+
+    # Gets flags (members of Flag)
+    # Ref: https://stackoverflow.com/questions/9058305/getting-attributes-of-a-class
+    attributes = inspect.getmembers(Flag, lambda a: not inspect.isroutine(a))
+    flags = [a for a in attributes if not (a[0].startswith('__') and a[0].endswith('__'))]
+    for _, flag in flags:
+        dct[flag.name] = flag.value
+
+    return dct
diff --git a/web_console_v2/api/fedlearner_webconsole/flag/models_test.py b/web_console_v2/api/fedlearner_webconsole/flag/models_test.py
new file mode 100644
index 000000000..cf66c6854
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/flag/models_test.py
@@ -0,0 +1,56 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
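The `_Flag._merge` logic above only honors an env override when its JSON type matches the fallback's type; a missing or mistyped value silently keeps the fallback. A compact stand-alone sketch of that rule, without the `Envs` wiring:

```python
import json

# Pretend FLAGS env: second_flag has the wrong type and will be ignored.
flags_env = json.loads('{"first_flag": true, "second_flag": "yes"}')


def resolve(name: str, fallback):
    value = flags_env.get(name)
    # Only accept the env value when it exists and matches the fallback's type.
    if value is not None and isinstance(value, type(fallback)):
        return value
    return fallback


assert resolve('first_flag', False) is True    # overridden by env
assert resolve('second_flag', False) is False  # wrong type -> fallback
assert resolve('third_flag', 0) == 0           # absent -> fallback
```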
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +from unittest.mock import patch +import unittest +from fedlearner_webconsole.flag.models import _Flag, get_flags + + +class FlagMock(object): + FIRST_FLAG = _Flag('first_flag', False) + SECOND_FLAG = _Flag('second_flag', 0) + + +MOCK_ENV_FLAGS = {'first_flag': True, 'second_flag': 1} + + +class FlagsModelsTest(unittest.TestCase): + + @patch('fedlearner_webconsole.flag.models._Flag.FLAGS_DICT', MOCK_ENV_FLAGS) + def test_fallback(self): + # this instance will be modified to True + first_flag = _Flag('first_flag', False) + + # this instance will fallback to False due to type error + second_flag = _Flag('second_flag', False) + + # this instance will fallback to 0 due to the absence of its value in envs + third_flag = _Flag('third_flag', 0) + + self.assertEqual(True, first_flag.value) + self.assertEqual(False, second_flag.value) + self.assertEqual(0, third_flag.value) + + @patch('fedlearner_webconsole.flag.models.Flag', FlagMock) + def test_get_flags(self): + flags = get_flags() + + self.assertEqual(False, flags.get('first_flag')) + self.assertEqual(0, flags.get('second_flag')) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/iam/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/iam/BUILD.bazel new file mode 100644 index 000000000..9829b89bb --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/BUILD.bazel @@ -0,0 +1,153 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "permission_lib", + srcs = ["permission.py"], + imports = ["../.."], + deps = [ + ":resource_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + ], +) + +py_test( + name = "permission_test", + size = "small", + srcs = [ + "permission_test.py", + ], + imports = ["../.."], + deps = [ + ":permission_lib", + ":resource_lib", + ], +) + +py_library( + name = "resource_lib", + srcs = ["resource.py"], + imports = ["../.."], +) + +py_test( + name = "resource_test", + size = "small", + srcs = [ + "resource_test.py", + ], + imports = ["../.."], + deps = [ + ":resource_lib", + ], +) + +py_library( + name = "checker_lib", + srcs = [ + "checker.py", + ], + imports = ["../.."], + deps = [ + ":permission_lib", + "//web_console_v2/api:envs_lib", + ], +) + +py_library( + name = "client_lib", + srcs = [ + "client.py", + ], + imports = ["../.."], + deps = [ + ":checker_lib", + ":permission_lib", + ":resource_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + ], +) + +py_test( + name = "client_test", + size = "small", + srcs = [ + "client_test.py", + ], + imports = ["../.."], + deps = [ + ":client_lib", + ":permission_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + 
"//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + ], +) + +py_library( + name = "iam_required_lib", + srcs = [ + "iam_required.py", + ], + imports = ["../.."], + deps = [ + ":client_lib", + ":permission_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "@common_flask//:pkg", + ], +) + +py_test( + name = "iam_required_integration_test", + size = "medium", + srcs = [ + "iam_required_integration_test.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_base64_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = [ + "apis.py", + ], + imports = ["../.."], + deps = [ + ":client_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + ], +) + +py_test( + name = "apis_test", + size = "small", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + deps = [ + ":permission_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/iam/__init__.py b/web_console_v2/api/fedlearner_webconsole/iam/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/iam/apis.py b/web_console_v2/api/fedlearner_webconsole/iam/apis.py new file mode 100644 index 000000000..a7bd45418 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/apis.py @@ -0,0 +1,63 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Optional + +from flask_restful import Resource +from marshmallow import fields + +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.utils.decorators.pp_flask import use_kwargs +from fedlearner_webconsole.utils.flask_utils import get_current_user, make_flask_response +from fedlearner_webconsole.iam.client import get_iams + + +class CheckPermissionsApi(Resource): + + @credentials_required + @use_kwargs( + { + 'resource': fields.String(required=False, load_default=None), + 'permission': fields.String(required=False, load_default=None), + }, + location='query', + ) + def get(self, resource: Optional[str], permission: Optional[str]): + """Gets all IAM policies. + --- + tags: + - iam + description: gets all IAM policies. + responses: + 200: + description: + content: + application/json: + schema: + type: object + properties: + iams: + description: list of policies + type: array + items: + type: string + """ + user = get_current_user() + result = get_iams(user, resource, permission) + return make_flask_response({'iams': result}) + + +def initialize_iams_apis(api): + api.add_resource(CheckPermissionsApi, '/iams') diff --git a/web_console_v2/api/fedlearner_webconsole/iam/apis_test.py b/web_console_v2/api/fedlearner_webconsole/iam/apis_test.py new file mode 100644 index 000000000..e7556fcf3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/apis_test.py @@ -0,0 +1,32 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest + +from testing.common import BaseTestCase +from fedlearner_webconsole.auth.models import Role +from fedlearner_webconsole.iam.permission import _DEFAULT_PERMISSIONS + + +class IamApisTest(BaseTestCase): + + def test_workflow_with_iam(self): + resp = self.get_helper('/api/v2/iams') + data = self.get_response_data(resp) + self.assertEqual(len(data['iams']), len(_DEFAULT_PERMISSIONS[Role.USER])) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/iam/checker.py b/web_console_v2/api/fedlearner_webconsole/iam/checker.py new file mode 100644 index 000000000..f931a2b98 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/checker.py @@ -0,0 +1,82 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# coding: utf-8
+from abc import ABCMeta, abstractmethod
+from typing import List, Tuple, Optional
+
+from envs import Envs
+from fedlearner_webconsole.iam.permission import Permission
+
+
+class IamChecker(metaclass=ABCMeta):
+
+    @abstractmethod
+    def check(self, identity: str, resource: str, permission: Permission) -> bool:
+        pass
+
+    @abstractmethod
+    def create(self, identity: str, resource: str, permissions: List[Permission]):
+        pass
+
+    @abstractmethod
+    def get(self, identity: str, resource: Optional[str],
+            permission: Optional[Permission]) -> List[Tuple[str, str, Permission]]:
+        pass
+
+
+class ThirdPartyChecker(IamChecker):
+
+    def check(self, identity: str, resource: str, permission: Permission) -> bool:
+        # Calls API according to the configuration
+        return True
+
+    def create(self, identity: str, resource: str, permissions: List[Permission]):
+        # Calls API according to the configuration
+        return
+
+    def get(self, identity: str, resource: Optional[str],
+            permission: Optional[Permission]) -> List[Tuple[str, str, Permission]]:
+        # Calls API according to the configuration; returns an empty list
+        # instead of None so the declared return type holds.
+        return []
+
+
+class TempChecker(IamChecker):
+
+    def __init__(self):
+        self.iams = []
+
+    def check(self, identity: str, resource: str, permission: Permission) -> bool:
+        # In production every check passes; otherwise consult the
+        # in-memory bindings.
+        if Envs.FLASK_ENV == 'production':
+            return True
+        if (identity, resource, permission) in self.iams:
+            return True
+        return False
+
+    def create(self, identity: str, resource: str, permissions: List[Permission]):
+        # Stores the bindings in memory and dedupes them.
+        for permission in permissions:
+            self.iams.append((identity, resource, permission))
+        self.iams = list(set(self.iams))
+
+    def get(self, identity: str, resource: Optional[str],
+            permission: Optional[Permission]) -> List[Tuple[str, str, Permission]]:
+        # Filters by identity, and by resource/permission only when given.
+        # Note: `and` binds tighter than `or`, so the optional filters must
+        # be parenthesized explicitly.
+        return [
+            item for item in self.iams
+            if item[0] == identity and (resource is None or item[1] == resource) and
+            (permission is None or item[2] == permission)
+        ]
+
+
+checker: IamChecker = TempChecker()
diff --git a/web_console_v2/api/fedlearner_webconsole/iam/client.py b/web_console_v2/api/fedlearner_webconsole/iam/client.py
new file mode 100644
index 000000000..8e981bc77
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/iam/client.py
@@ -0,0 +1,55 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
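To make `TempChecker`'s in-memory semantics concrete, a small standalone usage sketch (illustrative only; assumes `Envs.FLASK_ENV` is not `'production'`, where `check()` unconditionally passes):

```python
from fedlearner_webconsole.iam.checker import TempChecker
from fedlearner_webconsole.iam.permission import Permission

c = TempChecker()
c.create('ada', '/projects/1', [Permission.WORKFLOW_PUT, Permission.WORKFLOW_PATCH])
assert c.check('ada', '/projects/1', Permission.WORKFLOW_PUT)
assert not c.check('ada', '/projects/2', Permission.WORKFLOW_PUT)
# With both filters None, get() returns every binding of the identity.
assert len(c.get('ada', None, None)) == 2
```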
+ +# coding: utf-8 +from typing import List, Union, Optional, Tuple + +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.iam.checker import checker +from fedlearner_webconsole.iam.permission import Permission, is_valid_binding +from fedlearner_webconsole.iam.resource import parse_resource_name, Resource, ResourceType +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.iam.permission import get_valid_permissions, \ + get_role_default_permissions + + +def check(username: str, resource_name: str, permission: Permission) -> bool: + resources: List[Resource] = parse_resource_name(resource_name) + # Checks bindings + for resource in resources: + if not is_valid_binding(resource.type, permission): + raise ValueError(f'Invalid binding: {resource.type}-{permission}') + for resource in resources: + if checker.check(username, resource.name, permission): + return True + return False + + +def create_iams_for_resource(resource: Union[Project, Workflow], user: User): + # Should not be used in grpc server. + if isinstance(resource, Project): + resource = f'/projects/{resource.id}' + permissions = get_valid_permissions(ResourceType.PROJECT) + else: + return + checker.create(user.username, resource, permissions) + + +def create_iams_for_user(user: User): + checker.create(user.username, '/', get_role_default_permissions(user.role)) + + +def get_iams(user: User, resource: Optional[str], permission: Optional[Permission]) -> List[Tuple[str, str, str]]: + return [(item[0], item[1], item[2].value) for item in checker.get(user.username, resource, permission)] diff --git a/web_console_v2/api/fedlearner_webconsole/iam/client_test.py b/web_console_v2/api/fedlearner_webconsole/iam/client_test.py new file mode 100644 index 000000000..5d765568e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/client_test.py @@ -0,0 +1,81 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
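An illustrative look (not part of the patch) at the hierarchy walk inside `check()` above: the checker is consulted once per level, from the application root down to the leaf, and the walk short-circuits at the first grant.

```python
from fedlearner_webconsole.iam.resource import parse_resource_name

for resource in parse_resource_name('/projects/123/workflows/3'):
    print(resource.name)
# /
# /projects/123
# /projects/123/workflows/3
```

A grant on `/projects/123` therefore implicitly covers every workflow under that project.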
+# + +import unittest +from unittest.mock import patch, call + +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.iam.client import check, create_iams_for_resource, create_iams_for_user +from fedlearner_webconsole.iam.permission import Permission +from fedlearner_webconsole.project.models import Project +# must import for db analyze +# pylint: disable=unused-import +from fedlearner_webconsole.participant.models import ProjectParticipant, Participant +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.auth.models import Role + + +class ClientTest(unittest.TestCase): + + def test_check_invalid_binding(self): + with self.assertRaises(ValueError) as cm: + check('xiangyuxuan.prs', '/projects/123/workflows/3', Permission.DATASETS_POST) + self.assertIn('Invalid binding', str(cm.exception)) + + @patch('fedlearner_webconsole.iam.client.checker.check') + def test_check_false(self, mock_checker): + mock_checker.return_value = False + self.assertFalse(check('xprs', '/projects/123/workflows/3', Permission.WORKFLOW_PUT)) + calls = [ + call('xprs', '/', Permission.WORKFLOW_PUT), + call('xprs', '/projects/123', Permission.WORKFLOW_PUT), + call('xprs', '/projects/123/workflows/3', Permission.WORKFLOW_PUT), + ] + mock_checker.assert_has_calls(calls) + + @patch('fedlearner_webconsole.iam.client.checker.check') + def test_check_true(self, mock_checker): + mock_checker.side_effect = [False, True] + self.assertTrue(check('prs', '/projects/123/workflows/3', Permission.WORKFLOW_PUT)) + calls = [ + call('prs', '/', Permission.WORKFLOW_PUT), + call('prs', '/projects/123', Permission.WORKFLOW_PUT), + ] + mock_checker.assert_has_calls(calls) + + def test_create_iams_for_resource(self): + username = 'testu' + project_id = 1111 + self.assertFalse(check(username, f'/projects/{project_id}', Permission.PROJECT_PATCH)) + create_iams_for_resource(Project(id=project_id, name='test'), User(username=username)) + self.assertTrue(check(username, f'/projects/{project_id}', Permission.PROJECT_PATCH)) + workflow_id = 3333 + self.assertTrue(check(username, f'/projects/{project_id}/workflows/{workflow_id}', Permission.WORKFLOW_PATCH)) + self.assertFalse(check(username, f'/projects/{project_id+1}/workflows/{workflow_id}', + Permission.WORKFLOW_PATCH)) + + def test_create_iams_for_user(self): + admin = User(username='test_admin', role=Role.ADMIN) + user = User(username='test_user', role=Role.USER) + create_iams_for_user(admin) + create_iams_for_user(user) + project_id = 1 + self.assertTrue(check(admin.username, f'/projects/{project_id}/workflows/123123', Permission.WORKFLOW_PATCH)) + self.assertFalse(check(user.username, f'/projects/{project_id}/workflows/123123', Permission.WORKFLOW_PATCH)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/iam/iam_required.py b/web_console_v2/api/fedlearner_webconsole/iam/iam_required.py new file mode 100644 index 000000000..7395d612d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/iam_required.py @@ -0,0 +1,50 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
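A standalone reminder (not part of the test suite) of the `side_effect` semantics that `test_check_true` above leans on:

```python
from unittest.mock import MagicMock

# side_effect yields one return value per call; exhausting it raises
# StopIteration. The two-element list in test_check_true thus implicitly
# asserts that check() stopped right after the '/projects/123' grant.
mock_check = MagicMock(side_effect=[False, True])
assert mock_check('prs', '/', 'p') is False
assert mock_check('prs', '/projects/123', 'p') is True
```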
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import logging
+from functools import wraps
+
+from flask import request
+
+from fedlearner_webconsole.exceptions import NoAccessException
+from fedlearner_webconsole.utils.const import API_VERSION
+from fedlearner_webconsole.utils.flask_utils import get_current_user
+from fedlearner_webconsole.iam.permission import Permission
+from fedlearner_webconsole.iam.client import check
+
+
+def iam_required(permission: Permission):
+
+    def decorator(fn):
+
+        @wraps(fn)
+        def wrapper(*args, **kwargs):
+            if permission is None:
+                return fn(*args, **kwargs)
+            # Removes the URL prefix (/api/v2/) to get the resource name.
+            resource_name = request.path.rpartition(API_VERSION)[-1]
+            user = get_current_user()
+            try:
+                if not check(user.username, resource_name, permission):
+                    raise NoAccessException('No permission.')
+            except Exception as e:
+                # Defensive programming for internal errors; note that this
+                # also catches and re-raises the NoAccessException above.
+                logging.error(f'Check permission failed: {user.username} {resource_name} {permission}: {str(e)}')
+                raise NoAccessException('No permission.') from e
+            return fn(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
diff --git a/web_console_v2/api/fedlearner_webconsole/iam/iam_required_integration_test.py b/web_console_v2/api/fedlearner_webconsole/iam/iam_required_integration_test.py
new file mode 100644
index 000000000..b977642d0
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/iam/iam_required_integration_test.py
@@ -0,0 +1,103 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
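A hypothetical example of wiring the decorator onto a `flask_restful` resource (the class and route are assumptions for illustration, not code from this diff):

```python
from flask_restful import Resource

from fedlearner_webconsole.auth.third_party_sso import credentials_required
from fedlearner_webconsole.iam.iam_required import iam_required
from fedlearner_webconsole.iam.permission import Permission


class ExampleWorkflowApi(Resource):
    """Hypothetical resource mounted at /projects/<project_id>/workflows/<workflow_id>."""

    @credentials_required  # must come first so get_current_user() is populated
    @iam_required(Permission.WORKFLOW_PATCH)
    def patch(self, project_id: int, workflow_id: int):
        # Reached only if check() grants WORKFLOW_PATCH somewhere along
        # '/', '/projects/<id>' or '/projects/<id>/workflows/<id>'.
        return {'ok': True}
```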
+# + +import unittest +from http import HTTPStatus + +from testing.common import BaseTestCase +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.utils.pp_base64 import base64encode +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.db import db + + +class IamRequiredTest(BaseTestCase): + + def setUp(self): + super().setUp() + new_user = { + 'username': 'test_user', + 'password': base64encode('test_user12312'), + 'email': 'hello@bytedance.com', + 'role': 'USER', + 'name': 'codemonkey', + } + self.signin_as_admin() + resp = self.post_helper('/api/v2/auth/users', data=new_user) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + no_permission_one = User(id=5, username='no_permission_one') + no_permission_one.set_password('no_permission_one') + session.add(no_permission_one) + session.commit() + + def test_workflow_with_iam(self): + project_id = 123 + workflow = Workflow( + name='test-workflow', + project_id=project_id, + config=WorkflowDefinition().SerializeToString(), + forkable=False, + state=WorkflowState.READY, + ) + with db.session_scope() as session: + session.add(workflow) + session.commit() + self.signin_helper() + response = self.patch_helper(f'/api/v2/projects/{project_id}/workflows/{workflow.id}', + data={'target_state': 'RUNNING'}) + self.assertEqual(response.status_code, HTTPStatus.FORBIDDEN) + self.signin_as_admin() + response = self.patch_helper(f'/api/v2/projects/{project_id}/workflows/{workflow.id}', + data={'target_state': 'RUNNING'}) + self.assertEqual(response.status_code, HTTPStatus.OK) + + # test project create hook + self.signin_helper() + data = { + 'name': 'test1', + 'config': { + 'variables': [{ + 'name': 'test-post', + 'value': 'test' + }] + }, + 'participant_ids': [2] + } + resp = self.post_helper('/api/v2/projects', data=data) + pro_id = self.get_response_data(resp)['id'] + workflow = Workflow( + name='test-workflow-2', + project_id=pro_id, + config=WorkflowDefinition().SerializeToString(), + forkable=False, + state=WorkflowState.READY, + ) + with db.session_scope() as session: + session.add(workflow) + session.commit() + response = self.patch_helper(f'/api/v2/projects/{pro_id}/workflows/{workflow.id}', + data={'target_state': 'RUNNING'}) + self.assertEqual(response.status_code, HTTPStatus.OK) + + self.signin_helper('test_user', 'test_user12312') + response = self.patch_helper(f'/api/v2/projects/{pro_id}/workflows/{workflow.id}', + data={'target_state': 'RUNNING'}) + self.assertEqual(response.status_code, HTTPStatus.FORBIDDEN) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/iam/permission.py b/web_console_v2/api/fedlearner_webconsole/iam/permission.py new file mode 100644 index 000000000..3175f2be4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/permission.py @@ -0,0 +1,92 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
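Why `test_user` receives 403 on the final PATCH above: `USER`'s role defaults (defined in `permission.py` just below) omit `WORKFLOW_PATCH`, and only the project creator received project-scoped bindings via `create_iams_for_resource`. A quick illustrative check:

```python
from fedlearner_webconsole.auth.models import Role
from fedlearner_webconsole.iam.permission import _DEFAULT_PERMISSIONS, Permission

# USER's defaults stop at workflow.put, so the second signin fails the
# check at every level of the resource hierarchy.
assert Permission.WORKFLOW_PATCH in _DEFAULT_PERMISSIONS[Role.ADMIN]
assert Permission.WORKFLOW_PATCH not in _DEFAULT_PERMISSIONS[Role.USER]
```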
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import enum
+from typing import List
+from fedlearner_webconsole.iam.resource import ResourceType
+from fedlearner_webconsole.auth.models import Role
+
+
+class Permission(enum.Enum):
+    # Create project
+    PROJECTS_POST = 'projects.post'
+    PROJECT_GET = 'project.get'
+    # Manage project
+    PROJECT_PATCH = 'project.patch'
+    # Create dataset
+    DATASETS_POST = 'datasets.post'
+    DATASET_DELETE = 'dataset.delete'
+    # Create workflow
+    WORKFLOWS_POST = 'workflows.post'
+    # Config workflow
+    WORKFLOW_PUT = 'workflow.put'
+    # Update workflow
+    WORKFLOW_PATCH = 'workflow.patch'
+
+
+# Valid bindings between resources and permissions
+_VALID_BINDINGS = [
+    (ResourceType.APPLICATION, Permission.PROJECTS_POST),
+    (ResourceType.APPLICATION, Permission.PROJECT_GET),
+    (ResourceType.APPLICATION, Permission.PROJECT_PATCH),
+    (ResourceType.APPLICATION, Permission.DATASETS_POST),
+    (ResourceType.APPLICATION, Permission.DATASET_DELETE),
+    (ResourceType.APPLICATION, Permission.WORKFLOWS_POST),
+    (ResourceType.APPLICATION, Permission.WORKFLOW_PUT),
+    (ResourceType.APPLICATION, Permission.WORKFLOW_PATCH),
+    (ResourceType.PROJECT, Permission.PROJECT_GET),
+    (ResourceType.PROJECT, Permission.PROJECT_PATCH),
+    (ResourceType.PROJECT, Permission.DATASETS_POST),
+    (ResourceType.PROJECT, Permission.DATASET_DELETE),
+    (ResourceType.PROJECT, Permission.WORKFLOWS_POST),
+    (ResourceType.PROJECT, Permission.WORKFLOW_PUT),
+    (ResourceType.PROJECT, Permission.WORKFLOW_PATCH),
+    (ResourceType.DATASET, Permission.DATASET_DELETE),
+    (ResourceType.WORKFLOW, Permission.WORKFLOW_PUT),
+    (ResourceType.WORKFLOW, Permission.WORKFLOW_PATCH),
+]
+
+_DEFAULT_PERMISSIONS = {
+    Role.ADMIN: [
+        Permission.PROJECTS_POST,
+        Permission.PROJECT_GET,
+        Permission.PROJECT_PATCH,
+        Permission.DATASETS_POST,
+        Permission.DATASET_DELETE,
+        Permission.WORKFLOWS_POST,
+        Permission.WORKFLOW_PUT,
+        Permission.WORKFLOW_PATCH,
+    ],
+    Role.USER: [
+        Permission.PROJECTS_POST,
+        Permission.PROJECT_GET,
+        Permission.DATASETS_POST,
+        Permission.WORKFLOWS_POST,
+        Permission.WORKFLOW_PUT,
+    ]
+}
+
+
+def is_valid_binding(resource_type: ResourceType, permission: Permission) -> bool:
+    return (resource_type, permission) in _VALID_BINDINGS
+
+
+def get_valid_permissions(resource_type: ResourceType) -> List[Permission]:
+    return [item[1] for item in _VALID_BINDINGS if item[0] == resource_type]
+
+
+def get_role_default_permissions(user_role: Role) -> List[Permission]:
+    # The role enum loaded via SQLAlchemy could be an arbitrary string, so
+    # fall back to USER's defaults defensively.
+    return _DEFAULT_PERMISSIONS.get(user_role, _DEFAULT_PERMISSIONS[Role.USER])
diff --git a/web_console_v2/api/fedlearner_webconsole/iam/permission_test.py b/web_console_v2/api/fedlearner_webconsole/iam/permission_test.py
new file mode 100644
index 000000000..b067d9e63
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/iam/permission_test.py
@@ -0,0 +1,31 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
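A short standalone sketch (derived from `_VALID_BINDINGS` above, not part of the patch) of how the binding helpers behave: bindings narrow as resources get more specific.

```python
from fedlearner_webconsole.iam.permission import (Permission, get_valid_permissions, is_valid_binding)
from fedlearner_webconsole.iam.resource import ResourceType

assert is_valid_binding(ResourceType.WORKFLOW, Permission.WORKFLOW_PUT)
assert not is_valid_binding(ResourceType.DATASET, Permission.WORKFLOW_PUT)
# A dataset can only carry dataset.delete.
assert get_valid_permissions(ResourceType.DATASET) == [Permission.DATASET_DELETE]
```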
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from fedlearner_webconsole.iam.permission import is_valid_binding, Permission
+from fedlearner_webconsole.iam.resource import ResourceType
+
+
+class PermissionTest(unittest.TestCase):
+
+    def test_is_valid_binding(self):
+        self.assertTrue(is_valid_binding(ResourceType.APPLICATION, Permission.PROJECTS_POST))
+        self.assertTrue(is_valid_binding(ResourceType.PROJECT, Permission.DATASETS_POST))
+        self.assertFalse(is_valid_binding(ResourceType.DATASET, Permission.WORKFLOW_PUT))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/iam/resource.py b/web_console_v2/api/fedlearner_webconsole/iam/resource.py
new file mode 100644
index 000000000..0c0382889
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/iam/resource.py
@@ -0,0 +1,85 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+# pylint: disable=redefined-builtin
+import enum
+import logging
+import re
+from typing import List
+
+
+class ResourceType(enum.Enum):
+    # Application level
+    APPLICATION = 'application'
+    PROJECT = 'projects'
+    DATASET = 'datasets'
+    WORKFLOW = 'workflows'
+
+
+# yapf: disable
+# Resource type hierarchies
+_HIERARCHIES = [
+    (ResourceType.APPLICATION, ResourceType.PROJECT),
+    (ResourceType.PROJECT, ResourceType.DATASET),
+    (ResourceType.PROJECT, ResourceType.WORKFLOW)
+]
+# yapf: enable
+
+
+def is_valid_hierarchy(parent: ResourceType, child: ResourceType) -> bool:
+    return (parent, child) in _HIERARCHIES
+
+
+class Resource(object):
+
+    def __init__(self, type: ResourceType, id: str, name: str):
+        self.type = type
+        self.id = id
+        # Resource name, example: /projects/123/workflows/234
+        self.name = name
+
+
+_RESOURCE_PATTERN = re.compile(r'/([a-z]+)/([0-9]+)')
+
+
+def parse_resource_name(name: str) -> List[Resource]:
+    """Parses a resource name into the list of resources along its hierarchy.
+
+    Why not use repeating groups in the regex?
+    Python's `re` does not support them yet, so we iterate over the
+    resources in the name one by one.
+    """
+    resources = [Resource(ResourceType.APPLICATION, '', '/')]
+    if name == '/':
+        return resources
+    last_match = 0
+    normalized_name = ''
+    for match in _RESOURCE_PATTERN.finditer(name):
+        if match.start(0) != last_match:
+            raise ValueError('Invalid resource name')
+        last_match = match.end(0)
+        try:
+            r_type = ResourceType(match.group(1))
+        except ValueError as e:
+            logging.error(f'Unexpected resource type: {match.group(1)}')
+            raise ValueError('Invalid resource name') from e
+        id = match.group(2)
+        normalized_name = f'{normalized_name}/{r_type.value}/{id}'
+        resources.append(Resource(type=r_type, id=id, name=normalized_name))
+    # Ignores any resource suffix such as /peer_workflows, so the last match
+    # may not reach len(name).
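+    # Each adjacent pair must also form a valid parent/child per _HIERARCHIES:
+    # e.g. '/datasets/1/workflows/2' parses above but is rejected below,
+    # because DATASET cannot parent WORKFLOW.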
+ for i in range(1, len(resources)): + if not is_valid_hierarchy(resources[i - 1].type, resources[i].type): + raise ValueError('Invalid resource hierarchy') + return resources diff --git a/web_console_v2/api/fedlearner_webconsole/iam/resource_test.py b/web_console_v2/api/fedlearner_webconsole/iam/resource_test.py new file mode 100644 index 000000000..0a0516eb0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/iam/resource_test.py @@ -0,0 +1,66 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from fedlearner_webconsole.iam.resource import ResourceType, is_valid_hierarchy, parse_resource_name + + +class ResourceTest(unittest.TestCase): + + def test_is_valid_hierarchy(self): + self.assertTrue(is_valid_hierarchy(ResourceType.APPLICATION, ResourceType.PROJECT)) + self.assertTrue(is_valid_hierarchy(ResourceType.PROJECT, ResourceType.WORKFLOW)) + self.assertFalse(is_valid_hierarchy(ResourceType.DATASET, ResourceType.WORKFLOW)) + + def test_parse_resource_name_correctly(self): + resources = parse_resource_name('/') + self.assertEqual(len(resources), 1) + self.assertEqual(resources[0].type, ResourceType.APPLICATION) + self.assertEqual(resources[0].name, '/') + resources = parse_resource_name('/projects/234234') + self.assertEqual(len(resources), 2) + self.assertEqual(resources[0].type, ResourceType.APPLICATION) + self.assertEqual(resources[0].name, '/') + self.assertEqual(resources[1].type, ResourceType.PROJECT) + self.assertEqual(resources[1].name, '/projects/234234') + self.assertEqual(resources[1].id, '234234') + resources = parse_resource_name('/projects/123/workflows/333') + self.assertEqual(len(resources), 3) + self.assertEqual(resources[0].type, ResourceType.APPLICATION) + self.assertEqual(resources[0].name, '/') + self.assertEqual(resources[1].type, ResourceType.PROJECT) + self.assertEqual(resources[1].name, '/projects/123') + self.assertEqual(resources[1].id, '123') + self.assertEqual(resources[2].type, ResourceType.WORKFLOW) + self.assertEqual(resources[2].name, '/projects/123/workflows/333') + self.assertEqual(resources[2].id, '333') + resources = parse_resource_name('/projects/123/workflows') + self.assertEqual(len(resources), 2) + resources = parse_resource_name('/projects/123/workflows/2/peer_workflows') + self.assertEqual(len(resources), 3) + + def test_parse_resource_name_invalid_hierarchy(self): + with self.assertRaises(ValueError) as cm: + parse_resource_name('/datasets/123/workflows/234') + self.assertEqual(str(cm.exception), 'Invalid resource hierarchy') + + def test_parse_resource_name_invalid_string(self): + with self.assertRaises(ValueError) as cm: + parse_resource_name('/project/123') + self.assertEqual(str(cm.exception), 'Invalid resource name') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/initial_db.py b/web_console_v2/api/fedlearner_webconsole/initial_db.py index da9997ba0..a5e80487d 100644 --- 
a/web_console_v2/api/fedlearner_webconsole/initial_db.py +++ b/web_console_v2/api/fedlearner_webconsole/initial_db.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,30 +11,251 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import collections +import json +import os -from fedlearner_webconsole.auth.models import User, Role, State -from fedlearner_webconsole.db import db_handler as db +from pathlib import Path + +from sqlalchemy.orm import Session +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.auth.models import Role, State, User +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput +from fedlearner_webconsole.proto.setting_pb2 import SystemVariables +from fedlearner_webconsole.setting.models import Setting +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateKind +from fedlearner_webconsole.proto.workflow_definition_pb2 import (WorkflowDefinition, WorkflowTemplateEditorInfo) +from fedlearner_webconsole.composer.composer_service import ComposerService +from fedlearner_webconsole.flag.models import Flag + +SettingTuple = collections.namedtuple('SettingTuple', ['key', 'value']) INITIAL_USER_INFO = [{ 'username': 'ada', - 'password': 'fl@123.', + 'password': 'fl@12345.', 'name': 'ada', 'email': 'ada@fedlearner.com', 'role': Role.USER, 'state': State.ACTIVE, }, { 'username': 'admin', - 'password': 'fl@123.', + 'password': 'fl@12345.', 'name': 'admin', 'email': 'admin@fedlearner.com', 'role': Role.ADMIN, 'state': State.ACTIVE, +}, { + 'username': 'robot', + 'password': 'fl@12345.', + 'name': 'robot', + 'email': 'robot@fedlearner.com', + 'role': Role.ADMIN, + 'state': State.ACTIVE, }] +INITIAL_SYSTEM_VARIABLES = ParseDict( + { + 'variables': [{ + 'name': 'labels', + 'value': {}, + 'value_type': 'OBJECT', + 'fixed': True + }, { + 'name': 'volume_mounts_list', + 'value': [{ + 'mountPath': '/data', + 'name': 'data' + }], + 'value_type': 'LIST', + 'fixed': True + }, { + 'name': 'volumes_list', + 'value': [{ + 'persistentVolumeClaim': { + 'claimName': 'pvc-fedlearner-default' + }, + 'name': 'data' + }], + 'value_type': 'LIST', + 'fixed': True + }, { + 'name': 'envs_list', + 'value': [{ + 'name': 'HADOOP_HOME', + 'value': '' + }, { + 'name': 'MANUFACTURER', + 'value': 'dm9sY2VuZ2luZQ==' + }], + 'value_type': 'LIST', + 'fixed': True + }, { + 'name': 'namespace', + 'value': 'default', + 'value_type': 'STRING', + 'fixed': True + }, { + 'name': 'serving_image', + 'value': 'artifact.bytedance.com/fedlearner/' + 'privacy_perserving_computing_serving:7359b10685e1646450dfda389d228066', + 'value_type': 'STRING', + 'fixed': True + }, { + 'name': 'spark_image', + 'value': 'artifact.bytedance.com/fedlearner/pp_data_inspection:2.2.4.1', + 'value_type': 'STRING', + 'fixed': True + }, { + 'name': 'image_repo', + 'value': 'artifact.bytedance.com/fedlearner', + 'value_type': 'STRING', + 'fixed': False + }] + }, SystemVariables()) + +INITIAL_EMAIL_GROUP = SettingTuple(key='sys_email_group', 
value='privacy_computing@bytedance.com')
+
+
+def _insert_setting_if_not_exists(session: Session, st: SettingTuple):
+    if session.query(Setting).filter_by(uniq_key=st.key).first() is None:
+        setting = Setting(uniq_key=st.key, value=st.value)
+        session.add(setting)
+
+
+def migrate_system_variables(session: Session, initial_vars: SystemVariables):
+    setting_service = SettingService(session)
+    origin_sys_vars = setting_service.get_system_variables()
+    result = merge_system_variables(initial_vars, origin_sys_vars)
+    setting_service.set_system_variables(result)
+
+
+def merge_system_variables(extend: SystemVariables, origin: SystemVariables) -> SystemVariables:
+    """Merges two SystemVariables; when two variables share the same name, origin's value wins."""
+    key_map = {var.name: var for var in extend.variables}
+    for var in origin.variables:
+        key_map[var.name] = var
+    return SystemVariables(variables=[key_map[key] for key in key_map])
+
+
+def _insert_or_update_templates(session: Session):
+    path = Path(__file__, '../sys_preset_templates/').resolve()
+    template_files = path.rglob('*.json')
+    for template_file in template_files:
+        with open(os.path.join(path, template_file), encoding='utf-8') as f:
+            data = json.load(f)
+        template_proto = ParseDict(data['config'], WorkflowDefinition(), ignore_unknown_fields=True)
+        editor_info_proto = ParseDict(data['editor_info'], WorkflowTemplateEditorInfo(), ignore_unknown_fields=True)
+        template = session.query(WorkflowTemplate).filter_by(name=data['name']).first()
+        if template is None:
+            template = WorkflowTemplate(name=data['name'])
+        template.comment = data['comment']
+        template.group_alias = template_proto.group_alias
+        template.kind = WorkflowTemplateKind.PRESET.value
+        template.set_config(template_proto)
+        template.set_editor_info(editor_info_proto)
+        session.add(template)
+
+
+def _insert_schedule_workflow_item(session):
+    composer_service = ComposerService(session)
+    # Finishes the old one
+    composer_service.finish('workflow_scheduler')
+    composer_service.collect_v2(
+        'workflow_scheduler_v2',
+        items=[(ItemType.SCHEDULE_WORKFLOW, RunnerInput())],
+        # cron job at every 1 minute, specific time to avoid congestion.
+        cron_config='* * * * * 45')
+    composer_service.collect_v2(
+        'job_scheduler_v2',
+        items=[(ItemType.SCHEDULE_JOB, RunnerInput())],
+        # cron job at every 1 minute, specific time to avoid congestion.
+        cron_config='* * * * * 15')
+
+
+def _insert_dataset_job_scheduler_item(session):
+    composer_service = ComposerService(session)
+    # finish the old scheduler
+    composer_service.finish('dataset_job_scheduler')
+    composer_service.finish('dataset_cron_job_scheduler')
+    # insert new scheduler
+    composer_service.collect_v2(
+        'dataset_short_period_scheduler',
+        items=[(ItemType.DATASET_SHORT_PERIOD_SCHEDULER, RunnerInput())],
+        # cron job at every 30 seconds
+        cron_config='* * * * * */30')
+    composer_service.collect_v2(
+        'dataset_long_period_scheduler',
+        items=[(ItemType.DATASET_LONG_PERIOD_SCHEDULER, RunnerInput())],
+        # cron job at every 30 min
+        cron_config='*/30 * * * *')
+
+
+def _insert_cleanup_cronjob_item(session):
+    composer_service = ComposerService(session)
+    composer_service.collect_v2(
+        'cleanup_cron_job',
+        items=[(ItemType.CLEANUP_CRON_JOB, RunnerInput())],
+        # cron job at every 30 min
+        cron_config='*/30 * * * *')
+
+
+def _insert_tee_runner_item(session):
+    if not Flag.TRUSTED_COMPUTING_ENABLED.value:
+        return
+    composer_service = ComposerService(session)
+    composer_service.collect_v2(
+        'tee_create_runner',
+        items=[(ItemType.TEE_CREATE_RUNNER, RunnerInput())],
+        # cron job at every 30 seconds
+        cron_config='* * * * * */30')
+    composer_service.collect_v2(
+        'tee_resource_check_runner',
+        items=[(ItemType.TEE_RESOURCE_CHECK_RUNNER, RunnerInput())],
+        # cron job at every 30 min
+        cron_config='*/30 * * * *')
+
+
+def _insert_project_runner_item(session):
+    if not Flag.PENDING_PROJECT_ENABLED.value:
+        return
+    composer_service = ComposerService(session)
+    composer_service.collect_v2(
+        'project_scheduler_v2',
+        items=[(ItemType.SCHEDULE_PROJECT, RunnerInput())],
+        # cron job at every 1 minute, specific time to avoid congestion.
+        cron_config='* * * * * 30')
+
+
+def _insert_model_job_scheduler_runner_item(session: Session):
+    # Checks the flag's value; a bare Flag object is always truthy.
+    if not Flag.MODEL_JOB_GLOBAL_CONFIG_ENABLED.value:
+        return
+    composer_service = ComposerService(session)
+    composer_service.collect_v2('model_job_scheduler_runner',
+                                items=[(ItemType.SCHEDULE_MODEL_JOB, RunnerInput())],
+                                cron_config='* * * * * */30')
+
+
+def _insert_model_job_group_scheduler_runner_item(session: Session):
+    # Checks the flag's value; a bare Flag object is always truthy.
+    if not Flag.MODEL_JOB_GLOBAL_CONFIG_ENABLED.value:
+        return
+    composer_service = ComposerService(session)
+    composer_service.collect_v2('model_job_group_scheduler_runner',
+                                items=[(ItemType.SCHEDULE_MODEL_JOB_GROUP, RunnerInput())],
+                                cron_config='* * * * * */30')
+    composer_service.collect_v2(
+        'model_job_group_long_period_scheduler_runner',
+        items=[(ItemType.SCHEDULE_LONG_PERIOD_MODEL_JOB_GROUP, RunnerInput())],
+        # cron job at every 30 min
+        cron_config='*/30 * * * *')
+
 
 def initial_db():
     with db.session_scope() as session:
-        # initial user info first
+        # Initializes user info first
         for u_info in INITIAL_USER_INFO:
             username = u_info['username']
             password = u_info['password']
@@ -42,13 +263,19 @@ def initial_db():
             email = u_info['email']
             role = u_info['role']
             state = u_info['state']
-            if session.query(User).filter_by(
-                    username=username).first() is None:
-                user = User(username=username,
-                            name=name,
-                            email=email,
-                            role=role,
-                            state=state)
+            if session.query(User).filter_by(username=username).first() is None:
+                user = User(username=username, name=name, email=email, role=role, state=state)
                 user.set_password(password=password)
                 session.add(user)
+        # Initializes settings
+        _insert_setting_if_not_exists(session, INITIAL_EMAIL_GROUP)
+        migrate_system_variables(session, INITIAL_SYSTEM_VARIABLES)
+        _insert_or_update_templates(session)
+
_insert_schedule_workflow_item(session) + _insert_dataset_job_scheduler_item(session) + _insert_cleanup_cronjob_item(session) + _insert_tee_runner_item(session) + _insert_project_runner_item(session) + _insert_model_job_scheduler_runner_item(session) + _insert_model_job_group_scheduler_runner_item(session) session.commit() diff --git a/web_console_v2/api/fedlearner_webconsole/initial_db_test.py b/web_console_v2/api/fedlearner_webconsole/initial_db_test.py new file mode 100644 index 000000000..1bd071fb2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/initial_db_test.py @@ -0,0 +1,52 @@ +import unittest +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.initial_db import (_insert_or_update_templates, initial_db, migrate_system_variables, + INITIAL_SYSTEM_VARIABLES) +from fedlearner_webconsole.proto.setting_pb2 import SystemVariables +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from testing.no_web_server_test_case import NoWebServerTestCase + + +class InitialDbTest(NoWebServerTestCase): + + def test_initial_db(self): + initial_db() + with db.session_scope() as session: + self.assertEqual(SettingService(session).get_system_variables_dict()['namespace'], 'default') + + def test_merge_system_variables(self): + with db.session_scope() as session: + migrate_system_variables(session, INITIAL_SYSTEM_VARIABLES) + session.commit() + + with db.session_scope() as session: + migrate_system_variables( + session, + ParseDict( + { + 'variables': [{ + 'name': 'namespace', + 'value': 'not_default' + }, { + 'name': 'unknown', + 'value': 'test' + }] + }, SystemVariables())) + self.assertEqual(SettingService(session).get_system_variables_dict()['namespace'], 'default') + self.assertEqual(SettingService(session).get_system_variables_dict()['unknown'], 'test') + session.commit() + + def test_insert_syspreset_template(self): + with db.session_scope() as session: + _insert_or_update_templates(session) + session.commit() + + with db.session_scope() as session: + self.assertEqual(session.query(WorkflowTemplate).count(), 18) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/job/BUILD.bazel new file mode 100644 index 000000000..f8561bb52 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/BUILD.bazel @@ -0,0 +1,317 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "controller_lib", + srcs = ["controller.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + ":utils_lib", + ":yaml_formatter_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "controller_lib_test", + size = "small", + srcs = [ + "controller_test.py", + ], + imports = 
["../.."], + main = "controller_test.py", + deps = [ + ":controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:yaml_formatter_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "crd_lib", + srcs = ["crd.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_cache_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + ], +) + +py_library( + name = "metrics_lib", + srcs = ["metrics.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:es_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:job_metrics_lib", + "@com_google_protobuf//:protobuf_python", + "@common_matplotlib//:pkg", + "@common_mpld3//:pkg", + ], +) + +py_test( + name = "metrics_lib_test", + size = "medium", + srcs = [ + "metrics_test.py", + ], + imports = ["../.."], + main = "metrics_test.py", + deps = [ + ":metrics_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing/test_data:test_data_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + ":crd_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "model_test.py", + ], + imports = ["../.."], + main = "model_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_yaml_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "service_lib_test", + size = "small", + srcs = [ + "service_test.py", + ], + imports = ["../.."], + 
main = "service_test.py", + deps = [ + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_yaml_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../.."], + deps = ["//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib"], +) + +py_test( + name = "utils_test", + size = "small", + srcs = [ + "utils_test.py", + ], + imports = ["../.."], + main = "utils_test.py", + deps = [ + ":utils_lib", + ], +) + +py_library( + name = "yaml_formatter_lib", + srcs = ["yaml_formatter.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/k8s:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_yaml_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "yaml_formatter_lib_test", + size = "small", + srcs = [ + "yaml_formatter_test.py", + ], + imports = ["../.."], + main = "yaml_formatter_test.py", + deps = [ + ":yaml_formatter_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_yaml_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "scheduler_lib", + srcs = [ + "scheduler.py", + ], + imports = ["../.."], + deps = [ + ":controller_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "scheduler_lib_test", + size = "small", + srcs = [ + "scheduler_test.py", + ], + imports = ["../.."], + main = "scheduler_test.py", + deps = [ + ":scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":metrics_lib", + ":models_lib", + ":service_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + 
"//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:es_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:kibana_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "event_listener_lib", + srcs = ["event_listener.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:event_listener_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_cache_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_job_controller_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/job/apis.py b/web_console_v2/api/fedlearner_webconsole/job/apis.py index d9a073dbe..83208d251 100644 --- a/web_console_v2/api/fedlearner_webconsole/job/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/job/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,331 +15,545 @@ import json import logging import time +from typing import Optional -from flask_restful import Resource, reqparse, abort +from flask_restful import Resource, reqparse from google.protobuf.json_format import MessageToDict +from webargs.flaskparser import use_kwargs +from marshmallow import fields +from sqlalchemy.orm.session import Session from envs import Envs -from fedlearner_webconsole.exceptions import ( - NotFoundException, InternalException -) +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import (NotFoundException, InternalException, InvalidArgumentException) from fedlearner_webconsole.job.metrics import JobMetricsBuilder from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.job.service import JobService +from fedlearner_webconsole.participant.models import Participant from fedlearner_webconsole.proto import common_pb2 from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.utils.decorators import jwt_required +from fedlearner_webconsole.auth.third_party_sso import credentials_required from fedlearner_webconsole.utils.es import es from fedlearner_webconsole.utils.kibana import Kibana from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.utils.flask_utils import make_flask_response -def _get_job(job_id): - result = Job.query.filter_by(id=job_id).first() +def _get_job(job_id, session: Session): + result = session.query(Job).filter_by(id=job_id).first() if result is None: raise NotFoundException(f'Failed to find job_id: {job_id}') return result class JobApi(Resource): - @jwt_required() - def get(self, job_id): - job = _get_job(job_id) - return {'data': job.to_dict()} - # TODO: manual start jobs + @credentials_required + def get(self, job_id): + """Get job details. + --- + tags: + - job + description: Get job details. + parameters: + - in: path + name: job_id + schema: + type: integer + responses: + 200: + description: Detail of job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.JobPb' + """ + with db.session_scope() as session: + job = _get_job(job_id, session) + result = job.to_proto() + result.pods.extend(JobService.get_pods(job)) + result.snapshot = JobService.get_job_yaml(job) + return make_flask_response(result) class PodLogApi(Resource): - @jwt_required() - def get(self, job_id, pod_name): - parser = reqparse.RequestParser() - parser.add_argument('start_time', type=int, location='args', - required=False, - help='start_time must be timestamp') - parser.add_argument('max_lines', type=int, location='args', - required=True, - help='max_lines is required') - data = parser.parse_args() - start_time = data['start_time'] - max_lines = data['max_lines'] - job = _get_job(job_id) - if start_time is None: - start_time = job.workflow.start_at - return {'data': es.query_log(Envs.ES_INDEX, '', pod_name, - start_time * 1000, - int(time.time() * 1000))[:max_lines][::-1]} + + @credentials_required + @use_kwargs({ + 'start_time': fields.Int(required=False, load_default=None), + 'max_lines': fields.Int(required=True) + }, + location='query') + def get(self, start_time: Optional[int], max_lines: int, job_id: int, pod_name: str): + """Get pod logs. + --- + tags: + - job + description: Get pod logs. 
+ parameters: + - in: path + name: job_id + schema: + type: integer + - in: path + name: pod_name + schema: + type: string + - in: query + description: timestamp in seconds + name: start_time + schema: + type: integer + - in: query + name: max_lines + schema: + type: integer + required: true + responses: + 200: + description: List of pod logs + content: + application/json: + schema: + type: array + items: + type: string + + """ + with db.session_scope() as session: + job = _get_job(job_id, session) + if start_time is None and job.workflow: + start_time = job.workflow.start_at + return make_flask_response( + es.query_log(Envs.ES_INDEX, '', pod_name, (start_time or 0) * 1000)[:max_lines][::-1]) class JobLogApi(Resource): - @jwt_required() - def get(self, job_id): - parser = reqparse.RequestParser() - parser.add_argument('start_time', type=int, location='args', - required=False, - help='project_id must be timestamp') - parser.add_argument('max_lines', type=int, location='args', - required=True, - help='max_lines is required') - data = parser.parse_args() - start_time = data['start_time'] - max_lines = data['max_lines'] - job = _get_job(job_id) - if start_time is None: - start_time = job.workflow.start_at - return { - 'data': es.query_log( - Envs.ES_INDEX, job.name, - 'fedlearner-operator', - start_time * 1000, - int(time.time() * 1000), - Envs.OPERATOR_LOG_MATCH_PHRASE)[:max_lines][::-1] - } + + @credentials_required + @use_kwargs({ + 'start_time': fields.Int(required=False, load_default=None), + 'max_lines': fields.Int(required=True) + }, + location='query') + def get(self, start_time: Optional[int], max_lines: int, job_id: int): + """Get job logs. + --- + tags: + - job + description: Get job logs. + parameters: + - in: path + name: job_id + schema: + type: integer + - in: query + description: timestamp in seconds + name: start_time + schema: + type: integer + - in: query + name: max_lines + schema: + type: integer + required: true + responses: + 200: + description: List of job logs + content: + application/json: + schema: + type: array + items: + type: string + """ + with db.session_scope() as session: + job = _get_job(job_id, session) + if start_time is None and job.workflow: + start_time = job.workflow.start_at + return make_flask_response( + es.query_log(Envs.ES_INDEX, + job.name, + 'fedlearner-operator', (start_time or 0) * 1000, + match_phrase=Envs.OPERATOR_LOG_MATCH_PHRASE)[:max_lines][::-1]) class JobMetricsApi(Resource): - @jwt_required() - def get(self, job_id): - job = _get_job(job_id) - try: - metrics = JobMetricsBuilder(job).plot_metrics() - # Metrics is a list of dict. Each dict can be rendered by frontend - # with mpld3.draw_figure('figure1', json) - return {'data': metrics} - except Exception as e: # pylint: disable=broad-except - logging.warning('Error building metrics: %s', repr(e)) - abort(400, message=repr(e)) + + @credentials_required + @use_kwargs({ + 'raw': fields.Bool(required=False, load_default=False), + }, location='query') + def get(self, job_id: int, raw: bool): + """Get job Metrics. + --- + tags: + - job + description: Get job metrics. 
+ parameters: + - in: path + name: job_id + schema: + type: integer + - in: query + name: raw + schema: + type: boolean + responses: + 200: + description: List of job metrics + content: + application/json: + schema: + type: array + items: + type: object + """ + with db.session_scope() as session: + job = _get_job(job_id, session) + try: + builder = JobMetricsBuilder(job) + if raw: + return make_flask_response(data=builder.query_metrics()) + # Metrics is a list of dict. Each dict can be rendered by frontend + # with mpld3.draw_figure('figure1', json) + return make_flask_response(data=builder.plot_metrics()) + except Exception as e: # pylint: disable=broad-except + logging.warning('Error building metrics: %s', repr(e)) + raise InvalidArgumentException(details=repr(e)) from e class PeerJobMetricsApi(Resource): - @jwt_required() - def get(self, workflow_uuid, participant_id, job_name): - workflow = Workflow.query.filter_by(uuid=workflow_uuid).first() - if workflow is None: - raise NotFoundException( - f'Failed to find workflow: {workflow_uuid}') - project_config = workflow.project.get_config() - party = project_config.participants[participant_id] - client = RpcClient(project_config, party) - resp = client.get_job_metrics(job_name) - if resp.status.code != common_pb2.STATUS_SUCCESS: - raise InternalException(resp.status.msg) - metrics = json.loads(resp.metrics) + @credentials_required + def get(self, workflow_uuid: str, participant_id: int, job_name: str): + """Get peer job metrics. + --- + tags: + - job + description: Get peer Job metrics. + parameters: + - in: path + name: workflow_uuid + schema: + type: string + - in: path + name: participant_id + schema: + type: integer + - in: path + name: job_name + schema: + type: string + responses: + 200: + description: List of job metrics + content: + application/json: + schema: + type: array + items: + type: object + """ + with db.session_scope() as session: + workflow = session.query(Workflow).filter_by(uuid=workflow_uuid).first() + if workflow is None: + raise NotFoundException(f'Failed to find workflow: {workflow_uuid}') + participant = session.query(Participant).filter_by(id=participant_id).first() + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participant.domain_name) + resp = client.get_job_metrics(job_name) + if resp.status.code != common_pb2.STATUS_SUCCESS: + raise InternalException(resp.status.msg) + + metrics = json.loads(resp.metrics) - # Metrics is a list of dict. Each dict can be rendered by frontend with - # mpld3.draw_figure('figure1', json) - return {'data': metrics} + # Metrics is a list of dict. 
Each dict can be rendered by frontend with + # mpld3.draw_figure('figure1', json) + return make_flask_response(metrics) class JobEventApi(Resource): # TODO(xiangyuxuan): need test - @jwt_required() - def get(self, job_id): - parser = reqparse.RequestParser() - parser.add_argument('start_time', type=int, location='args', - required=False, - help='start_time must be timestamp') - parser.add_argument('max_lines', type=int, location='args', - required=True, - help='max_lines is required') - data = parser.parse_args() - start_time = data['start_time'] - max_lines = data['max_lines'] - job = _get_job(job_id) - if start_time is None: - start_time = job.workflow.start_at - return {'data': es.query_events(Envs.ES_INDEX, job.name, - 'fedlearner-operator', - start_time, - int(time.time() * 1000 - ), - Envs.OPERATOR_LOG_MATCH_PHRASE - )[:max_lines][::-1]} + @credentials_required + @use_kwargs({ + 'start_time': fields.Int(required=False, load_default=None), + 'max_lines': fields.Int(required=True) + }, + location='query') + def get(self, start_time: Optional[int], max_lines: int, job_id: int): + """Get job events. + --- + tags: + - job + description: Get job events. + parameters: + - in: path + name: job_id + schema: + type: integer + - in: query + description: timestamp in seconds + name: start_time + schema: + type: integer + - in: query + name: max_lines + schema: + type: integer + required: true + responses: + 200: + description: List of job events + content: + application/json: + schema: + type: array + items: + type: string + """ + with db.session_scope() as session: + job = _get_job(job_id, session) + if start_time is None and job.workflow: + start_time = job.workflow.start_at + return make_flask_response( + es.query_events(Envs.ES_INDEX, job.name, 'fedlearner-operator', start_time, int(time.time() * 1000), + Envs.OPERATOR_LOG_MATCH_PHRASE)[:max_lines][::-1]) class PeerJobEventsApi(Resource): - @jwt_required() - def get(self, workflow_uuid, participant_id, job_name): - parser = reqparse.RequestParser() - parser.add_argument('start_time', type=int, location='args', - required=False, - help='project_id must be timestamp') - parser.add_argument('max_lines', type=int, location='args', - required=True, - help='max_lines is required') - data = parser.parse_args() - start_time = data['start_time'] - max_lines = data['max_lines'] - workflow = Workflow.query.filter_by(uuid=workflow_uuid).first() - if workflow is None: - raise NotFoundException( - f'Failed to find workflow: {workflow_uuid}') - if start_time is None: - start_time = workflow.start_at - project_config = workflow.project.get_config() - party = project_config.participants[participant_id] - client = RpcClient(project_config, party) - resp = client.get_job_events(job_name=job_name, - start_time=start_time, - max_lines=max_lines) - if resp.status.code != common_pb2.STATUS_SUCCESS: - raise InternalException(resp.status.msg) - peer_events = MessageToDict( - resp, - preserving_proto_field_name=True, - including_default_value_fields=True)['logs'] - return {'data': peer_events} + + @credentials_required + @use_kwargs({ + 'start_time': fields.Int(required=False, load_default=None), + 'max_lines': fields.Int(required=True) + }, + location='query') + def get(self, start_time: Optional[int], max_lines: int, workflow_uuid: str, participant_id: int, job_name: str): + """Get peer job events. + --- + tags: + - job + description: Get peer job events. 
+ parameters: + - in: path + name: workflow_uuid + schema: + type: string + - in: path + name: participant_id + schema: + type: integer + - in: path + name: job_name + schema: + type: string + responses: + 200: + description: List of peer job events + content: + application/json: + schema: + type: array + items: + type: string + """ + with db.session_scope() as session: + workflow = session.query(Workflow).filter_by(uuid=workflow_uuid).first() + if workflow is None: + raise NotFoundException(f'Failed to find workflow: {workflow_uuid}') + if start_time is None: + start_time = workflow.start_at + participant = session.query(Participant).filter_by(id=participant_id).first() + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participant.domain_name) + resp = client.get_job_events(job_name=job_name, start_time=start_time, max_lines=max_lines) + if resp.status.code != common_pb2.STATUS_SUCCESS: + raise InternalException(resp.status.msg) + peer_events = MessageToDict(resp, preserving_proto_field_name=True, + including_default_value_fields=True)['logs'] + return make_flask_response(peer_events) class KibanaMetricsApi(Resource): - @jwt_required() + + @credentials_required def get(self, job_id): - job = _get_job(job_id) parser = reqparse.RequestParser() - parser.add_argument('type', type=str, location='args', + parser.add_argument('type', + type=str, + location='args', required=True, - choices=('Rate', 'Ratio', 'Numeric', - 'Time', 'Timer'), + choices=('Rate', 'Ratio', 'Numeric', 'Time', 'Timer'), help='Visualization type is required. Choices: ' - 'Rate, Ratio, Numeric, Time, Timer') - parser.add_argument('interval', type=str, location='args', + 'Rate, Ratio, Numeric, Time, Timer') + parser.add_argument('interval', + type=str, + location='args', default='', help='Time bucket interval length, ' - 'defaults to be automated by Kibana.') - parser.add_argument('x_axis_field', type=str, location='args', + 'defaults to be automated by Kibana.') + parser.add_argument('x_axis_field', + type=str, + location='args', default='tags.event_time', help='Time field (X axis) is required.') - parser.add_argument('query', type=str, location='args', - help='Additional query string to the graph.') - parser.add_argument('start_time', type=int, location='args', + parser.add_argument('query', type=str, location='args', help='Additional query string to the graph.') + parser.add_argument('start_time', + type=int, + location='args', default=-1, help='Earliest time of data.' - 'Unix timestamp in secs.') - parser.add_argument('end_time', type=int, location='args', + 'Unix timestamp in secs.') + parser.add_argument('end_time', + type=int, + location='args', default=-1, help='Latest time of data.' - 'Unix timestamp in secs.') + 'Unix timestamp in secs.') # (Joined) Rate visualization is fixed and only interval, query and # x_axis_field can be modified # Ratio visualization - parser.add_argument('numerator', type=str, location='args', + parser.add_argument('numerator', + type=str, + location='args', help='Numerator is required in Ratio ' - 'visualization. ' - 'A query string similar to args::query.') - parser.add_argument('denominator', type=str, location='args', + 'visualization. ' + 'A query string similar to args::query.') + parser.add_argument('denominator', + type=str, + location='args', help='Denominator is required in Ratio ' - 'visualization. ' - 'A query string similar to args::query.') + 'visualization. 
' + 'A query string similar to args::query.') # Numeric visualization - parser.add_argument('aggregator', type=str, location='args', + parser.add_argument('aggregator', + type=str, + location='args', default='Average', - choices=('Average', 'Sum', 'Max', 'Min', 'Variance', - 'Std. Deviation', 'Sum of Squares'), + choices=('Average', 'Sum', 'Max', 'Min', 'Variance', 'Std. Deviation', 'Sum of Squares'), help='Aggregator type is required in Numeric and ' - 'Timer visualization.') - parser.add_argument('value_field', type=str, location='args', + 'Timer visualization.') + parser.add_argument('value_field', + type=str, + location='args', help='The field to be aggregated on is required ' - 'in Numeric visualization.') + 'in Numeric visualization.') # No additional arguments in Time visualization # # Timer visualization - parser.add_argument('timer_names', type=str, location='args', + parser.add_argument('timer_names', + type=str, + location='args', help='Names of timers is required in ' - 'Timer visualization.') - parser.add_argument('split', type=int, location='args', - default=0, - help='Whether to plot timers individually.') + 'Timer visualization.') + parser.add_argument('split', type=int, location='args', default=0, help='Whether to plot timers individually.') args = parser.parse_args() - try: - if args['type'] in Kibana.TSVB: - return {'data': Kibana.create_tsvb(job, args)} - if args['type'] in Kibana.TIMELION: - return {'data': Kibana.create_timelion(job, args)} - return {'data': []} - except Exception as e: # pylint: disable=broad-except - abort(400, message=repr(e)) + with db.session_scope() as session: + job = _get_job(job_id, session) + try: + if args['type'] in Kibana.TSVB: + return {'data': Kibana.create_tsvb(job, args)} + if args['type'] in Kibana.TIMELION: + return {'data': Kibana.create_timelion(job, args)} + return {'data': []} + except Exception as e: # pylint: disable=broad-except + raise InvalidArgumentException(details=repr(e)) from e class PeerKibanaMetricsApi(Resource): - @jwt_required() + + @credentials_required def get(self, workflow_uuid, participant_id, job_name): parser = reqparse.RequestParser() - parser.add_argument('type', type=str, location='args', + parser.add_argument('type', + type=str, + location='args', required=True, choices=('Ratio', 'Numeric'), help='Visualization type is required. Choices: ' - 'Rate, Ratio, Numeric, Time, Timer') - parser.add_argument('interval', type=str, location='args', + 'Rate, Ratio, Numeric, Time, Timer') + parser.add_argument('interval', + type=str, + location='args', default='', help='Time bucket interval length, ' - 'defaults to be automated by Kibana.') - parser.add_argument('x_axis_field', type=str, location='args', + 'defaults to be automated by Kibana.') + parser.add_argument('x_axis_field', + type=str, + location='args', default='tags.event_time', help='Time field (X axis) is required.') - parser.add_argument('query', type=str, location='args', - help='Additional query string to the graph.') - parser.add_argument('start_time', type=int, location='args', + parser.add_argument('query', type=str, location='args', help='Additional query string to the graph.') + parser.add_argument('start_time', + type=int, + location='args', default=-1, help='Earliest time of data.' - 'Unix timestamp in secs.') - parser.add_argument('end_time', type=int, location='args', + 'Unix timestamp in secs.') + parser.add_argument('end_time', + type=int, + location='args', default=-1, help='Latest time of data.' 
- 'Unix timestamp in secs.') + 'Unix timestamp in secs.') # Ratio visualization - parser.add_argument('numerator', type=str, location='args', + parser.add_argument('numerator', + type=str, + location='args', help='Numerator is required in Ratio ' - 'visualization. ' - 'A query string similar to args::query.') - parser.add_argument('denominator', type=str, location='args', + 'visualization. ' + 'A query string similar to args::query.') + parser.add_argument('denominator', + type=str, + location='args', help='Denominator is required in Ratio ' - 'visualization. ' - 'A query string similar to args::query.') + 'visualization. ' + 'A query string similar to args::query.') # Numeric visualization - parser.add_argument('aggregator', type=str, location='args', + parser.add_argument('aggregator', + type=str, + location='args', default='Average', - choices=('Average', 'Sum', 'Max', 'Min', 'Variance', - 'Std. Deviation', 'Sum of Squares'), + choices=('Average', 'Sum', 'Max', 'Min', 'Variance', 'Std. Deviation', 'Sum of Squares'), help='Aggregator type is required in Numeric and ' - 'Timer visualization.') - parser.add_argument('value_field', type=str, location='args', + 'Timer visualization.') + parser.add_argument('value_field', + type=str, + location='args', help='The field to be aggregated on is required ' - 'in Numeric visualization.') + 'in Numeric visualization.') args = parser.parse_args() - workflow = Workflow.query.filter_by(uuid=workflow_uuid).first() - if workflow is None: - raise NotFoundException( - f'Failed to find workflow: {workflow_uuid}') - project_config = workflow.project.get_config() - party = project_config.participants[participant_id] - client = RpcClient(project_config, party) - resp = client.get_job_kibana(job_name, json.dumps(args)) - if resp.status.code != common_pb2.STATUS_SUCCESS: - raise InternalException(resp.status.msg) - metrics = json.loads(resp.metrics) - # metrics is a list of 2-element lists, - # each 2-element list is a [x, y] pair. - return {'data': metrics} + with db.session_scope() as session: + workflow = session.query(Workflow).filter_by(uuid=workflow_uuid).first() + if workflow is None: + raise NotFoundException(f'Failed to find workflow: {workflow_uuid}') + participant = session.query(Participant).filter_by(id=participant_id).first() + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participant.domain_name) + resp = client.get_job_kibana(job_name, json.dumps(args)) + if resp.status.code != common_pb2.STATUS_SUCCESS: + raise InternalException(resp.status.msg) + metrics = json.loads(resp.metrics) + # metrics is a list of 2-element lists, + # each 2-element list is a [x, y] pair. 
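+            # (Illustratively, x is typically a timestamp or bucket key and
+            # y the aggregated metric value.)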
+            return {'data': metrics}


 def initialize_job_apis(api):
     api.add_resource(JobApi, '/jobs/<int:job_id>')
-    api.add_resource(PodLogApi,
-                     '/jobs/<int:job_id>/pods/<string:pod_name>/log')
-    api.add_resource(JobLogApi,
-                     '/jobs/<int:job_id>/log')
-    api.add_resource(JobMetricsApi,
-                     '/jobs/<int:job_id>/metrics')
-    api.add_resource(KibanaMetricsApi,
-                     '/jobs/<int:job_id>/kibana_metrics')
-    api.add_resource(PeerJobMetricsApi,
-                     '/workflows/<string:workflow_uuid>/peer_workflows'
-                     '/<int:participant_id>/jobs/<string:job_name>/metrics')
-    api.add_resource(PeerKibanaMetricsApi,
-                     '/workflows/<string:workflow_uuid>/peer_workflows'
-                     '/<int:participant_id>/jobs/<string:job_name>'
-                     '/kibana_metrics')
+    api.add_resource(PodLogApi, '/jobs/<int:job_id>/pods/<string:pod_name>/log')
+    api.add_resource(JobLogApi, '/jobs/<int:job_id>/log')
+    api.add_resource(JobMetricsApi, '/jobs/<int:job_id>/metrics')
+    api.add_resource(KibanaMetricsApi, '/jobs/<int:job_id>/kibana_metrics')
+    api.add_resource(
+        PeerJobMetricsApi, '/workflows/<string:workflow_uuid>/peer_workflows'
+        '/<int:participant_id>/jobs/<string:job_name>/metrics')
+    api.add_resource(
+        PeerKibanaMetricsApi, '/workflows/<string:workflow_uuid>/peer_workflows'
+        '/<int:participant_id>/jobs/<string:job_name>'
+        '/kibana_metrics')
     api.add_resource(JobEventApi, '/jobs/<int:job_id>/events')
-    api.add_resource(PeerJobEventsApi,
-                     '/workflows/<string:workflow_uuid>/peer_workflows'
-                     '/<int:participant_id>/jobs/<string:job_name>/events')
+    api.add_resource(
+        PeerJobEventsApi, '/workflows/<string:workflow_uuid>/peer_workflows'
+        '/<int:participant_id>/jobs/<string:job_name>/events')
diff --git a/web_console_v2/api/fedlearner_webconsole/job/apis_test.py b/web_console_v2/api/fedlearner_webconsole/job/apis_test.py
new file mode 100644
index 000000000..103290ad7
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/job/apis_test.py
@@ -0,0 +1,83 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from datetime import datetime, timezone +from unittest.mock import patch + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.proto.job_pb2 import PodPb +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from testing.common import BaseTestCase + + +class JobApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + @patch('fedlearner_webconsole.job.apis.JobService.get_pods') + def test_get_job(self, mock_get_pods): + mock_get_pods.return_value = [PodPb(name='test', pod_type='a')] + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + with db.session_scope() as session: + job = Job(id=1, + name='test', + job_type=JobType.DATA_JOIN, + state=JobState.COMPLETED, + workflow_id=1, + project_id=1, + created_at=created_at, + updated_at=created_at) + session.add(job) + session.commit() + resp = self.get_helper('/api/v2/jobs/1') + data = self.get_response_data(resp) + self.assertEqual( + data, { + 'complete_at': 0, + 'start_at': 0, + 'crd_kind': '', + 'crd_meta': { + 'api_version': '' + }, + 'created_at': to_timestamp(created_at), + 'id': 1, + 'is_disabled': False, + 'job_type': 'DATA_JOIN', + 'name': 'test', + 'pods': [{ + 'creation_timestamp': 0, + 'message': '', + 'name': 'test', + 'pod_ip': '', + 'pod_type': 'a', + 'state': '' + }], + 'project_id': 1, + 'snapshot': '', + 'state': 'COMPLETED', + 'updated_at': to_timestamp(created_at), + 'workflow_id': 1, + 'error_message': { + 'app': '', + 'pods': {} + } + }) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/controller.py b/web_console_v2/api/fedlearner_webconsole/job/controller.py new file mode 100644 index 000000000..da8c646d9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/controller.py @@ -0,0 +1,148 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +from typing import Tuple, Optional + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.job.service import JobService +from fedlearner_webconsole.job.yaml_formatter import YamlFormatterService +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp +from fedlearner_webconsole.job.utils import DurationState, emit_job_duration_store +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.workflow import build_job_name + + +def _are_peers_ready(session: Session, project: Project, job_name: str) -> bool: + service = ParticipantService(session) + participants = service.get_platform_participants_by_project(project.id) + for participant in participants: + client = RpcClient.from_project_and_participant(project.name, project.token, participant.domain_name) + resp = client.check_job_ready(job_name) + # Fallback solution: we think peer is ready if rpc fails + if resp.status.code != common_pb2.STATUS_SUCCESS: + emit_store('job.controller.check_peer_ready_failed', 1) + continue + if not resp.is_ready: + return False + return True + + +def schedule_job(unused_session: Session, job: Job): + del unused_session + if job.is_disabled: + # No action + return + # COMPLETED/FAILED Job State can be scheduled since stop action will + # not change the state of completed or failed job + assert job.state in [JobState.NEW, JobState.STOPPED, JobState.COMPLETED, JobState.FAILED] + job.snapshot = None + # Marks the job to be scheduled + job.state = JobState.WAITING + job.error_message = None + + +def start_job_if_ready(session: Session, job: Job) -> Tuple[bool, Optional[str]]: + """Schedules a job for execution. + + Returns: + Job readiness and the related message. 
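+
+    Example (illustrative sketch only; assumes `job` is in WAITING state and
+    all local and peer readiness checks pass):
+
+        started, message = start_job_if_ready(session, job)
+        # started is True and message is None on a successful submission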
+ """ + if job.state != JobState.WAITING: + return False, f'Invalid job state: {job.id} {job.state}' + + # Checks readiness locally + if not JobService(session).is_ready(job): + return False, None + config = job.get_config() + if config.is_federated: + # Checks peers' readiness for federated job + if not _are_peers_ready(session, job.project, job.name): + return False, None + + _start_job(session, job) + return True, job.error_message + + +def _start_job(session: Session, job: Job): + """Starts a job locally.""" + try: + assert job.state == JobState.WAITING, 'Job state should be WAITING' + # Builds yaml by template and submits it to k8s + yaml = YamlFormatterService(session).generate_job_run_yaml(job) + job.build_crd_service().create_app(yaml) + # Updates job status if submitting successfully + job.state = JobState.STARTED + except Exception as e: # pylint: disable=broad-except + logging.error(f'Start job {job.id} has error msg: {e.args}') + job.error_message = str(e) + + +def stop_job(unused_session: Session, job: Job): + del unused_session # Unused for now, this argument is to let invoker commit after this function + if job.state not in [JobState.WAITING, JobState.STARTED, JobState.COMPLETED, JobState.FAILED]: + logging.warning('illegal job state, name: %s, state: %s', job.name, job.state) + return + # state change: + # WAITING -> NEW + # STARTED -> STOPPED + # COMPLETED/FAILED unchanged + if job.state == JobState.STARTED: + JobService.set_status_to_snapshot(job) + job.build_crd_service().delete_app() + job.state = JobState.STOPPED + emit_job_duration_store(to_timestamp(now()) - to_timestamp(job.created_at), + job_name=job.name, + state=DurationState.STOPPED) + if job.state == JobState.WAITING: + # This change to make sure no effect on waiting jobs + job.state = JobState.NEW + + +def create_job_without_workflow(session: Session, + job_def: JobDefinition, + project_id: int, + name: Optional[str] = None, + uuid: Optional[str] = None) -> Optional[Job]: + """Create a job without workflow. + Args: + session: db session, must be committed after this function return. + job_def: JobDefinition. job_def.yaml_template should not use any variables of workflow. + project_id: int indicate a project. + name: the unique name of the job overriding the default name {uuid}-{job_def.name} + uuid: {uuid}-{job_def.name} will be the unique name of the job. When job_def.is_federated is True, + participants in the project must have a job with the same name. + Returns: + Optional[Job] + """ + if name is None: + if uuid is None: + uuid = resource_uuid() + name = build_job_name(uuid, job_def.name) + job = session.query(Job).filter_by(name=name).first() + if job is not None: + return None + job = Job(name=name, job_type=JobType(job_def.job_type), workflow_id=0, project_id=project_id, state=JobState.NEW) + JobService.set_config_and_crd_info(job, job_def) + session.add(job) + return job diff --git a/web_console_v2/api/fedlearner_webconsole/job/controller_test.py b/web_console_v2/api/fedlearner_webconsole/job/controller_test.py new file mode 100644 index 000000000..5c3d94573 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/controller_test.py @@ -0,0 +1,275 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import MagicMock, patch, Mock, call + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.controller import _start_job, schedule_job, stop_job, _are_peers_ready, \ + start_job_if_ready, create_job_without_workflow +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.job.yaml_formatter import YamlFormatterService +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.service_pb2 import CheckJobReadyResponse +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.utils.const import DEFAULT_OWNER_FOR_JOB_WITHOUT_WORKFLOW +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.workflow.models import Workflow # pylint: disable=unused-import +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ScheduleJobTest(NoWebServerTestCase): + + def test_schedule_job_disabled(self): + with db.session_scope() as session: + job = Job(id=1, is_disabled=True, state=JobState.NEW) + schedule_job(session, job) + # No change + self.assertEqual(job.state, JobState.NEW) + + def test_schedule_job_invalid_state(self): + with db.session_scope() as session: + job = Job(id=1, state=JobState.STARTED) + self.assertRaises(AssertionError, lambda: schedule_job(session, job)) + + def test_schedule_job_successfully(self): + with db.session_scope() as session: + job = Job(id=1, state=JobState.NEW, snapshot='test snapshot') + job.set_config(JobDefinition()) + schedule_job(session, job) + self.assertIsNone(job.snapshot) + self.assertEqual(job.state, JobState.WAITING) + + @patch('fedlearner_webconsole.job.controller.RpcClient.from_project_and_participant') + def test_are_peers_ready(self, mock_rpc_client_factory: Mock): + project_id = 1 + with db.session_scope() as session: + participant_1 = Participant(id=1, name='participant 1', domain_name='p1.fedlearner.net') + participant_2 = Participant(id=2, name='participant 2', domain_name='p2.fedlearner.net') + project = Project(id=project_id, name='project 1') + session.add_all([ + participant_1, participant_2, project, + ProjectParticipant(project_id=1, participant_id=1), + ProjectParticipant(project_id=1, participant_id=2) + ]) + session.commit() + + mock_check_job_ready = MagicMock() + mock_rpc_client_factory.return_value = MagicMock(check_job_ready=mock_check_job_ready) + + job_name = 'fake_job_name' + with db.session_scope() as session: + project = session.query(Project).get(project_id) + # gRPC error + mock_check_job_ready.side_effect = [ + CheckJobReadyResponse(status=common_pb2.Status(code=common_pb2.STATUS_UNKNOWN_ERROR)), + CheckJobReadyResponse(is_ready=True) + ] + self.assertTrue(_are_peers_ready(session, project, job_name)) + mock_check_job_ready.assert_has_calls([call(job_name), call(job_name)]) + # Not ready + mock_check_job_ready.side_effect = [ + CheckJobReadyResponse(is_ready=False), 
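+                # The first not-ready response short-circuits _are_peers_ready,
+                # so the second (ready) response is never consumed.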
+ CheckJobReadyResponse(is_ready=True) + ] + self.assertFalse(_are_peers_ready(session, project, job_name)) + # Ready + mock_check_job_ready.side_effect = [ + CheckJobReadyResponse(is_ready=True), + CheckJobReadyResponse(is_ready=True) + ] + self.assertTrue(_are_peers_ready(session, project, job_name)) + + +class StartJobTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.job.controller.YamlFormatterService', spec=YamlFormatterService) + @patch('fedlearner_webconsole.job.controller.Job.build_crd_service') + def test_start_job_successfully(self, mock_crd_service, mock_formatter_class): + mock_formatter = mock_formatter_class.return_value + mock_formatter.generate_job_run_yaml.return_value = 'fake job yaml' + mock_crd_service.return_value = MagicMock(create_app=MagicMock(return_value=None)) + with db.session_scope() as session: + job = Job(id=123, + name='test job', + job_type=JobType.RAW_DATA, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + session.add(job) + session.commit() + _start_job(session, job) + session.commit() + # Checks result + mock_crd_service.return_value.create_app.assert_called_with('fake job yaml') + with db.session_scope() as session: + job = session.query(Job).get(123) + self.assertEqual(job.state, JobState.STARTED) + self.assertIsNone(job.error_message) + + @patch('fedlearner_webconsole.job.controller.YamlFormatterService', spec=YamlFormatterService) + @patch('fedlearner_webconsole.job.controller.Job.build_crd_service') + def test_start_job_exception(self, mock_crd_service, mock_formatter_class): + mock_formatter = mock_formatter_class.return_value + mock_formatter.generate_job_run_yaml.return_value = 'fake job yaml' + mock_crd_service.return_value = MagicMock(create_app=MagicMock(return_value=None)) + mock_crd_service.return_value.create_app.side_effect = RuntimeError('some errors in k8s') + with db.session_scope() as session: + job = Job(id=123, + name='test job', + job_type=JobType.RAW_DATA, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + session.add(job) + session.commit() + _start_job(session, job) + session.commit() + # Checks result + mock_crd_service.return_value.create_app.assert_called_with('fake job yaml') + with db.session_scope() as session: + job = session.query(Job).get(123) + self.assertEqual(job.state, JobState.WAITING) + self.assertEqual(job.error_message, 'some errors in k8s') + + +class StopJobTest(NoWebServerTestCase): + + def test_stop_job_invalid_state(self): + with db.session_scope() as session: + job = Job(id=1, state=JobState.NEW) + stop_job(session, job) + # No change + self.assertEqual(job.state, JobState.NEW) + + @patch('fedlearner_webconsole.job.controller.Job.build_crd_service') + @patch('fedlearner_webconsole.job.controller.JobService.set_status_to_snapshot') + def test_stop_job_started(self, mock_set_status_to_snapshot: Mock, mock_build_crd_service: Mock): + mock_delete_app = MagicMock() + mock_build_crd_service.return_value = MagicMock(delete_app=mock_delete_app) + + with db.session_scope() as session: + job = Job(id=1, name='test-job', state=JobState.STARTED, created_at=now()) + stop_job(session, job) + mock_set_status_to_snapshot.assert_called_once_with(job) + mock_delete_app.assert_called_once() + self.assertEqual(job.state, JobState.STOPPED) + + def test_stop_job_waiting(self): + with db.session_scope() as session: + job = Job(id=1, name='test-job', state=JobState.WAITING, created_at=now()) + stop_job(session, job) + self.assertEqual(job.state, JobState.NEW) + + def test_stop_job_completed(self): + 
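+        # stop_job is expected to be a no-op for terminal states such as COMPLETED.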
with db.session_scope() as session: + job = Job(id=1, name='test-job', state=JobState.COMPLETED, created_at=now()) + stop_job(session, job) + # No change + self.assertEqual(job.state, JobState.COMPLETED) + + @patch('fedlearner_webconsole.job.controller._start_job') + @patch('fedlearner_webconsole.job.controller._are_peers_ready') + @patch('fedlearner_webconsole.job.controller.JobService.is_ready') + def test_start_job_if_ready(self, mock_is_ready: Mock, mock_are_peers_ready: Mock, mock_start_job: Mock): + with db.session_scope() as session: + not_ready_job = Job(id=2, + name='not_ready_job', + job_type=JobType.RAW_DATA, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + mock_is_ready.return_value = False + res = start_job_if_ready(session, not_ready_job) + self.assertEqual(res, (False, None)) + peers_not_ready_job = Job(id=3, + name='peers_not_ready_job', + job_type=JobType.PSI_DATA_JOIN, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + mock_is_ready.return_value = True + mock_are_peers_ready.return_value = False + peers_not_ready_job.set_config(JobDefinition(is_federated=True)) + res = start_job_if_ready(session, peers_not_ready_job) + self.assertEqual(res, (False, None)) + peers_ready_job = Job(id=4, + name='peers_ready_job', + job_type=JobType.PSI_DATA_JOIN, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + mock_are_peers_ready.return_value = True + peers_ready_job.set_config(JobDefinition(is_federated=True)) + res = start_job_if_ready(session, peers_ready_job) + self.assertEqual(res, (True, None)) + running_job = Job(id=3002, + name='running_job', + job_type=JobType.RAW_DATA, + state=JobState.STARTED, + workflow_id=1, + project_id=1) + running_job.set_config(JobDefinition(is_federated=True)) + res = start_job_if_ready(session, running_job) + self.assertEqual(res, (False, 'Invalid job state: 3002 JobState.STARTED')) + start_job_calls = [call[0][1].id for call in mock_start_job.call_args_list] + self.assertCountEqual(start_job_calls, [peers_ready_job.id]) + + def test_create_job_without_workflow(self): + with db.session_scope() as session: + project = Project(id=1, name='project 1') + session.add(project) + job_def = JobDefinition(name='lonely_job', job_type=JobDefinition.ANALYZER) + job_def.yaml_template = """ + { + "apiVersion": "sparkoperator.k8s.io/v1beta2", + "kind": "SparkApplication", + "metadata": { + "name": self.name, + }, + } + """ + job = create_job_without_workflow( + session, + job_def=job_def, + project_id=1, + ) + session.commit() + self.assertEqual(job.crd_kind, 'SparkApplication') + yaml = YamlFormatterService(session).generate_job_run_yaml(job) + self.assertEqual(yaml['metadata']['labels']['owner'], DEFAULT_OWNER_FOR_JOB_WITHOUT_WORKFLOW) + with db.session_scope() as session: + job_def.yaml_template = """ + { + "apiVersion": "sparkoperator.k8s.io/v1beta2", + "kind": "SparkApplication", + "metadata": { + "name": self.name, + "namespace": workflow.name + }, + + } + """ + job = create_job_without_workflow(session, job_def=job_def, project_id=1) + session.commit() + self.assertEqual(job.crd_kind, 'SparkApplication') + with self.assertRaisesRegex(ValueError, 'Invalid python dict placeholder error msg: workflow.name'): + YamlFormatterService(session).generate_job_run_yaml(job) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/crd.py b/web_console_v2/api/fedlearner_webconsole/job/crd.py new file mode 100644 index 000000000..e0e4837e7 --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/job/crd.py @@ -0,0 +1,64 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Optional + +from fedlearner_webconsole.k8s.k8s_cache import k8s_cache +from fedlearner_webconsole.k8s.models import CrdKind, SparkApp, FlApp, FedApp, UnknownCrd +from fedlearner_webconsole.k8s.k8s_client import k8s_client +from fedlearner_webconsole.utils.metrics import emit_store + +CRD_CLASS_MAP = { + CrdKind.FLAPP: FlApp, + CrdKind.SPARKAPPLICATION: SparkApp, + CrdKind.FEDAPP: FedApp, + CrdKind.UNKNOWN: UnknownCrd +} + + +class CrdService(object): + + def __init__(self, kind: str, api_version: str, app_name: str): + self.kind = kind + # only un-UNKNOWN kind crd support complete/failed/pods detail + # UNKNOWN only support create and delete + self.supported_kind = CrdKind.from_value(kind) + self.api_version = api_version + self.plural = f'{kind.lower()}s' + self.group, _, self.version = api_version.partition('/') + self.app_name = app_name + + def get_k8s_app(self, snapshot: Optional[dict]): + if snapshot is None: + snapshot = self.get_k8s_app_cache() + return CRD_CLASS_MAP[self.supported_kind].from_json(snapshot) + + def delete_app(self): + emit_store('job.crd_service.deletion', value=1, tags={'name': self.app_name, 'plural': self.plural}) + k8s_client.delete_app(self.app_name, self.group, self.version, self.plural) + + def create_app(self, yaml: dict): + emit_store('job.crd_service.submission', value=1, tags={'name': self.app_name, 'plural': self.plural}) + k8s_client.create_app(yaml, self.group, self.version, self.plural) + + def get_k8s_app_cache(self): + if self.supported_kind == CrdKind.UNKNOWN: + try: + return {'app': k8s_client.get_custom_object(self.app_name, self.group, self.version, self.plural)} + except Exception as e: # pylint: disable=broad-except + logging.error(f'Get app detail failed: {str(e)}') + return {'app': {}} + return k8s_cache.get_cache(self.app_name) diff --git a/web_console_v2/api/fedlearner_webconsole/job/event_listener.py b/web_console_v2/api/fedlearner_webconsole/job/event_listener.py new file mode 100644 index 000000000..87e424404 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/event_listener.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
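+# The listener below maps K8s application events (FLApp/SparkApp/FedApp) to job
+# state updates and, when a finished job allows it, auto-stops its workflow.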
+# + +import logging + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.models import Job, JobState +from fedlearner_webconsole.job.service import JobService +from fedlearner_webconsole.k8s.event_listener import EventListener +from fedlearner_webconsole.k8s.k8s_cache import Event, ObjectType +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.workflow.service import WorkflowService +from fedlearner_webconsole.workflow.workflow_job_controller import stop_workflow + + +class JobEventListener(EventListener): + + def update(self, event: Event): + # TODO(xiangyuxuan.prs): recompose the JobEventListener + valid_obj_type = [ObjectType.FLAPP, ObjectType.SPARKAPP, ObjectType.FEDAPP] + if event.obj_type not in valid_obj_type: + return + logging.debug('[k8s_watcher][job_event_listener]receive event %s', event.app_name) + + with db.session_scope() as session: + job = session.query(Job).filter_by(name=event.app_name).first() + if job is None: + return + old_state = job.state + result_state = JobService(session).update_running_state(event.app_name) + wid = job.workflow_id + session.commit() + + # trigger workflow state change + if old_state != result_state and result_state in [JobState.COMPLETED, JobState.FAILED]: + with db.session_scope() as session: + w = session.query(Workflow).get(wid) + logging.info(f'[JobEventListener] {w.uuid} should be stopped.') + if WorkflowService(session).should_auto_stop(w): + stop_workflow(wid) diff --git a/web_console_v2/api/fedlearner_webconsole/job/metrics.py b/web_console_v2/api/fedlearner_webconsole/job/metrics.py index cda672b54..c9a8d1565 100644 --- a/web_console_v2/api/fedlearner_webconsole/job/metrics.py +++ b/web_console_v2/api/fedlearner_webconsole/job/metrics.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,43 +13,60 @@ # limitations under the License. 
# coding: utf-8 -from datetime import datetime - import mpld3 +from datetime import datetime +from typing import List from matplotlib.figure import Figure - -from fedlearner_webconsole.job.models import JobType from fedlearner_webconsole.utils.es import es +from fedlearner_webconsole.job.models import Job, JobType +from fedlearner_webconsole.utils.job_metrics import get_feature_importance +from fedlearner_webconsole.proto.metrics_pb2 import ModelJobMetrics, Metric + +_CONF_METRIC_LIST = ['tp', 'tn', 'fp', 'fn'] +_TREE_METRIC_LIST = ['acc', 'auc', 'precision', 'recall', 'f1', 'ks', 'mse', 'msre', 'abs'] + _CONF_METRIC_LIST +_NN_METRIC_LIST = ['acc', 'auc', 'loss', 'mse', 'abs'] class JobMetricsBuilder(object): - def __init__(self, job): + + def __init__(self, job: Job): self._job = job def _to_datetime(self, timestamp): if timestamp is None: return None - return datetime.fromtimestamp(timestamp/1000.0) + return datetime.fromtimestamp(timestamp / 1000.0) + + def _is_nn_job(self): + return self._job.job_type in [JobType.NN_MODEL_TRANINING, JobType.NN_MODEL_EVALUATION] + + def _is_tree_job(self): + return self._job.job_type in [JobType.TREE_MODEL_TRAINING, JobType.TREE_MODEL_EVALUATION] + + def query_metrics(self): + if self._is_tree_job(): + return self.query_tree_metrics(need_feature_importance=True) + if self._is_nn_job(): + return self.query_nn_metrics() + return [] def plot_metrics(self, num_buckets=30): + figs = [] if self._job.job_type == JobType.DATA_JOIN: - metrics = self.plot_data_join_metrics(num_buckets) - elif self._job.job_type in [ - JobType.NN_MODEL_TRANINING, JobType.NN_MODEL_EVALUATION]: - metrics = self.plot_nn_metrics(num_buckets) - elif self._job.job_type in [JobType.TREE_MODEL_TRAINING, - JobType.TREE_MODEL_EVALUATION]: - metrics = self.plot_tree_metrics() + figs = self.plot_data_join_metrics(num_buckets) + elif self._is_nn_job(): + metrics = self.query_nn_metrics(num_buckets) + figs = self.plot_nn_metrics(metrics) + elif self._is_tree_job(): + metrics = self.query_tree_metrics(False) + figs = self.plot_tree_metrics(metrics) elif self._job.job_type == JobType.RAW_DATA: - metrics = self.plot_raw_data_metrics(num_buckets) - else: - metrics = [] - return metrics + figs = self.plot_raw_data_metrics(num_buckets) + return figs def plot_data_join_metrics(self, num_buckets=30): res = es.query_data_join_metrics(self._job.name, num_buckets) - time_res = es.query_time_metrics(self._job.name, num_buckets, - index='data_join*') + time_res = es.query_time_metrics(self._job.name, num_buckets, index='data_join*') metrics = [] if not res['aggregations']['OVERALL']['buckets']: return metrics @@ -57,9 +74,7 @@ def plot_data_join_metrics(self, num_buckets=30): # plot pie chart for overall join rate overall = res['aggregations']['OVERALL']['buckets'][0] labels = ['joined', 'fake', 'unjoined'] - sizes = [ - overall['JOINED']['doc_count'], overall['FAKE']['doc_count'], - overall['UNJOINED']['doc_count']] + sizes = [overall['JOINED']['doc_count'], overall['FAKE']['doc_count'], overall['UNJOINED']['doc_count']] fig = Figure() ax = fig.add_subplot(111) ax.pie(sizes, labels=labels, autopct='%1.1f%%') @@ -73,16 +88,14 @@ def plot_data_join_metrics(self, num_buckets=30): et_unjoined = [buck['UNJOINED']['doc_count'] for buck in by_et] fig = Figure() ax = fig.add_subplot(111) - ax.stackplot( - et_index, et_joined, et_faked, et_unjoined, labels=labels) + ax.stackplot(et_index, et_joined, et_faked, et_unjoined, labels=labels) twin_ax = ax.twinx() twin_ax.patch.set_alpha(0.0) et_rate = 
[buck['JOIN_RATE']['value'] for buck in by_et] et_rate_fake = [buck['JOIN_RATE_WITH_FAKE']['value'] for buck in by_et] twin_ax.plot(et_index, et_rate, label='join rate', color='black') - twin_ax.plot(et_index, et_rate_fake, - label='join rate w/ fake', color='#8f8f8f') # grey color + twin_ax.plot(et_index, et_rate_fake, label='join rate w/ fake', color='#8f8f8f') # grey color ax.xaxis_date() ax.legend() @@ -94,53 +107,123 @@ def plot_data_join_metrics(self, num_buckets=30): return metrics - def plot_nn_metrics(self, num_buckets=30): - res = es.query_nn_metrics(self._job.name, num_buckets) - metrics = [] - if not res['aggregations']['PROCESS_TIME']['buckets']: - return metrics - - buckets = res['aggregations']['PROCESS_TIME']['buckets'] - time = [self._to_datetime(buck['key']) for buck in buckets] - - # plot auc curve - auc = [buck['AUC']['value'] for buck in buckets] - fig = Figure() - ax = fig.add_subplot(111) - ax.plot(time, auc, label='auc') - ax.legend() - metrics.append(mpld3.fig_to_dict(fig)) - + def query_nn_metrics(self, num_buckets: int = 30) -> ModelJobMetrics: + res = es.query_nn_metrics(job_name=self._job.name, metric_list=_NN_METRIC_LIST, num_buckets=num_buckets) + metrics = ModelJobMetrics() + aggregations = res['aggregations'] + for metric in _NN_METRIC_LIST: + buckets = aggregations[metric]['PROCESS_TIME']['buckets'] + if len(buckets) == 0: + continue + times = [buck['key'] for buck in buckets] + values = [buck['VALUE']['value'] for buck in buckets] + # filter none value in times and values + time_values = [(t, v) for t, v in zip(times, values) if t is not None and v is not None] + times, values = zip(*time_values) + if len(values) == 0: + continue + metrics.train[metric].steps.extend(times) + metrics.train[metric].values.extend(values) + metrics.eval[metric].steps.extend(times) + metrics.eval[metric].values.extend(values) return metrics - def plot_tree_metrics(self): - metric_list = ['acc', 'auc', 'precision', 'recall', - 'f1', 'ks', 'mse', 'msre', 'abs'] - metrics = [] - aggregations = es.query_tree_metrics(self._job.name, metric_list) - for name in metric_list: + def plot_nn_metrics(self, metrics: ModelJobMetrics): + figs = [] + for name in metrics.train: + fig = Figure() + ax = fig.add_subplot(111) + timestamp = [self._to_datetime(t) for t in metrics.train[name].steps] + values = metrics.train[name].values + ax.plot(timestamp, values, label=name) + ax.legend() + figs.append(mpld3.fig_to_dict(fig)) + return figs + + @staticmethod + def _average_value_by_iteration(metrics: [List[int], List[int]]) -> [List[int], List[int]]: + iter_to_value = {} + for iteration, value in zip(*metrics): + if iteration not in iter_to_value: + iter_to_value[iteration] = [] + iter_to_value[iteration].append(value) + iterations = [] + values = [] + for key, value_list in iter_to_value.items(): + iterations.append(key) + values.append(sum(value_list) / len(value_list)) + return [iterations, values] + + def _get_iter_val(self, records: dict) -> Metric: + iterations = [item['_source']['tags']['iteration'] for item in records] + values = [item['_source']['value'] for item in records] + iterations, values = self._average_value_by_iteration([iterations, values]) + return Metric(steps=iterations, values=values) + + @staticmethod + def _set_confusion_metric(metrics: ModelJobMetrics): + + def _is_training() -> bool: + iter_vals = metrics.train.get('tp') + if iter_vals is not None and len(iter_vals.values) > 0: + return True + return False + + def _get_last_values(name: str, is_training: bool) -> int: + 
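+            # Pick the latest value recorded for `name` (from train metrics when
+            # the job was in training mode, else eval); default to 0 when absent.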
if is_training: + iter_vals = metrics.train.get(name) + else: + iter_vals = metrics.eval.get(name) + if iter_vals is not None and len(iter_vals.values) > 0: + return int(iter_vals.values[-1]) + return 0 + + _is_training = _is_training() + metrics.confusion_matrix.tp = _get_last_values('tp', _is_training) + metrics.confusion_matrix.tn = _get_last_values('tn', _is_training) + metrics.confusion_matrix.fp = _get_last_values('fp', _is_training) + metrics.confusion_matrix.fn = _get_last_values('fn', _is_training) + # remove confusion relevant metrics from train metrics + for key in _CONF_METRIC_LIST: + metrics.train.pop(key) + metrics.eval.pop(key) + + def query_tree_metrics(self, need_feature_importance=False) -> ModelJobMetrics: + job_name = self._job.name + aggregations = es.query_tree_metrics(job_name, _TREE_METRIC_LIST)['aggregations'] + metrics = ModelJobMetrics() + for name in _TREE_METRIC_LIST: train_ = aggregations[name.upper()]['TRAIN']['TOP']['hits']['hits'] eval_ = aggregations[name.upper()]['EVAL']['TOP']['hits']['hits'] - if len(train_) == 0 and len(eval_) == 0: + if len(train_) > 0: + metrics.train[name].MergeFrom(self._get_iter_val(train_)) + if len(eval_) > 0: + metrics.eval[name].MergeFrom(self._get_iter_val(eval_)) + self._set_confusion_metric(metrics) + if need_feature_importance: + metrics.feature_importance.update(get_feature_importance(self._job)) + return metrics + + def plot_tree_metrics(self, metrics: ModelJobMetrics): + metric_list = set.union(set(metrics.train.keys()), set(metrics.eval.keys())) + figs = [] + for name in metric_list: + train_metric = metrics.train.get(name) + eval_metric = metrics.eval.get(name) + if train_metric is None and eval_metric is None: continue fig = Figure() ax = fig.add_subplot(111) - if len(train_) > 0: - train_metric = [(item['_source']['tags']['iteration'], - item['_source']['value']) - for item in train_] - ax.plot(*zip(*train_metric), label='train', color='blue') - if len(eval_) > 0: - eval_metric = [(item['_source']['tags']['iteration'], - item['_source']['value']) - for item in eval_] - ax.plot(*zip(*eval_metric), label='eval', color='red') + if train_metric is not None: + ax.plot(train_metric.steps, train_metric.values, label='train', color='blue') + if eval_metric is not None: + ax.plot(eval_metric.steps, eval_metric.values, label='eval', color='red') ax.legend() ax.set_title(name) ax.set_xlabel('iteration') ax.set_ylabel('value') - metrics.append(mpld3.fig_to_dict(fig)) - return metrics + figs.append(mpld3.fig_to_dict(fig)) + return figs def plot_raw_data_metrics(self, num_buckets=30): res = es.query_time_metrics(self._job.name, num_buckets) @@ -158,19 +241,9 @@ def _plot_pt_vs_et(self, res): for buck in by_pt] fig = Figure() ax = fig.add_subplot(111) - pt_index = [ - idx for idx, time in zip(pt_index, pt_min) if time is not None - ] - ax.plot( - pt_index, - list(filter(lambda x: x is not None, pt_min)), - label='min event time' - ) - ax.plot( - pt_index, - list(filter(lambda x: x is not None, pt_max)), - label='max event time' - ) + pt_index = [idx for idx, time in zip(pt_index, pt_min) if time is not None] + ax.plot(pt_index, list(filter(lambda x: x is not None, pt_min)), label='min event time') + ax.plot(pt_index, list(filter(lambda x: x is not None, pt_max)), label='max event time') ax.xaxis_date() ax.yaxis_date() diff --git a/web_console_v2/api/fedlearner_webconsole/job/metrics_test.py b/web_console_v2/api/fedlearner_webconsole/job/metrics_test.py new file mode 100644 index 000000000..b446b008e --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/job/metrics_test.py @@ -0,0 +1,279 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import time +import unittest +from unittest.mock import patch +from http import HTTPStatus + +from testing.common import BaseTestCase, TestAppProcess +from testing.test_data import es_query_result +from fedlearner_webconsole.proto import workflow_definition_pb2 +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.job.models import Job, JobType +from fedlearner_webconsole.job.metrics import JobMetricsBuilder +from fedlearner_webconsole.utils.proto import to_dict + + +@unittest.skip('require es client') +class SkippedJobMetricsBuilderTest(BaseTestCase): + + class Config(BaseTestCase.Config): + ES_HOST = '' + ES_PORT = 80 + + class FollowerConfig(Config): + GRPC_LISTEN_PORT = 4990 + + def test_data_join_metrics(self): + job = Job(name='multi-indices-test27', job_type=JobType.DATA_JOIN) + import json # pylint: disable=import-outside-toplevel + print(json.dumps(JobMetricsBuilder(job).plot_metrics())) + + def test_nn_metrics(self): + job = Job(name='automl-2782410011', job_type=JobType.NN_MODEL_TRANINING) + print(JobMetricsBuilder(job).plot_metrics()) + + def test_peer_metrics(self): + proc = TestAppProcess(JobMetricsBuilderTest, 'follower_test_peer_metrics', JobMetricsBuilderTest.FollowerConfig) + proc.start() + self.leader_test_peer_metrics() + proc.terminate() + + def leader_test_peer_metrics(self): + self.setup_project('leader', JobMetricsBuilderTest.FollowerConfig.GRPC_LISTEN_PORT) + workflow = Workflow(name='test-workflow', project_id=1) + with db.session_scope() as session: + session.add(workflow) + session.commit() + + while True: + resp = self.get_helper('/api/v2/workflows/1/peer_workflows/0/jobs/test-job/metrics') + if resp.status_code == HTTPStatus.OK: + break + time.sleep(1) + + def follower_test_peer_metrics(self): + self.setup_project('follower', JobMetricsBuilderTest.Config.GRPC_LISTEN_PORT) + with db.session_scope() as session: + workflow = Workflow(name='test-workflow', project_id=1, metric_is_public=True) + workflow.set_job_ids([1]) + session.add(workflow) + job = Job(name='automl-2782410011', + job_type=JobType.NN_MODEL_TRANINING, + workflow_id=1, + project_id=1, + config=workflow_definition_pb2.JobDefinition(name='test-job').SerializeToString()) + session.add(job) + session.commit() + + while True: + time.sleep(1) + + +_EXPECTED_TREE_METRICS_RESULT = { + 'train': { + 'ks': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.47770564314760644, 0.5349813321918623, 0.5469192171410906, 0.5596894247461416, 0.5992009702504102, + 0.6175715202967825, 0.6366317091151221, 0.6989964566835509, 0.7088535349932226, 0.7418848541057288 + ] + }, + 'recall': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.40186915887850466, 0.4252336448598131, 
0.45794392523364486, 0.46261682242990654, 0.5233644859813084, + 0.514018691588785, 0.5093457943925234, 0.5373831775700935, 0.5467289719626168, 0.5654205607476636 + ] + }, + 'acc': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [0.857, 0.862, 0.868, 0.872, 0.886, 0.883, 0.884, 0.895, 0.896, 0.902] + }, + 'auc': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.8011640626857863, 0.8377684240565029, 0.8533328577203871, 0.860663242253454, 0.8797977455946351, + 0.8921428741290338, 0.9041610187629308, 0.9179270409740553, 0.928827495184419, 0.9439282062257736 + ] + }, + 'precision': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.8514851485148515, 0.8584905660377359, 0.8596491228070176, 0.8839285714285714, 0.9032258064516129, + 0.8943089430894309, 0.9083333333333333, 0.9504132231404959, 0.9435483870967742, 0.9603174603174603 + ] + }, + 'f1': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.546031746031746, 0.56875, 0.5975609756097561, 0.607361963190184, 0.6627218934911242, + 0.6528189910979227, 0.6526946107784432, 0.6865671641791044, 0.6923076923076923, 0.711764705882353 + ] + } + }, + 'confusion_matrix': { + 'tp': 121, + 'tn': 781, + 'fp': 5, + 'fn': 93 + }, + 'feature_importance': { + 'x': 0.3 + }, + 'eval': {} +} + + +class JobMetricsBuilderTest(unittest.TestCase): + + @patch('fedlearner_webconsole.job.metrics.es.query_nn_metrics') + def test_query_and_plot_nn_metrics(self, mock_es_query): + mock_es_query.return_value = es_query_result.fake_es_query_nn_metrics_result + job = Job(name='test-job', job_type=JobType.NN_MODEL_TRANINING) + metrics = JobMetricsBuilder(job).query_nn_metrics() + self.assertEqual( + to_dict(metrics), { + 'train': { + 'loss': { + 'steps': [ + 1645093650000.0, 1645093655000.0, 1645093660000.0, 1645093665000.0, 1645093670000.0, + 1645093675000.0, 1645093680000.0, 1645093685000.0, 1645093690000.0, 1645093695000.0 + ], + 'values': [ + 1.8112774487783219, 0.8499700573859391, 0.5077963560819626, 0.4255857397157412, + 0.3902850116000456, 0.3689204063266516, 0.34096595416776837, 0.3247630867641419, + 0.3146447554727395, 0.3103061146461047 + ] + }, + 'acc': { + 'steps': [ + 1645093650000.0, 1645093655000.0, 1645093660000.0, 1645093665000.0, 1645093670000.0, + 1645093675000.0, 1645093680000.0, 1645093685000.0, 1645093690000.0, 1645093695000.0 + ], + 'values': [ + 0.37631335140332667, 0.6482393520849722, 0.749889914331765, 0.7920331122783514, + 0.8848890877571427, 0.8932028951744239, 0.8983024559915066, 0.9003030106425285, + 0.9026716228326161, 0.9047519653053074 + ] + } + }, + 'eval': { + 'loss': { + 'steps': [ + 1645093650000.0, 1645093655000.0, 1645093660000.0, 1645093665000.0, 1645093670000.0, + 1645093675000.0, 1645093680000.0, 1645093685000.0, 1645093690000.0, 1645093695000.0 + ], + 'values': [ + 1.8112774487783219, 0.8499700573859391, 0.5077963560819626, 0.4255857397157412, + 0.3902850116000456, 0.3689204063266516, 0.34096595416776837, 0.3247630867641419, + 0.3146447554727395, 0.3103061146461047 + ] + }, + 'acc': { + 'steps': [ + 1645093650000.0, 1645093655000.0, 1645093660000.0, 1645093665000.0, 1645093670000.0, + 1645093675000.0, 1645093680000.0, 1645093685000.0, 1645093690000.0, 1645093695000.0 + ], + 'values': [ + 0.37631335140332667, 0.6482393520849722, 0.749889914331765, 0.7920331122783514, + 0.8848890877571427, 0.8932028951744239, 0.8983024559915066, 0.9003030106425285, + 0.9026716228326161, 0.9047519653053074 + ] + } + }, + 
'feature_importance': {} + }) + figs = JobMetricsBuilder(job).plot_nn_metrics(metrics) + self.assertEqual(len(figs), 2) + + @patch('fedlearner_webconsole.job.metrics.get_feature_importance') + @patch('fedlearner_webconsole.job.metrics.es.query_tree_metrics') + def test_query_and_plot_tree_metrics(self, mock_es_query, mock_get_importance): + mock_es_query.return_value = es_query_result.fake_es_query_tree_metrics_result + mock_get_importance.return_value = {'x': 0.3} + job = Job(name='test-job', job_type=JobType.TREE_MODEL_TRAINING) + metrics = JobMetricsBuilder(job).query_tree_metrics(need_feature_importance=True) + self.assertEqual(to_dict(metrics), _EXPECTED_TREE_METRICS_RESULT) + figs = JobMetricsBuilder(job).plot_tree_metrics(metrics=metrics) + self.assertEqual(len(figs), 6) + + @patch('fedlearner_webconsole.job.metrics.JobMetricsBuilder.query_nn_metrics') + @patch('fedlearner_webconsole.job.metrics.JobMetricsBuilder.query_tree_metrics') + def test_query_metrics(self, mock_tree_metrics, mock_nn_metrics): + mock_tree_metrics.return_value = {'data': 'tree_metrics'} + mock_nn_metrics.return_value = {'data': 'nn_metrics'} + treejob = Job(name='test-tree-job', job_type=JobType.TREE_MODEL_TRAINING) + metrics = JobMetricsBuilder(treejob).query_metrics() + self.assertEqual(metrics, {'data': 'tree_metrics'}) + + nnjob = Job(name='test-nn-job', job_type=JobType.NN_MODEL_TRANINING) + metrics = JobMetricsBuilder(nnjob).query_metrics() + self.assertEqual(metrics, {'data': 'nn_metrics'}) + + @patch('fedlearner_webconsole.job.metrics.get_feature_importance') + @patch('fedlearner_webconsole.job.metrics.es.query_tree_metrics') + def test_query_and_plot_eval_tree_metrics(self, mock_es_query, mock_get_importance): + mock_es_query.return_value = es_query_result.fake_es_query_eval_tree_metrics_result + mock_get_importance.return_value = {'x': 0.3} + job = Job(name='test-job', job_type=JobType.TREE_MODEL_TRAINING) + metrics = JobMetricsBuilder(job).query_tree_metrics(need_feature_importance=True) + self.assertEqual( + to_dict(metrics), { + 'eval': { + 'auc': { + 'steps': [10.0], + 'values': [0.7513349869345765] + }, + 'recall': { + 'steps': [10.0], + 'values': [0.2176754973809691] + }, + 'f1': { + 'steps': [10.0], + 'values': [0.327016797789616] + }, + 'ks': { + 'steps': [10.0], + 'values': [0.375900675399236] + }, + 'acc': { + 'steps': [10.0], + 'values': [0.8019642162921606] + }, + 'precision': { + 'steps': [10.0], + 'values': [0.6587757792451808] + } + }, + 'confusion_matrix': { + 'tp': 179, + 'tn': 2827, + 'fp': 93, + 'fn': 649 + }, + 'feature_importance': { + 'x': 0.3 + }, + 'train': {} + }) + figs = JobMetricsBuilder(job).plot_tree_metrics(metrics=metrics) + self.assertEqual(len(figs), 6) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/model_test.py b/web_console_v2/api/fedlearner_webconsole/job/model_test.py new file mode 100644 index 000000000..5cc6b2338 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/model_test.py @@ -0,0 +1,89 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from datetime import datetime, timezone +from unittest.mock import patch, MagicMock + +from fedlearner_webconsole.k8s.models import Pod, PodState, ContainerState +from fedlearner_webconsole.proto.job_pb2 import CrdMetaData, JobPb, JobErrorMessage +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.workflow.models import Workflow # pylint: disable=unused-import +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ModelTest(NoWebServerTestCase): + + def test_is_training_job(self): + job = Job() + job.job_type = JobType.NN_MODEL_TRANINING + self.assertTrue(job.is_training_job()) + job.job_type = JobType.TREE_MODEL_TRAINING + self.assertTrue(job.is_training_job()) + job.job_type = JobType.TREE_MODEL_EVALUATION + self.assertFalse(job.is_training_job()) + + def test_get_job_crdmeta(self): + job = Job() + job.set_crd_meta(CrdMetaData(api_version='a/b')) + self.assertEqual(job.get_crd_meta(), CrdMetaData(api_version='a/b')) + + def test_to_proto(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + job = Job(id=1, + name='test', + job_type=JobType.DATA_JOIN, + state=JobState.COMPLETED, + workflow_id=1, + project_id=1, + created_at=created_at, + updated_at=created_at) + expected_job_proto = JobPb(id=1, + name='test', + job_type=JobDefinition.DATA_JOIN, + state='COMPLETED', + workflow_id=1, + project_id=1, + crd_meta=CrdMetaData(), + created_at=to_timestamp(created_at), + updated_at=to_timestamp(created_at), + error_message=JobErrorMessage()) + self.assertEqual(job.to_proto(), expected_job_proto) + + @patch('fedlearner_webconsole.job.models.Job.get_k8s_app') + def test_get_error_message_with_pods(self, mock_get_k8s_app): + fake_pods = [ + Pod(name='pod0', + container_states=[ContainerState(state='terminated', message='00031003')], + state=PodState.FAILED), + Pod(name='pod1', container_states=[ContainerState(state='terminated')], state=PodState.FAILED), + Pod(name='pod2', + container_states=[ContainerState(state='terminated', message='Completed')], + state=PodState.SUCCEEDED) + ] + mock_get_k8s_app.return_value = MagicMock(pods=fake_pods) + job = Job(error_message='test', state=JobState.FAILED) + self.assertEqual(job.get_error_message_with_pods(), + JobErrorMessage(app='test', pods={'pod0': 'terminated:00031003'})) + job.error_message = None + self.assertEqual(job.get_error_message_with_pods(), JobErrorMessage(pods={'pod0': 'terminated:00031003'})) + mock_get_k8s_app.return_value = MagicMock(pods=[]) + self.assertEqual(job.get_error_message_with_pods(), JobErrorMessage()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/models.py b/web_console_v2/api/fedlearner_webconsole/job/models.py index c9b00aff6..d00479640 100644 --- a/web_console_v2/api/fedlearner_webconsole/job/models.py +++ b/web_console_v2/api/fedlearner_webconsole/job/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The 
FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,18 +13,22 @@ # limitations under the License. # coding: utf-8 -import datetime -import logging import enum import json +from typing import Optional + +from google.protobuf import text_format from sqlalchemy.sql import func from sqlalchemy.sql.schema import Index +from fedlearner_webconsole.job.crd import CrdService +from fedlearner_webconsole.k8s.models import K8sApp, PodState +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.pp_datetime import to_timestamp from fedlearner_webconsole.utils.mixins import to_dict_mixin from fedlearner_webconsole.db import db -from fedlearner_webconsole.k8s.models import FlApp, Pod, FlAppState -from fedlearner_webconsole.utils.k8s_client import k8s_client from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.proto.job_pb2 import CrdMetaData, JobPb, JobErrorMessage class JobState(enum.Enum): @@ -35,13 +39,13 @@ class JobState(enum.Enum): # 4. WAITING -> NEW: triggered by user, stop workflow # 4. STARTED -> STOPPED: triggered by user, stop workflow # 5. STARTED -> COMPLETED/FAILED: triggered by k8s_watcher - INVALID = 0 # INVALID STATE - STOPPED = 1 # STOPPED BY USER - WAITING = 2 # SCHEDULED, WAITING FOR RUNNING - STARTED = 3 # RUNNING - NEW = 4 # BEFORE SCHEDULE - COMPLETED = 5 # SUCCEEDED JOB - FAILED = 6 # FAILED JOB + INVALID = 0 # INVALID STATE + STOPPED = 1 # STOPPED BY USER + WAITING = 2 # SCHEDULED, WAITING FOR RUNNING + STARTED = 3 # RUNNING + NEW = 4 # BEFORE SCHEDULE + COMPLETED = 5 # SUCCEEDED JOB + FAILED = 6 # FAILED JOB # must be consistent with JobType in proto @@ -54,22 +58,12 @@ class JobType(enum.Enum): TREE_MODEL_TRAINING = 5 NN_MODEL_EVALUATION = 6 TREE_MODEL_EVALUATION = 7 + TRANSFORMER = 8 + ANALYZER = 9 + CUSTOMIZED = 10 -def merge(x, y): - """Given two dictionaries, merge them into a new dict as a shallow copy.""" - z = x.copy() - z.update(y) - return z - - -@to_dict_mixin( - extras={ - 'state': (lambda job: job.get_state_for_frontend()), - 'pods': (lambda job: job.get_pods_for_frontend()), - 'config': (lambda job: job.get_config()), - 'complete_at': (lambda job: job.get_complete_at()) - }) +@to_dict_mixin(ignores=['config'], extras={'complete_at': (lambda job: job.get_complete_at())}) class Job(db.Model): __tablename__ = 'job_v2' __table_args__ = (Index('idx_workflow_id', 'workflow_id'), { @@ -77,15 +71,12 @@ class Job(db.Model): 'mysql_engine': 'innodb', 'mysql_charset': 'utf8mb4', }) - id = db.Column(db.Integer, - primary_key=True, - autoincrement=True, - comment='id') + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') name = db.Column(db.String(255), unique=True, comment='name') - job_type = db.Column(db.Enum(JobType, native_enum=False), + job_type = db.Column(db.Enum(JobType, native_enum=False, create_constraint=False), nullable=False, comment='job type') - state = db.Column(db.Enum(JobState, native_enum=False), + state = db.Column(db.Enum(JobState, native_enum=False, create_constraint=False), nullable=False, default=JobState.INVALID, comment='state') @@ -95,157 +86,113 @@ class Job(db.Model): workflow_id = db.Column(db.Integer, nullable=False, comment='workflow id') project_id = db.Column(db.Integer, nullable=False, comment='project id') - flapp_snapshot = 
db.Column(db.Text(16777215), comment='flapp snapshot') - pods_snapshot = db.Column(db.Text(16777215), comment='pods snapshot') + flapp_snapshot = db.Column(db.Text(16777215), comment='flapp snapshot') # deprecated + sparkapp_snapshot = db.Column(db.Text(16777215), comment='sparkapp snapshot') # deprecated + # Format like {'app': app_status_dict, 'pods': {'items': pod_list}}. + snapshot = db.Column(db.Text(16777215), comment='snapshot') error_message = db.Column(db.Text(), comment='error message') + crd_meta = db.Column(db.Text(), comment='metadata') + # Use string but not enum, in order to support all kinds of crd to create and delete, + # but only FLApp SparkApplication and FedApp support getting pods and auto finish. + crd_kind = db.Column(db.String(255), comment='kind') - created_at = db.Column(db.DateTime(timezone=True), - server_default=func.now(), - comment='created at') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at') updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), comment='updated at') deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted at') - project = db.relationship('Project', - primaryjoin='Project.id == ' - 'foreign(Job.project_id)') - workflow = db.relationship('Workflow', - primaryjoin='Workflow.id == ' - 'foreign(Job.workflow_id)') + project = db.relationship(Project.__name__, primaryjoin='Project.id == ' 'foreign(Job.project_id)') + workflow = db.relationship('Workflow', primaryjoin='Workflow.id == ' 'foreign(Job.workflow_id)') - def get_config(self): + def get_config(self) -> Optional[JobDefinition]: if self.config is not None: proto = JobDefinition() proto.ParseFromString(self.config) return proto return None - def set_config(self, proto): + def set_config(self, proto: JobDefinition): if proto is not None: self.config = proto.SerializeToString() else: self.config = None - def _set_snapshot_flapp(self): - def default(o): - if isinstance(o, (datetime.date, datetime.datetime)): - return o.isoformat() - return str(o) - - flapp = k8s_client.get_flapp(self.name) - if flapp: - self.flapp_snapshot = json.dumps(flapp, default=default) - else: - self.flapp_snapshot = None - - def get_flapp_details(self): - if self.state == JobState.STARTED: - flapp = k8s_client.get_flapp(self.name) - elif self.flapp_snapshot is not None: - flapp = json.loads(self.flapp_snapshot) - # aims to support old job - if 'flapp' not in flapp: - flapp['flapp'] = None - if 'pods' not in flapp and self.pods_snapshot: - flapp['pods'] = json.loads(self.pods_snapshot)['pods'] - else: - flapp = {'flapp': None, 'pods': {'items': []}} - return flapp - - def get_pods_for_frontend(self, include_private_info=True): - flapp_details = self.get_flapp_details() - flapp = FlApp.from_json(flapp_details.get('flapp', None)) - pods_json = None - if 'pods' in flapp_details: - pods_json = flapp_details['pods'].get('items', None) - pods = [] - if pods_json is not None: - pods = [Pod.from_json(p) for p in pods_json] - - # deduplication pods both in pods and flapp - result = {} - for pod in flapp.pods: - result[pod.name] = pod - for pod in pods: - result[pod.name] = pod - return [pod.to_dict(include_private_info) for pod in result.values()] - - def get_state_for_frontend(self): - return self.state.name - - def is_flapp_failed(self): - # TODO: make the getter more efficient - flapp = FlApp.from_json(self.get_flapp_details()['flapp']) - return flapp.state in [FlAppState.FAILED, FlAppState.SHUTDOWN] - - def 
is_flapp_complete(self): - # TODO: make the getter more efficient - flapp = FlApp.from_json(self.get_flapp_details()['flapp']) - return flapp.state == FlAppState.COMPLETED - - def get_complete_at(self): - # TODO: make the getter more efficient - flapp = FlApp.from_json(self.get_flapp_details()['flapp']) - return flapp.completed_at - - def stop(self): - if self.state not in [JobState.WAITING, JobState.STARTED, - JobState.COMPLETED, JobState.FAILED]: - logging.warning('illegal job state, name: %s, state: %s', - self.name, self.state) - return - if self.state == JobState.STARTED: - self._set_snapshot_flapp() - k8s_client.delete_flapp(self.name) - # state change: - # WAITING -> NEW - # STARTED -> STOPPED - # COMPLETED/FAILED unchanged - if self.state == JobState.STARTED: - self.state = JobState.STOPPED - if self.state == JobState.WAITING: - self.state = JobState.NEW - - def schedule(self): - # COMPLETED/FAILED Job State can be scheduled since stop action - # will not change the state of completed or failed job - assert self.state in [JobState.NEW, JobState.STOPPED, - JobState.COMPLETED, JobState.FAILED] - self.pods_snapshot = None - self.flapp_snapshot = None - self.state = JobState.WAITING - - def start(self): - assert self.state == JobState.WAITING - self.state = JobState.STARTED - - def complete(self): - assert self.state == JobState.STARTED, 'Job State is not STARTED' - self._set_snapshot_flapp() - k8s_client.delete_flapp(self.name) - self.state = JobState.COMPLETED - - def fail(self): - assert self.state == JobState.STARTED, 'Job State is not STARTED' - self._set_snapshot_flapp() - k8s_client.delete_flapp(self.name) - self.state = JobState.FAILED + # TODO(xiangyuxuan.prs): Remove this func and get_completed_at from model to service. + def get_k8s_app(self) -> K8sApp: + snapshot = None + if self.state != JobState.STARTED: + snapshot = self.snapshot or '{}' + snapshot = json.loads(snapshot) + return self.build_crd_service().get_k8s_app(snapshot) + + def build_crd_service(self) -> CrdService: + if self.crd_kind is not None: + return CrdService(self.crd_kind, self.get_crd_meta().api_version, self.name) + # TODO(xiangyuxuan.prs): Adapt to old data, remove in the future. 
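+        # Fallback for legacy rows persisted before crd_kind/crd_meta existed:
+        # TRANSFORMER jobs ran as SparkApplications, while every other legacy
+        # job type is assumed to have been an FLApp.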
+ if self.job_type in [JobType.TRANSFORMER]: + return CrdService('SparkApplication', 'sparkoperator.k8s.io/v1beta2', self.name) + return CrdService('FLApp', 'fedlearner.k8s.io/v1alpha1', self.name) + + def is_training_job(self): + return self.job_type in [JobType.NN_MODEL_TRANINING, JobType.TREE_MODEL_TRAINING] + + def get_complete_at(self) -> Optional[int]: + crd_obj = self.get_k8s_app() + return crd_obj.completed_at + + def get_start_at(self) -> int: + crd_obj = self.get_k8s_app() + return crd_obj.creation_timestamp + + def get_crd_meta(self) -> CrdMetaData: + crd_meta_obj = CrdMetaData() + if self.crd_meta is not None: + return text_format.Parse(self.crd_meta, crd_meta_obj) + return crd_meta_obj + + def set_crd_meta(self, crd_meta: Optional[CrdMetaData] = None): + if crd_meta is None: + crd_meta = CrdMetaData() + self.crd_meta = text_format.MessageToString(crd_meta) + + def get_error_message_with_pods(self) -> JobErrorMessage: + failed_pods_msg = {} + for pod in self.get_k8s_app().pods: + if pod.state != PodState.FAILED: + continue + pod_error_msg = pod.get_message(include_private_info=True).summary + if pod_error_msg: + failed_pods_msg[pod.name] = pod_error_msg + return JobErrorMessage(app=self.error_message, pods=failed_pods_msg) + + def to_proto(self) -> JobPb: + return JobPb(id=self.id, + name=self.name, + job_type=self.job_type.value, + state=self.state.name, + is_disabled=self.is_disabled, + workflow_id=self.workflow_id, + project_id=self.project_id, + snapshot=self.snapshot, + error_message=self.get_error_message_with_pods(), + crd_meta=self.get_crd_meta(), + crd_kind=self.crd_kind, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + complete_at=self.get_complete_at(), + start_at=self.get_start_at()) class JobDependency(db.Model): __tablename__ = 'job_dependency_v2' - __table_args__ = (Index('idx_src_job_id', 'src_job_id'), - Index('idx_dst_job_id', 'dst_job_id'), { - 'comment': 'record job dependencies', - 'mysql_engine': 'innodb', - 'mysql_charset': 'utf8mb4', - }) - id = db.Column(db.Integer, - primary_key=True, - autoincrement=True, - comment='id') + __table_args__ = (Index('idx_src_job_id', 'src_job_id'), Index('idx_dst_job_id', 'dst_job_id'), { + 'comment': 'record job dependencies', + 'mysql_engine': 'innodb', + 'mysql_charset': 'utf8mb4', + }) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') src_job_id = db.Column(db.Integer, comment='src job id') dst_job_id = db.Column(db.Integer, comment='dst job id') dep_index = db.Column(db.Integer, comment='dep index') diff --git a/web_console_v2/api/fedlearner_webconsole/job/scheduler.py b/web_console_v2/api/fedlearner_webconsole/job/scheduler.py new file mode 100644 index 000000000..d18476e06 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/scheduler.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +from typing import Tuple + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.controller import start_job_if_ready +from fedlearner_webconsole.job.models import Job, JobState +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, JobSchedulerOutput + + +class JobScheduler(IRunnerV2): + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + with db.session_scope() as session: + waiting_jobs = [ + jid + for jid, *_ in session.query(Job.id).filter(Job.state == JobState.WAITING, Job.is_disabled.is_(False)) + ] + if waiting_jobs: + logging.info(f'[JobScheduler] Scheduling jobs {waiting_jobs}') + output = JobSchedulerOutput() + for job_id in waiting_jobs: + with db.session_scope() as session: + # Row lock to prevent other changes + job = session.query(Job).with_for_update().get(job_id) + ready, message = start_job_if_ready(session, job) + if ready: + if message: + output.failed_to_start_jobs.append(job_id) + else: + output.started_jobs.append(job_id) + if message: + output.messages[job_id] = message + session.commit() + return RunnerStatus.DONE, RunnerOutput(job_scheduler_output=output) diff --git a/web_console_v2/api/fedlearner_webconsole/job/scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/job/scheduler_test.py new file mode 100644 index 000000000..9e1936cc0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/scheduler_test.py @@ -0,0 +1,83 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import patch, Mock + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.job.scheduler import JobScheduler +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, RunnerOutput, JobSchedulerOutput +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.workflow.models import Workflow # pylint: disable=unused-import +from testing.no_web_server_test_case import NoWebServerTestCase + + +class SchedulerTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.job.scheduler.start_job_if_ready') + def test_run(self, mock_start_job_if_ready: Mock): + with db.session_scope() as session: + ready_job = Job(id=1, + name='ready_job', + job_type=JobType.RAW_DATA, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + ready_job.set_config(JobDefinition(is_federated=False)) + not_ready_job = Job(id=2, + name='not_ready_job', + job_type=JobType.RAW_DATA, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + ready_job_start_failed = Job(id=3, + name='ready_failed_job', + job_type=JobType.RAW_DATA, + state=JobState.WAITING, + workflow_id=1, + project_id=1) + session.add_all([ready_job, not_ready_job, ready_job_start_failed]) + session.commit() + + def fake_start_job_if_ready(session, job): + if job.name == ready_job_start_failed.name: + job.error_message = 'Failed to start' + return True, job.error_message + if job.name == ready_job.name: + return True, None + if job.name == not_ready_job.name: + return False, None + raise RuntimeError(f'Unknown job {job.name}') + + mock_start_job_if_ready.side_effect = fake_start_job_if_ready + + runner = JobScheduler() + context = RunnerContext(0, RunnerInput()) + status, output = runner.run(context) + self.assertEqual(status, RunnerStatus.DONE) + self.assertEqual( + output, + RunnerOutput(job_scheduler_output=JobSchedulerOutput(started_jobs=[ready_job.id], + failed_to_start_jobs=[ready_job_start_failed.id], + messages={ + ready_job_start_failed.id: 'Failed to start', + }))) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/service.py b/web_console_v2/api/fedlearner_webconsole/job/service.py index fc015dfb6..cf351c7dd 100644 --- a/web_console_v2/api/fedlearner_webconsole/job/service.py +++ b/web_console_v2/api/fedlearner_webconsole/job/service.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,13 +13,27 @@ # limitations under the License. 
# coding: utf-8 - +import datetime +import json import logging +from typing import List + from sqlalchemy.orm.session import Session -from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.job.models import Job, JobDependency, JobState -from fedlearner_webconsole.proto import common_pb2 -from fedlearner_webconsole.utils.metrics import emit_counter + +from fedlearner_webconsole.proto.job_pb2 import CrdMetaData, PodPb +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.job.models import Job, JobDependency, \ + JobState +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.pp_yaml import compile_yaml_template +from fedlearner_webconsole.job.utils import DurationState, emit_job_duration_store + + +def serialize_to_json(o): + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + return str(o) class JobService: @@ -28,42 +42,91 @@ def __init__(self, session: Session): self._session = session def is_ready(self, job: Job) -> bool: - deps = self._session.query(JobDependency).filter_by( - dst_job_id=job.id).all() + deps = self._session.query(JobDependency).filter_by(dst_job_id=job.id).all() for dep in deps: src_job = self._session.query(Job).get(dep.src_job_id) - assert src_job is not None, 'Job {} not found'.format( - dep.src_job_id) + assert src_job is not None, f'Job {dep.src_job_id} not found' if not src_job.state == JobState.COMPLETED: return False return True - @staticmethod - def is_peer_ready(job: Job) -> bool: - project_config = job.project.get_config() - for party in project_config.participants: - client = RpcClient(project_config, party) - resp = client.check_job_ready(job.name) - if resp.status.code != common_pb2.STATUS_SUCCESS: - emit_counter('check_peer_ready_failed', 1) - return True - if not resp.is_ready: - return False - return True - - def update_running_state(self, job_name): + def update_running_state(self, job_name: str) -> JobState: job = self._session.query(Job).filter_by(name=job_name).first() if job is None: - emit_counter('[JobService]job_not_found', 1) - return + emit_store('job.service.update_running_state_error', + 1, + tags={ + 'job_name': job_name, + 'reason': 'job_not_found' + }) + return None if not job.state == JobState.STARTED: - emit_counter('[JobService]wrong_job_state', 1) - return - if job.is_flapp_complete(): - job.complete() - logging.debug('[JobService]change job %s state to %s', - job.name, JobState(job.state)) - elif job.is_flapp_failed(): - job.fail() - logging.debug('[JobService]change job %s state to %s', - job.name, JobState(job.state)) + emit_store('job.service.update_running_state_error', + 1, + tags={ + 'job_name': job_name, + 'reason': 'wrong_job_state' + }) + return job.state + if job.get_k8s_app().is_completed: + self.complete(job) + logging.debug('[JobService]change job %s state to %s', job.name, JobState(job.state)) + elif job.get_k8s_app().is_failed: + self.fail(job) + logging.debug('[JobService]change job %s state to %s', job.name, JobState(job.state)) + return job.state + + @staticmethod + def get_pods(job: Job, include_private_info=True) -> List[PodPb]: + crd_obj = job.get_k8s_app() + if crd_obj: + return [pod.to_proto(include_private_info) for pod in crd_obj.pods] + return [] + + @staticmethod + def set_config_and_crd_info(job: Job, proto: JobDefinition): + job.set_config(proto) + yaml = {} + try: + yaml = 
compile_yaml_template(job.get_config().yaml_template, post_processors=[], ignore_variables=True) + except Exception as e: # pylint: disable=broad-except + # Don't raise exception because of old templates, default None will use FLApp. + logging.error( + f'Failed format yaml for job {job.name} when try to get the kind and api_version. msg: {str(e)}') + kind = yaml.get('kind', None) + api_version = yaml.get('apiVersion', None) + job.crd_kind = kind + job.set_crd_meta(CrdMetaData(api_version=api_version)) + + @staticmethod + def complete(job: Job): + assert job.state == JobState.STARTED, 'Job State is not STARTED' + JobService.set_status_to_snapshot(job) + job.build_crd_service().delete_app() + job.state = JobState.COMPLETED + emit_job_duration_store(duration=job.get_complete_at() - to_timestamp(job.created_at), + job_name=job.name, + state=DurationState.COMPLETED) + + @staticmethod + def fail(job: Job): + assert job.state == JobState.STARTED, 'Job State is not STARTED' + JobService.set_status_to_snapshot(job) + job.build_crd_service().delete_app() + job.state = JobState.FAILED + job.error_message = job.get_k8s_app().error_message + emit_job_duration_store(duration=job.get_complete_at() - to_timestamp(job.created_at), + job_name=job.name, + state=DurationState.FAILURE) + + @staticmethod + def set_status_to_snapshot(job: Job): + app = job.build_crd_service().get_k8s_app_cache() + job.snapshot = json.dumps(app, default=serialize_to_json) + + @staticmethod + def get_job_yaml(job: Job) -> str: + # Can't query from k8s api server when job is not started. + if job.state != JobState.STARTED: + return job.snapshot or '' + return json.dumps(job.build_crd_service().get_k8s_app_cache(), default=serialize_to_json) diff --git a/web_console_v2/api/fedlearner_webconsole/job/service_test.py b/web_console_v2/api/fedlearner_webconsole/job/service_test.py new file mode 100644 index 000000000..34a064b48 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/service_test.py @@ -0,0 +1,307 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import unittest +from unittest.mock import patch, MagicMock +from datetime import datetime + +from fedlearner_webconsole.proto.job_pb2 import PodPb + +from fedlearner_webconsole.proto import workflow_definition_pb2 +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.job.models import Job, JobDependency, JobType, JobState +from fedlearner_webconsole.job.service import JobService +from fedlearner_webconsole.k8s.models import FlApp +from testing.no_web_server_test_case import NoWebServerTestCase + + +class JobServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + workflow_0 = Workflow(id=0, name='test-workflow-0', project_id=0) + workflow_1 = Workflow(id=1, name='test-workflow-1', project_id=0) + + config = workflow_definition_pb2.JobDefinition(name='test-job').SerializeToString() + job_0 = Job(id=0, + name='raw_data_0', + job_type=JobType.RAW_DATA, + state=JobState.STARTED, + workflow_id=0, + project_id=0, + config=config) + job_1 = Job(id=1, + name='raw_data_1', + job_type=JobType.RAW_DATA, + state=JobState.COMPLETED, + workflow_id=0, + project_id=0, + config=config) + job_2 = Job(id=2, + name='data_join_0', + job_type=JobType.DATA_JOIN, + state=JobState.WAITING, + workflow_id=0, + project_id=0, + config=config) + job_3 = Job(id=3, + name='data_join_1', + job_type=JobType.DATA_JOIN, + state=JobState.COMPLETED, + workflow_id=1, + project_id=0, + config=config) + job_4 = Job(id=4, + name='train_job_0', + job_type=JobType.NN_MODEL_TRANINING, + state=JobState.WAITING, + workflow_id=1, + project_id=0, + config=config) + + job_dep_0 = JobDependency(src_job_id=job_0.id, dst_job_id=job_2.id, dep_index=0) + job_dep_1 = JobDependency(src_job_id=job_1.id, dst_job_id=job_2.id, dep_index=1) + job_dep_2 = JobDependency(src_job_id=job_3.id, dst_job_id=job_4.id, dep_index=0) + + with db.session_scope() as session: + session.add_all([workflow_0, workflow_1]) + session.add_all([job_0, job_1, job_2, job_3, job_4]) + session.add_all([job_dep_0, job_dep_1, job_dep_2]) + session.commit() + + def test_is_ready(self): + with db.session_scope() as session: + job_0 = session.query(Job).get(0) + job_2 = session.query(Job).get(2) + job_4 = session.query(Job).get(4) + job_service = JobService(session) + self.assertTrue(job_service.is_ready(job_0)) + self.assertFalse(job_service.is_ready(job_2)) + self.assertTrue(job_service.is_ready(job_4)) + + @patch('fedlearner_webconsole.job.models.Job.get_k8s_app') + def test_update_running_state(self, mock_crd): + with db.session_scope() as session: + job_0 = session.query(Job).get(0) + job_2 = session.query(Job).get(2) + job_service = JobService(session) + job_service.update_running_state(job_0.name) + self.assertEqual(job_0.state, JobState.COMPLETED) + self.assertTrue(job_service.is_ready(job_2)) + job_0.state = JobState.STARTED + mock_crd.return_value = MagicMock(is_completed=False, is_failed=True, error_message=None) + job_service.update_running_state(job_0.name) + self.assertEqual(job_0.state, JobState.FAILED) + session.commit() + + def test_get_pods(self): + creation_timestamp = datetime.utcnow() + fake_pods = \ + { + 'pods': { + 'items': [ + { + 'status': { + 'phase': 'Running', + 'pod_ip': '172.0.0.1', + }, + 'metadata': { + 'labels': {'fl-replica-type': 'master'}, + 'name': 'name1', + 'creation_timestamp': creation_timestamp, + }, + 'spec': { + 'containers': [ + { + 'name': 'fake_pod', + 
'resources': { + 'limits': { + 'cpu': '4000m', + 'memory': '4Gi', + }, + 'requests': { + 'cpu': '4000m', + 'memory': '4Gi', + } + } + } + ] + } + }, + { + 'status': { + 'phase': 'Pending', + 'pod_ip': '172.0.0.1', + }, + 'metadata': { + 'labels': {'fl-replica-type': 'master'}, + 'name': 'name3', + 'creation_timestamp': creation_timestamp, + }, + 'spec': { + 'containers': [ + { + 'name': 'fake_pod', + 'resources': { + 'limits': { + 'cpu': '4000m', + 'memory': '4Gi', + }, + 'requests': { + 'cpu': '4000m', + 'memory': '4Gi', + } + } + } + ] + } + }, + { + 'status': { + 'phase': 'Succeeded', + 'pod_ip': '172.0.0.2', + }, + 'metadata': { + 'labels': {'fl-replica-type': 'worker'}, + 'name': 'name2', + 'creation_timestamp': creation_timestamp, + }, + 'spec': { + 'containers': [ + { + 'name': 'fake_pod', + 'resources': { + 'limits': { + 'cpu': '4000m', + 'memory': '4Gi', + }, + 'requests': { + 'cpu': '4000m', + 'memory': '4Gi', + } + } + } + ] + } + }, { + 'status': { + 'phase': 'Running', + 'pod_ip': '172.0.0.2', + }, + 'metadata': { + 'labels': {'fl-replica-type': 'worker'}, + 'name': 'running_one', + 'creation_timestamp': creation_timestamp, + }, + 'spec': { + 'containers': [ + { + 'name': 'fake_pod', + 'resources': { + 'limits': { + 'cpu': '4000m', + 'memory': '4Gi', + }, + 'requests': { + 'cpu': '4000m', + 'memory': '4Gi', + } + } + } + ] + } + }, + ] + }, + 'app': { + 'status': { + 'appState': 'FLStateComplete', + 'flReplicaStatus': { + 'Master': { + 'active': { + }, + 'failed': {}, + 'succeeded': { + 'name1': {} + } + }, + 'Worker': { + 'active': { + 'running_one': {} + }, + 'failed': {}, + 'succeeded': { + 'name2': {} + } + } + } + } + } + } + + expected_pods = [ + PodPb( + name='name1', + pod_type='MASTER', + state='SUCCEEDED_AND_FREED', + pod_ip='172.0.0.1', + message='', + creation_timestamp=to_timestamp(creation_timestamp), + ), + PodPb(creation_timestamp=to_timestamp(creation_timestamp), + message='', + name='name3', + pod_ip='172.0.0.1', + pod_type='MASTER', + state='PENDING'), + PodPb( + name='name2', + pod_type='WORKER', + state='SUCCEEDED', + pod_ip='172.0.0.2', + message='', + creation_timestamp=to_timestamp(creation_timestamp), + ), + PodPb( + name='running_one', + pod_type='WORKER', + state='RUNNING', + pod_ip='172.0.0.2', + message='', + creation_timestamp=to_timestamp(creation_timestamp), + ) + ] + fake_job = MagicMock() + fake_job.is_sparkapp = MagicMock(return_value=False) + fake_job.get_k8s_app = MagicMock(return_value=FlApp.from_json(fake_pods)) + pods = JobService.get_pods(fake_job) + self.assertEqual(pods, expected_pods) + + def test_get_job_yaml(self): + fake_job = MagicMock() + fake_job.state = JobState.STOPPED + fake_job.snapshot = 'test' + self.assertEqual(JobService.get_job_yaml(fake_job), 'test') + fake_job.state = JobState.STARTED + test_time = datetime.now() + fake_job.build_crd_service = MagicMock(return_value=MagicMock(get_k8s_app_cache=MagicMock( + return_value={'a': test_time}))) + self.assertEqual(JobService.get_job_yaml(fake_job), f'{{"a": "{test_time.isoformat()}"}}') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/utils.py b/web_console_v2/api/fedlearner_webconsole/job/utils.py new file mode 100644 index 000000000..d205dd676 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/utils.py @@ -0,0 +1,27 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum +from fedlearner_webconsole.utils.metrics import emit_store + + +class DurationState(enum.Enum): + STOPPED = 'STOPPED' + COMPLETED = 'COMPLETED' + FAILURE = 'FAILURE' + + +def emit_job_duration_store(duration: int, job_name: str, state: DurationState): + emit_store('job.duration', duration, tags={'job_name': job_name, 'state': state.name}) diff --git a/web_console_v2/api/fedlearner_webconsole/job/utils_test.py b/web_console_v2/api/fedlearner_webconsole/job/utils_test.py new file mode 100644 index 000000000..22e04bab2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/utils_test.py @@ -0,0 +1,32 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from fedlearner_webconsole.job.utils import DurationState, emit_job_duration_store + + +class UtilsTest(unittest.TestCase): + + def test_emit_job_duration_store(self): + with self.assertLogs() as cm: + emit_job_duration_store(10, 'u466-test-job', DurationState.COMPLETED) + logs = [r.msg for r in cm.records] + self.assertEqual( + logs, + ["""[Metric][Store] job.duration: 10, tags={'job_name': 'u466-test-job', 'state': 'COMPLETED'}"""]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter.py b/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter.py index bac0d80cc..80a4dbed2 100644 --- a/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter.py +++ b/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,100 +13,88 @@ # limitations under the License. 
# coding: utf-8 +import base64 import json import tarfile from io import BytesIO -import base64 -from string import Template -from flatten_dict import flatten -from fedlearner_webconsole.utils.system_envs import get_system_envs -from fedlearner_webconsole.proto import common_pb2 - - -class _YamlTemplate(Template): - delimiter = '$' - # Which placeholders in the template should be interpreted - idpattern = r'[a-zA-Z_\-\[0-9\]]+(\.[a-zA-Z_\-\[0-9\]]+)*' +from fedlearner_webconsole.k8s.models import CrdKind +from fedlearner_webconsole.rpc.client import gen_egress_authority +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.utils.const import DEFAULT_OWNER_FOR_JOB_WITHOUT_WORKFLOW +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.pp_yaml import compile_yaml_template, \ + add_username_in_label, GenerateDictService -def format_yaml(yaml, **kwargs): - """Formats a yaml template. - - Example usage: - format_yaml('{"abc": ${x.y}}', x={'y': 123}) - output should be '{"abc": 123}' - """ - template = _YamlTemplate(yaml) - try: - return template.substitute(flatten(kwargs or {}, - reducer='dot')) - except KeyError as e: - raise RuntimeError( - 'Unknown placeholder: {}'.format(e.args[0])) from e +CODE_TAR_FOLDER = 'code_tar' +CODE_TAR_FILE_NAME = 'code_tar.tar.gz' def make_variables_dict(variables): - var_dict = { - var.name: ( - code_dict_encode(json.loads(var.value)) - if var.value_type == common_pb2.Variable.ValueType.CODE \ - else var.value) - for var in variables - } - return var_dict - - -def generate_system_dict(): - return {'basic_envs': get_system_envs()} - - -def generate_project_dict(proj): - project = proj.to_dict() - project['variables'] = make_variables_dict( - proj.get_config().variables) - participants = project['config']['participants'] - for index, participant in enumerate(participants): - project[f'participants[{index}]'] = {} - project[f'participants[{index}]']['egress_domain'] = \ - participant['domain_name'] - project[f'participants[{index}]']['egress_host'] = \ - participant['grpc_spec']['authority'] - return project - - -def generate_workflow_dict(wf): - workflow = wf.to_dict() - workflow['variables'] = make_variables_dict( - wf.get_config().variables) - workflow['jobs'] = {} - for j in wf.get_jobs(): - variables = make_variables_dict(j.get_config().variables) - j_dic = j.to_dict() - j_dic['variables'] = variables - workflow['jobs'][j.get_config().name] = j_dic - return workflow - - -def generate_self_dict(j): - job = j.to_dict() - job['variables'] = make_variables_dict( - j.get_config().variables - ) - return job + var_dict = {} + for var in variables: + typed_value = to_dict(var.typed_value) + if var.value_type == common_pb2.Variable.CODE: + # if use or, then {} will be ignored. 
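+            # (An `or`-fallback such as `typed_value or json.loads(var.value)` would
+            # wrongly discard an empty-but-valid {} code dict, hence the explicit
+            # None checks here and below.)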
+ var_dict[var.name] = code_dict_encode(typed_value if typed_value is not None else json.loads(var.value)) + else: + var_dict[var.name] = typed_value if typed_value is not None else var.value + return var_dict -def generate_job_run_yaml(job): - yaml = format_yaml(job.get_config().yaml_template, - workflow=generate_workflow_dict(job.workflow), - project=generate_project_dict(job.project), - system=generate_system_dict(), - self=generate_self_dict(job)) - try: - loaded = json.loads(yaml) - except Exception as e: # pylint: disable=broad-except - raise ValueError(f'Invalid json {repr(e)}: {yaml}') - return loaded +class YamlFormatterService: + + def __init__(self, session): + self._session = session + + @staticmethod + def generate_project_dict(proj): + project = to_dict(proj.to_proto()) + variables = proj.get_variables() + project['variables'] = make_variables_dict(variables) + project['participants'] = [] + for index, participant in enumerate(proj.participants): + # TODO(xiangyuxuan.prs): remove keys such as participants[0] in future. + project[f'participants[{index}]'] = {} + project[f'participants[{index}]']['egress_domain'] = \ + participant.domain_name + project[f'participants[{index}]']['egress_host'] = gen_egress_authority(participant.domain_name) + project['participants'].append(project[f'participants[{index}]']) + return project + + def generate_workflow_dict(self, wf: 'Workflow'): + workflow = wf.to_dict() + workflow['variables'] = make_variables_dict(wf.get_config().variables) + workflow['jobs'] = {} + jobs = wf.get_jobs(self._session) + for j in jobs: + variables = make_variables_dict(j.get_config().variables) + j_dic = j.to_dict() + j_dic['variables'] = variables + workflow['jobs'][j.get_config().name] = j_dic + return workflow + + @staticmethod + def generate_self_dict(j: 'Job'): + job = j.to_dict() + job['variables'] = make_variables_dict(j.get_config().variables) + return job + + def generate_job_run_yaml(self, job: 'Job') -> dict: + result_dict = compile_yaml_template(job.get_config().yaml_template, + use_old_formater=job.crd_kind is None or + job.crd_kind == CrdKind.FLAPP.value, + post_processors=[ + lambda loaded_json: add_username_in_label( + loaded_json, job.workflow.creator + if job.workflow else DEFAULT_OWNER_FOR_JOB_WITHOUT_WORKFLOW) + ], + workflow=job.workflow and self.generate_workflow_dict(job.workflow), + project=self.generate_project_dict(job.project), + system=GenerateDictService(self._session).generate_system_dict(), + self=self.generate_self_dict(job)) + return result_dict def code_dict_encode(data_dict): diff --git a/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter_test.py b/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter_test.py new file mode 100644 index 000000000..16f41059c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/job/yaml_formatter_test.py @@ -0,0 +1,188 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import base64 +import tarfile +import unittest +from unittest.mock import patch +from envs import Envs +from io import BytesIO +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.yaml_formatter import code_dict_encode, YamlFormatterService +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.setting_pb2 import SystemVariables +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.utils.pp_yaml import _format_yaml, GenerateDictService +from fedlearner_webconsole.workflow.models import Workflow # pylint: disable=unused-import +from testing.no_web_server_test_case import NoWebServerTestCase + +BASE_DIR = Envs.BASE_DIR + + +class YamlFormatterTest(NoWebServerTestCase): + + def test_format_with_phs(self): + project = {'variables[0]': {'storage_root_dir': 'root_dir'}} + workflow = {'jobs': {'raw_data_job': {'name': 'raw_data123'}}} + yaml = _format_yaml(""" + { + "name": "OUTPUT_BASE_DIR", + "value": "${project.variables[0].storage_root_dir}/raw_data/${workflow.jobs.raw_data_job.name}" + } + """, + project=project, + workflow=workflow) + self.assertEqual( + yaml, """ + { + "name": "OUTPUT_BASE_DIR", + "value": "root_dir/raw_data/raw_data123" + } + """) + + self.assertEqual(_format_yaml('$project.variables[0].storage_root_dir', project=project), + project['variables[0]']['storage_root_dir']) + + def test_format_with_no_ph(self): + self.assertEqual(_format_yaml('{a: 123, b: 234}'), '{a: 123, b: 234}') + + def test_format_yaml_unknown_ph(self): + x = {'y': 123} + with self.assertRaises(RuntimeError) as cm: + _format_yaml('$x.y is $i.j.k', x=x) + self.assertEqual(str(cm.exception), 'Unknown placeholder: i.j.k') + with self.assertRaises(RuntimeError) as cm: + _format_yaml('$x.y is ${i.j}', x=x) + self.assertEqual(str(cm.exception), 'Unknown placeholder: i.j') + + def test_encode_code(self): + test_data = {'test/a.py': 'awefawefawefawefwaef', 'test1/b.py': 'asdfasd', 'c.py': '', 'test/d.py': 'asdf'} + code_base64 = code_dict_encode(test_data) + code_dict = {} + if code_base64.startswith('base64://'): + tar_binary = BytesIO(base64.b64decode(code_base64[9:])) + with tarfile.open(fileobj=tar_binary) as tar: + for file in tar.getmembers(): + code_dict[file.name] = str(tar.extractfile(file).read(), encoding='utf-8') + self.assertEqual(code_dict, test_data) + + def test_generate_self_dict(self): + config = { + 'variables': [{ + 'name': 'namespace', + 'value': 'leader' + }, { + 'name': 'basic_envs', + 'value': '{}' + }, { + 'name': 'storage_root_dir', + 'value': '/' + }] + } + job = Job(name='aa', project_id=1, workflow_id=1, state=JobState.NEW) + job.set_config(ParseDict(config, JobDefinition())) + self.assertEqual( + YamlFormatterService.generate_self_dict(job), { + 'id': None, + 'crd_kind': None, + 'crd_meta': None, + 'name': 'aa', + 'job_type': None, + 'state': 'NEW', + 'is_disabled': None, + 'workflow_id': 1, + 'project_id': 1, + 'flapp_snapshot': None, + 'sparkapp_snapshot': None, + 'error_message': None, + 'created_at': None, + 'updated_at': None, + 'deleted_at': None, + 'complete_at': 0, + 'snapshot': None, + 'variables': { + 'namespace': 'leader', + 'basic_envs': '{}', + 'storage_root_dir': '/', + } + }) + + 
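+    # The flattened dict above is what `$self.*` placeholders resolve against in
+    # a yaml_template; as a sketch (hypothetical template string, not from this
+    # test), '${self.variables.namespace}-${self.name}' would render to 'leader-aa'.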
+    @patch('fedlearner_webconsole.setting.service.SettingService.get_application_version')
+    @patch('fedlearner_webconsole.setting.service.SettingService.get_system_variables')
+    def test_generate_system_dict(self, mock_system_variables, mock_app_version):
+        data = ParseDict({'variables': [{'name': 'a', 'value': 'b'}]}, SystemVariables())
+        mock_system_variables.return_value = data
+        mock_app_version.return_value.version.version = '2.2.2.2'
+        with db.session_scope() as session:
+            system_dict = GenerateDictService(session).generate_system_dict()
+            self.assertTrue(isinstance(system_dict['basic_envs'], str))
+            self.assertEqual(system_dict['version'], '2.2.2.2')
+            self.assertEqual({'a': 'b'}, system_dict['variables'])
+
+    def test_generate_project_dict(self):
+        project = Project(name='project', comment='comment')
+        participant = Participant(name='test-participant', domain_name='fl-test.com', host='127.0.0.1', port=32443)
+        relationship = ProjectParticipant(project_id=1, participant_id=1)
+        with db.session_scope() as session:
+            session.add(project)
+            session.add(participant)
+            session.add(relationship)
+            session.commit()
+            project_dict = YamlFormatterService.generate_project_dict(project)
+        result_dict = {'egress_domain': 'fl-test.com', 'egress_host': 'fl-test-client-auth.com'}
+        self.assertEqual(project_dict['participants[0]'], result_dict)
+        self.assertEqual(project_dict['participants'][0], result_dict)
+
+    def test_generate_job_run_yaml(self):
+        with db.session_scope() as session:
+            project = Project(id=1, name='project 1')
+            session.add(project)
+            session.flush()
+            job_def = JobDefinition(name='lonely_job', job_type=JobDefinition.ANALYZER)
+            job_def.yaml_template = """
+            {
+                "apiVersion": "sparkoperator.k8s.io/v1beta2",
+                "kind": "SparkApplication",
+                "metadata": {
+                    "name": self.name,
+                },
+
+            }
+            """
+            job = Job(name='test', project_id=1, job_type=JobType(job_def.job_type), workflow_id=0)
+            job.set_config(job_def)
+            session.add(job)
+            session.commit()
+            result = YamlFormatterService(session).generate_job_run_yaml(job)
+        self.assertEqual(
+            result, {
+                'apiVersion': 'sparkoperator.k8s.io/v1beta2',
+                'kind': 'SparkApplication',
+                'metadata': {
+                    'name': 'test',
+                    'labels': {
+                        'owner': 'no___workflow'
+                    }
+                }
+            })
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/k8s/BUILD.bazel
new file mode 100644
index 000000000..865f2ee58
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/k8s/BUILD.bazel
@@ -0,0 +1,126 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+package(default_visibility = ["//web_console_v2/api:console_api_package"])
+
+py_library(
+    name = "event_listener_lib",
+    srcs = ["event_listener.py"],
+    imports = ["../.."],
+    deps = [":k8s_cache_lib"],
+)
+
+py_library(
+    name = "fake_k8s_client_lib",
+    srcs = ["fake_k8s_client.py"],
+    imports = ["../.."],
+    deps = [
+        "//web_console_v2/api/testing:helpers_lib",
+        "@common_kubernetes//:pkg",
+    ],
+)
+
+py_library(
+    name = "k8s_cache_lib",
+    srcs = ["k8s_cache.py"],
+    imports = ["../.."],
+    deps = [":models_lib"],
+)
+
+py_test(
+    name = "k8s_cache_lib_test",
+    size = "small",
+    srcs = [
+        "k8s_cache_test.py",
+    ],
+    imports = ["../.."],
+    main = "k8s_cache_test.py",
+    deps = [
+        ":k8s_cache_lib",
+    ],
+)
+
+py_library(
+    name = "k8s_client_lib",
+    srcs = ["k8s_client.py"],
+    imports = ["../.."],
+    deps = [
+        ":fake_k8s_client_lib",
"//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:es_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:hooks_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "@common_kubernetes//:pkg", + ], +) + +py_test( + name = "k8s_client_lib_test", + size = "small", + srcs = [ + "k8s_client_test.py", + ], + imports = ["../.."], + main = "k8s_client_test.py", + deps = [ + ":k8s_client_lib", + ], +) + +py_library( + name = "k8s_watcher_lib", + srcs = ["k8s_watcher.py"], + imports = ["../.."], + deps = [ + ":k8s_cache_lib", + ":k8s_client_lib", + ":models_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/job:event_listener_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:event_listener_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "@common_kubernetes//:pkg", + ], +) + +py_test( + name = "k8s_watcher_lib_test", + size = "medium", + srcs = [ + "k8s_watcher_test.py", + ], + imports = ["../.."], + main = "k8s_watcher_test.py", + deps = [ + ":k8s_watcher_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_cache_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_kubernetes//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/event_listener.py b/web_console_v2/api/fedlearner_webconsole/k8s/event_listener.py new file mode 100644 index 000000000..ffd8a5eeb --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/k8s/event_listener.py @@ -0,0 +1,24 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +from abc import ABCMeta, abstractmethod +from fedlearner_webconsole.k8s.k8s_cache import Event + + +class EventListener(metaclass=ABCMeta): + + @abstractmethod + def update(self, event: Event): + pass diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/fake_k8s_client.py b/web_console_v2/api/fedlearner_webconsole/k8s/fake_k8s_client.py new file mode 100644 index 000000000..2f33aca1f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/k8s/fake_k8s_client.py @@ -0,0 +1,287 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+# pylint: disable=logging-format-interpolation
+import logging
+import datetime
+import time
+
+from kubernetes import client
+
+from testing.helpers import to_simple_namespace
+
+_RAISE_EXCEPTION_KEY = 'raise_exception'
+
+
+class FakeResponse(object):
+
+    def read_chunked(self, *args, **kwargs):
+        return []
+
+    def close(self):
+        pass
+
+    def release_conn(self):
+        pass
+
+
+class FakeCoreApi(object):
+
+    def __init__(self, timeouts=10):
+        # timeout in seconds
+        self.timeouts = timeouts
+
+    def list_namespaced_pod(self, namespace, **kwargs):
+        time.sleep(self.timeouts)
+        return FakeResponse()
+
+
+class FakeCrdsApi(object):
+
+    def __init__(self, timeouts=10):
+        # timeout in seconds
+        self.timeouts = timeouts
+
+    def list_namespaced_custom_object(self, namespace, **kwargs):
+        time.sleep(self.timeouts)
+        return FakeResponse()
+
+    def get_namespaced_custom_object(self, *args, **kwargs):
+        return FakeResponse()
+
+
+class FakeK8sClient(object):
+    """A fake k8s client for development.
+
+    With this client we can decouple the dependency on a k8s cluster.
+    """
+
+    def __init__(self):
+        self.core = FakeCoreApi(60)
+        self.crds = FakeCrdsApi(60)
+
+    def close(self):
+        pass
+
+    def create_or_update_secret(self, data, metadata, secret_type, name, namespace='default'):
+        # User may pass two types of data:
+        # 1. dictionary
+        # 2. K8s Object
+        # They are both accepted by the real K8s client,
+        # but a K8s Object is not iterable.
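+        # e.g. data={'key': 'dmFsdWU='} (a plain dict) or a client.V1Secret
+        # object; the dict value here is purely illustrative.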
+        if isinstance(data, dict) and _RAISE_EXCEPTION_KEY in data:
+            raise RuntimeError('[500] Fake exception for save_secret')
+        # Otherwise succeeds
+        logging.info('======================')
+        logging.info(f'Saved a secret with: data: {data}, metadata: {metadata}, type: {secret_type}')
+
+    def delete_secret(self, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Deleted a secret with: name: {name}')
+
+    def get_secret(self, name, namespace='default'):
+        return client.V1Secret(api_version='v1',
+                               data={'test': 'test'},
+                               kind='Secret',
+                               metadata={
+                                   'name': name,
+                                   'namespace': namespace
+                               },
+                               type='Opaque')
+
+    def create_or_update_service(self, metadata, spec, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Saved a service with: spec: {spec}, metadata: {metadata}')
+
+    def delete_service(self, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Deleted a service with: name: {name}')
+
+    def get_service(self, name, namespace='default'):
+        return client.V1Service(api_version='v1',
+                                kind='Service',
+                                metadata=client.V1ObjectMeta(name=name, namespace=namespace),
+                                spec=client.V1ServiceSpec(selector={'app': 'nginx'}))
+
+    def list_service(self, namespace='default'):
+        service_dict = {'items': [{'metadata': {'name': f'fl-{i * 3}'}} for i in 'ac']}
+        return to_simple_namespace(service_dict)
+
+    def create_or_update_ingress(self, metadata, spec, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Saved an ingress with: spec: {spec}, metadata: {metadata}')
+
+    def delete_ingress(self, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Deleted an ingress with: name: {name}')
+
+    def get_ingress(self, name, namespace='default'):
+        return client.NetworkingV1beta1Ingress(api_version='networking.k8s.io/v1beta1',
+                                               kind='Ingress',
+                                               metadata=client.V1ObjectMeta(name=name, namespace=namespace),
+                                               spec=client.NetworkingV1beta1IngressSpec())
+
+    def list_ingress(self, namespace='default'):
+        ingress_dict = {'items': [{'metadata': {'name': f'fl-{i * 3}-client-auth'}} for i in 'abc']}
+        return to_simple_namespace(ingress_dict)
+
+    def create_or_update_deployment(self, metadata, spec, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Saved a deployment with: spec: {spec}, metadata: {metadata}')
+
+    def delete_deployment(self, name, namespace='default'):
+        logging.info('======================')
+        logging.info(f'Deleted a deployment with: name: {name}')
+
+    def get_deployment(self, name, namespace='default'):
+        return client.V1Deployment(
+            api_version='apps/v1',
+            kind='Deployment',
+            metadata=client.V1ObjectMeta(name=name, namespace=namespace),
+            spec=client.V1DeploymentSpec(
+                selector={'matchLabels': {
+                    'app': 'fedlearner-operator'
+                }},
+                template=client.V1PodTemplateSpec(spec=client.V1PodSpec(
+                    containers=[client.V1Container(name='fedlearner-operator', args=['test'])]))))
+
+    def delete_app(self, app_name, group, version: str, plural: str, namespace: str = 'default'):
+        pass
+
+    def create_app(self, app_name, group, version: str, plural: str, namespace: str = 'default') -> dict:
+        return {}
+
+    def get_app_cache(self, app_name):
+        pods = {
+            'pods': {
+                'metadata': {
+                    'selfLink': '/api/v1/namespaces/default/pods',
+                    'resourceVersion': '780480990'
+                }
+            },
+            'items': [{
+                'metadata': {
+                    'name': f'{app_name}-0'
+                }
+            }, {
+                'metadata': {
+                    'name': f'{app_name}-1'
+                }
+            }]
+        }
+        flapp = {
'kind': 'FLAPP', + 'metadata': { + 'name': app_name, + 'namesapce': 'default' + }, + 'status': { + 'appState': 'FLStateRunning', + 'flReplicaStatus': { + 'Master': { + 'active': { + 'laomiao-raw-data-1223-v1-follower' + '-master-0-717b53c4-' + 'fef7-4d65-a309-63cf62494286': {} + } + }, + 'Worker': { + 'active': { + 'laomiao-raw-data-1223-v1-follower' + '-worker-0-61e49961-' + 'e6dd-4015-a246-b6d25e69a61c': {}, + 'laomiao-raw-data-1223-v1-follower' + '-worker-1-accef16a-' + '317f-440f-8f3f-7dd5b3552d25': {} + } + } + } + } + } + return {'flapp': flapp, 'pods': pods} + + def get_sparkapplication(self, name: str, namespace: str = 'default') -> dict: + logging.info('======================') + logging.info(f'get spark application, name: {name}, namespace: {namespace}') + return { + 'apiVersion': 'sparkoperator.k8s.io/v1beta2', + 'kind': 'SparkApplication', + 'metadata': { + 'creationTimestamp': '2021-04-15T10:43:15Z', + 'generation': 1, + 'name': name, + 'namespace': namespace, + }, + 'status': { + 'applicationState': { + 'state': 'COMPLETED' + }, + } + } + + def create_sparkapplication(self, json_object: dict, namespace: str = 'default') -> dict: + logging.info('======================') + logging.info(f'create spark application, namespace: {namespace}, ' f'json: {json_object}') + return { + 'apiVersion': 'sparkoperator.k8s.io/v1beta2', + 'kind': 'SparkApplication', + 'metadata': { + 'creationTimestamp': '2021-04-15T10:43:15Z', + 'generation': 1, + 'name': 'fl-transformer-yaml', + 'namespace': 'fedlearner', + 'resourceVersion': '348817823', + }, + 'spec': { + 'arguments': ['hdfs://user/feature/data.csv', 'hdfs://user/feature/data_tfrecords/'], + } + } + + def delete_sparkapplication(self, name: str, namespace: str = 'default') -> dict: + logging.info('======================') + logging.info(f'delete spark application, name: {name}, namespace: {namespace}') + return { + 'kind': 'Status', + 'apiVersion': 'v1', + 'metadata': {}, + 'status': 'Success', + 'details': { + 'name': name, + 'group': 'sparkoperator.k8s.io', + 'kind': 'sparkapplications', + 'uid': '790603b6-9dd6-11eb-9282-b8599fb51ea8' + } + } + + def get_pod_log(self, name: str, namespace: str, tail_lines: int): + return str(datetime.datetime.now()) + + def get_pods(self, namespace, label_selector): + fake_pod = client.V1Pod(metadata=client.V1ObjectMeta(name='fake_pod', + labels={}, + creation_timestamp=datetime.datetime.utcnow()), + status=client.V1PodStatus(phase='Running'), + spec=client.V1PodSpec(containers=[ + client.V1Container(name='fake_container', + resources=client.V1ResourceRequirements(limits={ + 'cpu': '2000m', + 'memory': '4Gi' + }, + requests={ + 'cpu': '2000m', + 'memory': '4Gi' + })) + ])) + return client.V1PodList(items=[fake_pod]) diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/k8s_cache.py b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_cache.py new file mode 100644 index 000000000..f74d248bc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_cache.py @@ -0,0 +1,122 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+from enum import Enum
+
+from fedlearner_webconsole.k8s.models import get_app_name_from_metadata
+
+
+class EventType(Enum):
+    ADDED = 'ADDED'
+    MODIFIED = 'MODIFIED'
+    DELETED = 'DELETED'
+
+
+class ObjectType(Enum):
+    POD = 'POD'
+    FLAPP = 'FLAPP'
+    SPARKAPP = 'SPARKAPP'
+    FEDAPP = 'FEDAPP'
+
+
+class Event(object):
+
+    def __init__(self, app_name: str, event_type: EventType, obj_type: ObjectType, obj_dict: dict):
+        self.app_name = app_name
+        self.event_type = event_type
+        self.obj_type = obj_type
+        # {'status': {}, 'metadata': {}}
+        self.obj_dict = obj_dict
+
+    @staticmethod
+    def from_json(event, obj_type):
+        # TODO(xiangyuxuan): move this to k8s/models.py
+        event_type = event['type']
+        obj = event['object']
+        if obj_type == ObjectType.POD:
+            app_name = get_app_name_from_metadata(obj.metadata)
+            obj = obj.to_dict()
+            status = obj.get('status')
+            return Event(app_name,
+                         EventType(event_type),
+                         obj_type,
+                         obj_dict={
+                             'status': status,
+                             'metadata': obj.get('metadata', {})
+                         })
+
+        metadata = obj.get('metadata', {})
+        # For CRD objects the resource name is used as the app name
+        return Event(metadata.get('name', None), EventType(event_type), obj_type, obj_dict=obj)
+
+
+class K8sCache(object):
+
+    def __init__(self):
+        # key: app_name, value: a dict
+        # {'flapp': flapp cache, 'pods': pods cache,
+        #  'deleted': is flapp deleted}
+        self._cache = {}
+        self._pod_cache = {}
+
+    def inspect(self) -> dict:
+        c = {}
+        c['pod_cache'] = self._pod_cache
+        c['app_cache'] = self._cache
+        return c
+
+    def update_cache(self, event: Event):
+        if event.obj_type == ObjectType.POD:
+            self._update_pod_cache(event)
+        else:
+            self._update_app_cache(event)
+
+    def get_cache(self, app_name: str) -> dict:
+        return self._get_app_cache(app_name)
+
+    def _update_app_cache(self, event: Event):
+        app_name = event.app_name
+
+        self._cache[app_name] = {'app': event.obj_dict}
+        if app_name not in self._pod_cache:
+            self._pod_cache[app_name] = {'items': [], 'deleted': False}
+        self._pod_cache[app_name]['deleted'] = False
+        if event.event_type == EventType.DELETED:
+            self._cache[app_name] = {'app': None}
+            self._pod_cache[app_name] = {'items': [], 'deleted': True}
+
+    def _get_app_cache(self, app_name) -> dict:
+        if app_name not in self._cache:
+            return {'app': None, 'pods': {'items': []}}
+        app = {**self._cache[app_name], 'pods': self._pod_cache[app_name]}
+        return app
+
+    def _update_pod_cache(self, event: Event):
+        app_name = event.app_name
+        if app_name not in self._pod_cache:
+            self._pod_cache[app_name] = {'items': [], 'deleted': False}
+        if self._pod_cache[app_name]['deleted']:
+            return
+        existed = False
+        for index, pod in enumerate(self._pod_cache[app_name]['items']):
+            if pod['metadata']['name'] == event.obj_dict['metadata']['name']:
+                existed = True
+                self._pod_cache[app_name]['items'][index] = event.obj_dict
+                break
+        if not existed:
+            self._pod_cache[app_name]['items'].append(event.obj_dict)
+
+
+k8s_cache = K8sCache()
diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/k8s_cache_test.py b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_cache_test.py
new file mode 100644
index 000000000..f4cb1f630
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_cache_test.py
@@ -0,0 +1,33 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+from fedlearner_webconsole.k8s.k8s_cache import Event, ObjectType, EventType
+
+
+class EventTest(unittest.TestCase):
+
+    def test_from_json(self):
+        app_object = {'metadata': {'name': 'test'}, 'status': None, 'spec': {'test': 1}}
+        test_event_dict = {'type': 'ADDED', 'object': app_object}
+        event = Event.from_json(test_event_dict, ObjectType.FLAPP)
+        self.assertEqual(event.app_name, 'test')
+        self.assertEqual(event.obj_type, ObjectType.FLAPP)
+        self.assertEqual(event.event_type, EventType.ADDED)
+        self.assertEqual(event.obj_dict, app_object)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/k8s_client.py b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_client.py
new file mode 100644
index 000000000..60614beb4
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_client.py
@@ -0,0 +1,445 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+# pylint: disable=inconsistent-return-statements
+import enum
+import logging
+from http import HTTPStatus
+from typing import Callable, Optional
+
+import kubernetes
+from kubernetes import client
+from kubernetes.client import V1ServiceList, NetworkingV1beta1IngressList
+from kubernetes.client.exceptions import ApiException
+
+from envs import Envs
+from fedlearner_webconsole.utils.decorators.retry import retry_fn
+from fedlearner_webconsole.exceptions import (NotFoundException, InternalException)
+from fedlearner_webconsole.k8s.fake_k8s_client import FakeK8sClient
+from fedlearner_webconsole.utils.es import es
+from fedlearner_webconsole.utils.hooks import parse_and_get_fn
+
+# This is the default k8s client hook.
+# Args:
+#     app_yaml [dict] the app yaml definition of k8s resource.
+# Returns:
+#     [dict] the modified app yaml of k8s resource.
+# Note:
+#     If you want to customize the k8s client hook,
+#     1. write a hook function following this interface
+#     2. assign its module path to the `K8S_HOOK_MODULE_PATH` env variable.
+DEFAULT_K8S_CLIENT_HOOK: Callable[[dict], dict] = lambda o: o
+
+
+# TODO(xiangyuxuan.prs): these are only used by dataset and should be deprecated.
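+# For illustration, a hook module (hypothetical `my_hooks.py`) might contain:
+#
+#     def add_owner_label(app_yaml: dict) -> dict:
+#         app_yaml.setdefault('metadata', {}).setdefault('labels', {})['owner'] = 'webconsole'
+#         return app_yaml
+#
+# and be enabled by setting `K8S_HOOK_MODULE_PATH` to 'my_hooks:add_owner_label'.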
+class CrdKind(enum.Enum): + FLAPP = 'flapps' + SPARK_APPLICATION = 'sparkapplications' + + +FEDLEARNER_CUSTOM_GROUP = 'fedlearner.k8s.io' +FEDLEARNER_CUSTOM_VERSION = 'v1alpha1' + +SPARKOPERATOR_CUSTOM_GROUP = 'sparkoperator.k8s.io' +SPARKOPERATOR_CUSTOM_VERSION = 'v1beta2' +SPARKOPERATOR_NAMESPACE = Envs.K8S_NAMESPACE + +REQUEST_TIMEOUT_IN_SECOND = 10 + + +# TODO(wangsen.0914): remove create_deployment etc.; add UT for client +class K8sClient(object): + + def __init__(self): + self.core = None + self.crds = None + self._networking = None + self._app = None + self._hook_fn = DEFAULT_K8S_CLIENT_HOOK + + def init(self, config_path: Optional[str] = None, hook_module_path: Optional[str] = None): + # Sets config + if config_path is None: + kubernetes.config.load_incluster_config() + else: + kubernetes.config.load_kube_config(config_path) + + # Initialize hook + if hook_module_path: + self._hook_fn = parse_and_get_fn(hook_module_path) + + # Inits API clients + self.core = client.CoreV1Api() + self.crds = client.CustomObjectsApi() + self._networking = client.NetworkingV1beta1Api() + self._app = client.AppsV1Api() + + def close(self): + self.core.api_client.close() + self._networking.api_client.close() + + def _raise_runtime_error(self, exception: ApiException): + logging.error(f'[k8s_client]: runtime error {exception}') + raise RuntimeError(str(exception)) + + def create_or_update_secret(self, data, metadata, secret_type, name, namespace='default'): + """Create secret. If existed, then replace""" + request = client.V1Secret(api_version='v1', data=data, kind='Secret', metadata=metadata, type=secret_type) + try: + self.core.read_namespaced_secret(name, namespace) + # If the secret already exists, then we use patch to replace it. + # We don't use replace method because it requires `resourceVersion`. + self.core.patch_namespaced_secret(name, namespace, request) + return + except ApiException as e: + # 404 is expected if the secret does not exist + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + try: + self.core.create_namespaced_secret(namespace, request) + except ApiException as e: + self._raise_runtime_error(e) + + def delete_secret(self, name, namespace='default'): + try: + self.core.delete_namespaced_secret(name, namespace) + except ApiException as e: + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + + def get_secret(self, name, namespace='default'): + try: + return self.core.read_namespaced_secret(name, namespace) + except ApiException as e: + self._raise_runtime_error(e) + + def create_or_update_config_map(self, metadata, data, name, namespace='default'): + """Create configMap. If existed, then patch""" + request = client.V1ConfigMap(api_version='v1', kind='ConfigMap', metadata=metadata, data=data) + try: + self.core.read_namespaced_config_map(name, namespace) + # If the configMap already exists, then we use patch to replace it. + # We don't use replace method because it requires `resourceVersion`. 
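+            # (replace would be a full PUT and fails without a matching
+            # resourceVersion, while patch merges changes into the live object)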
+            self.core.patch_namespaced_config_map(name, namespace, request)
+            return
+        except ApiException as e:
+            # 404 is expected if the configMap does not exist
+            if e.status != HTTPStatus.NOT_FOUND:
+                self._raise_runtime_error(e)
+        try:
+            self.core.create_namespaced_config_map(namespace, request)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def delete_config_map(self, name, namespace='default'):
+        try:
+            self.core.delete_namespaced_config_map(name, namespace)
+        except ApiException as e:
+            if e.status != HTTPStatus.NOT_FOUND:
+                self._raise_runtime_error(e)
+
+    def get_config_map(self, name, namespace='default'):
+        try:
+            return self.core.read_namespaced_config_map(name, namespace)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def create_or_update_service(self, metadata, spec, name, namespace='default'):
+        """Create service. If existed, then replace"""
+        request = client.V1Service(api_version='v1', kind='Service', metadata=metadata, spec=spec)
+        try:
+            self.core.read_namespaced_service(name, namespace)
+            # If the service already exists, then we use patch to replace it.
+            # We don't use replace method because it requires `resourceVersion`.
+            self.core.patch_namespaced_service(name, namespace, request)
+            return
+        except ApiException as e:
+            # 404 is expected if the service does not exist
+            if e.status != HTTPStatus.NOT_FOUND:
+                self._raise_runtime_error(e)
+        try:
+            self.core.create_namespaced_service(namespace, request)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def delete_service(self, name, namespace='default'):
+        try:
+            self.core.delete_namespaced_service(name, namespace)
+        except ApiException as e:
+            if e.status != HTTPStatus.NOT_FOUND:
+                self._raise_runtime_error(e)
+
+    def get_service(self, name, namespace='default'):
+        try:
+            return self.core.read_namespaced_service(name, namespace)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def list_service(self, namespace: str = 'default') -> V1ServiceList:
+        try:
+            return self.core.list_namespaced_service(namespace)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def create_or_update_ingress(self, metadata, spec, name, namespace='default'):
+        request = client.NetworkingV1beta1Ingress(api_version='networking.k8s.io/v1beta1',
+                                                  kind='Ingress',
+                                                  metadata=metadata,
+                                                  spec=spec)
+        try:
+            self._networking.read_namespaced_ingress(name, namespace)
+            # If the ingress already exists, then we use patch to replace it.
+            # We don't use replace method because it requires `resourceVersion`.
+ self._networking.patch_namespaced_ingress(name, namespace, request) + return + except ApiException as e: + # 404 is expected if the ingress does not exist + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + try: + self._networking.create_namespaced_ingress(namespace, request) + except ApiException as e: + self._raise_runtime_error(e) + + def delete_ingress(self, name, namespace='default'): + try: + self._networking.delete_namespaced_ingress(name, namespace) + except ApiException as e: + self._raise_runtime_error(e) + + def get_ingress(self, name, namespace='default'): + try: + return self._networking.read_namespaced_ingress(name, namespace) + except ApiException as e: + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + + def list_ingress(self, namespace: str = 'default') -> NetworkingV1beta1IngressList: + try: + return self._networking.list_namespaced_ingress(namespace) + except ApiException as e: + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + + def create_or_update_deployment(self, metadata, spec, name, namespace='default'): + request = client.V1Deployment(api_version='apps/v1', kind='Deployment', metadata=metadata, spec=spec) + try: + self._app.read_namespaced_deployment(name, namespace) + # If the deployment already exists, then we use patch to replace it. + # We don't use replace method because it requires `resourceVersion`. + self._app.patch_namespaced_deployment(name, namespace, request) + return + except ApiException as e: + # 404 is expected if the deployment does not exist + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + try: + self._app.create_namespaced_deployment(namespace, request) + except ApiException as e: + self._raise_runtime_error(e) + + def delete_deployment(self, name, namespace='default'): + try: + self._app.delete_namespaced_deployment(name, namespace) + except ApiException as e: + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + + def get_deployment(self, name): + try: + return self._app.read_namespaced_deployment(name, Envs.K8S_NAMESPACE) + except ApiException as e: + self._raise_runtime_error(e) + + def get_sparkapplication(self, name: str, namespace: str = SPARKOPERATOR_NAMESPACE) -> dict: + """get sparkapp + + Args: + name (str): sparkapp name + namespace (str, optional): namespace to submit. + + Raises: + InternalException: if any error occurs during API call + NotFoundException: if the spark app is not found + + Returns: + dict: resp of k8s + """ + try: + return self.crds.get_namespaced_custom_object(group=SPARKOPERATOR_CUSTOM_GROUP, + version=SPARKOPERATOR_CUSTOM_VERSION, + namespace=namespace, + plural=CrdKind.SPARK_APPLICATION.value, + name=name) + except ApiException as err: + if err.status == 404: + raise NotFoundException() from err + raise InternalException(details=err.body) from err + + def create_sparkapplication(self, json_object: dict, namespace: str = SPARKOPERATOR_NAMESPACE) -> dict: + """ create sparkapp + + Args: + json_object (dict): json object of config + namespace (str, optional): namespace to submit. 
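+                Defaults to SPARKOPERATOR_NAMESPACE.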
+
+        Returns:
+            dict: resp of k8s
+        """
+        logging.debug(f'create sparkapp json is {json_object}')
+        return self.crds.create_namespaced_custom_object(group=SPARKOPERATOR_CUSTOM_GROUP,
+                                                         version=SPARKOPERATOR_CUSTOM_VERSION,
+                                                         namespace=namespace,
+                                                         plural=CrdKind.SPARK_APPLICATION.value,
+                                                         body=json_object)
+
+    def delete_sparkapplication(self, name: str, namespace: str = SPARKOPERATOR_NAMESPACE) -> dict:
+        """ delete sparkapp
+
+        Args:
+            name (str): sparkapp name
+            namespace (str, optional): namespace to delete.
+
+        Raises:
+            NotFoundException: if the spark app is not found
+            InternalException: if any error occurs during API call
+
+        Returns:
+            dict: resp of k8s
+        """
+        try:
+            return self.crds.delete_namespaced_custom_object(group=SPARKOPERATOR_CUSTOM_GROUP,
+                                                             version=SPARKOPERATOR_CUSTOM_VERSION,
+                                                             namespace=namespace,
+                                                             plural=CrdKind.SPARK_APPLICATION.value,
+                                                             name=name,
+                                                             body=client.V1DeleteOptions())
+        except ApiException as err:
+            if err.status == 404:
+                raise NotFoundException() from err
+            raise InternalException(details=err.body) from err
+
+    def get_pod_log(self, name: str, namespace: str, tail_lines: int):
+        # this is not necessary for now
+        del namespace
+        return es.query_log(Envs.ES_INDEX, '', name)[:tail_lines][::-1]
+
+    def get_pods(self, namespace, label_selector):
+        try:
+            return self.core.list_namespaced_pod(namespace=namespace, label_selector=label_selector)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def create_app(self,
+                   app_yaml: dict,
+                   group: str,
+                   version: str,
+                   plural: str,
+                   namespace: str = Envs.K8S_NAMESPACE) -> dict:
+        try:
+            app_yaml = self._hook_fn(app_yaml)
+            return self.crds.create_namespaced_custom_object(group=group,
+                                                             version=version,
+                                                             namespace=namespace,
+                                                             plural=plural,
+                                                             body=app_yaml,
+                                                             _request_timeout=REQUEST_TIMEOUT_IN_SECOND)
+        except ApiException as e:
+            # 409 (conflict) is expected if the custom resource already exists
+            if e.status != HTTPStatus.CONFLICT:
+                self._raise_runtime_error(e)
+            logging.warning(f'Crd object: {app_yaml} has already been created!')
+
+    @retry_fn(retry_times=3)
+    def delete_app(self, app_name, group, version: str, plural: str, namespace: str = Envs.K8S_NAMESPACE):
+        try:
+            self.crds.delete_namespaced_custom_object(group=group,
+                                                      version=version,
+                                                      namespace=namespace,
+                                                      plural=plural,
+                                                      name=app_name,
+                                                      _request_timeout=REQUEST_TIMEOUT_IN_SECOND)
+        except ApiException as e:
+            # If the custom resource has been deleted then the exception gets ignored
+            if e.status != HTTPStatus.NOT_FOUND:
+                self._raise_runtime_error(e)
+
+    def get_custom_object(self,
+                          name: str,
+                          group: str,
+                          version: str,
+                          plural: str,
+                          namespace: str = Envs.K8S_NAMESPACE) -> dict:
+        try:
+            return self.crds.get_namespaced_custom_object(group=group,
+                                                          version=version,
+                                                          namespace=namespace,
+                                                          plural=plural,
+                                                          name=name,
+                                                          _request_timeout=REQUEST_TIMEOUT_IN_SECOND)
+        except ApiException as e:
+            self._raise_runtime_error(e)
+
+    def update_app(self, app_yaml: dict, group: str, version: str, plural: str, namespace: str = Envs.K8S_NAMESPACE):
+        try:
+            app_yaml = self._hook_fn(app_yaml)
+            name = app_yaml['metadata']['name']
+            self.crds.patch_namespaced_custom_object(group=group,
+                                                     version=version,
+                                                     namespace=namespace,
+                                                     plural=plural,
+                                                     name=name,
+                                                     body=app_yaml,
+                                                     _request_timeout=REQUEST_TIMEOUT_IN_SECOND)
+        except ApiException as e:
+            if e.status == HTTPStatus.NOT_FOUND:
+                logging.error(f'[k8s_client] Resource: {app_yaml} doesn\'t exist!')
+            self._raise_runtime_error(e)
+
+    def create_or_update_app(self,
+                             app_yaml: dict,
+                             group: str,
version: str, + plural: str, + namespace: str = Envs.K8S_NAMESPACE): + name = app_yaml['metadata']['name'] + try: + # Why not use `get_custom_object`? + # Because `get_custom_object` wraps the exception, it's difficult to parse 404 info. + self.crds.get_namespaced_custom_object(group=group, + version=version, + namespace=namespace, + plural=plural, + name=name, + _request_timeout=REQUEST_TIMEOUT_IN_SECOND) + # If the resource already exists, then we use patch to replace it. + # We don't use replace method because it requires `resourceVersion`. + self.update_app(app_yaml=app_yaml, group=group, version=version, plural=plural, namespace=namespace) + return + except ApiException as e: + # 404 is expected if the deployment does not exist + if e.status != HTTPStatus.NOT_FOUND: + self._raise_runtime_error(e) + try: + self.create_app(app_yaml=app_yaml, group=group, version=version, plural=plural, namespace=namespace) + except ApiException as e: + self._raise_runtime_error(e) + + +k8s_client = FakeK8sClient() +if Envs.FLASK_ENV == 'production' or \ + Envs.K8S_CONFIG_PATH is not None: + k8s_client = K8sClient() + k8s_client.init(config_path=Envs.K8S_CONFIG_PATH, hook_module_path=Envs.K8S_HOOK_MODULE_PATH) diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/k8s_client_test.py b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_client_test.py new file mode 100644 index 000000000..3d38d8914 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_client_test.py @@ -0,0 +1,142 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import unittest +from unittest.mock import MagicMock, patch + +from kubernetes.client import ApiException + +from fedlearner_webconsole.k8s.k8s_client import K8sClient, REQUEST_TIMEOUT_IN_SECOND + + +class K8sClientTest(unittest.TestCase): + + def setUp(self): + super().setUp() + self._load_incluster_config_patcher = patch( + 'fedlearner_webconsole.k8s.k8s_client.kubernetes.config.load_incluster_config', lambda: None) + self._load_incluster_config_patcher.start() + + self._k8s_client = K8sClient() + self._k8s_client.init() + + def tearDown(self): + self._load_incluster_config_patcher.stop() + super().tearDown() + + def test_delete_flapp(self): + mock_crds = MagicMock() + self._k8s_client.crds = mock_crds + # Test delete successfully + mock_crds.delete_namespaced_custom_object = MagicMock() + self._k8s_client.delete_app('test_flapp', 'fedlearner.k8s.io', 'v1alpha1', 'flapps') + mock_crds.delete_namespaced_custom_object.assert_called_once_with(group='fedlearner.k8s.io', + name='test_flapp', + namespace='default', + plural='flapps', + version='v1alpha1', + _request_timeout=REQUEST_TIMEOUT_IN_SECOND) + # Tests that the flapp has been deleted + mock_crds.delete_namespaced_custom_object = MagicMock(side_effect=ApiException(status=404)) + self._k8s_client.delete_app('test_flapp2', 'fedlearner.k8s.io', 'v1alpha1', 'flapps') + self.assertEqual(mock_crds.delete_namespaced_custom_object.call_count, 1) + # Tests with other exceptions + mock_crds.delete_namespaced_custom_object = MagicMock(side_effect=ApiException(status=500)) + with self.assertRaises(RuntimeError): + self._k8s_client.delete_app('test_flapp3', 'fedlearner.k8s.io', 'v1alpha1', 'flapps') + self.assertEqual(mock_crds.delete_namespaced_custom_object.call_count, 3) + + def test_create_flapp(self): + test_yaml = {'metadata': {'name': 'test app'}, 'kind': 'flapp', 'apiVersion': 'fedlearner.k8s.io/v1alpha1'} + mock_crds = MagicMock() + self._k8s_client.crds = mock_crds + # Test create successfully + mock_crds.create_namespaced_custom_object = MagicMock() + self._k8s_client.create_app(test_yaml, plural='flapps', version='v1alpha1', group='fedlearner.k8s.io') + mock_crds.create_namespaced_custom_object.assert_called_once_with(group='fedlearner.k8s.io', + namespace='default', + plural='flapps', + version='v1alpha1', + _request_timeout=REQUEST_TIMEOUT_IN_SECOND, + body=test_yaml) + self._k8s_client.create_app(test_yaml, plural='flapps', version='v1alpha1', group='fedlearner.k8s.io') + self.assertEqual(mock_crds.create_namespaced_custom_object.call_count, 2) + + @patch('fedlearner_webconsole.k8s.k8s_client.parse_and_get_fn') + @patch('fedlearner_webconsole.k8s.k8s_client.client.CustomObjectsApi.create_namespaced_custom_object') + def test_create_app_with_hook(self, mock_create_namespaced_custom_object: MagicMock, + mock_parse_and_get_fn: MagicMock): + + def custom_magic_fn(app_yaml: dict) -> dict: + app_yaml['metadata']['name'] = app_yaml['metadata']['name'] + '_hello' + return app_yaml + + mock_parse_and_get_fn.return_value = custom_magic_fn + self._k8s_client.init(hook_module_path='test.hook:custom_magic_fn') + deployment_app_yaml = { + 'apiVersion': 'apps/v1', + 'kind': 'Deployment', + 'metadata': { + 'name': 'world', + }, + 'spec': { + 'selector': { + 'matchLabels': { + 'app': 'test-app' + } + }, + 'replicas': 1, + 'template': { + 'metadata': { + 'labels': { + 'app': 'test-app' + } + }, + 'spec': { + 'volumes': [{ + 'name': 'test-app-config', + 'configMap': { + 'name': 'test-app-config' + } + }], + 'containers': [{ + 'name': 
'test-app',
+                        'image': 'serving:latest',
+                        'args': [
+                            '--port=8500', '--rest_api_port=8501', '--model_config_file=/app/config/config.pb'
+                        ],
+                        'ports': [{
+                            'containerPort': 8500
+                        }, {
+                            'containerPort': 8501
+                        }],
+                        'volumeMounts': [{
+                            'name': 'test-app-config',
+                            'mountPath': '/app/config/'
+                        }]
+                    }]
+                }
+            }
+        }
+        self._k8s_client.create_app(app_yaml=deployment_app_yaml, group='apps', version='v1', plural='Deployment')
+
+        mock_parse_and_get_fn.assert_called_once_with('test.hook:custom_magic_fn')
+        mock_create_namespaced_custom_object.assert_called_once()
+        self.assertEqual(mock_create_namespaced_custom_object.call_args[1]['body']['metadata']['name'], 'world_hello')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/k8s_watcher.py b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_watcher.py
new file mode 100644
index 000000000..84a2ad336
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_watcher.py
@@ -0,0 +1,265 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+from abc import ABC, abstractmethod
+import logging
+import multiprocessing
+import threading
+import queue
+import traceback
+from http import HTTPStatus
+from typing import Generator, NamedTuple, Optional, Tuple
+from kubernetes import client, watch
+from kubernetes.client import V1ObjectMeta
+from envs import Envs
+from fedlearner_webconsole.job.event_listener import JobEventListener
+from fedlearner_webconsole.k8s.k8s_cache import k8s_cache, Event, ObjectType
+from fedlearner_webconsole.utils.metrics import emit_store, emit_counter
+from fedlearner_webconsole.k8s.k8s_client import k8s_client
+from fedlearner_webconsole.k8s.models import CrdKind, get_app_name_from_metadata
+from fedlearner_webconsole.mmgr.event_listener import ModelEventListener
+
+
+class CrdWatcherConfig(NamedTuple):
+    version: str
+    group: str
+    plural: str
+    object_type: ObjectType
+
+
+WATCHER_CONFIG = {
+    CrdKind.FLAPP:
+        CrdWatcherConfig(
+            version='v1alpha1',
+            group='fedlearner.k8s.io',
+            plural='flapps',
+            object_type=ObjectType.FLAPP,
+        ),
+    CrdKind.SPARKAPPLICATION:
+        CrdWatcherConfig(
+            version='v1beta2',
+            group='sparkoperator.k8s.io',
+            plural='sparkapplications',
+            object_type=ObjectType.SPARKAPP,
+        ),
+    CrdKind.FEDAPP:
+        CrdWatcherConfig(
+            version='v1alpha1',
+            group='fedlearner.k8s.io',
+            plural='fedapps',
+            object_type=ObjectType.FEDAPP,
+        ),
+}
+
+_REQUEST_TIMEOUT_IN_SECOND = 900
+
+
+class AbstractWatcher(ABC):
+
+    @property
+    @abstractmethod
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def _watch(self, watcher: watch.Watch, resource_version: str) -> Generator[Tuple[Event, Optional[str]], None, None]:
+        """An abstract method which subclasses should implement to watch and produce events."""
+        raise NotImplementedError()
+
+    def _watch_forever(self, event_queue: 'queue.Queue[Event]'):
+        """Watches forever, which handles a lot of exceptions and
makes the watcher always work (hopefully)."""
+        # resource_version '0' means getting a recent resource without
+        # consistency guarantee; this reduces the load on etcd.
+        # Ref: https://kubernetes.io/docs/reference/using-api/api-concepts/#the-resourceversion-parameter
+        resource_version = '0'
+        watcher = watch.Watch()
+        while True:
+            try:
+                logging.info(f'[K8s watcher] [{self.kind}] start watching, resource version: {resource_version}')
+                # Each round we re-establish the watch with k8s
+                watch_stream = self._watch(watcher, resource_version)
+                emit_counter('k8s.watcher.watch', 1, tags={'kind': self.kind})
+                for (event, new_version) in watch_stream:
+                    if new_version:
+                        # Updates newest resource version, note that resource version is string,
+                        # using max is a little hacky.
+                        resource_version = max(resource_version, new_version)
+                        logging.debug(f'[K8s watcher] [{self.kind}] new resource version: {new_version}')
+                    event_queue.put(event)
+            except client.exceptions.ApiException as e:
+                logging.exception(f'[K8s watcher] [{self.kind}] API error')
+                if e.status == HTTPStatus.GONE:
+                    # It has been too old, resources should be relisted
+                    resource_version = '0'
+                # TODO(xiangyuxuan.prs): remove in the future.
+                elif e.status == HTTPStatus.NOT_FOUND:
+                    logging.exception(f'[K8s watcher] [{self.kind}] unsupported')
+                    break
+            except Exception:  # pylint: disable=broad-except
+                logging.exception(f'[K8s watcher] [{self.kind}] unexpected error')
+        watcher.stop()
+
+    def run(self, event_queue: queue.Queue, retry_timeout_in_second: int):
+        """Starts the watcher.
+
+        Historically the watcher (process) may hang and never send the requests to k8s API server,
+        so this is a workaround to retry after timeout.
+
+        Args:
+            event_queue: A queue to passthrough the events from watcher.
+            retry_timeout_in_second: If no event received within this threshold, the watcher gets restarted.
+        """
+        mp_context = multiprocessing.get_context('spawn')
+        internal_queue = mp_context.Queue()
+        process_name = f'k8s-watcher-{self.kind}'
+        process = mp_context.Process(name=process_name, daemon=True, target=self._watch_forever, args=(internal_queue,))
+        process.start()
+        logging.info(f'[K8s watcher] [{self.kind}] process started')
+        while True:
+            try:
+                # Waits for a new event with timeout, if it gets stuck, then we restart the watcher.
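+                # e.g. with retry_timeout_in_second=900 (the value K8sWatcher passes in),
+                # a watcher that stays silent for 15 minutes is presumed hung and restarted.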
+ event = internal_queue.get(timeout=retry_timeout_in_second) + # Puts to outside + event_queue.put(event) + except queue.Empty: + logging.info(f'[K8s watcher] [{self.kind}] no event in queue, restarting...') + process.terminate() + process.join() + # TODO(wangsen.0914): add process.close() here after upgrade to python 3.8 + internal_queue.close() + internal_queue = mp_context.Queue() + process = mp_context.Process(name=process_name, + daemon=True, + target=self._watch_forever, + args=(internal_queue,)) + process.start() + logging.info(f'[K8s watcher] [{self.kind}] process restarted') + + +class PodWatcher(AbstractWatcher): + + @property + def kind(self) -> str: + return ObjectType.POD.name + + def _watch(self, watcher: watch.Watch, resource_version: str) -> Generator[Tuple[Event, Optional[str]], None, None]: + stream = watcher.stream( + k8s_client.core.list_namespaced_pod, + namespace=Envs.K8S_NAMESPACE, + resource_version=resource_version, + # Sometimes watch gets stuck + _request_timeout=_REQUEST_TIMEOUT_IN_SECOND, + ) + for event in stream: + metadata: V1ObjectMeta = event['object'].metadata + if get_app_name_from_metadata(metadata): + yield Event.from_json(event, ObjectType.POD), metadata.resource_version + + +class CrdWatcher(AbstractWatcher): + + def __init__(self, config: CrdWatcherConfig): + super().__init__() + self.config = config + + @property + def kind(self) -> str: + return self.config.object_type.name + + def _watch(self, watcher: watch.Watch, resource_version: str) -> Generator[Tuple[Event, Optional[str]], None, None]: + stream = watcher.stream( + k8s_client.crds.list_namespaced_custom_object, + group=self.config.group, + version=self.config.version, + namespace=Envs.K8S_NAMESPACE, + plural=self.config.plural, + resource_version=resource_version, + # Sometimes watch gets stuck + _request_timeout=_REQUEST_TIMEOUT_IN_SECOND, + ) + for event in stream: + new_resource_version = event['object'].get('metadata', {}).get('resourceVersion', None) + yield Event.from_json(event, self.config.object_type), new_resource_version + + +class K8sWatcher(object): + + def __init__(self): + self._lock = threading.Lock() + self._running = False + self._event_consumer_thread = None + self._event_listeners = [JobEventListener(), ModelEventListener()] + + # https://stackoverflow.com/questions/62223424/simplequeue-vs-queue-in-python-what-is-the-advantage-of-using-simplequeue + # if use simplequeue, put opt never block. 
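+        # (the queue here is unbounded, so put() does not block in practice either;
+        # SimpleQueue would additionally make put() reentrant)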
+ # TODO(xiangyuxuan): change to simplequeue + self._queue = queue.Queue() + self._cache = {} + self._cache_lock = threading.Lock() + + def start(self): + with self._lock: + if self._running: + logging.warning('K8s watcher has already started') + return + self._running = True + + watchers = [PodWatcher()] + for _, crd_config in WATCHER_CONFIG.items(): + watchers.append(CrdWatcher(config=crd_config)) + watcher_threads = [ + threading.Thread( + name=f'k8s-watcher-{watcher.kind}', + target=watcher.run, + args=( + self._queue, + # Keep consistent with k8s watcher event timeout + _REQUEST_TIMEOUT_IN_SECOND, + ), + daemon=True, + ) for watcher in watchers + ] + + self._event_consumer_thread = threading.Thread(target=self._event_consumer, + name='cache_consumer', + daemon=True) + for wthread in watcher_threads: + wthread.start() + self._event_consumer_thread.start() + logging.info('K8s watcher started') + + def _event_consumer(self): + # TODO(xiangyuxuan): do more business level operations + while True: + try: + event = self._queue.get() + k8s_cache.update_cache(event) + self._listen_crd_event(event) + except Exception as e: # pylint: disable=broad-except + logging.error(f'K8s event_consumer : {str(e)}. ' f'traceback:{traceback.format_exc()}') + + def _listen_crd_event(self, event: Event): + if event.obj_type == ObjectType.POD: + return + for listener in self._event_listeners: + try: + listener.update(event) + # pylint: disable=broad-except + except Exception as e: + emit_store('event_listener_update_error', 1) + logging.warning(f'[K8sWatcher] listener update with error {str(e)}') + + +k8s_watcher = K8sWatcher() diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/k8s_watcher_test.py b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_watcher_test.py new file mode 100644 index 000000000..6cd6b480d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/k8s/k8s_watcher_test.py @@ -0,0 +1,291 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from http import HTTPStatus +import multiprocessing +import threading +from typing import Callable, Generator, List, NamedTuple, Optional, Tuple +import unittest +from unittest.mock import MagicMock, Mock, patch +from kubernetes import client, watch +from kubernetes.client import V1Pod, V1ObjectMeta, V1OwnerReference + +from fedlearner_webconsole.k8s.k8s_watcher import AbstractWatcher, CrdWatcher, CrdWatcherConfig, PodWatcher +from fedlearner_webconsole.k8s.k8s_cache import Event, EventType, ObjectType + + +class Response(NamedTuple): + # gone/unknown/not_found/normal + type: str + events: Optional[List[Event]] + + +# Why using fake implementation instead of mock? 
+# because mock does not work in multiprocessing +class FakeWatcher(AbstractWatcher): + + def __init__(self, resp_sequence: Optional[List[Response]]): + super().__init__() + self.calls = [] + self._resp_sequence = resp_sequence or [] + self._round = 0 + + @property + def kind(self) -> str: + return 'fake' + + def _watch(self, watcher: watch.Watch, resource_version: str) -> Generator[Tuple[Event, Optional[str]], None, None]: + assert isinstance(watcher, watch.Watch) + self.calls.append(resource_version) + if self._round < len(self._resp_sequence): + resp = self._resp_sequence[self._round] + self._round += 1 + if resp.type == 'gone': + raise client.exceptions.ApiException(status=HTTPStatus.GONE) + if resp.type == 'not_found': + raise client.exceptions.ApiException(status=HTTPStatus.NOT_FOUND) + if resp.type == 'unknown': + raise RuntimeError('fake unknown') + for i, event in enumerate(resp.events or []): + yield event, str(i) + return + # Dead loop + while True: + pass + + +def _fake_event_from_json(event, object_type): + return event, object_type + + +class AbstractWatcherTest(unittest.TestCase): + + def _start_thread(self, func: Callable, args) -> threading.Thread: + t = threading.Thread(daemon=True, target=func, args=args) + t.start() + return t + + def test_watch_normally(self): + events = [ + Event(app_name='t1', event_type=EventType.ADDED, obj_type=ObjectType.POD, obj_dict={}), + Event(app_name='t2', event_type=EventType.MODIFIED, obj_type=ObjectType.POD, obj_dict={}), + Event(app_name='t3', event_type=EventType.MODIFIED, obj_type=ObjectType.POD, obj_dict={}), + ] + + watcher = FakeWatcher(resp_sequence=[ + Response(type='normal', events=events), + ]) + q = multiprocessing.Queue() + self._start_thread(watcher.run, args=( + q, + 1000, + )) + actual_events = [q.get(), q.get(), q.get()] + q.close() + + app_names = [e.app_name for e in actual_events] + self.assertEqual(app_names, ['t1', 't2', 't3']) + + def test_watch_k8s_api_gone(self): + events = [ + Event(app_name='t1', event_type=EventType.ADDED, obj_type=ObjectType.POD, obj_dict={}), + Event(app_name='t2', event_type=EventType.MODIFIED, obj_type=ObjectType.POD, obj_dict={}), + ] + + watcher = FakeWatcher(resp_sequence=[ + Response(type='gone', events=None), + Response(type='normal', events=events), + ]) + q = multiprocessing.Queue() + self._start_thread(watcher.run, args=( + q, + 1000, + )) + actual_events = [q.get(), q.get()] + q.close() + + app_names = [e.app_name for e in actual_events] + self.assertEqual(app_names, ['t1', 't2']) + + def test_watch_unknown_k8s_error(self): + events1 = [ + Event(app_name='t1', event_type=EventType.ADDED, obj_type=ObjectType.POD, obj_dict={}), + Event(app_name='t2', event_type=EventType.MODIFIED, obj_type=ObjectType.POD, obj_dict={}), + ] + events2 = [ + Event(app_name='t3', event_type=EventType.ADDED, obj_type=ObjectType.POD, obj_dict={}), + ] + + watcher = FakeWatcher(resp_sequence=[ + Response(type='normal', events=events1), + Response(type='unknown', events=None), + Response(type='normal', events=events2), + ]) + q = multiprocessing.Queue() + self._start_thread(watcher.run, args=( + q, + 1000, + )) + actual_events = [q.get(), q.get(), q.get()] + q.close() + + app_names = [e.app_name for e in actual_events] + self.assertEqual(app_names, ['t1', 't2', 't3']) + + def test_watch_client_hangs(self): + events = [ + Event(app_name='t1', event_type=EventType.ADDED, obj_type=ObjectType.POD, obj_dict={}), + Event(app_name='t2', event_type=EventType.MODIFIED, obj_type=ObjectType.POD, obj_dict={}), + ] + + 
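+        # Only one scripted response: once it is consumed, FakeWatcher spins
+        # forever, emulating a hung watch connection that run() must detect.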
watcher = FakeWatcher(resp_sequence=[
+            Response(type='normal', events=events),
+        ])
+        q = multiprocessing.Queue()
+        self._start_thread(watcher.run, args=(
+            q,
+            15,
+        ))
+        # If no event arrives within 15s, run() restarts the watcher process,
+        # so the same two events get re-emitted (at least 4 events in total)
+        actual_events = [q.get(), q.get(), q.get(), q.get()]
+        q.close()
+
+        app_names = [e.app_name for e in actual_events]
+        self.assertEqual(app_names, ['t1', 't2', 't1', 't2'])
+
+
+class PodWatcherTest(unittest.TestCase):
+
+    def test_kind(self):
+        watcher = PodWatcher()
+        self.assertEqual(watcher.kind, 'POD')
+
+    @patch('fedlearner_webconsole.k8s.k8s_watcher.Envs.K8S_NAMESPACE', 'fedlearner')
+    @patch('fedlearner_webconsole.k8s.k8s_watcher.Event.from_json', _fake_event_from_json)
+    @patch('fedlearner_webconsole.k8s.k8s_watcher.k8s_client')
+    def test_watch(self, mock_k8s_client: Mock):
+        mock_k8s_client.return_value = MagicMock(core=MagicMock(list_namespaced_pod=MagicMock()))
+        events = [
+            {
+                'object':
+                    V1Pod(metadata=V1ObjectMeta(
+                        resource_version='123',
+                        owner_references=[
+                            V1OwnerReference(
+                                api_version='v1',
+                                controller=True,
+                                kind='Pod',
+                                name='test-driver',
+                                uid='812c1a48-5585-400f-9174-471d311fbec3',
+                            )
+                        ],
+                        labels={
+                            'sparkoperator.k8s.io/app-name': 'spark-app-name',
+                        },
+                    )),
+            },
+            {
+                'object':
+                    V1Pod(metadata=V1ObjectMeta(
+                        resource_version='234',
+                        owner_references=[
+                            V1OwnerReference(
+                                api_version='v1',
+                                controller=True,
+                                kind='Pod',
+                                name='test-driver',
+                                uid='812c1a48-5585-400f-9174-471d311fbec3',
+                            )
+                        ],
+                        labels={
+                            'sparkoperator.k8s.io/app-name': 'spark-app-name',
+                        },
+                    )),
+            },
+        ]
+        mock_stream = MagicMock(return_value=events)
+        mock_watcher_client = MagicMock(stream=mock_stream)
+
+        watcher = PodWatcher()
+        self.assertEqual(
+            list(watcher._watch(mock_watcher_client, '0')),  # pylint: disable=protected-access
+            [
+                ((events[0], ObjectType.POD), '123'),
+                ((events[1], ObjectType.POD), '234'),
+            ])
+        mock_stream.assert_called_once_with(
+            mock_k8s_client.core.list_namespaced_pod,
+            namespace='fedlearner',
+            resource_version='0',
+            _request_timeout=900,
+        )
+
+
+class CrdWatcherTest(unittest.TestCase):
+    WATCHER_CONFIG = CrdWatcherConfig(
+        version='v1alpha1',
+        group='fedlearner.k8s.io',
+        plural='fedapps',
+        object_type=ObjectType.FEDAPP,
+    )
+
+    def test_kind(self):
+        watcher = CrdWatcher(self.WATCHER_CONFIG)
+        self.assertEqual(watcher.kind, 'FEDAPP')
+
+    @patch('fedlearner_webconsole.k8s.k8s_watcher.Envs.K8S_NAMESPACE', 'fedlearner')
+    @patch('fedlearner_webconsole.k8s.k8s_watcher.Event.from_json', _fake_event_from_json)
+    @patch('fedlearner_webconsole.k8s.k8s_watcher.k8s_client')
+    def test_watch(self, mock_k8s_client: Mock):
+        mock_k8s_client.return_value = MagicMock(crds=MagicMock(list_namespaced_custom_object=MagicMock()))
+        events = [
+            {
+                'object': {
+                    'metadata': {
+                        'resourceVersion': '1111',
+                    },
+                },
+            },
+            {
+                'object': {
+                    'metadata': {
+                        'resourceVersion': '2222',
+                    },
+                },
+            },
+        ]
+        mock_stream = MagicMock(return_value=events)
+        mock_watcher_client = MagicMock(stream=mock_stream)
+
+        watcher = CrdWatcher(self.WATCHER_CONFIG)
+        self.assertEqual(
+            list(watcher._watch(mock_watcher_client, '1000')),  # pylint: disable=protected-access
+            [
+                ((events[0], ObjectType.FEDAPP), '1111'),
+                ((events[1], ObjectType.FEDAPP), '2222'),
+            ])
+        mock_stream.assert_called_once_with(
+            mock_k8s_client.crds.list_namespaced_custom_object,
+            group=self.WATCHER_CONFIG.group,
+            version=self.WATCHER_CONFIG.version,
+            namespace='fedlearner',
+            plural=self.WATCHER_CONFIG.plural,
resource_version='1000', + _request_timeout=900, + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/models.py b/web_console_v2/api/fedlearner_webconsole/k8s/models.py index 458f39f81..23969acff 100644 --- a/web_console_v2/api/fedlearner_webconsole/k8s/models.py +++ b/web_console_v2/api/fedlearner_webconsole/k8s/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. @@ -17,28 +17,17 @@ from abc import ABCMeta, abstractmethod from datetime import datetime, timezone from enum import Enum, unique -from typing import Optional, List +from typing import Optional, List, Dict, NamedTuple +from google.protobuf.json_format import ParseDict +from kubernetes.client import V1ObjectMeta +from fedlearner_webconsole.proto.job_pb2 import PodPb +from fedlearner_webconsole.proto.k8s_pb2 import Condition +from fedlearner_webconsole.utils.pp_datetime import to_timestamp -# Please keep the value consistent with operator's definition -@unique -class PodType(Enum): - UNKNOWN = 'UNKNOWN' - # Parameter server - PS = 'PS' - # Master worker - MASTER = 'MASTER' - WORKER = 'WORKER' - - @staticmethod - def from_value(value: str) -> 'PodType': - try: - if isinstance(value, str): - value = value.upper() - return PodType(value) - except ValueError: - logging.error(f'Unexpected value of PodType: {value}') - return PodType.UNKNOWN +class PodMessage(NamedTuple): + summary: Optional[str] + details: str @unique @@ -64,16 +53,30 @@ def from_value(value: str) -> 'PodState': return PodState.UNKNOWN +class CrdKind(Enum): + FLAPP = 'FLApp' + SPARKAPPLICATION = 'SparkApplication' + FEDAPP = 'FedApp' + UNKNOWN = 'Unknown' + + @staticmethod + def from_value(value: str) -> 'CrdKind': + try: + return CrdKind(value) + except ValueError: + return CrdKind.UNKNOWN + + class MessageProvider(metaclass=ABCMeta): + @abstractmethod def get_message(self, private: bool = False) -> Optional[str]: pass class ContainerState(MessageProvider): - def __init__(self, state: str, - message: Optional[str] = None, - reason: Optional[str] = None): + + def __init__(self, state: str, message: Optional[str] = None, reason: Optional[str] = None): self.state = state self.message = message self.reason = reason @@ -95,9 +98,8 @@ def __eq__(self, other): class PodCondition(MessageProvider): - def __init__(self, cond_type: str, - message: Optional[str] = None, - reason: Optional[str] = None): + + def __init__(self, cond_type: str, message: Optional[str] = None, reason: Optional[str] = None): self.cond_type = cond_type self.message = message self.reason = reason @@ -119,19 +121,24 @@ def __eq__(self, other): class Pod(object): + def __init__(self, name: str, state: PodState, - pod_type: PodType, + pod_type: str = 'UNKNOWN', pod_ip: str = None, container_states: List[ContainerState] = None, - pod_conditions: List[PodCondition] = None): + pod_conditions: List[PodCondition] = None, + creation_timestamp: int = None, + status_message: str = None): self.name = name self.state = state or PodState.UNKNOWN self.pod_type = pod_type self.pod_ip = pod_ip self.container_states = container_states or [] self.pod_conditions = pod_conditions or [] + self.creation_timestamp = creation_timestamp or 0 + self.status_message = status_message or '' def __eq__(self, other): if not isinstance(other, Pod): @@ 
-149,27 +156,32 @@ def __eq__(self, other):
         return self.name == other.name and \
             self.state == other.state and \
             self.pod_type == other.pod_type and \
-            self.pod_ip == other.pod_ip
+            self.pod_ip == other.pod_ip and \
+            self.creation_timestamp == other.creation_timestamp
 
-    def to_dict(self, include_private_info: bool = False):
-        # TODO: to reuse to_dict from db.py
-        messages = []
+    def to_proto(self, include_private_info: bool = False) -> PodPb:
+
+        return PodPb(name=self.name,
+                     pod_type=self.pod_type,
+                     state=self.state.name,
+                     pod_ip=self.pod_ip,
+                     creation_timestamp=self.creation_timestamp,
+                     message=self.get_message(include_private_info).details)
+
+    def get_message(self, include_private_info: bool = False) -> PodMessage:
+        summary = None
+        messages = [self.status_message] if self.status_message else []
         for container_state in self.container_states:
             message = container_state.get_message(include_private_info)
             if message is not None:
                 messages.append(message)
+            if container_state.state == 'terminated':
+                summary = message
         for pod_condition in self.pod_conditions:
             message = pod_condition.get_message(include_private_info)
             if message is not None:
                 messages.append(message)
-
-        return {
-            'name': self.name,
-            'pod_type': self.pod_type.name,
-            'state': self.state.name,
-            'pod_ip': self.pod_ip,
-            'message': ', '.join(messages)
-        }
+        return PodMessage(summary=summary, details=', '.join(messages))
 
     @classmethod
     def from_json(cls, p: dict) -> 'Pod':
@@ -179,32 +191,83 @@ def from_json(cls, p: dict) -> 'Pod':
         master/v1.6.5-standalone/pod.json"""
         container_states: List[ContainerState] = []
         pod_conditions: List[PodCondition] = []
-        if 'containerStatuses' in p['status'] and \
-           isinstance(p['status']['containerStatuses'], list) and \
-           len(p['status']['containerStatuses']) > 0:
+        if 'container_statuses' in p['status'] and \
+                isinstance(p['status']['container_statuses'], list) and \
+                len(p['status']['container_statuses']) > 0:
             for state, detail in \
-                p['status']['containerStatuses'][0]['state'].items():
-                container_states.append(ContainerState(
-                    state=state,
-                    message=detail.get('message'),
-                    reason=detail.get('reason')
-                ))
+                    p['status']['container_statuses'][0]['state'].items():
+                # detail may be None, so use a short-circuit 'and' to guard
+                # the .get() calls below
+                container_states.append(
+                    ContainerState(state=state,
+                                   message=detail and detail.get('message'),
+                                   reason=detail and detail.get('reason')))
         if 'conditions' in p['status'] and \
             isinstance(p['status']['conditions'], list):
             for cond in p['status']['conditions']:
-                pod_conditions.append(PodCondition(
-                    cond_type=cond['type'],
-                    message=cond.get('message'),
-                    reason=cond.get('reason')
-                ))
-        return cls(
-            name=p['metadata']['name'],
-            pod_type=PodType.from_value(
-                p['metadata']['labels']['fl-replica-type']),
-            state=PodState.from_value(p['status']['phase']),
-            pod_ip=p['status'].get('pod_ip'),
-            container_states=container_states,
-            pod_conditions=pod_conditions)
+                pod_conditions.append(
+                    PodCondition(cond_type=cond['type'], message=cond.get('message'), reason=cond.get('reason')))
+
+        return cls(name=p['metadata']['name'],
+                   pod_type=get_pod_type(p),
+                   state=PodState.from_value(p['status']['phase']),
+                   pod_ip=p['status'].get('pod_ip'),
+                   container_states=container_states,
+                   pod_conditions=pod_conditions,
+                   creation_timestamp=to_timestamp(p['metadata']['creation_timestamp']),
+                   status_message=p['status'].get('message'))
+
+
+def get_pod_type(pod: dict) -> str:
+    labels = pod['metadata']['labels']
+    # SparkApplication ->
pod.metadata.labels.spark-role + # FlApp -> pod.metadata.labels.fl-replica-type + pod_type = labels.get('fl-replica-type', None) or labels.get('spark-role', 'UNKNOWN') + return pod_type.upper() + + +def get_creation_timestamp_from_k8s_app(app: dict) -> int: + if 'metadata' in app and 'creationTimestamp' in app['metadata']: + return to_timestamp(app['metadata']['creationTimestamp']) + return 0 + + +class K8sApp(metaclass=ABCMeta): + + @classmethod + @abstractmethod + def from_json(cls, app_detail: dict): + pass + + @property + @abstractmethod + def is_completed(self) -> bool: + pass + + @property + @abstractmethod + def is_failed(self) -> bool: + pass + + @property + @abstractmethod + def completed_at(self) -> int: + pass + + @property + @abstractmethod + def pods(self) -> List[Pod]: + pass + + @property + @abstractmethod + def error_message(self) -> Optional[str]: + pass + + @property + @abstractmethod + def creation_timestamp(self) -> int: + pass # Please keep the value consistent with operator's definition @@ -229,14 +292,19 @@ def from_value(value: str) -> 'FlAppState': return FlAppState.UNKNOWN -class FlApp(object): +class FlApp(K8sApp): + def __init__(self, state: FlAppState = FlAppState.UNKNOWN, pods: Optional[List[Pod]] = None, - completed_at: Optional[int] = None): + completed_at: Optional[int] = None, + creation_timestamp: Optional[int] = None): self.state = state - self.pods = pods or [] - self.completed_at = completed_at + self._pods = pods or [] + self._completed_at = completed_at + self._is_failed = self.state == FlAppState.FAILED + self._is_completed = self.state == FlAppState.COMPLETED + self._creation_timestamp = creation_timestamp def __eq__(self, other): if not isinstance(other, FlApp): @@ -250,7 +318,8 @@ def __eq__(self, other): self.completed_at == other.completed_at @classmethod - def from_json(cls, flapp: dict) -> 'FlApp': + def from_json(cls, app_detail: dict) -> 'FlApp': + flapp = app_detail.get('app', None) if flapp is None \ or 'status' not in flapp \ or not isinstance(flapp['status'], dict): @@ -261,24 +330,277 @@ def from_json(cls, flapp: dict) -> 'FlApp': # Parses pod related info replicas = flapp['status'].get('flReplicaStatus', {}) for pod_type in replicas: - for state in ['failed', 'succeeded']: + for state in ['active', 'failed', 'succeeded']: for pod_name in replicas[pod_type].get(state, {}): + if state == 'active': + pod_state = PodState.RUNNING if state == 'failed': pod_state = PodState.FAILED_AND_FREED - else: + if state == 'succeeded': pod_state = PodState.SUCCEEDED_AND_FREED pods.append(Pod(name=pod_name, pod_type=pod_type.upper(), state=pod_state)) state = flapp['status'].get('appState') if flapp['status'].get('completionTime', None): # Completion time is an ISO-formatted datetime in the UTC timezone completed_at = int( datetime.strptime(flapp['status']['completionTime'], '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc).timestamp()) + + name_to_pod = get_pod_dict_from_detail(app_detail.get('pods', {})) + for pod in pods: + # Only master and PS pods use the state from the FLApp, + # because they do not exit immediately when the FLApp is deleted. 
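+ # Illustrative note (an assumption added for clarity, not part of this
+ # change): if the pod cache reports a pod named 'master-0' as RUNNING
+ # while the FLApp replica status lists it under 'failed', the loop below
+ # keeps the cached Pod object but overwrites its state for MASTER/PS
+ # pods, e.g. name_to_pod['master-0'].state = PodState.FAILED_AND_FREED;
+ # WORKER pods keep the live state from the pod cache.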
+ if pod.name not in name_to_pod: + name_to_pod[pod.name] = pod + elif pod.pod_type in ['MASTER', 'PS']: + name_to_pod[pod.name].state = pod.state + + pods = list(name_to_pod.values()) return cls(state=FlAppState.from_value(state), pods=pods, - completed_at=completed_at) + completed_at=completed_at, + creation_timestamp=get_creation_timestamp_from_k8s_app(flapp)) + + @property + def is_completed(self) -> bool: + return self._is_completed + + @property + def is_failed(self) -> bool: + return self._is_failed + + @property + def completed_at(self) -> int: + return self._completed_at or 0 + + @property + def pods(self) -> List[Pod]: + return self._pods + + @property + def error_message(self) -> Optional[str]: + return None + + @property + def creation_timestamp(self) -> int: + return self._creation_timestamp or 0 + + +@unique +class SparkAppState(Enum): + # state: https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/ \ + # blob/075e5383e4678ddd70d7f3fdd71904aa3c9113c2 \ + # /pkg/apis/sparkoperator.k8s.io/v1beta2/types.go#L332 + + # core state transition: SUBMITTED -> RUNNING -> COMPLETED/FAILED + NEW = '' + SUBMITTED = 'SUBMITTED' + RUNNING = 'RUNNING' + COMPLETED = 'COMPLETED' + FAILED = 'FAILED' + SUBMISSION_FAILED = 'SUBMISSION_FAILED' + PENDING_RERUN = 'PENDING_RERUN' + INVALIDATING = 'INVALIDATING' + SUCCEEDING = 'SUCCEEDING' + FAILING = 'FAILING' + UNKNOWN = 'UNKNOWN' + + @staticmethod + def from_value(value: str) -> 'SparkAppState': + try: + return SparkAppState(value) + except ValueError: + logging.error(f'Unexpected value of SparkAppState: {value}') + return SparkAppState.UNKNOWN + + +class SparkApp(K8sApp): + + def __init__(self, + pods: List[Pod], + state: SparkAppState = SparkAppState.UNKNOWN, + completed_at: Optional[int] = None, + err_message: Optional[str] = None, + creation_timestamp: Optional[int] = None): + self.state = state + self._completed_at = completed_at + self._is_failed = self.state in [SparkAppState.FAILED] + self._is_completed = self.state in [SparkAppState.COMPLETED] + self._pods = pods + self._error_message = err_message + self._creation_timestamp = creation_timestamp + + def __eq__(self, other): + if not isinstance(other, SparkApp): + return False + return self.state == other.state and \ + self.completed_at == other.completed_at + + @classmethod + def from_json(cls, app_detail: dict) -> 'SparkApp': + sparkapp = app_detail.get('app', None) + if sparkapp is None \ + or 'status' not in sparkapp \ + or not isinstance(sparkapp['status'], dict): + return cls(pods=[]) + + status = sparkapp['status'] + application_state = status.get('applicationState', {}) + state = application_state.get('state', SparkAppState.UNKNOWN) + completed_at: Optional[int] = None + termination_time = status.get('terminationTime', None) + if termination_time: + # Termination time is an ISO-formatted datetime in the UTC timezone + completed_at = int( + datetime.strptime(termination_time, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc).timestamp()) + pods = list(get_pod_dict_from_detail(app_detail.get('pods', {})).values()) + err_message = application_state.get('errorMessage', None) + return cls(state=SparkAppState.from_value(state), + completed_at=completed_at, + pods=pods, + err_message=err_message, + creation_timestamp=get_creation_timestamp_from_k8s_app(sparkapp)) + + @property + def is_completed(self) -> bool: + return self._is_completed + + @property + def is_failed(self) -> bool: + return self._is_failed + + @property + def completed_at(self) -> int: + return self._completed_at or 0 + + 
@property + def pods(self) -> List[Pod]: + return self._pods + + @property + def error_message(self) -> Optional[str]: + return self._error_message + + @property + def creation_timestamp(self) -> int: + return self._creation_timestamp or 0 + + +class FedApp(K8sApp): + + def __init__(self, pods: List[Pod], success_condition: Condition, creation_timestamp: Optional[int] = None): + self.success_condition = success_condition + self._pods = pods + self._completed_at = self.success_condition.last_transition_time and to_timestamp( + self.success_condition.last_transition_time) + self._is_failed = self.success_condition.status == Condition.FALSE + self._is_completed = self.success_condition.status == Condition.TRUE + self._creation_timestamp = creation_timestamp + + @classmethod + def from_json(cls, app_detail: dict) -> 'FedApp': + app = app_detail.get('app', None) + if app is None \ + or 'status' not in app \ + or not isinstance(app['status'], dict): + return cls([], Condition()) + + status = app['status'] + success_condition = Condition() + for c in status.get('conditions', []): + c_proto: Condition = ParseDict(c, Condition()) + if c_proto.type == Condition.SUCCEEDED: + success_condition = c_proto + pods = list(get_pod_dict_from_detail(app_detail.get('pods', {})).values()) + return cls(success_condition=success_condition, + pods=pods, + creation_timestamp=get_creation_timestamp_from_k8s_app(app)) + + @property + def is_completed(self) -> bool: + return self._is_completed + + @property + def is_failed(self) -> bool: + return self._is_failed + + @property + def completed_at(self) -> int: + return self._completed_at or 0 + + @property + def pods(self) -> List[Pod]: + return self._pods + + @property + def error_message(self) -> str: + return f'{self.success_condition.reason}: {self.success_condition.message}' + + @property + def creation_timestamp(self) -> int: + return self._creation_timestamp or 0 + + +class UnknownCrd(K8sApp): + + @classmethod + def from_json(cls, app_detail: dict) -> 'UnknownCrd': + return UnknownCrd() + + @property + def is_completed(self) -> bool: + return False + + @property + def is_failed(self) -> bool: + return False + + @property + def completed_at(self) -> int: + return 0 + + @property + def pods(self) -> List[Pod]: + return [] + + @property + def error_message(self) -> Optional[str]: + return None + + @property + def creation_timestamp(self) -> int: + # Returns 0 to match the int signature declared on K8sApp + return 0 + + +def get_pod_dict_from_detail(pod_detail: dict) -> Dict[str, Pod]: + """ + Generates a name-to-Pod dict from the pod JSON detail fetched from the pod cache. + """ + name_to_pod = {} + pods_json = pod_detail.get('items', []) + for p in pods_json: + pod = Pod.from_json(p) + name_to_pod[pod.name] = pod + return name_to_pod + + +def get_app_name_from_metadata(metadata: V1ObjectMeta) -> Optional[str]: + """Extracts the CR app name from the metadata. + + The metadata comes from a k8s watch event; we only care about events + related to CRs, so we check the owner references.""" + owner_refs = metadata.owner_references or [] + if not owner_refs: + return None + + # Spark apps use labels to get the app name instead of owner references, + # because an executor's owner reference is the driver pod, not the Spark app. 
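+ # Worked example (hypothetical names, added for illustration): a Spark
+ # executor pod may carry
+ # labels={'sparkoperator.k8s.io/app-name': 'my-spark-app'}
+ # while its owner_references point at the driver pod (kind='Pod'), so
+ # resolving via owner references alone would return the driver instead
+ # of the SparkApplication; the label lookup below handles that case first.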
+ labels = metadata.labels or {} + sparkapp_name = labels.get('sparkoperator.k8s.io/app-name', None) + if sparkapp_name: + return sparkapp_name + + owner = owner_refs[0] + if CrdKind.from_value(owner.kind) == CrdKind.UNKNOWN: + return None + return owner.name diff --git a/web_console_v2/api/fedlearner_webconsole/k8s/models_test.py b/web_console_v2/api/fedlearner_webconsole/k8s/models_test.py new file mode 100644 index 000000000..85e98f12e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/k8s/models_test.py @@ -0,0 +1,374 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from datetime import datetime, timezone + +from kubernetes.client import V1ObjectMeta, V1OwnerReference + +from fedlearner_webconsole.k8s.models import PodState, ContainerState, \ + PodCondition, Pod, FlAppState, FlApp, SparkApp, SparkAppState, FedApp, get_app_name_from_metadata, PodMessage +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.proto.job_pb2 import PodPb + + +class PodStateTest(unittest.TestCase): + + def test_from_string(self): + self.assertEqual(PodState.from_value('Running'), PodState.RUNNING) + self.assertEqual(PodState.from_value('Unknown'), PodState.UNKNOWN) + + def test_from_unknown(self): + self.assertEqual(PodState.from_value('hhhhhhhh'), PodState.UNKNOWN) + + +class ContainerStateTest(unittest.TestCase): + + def test_get_message(self): + state = ContainerState(state='haha', message='test message', reason='test reason') + self.assertEqual(state.get_message(), 'haha:test reason') + self.assertEqual(state.get_message(private=True), 'haha:test message') + state.message = None + self.assertEqual(state.get_message(), 'haha:test reason') + self.assertEqual(state.get_message(private=True), 'haha:test reason') + + +class PodConditionTest(unittest.TestCase): + + def test_get_message(self): + cond = PodCondition(cond_type='t1', message='test message', reason='test reason') + self.assertEqual(cond.get_message(), 't1:test reason') + self.assertEqual(cond.get_message(private=True), 't1:test message') + cond.message = None + self.assertEqual(cond.get_message(), 't1:test reason') + self.assertEqual(cond.get_message(private=True), 't1:test reason') + + +class PodTest(unittest.TestCase): + + def test_to_proto(self): + pod = Pod(name='this-is-a-pod', + state=PodState.RUNNING, + pod_type='WORKER', + pod_ip='172.10.0.20', + container_states=[ContainerState(state='h1', message='test message')], + pod_conditions=[PodCondition(cond_type='h2', reason='test reason')], + creation_timestamp=100) + self.assertEqual( + pod.to_proto(include_private_info=True), + PodPb( + name='this-is-a-pod', + pod_type='WORKER', + state='RUNNING', + pod_ip='172.10.0.20', + message='h1:test message, h2:test reason', + creation_timestamp=100, + )) + + def test_from_json(self): + creation_timestamp = datetime.utcnow() + json = { + 'metadata': { + 'name': 'test-pod', + 'labels': { + 'app-name': 
'u244777dac51949c5b2b-data-join-job', + 'fl-replica-type': 'master' + }, + 'creation_timestamp': creation_timestamp, + }, + 'status': { + 'pod_ip': + '172.10.0.20', + 'message': + 'test', + 'phase': + 'Running', + 'conditions': [{ + 'type': 'Failed', + 'reason': 'Test reason' + }], + 'container_statuses': [{ + 'containerID': + 'docker://034eaf58d4e24581232832661636da9949b6e2fb05\ + 6398939fc2c0f2809d4c64', + 'image': + 'artifact.bytedance.com/fedlearner/fedlearner:438d603', + 'state': { + 'running': { + 'message': 'Test message' + } + } + }] + }, + 'spec': { + 'containers': [{ + 'name': 'test-container', + 'resources': { + 'limits': { + 'cpu': '2000m', + 'memory': '4Gi', + }, + 'requests': { + 'cpu': '2000m', + 'memory': '4Gi', + } + } + }] + } + } + expected_pod = Pod(name='test-pod', + state=PodState.RUNNING, + pod_type='MASTER', + pod_ip='172.10.0.20', + container_states=[ContainerState(state='running', message='Test message')], + pod_conditions=[PodCondition(cond_type='Failed', reason='Test reason')], + creation_timestamp=to_timestamp(creation_timestamp), + status_message='test') + self.assertEqual(Pod.from_json(json), expected_pod) + + def test_get_message(self): + pod = Pod(name='test', + state=PodState.FAILED, + container_states=[ + ContainerState(state='terminated', message='0101010'), + ContainerState(state='running', message='11') + ]) + self.assertEqual(pod.get_message(True), + PodMessage(summary='terminated:0101010', details='terminated:0101010, running:11')) + self.assertEqual(pod.get_message(False), PodMessage(summary=None, details='')) + pod.container_states = [ContainerState(state='terminated')] + self.assertEqual(pod.get_message(True), PodMessage(summary=None, details='')) + + +class FlAppStateTest(unittest.TestCase): + + def test_from_string(self): + self.assertEqual(FlAppState.from_value('FLStateComplete'), FlAppState.COMPLETED) + self.assertEqual(FlAppState.from_value('Unknown'), FlAppState.UNKNOWN) + + def test_from_unknown(self): + self.assertEqual(FlAppState.from_value('hhh123hhh'), FlAppState.UNKNOWN) + + +class FlAppTest(unittest.TestCase): + + def test_from_json(self): + json = { + 'app': { + 'metadata': { + 'creationTimestamp': '2022-09-27T09:07:01Z', + }, + 'status': { + 'appState': 'FLStateComplete', + 'completionTime': '2021-04-26T08:33:45Z', + 'flReplicaStatus': { + 'Master': { + 'active': { + 'test-pod1': {} + }, + 'failed': { + 'test-pod2': {} + }, + }, + 'Worker': { + 'succeeded': { + 'test-pod3': {}, + 'test-pod4': {} + } + } + } + } + } + } + completed_at = int(datetime(2021, 4, 26, 8, 33, 45, tzinfo=timezone.utc).timestamp()) + expected_flapp = FlApp(state=FlAppState.COMPLETED, + completed_at=completed_at, + pods=[ + Pod(name='test-pod1', state=PodState.RUNNING, pod_type='MASTER'), + Pod(name='test-pod2', state=PodState.FAILED_AND_FREED, pod_type='MASTER'), + Pod(name='test-pod3', state=PodState.SUCCEEDED_AND_FREED, pod_type='WORKER'), + Pod(name='test-pod4', state=PodState.SUCCEEDED_AND_FREED, pod_type='WORKER') + ]) + actual_flapp = FlApp.from_json(json) + self.assertEqual(actual_flapp, expected_flapp) + self.assertEqual(actual_flapp.is_completed, True) + self.assertEqual(actual_flapp.completed_at, 1619426025) + self.assertEqual(actual_flapp.creation_timestamp, 1664269621) + + +class SparkAppTest(unittest.TestCase): + + def test_from_json(self): + json = { + 'app': { + 'metadata': { + 'creationTimestamp': '2022-09-27T09:07:01Z', + }, + 'status': { + 'applicationState': { + 'state': 'COMPLETED', + 'errorMessage': 'OOMKilled' + }, + 'driverInfo': { + 
'podName': 'fl-transformer-yaml-driver', + }, + 'executionAttempts': 1, + 'executorState': { + 'fedlearnertransformer-4a859f78d5210f41-exec-1': 'RUNNING' + }, + 'lastSubmissionAttemptTime': '2021-04-15T10:43:28Z', + 'sparkApplicationId': 'spark-adade63e9071431881d6a16666ec1c87', + 'submissionAttempts': 1, + 'submissionID': '37a07c69-516b-48fe-ae70-701eec529eda', + 'terminationTime': '2021-04-15T10:43:53Z' + } + } + } + + completed_at = int(datetime(2021, 4, 15, 10, 43, 53, tzinfo=timezone.utc).timestamp()) + expected_sparkapp = SparkApp(state=SparkAppState.COMPLETED, completed_at=completed_at, pods=[]) + actual_sparkapp = SparkApp.from_json(json) + self.assertEqual(actual_sparkapp, expected_sparkapp) + self.assertEqual(actual_sparkapp.is_completed, True) + self.assertEqual(actual_sparkapp.is_failed, False) + self.assertEqual(actual_sparkapp.completed_at, 1618483433) + self.assertEqual(actual_sparkapp.error_message, 'OOMKilled') + self.assertEqual(actual_sparkapp.creation_timestamp, 1664269621) + + +class FedAppTest(unittest.TestCase): + + def test_from_json(self): + json = { + 'app': { + 'metadata': { + 'creationTimestamp': '2022-09-27T09:07:01Z', + }, + 'status': { + 'conditions': [{ + 'type': 'succeeded', + 'status': 'False', + 'lastTransitionTime': '2022-01-17T12:06:33Z', + 'reason': 'OutOfLimitation', + 'message': 'detail' + }] + } + } + } + fed_app = FedApp.from_json(json) + self.assertEqual(fed_app.is_failed, True) + self.assertEqual(fed_app.is_completed, False) + self.assertEqual(fed_app.completed_at, 1642421193) + self.assertEqual(fed_app.error_message, 'OutOfLimitation: detail') + self.assertEqual(fed_app.creation_timestamp, 1664269621) + json = {'app': {'status': {'conditions': []}}} + fed_app = FedApp.from_json(json) + self.assertEqual(fed_app.is_failed, False) + self.assertEqual(fed_app.is_completed, False) + self.assertEqual(fed_app.completed_at, 0) + self.assertEqual(fed_app.creation_timestamp, 0) + + +class GetAppNameFromMetadataTest(unittest.TestCase): + + def test_pure_pod(self): + metadata = V1ObjectMeta( + name='test-pod', + namespace='fedlearner', + ) + self.assertIsNone(get_app_name_from_metadata(metadata)) + + def test_sparkapp(self): + metadata = V1ObjectMeta( + name='test-driver', + namespace='fedlearner', + owner_references=[ + V1OwnerReference( + api_version='sparkoperator.k8s.io/v1beta2', + controller=True, + kind='SparkApplication', + name='spark-app-name', + uid='812c1a48-5585-400f-9174-471d311fbec3', + ) + ], + labels={ + 'sparkoperator.k8s.io/app-name': 'spark-app-name', + 'sparkoperator.k8s.io/launched-by-spark-operator': 'true', + }, + ) + self.assertEqual(get_app_name_from_metadata(metadata), 'spark-app-name') + metadata = V1ObjectMeta( + name='test-executor', + namespace='fedlearner', + owner_references=[ + V1OwnerReference( + api_version='v1', + controller=True, + kind='Pod', + name='test-driver', + uid='812c1a48-5585-400f-9174-471d311fbec3', + ) + ], + labels={ + 'sparkoperator.k8s.io/app-name': 'spark-app-name', + }, + ) + self.assertEqual(get_app_name_from_metadata(metadata), 'spark-app-name') + + def test_fedapp(self): + metadata = V1ObjectMeta(name='test-pod', + namespace='default', + owner_references=[ + V1OwnerReference( + api_version='fedlearner.k8s.io/v1alpha1', + controller=True, + kind='FedApp', + name='test-fedapp-job', + uid='bcf5324c-aa2b-4918-bdee-42ac464e18d5', + ) + ]) + self.assertEqual(get_app_name_from_metadata(metadata), 'test-fedapp-job') + + def test_flapp(self): + metadata = V1ObjectMeta(name='test-pod', + namespace='fedlearner', 
+ owner_references=[ + V1OwnerReference( + api_version='fedlearner.k8s.io/v1alpha1', + controller=True, + kind='FLApp', + name='u130eaab6eec64552945-nn-model', + uid='bcf5324c-aa2b-4918-bdee-42ac464e18d5', + ) + ]) + self.assertEqual(get_app_name_from_metadata(metadata), 'u130eaab6eec64552945-nn-model') + + def test_unknown_metadata(self): + metadata = V1ObjectMeta(name='test-pod', + namespace='fedlearner', + owner_references=[ + V1OwnerReference( + api_version='fedlearner.k8s.io/v1alpha1', + controller=True, + kind='NewApp', + name='u130eaab6eec64552945-nn-model', + uid='bcf5324c-aa2b-4918-bdee-42ac464e18d5', + ) + ]) + self.assertIsNone(get_app_name_from_metadata(metadata)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/middleware/BUILD.bazel new file mode 100644 index 000000000..9f51a2ad8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/BUILD.bazel @@ -0,0 +1,80 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "api_latency_lib", + srcs = ["api_latency.py"], + imports = ["../.."], + deps = [ + "@common_flask//:pkg", + "@common_opentelemetry_instrumentation_flask//:pkg", + ], +) + +py_test( + name = "api_latency_test", + srcs = [ + "api_latency_test.py", + ], + imports = ["../.."], + main = "api_latency_test.py", + deps = [ + ":api_latency_lib", + "@common_flask//:pkg", + "@common_flask_testing//:pkg", + "@common_opentelemetry_sdk//:pkg", + ], +) + +py_library( + name = "log_filter_lib", + srcs = ["log_filter.py"], + imports = ["../.."], + deps = [ + ":middlewares_lib", + ":request_id_lib", + ], +) + +py_test( + name = "log_filter_test", + srcs = [ + "log_filter_test.py", + ], + imports = ["../.."], + main = "log_filter_test.py", + deps = [ + ":log_filter_lib", + ], +) + +py_library( + name = "middlewares_lib", + srcs = ["middlewares.py"], + imports = ["../.."], + visibility = ["//visibility:public"], +) + +py_library( + name = "request_id_lib", + srcs = ["request_id.py"], + imports = ["../.."], + deps = [ + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@common_flask//:pkg", + ], +) + +py_test( + name = "request_id_test", + srcs = [ + "request_id_test.py", + ], + imports = ["../.."], + main = "request_id_test.py", + deps = [ + ":request_id_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/api_latency.py b/web_console_v2/api/fedlearner_webconsole/middleware/api_latency.py new file mode 100644 index 000000000..96ac176b7 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/api_latency.py @@ -0,0 +1,22 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from flask import Flask +from opentelemetry.instrumentation.flask import FlaskInstrumentor + + +def api_latency_middleware(app: Flask) -> Flask: + FlaskInstrumentor().instrument_app(app) + return app diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/api_latency_test.py b/web_console_v2/api/fedlearner_webconsole/middleware/api_latency_test.py new file mode 100644 index 000000000..dfaf806cd --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/api_latency_test.py @@ -0,0 +1,78 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import unittest +import flask_testing +from flask import Flask +from io import StringIO +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import SimpleSpanProcessor, ConsoleSpanExporter + +from fedlearner_webconsole.middleware.api_latency import api_latency_middleware + + +class ApiLatencyTest(flask_testing.TestCase): + + def setUp(self): + super().setUp() + self._string_io = StringIO() + trace.set_tracer_provider( + TracerProvider(resource=Resource.create({'service.name': 'test_api_latency'}), + active_span_processor=SimpleSpanProcessor(ConsoleSpanExporter(out=self._string_io)))) + + def create_app(self): + app = Flask('test_api_latency') + + @app.route('/test', methods=['GET']) + def test(): + return {'data': 'Hello'} + + app = api_latency_middleware(app) + return app + + def test_api_latency(self): + get_response = self.client.get('/test') + self.assertEqual(get_response.json, {'data': 'Hello'}) + span = json.loads(self._string_io.getvalue()) + self.assertEqual(span['name'], '/test') + self.assertEqual(span['kind'], 'SpanKind.SERVER') + self.assertEqual( + span['attributes'], { + 'http.method': 'GET', + 'http.server_name': 'localhost', + 'http.scheme': 'http', + 'net.host.port': 80, + 'http.host': 'localhost', + 'http.target': '/test', + 'net.peer.ip': '127.0.0.1', + 'http.user_agent': 'werkzeug/1.0.1', + 'http.flavor': '1.1', + 'http.route': '/test', + 'http.status_code': 200 + }) + self.assertEqual( + span['resource'], { + 'telemetry.sdk.language': 'python', + 'telemetry.sdk.name': 'opentelemetry', + 'telemetry.sdk.version': '1.10.0', + 'service.name': 'test_api_latency' + }) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/log_filter.py b/web_console_v2/api/fedlearner_webconsole/middleware/log_filter.py new file mode 100644 index 000000000..82c234728 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/log_filter.py @@ -0,0 +1,27 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import logging + +from fedlearner_webconsole.middleware.request_id import get_current_request_id + + +class RequestIdLogFilter(logging.Filter): + """Log filter to inject the current request id. + """ + + def filter(self, record) -> bool: + record.request_id = get_current_request_id() + return True diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/log_filter_test.py b/web_console_v2/api/fedlearner_webconsole/middleware/log_filter_test.py new file mode 100644 index 000000000..be3ccf0d8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/log_filter_test.py @@ -0,0 +1,33 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from unittest.mock import MagicMock, patch + +from fedlearner_webconsole.middleware.log_filter import RequestIdLogFilter + + +class RequestIdLogFilterTest(unittest.TestCase): + + @patch('fedlearner_webconsole.middleware.log_filter.get_current_request_id') + def test_attach_request_id(self, mock_get_current_request_id): + mock_get_current_request_id.return_value = '123' + log_record = MagicMock() + self.assertEqual(RequestIdLogFilter().filter(log_record), True) + self.assertEqual(log_record.request_id, '123') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/middlewares.py b/web_console_v2/api/fedlearner_webconsole/middleware/middlewares.py new file mode 100644 index 000000000..e44248163 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/middlewares.py @@ -0,0 +1,36 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
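+# Usage sketch for the RequestIdLogFilter defined in log_filter.py above
+# (illustrative only; the handler wiring is an assumption, not part of
+# this change):
+#     handler = logging.StreamHandler()
+#     handler.addFilter(RequestIdLogFilter())
+#     handler.setFormatter(logging.Formatter('%(asctime)s [%(request_id)s] %(message)s'))
+#     logging.getLogger().addHandler(handler)
+# With the filter attached, every record gains a request_id attribute
+# (an empty string outside any request context), so '%(request_id)s'
+# always resolves.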
+ +# coding: utf-8 +import logging + + +class _MiddlewareRegistry(object): + + def __init__(self): + self.middlewares = [] + + def register(self, middleware): + self.middlewares.append(middleware) + + def init_app(self, app): + logging.info('Initializing app with middlewares') + # Wraps app with middlewares + for middleware in self.middlewares: + app = middleware(app) + return app + + +flask_middlewares = _MiddlewareRegistry() +wsgi_middlewares = _MiddlewareRegistry() diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/request_id.py b/web_console_v2/api/fedlearner_webconsole/middleware/request_id.py new file mode 100644 index 000000000..fb9548560 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/request_id.py @@ -0,0 +1,137 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import random +import threading +from abc import ABCMeta, abstractmethod +from datetime import datetime +from typing import Optional, List, Tuple, Union + +import grpc +from flask import Flask, Response, g, request, has_request_context + + +class RequestIdContext(metaclass=ABCMeta): + + @abstractmethod + def is_current_context(self) -> bool: + pass + + @abstractmethod + def set_request_id(self, request_id: str): + pass + + @abstractmethod + def get_request_id(self) -> Optional[str]: + pass + + +class FlaskRequestIdContext(RequestIdContext): + + def is_current_context(self) -> bool: + return has_request_context() + + def set_request_id(self, request_id: str): + g.request_id = request_id + + def get_request_id(self) -> Optional[str]: + # Defensively getting request id from flask.g + if hasattr(g, 'request_id'): + return g.request_id + return None + + +thread_local = threading.local() + + +class ThreadLocalContext(RequestIdContext): + + def is_current_context(self) -> bool: + return hasattr(thread_local, 'request_id') + + def set_request_id(self, request_id: str): + thread_local.request_id = request_id + + def get_request_id(self) -> Optional[str]: + # Defensively getting request id + if hasattr(thread_local, 'request_id'): + return thread_local.request_id + return None + + +_flask_request_id_context = FlaskRequestIdContext() +_thread_local_context = ThreadLocalContext() + + +def _gen_request_id() -> str: + # Random number in 4 digits + r = f'{random.randint(0, 9999):04}' + dt = datetime.now().strftime('%Y%m%d%H%M%S-%f') + return f'{dt}-{r}' + + +class FlaskRequestId(object): + + def __init__(self, header_name='X-TT-LOGID'): + self.header_name = header_name + + def __call__(self, app: Flask) -> Flask: + app.before_request(self._set_request_id) + app.after_request(self._add_header) + return app + + def _set_request_id(self): + # Gets existing request id or generate a new one + request_id = request.headers.get(self.header_name) or \ + _gen_request_id() + _flask_request_id_context.set_request_id(request_id) + + def _add_header(self, response: Response) -> Response: + response.headers[self.header_name] = \ + 
_flask_request_id_context.get_request_id() + return response + + +class GrpcRequestIdMiddleware(object): + REQUEST_HEADER_NAME = 'x-tt-logid' + + @classmethod + def add_header(cls, metadata: List[Tuple[str, Union[str, bytes]]]): + """Appends request id in metadata.""" + # From existing request id in context or generates a new one + request_id = get_current_request_id() or _gen_request_id() + metadata.append((cls.REQUEST_HEADER_NAME, request_id)) + + # Sets thread local context if we get a request id + _thread_local_context.set_request_id(request_id) + return metadata + + @classmethod + def set_request_id_in_context(cls, context: grpc.ServicerContext): + """Sets request id to thread local context for gRPC service.""" + for key, value in context.invocation_metadata(): + if key == cls.REQUEST_HEADER_NAME: + # Sets context per gRPC metadata + _thread_local_context.set_request_id(value) + return + + +def get_current_request_id() -> str: + request_id = None + if _flask_request_id_context.is_current_context(): + request_id = _flask_request_id_context.get_request_id() + elif _thread_local_context.is_current_context(): + request_id = _thread_local_context.get_request_id() + return request_id or '' diff --git a/web_console_v2/api/fedlearner_webconsole/middleware/request_id_test.py b/web_console_v2/api/fedlearner_webconsole/middleware/request_id_test.py new file mode 100644 index 000000000..e95900a75 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/middleware/request_id_test.py @@ -0,0 +1,70 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
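+# End-to-end sketch of request-id propagation (illustrative; the stub and
+# request names below are hypothetical, not part of this change):
+#     app = FlaskRequestId()(app)  # HTTP: reads or generates X-TT-LOGID
+#     metadata = GrpcRequestIdMiddleware.add_header([])  # outgoing gRPC call
+#     stub.SomeRpc(SomeRequest(), metadata=metadata)  # id travels as x-tt-logid
+# On the server side, GrpcRequestIdMiddleware.set_request_id_in_context
+# copies the id from the invocation metadata into the thread-local context,
+# so get_current_request_id() also works inside gRPC handlers.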
+ 
+# coding: utf-8 +import threading +import time +import unittest + +from fedlearner_webconsole.middleware.request_id import FlaskRequestId, _thread_local_context, get_current_request_id +from testing.common import BaseTestCase + + +class FlaskRequestIdTest(BaseTestCase): + + def setUp(self): + super().setUp() + # Wraps with middleware + self.app = FlaskRequestId()(self.app) + + @self.app.route('/test', methods=['GET']) + def test_api(): + return '' + + def test_response_with_request_id(self): + response = self.client.get('/test') + self.assertEqual(len(response.headers['X-TT-LOGID']), 26, 'request id should be a generated 26-char id') + + def test_request_with_request_id(self): + response = self.client.get('/test', headers={'X-TT-LOGID': 'test-id'}) + self.assertEqual(response.headers['X-TT-LOGID'], 'test-id') + + +class ThreadLocalContextTest(unittest.TestCase): + + def test_multi_thread_context(self): + ids = {} + + def process(index: str): + if index != 't1': + _thread_local_context.set_request_id(index) + time.sleep(0.2) + ids[index] = get_current_request_id() + + # t1 executes first + # t2 and t3 will be in parallel + t1 = threading.Thread(target=process, args=['t1']) + t1.start() + t1.join() + t2 = threading.Thread(target=process, args=['t2']) + t3 = threading.Thread(target=process, args=['t3']) + t2.start() + t3.start() + t3.join() + t2.join() + self.assertDictEqual(ids, {'t1': '', 't2': 't2', 't3': 't3'}) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/mmgr/BUILD.bazel new file mode 100644 index 000000000..65ea2f9f1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/BUILD.bazel @@ -0,0 +1,384 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "model_job_configer_lib", + srcs = ["model_job_configer.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "model_job_configer_lib_test", + size = "small", + srcs = [ + "model_job_configer_test.py", + ], + imports = ["../.."], + main = "model_job_configer_test.py", + deps = [ + ":model_job_configer_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:utils_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + visibility = ["//visibility:public"], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", 
"//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":model_job_configer_lib", + ":models_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/job:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr/metrics:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "service_lib_test", + size = "medium", + srcs = [ + "service_test.py", + ], + imports = ["../.."], + main = "service_test.py", + deps = [ + ":service_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_library( + name = "controller_lib", + srcs = [ + "controller.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:transaction_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_job_controller_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + 
"@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "scheduler_lib", + srcs = ["scheduler.py"], + imports = ["../.."], + deps = [ + ":controller_lib", + ":model_job_configer_lib", + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "scheduler_lib_test", + size = "small", + srcs = [ + "scheduler_test.py", + ], + imports = ["../.."], + main = "scheduler_test.py", + deps = [ + ":scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing/rpc:client_lib", + ], +) + +py_test( + name = "controller_lib_test", + size = "medium", + srcs = [ + "controller_test.py", + ], + imports = ["../.."], + main = "controller_test.py", + deps = [ + ":controller_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + ], +) + +py_test( + name = "utils_lib_test", + size = "small", + srcs = [ + "utils_test.py", + ], + imports = ["../.."], + main = "utils_test.py", + deps = [ + ":utils_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "cronjob_lib", + srcs = [ + "cronjob.py", + ], + imports = ["../.."], + deps = [ + ":controller_lib", + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "cronjob_lib_test", + size = "medium", + srcs = [ + "cronjob_test.py", + ], + imports = ["../.."], + main = "cronjob_test.py", + deps = [ + ":cronjob_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = [ + "model_apis.py", + "model_job_apis.py", + "model_job_group_apis.py", + ], + imports = ["../.."], + deps = [ + ":controller_lib", + ":model_job_configer_lib", + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + 
"//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/scheduler:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:sorting_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_job_controller_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask//:pkg", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "model_apis_lib_test", + size = "medium", + srcs = [ + "model_apis_test.py", + ], + imports = ["../.."], + main = "model_apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "model_job_apis_lib_test", + size = "medium", + srcs = [ + "model_job_apis_test.py", + ], + imports = ["../.."], + main = "model_job_apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + ], +) + +py_test( + name = "model_job_group_apis_lib_test", + size = "medium", + srcs = [ + "model_job_group_apis_test.py", + ], + imports = ["../.."], + main = "model_job_group_apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "event_listener_lib", + srcs = ["event_listener.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:event_listener_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_cache_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + ], +) + +py_test( + name = "event_listener_lib_test", + size = "small", + srcs = [ + "event_listener_test.py", + ], + imports = ["../.."], + main = "event_listener_test.py", + deps = [ + ":event_listener_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/__init__.py b/web_console_v2/api/fedlearner_webconsole/mmgr/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/web_console_v2/api/fedlearner_webconsole/mmgr/apis.py b/web_console_v2/api/fedlearner_webconsole/mmgr/apis.py deleted file mode 100644 index cbae04a07..000000000 --- a/web_console_v2/api/fedlearner_webconsole/mmgr/apis.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -from http import HTTPStatus -from flask import request -from flask_restful import Resource -from fedlearner_webconsole.db import db_handler -from fedlearner_webconsole.exceptions import NotFoundException -from fedlearner_webconsole.mmgr.models import Model, ModelType, ModelGroup -from fedlearner_webconsole.mmgr.service import ModelService -from fedlearner_webconsole.utils.decorators import jwt_required - - -class ModelApi(Resource): - @jwt_required() - def get(self, model_id): - detail_level = request.args.get('detail_level', '') - with db_handler.session_scope() as session: - model_json = ModelService(session).query(model_id, detail_level) - if not model_json: - raise NotFoundException( - f'Failed to find model: {model_id}') - return {'data': model_json}, HTTPStatus.OK - - @jwt_required() - def put(self, model_id): - with db_handler.session_scope() as session: - model = session.query(Model).filter_by(id=model_id).one_or_none() - if not model: - raise NotFoundException( - f'Failed to find model: {model_id}') - model.extra = request.args.get('extra', model.extra) - session.commit() - return {'data': model.to_dict()}, HTTPStatus.OK - - @jwt_required() - def delete(self, model_id): - with db_handler.session_scope() as session: - model = ModelService(session).drop(model_id) - if not model: - raise NotFoundException( - f'Failed to find model: {model_id}') - return {'data': model.to_dict()}, HTTPStatus.OK - - -class ModelListApi(Resource): - @jwt_required() - def get(self): - detail_level = request.args.get('detail_level', '') - # TODO serialized query may incur performance penalty - with db_handler.session_scope() as session: - model_list = [ - ModelService(session).query(m.id, detail_level) - for m in Model.query.filter( - Model.type.in_([ - ModelType.NN_MODEL.value, ModelType.TREE_MODEL.value - ])).all() - ] - return {'data': model_list}, HTTPStatus.OK - - -class GroupListApi(Resource): - @jwt_required() - def get(self): - group_list = [o.to_dict() for o in ModelGroup.query.all()] - return {'data': group_list}, HTTPStatus.OK - - @jwt_required() - def post(self): - group = ModelGroup() - - group.name = request.args.get('name', group.name) - group.extra = request.args.get('extra', group.extra) - with db_handler.session_scope() as session: - session.add(group) - session.commit() - - return {'data': group.to_dict()}, HTTPStatus.OK - - -class GroupApi(Resource): - @jwt_required() - def patch(self, group_id): - group = ModelGroup.query.filter_by(id=group_id).one_or_none() - if not group: - raise NotFoundException( - f'Failed to find group: {group_id}') - - group.name = request.args.get('name', group.name) - group.extra = 
request.args.get('extra', group.extra) - with db_handler.session_scope() as session: - session.add(group) - session.commit() - - return {'data': group.to_dict()}, HTTPStatus.OK - - -def initialize_mmgr_apis(api): - api.add_resource(ModelListApi, '/models') - api.add_resource(ModelApi, '/models/') - - api.add_resource(GroupListApi, '/model_groups') - api.add_resource(GroupApi, '/model_groups/') diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/controller.py b/web_console_v2/api/fedlearner_webconsole/mmgr/controller.py new file mode 100644 index 000000000..40b2d7275 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/controller.py @@ -0,0 +1,321 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +import logging +from google.protobuf import json_format +from typing import Tuple, Optional +from sqlalchemy.orm import Session +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, DataBatch +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobGroup, ModelJobType, GroupCreateStatus, \ + GroupAutoUpdateStatus +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.mmgr.service import check_model_job_group, ModelJobGroupService +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.two_pc.transaction_manager import TransactionManager +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TransactionData, CreateModelJobData, \ + CreateModelJobGroupData +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGroupPb +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient +from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient +from fedlearner_webconsole.workflow.workflow_job_controller import start_workflow, stop_workflow +from fedlearner_webconsole.workflow_template.utils import set_value +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.flag.models import Flag + + +def _get_transaction_manager(project_id: int, two_pc_type: TwoPcType) -> TransactionManager: + with db.session_scope() as session: + project = session.query(Project).get(project_id) + participants = ParticipantService(session).get_platform_participants_by_project(project_id) + tm = TransactionManager(project_name=project.name, + project_token=project.token, + two_pc_type=two_pc_type, + participants=[participant.domain_name for participant in participants]) + return tm + + +class CreateModelJob: + + @staticmethod + def _create_model_job_by_2pc(project_id: int, + 
name: str, + model_job_type: ModelJobType, + algorithm_type: AlgorithmType, + coordinator_pure_domain_name: str, + dataset_id: Optional[int] = None, + model_id: Optional[int] = None, + group_id: Optional[int] = None) -> Tuple[bool, str]: + tm = _get_transaction_manager(project_id=project_id, two_pc_type=TwoPcType.CREATE_MODEL_JOB) + with db.session_scope() as session: + project_name = session.query(Project).get(project_id).name + dataset_uuid = None + if dataset_id is not None: + dataset_uuid = session.query(Dataset).get(dataset_id).uuid + model_uuid = None + if model_id is not None: + model_uuid = session.query(Model).get(model_id).uuid + group_name = None + if group_id is not None: + group_name = session.query(ModelJobGroup).get(group_id).name + model_job_uuid = resource_uuid() + workflow_uuid = model_job_uuid + succeeded, message = tm.run( + TransactionData( + create_model_job_data=CreateModelJobData(model_job_name=name, + model_job_uuid=model_job_uuid, + model_job_type=model_job_type.name, + group_name=group_name, + algorithm_type=algorithm_type.name, + workflow_uuid=workflow_uuid, + model_uuid=model_uuid, + project_name=project_name, + coordinator_pure_domain_name=coordinator_pure_domain_name, + dataset_uuid=dataset_uuid))) + return succeeded, message + + def run(self, + project_id: int, + name: str, + model_job_type: ModelJobType, + algorithm_type: AlgorithmType, + coordinator_pure_domain_name: str, + dataset_id: Optional[int], + model_id: Optional[int] = None, + group_id: Optional[int] = None) -> Tuple[bool, str]: + # no need create model job at participants when eval or predict horizontal model + if algorithm_type in [AlgorithmType.TREE_VERTICAL, AlgorithmType.NN_VERTICAL + ] or model_job_type == ModelJobType.TRAINING: + succeeded, msg = self._create_model_job_by_2pc(project_id=project_id, + name=name, + model_job_type=model_job_type, + algorithm_type=algorithm_type, + coordinator_pure_domain_name=coordinator_pure_domain_name, + dataset_id=dataset_id, + model_id=model_id, + group_id=group_id) + return succeeded, msg + with db.session_scope() as session: + model_job = ModelJob(name=name, + group_id=group_id, + project_id=project_id, + model_job_type=model_job_type, + algorithm_type=algorithm_type, + model_id=model_id) + model_job.uuid = resource_uuid() + model_job.workflow_uuid = model_job.uuid + session.add(model_job) + session.commit() + return True, '' + + +class CreateModelJobGroup: + + @staticmethod + def run(project_id: int, name: str, algorithm_type: AlgorithmType, dataset_id: Optional[str], + coordinator_pure_domain_name: str, model_job_group_uuid: str) -> Tuple[bool, str]: + with db.session_scope() as session: + project_name = session.query(Project).get(project_id).name + dataset_uuid = None + if dataset_id is not None: + dataset_uuid = session.query(Dataset).get(dataset_id).uuid + tm = _get_transaction_manager(project_id=project_id, two_pc_type=TwoPcType.CREATE_MODEL_JOB_GROUP) + create_model_job_group_data = CreateModelJobGroupData(model_job_group_name=name, + model_job_group_uuid=model_job_group_uuid, + project_name=project_name, + algorithm_type=algorithm_type.name, + coordinator_pure_domain_name=coordinator_pure_domain_name, + dataset_uuid=dataset_uuid) + succeeded, msg = tm.run(data=TransactionData(create_model_job_group_data=create_model_job_group_data)) + return succeeded, msg + + +class LaunchModelJob: + + @staticmethod + def run(project_id: int, group_id: int, version: int): + tm = _get_transaction_manager(project_id=project_id, 
two_pc_type=TwoPcType.LAUNCH_MODEL_JOB) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(group_id) + model_job_name = f'{group.name}-v{group.latest_version}' + data = TransactionData(create_model_job_data=CreateModelJobData( + model_job_name=model_job_name, model_job_uuid=resource_uuid(), group_uuid=group.uuid, version=version)) + succeeded, msg = tm.run(data) + return succeeded, msg + + +class ModelJobGroupController: + + def __init__(self, session: Session, project_id: int): + self._session = session + self._clients = [] + self._participants = ParticipantService(self._session).get_participants_by_project(project_id) + self._project = self._session.query(Project).get(project_id) + for p in self._participants: + self._clients.append(JobServiceClient.from_project_and_participant(p.domain_name, self._project.name)) + + def inform_auth_status_to_participants(self, group: ModelJobGroup): + participants_info = group.get_participants_info() + pure_domain_name = SettingService.get_system_info().pure_domain_name + participants_info.participants_map[pure_domain_name].auth_status = group.auth_status.name + group.set_participants_info(participants_info) + for client, p in zip(self._clients, self._participants): + try: + client.inform_model_job_group(group.uuid, group.auth_status) + except grpc.RpcError as e: + logging.warning(f'[model-job-group] failed to inform participant {p.id}\'s ' + f'model job group {group.uuid} with grpc code {e.code()} and details {e.details()}') + + def update_participants_model_job_group(self, + uuid: str, + auto_update_status: Optional[GroupAutoUpdateStatus] = None, + start_data_batch_id: Optional[int] = None): + start_dataset_job_stage_uuid = None + if start_data_batch_id: + start_dataset_job_stage_uuid = self._session.query(DataBatch).get( + start_data_batch_id).latest_parent_dataset_job_stage.uuid + for client, p in zip(self._clients, self._participants): + try: + client.update_model_job_group(uuid=uuid, + auto_update_status=auto_update_status, + start_dataset_job_stage_uuid=start_dataset_job_stage_uuid) + except grpc.RpcError as e: + logging.warning(f'[model-job-group] failed to update participant {p.id}\'s ' + f'model job group {uuid} with grpc code {e.code()} and details {e.details()}') + + def update_participants_auth_status(self, group: ModelJobGroup): + participants_info = group.get_participants_info() + for client, p in zip(self._clients, self._participants): + try: + resp = client.get_model_job_group(group.uuid) + if resp.auth_status: + auth_status = resp.auth_status + else: + # Use 'authorized' if the field 'auth_status' is not in the ModelJobGroupPb of the opposite side + if resp.authorized: + auth_status = AuthStatus.AUTHORIZED.name + else: + auth_status = AuthStatus.PENDING.name + participants_info.participants_map[p.pure_domain_name()].auth_status = auth_status + except grpc.RpcError as e: + logging.warning(f'[model-job-group] failed to get participant {p.id}\'s ' + f'model job group {group.uuid} with grpc code {e.code()} and details {e.details()}') + group.set_participants_info(participants_info) + self._session.commit() + + def get_model_job_group_from_participant(self, participant_id: int, + model_job_group_uuid: str) -> Optional[ModelJobGroupPb]: + resp = None + for client, p in zip(self._clients, self._participants): + if p.id == participant_id: + try: + resp = client.get_model_job_group(uuid=model_job_group_uuid) + system_client = SystemServiceClient.from_participant(domain_name=p.domain_name) + flag_resp = 
system_client.list_flags()
+                    break
+                except grpc.RpcError as e:
+                    logging.warning(
+                        f'[model-job-group] failed to get participant {p.id}\'s '
+                        f'model job group {model_job_group_uuid} with grpc code {e.code()} and details {e.details()}')
+        if resp and len(resp.config.job_definitions) and flag_resp.get(Flag.MODEL_JOB_GLOBAL_CONFIG_ENABLED.name):
+            variables = resp.config.job_definitions[0].variables
+            for variable in variables:
+                if variable.name == 'algorithm':
+                    algo_dict = json_format.MessageToDict(variable.typed_value)
+                    algo = AlgorithmFetcher(self._project.id).get_algorithm(algo_dict['algorithmUuid'])
+                    algo_dict['algorithmId'] = algo.id
+                    algo_dict['participantId'] = algo.participant_id
+                    algo_dict['algorithmProjectId'] = algo.algorithm_project_id
+                    set_value(variable=variable, typed_value=algo_dict)
+                    break
+        return resp
+
+    def create_model_job_group_for_participants(self, model_job_group_id: int):
+        group = self._session.query(ModelJobGroup).get(model_job_group_id)
+        for client, p in zip(self._clients, self._participants):
+            try:
+                client.create_model_job_group(name=group.name,
+                                              uuid=group.uuid,
+                                              algorithm_type=group.algorithm_type,
+                                              dataset_uuid=group.dataset.uuid,
+                                              algorithm_project_list=group.get_algorithm_project_uuid_list())
+            except grpc.RpcError as e:
+                logging.warning(f'[model-job-group] failed to create model job group for the participant {p.id} '
+                                f'with grpc code {e.code()} and details {e.details()}')
+                group.status = GroupCreateStatus.FAILED
+                return
+        group.status = GroupCreateStatus.SUCCEEDED
+
+
+class ModelJobController:
+
+    def __init__(self, session: Session, project_id: int):
+        self._session = session
+        self._clients = []
+        self._participants = ParticipantService(self._session).get_participants_by_project(project_id)
+        self._project_id = project_id
+        project = self._session.query(Project).get(project_id)
+        for p in self._participants:
+            self._clients.append(JobServiceClient.from_project_and_participant(p.domain_name, project.name))
+
+    def launch_model_job(self, group_id: int) -> ModelJob:
+        check_model_job_group(self._project_id, group_id, self._session)
+        group = ModelJobGroupService(self._session).lock_and_update_version(group_id)
+        self._session.commit()
+        succeeded, msg = LaunchModelJob().run(project_id=self._project_id,
+                                              group_id=group_id,
+                                              version=group.latest_version)
+        if not succeeded:
+            raise InternalException(f'failed to launch model job by 2PC with message: {msg}')
+        model_job = self._session.query(ModelJob).filter_by(group_id=group_id, version=group.latest_version).first()
+        return model_job
+
+    def inform_auth_status_to_participants(self, model_job: ModelJob):
+        for client, p in zip(self._clients, self._participants):
+            try:
+                client.inform_model_job(model_job.uuid, model_job.auth_status)
+            except grpc.RpcError as e:
+                logging.warning(f'[model-job] failed to inform participant {p.id}\'s model job '
+                                f'{model_job.uuid} with grpc code {e.code()} and details {e.details()}')
+
+    def update_participants_auth_status(self, model_job: ModelJob):
+        participants_info = model_job.get_participants_info()
+        for client, p in zip(self._clients, self._participants):
+            try:
+                resp = client.get_model_job(model_job.uuid)
+                participants_info.participants_map[p.pure_domain_name()].auth_status = resp.auth_status
+            except grpc.RpcError as e:
+                logging.warning(f'[model-job] failed to get participant {p.id}\'s model job {model_job.uuid} '
+                                f'with grpc code {e.code()} and details {e.details()}')
+        model_job.set_participants_info(participants_info)
+
+
+#
TODO(gezhengqiang): provide start model job rpc +def start_model_job(model_job_id: int): + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(model_job_id) + start_workflow(workflow_id=model_job.workflow_id) + + +def stop_model_job(model_job_id: int): + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(model_job_id) + stop_workflow(workflow_id=model_job.workflow_id) diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/controller_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/controller_test.py new file mode 100644 index 000000000..bd6e26183 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/controller_test.py @@ -0,0 +1,397 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import MagicMock, patch, call + +import grpc +from google.protobuf import json_format +from google.protobuf.empty_pb2 import Empty +from datetime import datetime +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.fake_model_job_config import get_workflow_config +from testing.rpc.client import FakeRpcError + +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmType, Source +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobState, DatasetJobKind, DatasetType, \ + DatasetJobStage, DataBatch +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobGroup, ModelJobType, ModelJobRole, GroupCreateStatus, \ + GroupAutoUpdateStatus, AuthStatus as ModelJobAuthStatus +from fedlearner_webconsole.mmgr.controller import start_model_job, stop_model_job, ModelJobGroupController, \ + ModelJobController +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmPb +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGroupPb, AlgorithmProjectList, ModelJobPb +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.workflow_template.utils import set_value +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus + + +class StartModelJobTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + session.add(ModelJob(id=1, name='name', uuid='uuid', workflow_id=2)) + session.commit() + + @patch('fedlearner_webconsole.mmgr.controller.start_workflow') + def test_start_model_job(self, mock_start_workflow: MagicMock): + start_model_job(model_job_id=1) + 
mock_start_workflow.assert_called_with(workflow_id=2) + + @patch('fedlearner_webconsole.mmgr.controller.stop_workflow') + def test_stop_model_job(self, mock_stop_workflow: MagicMock): + stop_model_job(model_job_id=1) + mock_stop_workflow.assert_called_with(workflow_id=2) + + +class ModelJobGroupControllerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part1', domain_name='fl-demo1.com') + participant2 = Participant(id=2, name='part2', domain_name='fl-demo2.com') + dataset = Dataset(id=1, name='dataset', uuid='dataset_uuid') + pro_part1 = ProjectParticipant(id=1, project_id=1, participant_id=1) + pro_part2 = ProjectParticipant(id=2, project_id=1, participant_id=2) + group = ModelJobGroup(id=1, + name='group', + uuid='uuid', + project_id=1, + dataset_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL) + dataset_job_stage = DatasetJobStage(id=1, + name='data_join', + uuid='stage_uuid', + project_id=1, + state=DatasetJobState.SUCCEEDED, + dataset_job_id=1, + data_batch_id=1) + data_batch = DataBatch(id=1, + name='20221213', + dataset_id=1, + path='/data/dataset/haha/batch/20221213', + latest_parent_dataset_job_stage_id=1, + event_time=datetime(2022, 12, 13, 16, 37, 37)) + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'algorithm-project-uuid1' + algorithm_project_list.algorithm_projects['part1'] = 'algorithm-project-uuid2' + algorithm_project_list.algorithm_projects['part2'] = 'algorithm-project-uuid3' + group.set_algorithm_project_uuid_list(algorithm_project_list) + algo = Algorithm(id=1, algorithm_project_id=1, name='test-algo', uuid='uuid') + participants_info = ParticipantsInfo() + participants_info.participants_map['demo0'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['demo1'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['demo2'].auth_status = AuthStatus.PENDING.name + group.set_participants_info(participants_info) + session.add_all([ + project, participant1, participant2, pro_part1, pro_part2, group, algo, dataset, dataset_job_stage, + data_batch + ]) + session.commit() + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_model_job_group') + def test_inform_auth_status_to_participants(self, mock_client: MagicMock, mock_system_info: MagicMock): + system_info = SystemInfo() + system_info.pure_domain_name = 'demo0' + mock_system_info.return_value = system_info + mock_client.return_value = Empty() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + group.auth_status = AuthStatus.AUTHORIZED + ModelJobGroupController(session, 1).inform_auth_status_to_participants(group) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map[system_info.pure_domain_name].auth_status, + AuthStatus.AUTHORIZED.name) + self.assertEqual(mock_client.call_args_list, [(('uuid', AuthStatus.AUTHORIZED),), + (('uuid', AuthStatus.AUTHORIZED),)]) + # fail due to grpc abort + mock_client.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'model job group uuid is not found') + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + group.auth_status = AuthStatus.PENDING + ModelJobGroupController(session, 1).inform_auth_status_to_participants(group) + 
participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map[system_info.pure_domain_name].auth_status, + AuthStatus.PENDING.name) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.update_model_job_group') + def test_update_participants_model_job_group(self, mock_client: MagicMock): + mock_client.return_value = Empty() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + group.auto_update_status = GroupAutoUpdateStatus.ACTIVE + group.start_data_batch_id = 1 + ModelJobGroupController(session, 1).update_participants_model_job_group( + uuid=group.uuid, + auto_update_status=group.auto_update_status, + start_data_batch_id=group.start_data_batch_id) + self.assertEqual(mock_client.call_args_list, [ + call(uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.ACTIVE, + start_dataset_job_stage_uuid='stage_uuid'), + call(uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.ACTIVE, + start_dataset_job_stage_uuid='stage_uuid') + ]) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_model_job_group') + def test_update_participants_auth_status(self, mock_client: MagicMock): + mock_client.side_effect = [ + ModelJobGroupPb(auth_status=AuthStatus.AUTHORIZED.name), + ModelJobGroupPb(auth_status=AuthStatus.AUTHORIZED.name) + ] + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + ModelJobGroupController(session, 1).update_participants_auth_status(group) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.AUTHORIZED.name) + self.assertEqual(participants_info.participants_map['demo2'].auth_status, AuthStatus.AUTHORIZED.name) + # if the 'auth_status' is not in ModelJobGroupPb + mock_client.side_effect = [ModelJobGroupPb(authorized=False), ModelJobGroupPb(authorized=False)] + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + ModelJobGroupController(session, 1).update_participants_auth_status(group) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.PENDING.name) + self.assertEqual(participants_info.participants_map['demo2'].auth_status, AuthStatus.PENDING.name) + # fail due to grpc abort + mock_client.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'model job group uuid is not found') + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + ModelJobGroupController(session, 1).update_participants_auth_status(group) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.PENDING.name) + self.assertEqual(participants_info.participants_map['demo2'].auth_status, AuthStatus.PENDING.name) + + @patch('fedlearner_webconsole.rpc.v2.system_service_client.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_model_job_group') + def test_get_model_job_group_from_participant(self, mock_client: MagicMock, mock_algo_fetcher: MagicMock, + 
mock_list_flags: MagicMock): + algo_dict1 = { + 'algorithmId': 1, + 'algorithmUuid': 'uuid', + 'algorithmProjectId': 1, + 'algorithmProjectUuid': 'project_uuid', + 'participantId': 0, + 'path': '/path' + } + variable = Variable(name='algorithm') + set_value(variable=variable, typed_value=algo_dict1) + config = WorkflowDefinition(job_definitions=[JobDefinition(variables=[variable])]) + mock_client.return_value = ModelJobGroupPb(name='group', uuid='uuid', config=config) + mock_list_flags.return_value = {'model_job_global_config_enabled': True} + with db.session_scope() as session: + resp = ModelJobGroupController(session, 1).get_model_job_group_from_participant(1, 'uuid') + self.assertEqual(resp.name, 'group') + self.assertEqual(resp.uuid, 'uuid') + variables = resp.config.job_definitions[0].variables + for variable in variables: + if variable.name == 'algorithm': + self.assertEqual(json_format.MessageToDict(variable.typed_value), algo_dict1) + algo_dict2 = { + 'algorithmId': 2, + 'algorithmUuid': 'peer-uuid', + 'algorithmProjectId': 1, + 'algorithmProjectUuid': 'project_uuid', + 'participantId': 2, + 'path': '/path' + } + set_value(variable, typed_value=algo_dict2) + config = WorkflowDefinition(job_definitions=[JobDefinition(variables=[variable])]) + mock_client.return_value = ModelJobGroupPb(name='group', uuid='uuid', config=config) + mock_algo_fetcher.return_value = AlgorithmPb(name='test-peer-algo', + uuid='peer-uuid', + participant_id=1, + source=Source.PARTICIPANT.name) + with db.session_scope() as session: + resp = ModelJobGroupController(session, 1).get_model_job_group_from_participant(1, 'uuid') + variables = resp.config.job_definitions[0].variables + algo_dict2['algorithmId'] = 0 + algo_dict2['algorithmProjectId'] = 0 + algo_dict2['participantId'] = 1 + for variable in variables: + if variable.name == 'algorithm': + self.assertEqual(json_format.MessageToDict(variable.typed_value), algo_dict2) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job_group') + def test_create_model_job_group_for_participants(self, mock_client: MagicMock): + with db.session_scope() as session: + ModelJobGroupController(session, 1).create_model_job_group_for_participants(1) + session.commit() + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'algorithm-project-uuid1' + algorithm_project_list.algorithm_projects['part1'] = 'algorithm-project-uuid2' + algorithm_project_list.algorithm_projects['part2'] = 'algorithm-project-uuid3' + mock_client.assert_called_with(name='group', + uuid='uuid', + algorithm_type=AlgorithmType.NN_VERTICAL, + dataset_uuid='dataset_uuid', + algorithm_project_list=algorithm_project_list) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + self.assertEqual(group.status, GroupCreateStatus.SUCCEEDED) + mock_client.side_effect = FakeRpcError(grpc.StatusCode.INVALID_ARGUMENT, 'dataset with uuid is not found') + with db.session_scope() as session: + ModelJobGroupController(session, 1).create_model_job_group_for_participants(1) + session.commit() + mock_client.assert_called() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + self.assertEqual(group.status, GroupCreateStatus.FAILED) + + +class ModelJobControllerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + project = Project(id=1, name='test-project') + participant1 = Participant(id=1, 
name='part1', domain_name='fl-demo1.com') + participant2 = Participant(id=2, name='part2', domain_name='fl-demo2.com') + pro_part1 = ProjectParticipant(id=1, project_id=1, participant_id=1) + pro_part2 = ProjectParticipant(id=2, project_id=1, participant_id=2) + dataset_job = DatasetJob(id=1, + name='datasetjob', + uuid='uuid', + state=DatasetJobState.SUCCEEDED, + project_id=1, + input_dataset_id=1, + output_dataset_id=3, + kind=DatasetJobKind.OT_PSI_DATA_JOIN) + dataset = Dataset(id=3, + uuid='uuid', + name='datasetjob', + dataset_type=DatasetType.PSI, + path='/data/dataset/haha') + algorithm = Algorithm(id=2, name='algorithm') + group = ModelJobGroup(id=1, + name='group', + uuid='uuid', + project_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_id=2, + role=ModelJobRole.COORDINATOR, + dataset_id=3) + model_job = ModelJob(id=1, + name='model_job', + uuid='uuid', + project_id=1, + auth_status=ModelJobAuthStatus.AUTHORIZED) + participants_info = ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + model_job.set_participants_info(participants_info) + group.set_config(get_workflow_config(ModelJobType.TRAINING)) + session.add_all([ + dataset_job, dataset, project, group, algorithm, participant1, participant2, pro_part1, pro_part2, + model_job + ]) + session.commit() + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_launch_model_job(self, mock_remote_do_two_pc): + with db.session_scope() as session: + group = session.query(ModelJobGroup).filter_by(uuid='uuid').first() + mock_remote_do_two_pc.return_value = True, '' + with db.session_scope() as session: + ModelJobController(session=session, project_id=1).launch_model_job(group_id=1) + group: ModelJobGroup = session.query(ModelJobGroup).filter_by(name='group').first() + model_job = group.model_jobs[0] + self.assertEqual(model_job.group_id, group.id) + self.assertTrue(model_job.project_id, group.project_id) + self.assertEqual(model_job.version, 1) + self.assertEqual(group.latest_version, 1) + self.assertTrue(model_job.algorithm_type, group.algorithm_type) + self.assertTrue(model_job.model_job_type, ModelJobType.TRAINING) + self.assertTrue(model_job.dataset_id, group.dataset_id) + self.assertTrue(model_job.workflow.get_config(), group.get_config()) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_model_job') + def test_inform_auth_status_to_participants(self, mock_inform_model_job: MagicMock): + mock_inform_model_job.return_value = Empty() + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + ModelJobController(session, 1).inform_auth_status_to_participants(model_job) + self.assertEqual(mock_inform_model_job.call_args_list, [(('uuid', ModelJobAuthStatus.AUTHORIZED),), + (('uuid', ModelJobAuthStatus.AUTHORIZED),)]) + # fail due to grpc abort + mock_inform_model_job.reset_mock() + mock_inform_model_job.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'model job uuid is not found') + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + ModelJobController(session, 1).inform_auth_status_to_participants(model_job) + self.assertEqual(mock_inform_model_job.call_args_list, [(('uuid', ModelJobAuthStatus.AUTHORIZED),), + (('uuid', ModelJobAuthStatus.AUTHORIZED),)]) + + 
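
An aside on the assertion idiom used by the surrounding tests: entries in `call_args_list` are `unittest.mock.call` objects, and a `call` made with positional arguments compares equal to a one-element tuple `((args...),)`, with the empty kwargs dict optionally omitted. A standalone sketch with example values (plain `unittest.mock`, no FedLearner imports):

```python
# Why assertions like `[(('uuid', AuthStatus.AUTHORIZED),), ...]` above work:
# unittest.mock.call objects compare equal to plain (args, kwargs) tuples.
from unittest.mock import MagicMock, call

m = MagicMock()
m('uuid', 'AUTHORIZED')
m('uuid', 'AUTHORIZED')

assert m.call_args_list == [call('uuid', 'AUTHORIZED'), call('uuid', 'AUTHORIZED')]
assert m.call_args_list == [(('uuid', 'AUTHORIZED'),), (('uuid', 'AUTHORIZED'),)]  # tuple form, kwargs omitted
assert m.call_args_list == [(('uuid', 'AUTHORIZED'), {}), (('uuid', 'AUTHORIZED'), {})]  # explicit empty kwargs
```
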
@patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_model_job') + def test_get_participants_auth_status(self, mock_get_model_job: MagicMock): + mock_get_model_job.side_effect = [ + ModelJobPb(auth_status=AuthStatus.AUTHORIZED.name), + ModelJobPb(auth_status=AuthStatus.AUTHORIZED.name) + ] + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + ModelJobController(session, 1).update_participants_auth_status(model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + participants_info = ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + self.assertEqual(model_job.get_participants_info(), participants_info) + # fail due to grpc abort + mock_get_model_job.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'model job uuid is not found') + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + ModelJobController(session, 1).update_participants_auth_status(model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + participants_info = ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + self.assertEqual(model_job.get_participants_info(), participants_info) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/cronjob.py b/web_console_v2/api/fedlearner_webconsole/mmgr/cronjob.py new file mode 100644 index 000000000..6c08c047f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/cronjob.py @@ -0,0 +1,102 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
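
The cron job defined below propagates the name of the latest completed training job into the `load_model_name` workflow variable, writing both the legacy plain-string `value` field and the protobuf `typed_value`. A minimal sketch of that dual-write idiom, assuming the `Variable` message behaves as it is used in this file; the job name is an example value:

```python
# Sketch of the dual-field update performed by _set_load_model_name below.
from google.protobuf.struct_pb2 import Value
from fedlearner_webconsole.proto.common_pb2 import Variable

def set_string_variable(variable: Variable, job_name: str) -> None:
    variable.value = job_name  # legacy plain-string field
    variable.typed_value.MergeFrom(Value(string_value=job_name))  # typed field

variable = Variable(name='load_model_name', value_type=Variable.ValueType.STRING)
set_string_variable(variable, 'uuid-nn-model')
assert variable.typed_value.string_value == 'uuid-nn-model'
```
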
+# + +import logging +from typing import Tuple, Optional +from sqlalchemy.orm import Session +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, ModelTrainingCronJobOutput +from fedlearner_webconsole.mmgr.service import ModelJobGroupService +from fedlearner_webconsole.mmgr.controller import LaunchModelJob +from fedlearner_webconsole.mmgr.models import ModelJobGroup +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.proto.common_pb2 import Variable + +LOAD_MODEL_NAME = 'load_model_name' + + +class ModelTrainingCronJob(IRunnerV2): + """Launch model job periodically.""" + + @staticmethod + def _set_load_model_name(config: WorkflowDefinition, job_name: str): + """Set variable of load_model_name inplace""" + assert len(config.job_definitions) == 1 + for variable in config.job_definitions[0].variables: + if variable.name == LOAD_MODEL_NAME: + assert variable.value_type == Variable.ValueType.STRING + variable.value = job_name + variable.typed_value.MergeFrom(Value(string_value=job_name)) + + def _update_local_and_peer_config(self, session: Session, group_id: int): + group: ModelJobGroup = session.query(ModelJobGroup).get(group_id) + model_job = group.latest_completed_job() + if model_job is None: + return + job_name = model_job.job_name + config = group.get_config() + self._set_load_model_name(config=config, job_name=job_name) + group.set_config(config) + for party in group.project.participants: + client = RpcClient.from_project_and_participant(project_name=group.project.name, + project_token=group.project.token, + domain_name=party.domain_name) + config = client.get_model_job_group(model_job_group_uuid=group.uuid).config + self._set_load_model_name(config=config, job_name=job_name) + client.update_model_job_group(model_job_group_uuid=group.uuid, config=config) + + def _check_peer_auth_status(self, session: Session, group_id: int) -> Tuple[bool, Optional[str]]: + group: ModelJobGroup = session.query(ModelJobGroup).get(group_id) + for party in group.project.participants: + client = RpcClient.from_project_and_participant(project_name=group.project.name, + project_token=group.project.token, + domain_name=party.domain_name) + resp = client.get_model_job_group(model_job_group_uuid=group.uuid) + if not resp.authorized: + return False, party.domain_name + return True, None + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + output = ModelTrainingCronJobOutput() + group_id = context.input.model_training_cron_job_input.group_id + with db.session_scope() as session: + authorized, domain_name = self._check_peer_auth_status(session=session, group_id=group_id) + if not authorized: + message = f'party {domain_name} is not authorized for group {group_id}' + logging.warning(f'[ModelTrainingCronJob] {message}') + return RunnerStatus.FAILED, RunnerOutput(error_message=message) + group = ModelJobGroupService(session).lock_and_update_version(group_id) + session.commit() + with db.session_scope() as session: + self._update_local_and_peer_config(session, group.id) + session.commit() + succeeded, msg = LaunchModelJob().run(project_id=group.project_id, + group_id=group_id, + 
version=group.latest_version)
+        if not succeeded:
+            message = f'failed to launch model job for group {group_id} by 2PC with message: {msg}'
+            output.message = message
+            logging.warning(f'[ModelTrainingCronJob] {message}')
+            return RunnerStatus.FAILED, RunnerOutput(model_training_cron_job_output=output)
+        message = f'succeeded in launching model job for group {group_id}'
+        logging.info(f'[ModelTrainingCronJob] {message}')
+        output.message = message
+        return RunnerStatus.DONE, RunnerOutput(model_training_cron_job_output=output)
diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/cronjob_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/cronjob_test.py
new file mode 100644
index 000000000..53e0c6b18
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/mmgr/cronjob_test.py
@@ -0,0 +1,91 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+from unittest.mock import patch
+from google.protobuf.struct_pb2 import Value
+
+from testing.common import NoWebServerTestCase
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.participant.models import Participant, ProjectParticipant
+from fedlearner_webconsole.job.models import Job, JobState, JobType
+from fedlearner_webconsole.workflow.models import Workflow, WorkflowState
+from fedlearner_webconsole.mmgr.cronjob import ModelTrainingCronJob
+from fedlearner_webconsole.mmgr.models import ModelJobGroup, ModelJob
+from fedlearner_webconsole.composer.context import RunnerContext
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.proto.common_pb2 import Variable
+from fedlearner_webconsole.proto.composer_pb2 import ModelTrainingCronJobInput, RunnerInput
+from fedlearner_webconsole.proto.service_pb2 import GetModelJobGroupResponse
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition
+
+
+def _get_workflow_definition() -> WorkflowDefinition:
+    return WorkflowDefinition(
+        job_definitions=[JobDefinition(name='nn-model', variables=[Variable(name='load_model_name')])])
+
+
+class ModelTrainingCronJobTest(NoWebServerTestCase):
+
+    def setUp(self):
+        super().setUp()
+        project = Project(id=2, name='project')
+        party = Participant(id=3, name='test', domain_name='fl-test.com')
+        relationship = ProjectParticipant(project_id=2, participant_id=3)
+        job = Job(id=1,
+                  name='uuid-nn-model',
+                  job_type=JobType.NN_MODEL_TRANINING,
+                  state=JobState.COMPLETED,
+                  workflow_id=1,
+                  project_id=2)
+        workflow = Workflow(id=1, name='workflow', uuid='uuid', state=WorkflowState.COMPLETED, project_id=2)
+        model_job = ModelJob(name='group-v1', workflow_uuid=workflow.uuid, job_name=job.name, group_id=1, project_id=2)
+        group = ModelJobGroup(id=1, name='group', uuid='uuid', latest_version=2, project_id=2)
+        group.set_config(_get_workflow_definition())
+        with db.session_scope() as session:
+            session.add_all([project, party, relationship,
job, workflow, model_job, group]) + session.commit() + + @patch('fedlearner_webconsole.rpc.client.RpcClient.get_model_job_group') + @patch('fedlearner_webconsole.rpc.client.RpcClient.update_model_job_group') + @patch('fedlearner_webconsole.mmgr.controller.LaunchModelJob.run') + def test_run(self, mock_run, mock_update_group, mock_get_group): + context = RunnerContext(index=0, + input=RunnerInput(model_training_cron_job_input=ModelTrainingCronJobInput(group_id=1))) + mock_run.return_value = True, '' + mock_get_group.return_value = GetModelJobGroupResponse(config=_get_workflow_definition(), authorized=False) + runner_status, runner_output = ModelTrainingCronJob().run(context) + # fail due to peer is not authorized + self.assertEqual(runner_status, RunnerStatus.FAILED) + self.assertEqual(runner_output.error_message, 'party fl-test.com is not authorized for group 1') + # succeeded + mock_get_group.return_value = GetModelJobGroupResponse(config=_get_workflow_definition(), authorized=True) + runner_status, runner_output = ModelTrainingCronJob().run(context) + self.assertEqual(runner_status, RunnerStatus.DONE) + mock_run.assert_called_with(project_id=2, group_id=1, version=3) + config = _get_workflow_definition() + config.job_definitions[0].variables[0].typed_value.MergeFrom(Value(string_value='uuid-nn-model')) + config.job_definitions[0].variables[0].value = 'uuid-nn-model' + mock_update_group.assert_called_with(config=config, model_job_group_uuid='uuid') + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + self.assertEqual(group.latest_version, 3) + self.assertEqual(group.get_config(), config) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/event_listener.py b/web_console_v2/api/fedlearner_webconsole/mmgr/event_listener.py new file mode 100644 index 000000000..5770a9907 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/event_listener.py @@ -0,0 +1,74 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
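
`ModelEventListener` below reacts only to `MODIFIED`/`DELETED` events on FLApp and FedApp objects, then walks a chain of guards (job exists, is a completed training job, maps to a model job, has no model yet) before creating a model. A dependency-free sketch of the event filter; the enums and `Event` here are stand-ins for the real classes in `fedlearner_webconsole.k8s.k8s_cache`:

```python
# Stand-in types (hypothetical) mirroring the shape of k8s_cache's Event.
from dataclasses import dataclass
from enum import Enum

class EventType(Enum):
    ADDED = 'ADDED'
    MODIFIED = 'MODIFIED'
    DELETED = 'DELETED'

class ObjectType(Enum):
    POD = 'POD'
    FLAPP = 'FLAPP'
    FEDAPP = 'FEDAPP'

@dataclass
class Event:
    app_name: str
    event_type: EventType
    obj_type: ObjectType

def is_model_event(event: Event) -> bool:
    # Only app-level lifecycle changes can complete a training job.
    return (event.obj_type in (ObjectType.FLAPP, ObjectType.FEDAPP) and
            event.event_type in (EventType.MODIFIED, EventType.DELETED))

assert is_model_event(Event('job-1', EventType.MODIFIED, ObjectType.FLAPP))
assert not is_model_event(Event('job-1', EventType.ADDED, ObjectType.FLAPP))
assert not is_model_event(Event('job-1', EventType.MODIFIED, ObjectType.POD))
```
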
+#
+
+import logging
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.utils.metrics import emit_store
+from fedlearner_webconsole.k8s.k8s_cache import Event, EventType, ObjectType
+from fedlearner_webconsole.job.models import Job, JobState
+from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobType
+from fedlearner_webconsole.mmgr.service import ModelService
+from fedlearner_webconsole.k8s.event_listener import EventListener
+
+
+def _is_model_event(event: Event) -> bool:
+    return event.obj_type in [ObjectType.FLAPP, ObjectType.FEDAPP
+                             ] and event.event_type in [EventType.MODIFIED, EventType.DELETED]
+
+
+class ModelEventListener(EventListener):
+
+    def update(self, event: Event):
+        if not _is_model_event(event):
+            return
+        job_name = event.app_name
+        with db.session_scope() as session:
+            job: Job = session.query(Job).filter_by(name=job_name).first()
+            if job is None:
+                emit_store('job_not_found', 1)
+                logging.warning('[ModelEventListener] job %s is not found', job_name)
+                return
+            # Only read job attributes after the None check above.
+            logging.debug('[ModelEventListener] job: %s, type: %s, state: %s', job.name, job.job_type, job.state)
+            if not job.is_training_job():
+                logging.debug(f'[ModelEventListener] stop creating model due to job {job.name} is not training')
+                return
+            if job.state != JobState.COMPLETED:
+                logging.debug(f'[ModelEventListener] stop creating model due to job {job.name} is not completed')
+                return
+            model = session.query(Model).filter_by(job_id=job.id).first()
+            if model is not None:
+                logging.debug(
+                    f'[ModelEventListener] stop creating model due to model is already created for job {job.name}')
+                return
+            model_job: ModelJob = session.query(ModelJob).filter_by(job_name=job.name).first()
+            if model_job is None:
+                logging.info(f'[ModelEventListener] stop creating model due to {job.name} is not a model job')
+                return
+            if model_job.model_job_type not in [ModelJobType.TRAINING, ModelJobType.EVALUATION]:
+                logging.info(f'[ModelEventListener] stop creating model due to model job {model_job.name} '
+                             'is not training or evaluation')
+                return
+            service = ModelService(session)
+            service.create_model_from_model_job(model_job=model_job)
+            logging.info(f'[ModelEventListener] model for job {job.name} is created')
+            session.commit()
+        with db.session_scope() as session:
+            model_job = session.query(ModelJob).filter_by(job_name=job.name).first()
+            model = session.query(Model).filter_by(model_job_id=model_job.id).first()
+            if model is not None:
+                model_job.model_id = model.id
+                session.add(model_job)
+                session.commit()
diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/event_listener_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/event_listener_test.py
new file mode 100644
index 000000000..47201ea63
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/mmgr/event_listener_test.py
@@ -0,0 +1,102 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
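
One property the tests below rely on is that `ModelEventListener.update` is idempotent: duplicate K8s events for the same job create at most one model, because the listener checks for an existing `Model` row first. A self-contained illustration of that create-once guard, using an in-memory SQLite database and a hypothetical `models` table (SQLAlchemy 1.4+ assumed; this is not the real `Model` class):

```python
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Model(Base):  # hypothetical stand-in table
    __tablename__ = 'models'
    id = Column(Integer, primary_key=True)
    job_id = Column(Integer, index=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

def ensure_model(job_id: int) -> bool:
    """Returns True only on first creation; later calls are no-ops."""
    with Session() as session:
        if session.query(Model).filter_by(job_id=job_id).first() is not None:
            return False
        session.add(Model(job_id=job_id))
        session.commit()
        return True

assert ensure_model(1) is True
assert ensure_model(1) is False  # a duplicate event creates nothing new
```
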
+# + +import unittest +from unittest.mock import patch, MagicMock +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobType +from fedlearner_webconsole.mmgr.event_listener import _is_model_event, ModelEventListener +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.k8s.k8s_cache import Event, EventType, ObjectType + + +class UtilsTest(NoWebServerTestCase): + + def test_is_event_relevant(self): + self.assertFalse( + _is_model_event(Event(app_name='test', event_type=EventType.ADDED, obj_type=ObjectType.FLAPP, obj_dict={}))) + self.assertFalse( + _is_model_event(Event(app_name='test', event_type=EventType.MODIFIED, obj_type=ObjectType.POD, + obj_dict={}))) + self.assertTrue( + _is_model_event( + Event(app_name='test', event_type=EventType.MODIFIED, obj_type=ObjectType.FLAPP, obj_dict={}))) + + +class ListenerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + workflow = Workflow(id=1, name='test-workflow') + job = Job(name='test-job', + project_id=1, + job_type=JobType.NN_MODEL_TRANINING, + state=JobState.COMPLETED, + workflow_id=1) + session.add_all([workflow, job]) + session.commit() + + @staticmethod + def _get_job(session) -> Job: + return session.query(Job).filter_by(name='test-job').first() + + @staticmethod + def _get_event() -> Event: + return Event(app_name='test-job', event_type=EventType.MODIFIED, obj_type=ObjectType.FLAPP, obj_dict={}) + + @patch('fedlearner_webconsole.mmgr.service.ModelService.create_model_from_model_job') + def test_model_update(self, mock_create_model: MagicMock): + event = self._get_event() + with db.session_scope() as session: + job = self._get_job(session) + job.state = JobState.STOPPED + session.commit() + ModelEventListener().update(event) + # not called since job state is stopped + mock_create_model.assert_not_called() + with db.session_scope() as session: + job = self._get_job(session) + job.state = JobState.COMPLETED + session.commit() + ModelEventListener().update(event) + # not called since model job is not found + mock_create_model.assert_not_called() + + with db.session_scope() as session: + model_job = ModelJob(id=1, job_name=job.name, job_id=job.id, model_job_type=ModelJobType.TRAINING) + model = Model(id=1, model_job_id=1) + session.add_all([model_job, model]) + session.commit() + ModelEventListener().update(event) + # create model + mock_create_model.assert_called() + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + self.assertEqual(model_job.model_id, 1) + mock_create_model.reset_mock() + + with db.session_scope() as session: + session.add(Model(name=job.name, job_id=job.id)) + session.commit() + ModelEventListener().update(event) + # not called due to model is already created + mock_create_model.assert_not_called() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/BUILD.bazel new file mode 100644 index 000000000..dd3fe32a9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/BUILD.bazel @@ -0,0 +1,30 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "metrics_lib", + srcs = 
["metrics_inquirer.py"], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:es_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:job_metrics_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "metrics_lib_test", + size = "small", + srcs = [ + "metrics_inquirer_test.py", + ], + imports = ["../../.."], + main = "metrics_inquirer_test.py", + deps = [ + ":metrics_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing/test_data:test_data_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/__init__.py b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/metrics_inquirer.py b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/metrics_inquirer.py new file mode 100644 index 000000000..a69a56ca8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/metrics_inquirer.py @@ -0,0 +1,187 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +from typing import List + +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.proto.metrics_pb2 import ModelJobMetrics, Metric, ConfusionMatrix +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.es import ElasticSearchClient +from fedlearner_webconsole.utils.job_metrics import get_feature_importance + +_ES_INDEX_NAME = 'apm*' +_es_client = ElasticSearchClient() + + +def _build_es_query_body(algorithm_type: str, job_name: str, metric_list: List[str]): + query = { + 'size': 0, + 'query': { + 'bool': { + 'must': [{ + 'term': { + 'labels.k8s_job_name': job_name + } + }] + } + }, + 'aggs': { + metric.upper(): { + 'filter': { + 'term': { + 'labels.k8s_job_name': job_name + } + }, + 'aggs': { + mode.upper(): { + 'filter': { + 'exists': { + 'field': f'values.model.{mode}.{algorithm_type}.{metric}' + } + }, + 'aggs': { + 'TOP': { + 'top_hits': { + 'size': + 0, # 0 means get all matching hits + '_source': [ + f'values.model.{mode}.{algorithm_type}.{metric}', 'labels', '@timestamp' + ] + } + } + } + } for mode in ('train', 'eval') + } + } for metric in metric_list + } + } + return _es_client.search(index=_ES_INDEX_NAME, body=query, request_timeout=500) + + +class TreeMetricsInquirer(object): + _CONF_METRIC_LIST = ['tp', 'tn', 'fp', 'fn'] + _TREE_METRIC_LIST = ['acc', 'auc', 'precision', 'recall', 'f1', 'ks', 'mse', 'msre', 'abs'] + _CONF_METRIC_LIST + _ALGORITHM_TYPE = 'tree_vertical' + + def _extract_metric(self, records: dict, mode: str, metric_name: str) -> Metric: + iter_to_values = {} + for record in records: + iteration = record['_source']['labels']['iteration'] + value = record['_source'][f'values.model.{mode}.{self._ALGORITHM_TYPE}.{metric_name}'] + if iteration not in iter_to_values: + iter_to_values[iteration] = [] + iter_to_values[iteration].append(value) + ordered_iters = sorted(iter_to_values.keys()) + values = [ + # Avg + sum(iter_to_values[iteration]) / len(iter_to_values[iteration]) for iteration in ordered_iters + ] + return Metric(steps=ordered_iters, values=values) + + def _extract_confusion_matrix(self, metrics: Metric) -> ConfusionMatrix: + + def get_last_value(metric_name: str): + metric = metrics.get(metric_name) + if metric is not None and len(metric.values) > 0: + return int(metric.values[-1]) + return 0 + + matrix = ConfusionMatrix( + tp=get_last_value('tp'), + tn=get_last_value('tn'), + fp=get_last_value('fp'), + fn=get_last_value('fn'), + ) + # remove confusion relevant metrics + for key in self._CONF_METRIC_LIST: + metrics.pop(key) + return matrix + + def _set_confusion_metric(self, metrics: ModelJobMetrics): + + def is_training() -> bool: + iter_vals = metrics.train.get('tp') + if iter_vals is None: + return False + return len(iter_vals.values) > 0 + + if is_training(): + confusion_matrix = self._extract_confusion_matrix(metrics.train) + else: + confusion_matrix = self._extract_confusion_matrix(metrics.eval) + metrics.confusion_matrix.CopyFrom(confusion_matrix) + return metrics + + def query(self, job: Job, need_feature_importance: bool = False) -> ModelJobMetrics: + job_name = job.name + metrics = ModelJobMetrics() + query_result = _build_es_query_body(self._ALGORITHM_TYPE, job_name, self._TREE_METRIC_LIST) + if 'aggregations' not in query_result: + logging.warning(f'[METRICS] no aggregations found, job_name = {job_name}, result = {query_result}') + return metrics + aggregations = query_result['aggregations'] + for name in self._TREE_METRIC_LIST: + train_item = 
aggregations[name.upper()]['TRAIN']['TOP']['hits']['hits']
+            eval_item = aggregations[name.upper()]['EVAL']['TOP']['hits']['hits']
+            if len(train_item) > 0:
+                metrics.train[name].MergeFrom(self._extract_metric(train_item, 'train', name))
+            if len(eval_item) > 0:
+                metrics.eval[name].MergeFrom(self._extract_metric(eval_item, 'eval', name))
+        self._set_confusion_metric(metrics)
+        if need_feature_importance:
+            metrics.feature_importance.update(get_feature_importance(job))
+        return metrics
+
+
+class NnMetricsInquirer(object):
+    _NN_METRIC_LIST = ['auc', 'loss']
+    _ALGORITHM_TYPE = 'nn_vertical'
+
+    def _extract_metric(self, records: dict, mode: str, metric_name: str) -> Metric:
+        timestamp_to_values = {}
+        for record in records:
+            timestamp_str = record['_source']['@timestamp']
+            timestamp = to_timestamp(timestamp_str) * 1000
+            value = record['_source'][f'values.model.{mode}.{self._ALGORITHM_TYPE}.{metric_name}']
+            # Append to the per-timestamp list instead of resetting it,
+            # so duplicate timestamps are averaged rather than overwritten.
+            if timestamp not in timestamp_to_values:
+                timestamp_to_values[timestamp] = []
+            timestamp_to_values[timestamp].append(value)
+        ordered_iters = sorted(timestamp_to_values.keys())
+        values = [
+            # Avg
+            sum(timestamp_to_values[timestamp]) / len(timestamp_to_values[timestamp]) for timestamp in ordered_iters
+        ]
+        return Metric(steps=ordered_iters, values=values)
+
+    def query(self, job: Job) -> ModelJobMetrics:
+        job_name = job.name
+        metrics = ModelJobMetrics()
+        query_result = _build_es_query_body(self._ALGORITHM_TYPE, job_name, self._NN_METRIC_LIST)
+        if 'aggregations' not in query_result:
+            logging.warning(f'[METRICS] no aggregations found, job_name = {job_name}, result = {query_result}')
+            return metrics
+        aggregations = query_result['aggregations']
+        for name in self._NN_METRIC_LIST:
+            train_item = aggregations[name.upper()]['TRAIN']['TOP']['hits']['hits']
+            eval_item = aggregations[name.upper()]['EVAL']['TOP']['hits']['hits']
+            if len(train_item) > 0:
+                metrics.train[name].MergeFrom(self._extract_metric(train_item, 'train', name))
+            if len(eval_item) > 0:
+                metrics.eval[name].MergeFrom(self._extract_metric(eval_item, 'eval', name))
+        return metrics
+
+
+tree_metrics_inquirer = TreeMetricsInquirer()
+nn_metrics_inquirer = NnMetricsInquirer()
diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/metrics_inquirer_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/metrics_inquirer_test.py
new file mode 100644
index 000000000..8d559304d
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/mmgr/metrics/metrics_inquirer_test.py
@@ -0,0 +1,107 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
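
The expected results below encode the reduction `_extract_metric` applies: hits are grouped by step (iteration for tree jobs, timestamp for NN jobs) and each group is averaged into one point. A dependency-free sketch of that reduction, with the record layout simplified to `(step, value)` pairs:

```python
# Per-step averaging behind _extract_metric; names here are illustrative.
from collections import defaultdict
from typing import Dict, List, Tuple

def average_by_step(records: List[Tuple[int, float]]) -> Tuple[List[int], List[float]]:
    grouped: Dict[int, List[float]] = defaultdict(list)
    for step, value in records:
        grouped[step].append(value)
    steps = sorted(grouped)
    return steps, [sum(grouped[s]) / len(grouped[s]) for s in steps]

# Two workers reporting the same step are averaged into a single point.
assert average_by_step([(1, 1.0), (1, 3.0), (2, 0.5)]) == ([1, 2], [2.0, 0.5])
```
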
+# + +import unittest +from unittest.mock import patch + +from testing.common import BaseTestCase +from testing.test_data import es_query_result +from fedlearner_webconsole.mmgr.metrics.metrics_inquirer import tree_metrics_inquirer, nn_metrics_inquirer +from fedlearner_webconsole.job.models import Job, JobType +from fedlearner_webconsole.utils.proto import to_dict + +_EXPECTED_TREE_METRICS_RESULT = { + 'train': { + 'ks': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.47770564314760644, 0.5349813321918623, 0.5469192171410906, 0.5596894247461416, 0.5992009702504102, + 0.6175715202967825, 0.6366317091151221, 0.6989964566835509, 0.7088535349932226, 0.7418848541057288 + ] + }, + 'recall': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.40186915887850466, 0.4252336448598131, 0.45794392523364486, 0.46261682242990654, 0.5233644859813084, + 0.514018691588785, 0.5093457943925234, 0.5373831775700935, 0.5467289719626168, 0.5654205607476636 + ] + }, + 'acc': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [0.857, 0.862, 0.868, 0.872, 0.886, 0.883, 0.884, 0.895, 0.896, 0.902] + }, + 'auc': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.8011640626857863, 0.8377684240565029, 0.8533328577203871, 0.860663242253454, 0.8797977455946351, + 0.8921428741290338, 0.9041610187629308, 0.9179270409740553, 0.928827495184419, 0.9439282062257736 + ] + }, + 'precision': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.8514851485148515, 0.8584905660377359, 0.8596491228070176, 0.8839285714285714, 0.9032258064516129, + 0.8943089430894309, 0.9083333333333333, 0.9504132231404959, 0.9435483870967742, 0.9603174603174603 + ] + }, + 'f1': { + 'steps': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0], + 'values': [ + 0.546031746031746, 0.56875, 0.5975609756097561, 0.607361963190184, 0.6627218934911242, + 0.6528189910979227, 0.6526946107784432, 0.6865671641791044, 0.6923076923076923, 0.711764705882353 + ] + } + }, + 'confusion_matrix': { + 'tp': 121, + 'tn': 781, + 'fp': 5, + 'fn': 93 + }, + 'feature_importance': { + 'x': 0.3 + }, + 'eval': {} +} + + +class MetricsInquirerTest(BaseTestCase): + + @patch('fedlearner_webconsole.mmgr.metrics.metrics_inquirer.get_feature_importance') + @patch('fedlearner_webconsole.mmgr.metrics.metrics_inquirer._build_es_query_body') + def test_query_tree_metrics(self, mock_es_query, mock_get_importance): + mock_es_query.return_value = es_query_result.fake_es_query_tree_metrics_result_v2 + mock_get_importance.return_value = {'x': 0.3} + job = Job(name='test-job', job_type=JobType.TREE_MODEL_TRAINING) + metrics = tree_metrics_inquirer.query(job, need_feature_importance=True) + metrics_dict = to_dict(metrics) + self.assertIn('train', metrics_dict) + self.assertEqual(metrics_dict, _EXPECTED_TREE_METRICS_RESULT) + + @patch('fedlearner_webconsole.mmgr.metrics.metrics_inquirer._build_es_query_body') + def test_query_nn_vertical_metrics(self, mock_es_query): + mock_es_query.return_value = es_query_result.fake_es_query_nn_metrics_result_v2 + job = Job(name='test-job', job_type=JobType.NN_MODEL_TRANINING) + metrics = nn_metrics_inquirer.query(job) + metrics_dict = to_dict(metrics) + self.assertIn('train', metrics_dict) + self.assertEqual(1, len(metrics_dict['train']['loss']['values'])) + self.assertEqual(1, len(metrics_dict['train']['auc']['values'])) + self.assertIn(5.694229602813721, metrics_dict['train']['loss']['values']) + 
self.assertIn(0.6585884094238281, metrics_dict['train']['auc']['values']) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_apis.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_apis.py new file mode 100644 index 000000000..a2e8d5e21 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_apis.py @@ -0,0 +1,241 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from http import HTTPStatus +from sqlalchemy.orm import joinedload +from flask_restful import Resource +from typing import Optional +from webargs.flaskparser import use_args, use_kwargs +from marshmallow import fields, validate + +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.proto.filtering_pb2 import FilterOp +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.mmgr.models import Model, ModelJob +from fedlearner_webconsole.mmgr.service import ModelService, get_model +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.utils.flask_utils import FilterExpField, make_flask_response +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.proto.audit_pb2 import Event + + +class ModelsApi(Resource): + FILTER_FIELDS = { + 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'group_id': SupportedField(type=FieldType.NUMBER, ops={FilterOp.EQUAL: None}), + 'algorithm_type': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}), + } + + def __init__(self): + self._filter_builder = FilterBuilder(model_class=Model, supported_fields=self.FILTER_FIELDS) + + @credentials_required + @use_args( + { + 'group_id': + fields.Integer(required=False, load_default=None), + 'keyword': + fields.String(required=False, load_default=None), + 'algorithm_type': + fields.String( + required=False, load_default=None, validate=validate.OneOf([t.name for t in AlgorithmType])), + 'page': + fields.Integer(required=False, load_default=None), + 'page_size': + fields.Integer(required=False, load_default=None), + 'filter': + FilterExpField(required=False, load_default=None), + }, + location='query') + def get(self, params: dict, project_id: int): + """Get the list of models. 
+ --- + tags: + - mmgr + description: Get the list of models + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: query + name: group_id + schema: + type: integer + - in: query + name: keyword + schema: + type: string + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: filter + schema: + type: string + responses: + 200: + description: the list of models + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelPb' + """ + with db.session_scope() as session: + query = session.query(Model).options( + joinedload(Model.job).load_only(Job.name, Job.workflow_id).options( + joinedload(Job.workflow).load_only(Workflow.name)), + joinedload(Model.model_job).load_only(ModelJob.name)) + if project_id: + query = query.filter_by(project_id=project_id) + if params['group_id'] is not None: + query = query.filter_by(group_id=params['group_id']) + if params['keyword'] is not None: + query = query.filter(Model.name.like(f'%{params["keyword"]}%')) + if params['algorithm_type'] is not None: + query = query.filter(Model.algorithm_type == AlgorithmType[params['algorithm_type']]) + if params['filter']: + try: + query = self._filter_builder.build_query(query, params['filter']) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + query = query.order_by(Model.created_at.desc()) + pagination = paginate(query, params['page'], params['page_size']) + data = [d.to_proto() for d in pagination.get_items()] + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + +class ModelApi(Resource): + + @credentials_required + def get(self, project_id: int, model_id: int): + """Get the model. + --- + tags: + - mmgr + description: get the model. + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the model + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelPb' + """ + with db.session_scope() as session: + model = get_model(project_id=project_id, model_id=model_id, session=session) + return make_flask_response(data=model.to_proto()) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL, op_type=Event.OperationType.UPDATE) + @use_kwargs({'comment': fields.Str(required=False, load_default=None)}, location='json') + def patch(self, comment: Optional[str], project_id: int, model_id: int): + """Patch the model. + --- + tags: + - mmgr + description: patch the model. 
+ parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_id + schema: + type: integer + required: true + requestBody: + required: false + content: + application/json: + schema: + type: object + properties: + comment: + type: string + responses: + 200: + description: detail of the model + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelPb' + """ + with db.session_scope() as session: + model = get_model(project_id=project_id, model_id=model_id, session=session) + if comment is not None: + model.comment = comment + session.commit() + return make_flask_response(model.to_proto()) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL, op_type=Event.OperationType.DELETE) + def delete(self, project_id: int, model_id: int): + """Delete the model. + --- + tags: + - mmgr + description: delete the model + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_id + schema: + type: integer + required: true + responses: + 204: + description: delete the model successfully + """ + with db.session_scope() as session: + model = get_model(project_id=project_id, model_id=model_id, session=session) + ModelService(session).delete(model.id) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +def initialize_mmgr_model_apis(api): + api.add_resource(ModelsApi, '/projects/<int:project_id>/models') + api.add_resource(ModelApi, '/projects/<int:project_id>/models/<int:model_id>') diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_apis_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_apis_test.py new file mode 100644 index 000000000..d3eaf7d00 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_apis_test.py @@ -0,0 +1,127 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +import unittest +import urllib.parse +from http import HTTPStatus +from datetime import datetime + +from testing.common import BaseTestCase + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobType, ModelType +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.job.models import Job, JobType + + +class ModelsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + m1 = Model(name='m1', project_id=1, group_id=1, algorithm_type=AlgorithmType.NN_VERTICAL) + m2 = Model(name='m2', project_id=1, group_id=2, algorithm_type=AlgorithmType.TREE_VERTICAL) + m3 = Model(name='m3', project_id=1, group_id=2, algorithm_type=AlgorithmType.TREE_VERTICAL) + session.add_all([m1, m2, m3]) + session.commit() + + def test_get_models(self): + resp = self.get_helper('/api/v2/projects/1/models') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 3) + resp = self.get_helper('/api/v2/projects/1/models?group_id=2') + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['name'], 'm2') + resp = self.get_helper('/api/v2/projects/1/models?keyword=1') + data = self.get_response_data(resp) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'm1') + filter_param = urllib.parse.quote('(and(group_id=1)(name~="m"))') + resp = self.get_helper(f'/api/v2/projects/1/models?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'm1') + resp = self.get_helper('/api/v2/projects/1/models?algorithm_type=TREE_VERTICAL') + data = self.get_response_data(resp) + self.assertEqual(sorted([d['name'] for d in data]), ['m2', 'm3']) + + +class ModelApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + job = Job(id=3, name='job', job_type=JobType.NN_MODEL_TRANINING, workflow_id=2, project_id=1) + workflow = Workflow(id=2, name='workflow', project_id=1) + model_job = ModelJob(id=1, name='model_job', model_job_type=ModelJobType.TRAINING, group_id=1) + model = Model(id=1, + name='m1', + uuid='uuid', + project_id=1, + group_id=1, + job_id=3, + model_job_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + model_type=ModelType.NN_MODEL, + version=1, + created_at=datetime(2022, 5, 10, 0, 0, 0), + updated_at=datetime(2022, 5, 10, 0, 0, 0)) + session.add_all([model, job, workflow, model_job]) + session.commit() + + def test_get_model(self): + resp = self.get_helper('/api/v2/projects/1/models/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + resp, { + 'id': 1, + 'name': 'm1', + 'uuid': 'uuid', + 'algorithm_type': 'NN_VERTICAL', + 'model_path': '', + 'comment': '', + 'group_id': 1, + 'project_id': 1, + 'job_id': 3, + 'model_job_id': 1, + 'version': 1, + 'workflow_id': 2, + 'workflow_name': 'workflow', + 'job_name': 'job', + 'model_job_name': 'model_job', + 'created_at': 1652140800, + 'updated_at': 1652140800 + }) + + def test_patch_model(self): + resp = self.patch_helper('/api/v2/projects/1/models/1', data={'comment': 'comment'}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + model: Model = session.query(Model).get(1) + self.assertEqual(model.comment, 'comment') + + def test_delete_model(self): + resp = 
self.delete_helper('/api/v2/projects/1/models/1') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + model = session.query(Model).execution_options(include_deleted=True).get(1) + self.assertIsNotNone(model.deleted_at) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_apis.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_apis.py new file mode 100644 index 000000000..fa247fe39 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_apis.py @@ -0,0 +1,1085 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import logging +import tempfile +from http import HTTPStatus +from flask import send_file +from flask_restful import Resource +from typing import Optional, List +from webargs.flaskparser import use_args, use_kwargs +from marshmallow import Schema, post_load, fields, validate +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.services import BatchService +from fedlearner_webconsole.exceptions import NotFoundException, ResourceConflictException, InternalException, \ + InvalidArgumentException, NoAccessException, UnauthorizedException +from fedlearner_webconsole.utils.sorting import SorterBuilder, SortExpression, parse_expression +from fedlearner_webconsole.workflow.models import Workflow, WorkflowExternalState +from fedlearner_webconsole.workflow_template.service import dict_to_workflow_definition +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.mmgr.controller import CreateModelJob, start_model_job, stop_model_job, \ + ModelJobController, ModelJobGroupController +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobGroup, ModelJobType, ModelJobRole, \ + is_federated, AuthStatus, GroupAutoUpdateStatus, GroupAuthFrontendStatus, ModelJobStatus +from fedlearner_webconsole.mmgr.service import ModelJobService, ModelJobGroupService, get_sys_template_id, \ + get_model_job, get_project, get_participant +from fedlearner_webconsole.mmgr.model_job_configer import ModelJobConfiger, set_load_model_name +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.flask_utils import make_flask_response, get_current_user, FilterExpField, \ + FilterExpression +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterOp, SimpleExpression, FilterBuilder +from fedlearner_webconsole.utils.paginate import paginate +from 
fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.scheduler.scheduler import scheduler +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.proto.mmgr_pb2 import PeerModelJobPb, ModelJobGlobalConfig +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient +from fedlearner_webconsole.flag.models import Flag + + +def _check_model_job_global_config_enable(project_id: int) -> bool: + with db.session_scope() as session: + participants = session.query(Project).get(project_id).participants + flag = True + for p in participants: + client = SystemServiceClient.from_participant(domain_name=p.domain_name) + resp = client.list_flags() + if not resp.get(Flag.MODEL_JOB_GLOBAL_CONFIG_ENABLED.name): + flag = False + return flag + + +class CreateModelJobParams(Schema): + name = fields.Str(required=True) + group_id = fields.Integer(required=False, load_default=None) + model_job_type = fields.Str(required=True, + validate=validate.OneOf([ + ModelJobType.TRAINING.name, ModelJobType.EVALUATION.name, + ModelJobType.PREDICTION.name + ])) + algorithm_type = fields.Str(required=True, + validate=validate.OneOf([ + AlgorithmType.TREE_VERTICAL.name, AlgorithmType.NN_VERTICAL.name, + AlgorithmType.NN_HORIZONTAL.name + ])) + algorithm_id = fields.Integer(required=False, load_default=None) + eval_model_job_id = fields.Integer(required=False, load_default=None) + model_id = fields.Integer(required=False, load_default=None) + dataset_id = fields.Integer(required=False, load_default=None) + data_batch_id = fields.Integer(required=False, load_default=None) + config = fields.Dict(required=False, load_default={}) + comment = fields.Str(required=False, load_default=None) + global_config = fields.Dict(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['config'] = dict_to_workflow_definition(data['config']) + data['model_job_type'] = ModelJobType[data['model_job_type']] + data['algorithm_type'] = AlgorithmType[data['algorithm_type']] + if data.get('eval_model_job_id') is not None: + with db.session_scope() as session: + model = session.query(Model).filter_by(model_job_id=data.get('eval_model_job_id')).first() + data['model_id'] = model.id + if data['global_config'] is not None: + data['global_config'] = ParseDict(data['global_config'], ModelJobGlobalConfig()) + return data + + +# TODO(hangweiqiang): remove dataset_id in parameters +class ConfigModelJobParams(Schema): + algorithm_id = fields.Integer(required=False, load_default=None) + dataset_id = fields.Integer(required=False, load_default=None) + config = fields.Dict(required=False, load_default=None) + global_config = fields.Dict(required=False, load_default=None) + comment = fields.Str(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + if data['config'] is not None: + data['config'] = dict_to_workflow_definition(data['config']) + if data['global_config'] is not None: + data['global_config'] = ParseDict(data['global_config'], ModelJobGlobalConfig()) + return data + + +class 
ListModelJobsSchema(Schema): + group_id = fields.Integer(required=False, load_default=None) + keyword = fields.String(required=False, load_default=None) + types = fields.List(fields.String(required=False, + validate=validate.OneOf([ + ModelJobType.TRAINING.name, ModelJobType.EVALUATION.name, + ModelJobType.PREDICTION.name + ])), + required=False, + load_default=None) + configured = fields.Boolean(required=False, load_default=None) + algorithm_types = fields.List(fields.String(required=True, + validate=validate.OneOf([ + AlgorithmType.TREE_VERTICAL.name, AlgorithmType.NN_VERTICAL.name, + AlgorithmType.NN_HORIZONTAL.name + ])), + required=False, + load_default=None) + states = fields.List(fields.String(required=False, + validate=validate.OneOf([s.name for s in WorkflowExternalState])), + required=False, + load_default=None) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) + filter_exp = FilterExpField(data_key='filter', required=False, load_default=None) + sorter_exp = fields.String(required=False, load_default=None, data_key='order_by') + + @post_load() + def make(self, data, **kwargs): + if data['types'] is not None: + data['types'] = [ModelJobType[t] for t in data['types']] + if data['states'] is not None: + data['states'] = [WorkflowExternalState[s] for s in data['states']] + if data['algorithm_types'] is not None: + data['algorithm_types'] = [AlgorithmType[t] for t in data['algorithm_types']] + return data + + +class ModelJobApi(Resource): + + @credentials_required + def get(self, project_id: int, model_job_id: int): + """Get the model job by id + --- + tags: + - mmgr + description: get the model job by id + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the model job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobPb' + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + ModelJobService(session).update_model_job_status(model_job) + ModelJobController(session, project_id).update_participants_auth_status(model_job) + session.commit() + return make_flask_response(model_job.to_proto()) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, op_type=Event.OperationType.UPDATE) + @use_args(ConfigModelJobParams(), location='json') + def put(self, params: dict, project_id: int, model_job_id: int): + """Update the model job + --- + tags: + - mmgr + description: update the model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/ConfigModelJobParams' + responses: + 200: + description: detail of the model job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobPb' + """ + dataset_id = params['dataset_id'] + algorithm_id = params['algorithm_id'] + config = params['config'] + global_config = params['global_config'] + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + model_job.algorithm_id = algorithm_id + if dataset_id is not None: + model_job.dataset_id = dataset_id + model_job.comment = params['comment'] 
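+            # Note added for clarity: when the coordinator supplies a ModelJobGlobalConfig
+            # (new-style platforms), the local workflow config is derived below from the entry
+            # keyed by this platform's pure domain name; the raw `config` argument is only used
+            # when an old-style peer sends a plain workflow definition.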
+ if global_config is not None: + configer = ModelJobConfiger(session=session, + model_job_type=model_job.model_job_type, + algorithm_type=model_job.algorithm_type, + project_id=project_id) + domain_name = SettingService.get_system_info().pure_domain_name + config = configer.get_config(dataset_id=model_job.dataset_id, + model_id=model_job.model_id, + model_job_config=global_config.global_config[domain_name]) + ModelJobService(session).config_model_job(model_job, config=config, create_workflow=False) + model_job.role = ModelJobRole.PARTICIPANT + model_job.creator_username = get_current_user().username + # Compatible with old versions, use PUT for authorization + ModelJobService.update_model_job_auth_status(model_job=model_job, auth_status=AuthStatus.AUTHORIZED) + ModelJobController(session, project_id).inform_auth_status_to_participants(model_job) + + session.commit() + scheduler.wakeup(model_job.workflow_id) + return make_flask_response(model_job.to_proto()) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, op_type=Event.OperationType.UPDATE) + @use_kwargs( + { + 'metric_is_public': + fields.Boolean(required=False, load_default=None), + 'auth_status': + fields.String(required=False, load_default=None, validate=validate.OneOf([s.name for s in AuthStatus])), + 'comment': + fields.String(required=False, load_default=None) + }, + location='json') + def patch(self, project_id: int, model_job_id: int, metric_is_public: Optional[bool], auth_status: Optional[str], + comment: Optional[str]): + """Patch the attributes of the model job + --- + tags: + - mmgr + description: change the attributes of the model job, e.g. whether the metric is public + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + - in: path + name: model_job_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + metric_is_public: + type: boolean + auth_status: + type: string + comment: + type: string + responses: + 200: + description: detail of the model job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobPb' + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + if metric_is_public is not None: + model_job.metric_is_public = metric_is_public + if auth_status is not None: + ModelJobService.update_model_job_auth_status(model_job=model_job, auth_status=AuthStatus[auth_status]) + ModelJobController(session, project_id).inform_auth_status_to_participants(model_job) + model_job.creator_username = get_current_user().username + if comment is not None: + model_job.comment = comment + session.commit() + return make_flask_response(model_job.to_proto()) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, op_type=Event.OperationType.DELETE) + def delete(self, project_id: int, model_job_id: int): + """Delete the model job + --- + tags: + - mmgr + description: delete the model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + responses: + 204: + description: delete the model job successfully + 409: + description: model job cannot be deleted + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + if not model_job.is_deletable(): + raise ResourceConflictException(f'model job cannot be deleted because it is in state {model_job.state}') + ModelJobService(session).delete(model_job.id) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT)
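+# Reviewer sketch (not part of the module): exercising the PATCH endpoint above with the test
+# helpers used in model_job_apis_test.py; the project and model job ids are hypothetical.
+#
+#   resp = self.patch_helper('/api/v2/projects/1/model_jobs/1', data={'auth_status': 'AUTHORIZED'})
+#   self.assertEqual(resp.status_code, HTTPStatus.OK)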
class StartModelJobApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, op_type=Event.OperationType.UPDATE) + def post(self, project_id: int, model_job_id: int): + """Start the model job + --- + tags: + - mmgr + description: start the model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + responses: + 200: + description: start the model job successfully + """ + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(id=model_job_id, project_id=project_id).first() + if model_job is None: + raise NotFoundException(f'[StartModelJobApi] model job {model_job_id} is not found') + start_model_job(model_job_id=model_job_id) + return make_flask_response(status=HTTPStatus.OK) + + +class StopModelJobApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, op_type=Event.OperationType.UPDATE) + def post(self, project_id: int, model_job_id: int): + """Stop the model job + --- + tags: + - mmgr + description: stop the model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + responses: + 200: + description: stop the model job successfully + """ + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(id=model_job_id, project_id=project_id).first() + if model_job is None: + raise NotFoundException(f'[StopModelJobApi] model job {model_job_id} is not found') + stop_model_job(model_job_id=model_job_id) + return make_flask_response(status=HTTPStatus.OK) + + +class ModelJobMetricsApi(Resource): + + @credentials_required + def get(self, project_id: int, model_job_id: int): + """Get the model job metrics by id + --- + tags: + - mmgr + description: get the model job metrics by id + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the model job metrics + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobMetrics' + 500: + description: error exists when querying metrics for the model job + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + try: + metrics = ModelJobService(session).query_metrics(model_job) + except ValueError as e: + logging.warning(f'[Model] error when querying metrics for model job {model_job_id}') + raise InternalException(details=str(e)) from e + return make_flask_response(metrics), HTTPStatus.OK
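+# Reviewer sketch (not part of the module): the metrics endpoint above presumably serializes the
+# ModelJobMetrics proto produced by tree_metrics_inquirer / nn_metrics_inquirer earlier in this
+# change; a hypothetical fetch with the test helpers:
+#
+#   resp = self.get_helper('/api/v2/projects/1/model_jobs/1/metrics')
+#   metrics = self.get_response_data(resp)  # e.g. metrics['train']['auc']['values']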
class ModelJobResultsApi(Resource): + + @credentials_required + def get(self, project_id: int, model_job_id: int): + """Get the model job result by id + --- + tags: + - mmgr + description: get the model job result by id + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: model_job_id + schema: + type: integer + required: true + responses: + 200: + description: file of the model job results + content: + application/json: + schema: + type: string + 204: + description: the output path does not exist + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + output_path = model_job.get_output_path() + file_manager = FileManager() + if file_manager.exists(output_path): + with tempfile.NamedTemporaryFile(suffix='.tar') as temp_file: + FileOperator().archive_to([output_path], temp_file.name) + return send_file(temp_file.name, + attachment_filename=f'{model_job.name}_result.tar', + mimetype='application/x-tar', + as_attachment=True) + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +def _validate_create_model_job_params(project_id: int, + name: str, + model_job_type: ModelJobType, + model_id: int, + group_id: Optional[int] = None): + if model_job_type == ModelJobType.TRAINING and model_id is not None: + raise InvalidArgumentException(details='model id must be None for training job') + if model_job_type in [ModelJobType.EVALUATION, ModelJobType.PREDICTION] and model_id is None: + raise InvalidArgumentException(details='model id must not be None for eval or predict job') + if model_job_type == ModelJobType.TRAINING and group_id is None: + raise InvalidArgumentException(details='training model job must be in a group') + if model_job_type in [ModelJobType.EVALUATION, ModelJobType.PREDICTION] and group_id is not None: + raise InvalidArgumentException(details='eval or predict job must not be in a group') + with db.session_scope() as session: + if group_id is not None: + group = session.query(ModelJobGroup).filter_by(project_id=project_id, id=group_id).first() + if group is None: + raise InvalidArgumentException(f'group {group_id} is not found in project {project_id}') + if model_id: + model = session.query(Model).filter_by(project_id=project_id, id=model_id).first() + if model is None: + raise InvalidArgumentException(f'model {model_id} is not found in project {project_id}') + model_job = session.query(ModelJob).filter_by(name=name).first() + if model_job is not None: + raise ResourceConflictException(f'model job {name} already exists') + + +def _build_model_job_configured_query(exp: SimpleExpression): + if exp.bool_value: + return Workflow.config.isnot(None) + return Workflow.config.is_(None) + + +# TODO(hangweiqiang): use filtering expression +class ModelJobsApi(Resource): + FILTER_FIELDS = { + 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'algorithm_type': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}), + 'model_job_type': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}), + 'configured': SupportedField(type=FieldType.BOOL, ops={FilterOp.EQUAL: _build_model_job_configured_query}), + 'role': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}), + 'status': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}), + 'auth_status': SupportedField(type=FieldType.STRING, ops={FilterOp.IN: None}) + } + + SORTER_FIELDS = ['created_at'] + + def __init__(self): + self._filter_builder = FilterBuilder(model_class=ModelJob, supported_fields=self.FILTER_FIELDS) + self._sorter_builder = SorterBuilder(model_class=ModelJob, supported_fields=self.SORTER_FIELDS) + + @credentials_required + @use_kwargs(ListModelJobsSchema(), location='query') + def get(self, project_id: int, group_id: Optional[int], keyword: Optional[str], types: Optional[List[ModelJobType]], + configured: Optional[bool], algorithm_types: Optional[List[AlgorithmType]], + states: Optional[List[WorkflowExternalState]], page: Optional[int], page_size: Optional[int], + filter_exp: Optional[FilterExpression], sorter_exp: str): + """Get the list
of model jobs + --- + tags: + - mmgr + description: get the list of model jobs + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: query + name: group_id + schema: + type: integer + - in: query + name: keyword + schema: + type: string + - in: query + name: types + schema: + type: array + items: + type: string + - in: query + name: algorithm_types + schema: + type: array + items: + type: string + - in: query + name: states + schema: + type: array + items: + type: string + - in: query + name: configured + schema: + type: boolean + - in: query + name: filter + schema: + type: string + - in: query + name: order_by + schema: + type: string + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + responses: + 200: + description: list of model jobs + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobRef' + """ + # update auth_status and participants_info of old data + with db.session_scope() as session: + model_jobs = session.query(ModelJob).filter_by(participants_info=None, project_id=project_id).all() + if model_jobs is not None: + participants = ParticipantService(session).get_participants_by_project(project_id) + participants_info = ParticipantsInfo(participants_map={ + p.pure_domain_name(): ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) for p in participants + }) + pure_domain_name = SettingService.get_system_info().pure_domain_name + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.AUTHORIZED.name + for model_job in model_jobs: + model_job.auth_status = AuthStatus.AUTHORIZED + model_job.set_participants_info(participants_info) + session.commit() + with db.session_scope() as session: + query = session.query(ModelJob) + if project_id: + query = query.filter_by(project_id=project_id) + if group_id is not None: + query = query.filter_by(group_id=group_id) + if types is not None: + query = query.filter(ModelJob.model_job_type.in_(types)) + if algorithm_types is not None: + query = query.filter(ModelJob.algorithm_type.in_(algorithm_types)) + if keyword is not None: + query = query.filter(ModelJob.name.like(f'%{keyword}%')) + if configured is not None: + if configured: + query = query.join(ModelJob.workflow).filter(Workflow.config.isnot(None)) + else: + query = query.join(ModelJob.workflow).filter(Workflow.config.is_(None)) + if filter_exp: + try: + query = query.outerjoin(Workflow, Workflow.uuid == ModelJob.workflow_uuid) + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + try: + if sorter_exp is not None: + sorter_exp = parse_expression(sorter_exp) + else: + sorter_exp = SortExpression(field='created_at', is_asc=False) + query = self._sorter_builder.build_query(query, sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + pagination = paginate(query, page, page_size) + model_jobs = pagination.get_items() + for model_job in model_jobs: + ModelJobService(session).update_model_job_status(model_job) + if states is not None: + model_jobs = [m for m in model_jobs if m.state in states] + data = [m.to_ref() for m in model_jobs] + session.commit() + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, 
op_type=Event.OperationType.CREATE) + @use_args(CreateModelJobParams(), location='json') + def post(self, params: dict, project_id: int): + """Create a model job + --- + tags: + - mmgr + description: create a model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/CreateModelJobParams' + responses: + 201: + description: detail of the model job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobPb' + 500: + description: error exists when creating model job + """ + name = params['name'] + config = params['config'] + model_job_type = params['model_job_type'] + algorithm_type = params['algorithm_type'] + model_id = params['model_id'] + group_id = params['group_id'] + dataset_id = params['dataset_id'] + data_batch_id = params['data_batch_id'] + algorithm_id = params['algorithm_id'] + global_config = params['global_config'] + comment = params['comment'] + with db.session_scope() as session: + get_project(project_id, session) + _validate_create_model_job_params(project_id, name, model_job_type, model_id, group_id) + # if our platform or the peer's platform is an old version + if not global_config or not _check_model_job_global_config_enable(project_id): + if data_batch_id is not None: + raise InternalException('auto update is not supported when ours or the peer\'s platform is an old version') + # model job type is TRAINING + if model_job_type in [ModelJobType.TRAINING]: + with db.session_scope() as session: + model_job = ModelJobController(session, project_id).launch_model_job(group_id=group_id) + session.commit() + return make_flask_response(model_job.to_proto(), status=HTTPStatus.CREATED) + # model job type is EVALUATION or PREDICTION + pure_domain_name = SettingService.get_system_info().pure_domain_name + succeeded, msg = CreateModelJob().run(project_id=project_id, + name=name, + model_job_type=model_job_type, + coordinator_pure_domain_name=pure_domain_name, + algorithm_type=algorithm_type, + dataset_id=dataset_id, + model_id=model_id, + group_id=group_id) + if not succeeded: + raise InternalException(f'error when creating model job with message: {msg}') + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name=name).first() + model_job.algorithm_id = algorithm_id + model_job.comment = comment + model_job.creator_username = get_current_user().username + workflow_uuid = model_job.workflow_uuid + ModelJobService(session).config_model_job(model_job=model_job, + config=config, + create_workflow=True, + workflow_uuid=workflow_uuid) + model_job.role = ModelJobRole.COORDINATOR + session.commit() + workflow = session.query(Workflow).filter_by(uuid=workflow_uuid).first() + # TODO(gezhengqiang): refactor config_model_job service and remove wake up after refactoring workflow + scheduler.wakeup(workflow.id) + return make_flask_response(data=model_job.to_proto(), status=HTTPStatus.CREATED) + # new version + with db.session_scope() as session: + version = None + # model job type is TRAINING + if group_id: + group: ModelJobGroup = ModelJobGroupService(session).lock_and_update_version(group_id) + if group.get_group_auth_frontend_status() not in [GroupAuthFrontendStatus.ALL_AUTHORIZED]: + raise UnauthorizedException(f'participants not all authorized in the group {group.name}') + version = group.latest_version + model_job = ModelJobService(session).create_model_job(name=name,
uuid=resource_uuid(), + role=ModelJobRole.COORDINATOR, + model_job_type=model_job_type, + algorithm_type=algorithm_type, + global_config=global_config, + group_id=group_id, + project_id=project_id, + data_batch_id=data_batch_id, + comment=comment, + version=version) + model_job.creator_username = get_current_user().username + if group_id and data_batch_id is not None: + group.auto_update_status = GroupAutoUpdateStatus.ACTIVE + group.start_data_batch_id = data_batch_id + ModelJobGroupController(session=session, project_id=project_id).update_participants_model_job_group( + uuid=group.uuid, + auto_update_status=group.auto_update_status, + start_data_batch_id=group.start_data_batch_id) + session.commit() + return make_flask_response(data=model_job.to_proto(), status=HTTPStatus.CREATED) + + +class PeerModelJobApi(Resource): + + @credentials_required + def get(self, project_id: int, model_job_id: int, participant_id: int): + """Get the peer model job + --- + tags: + - mmgr + description: get the peer model job + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + - in: path + name: model_job_id + required: true + schema: + type: integer + - in: path + name: participant_id + required: true + schema: + type: integer + responses: + 200: + description: get the peer model job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.PeerModelJobPb' + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + project = model_job.project + participant = get_participant(participant_id, project) + client = RpcClient.from_project_and_participant(project.name, project.token, participant.domain_name) + resp = client.get_model_job(model_job_uuid=model_job.uuid, need_metrics=False) + # to support backward compatibility, since peer system may not have metric_is_public + # TODO(hangweiqiang): remove code of backward compatibility + metric_is_public = True + if resp.HasField('metric_is_public'): + metric_is_public = resp.metric_is_public.value + peer_job = PeerModelJobPb(name=resp.name, + uuid=resp.uuid, + algorithm_type=resp.algorithm_type, + model_job_type=resp.model_job_type, + state=resp.state, + group_uuid=resp.group_uuid, + config=resp.config, + metric_is_public=metric_is_public) + return make_flask_response(peer_job, status=HTTPStatus.OK) + + +class PeerModelJobMetricsApi(Resource): + + @credentials_required + def get(self, project_id: int, model_job_id: int, participant_id: int): + """Get the peer model job metrics + --- + tags: + - mmgr + description: get the peer model job metrics + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + - in: path + name: model_job_id + required: true + schema: + type: integer + - in: path + name: participant_id + required: true + schema: + type: integer + responses: + 200: + description: detail of the model job metrics + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobMetrics' + 403: + description: the metric of peer model job is not public + """ + with db.session_scope() as session: + model_job = get_model_job(project_id, model_job_id, session) + project = model_job.project + participant = get_participant(participant_id, project) + client = RpcClient.from_project_and_participant(project.name, project.token, participant.domain_name) + resp = client.get_model_job(model_job_uuid=model_job.uuid, need_metrics=True) + if resp.HasField('metric_is_public') and not 
resp.metric_is_public.value: + raise NoAccessException('peer metric is not public') + metrics = json.loads(resp.metrics) + return make_flask_response(metrics, status=HTTPStatus.OK) + + +class LaunchModelJobApi(Resource): + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB, op_type=Event.OperationType.CREATE) + def post(self, project_id: int, group_id: int): + """Launch the model job + --- + tags: + - mmgr + description: launch the model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + responses: + 201: + description: launch the model job successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobPb' + 500: + description: error exists when launching model job by 2PC + """ + with db.session_scope() as session: + model_job = ModelJobController(session, project_id).launch_model_job(group_id=group_id) + return make_flask_response(model_job.to_proto(), status=HTTPStatus.CREATED) + + +class NextAutoUpdateModelJobApi(Resource): + + @credentials_required + def get(self, project_id: int, group_id: int): + """Get the next auto update model job + --- + tags: + - mmgr + description: get the next auto update model job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the model job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobPb' + """ + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(project_id=project_id, group_id=group_id, + auto_update=True).order_by( + ModelJob.created_at.desc()).limit(1).first() + if model_job is None: + group = session.query(ModelJobGroup).get(group_id) + raise NotFoundException(f'The auto update model job for the group {group.name} is not found') + if model_job.status in [ModelJobStatus.CONFIGURED, ModelJobStatus.RUNNING, ModelJobStatus.PENDING]: + raise InternalException(f'The latest auto update model job {model_job.name} is running') + data_batch_id = 0 + load_model_name = '' + if model_job.status in [ModelJobStatus.STOPPED, ModelJobStatus.FAILED, ModelJobStatus.ERROR]: + previous_success_model_job = session.query(ModelJob).filter_by( + project_id=project_id, group_id=group_id, auto_update=True, + status=ModelJobStatus.SUCCEEDED).order_by(ModelJob.created_at.desc()).limit(1).first() + if previous_success_model_job is None: + return make_flask_response(model_job.to_proto()) + model_job = previous_success_model_job + next_data_batch = BatchService(session).get_next_batch(model_job.data_batch) + if model_job.status in [ModelJobStatus.SUCCEEDED] and next_data_batch is not None: + model = session.query(Model).filter_by(model_job_id=model_job.id).first() + if model is None: + raise NotFoundException(f'The model job {model_job.name}\'s model is not found') + load_model_name = model.name + data_batch_id = next_data_batch.id + if model_job.model_id is None: + model_job.model_id = model.id + model_job.data_batch_id = data_batch_id + global_config = model_job.get_global_config() + if global_config is not None: + for config in global_config.global_config.values(): + set_load_model_name(config, load_model_name) + model_job.set_global_config(global_config) + return make_flask_response(model_job.to_proto())
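+# Reviewer sketch (not part of the module): querying the definition endpoint below; parameter
+# values are hypothetical, and the route is registered in initialize_mmgr_model_job_apis.
+#
+#   resp = self.get_helper('/api/v2/model_job_definitions?model_job_type=TRAINING&algorithm_type=NN_VERTICAL')
+#   data = self.get_response_data(resp)  # {'variables': [...], 'is_federated': ...}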
class ModelJobDefinitionApi(Resource): + + @credentials_required + @use_kwargs( + { + 'model_job_type': fields.Str(required=True, validate=validate.OneOf([t.name for t in ModelJobType])), + 'algorithm_type': fields.Str(required=True, validate=validate.OneOf([t.name for t in AlgorithmType])), + }, + location='query') + def get(self, model_job_type: str, algorithm_type: str): + """Get variables of model_job + --- + tags: + - mmgr + description: Get variables of given type of algorithm and model job + parameters: + - in: query + name: model_job_type + schema: + type: string + - in: query + name: algorithm_type + schema: + type: string + responses: + 200: + description: variables of given algorithm type and model job type + content: + application/json: + schema: + type: object + properties: + variables: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.Variable' + is_federated: + type: boolean + """ + model_job_type = ModelJobType[model_job_type] + algorithm_type = AlgorithmType[algorithm_type] + with db.session_scope() as session: + template_id = get_sys_template_id(session=session, + algorithm_type=algorithm_type, + model_job_type=model_job_type) + template: WorkflowTemplate = session.query(WorkflowTemplate).get(template_id) + config = template.get_config() + variables = config.job_definitions[0].variables + flag = is_federated(algorithm_type=algorithm_type, model_job_type=model_job_type) + return make_flask_response(data={'variables': list(variables), 'is_federated': flag}) + + +def initialize_mmgr_model_job_apis(api): + api.add_resource(ModelJobsApi, '/projects/<int:project_id>/model_jobs') + api.add_resource(ModelJobApi, '/projects/<int:project_id>/model_jobs/<int:model_job_id>') + api.add_resource(ModelJobMetricsApi, '/projects/<int:project_id>/model_jobs/<int:model_job_id>/metrics') + api.add_resource(ModelJobResultsApi, '/projects/<int:project_id>/model_jobs/<int:model_job_id>/results') + api.add_resource(StartModelJobApi, '/projects/<int:project_id>/model_jobs/<int:model_job_id>:start') + api.add_resource(StopModelJobApi, '/projects/<int:project_id>/model_jobs/<int:model_job_id>:stop') + api.add_resource(PeerModelJobApi, + '/projects/<int:project_id>/model_jobs/<int:model_job_id>/peers/<int:participant_id>') + api.add_resource(PeerModelJobMetricsApi, + '/projects/<int:project_id>/model_jobs/<int:model_job_id>/peers/<int:participant_id>/metrics') + api.add_resource(LaunchModelJobApi, '/projects/<int:project_id>/model_job_groups/<int:group_id>:launch') + api.add_resource(NextAutoUpdateModelJobApi, + '/projects/<int:project_id>/model_job_groups/<int:group_id>/next_auto_update_model_job') + api.add_resource(ModelJobDefinitionApi, '/model_job_definitions') + + schema_manager.append(CreateModelJobParams) + schema_manager.append(ConfigModelJobParams) + schema_manager.append(ListModelJobsSchema) diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_apis_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_apis_test.py new file mode 100644 index 000000000..ab7d7e3cc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_apis_test.py @@ -0,0 +1,1291 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +import os +import json +import tarfile +import tempfile +import unittest +import urllib.parse +from io import BytesIO +from pathlib import Path +from http import HTTPStatus +from datetime import datetime +from unittest.mock import patch, Mock, MagicMock, call +from envs import Envs +from testing.common import BaseTestCase +from testing.fake_model_job_config import get_global_config, get_workflow_config +from google.protobuf.wrappers_pb2 import BoolValue +from google.protobuf.empty_pb2 import Empty +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.utils.flask_utils import to_dict +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.participant.models import ProjectParticipant +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobGroup, ModelJobType, ModelJobRole, AuthStatus,\ + ModelJobStatus, GroupAutoUpdateStatus +from fedlearner_webconsole.algorithm.models import AlgorithmType, Algorithm +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.proto.service_pb2 import GetModelJobResponse +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, ModelJobConfig, ModelJobPb +from fedlearner_webconsole.workflow.models import WorkflowState, TransactionState +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetType, DatasetJobState, \ + DatasetJobStage, DataBatch + + +class ModelJobsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Envs.SYSTEM_INFO = '{"domain_name": "fl-test.com"}' + with db.session_scope() as session: + _insert_or_update_templates(session) + dataset_job = DatasetJob(id=1, + name='datasetjob', + uuid='dataset-job-uuid', + state=DatasetJobState.SUCCEEDED, + project_id=1, + input_dataset_id=3, + output_dataset_id=1, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN) + dataset_job_stage = DatasetJobStage(id=1, + name='data-join', + uuid='dataset-job-stage-uuid', + project_id=1, + state=DatasetJobState.SUCCEEDED, + dataset_job_id=1, + data_batch_id=1) + data_batch = DataBatch(id=1, + name='20221213', + dataset_id=1, + path='/data/dataset/haha/batch/20221213', + latest_parent_dataset_job_stage_id=1, + event_time=datetime(2022, 12, 13, 16, 37, 37)) + dataset = Dataset(id=1, + uuid='uuid', + name='dataset', + dataset_type=DatasetType.PSI, + path='/data/dataset/haha', + is_published=True) + project = Project(id=1, name='test-project') + participant = Participant(id=1, name='peer', domain_name='fl-peer.com') + pro_participant = ProjectParticipant(id=1, project_id=1, participant_id=1) + group = ModelJobGroup(id=1, name='test-group', project_id=project.id, uuid='uuid', latest_version=2) + session.add_all([project, group, dataset, dataset_job, data_batch, dataset_job_stage]) + participants_info = ParticipantsInfo() + w1 = Workflow(name='w1', + uuid='u1', + state=WorkflowState.NEW, + target_state=WorkflowState.READY, + 
transaction_state=TransactionState.PARTICIPANT_PREPARE) + mj1 = ModelJob(name='mj1', + workflow_uuid=w1.uuid, + project_id=1, + group_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + model_job_type=ModelJobType.TRAINING, + role=ModelJobRole.COORDINATOR, + auth_status=AuthStatus.PENDING, + created_at=datetime(2022, 8, 4, 0, 0, 0)) + mj1.set_participants_info(participants_info) + w2 = Workflow(name='w2', uuid='u2', state=WorkflowState.READY, target_state=None) + w2.set_config(get_workflow_config(model_job_type=ModelJobType.EVALUATION)) + mj2 = ModelJob(name='mj2', + workflow_uuid=w2.uuid, + project_id=1, + group_id=2, + algorithm_type=AlgorithmType.TREE_VERTICAL, + model_job_type=ModelJobType.EVALUATION, + role=ModelJobRole.PARTICIPANT, + auth_status=AuthStatus.AUTHORIZED, + created_at=datetime(2022, 8, 4, 0, 0, 1)) + mj2.set_participants_info(participants_info) + w3 = Workflow(name='w3', uuid='u3', state=WorkflowState.RUNNING, target_state=None) + w3.set_config(get_workflow_config(model_job_type=ModelJobType.PREDICTION)) + mj3 = ModelJob(name='mj3', + workflow_uuid=w3.uuid, + project_id=1, + algorithm_type=AlgorithmType.NN_HORIZONTAL, + model_job_type=ModelJobType.PREDICTION, + role=ModelJobRole.COORDINATOR, + auth_status=AuthStatus.PENDING, + created_at=datetime(2022, 8, 4, 0, 0, 2)) + mj3.set_participants_info(participants_info) + w4 = Workflow(name='w4', uuid='u4', state=WorkflowState.RUNNING, target_state=None) + w4.set_config(get_workflow_config(model_job_type=ModelJobType.PREDICTION)) + mj4 = ModelJob(name='mj31', + workflow_uuid=w4.uuid, + project_id=1, + algorithm_type=AlgorithmType.TREE_VERTICAL, + model_job_type=ModelJobType.PREDICTION, + role=ModelJobRole.PARTICIPANT, + auth_status=AuthStatus.AUTHORIZED, + created_at=datetime(2022, 8, 4, 0, 0, 3)) + mj4.set_participants_info(participants_info) + w5 = Workflow(name='w5', uuid='u5', state=WorkflowState.COMPLETED, target_state=None) + mj5 = ModelJob(id=123, + project_id=1, + name='mj5', + workflow_uuid=w5.uuid, + role=ModelJobRole.COORDINATOR, + auth_status=AuthStatus.PENDING, + created_at=datetime(2022, 8, 4, 0, 0, 4)) + mj5.set_participants_info(participants_info) + mj6 = ModelJob(id=124, + project_id=2, + name='mj6', + workflow_uuid=w5.uuid, + role=ModelJobRole.COORDINATOR, + auth_status=AuthStatus.PENDING, + created_at=datetime(2022, 8, 4, 0, 0, 4)) + mj5.set_participants_info(participants_info) + model = Model(id=12, name='test', model_job_id=123, group_id=1, uuid='model-uuid', project_id=1) + session.add_all([w1, w2, w3, mj1, mj2, mj3, w4, mj4, w5, mj5, mj6, model, participant, pro_participant]) + session.commit() + + def test_get_model_jobs_by_project_or_group(self): + resp = self.get_helper('/api/v2/projects/2/model_jobs') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj6']) + resp = self.get_helper('/api/v2/projects/1/model_jobs?group_id=2') + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj2']) + + def test_get_model_jobs_by_type(self): + resp = self.get_helper('/api/v2/projects/1/model_jobs?types=TRAINING&types=EVALUATION') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj1', 'mj2']) + + def test_get_model_jobs_by_algorithm_types(self): + resp = self.get_helper( + 
'/api/v2/projects/1/model_jobs?algorithm_types=NN_VERTICAL&&algorithm_types=TREE_VERTICAL') + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj1', 'mj2', 'mj31']) + resp = self.get_helper('/api/v2/projects/1/model_jobs?algorithm_types=NN_HORIZONTAL') + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj3']) + + def test_get_model_jobs_by_states(self): + resp = self.get_helper('/api/v2/projects/1/model_jobs?states=PENDING_ACCEPT') + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj1']) + resp = self.get_helper('/api/v2/projects/1/model_jobs?states=RUNNING&states=READY_TO_RUN') + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj2', 'mj3', 'mj31']) + + def test_get_model_jobs_by_keyword(self): + resp = self.get_helper('/api/v2/projects/1/model_jobs?keyword=mj3') + data = self.get_response_data(resp) + model_job_names = sorted([d['name'] for d in data]) + self.assertEqual(model_job_names, ['mj3', 'mj31']) + + def test_get_model_jobs_by_configured(self): + resp = self.get_helper('/api/v2/projects/1/model_jobs?configured=false') + data = self.get_response_data(resp) + self.assertEqual(sorted([d['name'] for d in data]), ['mj1', 'mj5']) + resp = self.get_helper('/api/v2/projects/1/model_jobs?configured=true') + data = self.get_response_data(resp) + self.assertEqual(sorted([d['name'] for d in data]), ['mj2', 'mj3', 'mj31']) + + def test_get_model_jobs_by_expression(self): + filter_param = urllib.parse.quote('(algorithm_type:["NN_VERTICAL","TREE_VERTICAL"])') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted([d['name'] for d in data]), ['mj1', 'mj2', 'mj31']) + filter_param = urllib.parse.quote('(algorithm_type:["NN_HORIZONTAL"])') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj3']) + filter_param = urllib.parse.quote('(role:["COORDINATOR"])') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj1', 'mj3', 'mj5']) + filter_param = urllib.parse.quote('(name~="1")') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj1', 'mj31']) + filter_param = urllib.parse.quote('(model_job_type:["TRAINING","EVALUATION"])') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj1', 'mj2']) + filter_param = urllib.parse.quote('(status:["RUNNING"])') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj3', 'mj31']) + filter_param = urllib.parse.quote('(configured=true)') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj2', 'mj3', 'mj31']) + filter_param = 
urllib.parse.quote('(auth_status:["AUTHORIZED"])') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj2', 'mj31']) + sorter_param = urllib.parse.quote('created_at asc') + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?order_by={sorter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['mj1', 'mj2', 'mj3', 'mj31', 'mj5']) + self.assertEqual(data[0]['status'], ModelJobStatus.PENDING.name) + self.assertEqual(data[1]['status'], ModelJobStatus.PENDING.name) + self.assertEqual(data[2]['status'], ModelJobStatus.RUNNING.name) + self.assertEqual(data[3]['status'], ModelJobStatus.RUNNING.name) + self.assertEqual(data[4]['status'], ModelJobStatus.SUCCEEDED.name) + resp = self.get_helper(f'/api/v2/projects/1/model_jobs?page=2&page_size=2&order_by={sorter_param}') + data = self.get_response_data(resp) + self.assertEqual(sorted(d['name'] for d in data), ['mj3', 'mj31']) + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_update_auth_status_of_old_data(self, mock_get_system_info): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='test') + with db.session_scope() as session: + project = Project(id=3, name='project2') + participant = Participant(id=3, name='peer2', domain_name='fl-peer2.com') + pro_participant = ProjectParticipant(id=2, project_id=3, participant_id=3) + model_job6 = ModelJob(id=6, project_id=3, name='j6', participants_info=None) + model_job7 = ModelJob(id=7, project_id=3, name='j7', participants_info=None) + session.add_all([project, participant, pro_participant, model_job6, model_job7]) + session.commit() + resp = self.get_helper('/api/v2/projects/3/model_jobs') + self.assertEqual(resp.status_code, HTTPStatus.OK) + participants_info = ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'peer2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + with db.session_scope() as session: + model_job6 = session.query(ModelJob).get(6) + self.assertEqual(model_job6.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual(model_job6.get_participants_info(), participants_info) + model_job7 = session.query(ModelJob).get(7) + self.assertEqual(model_job7.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual(model_job7.get_participants_info(), participants_info) + + @patch('fedlearner_webconsole.rpc.v2.system_service_client.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.two_pc.model_job_creator.ModelJobCreator.prepare') + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_post_train_model_job(self, mock_remote_two_pc: Mock, mock_prepare: Mock, mock_list_flags: Mock): + mock_prepare.return_value = True, '' + config = get_workflow_config(ModelJobType.TRAINING) + mock_remote_two_pc.return_value = True, '' + mock_list_flags.return_value = {'model_job_global_config_enabled': True} + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + group.role = ModelJobRole.COORDINATOR + group.algorithm_type = AlgorithmType.TREE_VERTICAL + group.dataset_id = 1 + group.set_config(config) + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'train-job', + 'group_id': 1, + 'model_job_type': 'TRAINING', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'config': 
to_dict(config), + 'comment': 'comment' + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(group_id=1, version=3).first() + self.assertEqual(model_job.project_id, 1) + self.assertEqual(model_job.group_id, 1) + self.assertEqual(model_job.model_job_type, ModelJobType.TRAINING) + self.assertEqual(model_job.algorithm_type, AlgorithmType.TREE_VERTICAL) + self.assertEqual(model_job.dataset_id, 1) + self.assertEqual(model_job.dataset_name(), 'dataset') + self.assertEqual( + model_job.workflow.get_config(), + WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[ + make_variable(name='mode', typed_value='train'), + make_variable(name='data_source', + typed_value='dataset-job-stage-uuid-psi-data-join-job'), + make_variable(name='data_path', typed_value=''), + make_variable(name='file_wildcard', typed_value='*.data'), + ], + yaml_template='{}') + ])) + + @patch('fedlearner_webconsole.two_pc.model_job_creator.ModelJobCreator.prepare') + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_post_eval_model_job(self, mock_remote_two_pc: Mock, mock_prepare: Mock): + mock_prepare.return_value = True, '' + config = get_workflow_config(ModelJobType.EVALUATION) + mock_remote_two_pc.return_value = True, '' + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'eval-job', + 'model_job_type': 'EVALUATION', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'config': to_dict(config), + 'eval_model_job_id': 123 + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='eval-job').first() + self.assertEqual(model_job.project_id, 1) + self.assertEqual(model_job.role, ModelJobRole.COORDINATOR) + self.assertEqual(model_job.model_job_type, ModelJobType.EVALUATION) + self.assertEqual(model_job.algorithm_type, AlgorithmType.TREE_VERTICAL) + self.assertEqual(model_job.model_id, 12) + self.assertEqual(model_job.dataset_id, 1) + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + @patch('fedlearner_webconsole.rpc.v2.system_service_client.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job') + def test_post_model_jobs_with_global_config(self, mock_create_model_job, mock_list_flags, mock_remote_do_two_pc): + mock_create_model_job.return_value = Empty() + global_config = get_global_config() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'eval-job', + 'model_job_type': 'EVALUATION', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'model_id': 12, + 'global_config': to_dict(global_config), + 'comment': 'comment' + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='eval-job').first() + self.assertEqual(model_job.model_job_type, ModelJobType.EVALUATION) + self.assertEqual(model_job.algorithm_type, AlgorithmType.TREE_VERTICAL) + self.assertEqual(model_job.dataset_id, 1) + self.assertEqual(model_job.model_id, 12) + self.assertEqual(model_job.group_id, 1) + self.assertEqual(model_job.get_global_config(), get_global_config()) + self.assertEqual(model_job.comment, 'comment') + 
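+            # the newly created job starts out PENDING and records the requesting
+            # user ('ada' is the account BaseTestCase signs in with)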
self.assertEqual(model_job.status, ModelJobStatus.PENDING) + self.assertEqual(model_job.creator_username, 'ada') + mock_list_flags.return_value = {'model_job_global_config_enabled': True} + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'train-job-1', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'group_id': 1, + 'global_config': to_dict(global_config), + 'comment': 'comment' + }) + # fail due to no authorization + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + participants_info = ParticipantsInfo() + participants_info.participants_map['test'].auth_status = AuthStatus.AUTHORIZED.name + participants_info.participants_map['peer'].auth_status = AuthStatus.PENDING.name + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + group.set_participants_info(participants_info) + group.authorized = True + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'train-job-1', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'group_id': 1, + 'global_config': to_dict(global_config), + 'comment': 'comment' + }) + # fail due to peer no authorization + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + participants_info = group.get_participants_info() + participants_info.participants_map['peer'].auth_status = AuthStatus.AUTHORIZED.name + group.set_participants_info(participants_info) + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'train-job-1', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'group_id': 1, + 'global_config': to_dict(global_config), + 'comment': 'comment' + }) + # create successfully + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='train-job-1').first() + self.assertEqual(model_job.model_job_type, ModelJobType.TRAINING) + self.assertEqual(model_job.version, 3) + mock_list_flags.return_value = {'model_job_global_config_enabled': False} + mock_remote_do_two_pc.return_value = True, '' + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + group.role = ModelJobRole.COORDINATOR + group.algorithm_type = AlgorithmType.NN_VERTICAL + group.set_config() + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'train-job-2', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'NN_VERTICAL', + 'dataset_id': 1, + 'group_id': 1, + 'global_config': to_dict(global_config), + 'comment': 'comment' + }) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(group_id=1, version=4).first() + self.assertIsNotNone(model_job) + self.assertEqual(model_job.model_job_type, ModelJobType.TRAINING) + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_VERTICAL) + + @patch('fedlearner_webconsole.rpc.v2.system_service_client.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.update_model_job_group') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job') + def test_post_auto_update_model_job(self, mock_creat_model_job: MagicMock, mock_update_model_job_group: MagicMock, + mock_list_flags: MagicMock): + 
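+        # Creating a training job bound to a specific data batch should switch the
+        # group to auto-update (ACTIVE) and push the starting dataset job stage uuid
+        # to the peer via JobServiceClient.update_model_job_group.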
mock_creat_model_job.return_value = Empty() + mock_list_flags.return_value = {'model_job_global_config_enabled': True} + global_config = get_global_config() + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.kind = DatasetJobKind.OT_PSI_DATA_JOIN + participants_info = ParticipantsInfo() + participants_info.participants_map['test'].auth_status = AuthStatus.AUTHORIZED.name + participants_info.participants_map['peer'].auth_status = AuthStatus.AUTHORIZED.name + group = session.query(ModelJobGroup).get(1) + group.set_participants_info(participants_info) + group.authorized = True + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'auto-update-train-job-1', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'NN_VERTICAL', + 'dataset_id': 1, + 'data_batch_id': 1, + 'group_id': 1, + 'global_config': to_dict(global_config), + 'comment': 'comment' + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + self.assertEqual(mock_update_model_job_group.call_args_list, [ + call(auto_update_status=GroupAutoUpdateStatus.ACTIVE, + start_dataset_job_stage_uuid='dataset-job-stage-uuid', + uuid='uuid') + ]) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(group_id=1, version=3).first() + self.assertIsNotNone(model_job) + self.assertEqual(model_job.model_job_type, ModelJobType.TRAINING) + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(model_job.auto_update, True) + self.assertEqual(model_job.data_batch_id, 1) + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + self.assertEqual(group.start_data_batch_id, 1) + self.assertEqual(group.auto_update_status, GroupAutoUpdateStatus.ACTIVE) + + def test_post_model_jobs_failed(self): + # fail due to missing model_id for eval job + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'eval-job', + 'model_job_type': 'EVALUATION', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'config': {}, + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to model_id existence for train job + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'train-job', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 1, + 'config': {}, + 'model_id': 1 + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + def test_post_horizontal_eval_model_job(self): + with db.session_scope() as session: + model = Model(id=1, name='train-model', project_id=1) + session.add(model) + session.commit() + config = get_workflow_config(ModelJobType.EVALUATION) + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'eval-job', + 'model_job_type': 'EVALUATION', + 'algorithm_type': 'NN_HORIZONTAL', + 'algorithm_id': 3, + 'model_id': 1, + 'config': to_dict(config), + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='eval-job').first() + self.assertEqual(model_job.project_id, 1) + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_HORIZONTAL) + self.assertEqual(model_job.role, ModelJobRole.COORDINATOR) + self.assertEqual(model_job.algorithm_id, 3) + self.assertEqual(model_job.model_id, 1) + self.assertEqual(model_job.model_job_type, ModelJobType.EVALUATION) + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def 
test_post_model_job_failed_due_to_dataset(self, mock_remote_two_pc): + config = get_workflow_config(ModelJobType.EVALUATION) + mock_remote_two_pc.return_value = True, '' + # failed due to dataset is not found + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'eval-job', + 'model_job_type': 'EVALUATION', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 3, + 'config': to_dict(config), + 'eval_model_job_id': 123 + }) + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + with db.session_scope() as session: + dataset = session.query(Dataset).get(1) + dataset.is_published = False + session.add(dataset) + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_jobs', + data={ + 'name': 'eval-job', + 'model_job_type': 'EVALUATION', + 'algorithm_type': 'TREE_VERTICAL', + 'dataset_id': 2, + 'config': to_dict(config), + 'eval_model_job_id': 123 + }) + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + + +class ModelJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Envs.SYSTEM_INFO = '{"domain_name": "fl-test.com"}' + with db.session_scope() as session: + project = Project(id=1, name='test-project') + participant = Participant(id=1, name='part', domain_name='fl-demo1.com') + pro_part = ProjectParticipant(id=1, project_id=1, participant_id=1) + group = ModelJobGroup(id=1, name='test-group', project_id=project.id, uuid='uuid') + workflow_uuid = 'uuid' + workflow = Workflow(id=1, + name='test-workflow-1', + project_id=1, + state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.PARTICIPANT_PREPARE, + uuid=workflow_uuid) + dataset_job = DatasetJob(id=1, + name='datasetjob', + uuid='uuid', + state=DatasetJobState.SUCCEEDED, + project_id=1, + input_dataset_id=1, + output_dataset_id=3, + kind=DatasetJobKind.OT_PSI_DATA_JOIN) + dataset = Dataset(id=3, + uuid='uuid', + name='dataset', + dataset_type=DatasetType.PSI, + path='/data/dataset/haha') + model_job = ModelJob(id=1, + name='test-model-job', + group_id=1, + project_id=1, + dataset_id=3, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.TREE_VERTICAL, + workflow_id=1, + workflow_uuid=workflow_uuid, + job_id=2, + job_name='uuid-train-job', + created_at=datetime(2022, 5, 10, 0, 0, 0)) + participants_info = ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + model_job.set_participants_info(participants_info) + session.add_all([project, group, workflow, model_job, dataset, dataset_job, participant, pro_part]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_model_job') + def test_get_model_job(self, mock_get_model_job): + mock_get_model_job.side_effect = [ModelJobPb(auth_status=AuthStatus.AUTHORIZED.name)] + with db.session_scope() as session: + workflow: Workflow = session.query(Workflow).filter_by(uuid='uuid').first() + config = get_workflow_config(model_job_type=ModelJobType.TRAINING) + workflow.set_config(config) + workflow.state = WorkflowState.READY + workflow.target_state = None + workflow.start_at = 1 + workflow.stop_at = 2 + model_job: ModelJob = session.query(ModelJob).get(1) + model = Model(id=1, + name='test-model', + model_job_id=model_job.id, + group_id=model_job.group_id, + created_at=datetime(2022, 5, 10, 0, 0, 0), + updated_at=datetime(2022, 5, 10, 0, 0, 0)) + session.add(model) + session.commit() + resp = 
self.get_helper('/api/v2/projects/1/model_jobs/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.maxDiff = None + self.assertPartiallyEqual(data, { + 'id': 1, + 'name': 'test-model-job', + 'role': 'PARTICIPANT', + 'model_job_type': 'TRAINING', + 'algorithm_type': 'TREE_VERTICAL', + 'auth_status': 'PENDING', + 'auto_update': False, + 'status': 'PENDING', + 'error_message': '', + 'group_id': 1, + 'project_id': 1, + 'state': 'READY_TO_RUN', + 'configured': True, + 'dataset_id': 3, + 'dataset_name': 'dataset', + 'output_model_name': 'test-model', + 'created_at': 1652140800, + 'started_at': 1, + 'stopped_at': 2, + 'uuid': '', + 'algorithm_id': 0, + 'model_id': 0, + 'model_name': '', + 'workflow_id': 1, + 'job_id': 2, + 'job_name': 'uuid-train-job', + 'creator_username': '', + 'coordinator_id': 0, + 'comment': '', + 'version': 0, + 'metric_is_public': False, + 'auth_frontend_status': 'SELF_AUTH_PENDING', + 'participants_info': { + 'participants_map': { + 'demo1': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '' + }, + 'test': { + 'auth_status': 'PENDING', + 'name': '', + 'role': '', + 'state': '', + 'type': '' + } + } + } + }, + ignore_fields=['config', 'output_models', 'updated_at', 'data_batch_id']) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_model_job') + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + @patch('fedlearner_webconsole.scheduler.scheduler.Scheduler.wakeup') + def test_put_model_job(self, mock_wake_up, mock_get_system_info, mock_inform_model_job): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='test') + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + model_job.uuid = 'uuid' + session.commit() + config = get_workflow_config(ModelJobType.TRAINING) + data = {'algorithm_id': 1, 'config': to_dict(config)} + resp = self.put_helper('/api/v2/projects/1/model_jobs/1', data=data) + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['configured'], True) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + self.assertEqual(model_job.role, ModelJobRole.PARTICIPANT) + workflow = session.query(Workflow).filter_by(uuid='uuid').first() + self.assertEqual(workflow.template.name, 'sys-preset-tree-model') + self.assertEqual( + workflow.get_config(), + WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[ + make_variable(name='mode', typed_value='train'), + make_variable(name='data_source', typed_value=''), + make_variable(name='data_path', typed_value='/data/dataset/haha/batch'), + make_variable(name='file_wildcard', typed_value='**/part*') + ], + yaml_template='{}') + ])) + participants_info = ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + self.assertEqual(model_job.get_participants_info(), participants_info) + self.assertEqual(mock_inform_model_job.call_args_list, [(('uuid', AuthStatus.AUTHORIZED),)]) + mock_wake_up.assert_called_with(model_job.workflow_id) + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_model_job') + def test_patch_model_job(self, 
mock_inform_model_job, mock_get_system_info):
+        mock_get_system_info.return_value = SystemInfo(pure_domain_name='test')
+        with db.session_scope() as session:
+            model_job = session.query(ModelJob).get(1)
+            model_job.uuid = 'uuid'
+            participants_info = ParticipantsInfo(
+                participants_map={
+                    'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name),
+                    'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name)
+                })
+            model_job.set_participants_info(participants_info)
+            session.commit()
+        resp = self.patch_helper('/api/v2/projects/1/model_jobs/1',
+                                 data={
+                                     'metric_is_public': False,
+                                     'auth_status': 'HAHA'
+                                 })
+        self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
+        resp = self.patch_helper('/api/v2/projects/1/model_jobs/1',
+                                 data={
+                                     'metric_is_public': False,
+                                     'auth_status': 'PENDING',
+                                     'comment': 'hahahaha'
+                                 })
+        self.assertEqual(resp.status_code, HTTPStatus.OK)
+        with db.session_scope() as session:
+            model_job = session.query(ModelJob).get(1)
+            self.assertFalse(model_job.metric_is_public)
+            self.assertEqual(model_job.auth_status, AuthStatus.PENDING)
+            participants_info = ParticipantsInfo(
+                participants_map={
+                    'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name),
+                    'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name)
+                })
+            self.assertEqual(model_job.get_participants_info(), participants_info)
+            self.assertEqual(mock_inform_model_job.call_args_list, [(('uuid', AuthStatus.PENDING),)])
+            self.assertEqual(model_job.creator_username, 'ada')
+            self.assertEqual(model_job.comment, 'hahahaha')
+        mock_inform_model_job.reset_mock()
+        self.patch_helper('/api/v2/projects/1/model_jobs/1',
+                          data={
+                              'metric_is_public': True,
+                              'auth_status': 'AUTHORIZED'
+                          })
+        with db.session_scope() as session:
+            model_job = session.query(ModelJob).get(1)
+            self.assertTrue(model_job.metric_is_public)
+            self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED)
+            participants_info = ParticipantsInfo(
+                participants_map={
+                    'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name),
+                    'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name)
+                })
+            self.assertEqual(model_job.get_participants_info(), participants_info)
+            self.assertEqual(mock_inform_model_job.call_args_list, [(('uuid', AuthStatus.AUTHORIZED),)])
+
+    @patch('fedlearner_webconsole.mmgr.model_job_configer.ModelJobConfiger.get_config')
+    def test_put_model_job_with_global_config(self, mock_get_config):
+        mock_get_config.return_value = get_workflow_config(ModelJobType.TRAINING)
+        global_config = get_global_config()
+        resp = self.put_helper('/api/v2/projects/1/model_jobs/1',
+                               data={
+                                   'dataset_id': 3,
+                                   'global_config': to_dict(global_config),
+                               })
+        self.assertEqual(resp.status_code, HTTPStatus.OK)
+        mock_get_config.assert_called_with(dataset_id=3,
+                                           model_id=None,
+                                           model_job_config=global_config.global_config['test'])
+        with db.session_scope() as session:
+            self.maxDiff = None
+            model_job: ModelJob = session.query(ModelJob).get(1)
+            self.assertEqual(model_job.dataset_id, 3)
+            self.assertEqual(
+                model_job.config(),
+                WorkflowDefinition(job_definitions=[
+                    JobDefinition(name='train-job',
+                                  job_type=JobDefinition.JobType.TREE_MODEL_TRAINING,
+                                  variables=[
+                                      make_variable('mode', typed_value='train'),
+                                      make_variable('data_source', typed_value=''),
+                                      make_variable('data_path', typed_value='/data/dataset/haha/batch'),
+                                      make_variable('file_wildcard', typed_value='**/part*')
+                                  ],
+                                  yaml_template='{}')
+                ]))
+
+    def test_delete_model_job(self):
+        resp = self.delete_helper('/api/v2/projects/1/model_jobs/1')
self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + model_job.workflow.state = WorkflowState.STOPPED + session.commit() + resp = self.delete_helper('/api/v2/projects/1/model_jobs/1') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + model_job = session.query(ModelJob).execution_options(include_deleted=True).get(1) + self.assertIsNotNone(model_job.deleted_at) + + +class ModelJobResultsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=123, name='test-project') + session.add(project) + model_job = ModelJob(id=123, name='test-model', project_id=project.id, job_name='test-job') + session.add(model_job) + session.commit() + + @patch('fedlearner_webconsole.mmgr.models.ModelJob.get_job_path') + def test_get_results(self, mock_get_job_path): + with tempfile.TemporaryDirectory() as file: + mock_get_job_path.return_value = file + Path(os.path.join(file, 'outputs')).mkdir() + Path(os.path.join(file, 'outputs', '1.output')).write_text('output_1', encoding='utf-8') + Path(os.path.join(file, 'outputs', '2.output')).write_text('output_2', encoding='utf-8') + resp = self.get_helper('/api/v2/projects/123/model_jobs/123/results') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(resp.content_type, 'application/x-tar') + with tarfile.TarFile(fileobj=BytesIO(resp.data)) as tar: + with tempfile.TemporaryDirectory() as temp_dir: + tar.extractall(temp_dir) + self.assertEqual(['1.output', '2.output'], sorted(os.listdir(os.path.join(temp_dir, 'outputs')))) + with open(os.path.join(temp_dir, 'outputs', '1.output'), encoding='utf-8') as f: + self.assertEqual(f.read(), 'output_1') + + +class StartModelJobApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.mmgr.model_job_apis.start_model_job') + def test_start_model_job(self, mock_start_model_job: MagicMock): + with db.session_scope() as session: + model_job = ModelJob(id=1, name='train-job', project_id=1) + session.add(model_job) + session.commit() + resp = self.post_helper(f'/api/v2/projects/1/model_jobs/{model_job.id}:start') + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_start_model_job.assert_called_with(model_job_id=1) + + +class StopModelJobApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.mmgr.model_job_apis.stop_model_job') + def test_stop_model_job(self, mock_stop_model_job: MagicMock): + with db.session_scope() as session: + model_job = ModelJob(id=1, name='train_job', workflow_id=1, project_id=1) + session.add(model_job) + session.commit() + resp = self.post_helper(f'/api/v2/projects/1/model_jobs/{model_job.id}:stop') + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_stop_model_job.assert_called_with(model_job_id=1) + + +class PeerModelJobTest(BaseTestCase): + + def setUp(self): + super().setUp() + project = Project(id=1, name='project') + participant = Participant(id=1, name='party', domain_name='fl-test.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + model_job = ModelJob(id=1, project_id=1, name='model-job', uuid='uuid', workflow_uuid='workflow_uuid') + workflow = Workflow(name='workflow', uuid='workflow_uuid') + workflow.set_config(WorkflowDefinition(group_alias='haha')) + with db.session_scope() as session: + session.add_all([project, participant, relationship, model_job]) + session.commit() + + 
@patch('fedlearner_webconsole.rpc.client.RpcClient.get_model_job') + def test_get_peer_model_job(self, mock_get_model_job): + mock_get_model_job.return_value = GetModelJobResponse(name='name', + uuid='uuid', + group_uuid='uuid', + algorithm_type='NN_VERTICAL', + model_job_type='TRAINING', + state='COMPLETED', + metrics='12', + metric_is_public=BoolValue(value=False)) + resp = self.get_helper('/api/v2/projects/1/model_jobs/1/peers/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_get_model_job.assert_called_with(model_job_uuid='uuid', need_metrics=False) + self.assertResponseDataEqual( + resp, { + 'name': 'name', + 'uuid': 'uuid', + 'algorithm_type': 'NN_VERTICAL', + 'model_job_type': 'TRAINING', + 'group_uuid': 'uuid', + 'state': 'COMPLETED', + 'config': { + 'group_alias': '', + 'variables': [], + 'job_definitions': [] + }, + 'metric_is_public': False, + }) + + +class PeerModelJobMetricsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + project = Project(id=1, name='project') + participant = Participant(id=1, name='party', domain_name='fl-test.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + model_job = ModelJob(id=1, project_id=1, name='model-job', uuid='uuid', workflow_uuid='workflow_uuid') + with db.session_scope() as session: + session.add_all([project, participant, relationship, model_job]) + session.commit() + + @patch('fedlearner_webconsole.rpc.client.RpcClient.get_model_job') + def test_get_peer_model_job(self, mock_get_model_job): + metrics = {'auc': 0.5} + mock_get_model_job.return_value = GetModelJobResponse(name='name', uuid='uuid', metrics=json.dumps(metrics)) + resp = self.get_helper('/api/v2/projects/1/model_jobs/1/peers/1/metrics') + mock_get_model_job.assert_called_with(model_job_uuid='uuid', need_metrics=True) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'auc': 0.5}) + mock_get_model_job.assert_called_with(model_job_uuid='uuid', need_metrics=True) + self.assertEqual(self.get_response_data(resp), metrics) + mock_get_model_job.return_value = GetModelJobResponse(name='name', + uuid='uuid', + metric_is_public=BoolValue(value=False)) + resp = self.get_helper('/api/v2/projects/1/model_jobs/1/peers/1/metrics') + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + mock_get_model_job.return_value = GetModelJobResponse(name='name', + uuid='uuid', + metric_is_public=BoolValue(value=True)) + resp = self.get_helper('/api/v2/projects/1/model_jobs/1/peers/1/metrics') + # internal error since the metric is not valid + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + + +class LaunchModelJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + project = Project(id=1, name='test-project') + dataset_job = DatasetJob(id=1, + name='datasetjob', + uuid='uuid', + state=DatasetJobState.SUCCEEDED, + project_id=1, + input_dataset_id=1, + output_dataset_id=3, + kind=DatasetJobKind.OT_PSI_DATA_JOIN) + dataset = Dataset(id=3, + uuid='uuid', + name='datasetjob', + dataset_type=DatasetType.PSI, + path='/data/dataset/haha') + algorithm = Algorithm(id=2, name='algorithm') + group = ModelJobGroup(name='group', + uuid='uuid', + project_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_id=2, + role=ModelJobRole.COORDINATOR, + dataset_id=3) + group.set_config(get_workflow_config(ModelJobType.TRAINING)) + session.add_all([dataset_job, dataset, project, group, 
algorithm]) + session.commit() + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_launch_model_job(self, mock_remote_do_two_pc): + with db.session_scope() as session: + group = session.query(ModelJobGroup).filter_by(uuid='uuid').first() + mock_remote_do_two_pc.return_value = True, '' + resp = self.post_helper(f'/api/v2/projects/1/model_job_groups/{group.id}:launch') + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).filter_by(name='group').first() + model_job = group.model_jobs[0] + self.assertEqual(model_job.group_id, group.id) + self.assertTrue(model_job.project_id, group.project_id) + self.assertEqual(model_job.version, 1) + self.assertEqual(group.latest_version, 1) + self.assertTrue(model_job.algorithm_type, group.algorithm_type) + self.assertTrue(model_job.model_job_type, ModelJobType.TRAINING) + self.assertTrue(model_job.dataset_id, group.dataset_id) + self.assertTrue(model_job.workflow.get_config(), group.get_config()) + + +class NextAutoUpdateModelJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + data_batch = DataBatch(id=1, + name='20220101-08', + dataset_id=1, + event_time=datetime(year=2000, month=1, day=1, hour=8), + latest_parent_dataset_job_stage_id=1) + group1 = ModelJobGroup(id=1, name='group1', project_id=1, auto_update_status=GroupAutoUpdateStatus.ACTIVE) + group2 = ModelJobGroup(id=2, name='group2', project_id=1, auto_update_status=GroupAutoUpdateStatus.ACTIVE) + group3 = ModelJobGroup(id=3, name='group3', project_id=1, auto_update_status=GroupAutoUpdateStatus.ACTIVE) + model_job1 = ModelJob(id=1, + group_id=1, + auto_update=False, + project_id=1, + created_at=datetime(2022, 12, 16, 1, 0, 0), + status=ModelJobStatus.SUCCEEDED, + data_batch_id=1) + global_config2 = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': ModelJobConfig(algorithm_uuid='uuid1', variables=[Variable(name='load_model_name')]), + 'test2': ModelJobConfig(algorithm_uuid='uuid2', variables=[Variable(name='load_model_name')]) + }) + model_job2 = ModelJob(id=2, + group_id=2, + auto_update=True, + project_id=1, + data_batch_id=5, + created_at=datetime(2022, 12, 16, 2, 0, 0), + status=ModelJobStatus.RUNNING) + model_job2.set_global_config(global_config2) + global_config3 = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': ModelJobConfig(algorithm_uuid='uuid1', variables=[Variable(name='load_model_name')]), + 'test2': ModelJobConfig(algorithm_uuid='uuid2', variables=[Variable(name='load_model_name')]) + }) + model_job3 = ModelJob(id=3, + name='test-model', + group_id=3, + auto_update=True, + created_at=datetime(2022, 12, 16, 3, 0, 0), + status=ModelJobStatus.SUCCEEDED, + data_batch_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + project_id=1, + comment='comment', + version=3) + model_job3.set_global_config(global_config3) + session.add_all([project, group1, group2, group3, model_job1, model_job2, model_job3, data_batch]) + session.commit() + + @patch('fedlearner_webconsole.dataset.services.BatchService.get_next_batch') + def test_get_next_auto_update_model_job(self, mock_get_next_batch: MagicMock): + # fail due to model job group has no auto update model jobs + resp = 
self.get_helper('/api/v2/projects/1/model_job_groups/1/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + # fail due to the latest auto update model job is running + mock_get_next_batch.return_value = DataBatch(id=2) + resp = self.get_helper('/api/v2/projects/1/model_job_groups/2/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(2) + model_job.status = ModelJobStatus.CONFIGURED + session.commit() + resp = self.get_helper('/api/v2/projects/1/model_job_groups/2/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(2) + model_job.status = ModelJobStatus.PENDING + session.commit() + resp = self.get_helper('/api/v2/projects/1/model_job_groups/2/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + # when the latest auto update model job is stopped and there is no previous successful model job + with db.session_scope() as session: + model_job = session.query(ModelJob).get(2) + model_job.status = ModelJobStatus.STOPPED + session.commit() + resp = self.get_helper('/api/v2/projects/1/model_job_groups/2/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['data_batch_id'], 5) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test1': + ModelJobConfig(algorithm_uuid='uuid1', + variables=[ + Variable(name='load_model_name', + value='', + value_type=Variable.ValueType.STRING) + ]), + 'test2': + ModelJobConfig(algorithm_uuid='uuid2', + variables=[ + Variable(name='load_model_name', + value='', + value_type=Variable.ValueType.STRING) + ]) + }) + self.assertEqual(data['global_config'], to_dict(global_config)) + # when the latest auto model job is failed and there is previous successful auto update model job + with db.session_scope() as session: + model_job = session.query(ModelJob).get(2) + model_job.status = ModelJobStatus.FAILED + global_config = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': ModelJobConfig(algorithm_uuid='uuid1', variables=[Variable(name='load_model_name')]), + 'test2': ModelJobConfig(algorithm_uuid='uuid2', variables=[Variable(name='load_model_name')]) + }) + model_job = ModelJob(id=4, + group_id=2, + auto_update=True, + project_id=1, + data_batch_id=3, + created_at=datetime(2022, 12, 16, 1, 0, 0), + status=ModelJobStatus.SUCCEEDED, + model_id=2) + model = Model(id=2, model_job_id=4, name='test-previous-model') + model_job.set_global_config(global_config) + session.add_all([model, model_job]) + session.commit() + mock_get_next_batch.return_value = DataBatch(id=4) + resp = self.get_helper('/api/v2/projects/1/model_job_groups/2/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['data_batch_id'], 4) + global_config = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': + ModelJobConfig(algorithm_uuid='uuid1', + variables=[ + Variable(name='load_model_name', + value='test-previous-model', + typed_value=Value(string_value='test-previous-model'), + value_type=Variable.ValueType.STRING) + ]), + 'test2': + ModelJobConfig(algorithm_uuid='uuid2', + variables=[ + Variable(name='load_model_name', + 
value='test-previous-model', + typed_value=Value(string_value='test-previous-model'), + value_type=Variable.ValueType.STRING) + ]) + }) + self.assertEqual(data['global_config'], to_dict(global_config)) + # when the latest auto update model job is succeeded and next batch is None + mock_get_next_batch.return_value = None + resp = self.get_helper('/api/v2/projects/1/model_job_groups/3/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['data_batch_id'], 0) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test1': + ModelJobConfig(algorithm_uuid='uuid1', + variables=[ + Variable(name='load_model_name', + value='', + typed_value=Value(string_value=''), + value_type=Variable.ValueType.STRING) + ]), + 'test2': + ModelJobConfig(algorithm_uuid='uuid2', + variables=[ + Variable(name='load_model_name', + value='', + typed_value=Value(string_value=''), + value_type=Variable.ValueType.STRING) + ]) + }) + self.assertEqual(data['global_config'], to_dict(global_config)) + # when the latest auto update model job is succeeded and there is next data batch, but there is no model + mock_get_next_batch.return_value = DataBatch(id=3) + resp = self.get_helper('/api/v2/projects/1/model_job_groups/3/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + # when the latest auto update model job is succeeded and there is next data batch, and there is model + with db.session_scope() as session: + model = Model(id=1, name='test-model', model_job_id=3, uuid='uuid') + session.add(model) + session.commit() + mock_get_next_batch.return_value = DataBatch(id=3) + resp = self.get_helper('/api/v2/projects/1/model_job_groups/3/next_auto_update_model_job') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + global_config = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': + ModelJobConfig(algorithm_uuid='uuid1', + variables=[ + Variable(name='load_model_name', + value='test-model', + typed_value=Value(string_value='test-model'), + value_type=Variable.ValueType.STRING) + ]), + 'test2': + ModelJobConfig(algorithm_uuid='uuid2', + variables=[ + Variable(name='load_model_name', + value='test-model', + typed_value=Value(string_value='test-model'), + value_type=Variable.ValueType.STRING) + ]) + }) + self.assertEqual(data['data_batch_id'], 3) + self.assertEqual(data['global_config'], to_dict(global_config)) + self.assertEqual(data['model_id'], 1) + + +class ModelJobDefinitionApiTest(BaseTestCase): + + def test_get_definitions(self): + resp = self.get_helper('/api/v2/model_job_definitions?algorithm_type=NN_VERTICAL&model_job_type=TRAINING') + data = self.get_response_data(resp) + self.assertEqual(data['is_federated'], True) + self.assertEqual(len(data['variables']), 32) + resp = self.get_helper('/api/v2/model_job_definitions?algorithm_type=NN_HORIZONTAL&model_job_type=EVALUATION') + data = self.get_response_data(resp) + self.assertEqual(data['is_federated'], False) + self.assertEqual(len(data['variables']), 8) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_configer.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_configer.py new file mode 100644 index 000000000..29d40c3d7 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_configer.py @@ -0,0 +1,179 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import logging +from typing import Optional, List +from sqlalchemy.orm.session import Session +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DataBatch +from fedlearner_webconsole.mmgr.models import Model, ModelJobType +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from fedlearner_webconsole.workflow_template.utils import make_variable, set_value +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobConfig +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.const import SYS_PRESET_TREE_TEMPLATE, SYS_PRESET_VERTICAL_NN_TEMPLATE, \ + SYS_PRESET_HORIZONTAL_NN_TEMPLATE, SYS_PRESET_HORIZONTAL_NN_EVAL_TEMPLATE + +LOAD_MODEL_NAME = 'load_model_name' + + +def set_load_model_name(config: ModelJobConfig, model_name: str): + """Set variable of load_model_name inplace""" + for variable in config.variables: + if variable.name == LOAD_MODEL_NAME: + assert variable.value_type == Variable.ValueType.STRING + variable.value = model_name + variable.typed_value.MergeFrom(Value(string_value=model_name)) + + +def get_sys_template_id(session: Session, algorithm_type: AlgorithmType, model_job_type: ModelJobType) -> Optional[int]: + template_name = None + if algorithm_type == AlgorithmType.NN_VERTICAL: + template_name = SYS_PRESET_VERTICAL_NN_TEMPLATE + if algorithm_type == AlgorithmType.NN_HORIZONTAL: + if model_job_type == ModelJobType.TRAINING: + template_name = SYS_PRESET_HORIZONTAL_NN_TEMPLATE + else: + template_name = SYS_PRESET_HORIZONTAL_NN_EVAL_TEMPLATE + if algorithm_type == AlgorithmType.TREE_VERTICAL: + template_name = SYS_PRESET_TREE_TEMPLATE + if template_name: + template_id = session.query(WorkflowTemplate.id).filter_by(name=template_name).first() + if template_id is not None: + return template_id[0] + return None + + +def _set_variable(variables: List[Variable], new_variable: Variable): + for variable in variables: + if variable.name == new_variable.name: + variable.CopyFrom(new_variable) + return + raise Exception(f'variable {new_variable.name} is not found') + + +class ModelJobConfiger: + + def __init__(self, session: Session, model_job_type: ModelJobType, algorithm_type: AlgorithmType, project_id: int): + self._session = session + self.model_job_type = model_job_type + self.algorithm_type = algorithm_type + self.project_id = project_id + + @staticmethod + def _init_config(config: WorkflowDefinition, variables: List[Variable]): + assert len(config.job_definitions) == 1 + 
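+        # index the incoming variables by name, then overwrite value and
+        # typed_value of the matching template variables in place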
new_dict = {i.name: i for i in variables} + for var in config.job_definitions[0].variables: + if var.name in new_dict: + var.typed_value.CopyFrom(new_dict[var.name].typed_value) + var.value = new_dict[var.name].value + + def _get_config(self) -> WorkflowDefinition: + template_id = get_sys_template_id(session=self._session, + algorithm_type=self.algorithm_type, + model_job_type=self.model_job_type) + if template_id is None: + raise InternalException('preset template is not found') + template: WorkflowTemplate = self._session.query(WorkflowTemplate).get(template_id) + return template.get_config() + + def get_dataset_variables(self, dataset_id: Optional[int], data_batch_id: Optional[int] = None) -> List[Variable]: + if dataset_id is None: + return [] + dataset: Dataset = self._session.query(Dataset).get(dataset_id) + dataset_job: DatasetJob = self._session.query(DatasetJob).filter_by(output_dataset_id=dataset_id).first() + if dataset_job is None: + raise InvalidArgumentException(f'dataset job for dataset {dataset_id} is not found') + data_source = dataset.get_data_source() + data_path = os.path.join(dataset.path, 'batch') + if data_batch_id is not None: + data_batch = self._session.query(DataBatch).get(data_batch_id) + data_path = data_batch.path + # TODO(hangweiqiang): use data path for all kind, and set file_wildcard for nn + variables = [] + if dataset_job.kind == DatasetJobKind.RSA_PSI_DATA_JOIN: + # there is no data_source in nn horizontal preset template + if self.algorithm_type != AlgorithmType.NN_HORIZONTAL: + variables.append(make_variable(name='data_source', typed_value=data_source)) + variables.append(make_variable(name='data_path', typed_value='')) + if self.algorithm_type == AlgorithmType.TREE_VERTICAL: + variables.append(make_variable(name='file_wildcard', typed_value='*.data')) + if dataset_job.kind in [ + DatasetJobKind.OT_PSI_DATA_JOIN, DatasetJobKind.HASH_DATA_JOIN, DatasetJobKind.DATA_ALIGNMENT, + DatasetJobKind.IMPORT_SOURCE + ]: + # there is no data_source in nn horizontal preset template + if self.algorithm_type != AlgorithmType.NN_HORIZONTAL: + variables.append(make_variable(name='data_source', typed_value='')) + variables.append(make_variable(name='data_path', typed_value=data_path)) + if self.algorithm_type == AlgorithmType.TREE_VERTICAL: + variables.append(make_variable(name='file_wildcard', typed_value='**/part*')) + return variables + + def get_config(self, dataset_id: int, model_id: Optional[int], + model_job_config: ModelJobConfig) -> WorkflowDefinition: + """get local workflow config from model_job_config""" + config = self._get_config() + self._init_config(config=config, variables=model_job_config.variables) + mode = 'train' if self.model_job_type == ModelJobType.TRAINING else 'eval' + variables = config.job_definitions[0].variables + # there is no mode variable in nn horizontal preset template + if self.algorithm_type != AlgorithmType.NN_HORIZONTAL: + _set_variable(variables=variables, new_variable=make_variable(name='mode', typed_value=mode)) + dataset_variables = self.get_dataset_variables(dataset_id=dataset_id) + for var in dataset_variables: + _set_variable(variables=variables, new_variable=var) + if model_job_config.algorithm_uuid: + algorithm = AlgorithmFetcher(self.project_id).get_algorithm(model_job_config.algorithm_uuid) + parameter = model_job_config.algorithm_parameter + algo_dict = { + 'algorithmId': algorithm.id, + 'algorithmUuid': algorithm.uuid, + 'algorithmProjectId': algorithm.algorithm_project_id, + 'algorithmProjectUuid': 
algorithm.algorithm_project_uuid, + 'participantId': algorithm.participant_id, + 'path': algorithm.path, + 'config': to_dict(parameter)['variables'] + } + variables = config.job_definitions[0].variables + for variable in variables: + if variable.name == 'algorithm': + set_value(variable=variable, typed_value=algo_dict) + if model_id is not None: + model: Model = self._session.query(Model).get(model_id) + _set_variable(variables=variables, + new_variable=make_variable(name='load_model_name', typed_value=model.job_name())) + return config + + # TODO(hangweiqiang): remove this function after ModelJobConfig is used + def set_dataset(self, config: WorkflowDefinition, dataset_id: Optional[int], data_batch_id: Optional[int] = None): + variables = config.job_definitions[0].variables + dataset_variables = self.get_dataset_variables(dataset_id=dataset_id, data_batch_id=data_batch_id) + names = {variable.name for variable in variables} + for variable in dataset_variables: + # check existence of variable in config for backward compatibility + if variable.name in names: + _set_variable(variables=variables, new_variable=variable) + else: + logging.info(f'[set_dataset] variable {variable.name} is not found in config') diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_configer_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_configer_test.py new file mode 100644 index 000000000..187da1a86 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_configer_test.py @@ -0,0 +1,324 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import json +import tempfile +import unittest +from datetime import datetime +from envs import Envs +from unittest.mock import patch +from google.protobuf.struct_pb2 import Value + +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.mmgr.models import Model, ModelJobType +from fedlearner_webconsole.mmgr.model_job_configer import ModelJobConfiger, get_sys_template_id, set_load_model_name +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, AlgorithmType +from fedlearner_webconsole.algorithm.utils import algorithm_cache_path +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobState, DatasetJobKind, DatasetType, \ + DatasetJobStage, DataBatch +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobConfig +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmParameter, AlgorithmVariable +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.utils.proto import to_dict, remove_secrets + + +def _get_config() -> WorkflowDefinition: + return WorkflowDefinition(job_definitions=[ + JobDefinition(variables=[ + Variable(name='data_source'), + Variable(name='data_path'), + Variable(name='file_wildcard'), + ]) + ]) + + +def _set_config(config: WorkflowDefinition, name: str, value: str): + for var in config.job_definitions[0].variables: + if var.name == name: + var.value = value + var.typed_value.MergeFrom(Value(string_value=value)) + var.value_type = Variable.ValueType.STRING + + +class UtilsTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + session.commit() + + def test_get_template(self): + with db.session_scope() as session: + template_id = get_sys_template_id(session, + AlgorithmType.TREE_VERTICAL, + model_job_type=ModelJobType.TRAINING) + self.assertEqual(session.query(WorkflowTemplate).get(template_id).name, 'sys-preset-tree-model') + template_id = get_sys_template_id(session, AlgorithmType.NN_VERTICAL, model_job_type=ModelJobType.TRAINING) + self.assertEqual(session.query(WorkflowTemplate).get(template_id).name, 'sys-preset-nn-model') + template_id = get_sys_template_id(session, + AlgorithmType.NN_HORIZONTAL, + model_job_type=ModelJobType.TRAINING) + self.assertEqual(session.query(WorkflowTemplate).get(template_id).name, 'sys-preset-nn-horizontal-model') + template_id = get_sys_template_id(session, + AlgorithmType.NN_HORIZONTAL, + model_job_type=ModelJobType.EVALUATION) + self.assertEqual( + session.query(WorkflowTemplate).get(template_id).name, 'sys-preset-nn-horizontal-eval-model') + + def test_set_load_model_name(self): + config = ModelJobConfig(algorithm_uuid='uuid', variables=[Variable(name='load_model_name')]) + set_load_model_name(config, 'test-model') + expected_config = ModelJobConfig(algorithm_uuid='uuid', + variables=[ + Variable(name='load_model_name', + 
value='test-model', + typed_value=Value(string_value='test-model'), + value_type=Variable.ValueType.STRING) + ]) + self.assertEqual(config, expected_config) + config = ModelJobConfig(algorithm_uuid='uuid', variables=[Variable(name='test')]) + set_load_model_name(config, 'test-model') + expected_config = ModelJobConfig(algorithm_uuid='uuid', variables=[Variable(name='test')]) + self.assertEqual(config, expected_config) + + +class ModelJobConfigerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + project = Project(id=1, name='project') + participant = Participant(id=1, name='part', domain_name='test') + project.participants = [participant] + dataset_job = DatasetJob(id=1, + name='data-join', + uuid='dataset-job-uuid', + project_id=1, + state=DatasetJobState.SUCCEEDED, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + input_dataset_id=1, + output_dataset_id=2, + workflow_id=1) + dataset_job_stage = DatasetJobStage(id=1, + name='data-join', + uuid='dataset-job-stage-uuid', + project_id=1, + state=DatasetJobState.SUCCEEDED, + dataset_job_id=1, + data_batch_id=1) + workflow = Workflow(id=1, uuid='workflow-uuid', name='workflow') + dataset = Dataset(id=2, uuid='uuid', name='datasetjob', dataset_type=DatasetType.PSI, path='/data/dataset/haha') + data_batch = DataBatch(id=1, + name='20221213', + dataset_id=1, + path='/data/dataset/haha/batch/20221213', + event_time=datetime(2022, 12, 13, 16, 37, 37)) + algorithm_project = AlgorithmProject(id=1, + name='algo-project', + uuid='uuid', + type=AlgorithmType.NN_VERTICAL, + path='/data/algorithm_project/uuid') + algorithm = Algorithm(id=1, + name='algo', + uuid='uuid', + type=AlgorithmType.NN_VERTICAL, + path='/data/algorithm/uuid', + algorithm_project_id=1) + parameter = AlgorithmParameter() + parameter.variables.extend([AlgorithmVariable(name='EMBED_SIZE', value='128')]) + algorithm.set_parameter(parameter=parameter) + job = Job(id=1, + name='uuid-train-job', + workflow_id=1, + project_id=1, + job_type=JobType.NN_MODEL_TRANINING, + state=JobState.COMPLETED) + model = Model(id=2, name='model', job_id=1) + with db.session_scope() as session: + _insert_or_update_templates(session) + session.add_all([ + project, participant, dataset_job, dataset_job_stage, dataset, algorithm, algorithm_project, model, job, + workflow, data_batch + ]) + session.commit() + + def test_get_config(self): + with db.session_scope() as session: + parameter = AlgorithmParameter(variables=[AlgorithmVariable(name='EMBED_SIZE', value='256')]) + model_job_config = ModelJobConfig( + algorithm_uuid='uuid', + algorithm_parameter=parameter, + variables=[Variable(name='sparse_estimator', typed_value=Value(string_value='true'))]) + configer = ModelJobConfiger(session, ModelJobType.TRAINING, AlgorithmType.NN_VERTICAL, 1) + config = configer.get_config(dataset_id=2, model_id=2, model_job_config=model_job_config) + self.assertEqual(config.job_definitions[0].job_type, JobDefinition.JobType.NN_MODEL_TRANINING) + self.assertEqual(len(config.job_definitions), 1) + var_dict = {var.name: var for var in config.job_definitions[0].variables} + self.assertEqual(var_dict['load_model_name'].typed_value, Value(string_value='uuid-train-job')) + self.assertEqual(var_dict['data_source'].typed_value, + Value(string_value='dataset-job-stage-uuid-psi-data-join-job')) + self.assertEqual(var_dict['mode'].typed_value, Value(string_value='train')) + self.assertEqual(var_dict['sparse_estimator'].typed_value, Value(string_value='true')) + self.assertEqual( + to_dict(var_dict['algorithm'].typed_value), { + 
+                    'algorithmId': 1.0,
+                    'algorithmUuid': 'uuid',
+                    'algorithmProjectUuid': 'uuid',
+                    'config': [{
+                        'comment': '',
+                        'display_name': '',
+                        'name': 'EMBED_SIZE',
+                        'required': False,
+                        'value': '256',
+                        'value_type': 'STRING'
+                    }],
+                    'participantId': 0.0,
+                    'path': '/data/algorithm/uuid',
+                    'algorithmProjectId': 1.0
+                })
+            self.assertEqual(json.loads(var_dict['algorithm'].widget_schema), {
+                'component': 'AlgorithmSelect',
+                'required': True,
+                'tag': 'OPERATING_PARAM'
+            })
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm_files')
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_client.ResourceServiceClient.get_algorithm')
+    def test_get_config_when_algorithm_from_participant(self, mock_get_algorithm, mock_get_algorithm_files):
+        with db.session_scope() as session:
+            algo = session.query(Algorithm).get(1).to_proto()
+            algo.uuid = 'uuid-from-participant'
+            mock_get_algorithm.return_value = remove_secrets(algo)
+            parameter = AlgorithmParameter(variables=[AlgorithmVariable(name='EMBED_SIZE', value='256')])
+            model_job_config = ModelJobConfig(
+                algorithm_uuid='uuid-from-participant',
+                algorithm_parameter=parameter,
+                variables=[Variable(name='sparse_estimator', typed_value=Value(string_value='true'))])
+            configer = ModelJobConfiger(session, ModelJobType.TRAINING, AlgorithmType.NN_VERTICAL, 1)
+            with tempfile.TemporaryDirectory() as temp_dir:
+                Envs.STORAGE_ROOT = temp_dir
+                config = configer.get_config(dataset_id=2, model_id=2, model_job_config=model_job_config)
+                var_dict = {var.name: var for var in config.job_definitions[0].variables}
+                self.assertEqual(
+                    to_dict(var_dict['algorithm'].typed_value), {
+                        'algorithmId': 0,
+                        'algorithmUuid': 'uuid-from-participant',
+                        'algorithmProjectUuid': 'uuid',
+                        'config': [{
+                            'comment': '',
+                            'display_name': '',
+                            'name': 'EMBED_SIZE',
+                            'required': False,
+                            'value': '256',
+                            'value_type': 'STRING'
+                        }],
+                        'participantId': 1.0,
+                        'path': algorithm_cache_path(Envs.STORAGE_ROOT, 'uuid-from-participant'),
+                        'algorithmProjectId': 0
+                    })
+
+    def test_get_dataset_variables(self):
+        with db.session_scope() as session:
+            # test for config RSA dataset for tree
+            configer = ModelJobConfiger(session=session,
+                                        model_job_type=ModelJobType.TRAINING,
+                                        algorithm_type=AlgorithmType.TREE_VERTICAL,
+                                        project_id=1)
+            variables = configer.get_dataset_variables(dataset_id=2)
+            expected_variables = [
+                make_variable(name='data_source', typed_value='dataset-job-stage-uuid-psi-data-join-job'),
+                make_variable(name='data_path', typed_value=''),
+                make_variable(name='file_wildcard', typed_value='*.data'),
+            ]
+            self.assertEqual(variables, expected_variables)
+            # test for config RSA dataset for NN
+            config = _get_config()
+            configer = ModelJobConfiger(session=session,
+                                        model_job_type=ModelJobType.TRAINING,
+                                        algorithm_type=AlgorithmType.NN_VERTICAL,
+                                        project_id=1)
+            variables = configer.get_dataset_variables(dataset_id=2)
+            expected_variables = [
+                make_variable(name='data_source', typed_value='dataset-job-stage-uuid-psi-data-join-job'),
+                make_variable(name='data_path', typed_value='')
+            ]
+            self.assertEqual(variables, expected_variables)
+            # test for config RSA dataset when dataset_job_stage is None
+            dataset_job_stage = session.query(DatasetJobStage).get(1)
+            dataset_job_stage.dataset_job_id = 2
+            session.flush()
+            configer = ModelJobConfiger(session=session,
+                                        model_job_type=ModelJobType.TRAINING,
+                                        algorithm_type=AlgorithmType.NN_VERTICAL,
+                                        project_id=1)
+            variables = configer.get_dataset_variables(dataset_id=2)
expected_variables = [ + make_variable(name='data_source', typed_value='workflow-uuid-psi-data-join-job'), + make_variable(name='data_path', typed_value='') + ] + self.assertEqual(variables, expected_variables) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + dataset_job.kind = DatasetJobKind.OT_PSI_DATA_JOIN + session.commit() + with db.session_scope() as session: + # test for config OT dataset for tree + configer = ModelJobConfiger(session=session, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.TREE_VERTICAL, + project_id=1) + variables = configer.get_dataset_variables(dataset_id=2) + expected_variables = [ + make_variable(name='data_source', typed_value=''), + make_variable(name='data_path', typed_value='/data/dataset/haha/batch'), + make_variable(name='file_wildcard', typed_value='**/part*'), + ] + self.assertEqual(variables, expected_variables) + # test for config OT dataset for nn + configer = ModelJobConfiger(session=session, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + project_id=1) + variables = configer.get_dataset_variables(dataset_id=2) + expected_variables = [ + make_variable(name='data_source', typed_value=''), + make_variable(name='data_path', typed_value='/data/dataset/haha/batch'), + ] + self.assertEqual(variables, expected_variables) + # test when data_batch_id is set + configer = ModelJobConfiger(session=session, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + project_id=1) + variables = configer.get_dataset_variables(dataset_id=2, data_batch_id=1) + expected_variables = [ + make_variable(name='data_source', typed_value=''), + make_variable(name='data_path', typed_value='/data/dataset/haha/batch/20221213') + ] + self.assertEqual(variables, expected_variables) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_group_apis.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_group_apis.py new file mode 100644 index 000000000..83f2daa19 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_group_apis.py @@ -0,0 +1,687 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
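# ---------------------------------------------------------------------------
# [Editor's sketch] The request schemas in this module share one marshmallow
# pattern: validate an incoming string against the names of an enum, then
# convert it to the enum member in a @post_load hook. A minimal standalone
# version of the pattern (enum and schema names here are illustrative only):

import enum as _enum

from marshmallow import Schema as _Schema, fields as _fields, post_load as _post_load, validate as _validate


class _SketchAlgorithmType(_enum.Enum):
    TREE_VERTICAL = 1
    NN_VERTICAL = 2


class _SketchParams(_Schema):
    algorithm_type = _fields.Str(required=True,
                                 validate=_validate.OneOf([t.name for t in _SketchAlgorithmType]))

    @_post_load()
    def make(self, data, **kwargs):
        data['algorithm_type'] = _SketchAlgorithmType[data['algorithm_type']]  # name -> member
        return data

# _SketchParams().load({'algorithm_type': 'NN_VERTICAL'}) returns
# {'algorithm_type': <_SketchAlgorithmType.NN_VERTICAL: 2>}; an unknown name
# raises a marshmallow ValidationError before the handler runs.
# ---------------------------------------------------------------------------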
+# + +from http import HTTPStatus +from flask_restful import Resource +from typing import Optional +from webargs.flaskparser import use_args, use_kwargs +from marshmallow import Schema, post_load, fields, validate +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import ResourceConflictException, InternalException, InvalidArgumentException +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp, SimpleExpression +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, AlgorithmProjectList +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.proto.review_pb2 import TicketDetails, TicketType +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder +from fedlearner_webconsole.utils.sorting import SorterBuilder, SortExpression, parse_expression +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.workflow_template.service import dict_to_workflow_definition +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.mmgr.controller import CreateModelJobGroup, ModelJobGroupController +from fedlearner_webconsole.mmgr.models import ModelJobGroup, ModelJobType, ModelJobRole, GroupCreateStatus, \ + GroupAutoUpdateStatus +from fedlearner_webconsole.mmgr.service import ModelJobGroupService, get_model_job_group, get_participant,\ + get_dataset, get_project, get_algorithm, ModelJobService +from fedlearner_webconsole.mmgr.model_job_configer import ModelJobConfiger +from fedlearner_webconsole.algorithm.models import AlgorithmType, AlgorithmProject +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator +from fedlearner_webconsole.utils.flask_utils import FilterExpField, make_flask_response, get_current_user +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper +from fedlearner_webconsole.swagger.models import schema_manager + + +class CreateModelJobGroupParams(Schema): + name = fields.Str(required=True) + dataset_id = fields.Integer(required=False, load_default=None) + algorithm_type = fields.Str(required=True, + validate=validate.OneOf([ + AlgorithmType.TREE_VERTICAL.name, AlgorithmType.NN_VERTICAL.name, + AlgorithmType.NN_HORIZONTAL.name + ])) + + @post_load() + def make(self, data, **kwargs): + data['algorithm_type'] = AlgorithmType[data['algorithm_type']] + return data + + +class CreateModelJobGroupParamsV2(Schema): + name = fields.Str(required=True) + dataset_id = fields.Integer(required=True) + algorithm_type = fields.Str(required=True, + validate=validate.OneOf([ + AlgorithmType.TREE_VERTICAL.name, AlgorithmType.NN_VERTICAL.name, + AlgorithmType.NN_HORIZONTAL.name + ])) + algorithm_project_list = fields.Dict(required=False, load_default=None) + comment = fields.Str(required=False, load_default=None) + + @post_load() + def 
make(self, data, **kwargs): + data['algorithm_type'] = AlgorithmType[data['algorithm_type']] + if data['algorithm_project_list'] is not None: + data['algorithm_project_list'] = ParseDict(data['algorithm_project_list'], AlgorithmProjectList()) + return data + + +class ConfigModelJobGroupParams(Schema): + authorized = fields.Boolean(required=False, load_default=None) + algorithm_id = fields.Integer(required=False, load_default=None) + config = fields.Dict(required=False, load_default=None) + cron_config = fields.String(required=False, load_default=None) + comment = fields.Str(required=False, load_default=None) + # TODO(gezhengqiang): delete dataset_id + dataset_id = fields.Integer(required=False, load_default=None) + global_config = fields.Dict(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + if data['config'] is not None: + data['config'] = dict_to_workflow_definition(data['config']) + if data['global_config'] is not None: + data['global_config'] = ParseDict(data['global_config'], ModelJobGlobalConfig()) + return data + + +class ConfigPeerModelJobGroup(Schema): + config = fields.Dict(required=False, load_default=None) + global_config = fields.Dict(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + if data['config'] is None and data['global_config'] is None: + raise InvalidArgumentException('either config or global config must be set') + if data['config'] is not None: + data['config'] = dict_to_workflow_definition(data['config']) + if data['global_config'] is not None: + data['global_config'] = ParseDict(data['global_config'], ModelJobGlobalConfig()) + return data + + +def _build_group_configured_query(exp: SimpleExpression): + if exp.bool_value: + return ModelJobGroup.config.isnot(None) + return ModelJobGroup.config.is_(None) + + +class ModelJobGroupsApi(Resource): + + FILTER_FIELDS = { + 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + 'configured': SupportedField( + type=FieldType.BOOL, + ops={ + FilterOp.EQUAL: _build_group_configured_query, + }, + ), + 'role': SupportedField(type=FieldType.STRING, ops={ + FilterOp.IN: None, + }), + 'algorithm_type': SupportedField(type=FieldType.STRING, ops={ + FilterOp.IN: None, + }), + } + + SORTER_FIELDS = ['created_at'] + + def __init__(self): + self._filter_builder = FilterBuilder(model_class=ModelJobGroup, supported_fields=self.FILTER_FIELDS) + self._sorter_builder = SorterBuilder(model_class=ModelJobGroup, supported_fields=self.SORTER_FIELDS) + + @credentials_required + @use_kwargs( + { + 'page': fields.Integer(required=False, load_default=None), + 'page_size': fields.Integer(required=False, load_default=None), + 'filter_exp': FilterExpField(data_key='filter', required=False, load_default=None), + 'sorter_exp': fields.String(required=False, load_default=None, data_key='order_by'), + }, + location='query') + def get( + self, + page: Optional[int], + page_size: Optional[int], + filter_exp: Optional[FilterExpression], + sorter_exp: Optional[str], + project_id: int, + ): + """Get the list of model job groups + --- + tags: + - mmgr + description: get the list of model job groups + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: filter + schema: + type: string + responses: + 200: + description: the list of model job groups + content: + application/json: + schema: + type: array + 
items: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobGroupRef' + """ + with db.session_scope() as session: + # to filter out groups created by old api determined by uuid + query = session.query(ModelJobGroup).filter(ModelJobGroup.uuid.isnot(None)) + if project_id: + query = query.filter_by(project_id=project_id) + if filter_exp: + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + try: + if sorter_exp is not None: + sorter_exp = parse_expression(sorter_exp) + else: + sorter_exp = SortExpression(field='created_at', is_asc=False) + query = self._sorter_builder.build_query(query, sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + pagination = paginate(query, page, page_size) + for group in pagination.get_items(): + if len(group.model_jobs) != 0: + ModelJobService(session).update_model_job_status(group.model_jobs[0]) + data = [d.to_ref() for d in pagination.get_items()] + session.commit() + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, op_type=Event.OperationType.CREATE) + @use_args(CreateModelJobGroupParams(), location='json') + def post(self, params: dict, project_id: int): + """Create the model job group + --- + tags: + - mmgr + description: create the model job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/CreateModelJobGroupParams' + responses: + 201: + description: the detail of the model job group + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobGroupPb' + 409: + description: the group already exists + 500: + description: error exists when creating model job by 2PC + """ + name = params['name'] + dataset_id = params['dataset_id'] + with db.session_scope() as session: + get_project(project_id, session) + if dataset_id: + get_dataset(dataset_id, session) + group = session.query(ModelJobGroup).filter_by(name=name).first() + if group is not None: + raise ResourceConflictException(f'group {name} already exists') + pure_domain_name = SettingService.get_system_info().pure_domain_name + model_job_group_uuid = resource_uuid() + group = ModelJobGroup(name=name, + uuid=model_job_group_uuid, + project_id=project_id, + dataset_id=dataset_id, + algorithm_type=params['algorithm_type'], + role=ModelJobRole.COORDINATOR, + creator_username=get_current_user().username, + authorized=True, + auth_status=AuthStatus.AUTHORIZED) + participants = ParticipantService(session).get_participants_by_project(project_id) + participants_info = ParticipantsInfo(participants_map={ + p.pure_domain_name(): ParticipantInfo(auth_status=AuthStatus.PENDING.name) for p in participants + }) + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.AUTHORIZED.name + group.set_participants_info(participants_info) + session.add(group) + ticket_helper = get_ticket_helper(session) + ticket_helper.create_ticket(TicketType.CREATE_MODELJOB_GROUP, TicketDetails(uuid=group.uuid)) + session.commit() + succeeded, msg = CreateModelJobGroup().run(project_id=project_id, + name=name, + algorithm_type=params['algorithm_type'], + dataset_id=dataset_id, + coordinator_pure_domain_name=pure_domain_name, 
+                                                   model_job_group_uuid=model_job_group_uuid)
+        if not succeeded:
+            raise InternalException(f'failed to create model job group by 2PC with message: {msg}')
+        with db.session_scope() as session:
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(name=name).first()
+            group.status = GroupCreateStatus.SUCCEEDED
+            session.commit()
+            return make_flask_response(data=group.to_proto(), status=HTTPStatus.CREATED)
+
+
+class ModelJobGroupsApiV2(Resource):
+
+    @input_validator
+    @credentials_required
+    @emits_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, op_type=Event.OperationType.CREATE)
+    @use_args(CreateModelJobGroupParamsV2(), location='json')
+    def post(self, params: dict, project_id: int):
+        """Create the model job group
+        ---
+        tags:
+          - mmgr
+        description: create the model job group
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+          required: true
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/CreateModelJobGroupParamsV2'
+        responses:
+          201:
+            description: the detail of the model job group
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobGroupPb'
+          409:
+            description: the group already exists
+        """
+        name = params['name']
+        dataset_id = params['dataset_id']
+        algorithm_type = params['algorithm_type']
+        algorithm_project_list = AlgorithmProjectList()
+        if params['algorithm_project_list']:
+            algorithm_project_list = params['algorithm_project_list']
+        with db.session_scope() as session:
+            get_project(project_id, session)
+            get_dataset(dataset_id, session)
+            group = session.query(ModelJobGroup).filter_by(name=name).first()
+            if group is not None:
+                raise ResourceConflictException(f'group {name} already exists')
+            model_job_group_uuid = resource_uuid()
+            group = ModelJobGroup(name=name,
+                                  uuid=model_job_group_uuid,
+                                  project_id=project_id,
+                                  dataset_id=dataset_id,
+                                  algorithm_type=algorithm_type,
+                                  role=ModelJobRole.COORDINATOR,
+                                  creator_username=get_current_user().username,
+                                  comment=params['comment'])
+            # make configured true
+            group.set_config()
+            pure_domain_name = SettingService.get_system_info().pure_domain_name
+            # set algorithm project uuid map
+            group.set_algorithm_project_uuid_list(algorithm_project_list)
+            # set algorithm project id
+            algorithm_project_uuid = algorithm_project_list.algorithm_projects.get(pure_domain_name)
+            if algorithm_project_uuid is None and algorithm_type not in [AlgorithmType.TREE_VERTICAL]:
+                raise Exception(f'algorithm project uuid must be given if algorithm type is {algorithm_type.name}')
+            if algorithm_project_uuid is not None:
+                algorithm_project = session.query(AlgorithmProject).filter_by(uuid=algorithm_project_uuid).first()
+                if algorithm_project is not None:
+                    group.algorithm_project_id = algorithm_project.id
+            session.add(group)
+            ModelJobGroupService(session).initialize_auth_status(group)
+            ticket_helper = get_ticket_helper(session)
+            ticket_helper.create_ticket(TicketType.CREATE_MODELJOB_GROUP, TicketDetails(uuid=group.uuid))
+            if group.ticket_status in [TicketStatus.APPROVED]:
+                ModelJobGroupController(
+                    session=session,
+                    project_id=project_id).create_model_job_group_for_participants(model_job_group_id=group.id)
+            session.commit()
+            return make_flask_response(group.to_proto(), status=HTTPStatus.CREATED)
+
+
+class ModelJobGroupApi(Resource):
+
+    @credentials_required
+    def get(self, project_id: int, group_id: int):
+        """Get the model job group
+        ---
+        tags:
+          - mmgr
+        description: get the model job group
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+          required: true
+        - in: path
+          name: group_id
+          schema:
+            type: integer
+          required: true
+        responses:
+          200:
+            description: detail of the model job group
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobGroupPb'
+        """
+        with db.session_scope() as session:
+            group = get_model_job_group(project_id=project_id, group_id=group_id, session=session)
+            ModelJobGroupController(session, project_id).update_participants_auth_status(group)
+            return make_flask_response(group.to_proto())
+
+    @input_validator
+    @credentials_required
+    @emits_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, op_type=Event.OperationType.UPDATE)
+    @use_args(ConfigModelJobGroupParams(), location='json')
+    def put(self, params: dict, project_id: int, group_id: int):
+        """Update the model job group
+        ---
+        tags:
+          - mmgr
+        description: update the model job group
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+          required: true
+        - in: path
+          name: group_id
+          schema:
+            type: integer
+          required: true
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/ConfigModelJobGroupParams'
+        responses:
+          200:
+            description: update the model job group successfully
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobGroupPb'
+          400:
+            description: algorithm is not found or algorithm type mismatch between group and algorithms
+        """
+        with db.session_scope() as session:
+            group = get_model_job_group(project_id=project_id, group_id=group_id, session=session)
+            if params['authorized'] is not None:
+                group.authorized = params['authorized']
+                if group.authorized:
+                    group.auth_status = AuthStatus.AUTHORIZED
+                    group.set_config()
+                else:
+                    group.auth_status = AuthStatus.PENDING
+                ModelJobGroupController(session, project_id).inform_auth_status_to_participants(group)
+            if params['algorithm_id'] is not None:
+                algorithm = get_algorithm(project_id=project_id, algorithm_id=params['algorithm_id'], session=session)
+                if algorithm is None:
+                    raise InvalidArgumentException(f'algorithm {params["algorithm_id"]} is not found')
+                if algorithm.type != group.algorithm_type:
+                    raise InvalidArgumentException(f'algorithm type mismatch between group and algorithm: '
+                                                   f'{group.algorithm_type.name} vs {algorithm.type.name}')
+                group.algorithm_id = params['algorithm_id']
+                group.algorithm_project_id = algorithm.algorithm_project_id
+            if params['dataset_id'] is not None:
+                group.dataset_id = params['dataset_id']
+            if params['config'] is not None:
+                configer = ModelJobConfiger(session=session,
+                                            model_job_type=ModelJobType.TRAINING,
+                                            algorithm_type=group.algorithm_type,
+                                            project_id=project_id)
+                configer.set_dataset(config=params['config'], dataset_id=group.dataset_id)
+                group.set_config(params['config'])
+            if params['global_config'] is not None:
+                configer = ModelJobConfiger(session=session,
+                                            model_job_type=ModelJobType.TRAINING,
+                                            algorithm_type=group.algorithm_type,
+                                            project_id=project_id)
+                domain_name = SettingService.get_system_info().pure_domain_name
+                config = configer.get_config(dataset_id=group.dataset_id,
+                                             model_id=None,
+                                             model_job_config=params['global_config'].global_config[domain_name])
+                group.set_config(config)
+            if params['comment'] is not None:
+                group.comment = params['comment']
+            if group.creator_username is None:
+                group.creator_username = get_current_user().username
+            if params['cron_config'] is not None:
ModelJobGroupService(session).update_cronjob_config(group=group, cron_config=params['cron_config']) + session.commit() + return make_flask_response(data=group.to_proto()) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, op_type=Event.OperationType.DELETE) + def delete(self, project_id: int, group_id: int): + """Delete the model job group + --- + tags: + - mmgr + description: delete the model job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + responses: + 204: + description: delete the model job group successfully + 409: + description: group cannot be deleted due to some model job is ready or running + """ + with db.session_scope() as session: + group = get_model_job_group(project_id=project_id, group_id=group_id, session=session) + if not group.is_deletable(): + raise ResourceConflictException('group cannot be deleted due to some model job is ready or running') + ModelJobGroupService(session).delete(group.id) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class PeerModelJobGroupApi(Resource): + + @credentials_required + def get(self, project_id: int, group_id: int, participant_id: int): + """Get the peer model job group + --- + tags: + - mmgr + description: Get the peer model job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + - in: path + name: participant_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the model job group + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.GetModelJobGroupResponse' + """ + with db.session_scope() as session: + group = get_model_job_group(project_id=project_id, group_id=group_id, session=session) + resp = ModelJobGroupController(session, project_id).get_model_job_group_from_participant( + participant_id=participant_id, model_job_group_uuid=group.uuid) + return make_flask_response(resp, status=HTTPStatus.OK) + + @credentials_required + @use_args(ConfigPeerModelJobGroup(), location='json') + def patch(self, params: dict, project_id: int, group_id: int, participant_id: int): + """Patch a peer model job group + --- + tags: + - mmgr + description: patch a peer model job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + - in: path + name: participant_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/ConfigPeerModelJobGroup' + responses: + 200: + description: update the peer model job group successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.UpdateModelJobGroupResponse' + """ + config = params['config'] + global_config = params['global_config'] + with db.session_scope() as session: + group = get_model_job_group(project_id=project_id, group_id=group_id, session=session) + project = group.project + participant = get_participant(participant_id, project) + client = RpcClient.from_project_and_participant(project.name, project.token, participant.domain_name) + if global_config is not None: + configer = ModelJobConfiger(session=session, + model_job_type=ModelJobType.TRAINING, + 
+                                            algorithm_type=group.algorithm_type,
+                                            project_id=project_id)
+                domain_name = participant.pure_domain_name()
+                config = configer.get_config(dataset_id=group.dataset_id,
+                                             model_id=None,
+                                             model_job_config=global_config.global_config[domain_name])
+            resp = client.update_model_job_group(model_job_group_uuid=group.uuid, config=config)
+            return make_flask_response(resp, status=HTTPStatus.OK)
+
+
+class ModelJobGroupStopAutoUpdateApi(Resource):
+
+    @input_validator
+    @credentials_required
+    @emits_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, op_type=Event.OperationType.STOP)
+    def post(self, project_id: int, group_id: int):
+        """Stop triggering auto-update model jobs in this model job group
+        ---
+        tags:
+          - mmgr
+        description: stop auto update for the model job group
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+          required: true
+        - in: path
+          name: group_id
+          schema:
+            type: integer
+          required: true
+        responses:
+          200:
+            description: stop the auto update model job successfully
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.ModelJobGroupPb'
+        """
+        with db.session_scope() as session:
+            group: ModelJobGroup = get_model_job_group(project_id=project_id, group_id=group_id, session=session)
+            group.auto_update_status = GroupAutoUpdateStatus.STOPPED
+            ModelJobGroupController(session=session, project_id=project_id).update_participants_model_job_group(
+                uuid=group.uuid, auto_update_status=group.auto_update_status)
+            session.commit()
+            return make_flask_response(data=group.to_proto(), status=HTTPStatus.OK)
+
+
+def initialize_mmgr_model_job_group_apis(api):
+    api.add_resource(ModelJobGroupsApi, '/projects/<int:project_id>/model_job_groups')
+    api.add_resource(ModelJobGroupsApiV2, '/projects/<int:project_id>/model_job_groups_v2')
+    api.add_resource(ModelJobGroupApi, '/projects/<int:project_id>/model_job_groups/<int:group_id>')
+    api.add_resource(ModelJobGroupStopAutoUpdateApi,
+                     '/projects/<int:project_id>/model_job_groups/<int:group_id>:stop_auto_update')
+    api.add_resource(PeerModelJobGroupApi,
+                     '/projects/<int:project_id>/model_job_groups/<int:group_id>/peers/<int:participant_id>')
+
+    schema_manager.append(CreateModelJobGroupParams)
+    schema_manager.append(CreateModelJobGroupParamsV2)
+    schema_manager.append(ConfigModelJobGroupParams)
+    schema_manager.append(ConfigPeerModelJobGroup)
diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_group_apis_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_group_apis_test.py
new file mode 100644
index 000000000..1dcd4757b
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/mmgr/model_job_group_apis_test.py
@@ -0,0 +1,551 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
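# ---------------------------------------------------------------------------
# [Editor's sketch] The list endpoint tested below takes `filter` and
# `order_by` query parameters. Judging from the expressions exercised in this
# file, the grammar supports (name~="x") for CONTAIN, (field:["a","b"]) for
# IN, and (configured=false) for EQUAL, while sorters look like
# "created_at asc". A hypothetical client-side call (host and port assumed):

import urllib.parse

_base = 'http://localhost:8080/api/v2/projects/1/model_job_groups'
_query = urllib.parse.urlencode({
    'filter': '(algorithm_type:["NN_VERTICAL","NN_HORIZONTAL"])',  # IN expression
    'order_by': 'created_at asc',  # sorter expression
    'page': 1,
    'page_size': 10,
})
# GET f'{_base}?{_query}' would list NN groups, oldest first.
# ---------------------------------------------------------------------------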
+# + +import unittest +import urllib.parse +from http import HTTPStatus +from datetime import datetime +from unittest.mock import patch, Mock, ANY, MagicMock, call +from google.protobuf.empty_pb2 import Empty +from envs import Envs +from testing.common import BaseTestCase +from testing.fake_model_job_config import get_global_config, get_workflow_config + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.flask_utils import to_dict +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.participant.models import ProjectParticipant +from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobGroup, ModelJobType, ModelJobRole, \ + GroupAuthFrontendStatus, GroupAutoUpdateStatus, ModelJobStatus +from fedlearner_webconsole.algorithm.models import AlgorithmType, Algorithm, AlgorithmProject +from fedlearner_webconsole.proto.service_pb2 import UpdateModelJobGroupResponse +from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGroupPb, AlgorithmProjectList +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus + + +class ModelJobGroupsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='party', domain_name='fl-peer.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + algo_project = AlgorithmProject(id=1, name='algo') + algo = Algorithm(id=2, name='algo', algorithm_project_id=1) + session.add_all([project, algo, algo_project, participant, relationship]) + g1 = ModelJobGroup(id=1, + name='g1', + uuid='u1', + role=ModelJobRole.COORDINATOR, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_project_id=1, + algorithm_id=2, + project_id=1, + created_at=datetime(2021, 1, 1, 0, 0, 0)) + g1.set_config(get_workflow_config(ModelJobType.TRAINING)) + g2 = ModelJobGroup(name='g2', + uuid='u2', + project_id=1, + role=ModelJobRole.COORDINATOR, + algorithm_type=AlgorithmType.NN_HORIZONTAL, + created_at=datetime(2021, 1, 1, 0, 0, 1)) + g3 = ModelJobGroup(name='g3', + uuid='u3', + project_id=2, + role=ModelJobRole.PARTICIPANT, + algorithm_type=AlgorithmType.TREE_VERTICAL, + created_at=datetime(2021, 1, 1, 0, 0, 1)) + workflow = Workflow(id=1, name='workflow', state=WorkflowState.RUNNING) + model_job = ModelJob(id=1, group_id=1, status=ModelJobStatus.PENDING, workflow_id=1) + dataset = Dataset(name='dataset', uuid='dataset_uuid', is_published=True) + session.add_all([g1, g2, g3, dataset, workflow, model_job]) + session.commit() + + def test_get_groups(self): + resp = self.get_helper('/api/v2/projects/1/model_job_groups') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['name'], 'g2') + self.assertEqual(data[0]['configured'], False) + self.assertEqual(data[1]['name'], 'g1') + self.assertEqual(data[1]['configured'], True) + 
self.assertEqual(data[1]['latest_job_state'], 'RUNNING') + resp = self.get_helper('/api/v2/projects/0/model_job_groups') + data = self.get_response_data(resp) + self.assertEqual(len(data), 3) + resp = self.get_helper('/api/v2/projects/0/model_job_groups?filter=(configured%3Dfalse)') + data = self.get_response_data(resp) + self.assertEqual(sorted([d['name'] for d in data]), ['g2', 'g3']) + + def test_get_groups_by_filtering_expression(self): + filter_param = urllib.parse.quote('(algorithm_type:["NN_VERTICAL","NN_HORIZONTAL"])') + resp = self.get_helper(f'/api/v2/projects/0/model_job_groups?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g2', 'g1']) + filter_param = urllib.parse.quote('(algorithm_type:["NN_VERTICAL"])') + resp = self.get_helper(f'/api/v2/projects/0/model_job_groups?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g1']) + filter_param = urllib.parse.quote('(role:["COORDINATOR"])') + resp = self.get_helper(f'/api/v2/projects/0/model_job_groups?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g2', 'g1']) + filter_param = urllib.parse.quote('(name~="1")') + resp = self.get_helper(f'/api/v2/projects/0/model_job_groups?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g1']) + filter_param = urllib.parse.quote('created_at asc') + resp = self.get_helper(f'/api/v2/projects/0/model_job_groups?order_by={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g1', 'g2', 'g3']) + + # TODO(linfan): refactor transaction manager + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + @patch('fedlearner_webconsole.two_pc.model_job_group_creator.ModelJobGroupCreator.prepare') + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_post_model_job_group(self, mock_remote_twp_pc, mock_prepare, mock_system_info): + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + mock_prepare.return_value = True, '' + with db.session_scope() as session: + dataset_id = session.query(Dataset).filter_by(uuid='dataset_uuid').first().id + mock_remote_twp_pc.return_value = True, '' + resp = self.post_helper('/api/v2/projects/1/model_job_groups', + data={ + 'name': 'group', + 'algorithm_type': AlgorithmType.NN_VERTICAL.name, + 'dataset_id': dataset_id + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).filter_by(name='group').first() + self.assertEqual(group.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(group.role, ModelJobRole.COORDINATOR) + self.assertIsNone(group.coordinator_id) + self.assertEqual(group.creator_username, 'ada') + self.assertEqual( + group.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'peer': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.PART_AUTH_PENDING) + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_post_model_job_group_failed(self, mock_remote_twp_pc): + mock_remote_twp_pc.return_value = True, '' + resp = 
self.post_helper('/api/v2/projects/1/model_job_groups', + data={ + 'name': 'group', + 'algorithm_type': AlgorithmType.NN_VERTICAL.name, + 'dataset_id': -1 + }) + # fail due to dataset is not found + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + with db.session_scope() as session: + dataset = session.query(Dataset).filter_by(uuid='dataset_uuid').first() + dataset.is_published = False + session.add(dataset) + session.commit() + resp = self.post_helper('/api/v2/projects/1/model_job_groups', + data={ + 'name': 'group', + 'algorithm_type': AlgorithmType.NN_VERTICAL.name, + 'dataset_id': dataset.id + }) + # fail due to dataset is not published + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + + +class ModelJobGroupsApiV2Test(BaseTestCase): + + def setUp(self): + super().setUp() + Envs.SYSTEM_INFO = '{"domain_name": "fl-test.com"}' + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='party', domain_name='fl-peer.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + algo_project = AlgorithmProject(id=1, name='algo') + algo = Algorithm(id=2, name='algo', algorithm_project_id=1) + dataset = Dataset(id=1, name='dataset', uuid='dataset_uuid', is_published=True) + session.add_all([project, algo, algo_project, participant, relationship, dataset]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job_group') + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_post_model_job(self, mock_system_info, mock_client): + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'uuid-test' + algorithm_project_list.algorithm_projects['peer'] = 'uuid-peer' + resp = self.post_helper('/api/v2/projects/1/model_job_groups_v2', + data={ + 'name': 'group', + 'dataset_id': 1, + 'algorithm_type': AlgorithmType.NN_VERTICAL.name, + 'algorithm_project_list': to_dict(algorithm_project_list), + 'comment': 'comment' + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + mock_client.assert_called() + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).filter_by(name='group').first() + self.assertEqual(group.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(group.role, ModelJobRole.COORDINATOR) + self.assertEqual(group.dataset.uuid, 'dataset_uuid') + self.assertIsNone(group.coordinator_id) + self.assertEqual(group.creator_username, 'ada') + self.assertEqual(group.comment, 'comment') + self.assertEqual( + group.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'peer': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + self.assertEqual(group.get_algorithm_project_uuid_list(), + AlgorithmProjectList(algorithm_projects={ + 'peer': 'uuid-peer', + 'test': 'uuid-test' + })) + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.PART_AUTH_PENDING) + self.assertEqual(group.to_proto().configured, True) + resp = self.post_helper('/api/v2/projects/1/model_job_groups_v2', + data={ + 'name': 'new_group', + 'dataset_id': 1, + 'algorithm_type': AlgorithmType.TREE_VERTICAL.name, + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + group = 
session.query(ModelJobGroup).filter_by(name='new_group').first() + self.assertEqual(group.dataset.uuid, 'dataset_uuid') + algorithm_project_list = AlgorithmProjectList() + self.assertEqual(group.get_algorithm_project_uuid_list(), algorithm_project_list) + resp = self.post_helper('/api/v2/projects/1/model_job_groups_v2', + data={ + 'name': 'new_group', + 'dataset_id': 1, + 'algorithm_type': AlgorithmType.NN_VERTICAL.name, + }) + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + + +class ModelJobGroupApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Envs.SYSTEM_INFO = '{"domain_name": "fl-test.com"}' + with db.session_scope() as session: + algo_project = AlgorithmProject(id=123, name='algo_project', project_id=1) + dataset = Dataset(id=2, name='dataset') + algorithm = Algorithm(id=1, + name='algo', + algorithm_project_id=123, + type=AlgorithmType.NN_VERTICAL, + project_id=1) + group = ModelJobGroup(id=1, + name='group', + algorithm_type=AlgorithmType.NN_VERTICAL, + uuid='uuid', + creator_username='ada', + project_id=1, + created_at=datetime(2022, 5, 6, 0, 0, 0), + updated_at=datetime(2022, 5, 6, 0, 0, 0)) + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part1', domain_name='fl-demo1.com') + participant2 = Participant(id=2, name='part2', domain_name='fl-demo2.com') + pro_part1 = ProjectParticipant(id=1, project_id=1, participant_id=1) + pro_part2 = ProjectParticipant(id=2, project_id=1, participant_id=2) + participants_info = ParticipantsInfo() + participants_info.participants_map['test'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['demo1'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['demo2'].auth_status = AuthStatus.PENDING.name + group.set_participants_info(participants_info) + session.add_all( + [algo_project, algorithm, group, dataset, project, participant1, participant2, pro_part1, pro_part2]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_model_job_group') + def test_get_group(self, mock_client: MagicMock): + mock_client.side_effect = [ + ModelJobGroupPb(auth_status=AuthStatus.AUTHORIZED.name), + ModelJobGroupPb(auth_status=AuthStatus.AUTHORIZED.name) + ] + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + group.algorithm_project_id = 1 + group.algorithm_id = 2 + group.dataset_id = 2 + group.set_config(get_workflow_config(ModelJobType.TRAINING)) + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'uuid-test' + algorithm_project_list.algorithm_projects['demo1'] = 'uuid-demo1' + algorithm_project_list.algorithm_projects['demo2'] = 'uuid-demo2' + group.set_algorithm_project_uuid_list(algorithm_project_list) + group.comment = 'comment' + group.latest_version = 1 + model_job = ModelJob(name='job-1', group_id=1) + session.add(model_job) + session.commit() + resp = self.get_helper('/api/v2/projects/1/model_job_groups/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.maxDiff = None + self.assertResponseDataEqual(resp, { + 'id': 1, + 'uuid': 'uuid', + 'name': 'group', + 'project_id': 1, + 'role': 'PARTICIPANT', + 'creator_username': 'ada', + 'coordinator_id': 0, + 'authorized': False, + 'auto_update_status': 'INITIAL', + 'dataset_id': 2, + 'algorithm_type': 'NN_VERTICAL', + 'algorithm_project_id': 1, + 'algorithm_id': 2, + 'comment': 'comment', + 'cron_config': '', + 'configured': True, + 'latest_version': 1, + 
'config': to_dict(get_workflow_config(ModelJobType.TRAINING)), + 'latest_job_state': 'PENDING', + 'auth_frontend_status': 'SELF_AUTH_PENDING', + 'auth_status': 'PENDING', + 'created_at': 1651795200, + 'algorithm_project_uuid_list': { + 'algorithm_projects': { + 'test': 'uuid-test', + 'demo1': 'uuid-demo1', + 'demo2': 'uuid-demo2' + } + }, + 'participants_info': { + 'participants_map': { + 'test': { + 'auth_status': 'PENDING', + 'name': '', + 'role': '', + 'state': '', + 'type': '' + }, + 'demo1': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '' + }, + 'demo2': { + 'auth_status': 'AUTHORIZED', + 'name': '', + 'role': '', + 'state': '', + 'type': '' + } + } + }, + }, + ignore_fields=['model_jobs', 'updated_at', 'start_data_batch_id']) + data = self.get_response_data(resp) + self.assertEqual(len(data['model_jobs']), 1) + self.assertPartiallyEqual(data['model_jobs'][0], { + 'id': 1, + 'name': 'job-1', + 'role': 'PARTICIPANT', + 'model_job_type': 'UNSPECIFIED', + 'algorithm_type': 'UNSPECIFIED', + 'state': 'PENDING_ACCEPT', + 'group_id': 1, + 'status': 'PENDING', + 'uuid': '', + 'configured': False, + 'creator_username': '', + 'coordinator_id': 0, + 'version': 0, + 'project_id': 0, + 'started_at': 0, + 'stopped_at': 0, + 'metric_is_public': False, + 'algorithm_id': 0, + 'auth_status': 'PENDING', + 'auto_update': False, + 'auth_frontend_status': 'SELF_AUTH_PENDING', + 'participants_info': { + 'participants_map': {} + } + }, + ignore_fields=['created_at', 'updated_at']) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_model_job_group') + def test_put_model_job_group(self, mock_client: MagicMock): + mock_client.return_value = Empty() + config = get_workflow_config(ModelJobType.TRAINING) + resp = self.put_helper('/api/v2/projects/1/model_job_groups/1', + data={ + 'authorized': True, + 'algorithm_id': 1, + 'config': to_dict(config), + 'comment': 'comment' + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + self.assertTrue(group.authorized) + self.assertEqual(group.algorithm_id, 1) + self.assertEqual(group.algorithm_project_id, 123) + self.assertEqual(group.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(group.get_config(), config) + self.assertEqual(group.comment, 'comment') + self.assertEqual(group.to_proto().configured, True) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map['test'].auth_status, AuthStatus.AUTHORIZED.name) + self.assertEqual(mock_client.call_args_list, [(('uuid', AuthStatus.AUTHORIZED),), + (('uuid', AuthStatus.AUTHORIZED),)]) + + @patch('fedlearner_webconsole.mmgr.model_job_configer.ModelJobConfiger.get_config') + def test_put_model_job_group_with_global_config(self, mock_get_config): + mock_get_config.return_value = get_workflow_config(ModelJobType.EVALUATION) + global_config = get_global_config() + resp = self.put_helper('/api/v2/projects/1/model_job_groups/1', + data={ + 'dataset_id': 1, + 'global_config': to_dict(global_config) + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_get_config.assert_called_with(dataset_id=1, + model_id=None, + model_job_config=global_config.global_config['test']) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + self.assertEqual(group.dataset_id, 1) + self.assertEqual(group.get_config(), get_workflow_config(ModelJobType.EVALUATION)) + + 
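    # [Editor's note] cron_config in the requests below uses standard
    # five-field crontab syntax (minute hour day-of-month month day-of-week):
    # '*/10 * * * *' means "every 10 minutes", and an empty string is assumed
    # to disable the periodic trigger.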
@patch('fedlearner_webconsole.mmgr.service.ModelJobGroupService.update_cronjob_config') + def test_put_model_job_group_with_cron_config(self, mock_cronjob_config: Mock): + resp = self.put_helper('/api/v2/projects/1/model_job_groups/1', data={}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_cronjob_config.assert_not_called() + self.put_helper('/api/v2/projects/1/model_job_groups/1', data={ + 'cron_config': '*/10 * * * *', + }) + mock_cronjob_config.assert_called_once_with(group=ANY, cron_config='*/10 * * * *') + self.put_helper('/api/v2/projects/1/model_job_groups/1', data={ + 'cron_config': '', + }) + mock_cronjob_config.assert_called_with(group=ANY, cron_config='') + + def test_delete_model_job_group(self): + resp = self.delete_helper('/api/v2/projects/1/model_job_groups/1') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + group = session.query(ModelJobGroup).execution_options(include_deleted=True).get(1) + self.assertIsNotNone(group.deleted_at) + for job in group.model_jobs: + self.assertIsNotNone(job.deleted_at) + + +class PeerModelJobGroupApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='party', domain_name='fl-peer.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + group = ModelJobGroup(id=1, name='group', uuid='uuid', project_id=1, dataset_id=1) + session.add_all([project, participant, relationship, group]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.system_service_client.SystemServiceClient.list_flags') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_model_job_group') + def test_get_peer_model_job_group(self, mock_get_group, mock_list_flags): + config = WorkflowDefinition(job_definitions=[JobDefinition(variables=[Variable(name='test')])]) + mock_get_group.return_value = ModelJobGroupPb(name='group', uuid='uuid', config=config) + mock_list_flags.return_value = {'model_job_global_config_enabled': True} + resp = self.get_helper('/api/v2/projects/1/model_job_groups/1/peers/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_get_group.assert_called() + data = self.get_response_data(resp) + self.assertEqual(data['name'], 'group') + self.assertEqual(data['uuid'], 'uuid') + + @patch('fedlearner_webconsole.rpc.client.RpcClient.update_model_job_group') + def test_patch_peer_model_job_group(self, mock_update_group): + config = get_workflow_config(ModelJobType.TRAINING) + mock_update_group.return_value = UpdateModelJobGroupResponse(uuid='uuid', config=config) + resp = self.patch_helper('/api/v2/projects/1/model_job_groups/1/peers/1', data={'config': to_dict(config)}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_update_group.assert_called_with(model_job_group_uuid='uuid', config=config) + + @patch('fedlearner_webconsole.rpc.client.RpcClient.update_model_job_group') + @patch('fedlearner_webconsole.mmgr.model_job_configer.ModelJobConfiger.get_config') + def test_patch_peer_model_job_group_with_global_config(self, mock_get_config, mock_update_group): + config = get_workflow_config(ModelJobType.TRAINING) + mock_get_config.return_value = config + mock_update_group.return_value = UpdateModelJobGroupResponse(uuid='uuid', config=config) + global_config = get_global_config() + resp = self.patch_helper('/api/v2/projects/1/model_job_groups/1/peers/1', + data={'global_config': 
to_dict(global_config)}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_get_config.assert_called_with(dataset_id=1, + model_id=None, + model_job_config=global_config.global_config['peer']) + mock_update_group.assert_called_with(model_job_group_uuid='uuid', config=config) + + +class StopAutoUpdateApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + relationship = ProjectParticipant(project_id=1, participant_id=1) + participant = Participant(id=1, name='party', domain_name='fl-peer.com', host='127.0.0.1', port=32443) + group = ModelJobGroup(id=1, + name='group', + uuid='uuid', + project_id=1, + dataset_id=1, + auto_update_status=GroupAutoUpdateStatus.ACTIVE, + start_data_batch_id=1) + session.add_all([project, participant, relationship, group]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.update_model_job_group') + def test_post_stop(self, mock_client: MagicMock): + mock_client.return_value = Empty() + resp = self.post_helper('/api/v2/projects/1/model_job_groups/1:stop_auto_update') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['auto_update_status'], GroupAutoUpdateStatus.STOPPED.name) + self.assertEqual( + mock_client.call_args_list, + [call(uuid='uuid', auto_update_status=GroupAutoUpdateStatus.STOPPED, start_dataset_job_stage_uuid=None)]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/models.py b/web_console_v2/api/fedlearner_webconsole/mmgr/models.py index 6db0ea885..4eda2e302 100644 --- a/web_console_v2/api/fedlearner_webconsole/mmgr/models.py +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/models.py @@ -1,108 +1,648 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # -# Licensed under the Apache License, Version 2.0 (the 'License'); +# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, +# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
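# ---------------------------------------------------------------------------
# [Editor's sketch] The columns below persist Python enums through
# SQLAlchemy's Enum type with native_enum=False, i.e. as a VARCHAR holding
# the member *name*; create_constraint=False additionally skips the CHECK
# constraint, so new members can be added without a schema migration. A
# minimal standalone equivalent (illustrative names):
#
#     import enum
#     import sqlalchemy as sa
#
#     class Status(enum.Enum):
#         PENDING = 'PENDING'
#         ACTIVE = 'ACTIVE'
#
#     status = sa.Column(
#         sa.Enum(Status, native_enum=False, length=32, create_constraint=False),
#         default=Status.PENDING,
#         comment='stored as VARCHAR(32) holding the member name')
# ---------------------------------------------------------------------------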
- -# coding: utf-8 +# import enum +import logging +from typing import Optional +from google.protobuf import text_format from sqlalchemy.sql import func -from sqlalchemy.orm import remote, foreign from sqlalchemy.sql.schema import Index, UniqueConstraint -from fedlearner_webconsole.utils.mixins import to_dict_mixin from fedlearner_webconsole.db import db, default_table_args -from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmType +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.mmgr.utils import get_job_path, get_exported_model_path, get_checkpoint_path, \ + get_output_path +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.base_model import auth_model +from fedlearner_webconsole.utils.base_model.softdelete_model import SoftDeleteModel +from fedlearner_webconsole.utils.base_model.review_ticket_and_auth_model import ReviewTicketAndAuthModel +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.workflow.models import Workflow, WorkflowExternalState +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobPb, ModelJobGroupPb, ModelJobRef, ModelJobGroupRef, ModelPb, \ + ModelJobGlobalConfig, AlgorithmProjectList class ModelType(enum.Enum): - NN_MODEL = 0 - NN_EVALUATION = 1 + UNSPECIFIED = 0 + NN_MODEL = 1 TREE_MODEL = 2 - TREE_EVALUATION = 3 -class ModelState(enum.Enum): - NEW = -1 # before workflow has synced both party - COMMITTING = 0 # (transient) after workflow has synced both party, before committing to k8s - COMMITTED = 1 # after committed to k8s but before running - WAITING = 2 # k8s is queueing the related job(s) - RUNNING = 3 # k8s is running the related job(s) - PAUSED = 4 # related workflow has been paused by end-user - SUCCEEDED = 5 - FAILED = 6 - # DROPPING = 7 # (transient) removing model and its related resources - DROPPED = 8 # model has been removed +class ModelJobType(enum.Enum): + UNSPECIFIED = 0 + NN_TRAINING = 1 + NN_EVALUATION = 2 + NN_PREDICTION = 3 + TREE_TRAINING = 4 + TREE_EVALUATION = 5 + TREE_PREDICTION = 6 + TRAINING = 7 + EVALUATION = 8 + PREDICTION = 9 -# TODO transaction -@to_dict_mixin() -class Model(db.Model): - __tablename__ = 'models_v2' - __table_args__ = (Index('idx_job_name', 'job_name'), - UniqueConstraint('job_name', name='uniq_job_name'), - default_table_args('model')) - - id = db.Column(db.Integer, primary_key=True, comment='id') - name = db.Column(db.String(255), - comment='name') # can be modified by end-user - version = db.Column(db.Integer, default=0, comment='version') - type = db.Column(db.Integer, comment='type') - state = db.Column(db.Integer, comment='state') +class ModelJobRole(enum.Enum): + PARTICIPANT = 0 + COORDINATOR = 1 + + +class ModelJobStatus(enum.Enum): + PENDING = 'PENDING' # all model jobs are created, the local algorithm files and the local workflow are pending + CONFIGURED = 'CONFIGURED' # the local algorithm files are available and the local workflow is created + ERROR = 'ERROR' # error during creating model job + RUNNING = 'RUNNING' + STOPPED = 'STOPPED' + SUCCEEDED = 'SUCCEEDED' + FAILED = 'FAILED' # job failed during running + + +class AuthStatus(enum.Enum): + PENDING = 'PENDING' + AUTHORIZED = 'AUTHORIZED' + + +class GroupCreateStatus(enum.Enum): + PENDING = 'PENDING' + 
FAILED = 'FAILED' + SUCCEEDED = 'SUCCEEDED' + + +class GroupAuthFrontendStatus(enum.Enum): + TICKET_PENDING = 'TICKET_PENDING' + TICKET_DECLINED = 'TICKET_DECLINED' + CREATE_PENDING = 'CREATE_PENDING' + CREATE_FAILED = 'CREATE_FAILED' + SELF_AUTH_PENDING = 'SELF_AUTH_PENDING' + PART_AUTH_PENDING = 'PART_AUTH_PENDING' + ALL_AUTHORIZED = 'ALL_AUTHORIZED' + + +class GroupAutoUpdateStatus(enum.Enum): + INITIAL = 'INITIAL' + ACTIVE = 'ACTIVE' + STOPPED = 'STOPPED' + + +class ModelJobCreateStatus(enum.Enum): + PENDING = 'PENDING' + FAILED = 'FAILED' + SUCCEEDED = 'SUCCEEDED' + + +class ModelJobAuthFrontendStatus(enum.Enum): + TICKET_PENDING = 'TICKET_PENDING' + TICKET_DECLINED = 'TICKET_DECLINED' + CREATE_PENDING = 'CREATE_PENDING' + CREATE_FAILED = 'CREATE_FAILED' + SELF_AUTH_PENDING = 'SELF_AUTH_PENDING' + PART_AUTH_PENDING = 'PART_AUTH_PENDING' + ALL_AUTHORIZED = 'ALL_AUTHORIZED' + + +class ModelJob(db.Model, SoftDeleteModel, ReviewTicketAndAuthModel): + __tablename__ = 'model_jobs_v2' + __table_args__ = (Index('idx_uuid', + 'uuid'), UniqueConstraint('job_name', + name='uniq_job_name'), default_table_args('model_jobs_v2')) + + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + name = db.Column(db.String(255), comment='name') + uuid = db.Column(db.String(64), comment='uuid') + role = db.Column(db.Enum(ModelJobRole, native_enum=False, length=32, create_constraint=False), + default=ModelJobRole.PARTICIPANT, + comment='role') + model_job_type = db.Column(db.Enum(ModelJobType, native_enum=False, length=32, create_constraint=False), + default=ModelJobType.UNSPECIFIED, + comment='type') job_name = db.Column(db.String(255), comment='job_name') - parent_id = db.Column(db.Integer, comment='parent_id') + job_id = db.Column(db.Integer, comment='job id') + # the model id used for prediction or evaluation + model_id = db.Column(db.Integer, comment='model_id') + group_id = db.Column(db.Integer, comment='group_id') + project_id = db.Column(db.Integer, comment='project id') + workflow_id = db.Column(db.Integer, comment='workflow id') + workflow_uuid = db.Column(db.String(64), comment='workflow uuid') + algorithm_type = db.Column(db.Enum(AlgorithmType, native_enum=False, length=32, create_constraint=False), + default=AlgorithmType.UNSPECIFIED, + comment='algorithm type') + algorithm_id = db.Column(db.Integer, comment='algorithm id') + dataset_id = db.Column(db.Integer, comment='dataset id') params = db.Column(db.Text(), comment='params') metrics = db.Column(db.Text(), comment='metrics') - created_at = db.Column(db.DateTime(timezone=True), - comment='created_at', - server_default=func.now()) + extra = db.Column(db.Text(), comment='extra') + favorite = db.Column(db.Boolean, default=False, comment='favorite') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment') + version = db.Column(db.Integer, comment='version') + creator_username = db.Column(db.String(255), comment='creator username') + coordinator_id = db.Column(db.Integer, comment='coordinator participant id') + path = db.Column('fspath', db.String(512), key='path', comment='model job path') + metric_is_public = db.Column(db.Boolean(), default=False, comment='is metric public') + global_config = db.Column(db.Text(16777215), comment='global_config') + status = db.Column(db.Enum(ModelJobStatus, native_enum=False, length=32, create_constraint=False), + default=ModelJobStatus.PENDING, + comment='model job status') + create_status = db.Column(db.Enum(ModelJobCreateStatus, native_enum=False, length=32, 
create_constraint=False), + default=ModelJobCreateStatus.PENDING, + comment='create status') + auth_status = db.Column(db.Enum(AuthStatus, native_enum=False, length=32, create_constraint=False), + default=AuthStatus.PENDING, + comment='authorization status') + auto_update = db.Column(db.Boolean(), server_default=db.text('0'), comment='is auto update') + data_batch_id = db.Column(db.Integer, comment='data_batches id for auto update job') + error_message = db.Column(db.Text(), comment='error message') + created_at = db.Column(db.DateTime(timezone=True), comment='created_at', server_default=func.now()) updated_at = db.Column(db.DateTime(timezone=True), comment='updated_at', server_default=func.now(), onupdate=func.now()) deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted_at') + # the model id used for prediction or evaluation + model = db.relationship('Model', primaryjoin='Model.id == foreign(ModelJob.model_id)') + group = db.relationship('ModelJobGroup', primaryjoin='ModelJobGroup.id == foreign(ModelJob.group_id)') + project = db.relationship(Project.__name__, primaryjoin='Project.id == foreign(ModelJob.project_id)') + # job_name is the foreign key, job_id is unknown when creating + job = db.relationship('Job', primaryjoin='Job.name == foreign(ModelJob.job_name)') + # workflow_uuid is the foreign key, workflow_id is unknown when creating + workflow = db.relationship(Workflow.__name__, primaryjoin='Workflow.uuid == foreign(ModelJob.workflow_uuid)') + algorithm = db.relationship(Algorithm.__name__, primaryjoin='Algorithm.id == foreign(ModelJob.algorithm_id)') + dataset = db.relationship(Dataset.__name__, primaryjoin='Dataset.id == foreign(ModelJob.dataset_id)') + data_batch = db.relationship('DataBatch', primaryjoin='DataBatch.id == foreign(ModelJob.data_batch_id)') - group_id = db.Column(db.Integer, default=0, comment='group_id') - # TODO https://code.byted.org/data/fedlearner_web_console_v2/issues/289 - extra = db.Column(db.Text(), comment='extra') # json string + output_model = db.relationship( + 'Model', + uselist=False, + primaryjoin='ModelJob.id == foreign(Model.model_job_id)', + # To disable the warning of back_populates + overlaps='model_job') + + def to_proto(self) -> ModelJobPb: + config = self.config() + model_job = ModelJobPb( + id=self.id, + name=self.name, + uuid=self.uuid, + role=self.role.name, + model_job_type=self.model_job_type.name, + algorithm_type=self.algorithm_type.name if self.algorithm_type else AlgorithmType.UNSPECIFIED.name, + algorithm_id=self.algorithm_id, + group_id=self.group_id, + project_id=self.project_id, + state=self.state.name, + configured=config is not None, + model_id=self.model_id, + model_name=self.model_name(), + job_id=self.job_id, + job_name=self.job_name, + workflow_id=self.workflow_id, + dataset_id=self.dataset_id, + dataset_name=self.dataset_name(), + creator_username=self.creator_username, + coordinator_id=self.coordinator_id, + auth_status=self.auth_status.name if self.auth_status else '', + status=self.status.name if self.status else '', + error_message=self.error_message, + auto_update=self.auto_update, + data_batch_id=self.data_batch_id, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + started_at=self.started_at(), + stopped_at=self.stopped_at(), + version=self.version, + comment=self.comment, + metric_is_public=self.metric_is_public, + global_config=self.get_global_config(), + participants_info=self.get_participants_info(), + 
auth_frontend_status=self.get_model_job_auth_frontend_status().name)
+        if config is not None:
+            model_job.config.MergeFrom(config)
+        if self.output_model is not None:
+            model_job.output_model_name = self.output_model.name
+            model_job.output_models.append(self.output_model.to_proto())
+        return model_job
+
+    def to_ref(self) -> ModelJobRef:
+        return ModelJobRef(
+            id=self.id,
+            name=self.name,
+            uuid=self.uuid,
+            group_id=self.group_id,
+            project_id=self.project_id,
+            role=self.role.name,
+            model_job_type=self.model_job_type.name,
+            algorithm_type=self.algorithm_type.name if self.algorithm_type else AlgorithmType.UNSPECIFIED.name,
+            algorithm_id=self.algorithm_id,
+            state=self.state.name,
+            configured=self.config() is not None,
+            creator_username=self.creator_username,
+            coordinator_id=self.coordinator_id,
+            created_at=to_timestamp(self.created_at),
+            updated_at=to_timestamp(self.updated_at),
+            started_at=self.started_at(),
+            stopped_at=self.stopped_at(),
+            version=self.version,
+            metric_is_public=self.metric_is_public,
+            status=self.status.name if self.status else '',
+            auto_update=self.auto_update,
+            auth_status=self.auth_status.name if self.auth_status else '',
+            participants_info=self.get_participants_info(),
+            auth_frontend_status=self.get_model_job_auth_frontend_status().name)
+
+    @property
+    def state(self) -> WorkflowExternalState:
+        # TODO(hangweiqiang): design model job state
+        if self.workflow is None:
+            return WorkflowExternalState.PENDING_ACCEPT
+        return self.workflow.get_state_for_frontend()
+
+    def get_model_job_auth_frontend_status(self) -> ModelJobAuthFrontendStatus:
+        if self.ticket_status == TicketStatus.PENDING:
+            if self.ticket_uuid is not None:
+                return ModelJobAuthFrontendStatus.TICKET_PENDING
+            # Update old data that is set to PENDING by default when ticket is disabled
+            self.ticket_status = TicketStatus.APPROVED
+        if self.ticket_status == TicketStatus.DECLINED:
+            return ModelJobAuthFrontendStatus.TICKET_DECLINED
+        if self.auth_status not in [AuthStatus.AUTHORIZED]:
+            return ModelJobAuthFrontendStatus.SELF_AUTH_PENDING
+        if self.is_all_participants_authorized():
+            return ModelJobAuthFrontendStatus.ALL_AUTHORIZED
+        if self.create_status in [ModelJobCreateStatus.PENDING]:
+            return ModelJobAuthFrontendStatus.CREATE_PENDING
+        if self.create_status in [ModelJobCreateStatus.FAILED]:
+            return ModelJobAuthFrontendStatus.CREATE_FAILED
+        return ModelJobAuthFrontendStatus.PART_AUTH_PENDING
+
+    def get_job_path(self) -> Optional[str]:
+        path = self.project.get_storage_root_path(None)
+        if path is None:
+            logging.warning('cannot find storage_root_path')
+            return None
+        return get_job_path(path, self.job_name)
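+    # Path layout (grounded in the unit tests below): with a storage root of
+    # '/data' and job_name 'uuid-train-job', get_job_path() returns
+    # '/data/job_output/uuid-train-job', and get_exported_model_path() returns
+    # '/data/job_output/uuid-train-job/exported_models'.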
+    def get_exported_model_path(self) -> Optional[str]:
+        """Gets the path of the exported models.
+
+        Returns:
+            The path of the exported models, or None if the path cannot be
+            found. There may be multiple checkpoints under the path. The file
+            structure of an nn_model under the exported_models path is:
+            - exported_models:
+                - ${terminated time, e.g. 1619769879}
+                    - _SUCCESS
+                    - saved_model.pb
+                    - variables
+                        - variables.data-00000-of-00001
+                        - variables.index
+        """
+        job_path = self.get_job_path()
+        if job_path is None:
+            return None
+        return get_exported_model_path(job_path)
+
+    def get_checkpoint_path(self) -> Optional[str]:
+        job_path = self.get_job_path()
+        if job_path is None:
+            return None
+        return get_checkpoint_path(job_path=job_path)
+
+    def get_output_path(self) -> Optional[str]:
+        job_path = self.get_job_path()
+        if job_path is None:
+            return None
+        return get_output_path(job_path)
+
+    def model_name(self) -> Optional[str]:
+        if self.model_id is not None:
+            return self.model.name
+        return None
+
+    def dataset_name(self) -> Optional[str]:
+        # check through the relationship instead of the existence of the id, since the item may have been deleted
+        if self.dataset is not None:
+            return self.dataset.name
+        return None
+
+    def started_at(self) -> Optional[int]:
+        if self.workflow:
+            return self.workflow.start_at
+        return None
-
-    parent = db.relationship('Model',
-                             primaryjoin=remote(id) == foreign(parent_id),
-                             backref='children')
-    job = db.relationship('Job', primaryjoin=Job.name == foreign(job_name))
 
+    def stopped_at(self) -> Optional[int]:
+        if self.workflow:
+            return self.workflow.stop_at
+        return None
 
-    def get_eval_model(self):
-        return [
-            child for child in self.children if child.type in
-            [ModelType.NN_EVALUATION.value, ModelType.TREE_EVALUATION.value]
+    def config(self) -> Optional[WorkflowDefinition]:
+        if self.workflow:
+            return self.workflow.get_config()
+        return None
+
+    def is_deletable(self) -> bool:
+        return self.state in [
+            WorkflowExternalState.FAILED, WorkflowExternalState.STOPPED, WorkflowExternalState.COMPLETED
         ]
 
+    def set_global_config(self, proto: ModelJobGlobalConfig):
+        self.global_config = text_format.MessageToString(proto)
 
-@to_dict_mixin()
-class ModelGroup(db.Model):
-    __tablename__ = 'model_groups_v2'
-    __table_args__ = (default_table_args('model_groups_v2'))
+    def get_global_config(self) -> Optional[ModelJobGlobalConfig]:
+        if self.global_config is not None:
+            return text_format.Parse(self.global_config, ModelJobGlobalConfig())
+        return None
 
-    id = db.Column(db.Integer, primary_key=True, comment='id')
-    name = db.Column(db.String(255),
-                     comment='name')  # can be modified by end-user
-    created_at = db.Column(db.DateTime(timezone=True),
-                           comment='created_at',
-                           server_default=func.now())
+
+class Model(db.Model, SoftDeleteModel):
+    __tablename__ = 'models_v2'
+    __table_args__ = (UniqueConstraint('name', name='uniq_name'), UniqueConstraint('uuid', name='uniq_uuid'),
+                      default_table_args('models_v2'))
+
+    id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True)
+    name = db.Column(db.String(255), comment='name')
+    uuid = db.Column(db.String(64), comment='uuid')
+    algorithm_type = db.Column(db.Enum(AlgorithmType, native_enum=False, length=32, create_constraint=False),
+                               default=AlgorithmType.UNSPECIFIED,
+                               comment='algorithm type')
+    # TODO(hangweiqiang): remove model_type column
+    model_type = db.Column(db.Enum(ModelType, native_enum=False, length=32, create_constraint=False),
+                           default=ModelType.UNSPECIFIED,
+                           comment='type')
+    model_path = db.Column(db.String(512), comment='model path')
+    favorite = db.Column(db.Boolean, default=False, comment='favorite model')
+    comment = db.Column('cmt', db.Text(), key='comment', comment='comment')
+    group_id = db.Column(db.Integer, comment='group_id')
+    project_id = db.Column(db.Integer, comment='project_id')
+    job_id = db.Column(db.Integer, comment='job id')
+    model_job_id = db.Column(db.Integer, comment='model job id')
+    version = db.Column(db.Integer, comment='version')
+    created_at = db.Column(db.DateTime(timezone=True), comment='created_at', server_default=func.now())
     updated_at = db.Column(db.DateTime(timezone=True),
                            comment='updated_at',
                            server_default=func.now(),
                            onupdate=func.now())
     deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted_at')
+    group = db.relationship('ModelJobGroup', primaryjoin='ModelJobGroup.id == foreign(Model.group_id)')
+    project = db.relationship('Project', primaryjoin='Project.id == foreign(Model.project_id)')
+    job = db.relationship('Job', primaryjoin='Job.id == foreign(Model.job_id)')
+    # the model_job generating this model
+    model_job = db.relationship('ModelJob', primaryjoin='ModelJob.id == foreign(Model.model_job_id)')
+    # the model_jobs inheriting this model
+    derived_model_jobs = db.relationship(
+        'ModelJob',
+        primaryjoin='foreign(ModelJob.model_id) == Model.id',
+        # To disable the warning of back_populates
+        overlaps='model')
+
+    def to_proto(self) -> ModelPb:
+        return ModelPb(
+            id=self.id,
+            name=self.name,
+            uuid=self.uuid,
+            algorithm_type=self.algorithm_type.name if self.algorithm_type else AlgorithmType.UNSPECIFIED.name,
+            group_id=self.group_id,
+            project_id=self.project_id,
+            model_job_id=self.model_job_id,
+            model_job_name=self.model_job_name(),
+            job_id=self.job_id,
+            job_name=self.job_name(),
+            workflow_id=self.workflow_id(),
+            workflow_name=self.workflow_name(),
+            version=self.version,
+            created_at=to_timestamp(self.created_at),
+            updated_at=to_timestamp(self.updated_at),
+            comment=self.comment,
+            model_path=self.model_path)
+
+    def workflow_id(self):
+        if self.job is not None:
+            return self.job.workflow_id
+        return None
+
+    def job_name(self):
+        if self.job is not None:
+            return self.job.name
+        return None
+
+    def workflow_name(self):
+        if self.job is not None:
+            return self.job.workflow.name
+        return None
+
+    def model_job_name(self):
+        if self.model_job is not None:
+            return self.model_job.name
+        return None
 
-    # TODO https://code.byted.org/data/fedlearner_web_console_v2/issues/289
+    def get_exported_model_path(self):
+        """Gets the path of the exported models.
+
+        Same as the get_exported_model_path method of the ModelJob class.
+        """
+        return get_exported_model_path(self.model_path)
+
+    def get_checkpoint_path(self):
+        return get_checkpoint_path(self.model_path)
+
+
+class ModelJobGroup(db.Model, SoftDeleteModel, ReviewTicketAndAuthModel):
+    # the table name and the class name are inconsistent for historical reasons
+    __tablename__ = 'model_groups_v2'
+    __table_args__ = (UniqueConstraint('name', name='uniq_name'), default_table_args('model_groups_v2'))
+
+    id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True)
+    uuid = db.Column(db.String(64), comment='uuid')
+    name = db.Column(db.String(255), comment='name')
+    project_id = db.Column(db.Integer, comment='project_id')
+    role = db.Column(db.Enum(ModelJobRole, native_enum=False, length=32, create_constraint=False),
+                     default=ModelJobRole.PARTICIPANT,
+                     comment='role')
+    authorized = db.Column(db.Boolean, default=False, comment='authorized to participants in project')
+    dataset_id = db.Column(db.Integer, comment='dataset id')
+    algorithm_type = db.Column(db.Enum(AlgorithmType, native_enum=False, length=32, create_constraint=False),
+                               default=AlgorithmType.UNSPECIFIED,
+                               comment='algorithm type')
+    algorithm_project_id = db.Column(db.Integer, comment='algorithm project id')
+    algorithm_id = db.Column(db.Integer, comment='algorithm id')
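+    # text-format serialized WorkflowDefinition proto; written by set_config()
+    # and parsed back by get_config() below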
config = db.Column(db.Text(16777215), comment='config') + cron_job_global_config = db.Column(db.Text(16777215), comment='global config for cron job') + # use proto.AlgorithmProjectList to store the algorithm project uuid of each participant + algorithm_project_uuid_list = db.Column('algorithm_uuid_list', + db.Text(16777215), + key='algorithm_uuid_list', + comment='algorithm project uuid for all participants') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment') + creator_username = db.Column(db.String(255), comment='creator username') + coordinator_id = db.Column(db.Integer, comment='coordinator participant id') + cron_config = db.Column(db.String(255), comment='cron expression in UTC timezone') + path = db.Column('fspath', db.String(512), key='path', comment='model job group path') + _auth_status = db.Column('auth_status', + db.Enum(auth_model.AuthStatus, native_enum=False, length=32, create_constraint=False), + default=auth_model.AuthStatus.PENDING, + comment='auth status') + auto_update_status = db.Column(db.Enum(GroupAutoUpdateStatus, native_enum=False, length=32, + create_constraint=False), + default=GroupAutoUpdateStatus.INITIAL, + comment='auto update status') + start_data_batch_id = db.Column(db.Integer, comment='start data_batches id for auto update job') + created_at = db.Column(db.DateTime(timezone=True), comment='created_at', server_default=func.now()) + updated_at = db.Column(db.DateTime(timezone=True), + comment='updated_at', + server_default=func.now(), + onupdate=func.now()) + deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted_at') extra = db.Column(db.Text(), comment='extra') # json string + latest_version = db.Column(db.Integer, default=0, comment='latest version') + status = db.Column(db.Enum(GroupCreateStatus, native_enum=False, length=32, create_constraint=False), + default=GroupCreateStatus.PENDING, + comment='create status') + project = db.relationship('Project', primaryjoin='Project.id == foreign(ModelJobGroup.project_id)') + algorithm = db.relationship('Algorithm', primaryjoin='Algorithm.id == foreign(ModelJobGroup.algorithm_id)') + algorithm_project = db.relationship( + 'AlgorithmProject', primaryjoin='AlgorithmProject.id == foreign(ModelJobGroup.algorithm_project_id)') + dataset = db.relationship('Dataset', primaryjoin='Dataset.id == foreign(ModelJobGroup.dataset_id)') + model_jobs = db.relationship( + 'ModelJob', + order_by='desc(ModelJob.version)', + primaryjoin='ModelJobGroup.id == foreign(ModelJob.group_id)', + # To disable the warning of back_populates + overlaps='group') + start_data_batch = db.relationship('DataBatch', + primaryjoin='DataBatch.id == foreign(ModelJobGroup.start_data_batch_id)') + + @property + def auth_status(self): + if self._auth_status is not None: + return self._auth_status + if self.authorized: + return auth_model.AuthStatus.AUTHORIZED + return auth_model.AuthStatus.PENDING + + @auth_status.setter + def auth_status(self, auth_status: auth_model.AuthStatus): + self._auth_status = auth_status + + def to_ref(self) -> ModelJobGroupRef: + group = ModelJobGroupRef(id=self.id, + name=self.name, + uuid=self.uuid, + role=self.role.name, + project_id=self.project_id, + authorized=self.authorized, + algorithm_type=self.algorithm_type.name, + configured=self.config is not None, + creator_username=self.creator_username, + coordinator_id=self.coordinator_id, + latest_version=self.latest_version, + participants_info=self.get_participants_info(), + auth_status=self.auth_status.name, + 
created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at)) + latest_job_state = self.latest_job_state() + if latest_job_state is not None: + group.latest_job_state = latest_job_state.name + group.auth_frontend_status = self.get_group_auth_frontend_status().name + return group + + def to_proto(self) -> ModelJobGroupPb: + group = ModelJobGroupPb(id=self.id, + name=self.name, + uuid=self.uuid, + role=self.role.name, + project_id=self.project_id, + authorized=self.authorized, + dataset_id=self.dataset_id, + algorithm_type=self.algorithm_type.name, + algorithm_project_id=self.algorithm_project_id, + algorithm_id=self.algorithm_id, + configured=self.config is not None, + creator_username=self.creator_username, + coordinator_id=self.coordinator_id, + cron_config=self.cron_config, + latest_version=self.latest_version, + participants_info=self.get_participants_info(), + algorithm_project_uuid_list=self.get_algorithm_project_uuid_list(), + auth_status=self.auth_status.name, + auto_update_status=self.auto_update_status.name if self.auto_update_status else '', + start_data_batch_id=self.start_data_batch_id, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + comment=self.comment) + latest_job_state = self.latest_job_state() + if latest_job_state is not None: + group.latest_job_state = latest_job_state.name + group.auth_frontend_status = self.get_group_auth_frontend_status().name + if self.config is not None: + group.config.MergeFrom(self.get_config()) + group.model_jobs.extend([mj.to_ref() for mj in self.model_jobs]) + return group + + def latest_job_state(self) -> Optional[ModelJobStatus]: + if len(self.model_jobs) == 0: + return None + return self.model_jobs[0].status + + def get_group_auth_frontend_status(self) -> GroupAuthFrontendStatus: + if self.ticket_status == TicketStatus.PENDING: + if self.ticket_uuid is not None: + return GroupAuthFrontendStatus.TICKET_PENDING + # Update old data that is set to PENDING by default when ticket is disabled + self.ticket_status = TicketStatus.APPROVED + if self.ticket_status == TicketStatus.DECLINED: + return GroupAuthFrontendStatus.TICKET_DECLINED + if not self.authorized: + return GroupAuthFrontendStatus.SELF_AUTH_PENDING + if self.is_all_participants_authorized(): + return GroupAuthFrontendStatus.ALL_AUTHORIZED + if self.status == GroupCreateStatus.PENDING: + return GroupAuthFrontendStatus.CREATE_PENDING + if self.status == GroupCreateStatus.FAILED: + return GroupAuthFrontendStatus.CREATE_FAILED + return GroupAuthFrontendStatus.PART_AUTH_PENDING + + def get_config(self) -> Optional[WorkflowDefinition]: + if self.config is not None: + return text_format.Parse(self.config, WorkflowDefinition()) + return None + + def set_config(self, config: Optional[WorkflowDefinition] = None): + if config is None: + config = WorkflowDefinition() + self.config = text_format.MessageToString(config) + + def is_deletable(self) -> bool: + for model_job in self.model_jobs: + if not model_job.is_deletable(): + return False + return True + + def latest_completed_job(self) -> Optional[ModelJob]: + for job in self.model_jobs: + if job.state == WorkflowExternalState.COMPLETED: + return job + return None + + def set_algorithm_project_uuid_list(self, proto: AlgorithmProjectList): + self.algorithm_project_uuid_list = text_format.MessageToString(proto) + + def get_algorithm_project_uuid_list(self) -> AlgorithmProjectList: + algorithm_project_uuid_list = AlgorithmProjectList() + if self.algorithm_project_uuid_list is not None: 
+ algorithm_project_uuid_list = text_format.Parse(self.algorithm_project_uuid_list, AlgorithmProjectList()) + return algorithm_project_uuid_list + + +def is_federated(algorithm_type: AlgorithmType, model_job_type: ModelJobType) -> bool: + return algorithm_type != AlgorithmType.NN_HORIZONTAL or model_job_type == ModelJobType.TRAINING diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/models_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/models_test.py new file mode 100644 index 000000000..bcbff920a --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/models_test.py @@ -0,0 +1,452 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, PropertyMock +from datetime import datetime +from google.protobuf.json_format import MessageToDict + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.workflow.models import WorkflowExternalState +from fedlearner_webconsole.mmgr.models import ModelJob, Model, ModelJobType, ModelJobGroup, ModelJobRole, \ + GroupCreateStatus, GroupAuthFrontendStatus, AlgorithmProjectList, ModelJobAuthFrontendStatus, \ + ModelJobCreateStatus, AuthStatus as ModelJobAuthStatus +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobRef, ModelPb, ModelJobGroupRef, ModelJobGroupPb, \ + ModelJobGlobalConfig, ModelJobConfig +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus + + +class ModelTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + model = Model(id=1, + name='model', + uuid='uuid', + group_id=2, + project_id=3, + job_id=4, + model_job_id=5, + version=1, + created_at=datetime(2022, 5, 10, 0, 0, 0), + updated_at=datetime(2022, 5, 10, 0, 0, 0)) + session.add(model) + session.commit() + + def test_to_proto(self): + with db.session_scope() as session: + model: Model = session.query(Model).get(1) + pb = ModelPb(id=1, + name='model', + uuid='uuid', + group_id=2, + project_id=3, + algorithm_type='UNSPECIFIED', + job_id=4, + model_job_id=5, + version=1, + created_at=1652140800, + updated_at=1652140800) + self.assertEqual(model.to_proto(), pb) + model.algorithm_type = AlgorithmType.NN_VERTICAL + pb.algorithm_type = 'NN_VERTICAL' + self.assertEqual(model.to_proto(), pb) + + +class ModelJobTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='test-project') + 
model_job = ModelJob(id=1, + name='job', + uuid='uuid', + project_id=1, + group_id=2, + model_job_type=ModelJobType.TRAINING, + role=ModelJobRole.COORDINATOR, + algorithm_type=AlgorithmType.NN_VERTICAL, + ticket_status=TicketStatus.PENDING, + ticket_uuid='ticket_uuid', + job_name='uuid-train-job', + job_id=3, + workflow_uuid='uuid', + workflow_id=5, + algorithm_id=6, + dataset_id=7, + creator_username='ada', + coordinator_id=8, + version=1, + created_at=datetime(2022, 5, 10, 0, 0, 0), + updated_at=datetime(2022, 5, 10, 0, 0, 0), + metric_is_public=True, + auto_update=True, + auth_status=ModelJobAuthStatus.AUTHORIZED, + error_message='error_message') + session.add_all([project, model_job]) + session.commit() + + @patch('fedlearner_webconsole.project.models.Project.get_storage_root_path') + def test_exported_model_path(self, mock_get_storage_root_path): + mock_get_storage_root_path.return_value = '/data/' + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + session.add(model_job) + session.flush() + # test for model_job.get_exported_model_path + expected_path = f'/data/job_output/{model_job.job_name}/exported_models' + self.assertEqual(model_job.get_exported_model_path(), expected_path) + # test for model.get_exported_model_path + model_path = '/data/model_output/uuid' + model = Model(model_path=model_path) + expected_path = '/data/model_output/uuid/exported_models' + self.assertEqual(model.get_exported_model_path(), expected_path) + + @patch('fedlearner_webconsole.mmgr.models.ModelJob.state', new_callable=PropertyMock) + def test_is_deletable(self, mock_state): + mock_state.return_value = WorkflowExternalState.RUNNING + model_job = ModelJob(name='model_job') + self.assertEqual(model_job.is_deletable(), False) + mock_state.return_value = WorkflowExternalState.FAILED + self.assertEqual(model_job.is_deletable(), True) + + def test_to_ref(self): + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(1) + ref = ModelJobRef(id=1, + name='job', + uuid='uuid', + project_id=1, + group_id=2, + model_job_type='TRAINING', + role='COORDINATOR', + algorithm_type='NN_VERTICAL', + algorithm_id=6, + state='PENDING_ACCEPT', + configured=False, + creator_username='ada', + coordinator_id=8, + version=1, + created_at=1652140800, + updated_at=1652140800, + metric_is_public=True, + status='PENDING', + auth_frontend_status='TICKET_PENDING', + auth_status='AUTHORIZED', + auto_update=True, + participants_info=ParticipantsInfo()) + self.assertEqual(model_job.to_ref(), ref) + + def test_to_proto(self): + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(1) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={'test': ModelJobConfig(algorithm_uuid='uuid')}) + model_job.set_global_config(global_config) + self.assertPartiallyEqual(MessageToDict(model_job.to_proto()), { + 'id': '1', + 'name': 'job', + 'uuid': 'uuid', + 'role': 'COORDINATOR', + 'modelJobType': 'TRAINING', + 'algorithmType': 'NN_VERTICAL', + 'algorithmId': '6', + 'groupId': '2', + 'projectId': '1', + 'state': 'PENDING_ACCEPT', + 'jobId': '3', + 'workflowId': '5', + 'datasetId': '7', + 'creatorUsername': 'ada', + 'coordinatorId': '8', + 'version': '1', + 'jobName': 'uuid-train-job', + 'metricIsPublic': True, + 'status': 'PENDING', + 'authStatus': 'AUTHORIZED', + 'autoUpdate': True, + 'errorMessage': 'error_message', + 'globalConfig': { + 'globalConfig': { + 'test': { + 'algorithmUuid': 'uuid' + } + }, + 'datasetUuid': 'uuid' + }, + 
'authFrontendStatus': 'TICKET_PENDING', + 'participantsInfo': {}, + }, + ignore_fields=['createdAt', 'updatedAt']) + + def test_set_and_get_global_config(self): + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={'test': ModelJobConfig(algorithm_uuid='uuid')}) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(1) + self.assertIsNone(model_job.get_global_config()) + model_job.set_global_config(proto=global_config) + session.commit() + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(1) + self.assertEqual(model_job.get_global_config(), global_config) + + def test_get_model_job_auth_frontend_status(self): + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + # case 1 + self.assertEqual(model_job.get_model_job_auth_frontend_status(), ModelJobAuthFrontendStatus.TICKET_PENDING) + # case 2 + model_job.ticket_status = TicketStatus.DECLINED + self.assertEqual(model_job.get_model_job_auth_frontend_status(), ModelJobAuthFrontendStatus.TICKET_DECLINED) + # case 3 + model_job.ticket_status = TicketStatus.APPROVED + model_job.auth_status = ModelJobAuthStatus.PENDING + self.assertEqual(model_job.get_model_job_auth_frontend_status(), + ModelJobAuthFrontendStatus.SELF_AUTH_PENDING) + # case 4 + model_job.auth_status = ModelJobAuthStatus.AUTHORIZED + participants_info = ParticipantsInfo( + participants_map={ + 'test_1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test_2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + model_job.set_participants_info(participants_info) + model_job.create_status = ModelJobCreateStatus.PENDING + self.assertEqual(model_job.get_model_job_auth_frontend_status(), ModelJobAuthFrontendStatus.CREATE_PENDING) + # case 5 + model_job.create_status = ModelJobCreateStatus.FAILED + self.assertEqual(model_job.get_model_job_auth_frontend_status(), ModelJobAuthFrontendStatus.CREATE_FAILED) + # case 6 + model_job.create_status = ModelJobCreateStatus.SUCCEEDED + self.assertEqual(model_job.get_model_job_auth_frontend_status(), + ModelJobAuthFrontendStatus.PART_AUTH_PENDING) + # case 7 + participants_info = ParticipantsInfo( + participants_map={ + 'test_1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + model_job.set_participants_info(participants_info) + self.assertEqual(model_job.get_model_job_auth_frontend_status(), ModelJobAuthFrontendStatus.ALL_AUTHORIZED) + # case 8 + model_job.ticket_uuid = None + model_job.ticket_status = TicketStatus.PENDING + self.assertEqual(model_job.get_model_job_auth_frontend_status(), ModelJobAuthFrontendStatus.ALL_AUTHORIZED) + self.assertEqual(model_job.ticket_status, TicketStatus.APPROVED) + + +class ModelJobGroupTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + config = WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[Variable(name='mode', value='train')]) + ]) + job = ModelJob(id=1, + name='job', + uuid='uuid', + project_id=2, + group_id=1, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + auth_status=ModelJobAuthStatus.AUTHORIZED, + auto_update=True, + created_at=datetime(2022, 5, 10, 0, 0, 0), + updated_at=datetime(2022, 5, 10, 0, 0, 0), + version=1) + group = ModelJobGroup(id=1, + name='group', + uuid='uuid', + project_id=2, + 
role=ModelJobRole.COORDINATOR, + authorized=False, + ticket_status=TicketStatus.PENDING, + ticket_uuid='ticket_uuid', + dataset_id=3, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_project_id=4, + algorithm_id=5, + creator_username='ada', + coordinator_id=6, + created_at=datetime(2022, 5, 10, 0, 0, 0), + updated_at=datetime(2022, 5, 10, 0, 0, 0)) + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'uuid-test' + algorithm_project_list.algorithm_projects['peer'] = 'uuid-peer' + group.set_algorithm_project_uuid_list(algorithm_project_list) + group.set_config(config) + session.add_all([job, group]) + session.commit() + + def test_get_config(self): + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + self.assertEqual( + group.get_config(), + WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[Variable(name='mode', value='train')]) + ])) + + @patch('fedlearner_webconsole.mmgr.models.ModelJob.state', new_callable=PropertyMock) + def test_is_deletable(self, mock_state): + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + mock_state.return_value = WorkflowExternalState.RUNNING + self.assertEqual(group.is_deletable(), False) + mock_state.return_value = WorkflowExternalState.STOPPED + self.assertEqual(group.is_deletable(), True) + + def test_auth_status(self): + group = ModelJobGroup(id=2, auth_status=None) + self.assertEqual(group.auth_status, AuthStatus.PENDING) + group.authorized = True + self.assertEqual(group.auth_status, AuthStatus.AUTHORIZED) + group.authorized = False + group.auth_status = AuthStatus.AUTHORIZED + self.assertEqual(group.auth_status, AuthStatus.AUTHORIZED) + with db.session_scope() as session: + session.add(group) + session.commit() + self.assertEqual(group._auth_status, AuthStatus.AUTHORIZED) # pylint: disable=protected-access + + def test_get_group_auth_frontend_status(self): + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + # case 1 + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.TICKET_PENDING) + # case 2 + group.ticket_status = TicketStatus.DECLINED + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.TICKET_DECLINED) + # case 3 + group.ticket_status = TicketStatus.APPROVED + group.authorized = False + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.SELF_AUTH_PENDING) + # case 4 + group.authorized = True + participants_info = ParticipantsInfo( + participants_map={ + 'test_1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test_2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + group.set_participants_info(participants_info) + group.status = GroupCreateStatus.PENDING + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.CREATE_PENDING) + # case 5 + group.status = GroupCreateStatus.FAILED + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.CREATE_FAILED) + # case 6 + group.status = GroupCreateStatus.SUCCEEDED + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.PART_AUTH_PENDING) + # case 7 + participants_info = ParticipantsInfo( + participants_map={ + 'test_1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + 
group.set_participants_info(participants_info) + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.ALL_AUTHORIZED) + # case 8 + group.ticket_uuid = None + group.ticket_status = TicketStatus.PENDING + self.assertEqual(group.get_group_auth_frontend_status(), GroupAuthFrontendStatus.ALL_AUTHORIZED) + self.assertEqual(group.ticket_status, TicketStatus.APPROVED) + + def test_to_ref(self): + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + ref = ModelJobGroupRef(id=1, + name='group', + uuid='uuid', + role='COORDINATOR', + project_id=2, + algorithm_type='NN_VERTICAL', + configured=True, + creator_username='ada', + coordinator_id=6, + latest_job_state='PENDING', + auth_frontend_status='TICKET_PENDING', + auth_status='PENDING', + participants_info=group.get_participants_info(), + created_at=1652140800, + updated_at=1652140800) + self.assertEqual(group.to_ref(), ref) + + def test_to_proto(self): + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + proto = ModelJobGroupPb(id=1, + name='group', + uuid='uuid', + role='COORDINATOR', + project_id=2, + dataset_id=3, + algorithm_type='NN_VERTICAL', + algorithm_project_id=4, + algorithm_id=5, + configured=True, + creator_username='ada', + coordinator_id=6, + latest_job_state='PENDING', + auth_frontend_status='TICKET_PENDING', + auth_status='PENDING', + auto_update_status='INITIAL', + participants_info=group.get_participants_info(), + algorithm_project_uuid_list=group.get_algorithm_project_uuid_list(), + created_at=1652140800, + updated_at=1652140800) + config = WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[Variable(name='mode', value='train')]) + ]) + proto.config.MergeFrom(config) + proto.model_jobs.append( + ModelJobRef(id=1, + name='job', + uuid='uuid', + group_id=1, + project_id=2, + role='PARTICIPANT', + model_job_type='TRAINING', + algorithm_type='NN_VERTICAL', + state='PENDING_ACCEPT', + created_at=1652140800, + updated_at=1652140800, + version=1, + status='PENDING', + auto_update=True, + auth_status='AUTHORIZED', + auth_frontend_status='ALL_AUTHORIZED', + participants_info=ParticipantsInfo())) + self.assertEqual(group.to_proto(), proto) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/scheduler.py b/web_console_v2/api/fedlearner_webconsole/mmgr/scheduler.py new file mode 100644 index 000000000..05f36cf7f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/scheduler.py @@ -0,0 +1,206 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import logging
+from sqlalchemy import or_
+from typing import List, Tuple
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.dataset.services import BatchService
+from fedlearner_webconsole.composer.interface import IRunnerV2, RunnerContext, RunnerOutput
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobRole, ModelJobStatus, ModelJobGroup, \
+    GroupCreateStatus, GroupAutoUpdateStatus, Model, ModelJobType, ModelJobAuthFrontendStatus
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobConfig
+from fedlearner_webconsole.mmgr.model_job_configer import ModelJobConfiger, set_load_model_name
+from fedlearner_webconsole.mmgr.service import ModelJobService, ModelJobGroupService
+from fedlearner_webconsole.mmgr.controller import ModelJobGroupController
+from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+from fedlearner_webconsole.utils.resource_name import resource_uuid
+
+
+class ModelJobSchedulerRunner(IRunnerV2):
+
+    @staticmethod
+    def _check_model_job(model_job_id: int):
+        """Checks the workflow state and updates the model job status accordingly."""
+        with db.session_scope() as session:
+            model_job = session.query(ModelJob).get(model_job_id)
+            ModelJobService(session).update_model_job_status(model_job)
+            session.commit()
+            logging.info(f'[ModelJobScheduler] model_job {model_job.name} updates status to {model_job.status}')
+
+    @staticmethod
+    def _config_model_job(model_job_id: int):
+        """Configures the model job via ModelJobConfiger and ModelJobService."""
+        with db.session_scope() as session:
+            model_job: ModelJob = session.query(ModelJob).get(model_job_id)
+            global_config = model_job.get_global_config()
+            if global_config is None:
+                ModelJobService(session).update_model_job_status(model_job)
+                session.commit()
+                return
+            domain_name = SettingService(session).get_system_info().pure_domain_name
+            model_job_config: ModelJobConfig = global_config.global_config.get(domain_name)
+            try:
+                configer = ModelJobConfiger(session=session,
+                                            model_job_type=model_job.model_job_type,
+                                            algorithm_type=model_job.algorithm_type,
+                                            project_id=model_job.project_id)
+                config = configer.get_config(dataset_id=model_job.dataset_id,
+                                             model_id=model_job.model_id,
+                                             model_job_config=model_job_config)
+                ModelJobService(session).config_model_job(model_job=model_job,
+                                                          config=config,
+                                                          create_workflow=False,
+                                                          need_to_create_ready_workflow=True,
+                                                          workflow_uuid=model_job.uuid)
+            except Exception as e:  # pylint: disable=broad-except
+                logging.exception(f'[ModelJobScheduler] config model job {model_job_id} failed')
+                model_job.error_message = str(e)
+                model_job.status = ModelJobStatus.ERROR
+            else:
+                # only log success when configuration did not raise; otherwise the status is ERROR
+                logging.info(f'[ModelJobScheduler] model_job {model_job_id} is CONFIGURED')
+            finally:
+                session.commit()
+
+    def schedule_model_job(self):
+        # 1. Configure training model jobs whose status is PENDING, as well as
+        # evaluation or prediction model jobs whose status is PENDING and whose
+        # ModelJobAuthFrontendStatus is ALL_AUTHORIZED: pull the algorithm and
+        # create the workflow, after which the status becomes CONFIGURED.
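+        # Status flow driven by this runner (a summary inferred from the
+        # transitions below, not an authoritative spec):
+        #   PENDING -> CONFIGURED -> RUNNING -> SUCCEEDED / FAILED,
+        #   with ERROR set when configuration raises.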
+        with db.session_scope() as session:
+            training_model_job_ids: List[Tuple[int]] = session.query(ModelJob.id).filter_by(
+                status=ModelJobStatus.PENDING, model_job_type=ModelJobType.TRAINING).all()
+            non_training_model_job_ids: List[Tuple[int]] = session.query(
+                ModelJob.id).filter(ModelJob.model_job_type != ModelJobType.TRAINING).filter(
+                    ModelJob.status == ModelJobStatus.PENDING).all()
+        for training_model_job_id, *_ in training_model_job_ids:
+            self._config_model_job(model_job_id=training_model_job_id)
+        for non_training_model_job_id, *_ in non_training_model_job_ids:
+            with db.session_scope() as session:
+                model_job = session.query(ModelJob).get(non_training_model_job_id)
+                if model_job.get_model_job_auth_frontend_status() in [ModelJobAuthFrontendStatus.ALL_AUTHORIZED]:
+                    self._config_model_job(model_job_id=non_training_model_job_id)
+        # 2. Check model jobs whose status is CONFIGURED or RUNNING and update their status.
+        with db.session_scope() as session:
+            model_job_ids: List[Tuple[int]] = session.query(ModelJob.id).filter(
+                or_(ModelJob.status == ModelJobStatus.CONFIGURED, ModelJob.status == ModelJobStatus.RUNNING)).all()
+        for model_job_id, *_ in model_job_ids:
+            self._check_model_job(model_job_id=model_job_id)
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        try:
+            self.schedule_model_job()
+        except Exception as e:  # pylint: disable=broad-except
+            logging.exception('[ModelJobScheduler] schedule model job failed')
+            return RunnerStatus.FAILED, RunnerOutput(error_message=str(e))
+
+        return RunnerStatus.DONE, RunnerOutput()
+
+
+class ModelJobGroupSchedulerRunner(IRunnerV2):
+
+    @staticmethod
+    def _create_model_job_group_for_participants(model_job_group_id: int):
+        """Creates the model job group for the participants."""
+        with db.session_scope() as session:
+            group = session.query(ModelJobGroup).get(model_job_group_id)
+            ModelJobGroupController(session=session,
+                                    project_id=group.project_id).create_model_job_group_for_participants(
+                                        model_job_group_id=model_job_group_id)
+            session.commit()
+
+    def _schedule_model_job_group(self):
+        # For each coordinator-side model job group whose ticket status is APPROVED
+        # and whose create status is PENDING, create the group for the participants.
+        with db.session_scope() as session:
+            model_job_group_ids: List[Tuple[int]] = session.query(
+                ModelJobGroup.id).filter_by(role=ModelJobRole.COORDINATOR,
+                                            status=GroupCreateStatus.PENDING,
+                                            ticket_status=TicketStatus.APPROVED).all()
+        for model_job_group_id, *_ in model_job_group_ids:
+            self._create_model_job_group_for_participants(model_job_group_id=model_job_group_id)
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        try:
+            self._schedule_model_job_group()
+        except Exception as e:  # pylint: disable=broad-except
+            logging.exception('[ModelJobGroupScheduler] schedule model job group failed')
+            return RunnerStatus.FAILED, RunnerOutput(error_message=str(e))
+
+        return RunnerStatus.DONE, RunnerOutput()
+
+
+class ModelJobGroupLongPeriodScheduler(IRunnerV2):
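+    # Chains auto-update training jobs for groups whose auto update status is
+    # ACTIVE: each new job trains on the next data batch and loads the previous
+    # job's output model via set_load_model_name().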
+    @staticmethod
+    def _create_auto_update_model_job(model_job_group_id: int):
+        with db.session_scope() as session:
+            model_job = session.query(ModelJob).filter_by(auto_update=True, group_id=model_job_group_id).order_by(
+                ModelJob.created_at.desc()).limit(1).first()
+            if model_job is None:
+                group = session.query(ModelJobGroup).get(model_job_group_id)
+                logging.warning(f'There are no auto update model jobs in the model job group {group.name}')
+                return
+            if model_job.status not in [ModelJobStatus.SUCCEEDED]:
+                logging.warning(f'The status of the latest auto update model job {model_job.name} is not SUCCEEDED')
+                return
+            next_data_batch = BatchService(session).get_next_batch(model_job.data_batch)
+            if next_data_batch is None:
+                logging.warning(
+                    f'There is no next data batch after the data batch with name: {model_job.data_batch.name}')
+                return
+            group: ModelJobGroup = ModelJobGroupService(session).lock_and_update_version(model_job_group_id)
+            version = group.latest_version
+            model_job_name = f'{group.name}-v{version}'
+            # Load the model of the previous model job for the new training job
+            model_name = model_job.model_name()
+            if model_name is None:
+                model = session.query(Model).filter_by(name=model_job.name).first()
+                if model is None:
+                    raise Exception(f'model_job {model_job.name}\'s model is not found')
+                model_job.model_id = model.id
+                model_name = model.name
+            global_config = model_job.get_global_config()
+            if global_config is not None:
+                for config in global_config.global_config.values():
+                    set_load_model_name(config, model_name)
+            ModelJobService(session).create_model_job(name=model_job_name,
+                                                      uuid=resource_uuid(),
+                                                      role=ModelJobRole.COORDINATOR,
+                                                      model_job_type=model_job.model_job_type,
+                                                      algorithm_type=model_job.algorithm_type,
+                                                      global_config=global_config,
+                                                      group_id=model_job_group_id,
+                                                      project_id=model_job.project_id,
+                                                      data_batch_id=next_data_batch.id,
+                                                      comment=model_job.comment,
+                                                      version=version)
+            session.commit()
+
+    def _schedule_model_job_group(self):
+        # Filter model job groups whose auto update status is ACTIVE; the
+        # COORDINATOR creates the auto update model job for all participants.
+        with db.session_scope() as session:
+            model_job_group_ids: List[Tuple[int]] = session.query(ModelJobGroup.id).filter_by(
+                role=ModelJobRole.COORDINATOR, auto_update_status=GroupAutoUpdateStatus.ACTIVE).all()
+        for model_job_group_id, *_ in model_job_group_ids:
+            try:
+                self._create_auto_update_model_job(model_job_group_id=model_job_group_id)
+            except Exception:  # pylint: disable=broad-except
+                logging.exception(f'[ModelJobGroupLongPeriodScheduler] failed to create auto update model job for '
+                                  f'group id: {model_job_group_id}')
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        self._schedule_model_job_group()
+        return RunnerStatus.DONE, RunnerOutput()
diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/scheduler_test.py
new file mode 100644
index 000000000..deca67711
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/mmgr/scheduler_test.py
@@ -0,0 +1,511 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +# pylint: disable=protected-access +import grpc +import unittest +from datetime import datetime +from unittest.mock import patch, MagicMock, call, ANY +from google.protobuf.struct_pb2 import Value + +from testing.common import NoWebServerTestCase +from testing.rpc.client import FakeRpcError +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, DataBatch +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobRole, ModelJobStatus, ModelJobType, ModelJobGroup, \ + GroupCreateStatus, GroupAutoUpdateStatus, Model, AuthStatus +from fedlearner_webconsole.mmgr.scheduler import ModelJobSchedulerRunner, ModelJobGroupSchedulerRunner, \ + ModelJobGroupLongPeriodScheduler +from fedlearner_webconsole.composer.interface import RunnerStatus, RunnerContext +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, ModelJobConfig, AlgorithmProjectList +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, RunnerOutput +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus + + +class ModelJobSchedulerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='test') + participant = Participant(id=1, name='party', domain_name='fl-peer.com') + project_participant = ProjectParticipant(project_id=1, participant_id=1) + _insert_or_update_templates(session) + g1 = ModelJobGroup(id=1, name='g1', uuid='group-uuid') + m1 = ModelJob(id=1, + name='j1', + role=ModelJobRole.COORDINATOR, + status=ModelJobStatus.PENDING, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + dataset_id=1, + project_id=1) + m1.set_global_config(ModelJobGlobalConfig(global_config={'test': ModelJobConfig(algorithm_uuid='uuid')})) + m2 = ModelJob(id=2, + name='j2', + model_job_type=ModelJobType.TRAINING, + role=ModelJobRole.PARTICIPANT, + status=ModelJobStatus.PENDING) + m3 = ModelJob(id=3, + name='j3', + role=ModelJobRole.COORDINATOR, + status=ModelJobStatus.CONFIGURED, + project_id=1, + uuid='uuid', + group_id=1, + version=3, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL) + m3.set_global_config(ModelJobGlobalConfig(global_config={'test': ModelJobConfig(algorithm_uuid='uuid')})) + m4 = ModelJob(id=4, name='j4', role=ModelJobRole.PARTICIPANT, status=ModelJobStatus.CONFIGURED) + m5 = ModelJob(id=5, name='j5', role=ModelJobRole.PARTICIPANT, status=ModelJobStatus.RUNNING) + m6 = ModelJob(id=6, + name='j6', + role=ModelJobRole.COORDINATOR, + status=ModelJobStatus.PENDING, + model_job_type=ModelJobType.EVALUATION, + auth_status=AuthStatus.AUTHORIZED) + participants_info = ParticipantsInfo( + participants_map={ + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 
'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + m6.set_participants_info(participants_info) + m7 = ModelJob(id=7, + name='j7', + role=ModelJobRole.PARTICIPANT, + status=ModelJobStatus.RUNNING, + model_job_type=ModelJobType.PREDICTION, + auth_status=AuthStatus.AUTHORIZED) + participants_info = ParticipantsInfo( + participants_map={ + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + m7.set_participants_info(participants_info) + m8 = ModelJob(id=8, + name='j8', + role=ModelJobRole.PARTICIPANT, + status=ModelJobStatus.PENDING, + model_job_type=ModelJobType.EVALUATION, + auth_status=AuthStatus.PENDING) + participants_info = ParticipantsInfo( + participants_map={ + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + m8.set_participants_info(participants_info) + m9 = ModelJob(id=9, + name='j9', + role=ModelJobRole.PARTICIPANT, + status=ModelJobStatus.PENDING, + model_job_type=ModelJobType.PREDICTION, + auth_status=AuthStatus.AUTHORIZED) + participants_info = ParticipantsInfo( + participants_map={ + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + m9.set_participants_info(participants_info) + session.add_all([project, participant, project_participant, g1, m1, m2, m3, m4, m5, m6, m7, m8, m9]) + session.commit() + + @patch('fedlearner_webconsole.mmgr.model_job_configer.ModelJobConfiger.set_dataset') + @patch('fedlearner_webconsole.mmgr.service.ModelJobService._get_job') + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobConfiger') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_config_model_job(self, mock_system_info: MagicMock, mock_configer: MagicMock, mock_get_job: MagicMock, + mock_set_dataset: MagicMock): + mock_system_info.return_value = SystemInfo(pure_domain_name='test') + mock_get_job.return_value = Job(id=1, name='job') + instance = mock_configer.return_value + instance.get_config.return_value = WorkflowDefinition(job_definitions=[JobDefinition(name='nn-model')]) + scheduler = ModelJobSchedulerRunner() + scheduler._config_model_job(model_job_id=1) + mock_configer.assert_called_with(session=ANY, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + project_id=1) + instance.get_config.assert_called_with(dataset_id=1, + model_id=None, + model_job_config=ModelJobConfig(algorithm_uuid='uuid')) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + self.assertEqual(model_job.status, ModelJobStatus.CONFIGURED) + + def test_config_model_job_with_no_global_config(self): + with db.session_scope() as session: + workflow = Workflow(id=1, name='workflow', uuid='uuid', state=WorkflowState.RUNNING) + model_job = session.query(ModelJob).get(2) + model_job.workflow_uuid = 'uuid' + session.add(workflow) + session.commit() + scheduler = ModelJobSchedulerRunner() + scheduler._config_model_job(model_job_id=2) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(2) + self.assertEqual(model_job.status, ModelJobStatus.RUNNING) + + def test_check_model_job(self): + ModelJobSchedulerRunner._check_model_job(model_job_id=3) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(3) + self.assertEqual(model_job.status, ModelJobStatus.CONFIGURED) + workflow = 
Workflow(id=1, state=WorkflowState.READY) + model_job.workflow_id = 1 + session.add(workflow) + session.commit() + ModelJobSchedulerRunner._check_model_job(model_job_id=3) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(3) + self.assertEqual(model_job.status, ModelJobStatus.CONFIGURED) + workflow = session.query(Workflow).get(1) + workflow.state = WorkflowState.RUNNING + session.commit() + ModelJobSchedulerRunner._check_model_job(model_job_id=3) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(3) + self.assertEqual(model_job.status, ModelJobStatus.RUNNING) + workflow = session.query(Workflow).get(1) + workflow.state = WorkflowState.FAILED + session.commit() + ModelJobSchedulerRunner._check_model_job(model_job_id=3) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(3) + self.assertEqual(model_job.status, ModelJobStatus.FAILED) + + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobSchedulerRunner._check_model_job') + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobSchedulerRunner._config_model_job') + def test_schedule_model_job(self, mock_config: MagicMock, mock_check_job: MagicMock): + scheduler = ModelJobSchedulerRunner() + scheduler.schedule_model_job() + mock_config.assert_has_calls( + calls=[call( + model_job_id=1), call(model_job_id=2), + call(model_job_id=6), + call(model_job_id=9)]) + mock_check_job.assert_has_calls(calls=[call(model_job_id=3), call(model_job_id=4), call(model_job_id=5)]) + + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobSchedulerRunner.schedule_model_job') + def test_run(self, mock_schedule_model_job: MagicMock): + scheduler = ModelJobSchedulerRunner() + runner_input = RunnerInput() + runner_context = RunnerContext(index=0, input=runner_input) + runner_status, runner_output = scheduler.run(runner_context) + mock_schedule_model_job.assert_called() + mock_schedule_model_job.reset_mock() + self.assertEqual(runner_output, RunnerOutput()) + self.assertEqual(runner_status, RunnerStatus.DONE) + + def side_effect(): + raise Exception('haha') + + mock_schedule_model_job.side_effect = side_effect + scheduler = ModelJobSchedulerRunner() + runner_status, runner_output = scheduler.run(runner_context) + mock_schedule_model_job.assert_called() + self.assertEqual(runner_output, RunnerOutput(error_message='haha')) + self.assertEqual(runner_status, RunnerStatus.FAILED) + + +class ModelJobGroupSchedulerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='peer', domain_name='fl-peer.com') + project_participant = ProjectParticipant(id=1, project_id=1, participant_id=1) + dataset = Dataset(id=1, uuid='dataset_uuid', name='dataset') + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'algorithm-project-uuid1' + algorithm_project_list.algorithm_projects['peer'] = 'algorithm-project-uuid2' + group1 = ModelJobGroup(id=1, + project_id=1, + uuid='uuid1', + name='group1', + status=GroupCreateStatus.PENDING, + ticket_status=TicketStatus.PENDING) + group2 = ModelJobGroup(id=2, + project_id=1, + uuid='uuid2', + name='group2', + status=GroupCreateStatus.PENDING, + ticket_status=TicketStatus.APPROVED, + algorithm_type=AlgorithmType.NN_VERTICAL, + dataset_id=1, + role=ModelJobRole.COORDINATOR) + group2.set_algorithm_project_uuid_list(algorithm_project_list) + group3 = ModelJobGroup(id=3, + project_id=1, + 
uuid='uuid3', + name='group3', + status=GroupCreateStatus.PENDING, + ticket_status=TicketStatus.APPROVED, + algorithm_type=AlgorithmType.NN_HORIZONTAL, + dataset_id=1, + role=ModelJobRole.COORDINATOR) + group4 = ModelJobGroup(id=4, + project_id=1, + uuid='uuid4', + name='group', + status=GroupCreateStatus.PENDING, + ticket_status=TicketStatus.APPROVED, + role=ModelJobRole.PARTICIPANT) + group5 = ModelJobGroup(id=5, + project_id=1, + uuid='uuid5', + name='group5', + status=GroupCreateStatus.SUCCEEDED, + ticket_status=TicketStatus.APPROVED) + group6 = ModelJobGroup(id=6, + project_id=1, + uuid='uuid6', + name='group6', + status=GroupCreateStatus.FAILED, + ticket_status=TicketStatus.APPROVED) + session.add_all( + [project, participant, project_participant, dataset, group1, group2, group3, group4, group5, group6]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job_group') + def test_create_model_job_group_for_participants(self, mock_client: MagicMock): + scheduler = ModelJobGroupSchedulerRunner() + scheduler._create_model_job_group_for_participants(model_job_group_id=2) + algorithm_project_list = AlgorithmProjectList() + algorithm_project_list.algorithm_projects['test'] = 'algorithm-project-uuid1' + algorithm_project_list.algorithm_projects['peer'] = 'algorithm-project-uuid2' + mock_client.assert_called_with(name='group2', + uuid='uuid2', + algorithm_type=AlgorithmType.NN_VERTICAL, + dataset_uuid='dataset_uuid', + algorithm_project_list=algorithm_project_list) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(2) + self.assertEqual(group.status, GroupCreateStatus.SUCCEEDED) + mock_client.side_effect = FakeRpcError(grpc.StatusCode.INVALID_ARGUMENT, 'dataset with uuid is not found') + scheduler._create_model_job_group_for_participants(model_job_group_id=3) + mock_client.assert_called() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(3) + self.assertEqual(group.status, GroupCreateStatus.FAILED) + + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobGroupSchedulerRunner._create_model_job_group_for_participants') + def test_schedule_model_job_group(self, mock_create_model_job_group): + scheduler = ModelJobGroupSchedulerRunner() + scheduler._schedule_model_job_group() + mock_create_model_job_group.assert_has_calls(calls=[call(model_job_group_id=2), call(model_job_group_id=3)]) + + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobGroupSchedulerRunner._schedule_model_job_group') + def test_run(self, mock_schedule_model_job_group: MagicMock): + scheduler = ModelJobGroupSchedulerRunner() + runner_input = RunnerInput() + runner_context = RunnerContext(index=0, input=runner_input) + runner_status, runner_output = scheduler.run(runner_context) + mock_schedule_model_job_group.assert_called() + mock_schedule_model_job_group.reset_mock() + self.assertEqual(runner_output, RunnerOutput()) + self.assertEqual(runner_status, RunnerStatus.DONE) + + def side_effect(): + raise Exception('haha') + + mock_schedule_model_job_group.side_effect = side_effect + scheduler = ModelJobGroupSchedulerRunner() + runner_status, runner_output = scheduler.run(runner_context) + mock_schedule_model_job_group.assert_called() + self.assertEqual(runner_output, RunnerOutput(error_message='haha')) + self.assertEqual(runner_status, RunnerStatus.FAILED) + + +class ModelJobGroupLongPeriodSchedulerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + data_batch = 
DataBatch(id=1, + name='20220101-08', + dataset_id=1, + event_time=datetime(year=2000, month=1, day=1, hour=8), + latest_parent_dataset_job_stage_id=1) + group1 = ModelJobGroup(id=1, + name='group1', + role=ModelJobRole.COORDINATOR, + auto_update_status=GroupAutoUpdateStatus.ACTIVE) + group2 = ModelJobGroup(id=2, + name='group2', + role=ModelJobRole.COORDINATOR, + auto_update_status=GroupAutoUpdateStatus.INITIAL) + group3 = ModelJobGroup(id=3, + name='group3', + role=ModelJobRole.PARTICIPANT, + auto_update_status=GroupAutoUpdateStatus.ACTIVE) + group4 = ModelJobGroup(id=4, + name='group4', + role=ModelJobRole.COORDINATOR, + auto_update_status=GroupAutoUpdateStatus.ACTIVE) + group5 = ModelJobGroup(id=5, + name='group5', + role=ModelJobRole.COORDINATOR, + auto_update_status=GroupAutoUpdateStatus.ACTIVE, + latest_version=4) + model_job1 = ModelJob(id=1, + group_id=4, + auto_update=True, + created_at=datetime(2022, 12, 16, 1, 0, 0), + status=ModelJobStatus.SUCCEEDED, + data_batch_id=1) + model_job2 = ModelJob(id=2, + group_id=4, + auto_update=False, + created_at=datetime(2022, 12, 16, 2, 0, 0), + status=ModelJobStatus.SUCCEEDED) + model_job3 = ModelJob(id=3, + group_id=4, + auto_update=True, + created_at=datetime(2022, 12, 16, 3, 0, 0), + status=ModelJobStatus.RUNNING, + data_batch_id=1) + model_job4 = ModelJob(id=4, + group_id=5, + auto_update=True, + created_at=datetime(2022, 12, 16, 1, 0, 0), + status=ModelJobStatus.SUCCEEDED, + data_batch_id=1) + model_job5 = ModelJob(id=5, + group_id=5, + auto_update=False, + created_at=datetime(2022, 12, 16, 2, 0, 0), + status=ModelJobStatus.FAILED) + global_config = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': ModelJobConfig(algorithm_uuid='uuid1', variables=[Variable(name='load_model_name')]), + 'test2': ModelJobConfig(algorithm_uuid='uuid2', variables=[Variable(name='load_model_name')]) + }) + model_job6 = ModelJob(id=6, + name='model-job6', + group_id=5, + auto_update=True, + created_at=datetime(2022, 12, 16, 3, 0, 0), + status=ModelJobStatus.SUCCEEDED, + data_batch_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + project_id=1, + comment='comment', + version=3) + model_job6.set_global_config(global_config) + session.add_all([ + group1, group2, group3, group4, group5, model_job1, model_job2, model_job3, model_job4, model_job5, + model_job6, data_batch + ]) + session.commit() + + @patch('fedlearner_webconsole.mmgr.scheduler.resource_uuid') + @patch('fedlearner_webconsole.mmgr.service.ModelJobService.create_model_job') + @patch('fedlearner_webconsole.dataset.services.BatchService.get_next_batch') + def test_create_auto_update_model_job(self, mock_get_next_batch: MagicMock, mock_create_model_job: MagicMock, + mock_resource_uuid: MagicMock): + scheduler = ModelJobGroupLongPeriodScheduler() + # fail due to model job is None + scheduler._create_auto_update_model_job(model_job_group_id=1) + mock_create_model_job.assert_not_called() + # fail due to model job status is not SUCCEEDED + scheduler._create_auto_update_model_job(model_job_group_id=4) + mock_create_model_job.assert_not_called() + # fail due to next data batch is None + mock_get_next_batch.return_value = None + scheduler._create_auto_update_model_job(model_job_group_id=5) + mock_create_model_job.assert_not_called() + # create auto model job failed due to model_name is None + mock_get_next_batch.return_value = DataBatch(id=2) + mock_resource_uuid.return_value = 'uuid' + with 
self.assertRaises(Exception): + scheduler._create_auto_update_model_job(model_job_group_id=5) + with db.session_scope() as session: + model = Model(id=1, name='model-job6', uuid='uuid') + session.add(model) + session.commit() + scheduler._create_auto_update_model_job(model_job_group_id=5) + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='model-job6').first() + self.assertEqual(model_job.model_id, 1) + global_config = ModelJobGlobalConfig( + dataset_uuid='uuid', + global_config={ + 'test1': + ModelJobConfig(algorithm_uuid='uuid1', + variables=[ + Variable(name='load_model_name', + value='model-job6', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='model-job6')) + ]), + 'test2': + ModelJobConfig(algorithm_uuid='uuid2', + variables=[ + Variable(name='load_model_name', + value='model-job6', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='model-job6')) + ]) + }) + mock_create_model_job.assert_called_with(name='group5-v5', + uuid='uuid', + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + group_id=5, + project_id=1, + data_batch_id=2, + comment='comment', + version=5) + + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobGroupLongPeriodScheduler._create_auto_update_model_job') + def test_schedule_model_job_group(self, mock_create_auto_update_model_job: MagicMock): + scheduler = ModelJobGroupLongPeriodScheduler() + scheduler._schedule_model_job_group() + mock_create_auto_update_model_job.assert_has_calls( + calls=[call(model_job_group_id=1), + call(model_job_group_id=4), + call(model_job_group_id=5)]) + + @patch('fedlearner_webconsole.mmgr.scheduler.ModelJobGroupLongPeriodScheduler._schedule_model_job_group') + def test_run(self, mock_schedule_model_job_group: MagicMock): + scheduler = ModelJobGroupLongPeriodScheduler() + runner_input = RunnerInput() + runner_context = RunnerContext(index=0, input=runner_input) + runner_status, runner_output = scheduler.run(runner_context) + mock_schedule_model_job_group.assert_called() + mock_schedule_model_job_group.reset_mock() + self.assertEqual(runner_output, RunnerOutput()) + self.assertEqual(runner_status, RunnerStatus.DONE) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/service.py b/web_console_v2/api/fedlearner_webconsole/mmgr/service.py index 2f811c169..55dcc175d 100644 --- a/web_console_v2/api/fedlearner_webconsole/mmgr/service.py +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/service.py @@ -1,161 +1,474 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# -# coding: utf-8 - -import os -import json import logging -from fedlearner_webconsole.db import make_session_context +from typing import Optional +from sqlalchemy.orm import Session + +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException +from fedlearner_webconsole.mmgr.metrics.metrics_inquirer import tree_metrics_inquirer, nn_metrics_inquirer +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, ModelTrainingCronJobInput +from fedlearner_webconsole.proto.metrics_pb2 import ModelJobMetrics from fedlearner_webconsole.job.metrics import JobMetricsBuilder -from fedlearner_webconsole.job.models import Job, JobType, JobState, JobDefinition -from fedlearner_webconsole.job.yaml_formatter import generate_job_run_yaml -from fedlearner_webconsole.mmgr.models import Model, ModelType, ModelState -from fedlearner_webconsole.utils.k8s_cache import Event, EventType, ObjectType +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.algorithm.models import AlgorithmType, Algorithm, AlgorithmProject, Source +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelType, ModelJobGroup, ModelJobType, ModelJobRole, \ + ModelJobStatus, AuthStatus +from fedlearner_webconsole.mmgr.utils import deleted_name +from fedlearner_webconsole.mmgr.model_job_configer import get_sys_template_id, ModelJobConfiger +from fedlearner_webconsole.mmgr.utils import get_job_path, build_workflow_name, \ + is_model_job +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DataBatch +from fedlearner_webconsole.composer.composer_service import CronJobService +from fedlearner_webconsole.workflow.workflow_controller import create_ready_workflow +from fedlearner_webconsole.workflow.service import CreateNewWorkflowParams, WorkflowService +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.const import SYSTEM_WORKFLOW_CREATOR_USERNAME +from fedlearner_webconsole.utils.base_model import auth_model +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, ModelJobConfig, AlgorithmProjectList +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient -class ModelService: +def get_project(project_id: int, session: Session) -> Project: + project = session.query(Project).get(project_id) + if project is None: + raise NotFoundException(f'project {project_id} is not found') + return project + + +def get_dataset(dataset_id: int, session: Session) -> Dataset: + dataset = session.query(Dataset).get(dataset_id) + if dataset is None: + raise InvalidArgumentException(f'dataset {dataset_id} is not found') + return dataset + + +def get_model_job(project_id: int, model_job_id: int, session: Session) -> ModelJob: + model_job = session.query(ModelJob).filter_by(id=model_job_id, project_id=project_id).first() + if model_job is None: + raise NotFoundException(f'[Model]model job {model_job_id} is not found') + 
return model_job
+
+
+def get_model_job_group(project_id: int, group_id: int, session: Session) -> ModelJobGroup:
+    query = session.query(ModelJobGroup).filter_by(id=group_id, project_id=project_id)
+    group = query.first()
+    if group is None:
+        raise NotFoundException(f'[Model]model group {group_id} is not found')
+    return group
+
+
+def check_model_job_group(project_id: int, group_id: int, session: Session):
+    group = session.query(ModelJobGroup).filter_by(id=group_id, project_id=project_id).first()
+    if group is None:
+        raise NotFoundException(f'[Model]model group {group_id} is not found')
+
+
+def get_participant(participant_id: int, project: Project) -> Participant:
+    for participant in project.participants:
+        if participant.id == participant_id:
+            return participant
+    raise NotFoundException(f'participant {participant_id} is not found')
+
+
+def get_model(project_id: int, model_id: int, session: Session) -> Model:
+    model = session.query(Model).filter_by(project_id=project_id, id=model_id).first()
+    if model is None:
+        raise NotFoundException(f'[Model]model {model_id} is not found')
+    return model
+
+
+def get_algorithm(project_id: int, algorithm_id: int, session: Session) -> Optional[Algorithm]:
+    query = session.query(Algorithm)
+    if project_id:
+        # query algorithms under the project, plus preset algorithms whose project_id is null
+        query = query.filter((Algorithm.project_id == project_id) | (Algorithm.source == Source.PRESET))
+    algo = query.filter_by(id=algorithm_id).first()
+    return algo
+
+
+class ModelJobService:

    def __init__(self, session):
        self._session = session

-    job_type_map = {
-        JobType.NN_MODEL_TRANINING: ModelType.NN_MODEL.value,
-        JobType.NN_MODEL_EVALUATION: ModelType.NN_EVALUATION.value,
-        JobType.TREE_MODEL_TRAINING: ModelType.TREE_MODEL.value,
-        JobType.TREE_MODEL_EVALUATION: ModelType.TREE_EVALUATION.value
-    }
-
-    job_state_map = {
-        JobState.STARTED: ModelState.RUNNING.value,
-        JobState.COMPLETED: ModelState.SUCCEEDED.value,
-        JobState.FAILED: ModelState.FAILED.value,
-        JobState.STOPPED: ModelState.PAUSED.value,
-        JobState.WAITING: ModelState.WAITING.value
-    }
+    @staticmethod
+    def query_metrics(model_job: ModelJob, job: Optional[Job] = None) -> ModelJobMetrics:
+        job = job or model_job.job
+        builder = JobMetricsBuilder(job)
+        if model_job.algorithm_type == AlgorithmType.TREE_VERTICAL:
+            model_job_metrics = tree_metrics_inquirer.query(job, need_feature_importance=True)
+            if len(model_job_metrics.train) == 0 and len(model_job_metrics.eval) == 0:
+                # legacy metrics support
+                logging.info(f'use legacy tree model metrics, job name = {job.name}')
+                return builder.query_tree_metrics(need_feature_importance=True)
+            return model_job_metrics
+        if model_job.algorithm_type == AlgorithmType.NN_VERTICAL:
+            model_job_metrics = nn_metrics_inquirer.query(job)
+            if len(model_job_metrics.train) == 0 and len(model_job_metrics.eval) == 0:
+                # legacy metrics support
+                logging.info(f'use legacy nn model metrics, job name = {job.name}')
+                return builder.query_nn_metrics()
+            return model_job_metrics
+        if model_job.algorithm_type == AlgorithmType.NN_HORIZONTAL:
+            return builder.query_nn_metrics()
+        raise ValueError(f'invalid algorithm type {model_job.algorithm_type}')

    @staticmethod
-    def is_model_related_job(job):
-        job_type = job.job_type
-        if isinstance(job_type, int):
-            job_type = JobType(job.job_type)
-        return job_type in [
-            JobType.NN_MODEL_TRANINING, JobType.NN_MODEL_EVALUATION,
-            JobType.TREE_MODEL_TRAINING, JobType.TREE_MODEL_EVALUATION
-        ]
-
-    def k8s_watcher_hook(self, event: Event):
-        
logging.info('[ModelService][k8s_watcher_hook] %s %s: %s', event.obj_type, event.event_type, event.flapp_name) - if event.obj_type == ObjectType.FLAPP and event.event_type in [ - EventType.MODIFIED, EventType.DELETED - ]: - job = self._session.query(Job).filter_by( - name=event.flapp_name).one_or_none() - if not job: - return logging.warning('[ModelService][k8s_watcher_hook] job not found: %s', event.flapp_name) - if self.is_model_related_job(job): - self.on_job_update(job) - - def workflow_hook(self, job: Job): - if self.is_model_related_job(job): - self.create(job) - - def plot_metrics(self, model, job=None): - try: - return JobMetricsBuilder(job or model.job).plot_metrics() - except Exception as e: - return repr(e) - - def is_model_quiescence(self, state): - return state in [ - ModelState.SUCCEEDED.value, ModelState.FAILED.value, - ModelState.PAUSED.value - ] - - def on_job_update(self, job: Job): - logging.info('[ModelService][on_job_update] job name: %s', job.name) - model = self._session.query(Model).filter_by(job_name=job.name).one() - # see also `fedlearner_webconsole.job.models.Job.stop` - if job.state in self.job_state_map: - state = self.job_state_map[job.state] + def _get_job(workflow: Workflow) -> Optional[Job]: + for job in workflow.owned_jobs: + if is_model_job(job.job_type): + return job + return None + + def _create_model_job_for_participants(self, model_job: ModelJob): + project = self._session.query(Project).get(model_job.project_id) + group = self._session.query(ModelJobGroup).get(model_job.group_id) + global_config = model_job.get_global_config() + for participant in project.participants: + client = JobServiceClient.from_project_and_participant(participant.domain_name, project.name) + try: + client.create_model_job(name=model_job.name, + uuid=model_job.uuid, + group_uuid=group.uuid, + model_job_type=model_job.model_job_type, + algorithm_type=model_job.algorithm_type, + global_config=global_config, + version=model_job.version) + logging.info(f'[ModelJob] model job {model_job.id} is ready') + except Exception as e: # pylint: disable=broad-except + logging.exception('[ModelJob] creating model job for participants failed') + raise Exception(f'[ModelJob] creating model job for participants failed with detail {str(e)}') from e + + # TODO(hangweiqiang): ensure version is unique for training job under model job group + def create_model_job(self, + name: str, + uuid: str, + project_id: int, + role: ModelJobRole, + model_job_type: ModelJobType, + algorithm_type: AlgorithmType, + global_config: ModelJobGlobalConfig, + group_id: Optional[int] = None, + coordinator_id: Optional[int] = 0, + data_batch_id: Optional[int] = None, + version: Optional[int] = None, + comment: Optional[str] = None) -> ModelJob: + model_job = ModelJob(name=name, + uuid=uuid, + group_id=group_id, + project_id=project_id, + role=role, + model_job_type=model_job_type, + algorithm_type=algorithm_type, + coordinator_id=coordinator_id, + version=version, + comment=comment) + assert global_config.dataset_uuid != '', 'dataset uuid must not be empty' + dataset = self._session.query(Dataset).filter_by(uuid=global_config.dataset_uuid).first() + assert dataset is not None, f'dataset with uuid {global_config.dataset_uuid} is not found' + model_job.dataset_id = dataset.id + if data_batch_id is not None: # for auto update jobs + assert algorithm_type in [AlgorithmType.NN_VERTICAL],\ + 'auto update is only supported for nn vertical train' + dataset_job: DatasetJob = 
self._session.query(DatasetJob).filter_by(output_dataset_id=dataset.id).first()
+            assert dataset_job.kind != DatasetJobKind.RSA_PSI_DATA_JOIN,\
+                'auto update is not supported for RSA-PSI dataset'
+            data_batch: DataBatch = self._session.query(DataBatch).get(data_batch_id)
+            assert data_batch is not None, f'data batch {data_batch_id} is not found'
+            assert data_batch.is_available(), f'data batch {data_batch_id} is not available'
+            assert data_batch.latest_parent_dataset_job_stage is not None,\
+                f'latest parent dataset job stage of data batch {data_batch_id} is not found'
+            model_job.data_batch_id = data_batch_id
+            model_job.auto_update = True
+            if role in [ModelJobRole.COORDINATOR]:
+                global_config.dataset_job_stage_uuid = data_batch.latest_parent_dataset_job_stage.uuid
+        model_job.set_global_config(global_config)
+        self.initialize_auth_status(model_job)
+        # when the model job type is evaluation or prediction, bind the model referenced by model_uuid
+        if global_config.model_uuid != '':
+            model = self._session.query(Model).filter_by(uuid=global_config.model_uuid).first()
+            assert model is not None, f'model with uuid {global_config.model_uuid} is not found'
+            model_job.model_id = model.id
+            # for evaluation and prediction, the model job inherits the group id of the model
+            model_job.group_id = model.group_id
+        pure_domain_name = SettingService(session=self._session).get_system_info().pure_domain_name
+        model_job_config: ModelJobConfig = global_config.global_config.get(pure_domain_name)
+        assert model_job_config is not None, f'model_job_config of self domain name {pure_domain_name} must not be None'
+        if model_job_config.algorithm_uuid != '':
+            algorithm = self._session.query(Algorithm).filter_by(uuid=model_job_config.algorithm_uuid).first()
+            # algorithm is None if algorithm_uuid points to a published algorithm at the peer platform
+            if algorithm is not None:
+                model_job.algorithm_id = algorithm.id
+        # no need to create model jobs at participants when evaluating or predicting with a horizontal model
+        if model_job_type in [ModelJobType.TRAINING] and role in [ModelJobRole.COORDINATOR]:
+            self._create_model_job_for_participants(model_job)
+        if model_job_type in [ModelJobType.EVALUATION, ModelJobType.PREDICTION] and algorithm_type not in [
+                AlgorithmType.NN_HORIZONTAL
+        ] and role in [ModelJobRole.COORDINATOR]:
+            self._create_model_job_for_participants(model_job)
+        self._session.add(model_job)
+        return model_job
+
+    def config_model_job(self,
+                         model_job: ModelJob,
+                         config: WorkflowDefinition,
+                         create_workflow: bool,
+                         need_to_create_ready_workflow: Optional[bool] = False,
+                         workflow_uuid: Optional[str] = None):
+        workflow_name = build_workflow_name(model_job_type=model_job.model_job_type.name,
+                                            algorithm_type=model_job.algorithm_type.name,
+                                            model_job_name=model_job.name)
+        template_id = get_sys_template_id(self._session, model_job.algorithm_type, model_job.model_job_type)
+        if template_id is None:
+            raise ValueError(f'workflow template for {model_job.algorithm_type.name} not found')
+        workflow_comment = f'created by model_job {model_job.name}'
+        configer = ModelJobConfiger(session=self._session,
+                                    model_job_type=model_job.model_job_type,
+                                    algorithm_type=model_job.algorithm_type,
+                                    project_id=model_job.project_id)
+        configer.set_dataset(config=config, dataset_id=model_job.dataset_id, data_batch_id=model_job.data_batch_id)
+        if need_to_create_ready_workflow:
+            workflow = create_ready_workflow(
+                session=self._session,
+                name=workflow_name,
+                config=config,
+                project_id=model_job.project_id,
+                template_id=template_id,
+                uuid=workflow_uuid,
+                comment=workflow_comment,
+            )
+        elif create_workflow:
+            params = 
CreateNewWorkflowParams(project_id=model_job.project_id, template_id=template_id) + workflow = WorkflowService(self._session).create_workflow(name=workflow_name, + config=config, + params=params, + comment=workflow_comment, + uuid=workflow_uuid, + creator_username=SYSTEM_WORKFLOW_CREATOR_USERNAME) else: - return logging.warning( - '[ModelService][on_job_update] job state is %s', job.state) - if model.state != ModelState.RUNNING.value and state == ModelState.RUNNING.value: - logging.info( - '[ModelService][on_job_update] updating model(%d).version from %s to %s', - model.id, model.version, model.version + 1) - model.version += 1 - logging.info( - '[ModelService][on_job_update] updating model(%d).state from %s to %s', - model.id, model.state, state) - if self.is_model_quiescence(state): - model.metrics = json.dumps(self.plot_metrics(model, job)) - model.state = state - self._session.add(model) + workflow = self._session.query(Workflow).filter_by(uuid=model_job.workflow_uuid).first() + if workflow is None: + raise ValueError(f'workflow with uuid {model_job.workflow_uuid} not found') + workflow = WorkflowService(self._session).config_workflow(workflow=workflow, + template_id=template_id, + config=config, + comment=workflow_comment, + creator_username=SYSTEM_WORKFLOW_CREATOR_USERNAME) + self._session.flush() + model_job.workflow_id = workflow.id + model_job.workflow_uuid = workflow.uuid + job = self._get_job(workflow) + assert job is not None, 'model job not found in workflow' + model_job.job_name = job.name + model_job.job_id = job.id + model_job.status = ModelJobStatus.CONFIGURED + self._session.flush() + + def update_model_job_status(self, model_job: ModelJob): + workflow = self._session.query(Workflow).filter_by(uuid=model_job.workflow_uuid).first() + if workflow: + if workflow.state in [WorkflowState.RUNNING]: + model_job.status = ModelJobStatus.RUNNING + if workflow.state in [WorkflowState.STOPPED]: + model_job.status = ModelJobStatus.STOPPED + if workflow.state in [WorkflowState.COMPLETED]: + model_job.status = ModelJobStatus.SUCCEEDED + if workflow.state in [WorkflowState.FAILED]: + model_job.status = ModelJobStatus.FAILED + + def initialize_auth_status(self, model_job: ModelJob): + pure_domain_name = SettingService(self._session).get_system_info().pure_domain_name + participants = ParticipantService(self._session).get_participants_by_project(model_job.project_id) + # 1. default all authorized when model job type is training + # 2. default all authorized when algorithm type is nn_horizontal and model job type is evaluation or prediction + # 3. 
set coordinator authorized when algorithm type is not nn_horizontal and model job type is evaluation or + # prediction + participants_info = ParticipantsInfo(participants_map={ + p.pure_domain_name(): ParticipantInfo(auth_status=AuthStatus.PENDING.name) for p in participants + }) + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.PENDING.name + if model_job.model_job_type in [ModelJobType.TRAINING + ] or model_job.algorithm_type in [AlgorithmType.NN_HORIZONTAL]: + participants_info = ParticipantsInfo(participants_map={ + p.pure_domain_name(): ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) for p in participants + }) + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.AUTHORIZED.name + model_job.auth_status = AuthStatus.AUTHORIZED + elif model_job.role in [ModelJobRole.COORDINATOR]: + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.AUTHORIZED.name + model_job.auth_status = AuthStatus.AUTHORIZED + model_job.set_participants_info(participants_info) + + @staticmethod + def update_model_job_auth_status(model_job: ModelJob, auth_status: AuthStatus): + model_job.auth_status = auth_status + participants_info = model_job.get_participants_info() + pure_domain_name = SettingService.get_system_info().pure_domain_name + participants_info.participants_map[pure_domain_name].auth_status = auth_status.name + model_job.set_participants_info(participants_info) + + def delete(self, job_id: int): + model_job: ModelJob = self._session.query(ModelJob).get(job_id) + model_job.deleted_at = now() + model_job.name = deleted_name(model_job.name) + if model_job.output_model is not None: + ModelService(self._session).delete(model_job.output_model.id) - def create(self, job: Job, parent_job_name=None, group_id=0): - logging.info('[ModelService][create] create model %s', job.name) - model = Model() - model.name = job.name # TODO allow rename by end-user - model.type = self.job_type_map[job.job_type] - model.state = ModelState.COMMITTING.value - model.job_name = job.name - if parent_job_name: - parent = self._session.query(Model).filter_by( - job_name=parent_job_name).one_or_none() - if not parent: - return parent - model.version = parent.version - model.parent_id = parent.id - model.params = json.dumps({}) - model.group_id = group_id - model.state = ModelState.COMMITTED.value - self._session.add(model) - self._session.commit() - return model - # `detail_level` is a comma separated string list - # contains `metrics` if `plot_metrics` result is - def query(self, model_id, detail_level=''): - model = self._session.query(Model).filter_by(id=model_id).one_or_none() - if not model: - return model - detail_level = detail_level.split(',') - model_json = model.to_dict() - model_json['detail_level'] = detail_level - if 'metrics' in detail_level: - if self.is_model_quiescence(model) and model.metrics: - model_json['metrics'] = json.loads(model.metrics) - else: model_json['metrics'] = self.plot_metrics(model) - return model_json - - def drop(self, model_id): - model = self._session.query(Model).filter_by(id=model_id).one_or_none() - if not model: - return model - if model.state not in [ - ModelState.SUCCEEDED.value, ModelState.FAILED.value - ]: # FIXME atomicity - raise Exception( - f'cannot delete model when model.state is {model.state}') - # model.state = ModelState.DROPPING.value - # TODO remove model files from NFS et al. 
- model.state = ModelState.DROPPED.value +class ModelService: + + def __init__(self, session: Session): + self._session = session + + def create_model_from_model_job(self, model_job: ModelJob): + name = f'{model_job.group.name}-v{model_job.version}' + model_type = ModelType.NN_MODEL + if model_job.algorithm_type == AlgorithmType.TREE_VERTICAL: + model_type = ModelType.TREE_MODEL + model = Model(name=name, + uuid=model_job.uuid, + version=model_job.version, + model_type=model_type, + algorithm_type=model_job.algorithm_type, + project_id=model_job.project_id, + job_id=model_job.job_id, + group_id=model_job.group_id, + model_job_id=model_job.id) + storage_root_dir = model_job.project.get_storage_root_path(None) + if storage_root_dir is None: + logging.warning(f'[ModelService] storage root of project {model_job.project.name} is None') + raise RuntimeError(f'storage root of project {model_job.project.name} is None') + model.model_path = get_job_path(storage_root_dir, model_job.job.name) self._session.add(model) - self._session.commit() + + def delete(self, model_id: int): + model: Model = self._session.query(Model).get(model_id) + model.deleted_at = now() + model.name = deleted_name(model.name) + + +class ModelJobGroupService: + + def __init__(self, session: Session): + self._session = session + + def launch_model_job(self, group: ModelJobGroup, name: str, uuid: str, version: int) -> ModelJob: + model_job = ModelJob( + name=name, + uuid=uuid, + group_id=group.id, + project_id=group.project_id, + model_job_type=ModelJobType.TRAINING, + algorithm_type=group.algorithm_type, + algorithm_id=group.algorithm_id, + dataset_id=group.dataset_id, + version=version, + ) + self._session.add(model_job) + self._session.flush() + ModelJobService(self._session).config_model_job(model_job, + group.get_config(), + create_workflow=False, + need_to_create_ready_workflow=True, + workflow_uuid=model_job.uuid) + group.latest_version = version + self._session.flush() + return model_job + + def delete(self, group_id: int): + group: ModelJobGroup = self._session.query(ModelJobGroup).get(group_id) + group.name = deleted_name(group.name) + group.deleted_at = now() + job_service = ModelJobService(self._session) + for job in group.model_jobs: + job_service.delete(job.id) + + def lock_and_update_version(self, group_id: int) -> ModelJobGroup: + group: ModelJobGroup = self._session.query(ModelJobGroup).populate_existing().with_for_update().get(group_id) + # use exclusive lock to ensure version is unique and increasing. + # since 2PC has its own db transaction, and the latest_version of group should be updated in service, + # to avoid lock conflict, the latest_version is updated and lock is released, + # and the version is passed to 2PC transaction. 
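+        # Illustrative caller pattern (a sketch mirroring ModelJobGroupLongPeriodScheduler
+        # above; not part of this method's contract):
+        #     group = ModelJobGroupService(session).lock_and_update_version(group_id)
+        #     ModelJobService(session).create_model_job(..., version=group.latest_version)
+        #     session.commit()  # persists the bumped version and releases the row lock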
+ group.latest_version = group.latest_version + 1 + return group + + def update_cronjob_config(self, group: ModelJobGroup, cron_config: str): + """Update model training cron job config + + Args: + group: group for updating cron config + cron_config: cancel cron job if cron config is empty string; create + or update cron job if cron config is valid + """ + item_name = f'model_training_cron_job_{group.id}' + group.cron_config = cron_config + if cron_config: + runner_input = RunnerInput(model_training_cron_job_input=ModelTrainingCronJobInput(group_id=group.id)) + items = [(ItemType.MODEL_TRAINING_CRON_JOB, runner_input)] + CronJobService(self._session).start_cronjob(item_name=item_name, items=items, cron_config=cron_config) + else: + CronJobService(self._session).stop_cronjob(item_name=item_name) + + def create_group(self, name: str, uuid: str, project_id: int, role: ModelJobRole, dataset_id: int, + algorithm_type: AlgorithmType, algorithm_project_list: AlgorithmProjectList, + coordinator_id: int) -> ModelJobGroup: + dataset = self._session.query(Dataset).get(dataset_id) + assert dataset is not None, f'dataset with id {dataset_id} is not found' + group = ModelJobGroup(name=name, + uuid=uuid, + role=role, + project_id=project_id, + dataset_id=dataset_id, + algorithm_type=algorithm_type, + coordinator_id=coordinator_id) + group.set_algorithm_project_uuid_list(algorithm_project_list) + pure_domain_name = SettingService(session=self._session).get_system_info().pure_domain_name + algorithm_project_uuid = algorithm_project_list.algorithm_projects.get(pure_domain_name) + if algorithm_project_uuid is None and algorithm_type != AlgorithmType.TREE_VERTICAL: + raise Exception(f'algorithm project uuid must be given if algorithm type is {algorithm_type.name}') + if algorithm_project_uuid is not None: + algorithm_project = self._session.query(AlgorithmProject).filter_by(uuid=algorithm_project_uuid).first() + # algorithm project is none if uuid points to a published algorithm at the peer platform + if algorithm_project is not None: + group.algorithm_project_id = algorithm_project.id + self._session.add(group) + return group + + def get_latest_model_from_model_group(self, model_group_id: int) -> Model: + model = self._session.query(Model).filter_by(group_id=model_group_id).order_by(Model.version.desc()).first() + if model is None: + raise InvalidArgumentException(f'model in group {model_group_id} is not found') return model - def get_checkpoint_path(self, job): - return None + def initialize_auth_status(self, group: ModelJobGroup): + # set auth status map + pure_domain_name = SettingService(self._session).get_system_info().pure_domain_name + participants = ParticipantService(self._session).get_participants_by_project(group.project_id) + participants_info = ParticipantsInfo(participants_map={ + p.pure_domain_name(): ParticipantInfo(auth_status=AuthStatus.PENDING.name) for p in participants + }) + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.AUTHORIZED.name + group.set_participants_info(participants_info) + # compatible with older versions of auth status + group.authorized = True + group.auth_status = auth_model.AuthStatus.AUTHORIZED diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/service_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/service_test.py new file mode 100644 index 000000000..0bcd6fe32 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/service_test.py @@ -0,0 +1,805 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +import unittest +from unittest.mock import patch, Mock +from google.protobuf.struct_pb2 import Value +from google.protobuf.empty_pb2 import Empty + +from testing.common import NoWebServerTestCase +from testing.rpc.client import FakeRpcError +from fedlearner_webconsole.db import db +from fedlearner_webconsole.initial_db import _insert_or_update_templates +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobGroup, ModelJobType, Model, ModelJobRole, \ + ModelJobStatus, AuthStatus +from fedlearner_webconsole.mmgr.service import ModelJobService, ModelJobGroupService, ModelService +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.algorithm.models import AlgorithmType, Algorithm, AlgorithmProject +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobState, DatasetJobKind, DatasetType, \ + DataBatch, DatasetJobStage +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, ModelJobConfig, AlgorithmProjectList +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, ModelTrainingCronJobInput +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo + + +class ModelJobServiceTest(NoWebServerTestCase): + + def setUp(self) -> None: + + super().setUp() + with db.session_scope() as session: + _insert_or_update_templates(session) + dataset_job = DatasetJob(id=1, + name='datasetjob', + uuid='uuid', + state=DatasetJobState.SUCCEEDED, + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.OT_PSI_DATA_JOIN) + dataset = Dataset(id=2, + uuid='uuid', + name='datasetjob', + dataset_type=DatasetType.PSI, + path='/data/dataset/haha') + dataset_rsa = Dataset(id=3, + uuid='uuid_rsa', + name='dataset_rsa', + dataset_type=DatasetType.PSI, + path='/data/dataset/haha') + dataset_job_rsa = DatasetJob(id=2, + name='dataset_job_rsa', + uuid='uuid_rsa', + state=DatasetJobState.SUCCEEDED, + project_id=1, + input_dataset_id=1, + output_dataset_id=3, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN) + dataset_job_stage = DatasetJobStage(id=1, + uuid='uuid', + name='dataset_job_stage_1', + project_id=1, + dataset_job_id=1, + data_batch_id=1, + state=DatasetJobState.SUCCEEDED) + data_batch = DataBatch(id=1, + name='data_batch_1', + dataset_id=dataset.id, + latest_parent_dataset_job_stage_id=1) + model_job = ModelJob(name='test-model-job', + 
model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.TREE_VERTICAL, + dataset_id=2, + model_id=1, + project_id=1, + workflow_uuid='test-uuid') + algorithm = Algorithm(id=1, name='algo', uuid='uuid', project_id=1) + project = Project(id=1, name='project') + model_job_group = ModelJobGroup(id=1, name='model_job_group', uuid='uuid') + participant1 = Participant(id=1, name='demo1', domain_name='fl-demo1.com') + participant2 = Participant(id=2, name='demo2', domain_name='fl-demo2.com') + project_part1 = ProjectParticipant(id=1, project_id=1, participant_id=1) + project_part2 = ProjectParticipant(id=2, project_id=1, participant_id=2) + session.add_all([ + model_job, dataset, dataset_job, algorithm, project, participant1, participant2, project_part1, + project_part2, model_job_group, dataset_rsa, dataset_job_rsa, dataset_job_stage, data_batch + ]) + session.commit() + + @staticmethod + def _get_workflow_config(): + return WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[ + Variable(name='mode', value='train'), + Variable(name='data_source'), + Variable(name='data_path'), + Variable(name='file_wildcard'), + ], + yaml_template='{}') + ]) + + def test_config_model_job_create_workflow(self): + config = self._get_workflow_config() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + ModelJobService(session).config_model_job(model_job, + config=config, + create_workflow=True, + workflow_uuid='test-uuid') + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + workflow = session.query(Workflow).filter_by(uuid='test-uuid').first() + self.assertEqual(workflow.creator, 's_y_s_t_e_m') + self.assertEqual(model_job.job_name, 'test-uuid-train-job') + self.assertEqual(model_job.job_id, workflow.owned_jobs[0].id) + self.assertEqual(model_job.workflow.template.name, 'sys-preset-tree-model') + self.assertEqual( + model_job.workflow.get_config(), + WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[ + Variable(name='mode', value='train'), + Variable(name='data_source', + value='', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='')), + Variable(name='data_path', + value='/data/dataset/haha/batch', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='/data/dataset/haha/batch')), + Variable(name='file_wildcard', + value='**/part*', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='**/part*')), + ], + yaml_template='{}') + ])) + + def test_config_model_job_not_create_workflow(self): + config = self._get_workflow_config() + with db.session_scope() as session: + workflow = Workflow(name='test-workflow', uuid='test-uuid', state=WorkflowState.NEW, project_id=1) + session.add(workflow) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + ModelJobService(session).config_model_job(model_job, + config=config, + create_workflow=False, + workflow_uuid='test-uuid') + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + workflow = session.query(Workflow).filter_by(name='test-workflow').first() + self.assertEqual(workflow.creator, 's_y_s_t_e_m') + 
self.assertEqual(model_job.job_name, 'test-uuid-train-job') + self.assertEqual(model_job.job_id, workflow.owned_jobs[0].id) + self.assertEqual(model_job.workflow.template.name, 'sys-preset-tree-model') + self.assertEqual( + model_job.workflow.get_config(), + WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=JobDefinition.JobType.TREE_MODEL_TRAINING, + variables=[ + Variable(name='mode', value='train'), + Variable(name='data_source', + value='', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='')), + Variable(name='data_path', + value='/data/dataset/haha/batch', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='/data/dataset/haha/batch')), + Variable(name='file_wildcard', + value='**/part*', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='**/part*')), + ], + yaml_template='{}') + ])) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_create_model_job(self, mock_get_system_info, mock_create_model_job): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='test') + # fail due to dataset uuid is None + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig() + with self.assertRaises(AssertionError, msg='dataset uuid must not be None'): + service.create_model_job(name='name', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config) + # fail due to dataset is not found + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid1') + with self.assertRaises(AssertionError, msg='dataset with uuid uuid1 is not found'): + service.create_model_job(name='name', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config) + # fail due to domain name in model_job_config is None + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid') + with self.assertRaises(AssertionError, msg='model_job_config of self domain name test must not be None'): + service.create_model_job(name='name', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config) + # create model job when role is participant and model_job_type is TRAINING + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={'test': ModelJobConfig(algorithm_uuid='uuid')}) + service.create_model_job(name='model_job_1', + uuid='uuid-1', + group_id=2, + project_id=3, + coordinator_id=1, + role=ModelJobRole.PARTICIPANT, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + version=3) + session.commit() + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='model_job_1').first() + self.assertEqual(model_job.model_job_type, ModelJobType.TRAINING) + 
self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(model_job.dataset_id, 2) + self.assertEqual(model_job.get_global_config(), global_config) + self.assertEqual(model_job.algorithm_id, 1) + self.assertEqual(model_job.version, 3) + self.assertEqual(model_job.group_id, 2) + self.assertEqual(model_job.project_id, 3) + self.assertEqual(model_job.coordinator_id, 1) + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo(participants_map={'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name)})) + self.assertEqual(model_job.status, ModelJobStatus.PENDING) + self.assertEqual(model_job.auto_update, False) + # create model job when role is coordinator and model_job_type is EVALUATION + with db.session_scope() as session: + service = ModelJobService(session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test': ModelJobConfig(algorithm_uuid='uuid'), + 'demo1': ModelJobConfig(algorithm_uuid='uuid'), + 'demo2': ModelJobConfig(algorithm_uuid='uuid') + }) + mock_create_model_job.side_effect = [Empty(), Empty()] + service.create_model_job(name='model_job_2', + uuid='uuid-2', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.EVALUATION, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + version=3) + session.commit() + mock_create_model_job.assert_called() + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='model_job_2').first() + self.assertEqual(model_job.model_job_type, ModelJobType.EVALUATION) + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + })) + self.assertEqual(model_job.status, ModelJobStatus.PENDING) + self.assertEqual(model_job.coordinator_id, 0) + self.assertEqual(model_job.auto_update, False) + # create model job when role is participant and model_job_type is EVALUATION + mock_create_model_job.reset_mock() + with db.session_scope() as session: + service = ModelJobService(session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test': ModelJobConfig(algorithm_uuid='uuid'), + 'demo1': ModelJobConfig(algorithm_uuid='uuid'), + 'demo2': ModelJobConfig(algorithm_uuid='uuid') + }) + mock_create_model_job.side_effect = [Empty(), Empty()] + service.create_model_job(name='model_job_5', + uuid='uuid-5', + group_id=1, + project_id=1, + role=ModelJobRole.PARTICIPANT, + model_job_type=ModelJobType.EVALUATION, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + version=3) + session.commit() + mock_create_model_job.assert_not_called() + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='model_job_5').first() + self.assertEqual(model_job.auth_status, AuthStatus.PENDING) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo2': 
ParticipantInfo(auth_status=AuthStatus.PENDING.name) + })) + # create eval horizontal model job when role is coordinator + mock_create_model_job.reset_mock() + with db.session_scope() as session: + service = ModelJobService(session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test': ModelJobConfig(algorithm_uuid='uuid'), + 'demo1': ModelJobConfig(algorithm_uuid='uuid'), + 'demo2': ModelJobConfig(algorithm_uuid='uuid') + }) + service.create_model_job(name='model_job_3', + uuid='uuid-3', + project_id=1, + group_id=None, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.EVALUATION, + algorithm_type=AlgorithmType.NN_HORIZONTAL, + global_config=global_config, + version=4) + session.commit() + mock_create_model_job.assert_not_called() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='model_job_3').first() + self.assertEqual(model_job.model_job_type, ModelJobType.EVALUATION) + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_HORIZONTAL) + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + # create predict horizontal model job when role is coordinator + mock_create_model_job.reset_mock() + with db.session_scope() as session: + service = ModelJobService(session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test': ModelJobConfig(algorithm_uuid='uuid'), + 'demo1': ModelJobConfig(algorithm_uuid='uuid'), + 'demo2': ModelJobConfig(algorithm_uuid='uuid') + }) + service.create_model_job(name='model_job_4', + uuid='uuid-4', + project_id=1, + group_id=None, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.PREDICTION, + algorithm_type=AlgorithmType.NN_HORIZONTAL, + global_config=global_config, + version=4) + session.commit() + mock_create_model_job.assert_not_called() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='model_job_4').first() + self.assertEqual(model_job.model_job_type, ModelJobType.PREDICTION) + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_HORIZONTAL) + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + # fail due to grpc error + with db.session_scope() as session: + service = ModelJobService(session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={ + 'test': ModelJobConfig(algorithm_uuid='uuid'), + 'demo1': ModelJobConfig(algorithm_uuid='uuid'), + 'demo2': ModelJobConfig(algorithm_uuid='uuid') + }) + mock_create_model_job.side_effect = [ + Empty(), FakeRpcError(grpc.StatusCode.UNIMPLEMENTED, 'rpc not implemented') + ] + with self.assertRaises(Exception): + service.create_model_job(name='model_job_2', + uuid='uuid-2', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + version=3) + + def test_update_model_job_status(self): + with db.session_scope() as session: + workflow = Workflow(id=1, uuid='test-uuid', state=WorkflowState.NEW) + session.add(workflow) + session.commit() + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + ModelJobService(session).update_model_job_status(model_job) + self.assertEqual(model_job.status, ModelJobStatus.PENDING) + workflow = session.query(Workflow).filter_by(uuid='test-uuid').first() + workflow.state = WorkflowState.RUNNING + ModelJobService(session).update_model_job_status(model_job) + self.assertEqual(model_job.status, ModelJobStatus.RUNNING) + 
workflow.state = WorkflowState.STOPPED + ModelJobService(session).update_model_job_status(model_job) + self.assertEqual(model_job.status, ModelJobStatus.STOPPED) + workflow.state = WorkflowState.COMPLETED + ModelJobService(session).update_model_job_status(model_job) + self.assertEqual(model_job.status, ModelJobStatus.SUCCEEDED) + workflow.state = WorkflowState.FAILED + ModelJobService(session).update_model_job_status(model_job) + self.assertEqual(model_job.status, ModelJobStatus.FAILED) + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_initialize_auth_status(self, mock_system_info): + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + ModelJobService(session).initialize_auth_status(model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + model_job.algorithm_type = AlgorithmType.NN_HORIZONTAL + ModelJobService(session).initialize_auth_status(model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + model_job.algorithm_type = AlgorithmType.TREE_VERTICAL + model_job.model_job_type = ModelJobType.EVALUATION + ModelJobService(session).initialize_auth_status(model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + })) + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + model_job.role = ModelJobRole.COORDINATOR + ModelJobService(session).initialize_auth_status(model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + 
self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'demo1': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo2': ParticipantInfo(auth_status=AuthStatus.PENDING.name) + })) + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_update_model_job_auth_status(self, mock_get_system_info): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='test') + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + ModelJobService.update_model_job_auth_status(model_job, AuthStatus.AUTHORIZED) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + self.assertEqual(model_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo(participants_map={'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name)})) + ModelJobService.update_model_job_auth_status(model_job, AuthStatus.PENDING) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).filter_by(name='test-model-job').first() + self.assertEqual(model_job.auth_status, AuthStatus.PENDING) + self.assertEqual( + model_job.get_participants_info(), + ParticipantsInfo(participants_map={'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name)})) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_model_job') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_create_auto_update_model_job(self, mock_get_system_info, mock_create_model_job): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='test') + # fail due to algorithm type not supported + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid') + with self.assertRaises(AssertionError, msg='auto update is only supported for nn vertical train'): + service.create_model_job(name='name', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_HORIZONTAL, + global_config=global_config, + data_batch_id=1) + # fail due to dataset job type not supported + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid_rsa') + with self.assertRaises(AssertionError, msg='auto update is not supported for RSA-PSI dataset'): + service.create_model_job(name='name', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + data_batch_id=1) + # fail due to data batch is not found + with db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid') + with self.assertRaises(AssertionError, msg='data batch 2 is not found'): + service.create_model_job(name='name', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + data_batch_id=2) + # create success + with 
db.session_scope() as session: + service = ModelJobService(session=session) + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', + global_config={'test': ModelJobConfig(algorithm_uuid='uuid')}) + service.create_model_job(name='model_job_1', + uuid='uuid', + group_id=1, + project_id=1, + role=ModelJobRole.COORDINATOR, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=global_config, + data_batch_id=1) + session.commit() + mock_create_model_job.assert_called() + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).filter_by(name='model_job_1').first() + self.assertEqual(model_job.data_batch_id, 1) + self.assertEqual(model_job.auto_update, True) + global_config.dataset_job_stage_uuid = 'uuid' + self.assertEqual(model_job.get_global_config(), global_config) + + +class ModelServiceTest(NoWebServerTestCase): + + _MODEL_NAME = 'test-model' + _PROJECT_ID = 123 + _GROUP_ID = 123 + _MODEL_JOB_ID = 123 + _JOB_ID = 123 + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test-project') + session.add(project) + session.flush() + workflow = Workflow(name='test-workflow', project_id=project.id) + session.add(workflow) + session.flush() + workflow = Workflow(id=1, name='workflow', uuid='uuid', project_id=project.id) + job = Job(id=self._JOB_ID, + name='uuid-nn-model', + project_id=project.id, + job_type=JobType.NN_MODEL_TRANINING, + state=JobState.COMPLETED, + workflow_id=workflow.id) + job.set_config(JobDefinition(name='nn-model')) + session.add(job) + group = ModelJobGroup(id=self._GROUP_ID, name='test-group', project_id=project.id) + session.add(group) + session.flush() + model_job = ModelJob(id=self._MODEL_JOB_ID, + name='test-model-job', + uuid='test-uuid', + algorithm_type=AlgorithmType.NN_VERTICAL, + model_job_type=ModelJobType.NN_TRAINING, + group_id=group.id, + project_id=project.id, + job_name=job.name, + job_id=job.id, + version=2) + session.add(model_job) + session.commit() + + @patch('fedlearner_webconsole.project.models.Project.get_storage_root_path') + def test_create_model_from_model_job(self, mock_get_storage_root_path): + mock_get_storage_root_path.return_value = '/data' + with db.session_scope() as session: + service = ModelService(session) + job = session.query(Job).get(self._JOB_ID) + model_job = session.query(ModelJob).get(self._MODEL_JOB_ID) + service.create_model_from_model_job(model_job=model_job) + session.commit() + with db.session_scope() as session: + model_job = session.query(ModelJob).get(self._MODEL_JOB_ID) + model: Model = session.query(Model).filter_by(uuid=model_job.uuid).first() + self.assertEqual(model.name, 'test-group-v2') + self.assertEqual(model.job_id, job.id) + self.assertEqual(model.project_id, self._PROJECT_ID) + self.assertEqual(model.model_path, '/data/job_output/uuid-nn-model') + self.assertEqual(model.model_job_id, model_job.id) + self.assertEqual(model.group_id, model_job.group_id) + + mock_get_storage_root_path.return_value = None + with self.assertRaises(RuntimeError, msg='storage root of project test-project is None') as cm: + with db.session_scope() as session: + service = ModelService(session) + model_job = session.query(ModelJob).get(self._MODEL_JOB_ID) + service.create_model_from_model_job(model_job=model_job) + + +class ModelJobGroupServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + dataset = Dataset(id=1, name='name', uuid='uuid') 
+ project = Project(id=1, name='project') + group = ModelJobGroup(id=1, name='group', project_id=1) + algorithm_project = AlgorithmProject(id=1, name='name', uuid='algo-uuid') + session.add_all([dataset, project, group, algorithm_project]) + session.commit() + + @patch('fedlearner_webconsole.composer.composer_service.CronJobService.start_cronjob') + @patch('fedlearner_webconsole.composer.composer_service.CronJobService.stop_cronjob') + def test_update_cronjob_config(self, mock_stop_cronjob: Mock, mock_start_cronjob: Mock): + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).get(1) + ModelJobGroupService(session).update_cronjob_config(group, '') + self.assertEqual(group.cron_config, '') + mock_start_cronjob.assert_not_called() + mock_stop_cronjob.assert_called_once_with(item_name='model_training_cron_job_1') + mock_stop_cronjob.reset_mock() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + ModelJobGroupService(session).update_cronjob_config(group, '*/10 * * * *') + self.assertEqual(group.cron_config, '*/10 * * * *') + mock_start_cronjob.assert_called_once_with( + item_name='model_training_cron_job_1', + items=[(ItemType.MODEL_TRAINING_CRON_JOB, + RunnerInput(model_training_cron_job_input=ModelTrainingCronJobInput(group_id=1)))], + cron_config='*/10 * * * *') + mock_stop_cronjob.assert_not_called() + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda _: SystemInfo(pure_domain_name='test')) + def test_create_group(self): + with db.session_scope() as session: + service = ModelJobGroupService(session) + with self.assertRaises(AssertionError, msg='dataset with id 2 is not found'): + service.create_group(name='name', + uuid='uuid', + project_id=1, + role=ModelJobRole.COORDINATOR, + dataset_id=2, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_project_list=AlgorithmProjectList(), + coordinator_id=1) + with db.session_scope() as session: + service = ModelJobGroupService(session) + with self.assertRaises(Exception, msg='algorithm project must be given if algorithm type is NN_VERTICAL'): + service.create_group(name='name', + uuid='uuid', + project_id=1, + role=ModelJobRole.COORDINATOR, + dataset_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_project_list=AlgorithmProjectList(), + coordinator_id=1) + with db.session_scope() as session: + service = ModelJobGroupService(session) + service.create_group(name='name', + uuid='uuid', + project_id=1, + role=ModelJobRole.COORDINATOR, + dataset_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_project_list=AlgorithmProjectList(algorithm_projects={'test': 'algo-uuid'}), + coordinator_id=1) + session.commit() + with db.session_scope() as session: + group: ModelJobGroup = session.query(ModelJobGroup).filter_by(name='name').first() + self.assertEqual(group.name, 'name') + self.assertEqual(group.project_id, 1) + self.assertEqual(group.role, ModelJobRole.COORDINATOR) + self.assertEqual(group.dataset_id, 1) + self.assertEqual(group.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(group.algorithm_project_id, 1) + self.assertEqual(group.get_algorithm_project_uuid_list(), + AlgorithmProjectList(algorithm_projects={'test': 'algo-uuid'})) + self.assertEqual(group.coordinator_id, 1) + + def test_get_latest_model_from_model_group(self): + with db.session_scope() as session: + model_1 = Model() + model_1.name = 'test_model_name_1' + model_1.project_id = 1 + model_1.version = 1 + model_1.group_id = 1 + model_2 = 
Model() + model_2.name = 'test_model_name_2' + model_2.project_id = 1 + model_2.version = 2 + model_2.group_id = 1 + session.add_all([model_1, model_2]) + session.commit() + with db.session_scope() as session: + service = ModelJobGroupService(session) + model = service.get_latest_model_from_model_group(1) + self.assertEqual('test_model_name_2', model.name) + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_initialize_auth_status(self, mock_system_info): + mock_system_info.return_value = SystemInfo(pure_domain_name='test', name='name') + with db.session_scope() as session: + participant = Participant(id=1, name='party', domain_name='fl-peer.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + session.add_all([participant, relationship]) + session.commit() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + ModelJobGroupService(session).initialize_auth_status(group) + session.commit() + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + self.assertEqual( + group.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'peer': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/utils.py b/web_console_v2/api/fedlearner_webconsole/mmgr/utils.py new file mode 100644 index 000000000..7bc41fca3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/mmgr/utils.py @@ -0,0 +1,63 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import os
+from uuid import uuid4
+
+from fedlearner_webconsole.utils.pp_datetime import now
+from fedlearner_webconsole.job.models import JobType
+
+
+def get_job_path(storage_root_path: str, job_name: str) -> str:
+    return os.path.join(storage_root_path, 'job_output', job_name)
+
+
+def get_exported_model_path(job_path: str) -> str:
+    return os.path.join(job_path, 'exported_models')
+
+
+def get_checkpoint_path(job_path: str) -> str:
+    return os.path.join(job_path, 'checkpoints')
+
+
+def get_output_path(job_path: str) -> str:
+    return os.path.join(job_path, 'outputs')
+
+
+def exported_model_version_path(exported_models_path: str, version: int) -> str:
+    return os.path.join(exported_models_path, str(version))
+
+
+def get_model_path(storage_root_path: str, uuid: str) -> str:
+    return os.path.join(storage_root_path, 'model_output', uuid)
+
+
+def build_workflow_name(model_job_type: str, algorithm_type: str, model_job_name: str) -> str:
+    prefix = f'{model_job_type.lower()}-{algorithm_type.lower()}-{model_job_name}'
+    # the workflow name is limited to 255 chars, so cap the prefix at 249 (249 + 1 + 5 = 255)
+    return f'{prefix[:249]}-{uuid4().hex[:5]}'
+
+
+def is_model_job(job_type: JobType) -> bool:
+    return job_type in [
+        JobType.NN_MODEL_TRANINING, JobType.NN_MODEL_EVALUATION, JobType.TREE_MODEL_TRAINING,
+        JobType.TREE_MODEL_EVALUATION
+    ]
+
+
+def deleted_name(name: str) -> str:
+    """Renames a deleted model job, model or group due to the unique constraint on name."""
+    timestamp = now().strftime('%Y%m%d_%H%M%S')
+    return f'deleted_at_{timestamp}_{name}'
diff --git a/web_console_v2/api/fedlearner_webconsole/mmgr/utils_test.py b/web_console_v2/api/fedlearner_webconsole/mmgr/utils_test.py
new file mode 100644
index 000000000..e17b9d861
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/mmgr/utils_test.py
@@ -0,0 +1,61 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
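A quick way to see the 255-character bound that build_workflow_name relies on (a standalone sketch mirroring the helper above, not part of the patch itself):

from uuid import uuid4

def build_workflow_name(model_job_type: str, algorithm_type: str, model_job_name: str) -> str:
    # mirrors mmgr/utils.py above: at most 249 chars of prefix, then '-' plus 5 hex chars
    prefix = f'{model_job_type.lower()}-{algorithm_type.lower()}-{model_job_name}'
    return f'{prefix[:249]}-{uuid4().hex[:5]}'

# even an oversized model job name is clamped to exactly 249 + 1 + 5 = 255 chars
assert len(build_workflow_name('TRAINING', 'NN_VERTICAL', 'x' * 300)) == 255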
+#
+
+import unittest
+from fedlearner_webconsole.job.models import JobType
+from fedlearner_webconsole.mmgr.utils import get_exported_model_path, get_job_path, get_checkpoint_path, \
+    exported_model_version_path, is_model_job
+
+
+class UtilsTest(unittest.TestCase):
+
+    def test_is_model_job(self):
+        self.assertFalse(is_model_job(job_type=JobType.TRANSFORMER))
+        self.assertFalse(is_model_job(job_type=JobType.RAW_DATA))
+        self.assertFalse(is_model_job(job_type=JobType.DATA_JOIN))
+        self.assertFalse(is_model_job(job_type=JobType.PSI_DATA_JOIN))
+        self.assertTrue(is_model_job(job_type=JobType.NN_MODEL_TRANINING))
+        self.assertTrue(is_model_job(job_type=JobType.NN_MODEL_EVALUATION))
+        self.assertTrue(is_model_job(job_type=JobType.TREE_MODEL_TRAINING))
+        self.assertTrue(is_model_job(job_type=JobType.TREE_MODEL_EVALUATION))
+
+    def test_get_job_path(self):
+        storage_root_path = '/data'
+        job_name = 'train_job'
+        job_path = get_job_path(storage_root_path, job_name)
+        expected_path = f'{storage_root_path}/job_output/{job_name}'
+        self.assertEqual(job_path, expected_path)
+
+    def test_get_exported_model_path(self):
+        job_path = '/data/job_output/train_job'
+        exported_model_path = get_exported_model_path(job_path)
+        expected_path = f'{job_path}/exported_models'
+        self.assertEqual(exported_model_path, expected_path)
+
+    def test_get_checkpoint_path(self):
+        job_path = '/data/job_output/train_job'
+        checkpoint_path = get_checkpoint_path(job_path)
+        expected_path = f'{job_path}/checkpoints'
+        self.assertEqual(checkpoint_path, expected_path)
+
+    def test_exported_model_version_path(self):
+        exported_model_path = '/data/model_output/uuid'
+        exported_model_path_v1 = exported_model_version_path(exported_model_path, 1)
+        expected_path = f'{exported_model_path}/1'
+        self.assertEqual(exported_model_path_v1, expected_path)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/notification/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/notification/BUILD.bazel
new file mode 100644
index 000000000..e4ae7249a
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/notification/BUILD.bazel
@@ -0,0 +1,58 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+package(default_visibility = ["//web_console_v2/api:console_api_package"])
+
+py_library(
+    name = "notification_lib",
+    srcs = [
+        "email.py",
+        "sender.py",
+        "template.py",
+    ],
+    imports = ["../.."],
+    visibility = [
+        "//visibility:public",
+    ],
+    deps = [
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+    ],
+)
+
+py_test(
+    name = "email_test",
+    srcs = [
+        "email_test.py",
+    ],
+    imports = ["../.."],
+    main = "email_test.py",
+    deps = [
+        ":notification_lib",
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+    ],
+)
+
+py_test(
+    name = "sender_test",
+    srcs = [
+        "sender_test.py",
+    ],
+    imports = ["../.."],
+    main = "sender_test.py",
+    deps = [
+        ":notification_lib",
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+    ],
+)
+
+py_test(
+    name = "template_test",
+    srcs = [
+        "template_test.py",
+    ],
+    imports = ["../.."],
+    main = "template_test.py",
+    deps = [
+        ":notification_lib",
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+    ],
+)
diff --git a/web_console_v2/api/fedlearner_webconsole/notification/__init__.py b/web_console_v2/api/fedlearner_webconsole/notification/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/web_console_v2/api/fedlearner_webconsole/notification/email.py b/web_console_v2/api/fedlearner_webconsole/notification/email.py
new file mode 100644
index 000000000..09d042121
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/notification/email.py
@@ -0,0 +1,25 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from fedlearner_webconsole.notification.sender import send
+from fedlearner_webconsole.notification.template import NotificationTemplateName, render
+
+
+def send_email(address: str, template_name: NotificationTemplateName, **kwargs):
+    notification = render(template_name, **kwargs)
+    # TODO(linfan.fine): validate the email address
+    if address:
+        notification.receivers.append(address)
+    send(notification)
diff --git a/web_console_v2/api/fedlearner_webconsole/notification/email_test.py b/web_console_v2/api/fedlearner_webconsole/notification/email_test.py
new file mode 100644
index 000000000..b35f6f334
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/notification/email_test.py
@@ -0,0 +1,39 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
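A hedged usage sketch of the send_email helper above (the address and template variables are placeholders; actual delivery assumes at least one sender was registered via register_sender from sender.py, which appears later in this diff):

from fedlearner_webconsole.notification.email import send_email
from fedlearner_webconsole.notification.template import NotificationTemplateName

# render() fills the WORKFLOW_COMPLETE template with the keyword arguments,
# then send() fans the notification out to every registered sender.
send_email('a@b.com',
           NotificationTemplateName.WORKFLOW_COMPLETE,
           name='my-workflow',
           state='COMPLETED',
           link='https://example.com/workflows/1')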
+#
+
+import unittest
+from unittest.mock import patch, Mock
+
+from fedlearner_webconsole.notification.email import send_email
+from fedlearner_webconsole.notification.template import NotificationTemplateName
+from fedlearner_webconsole.proto.notification_pb2 import Notification
+
+
+class EmailTest(unittest.TestCase):
+
+    @patch('fedlearner_webconsole.notification.email.render')
+    @patch('fedlearner_webconsole.notification.email.send')
+    def test_send_email(self, mock_send: Mock, mock_render: Mock):
+        subject = 'test_subject'
+        content = 'test_content'
+        address = 'a@b.com'
+        mock_render.return_value = Notification(subject=subject, content=content)
+        send_email(address, NotificationTemplateName.WORKFLOW_COMPLETE, var1='aaa', var2='bbb')
+        mock_send.assert_called_once_with(Notification(subject=subject, content=content, receivers=[address]))
+        mock_render.assert_called_once_with(NotificationTemplateName.WORKFLOW_COMPLETE, var1='aaa', var2='bbb')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/notification/sender.py b/web_console_v2/api/fedlearner_webconsole/notification/sender.py
new file mode 100644
index 000000000..ceb1440ce
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/notification/sender.py
@@ -0,0 +1,50 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import logging
+
+from fedlearner_webconsole.proto.notification_pb2 import Notification
+
+
+class Sender(metaclass=abc.ABCMeta):
+
+    @abc.abstractmethod
+    def send(self, notification: Notification):
+        """Sends notification by third-party services."""
+
+
+senders = {}
+
+
+def register_sender(name: str, sender: Sender):
+    senders[name] = sender
+
+
+def send(notification: Notification):
+    """Sends a notification.
+
+    Senders are invoked directly since there are no performance concerns as of now.
+    In the future, notifications should be pushed to a queue and sent asynchronously
+    with a producer-consumer pattern."""
+    if not senders:
+        logging.info('[Notification] no sender for %s', notification.subject)
+        return
+    for name, sender in senders.items():
+        try:
+            sender.send(notification)
+            logging.info('[Notification] %s sent by %s', notification.subject, name)
+        except Exception:  # pylint: disable=broad-except
+            logging.exception('[Notification] sender %s failed to send %s', name, notification.subject)
diff --git a/web_console_v2/api/fedlearner_webconsole/notification/sender_test.py b/web_console_v2/api/fedlearner_webconsole/notification/sender_test.py
new file mode 100644
index 000000000..d58b2b167
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/notification/sender_test.py
@@ -0,0 +1,41 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import MagicMock + +from fedlearner_webconsole.notification.sender import register_sender, send +from fedlearner_webconsole.proto.notification_pb2 import Notification + + +class SenderTest(unittest.TestCase): + + def test_send(self): + mock_sender = MagicMock() + mock_sender.send = MagicMock() + register_sender('mock_sender', mock_sender) + + notification = Notification(subject='test subject', content='test content', receivers=[]) + send(notification) + mock_sender.send.assert_called_once_with(notification) + + def test_send_with_no_sender(self): + notification = Notification(subject='test subject', content='test content', receivers=[]) + # No exception is expected + send(notification) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/notification/template.py b/web_console_v2/api/fedlearner_webconsole/notification/template.py new file mode 100644 index 000000000..3f6068d07 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/notification/template.py @@ -0,0 +1,50 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum +from string import Template +from typing import NamedTuple + +from fedlearner_webconsole.proto.notification_pb2 import Notification + + +class NotificationTemplate(NamedTuple): + subject: Template + content: Template + + +class NotificationTemplateName(enum.Enum): + WORKFLOW_COMPLETE = 'WORKFLOW_COMPLETE' + + +_UNKNOWN_TEMPLATE = NotificationTemplate( + subject=Template('Unknown email'), + content=Template(''), +) + +_WORKFLOW_COMPLETE_TEMPLATE = NotificationTemplate( + subject=Template('【隐私计算平台】工作流「${name}」- 运行结束 - ${state}'), + content=Template('「工作流中心」:工作流「${name}」- 运行结束 - ${state},详情请见:${link}'), +) + +TEMPLATES = {NotificationTemplateName.WORKFLOW_COMPLETE: _WORKFLOW_COMPLETE_TEMPLATE} + + +def render(template_name: NotificationTemplateName, **kwargs) -> Notification: + template = TEMPLATES.get(template_name, _UNKNOWN_TEMPLATE) + return Notification( + subject=template.subject.safe_substitute(kwargs), + content=template.content.safe_substitute(kwargs), + ) diff --git a/web_console_v2/api/fedlearner_webconsole/notification/template_test.py b/web_console_v2/api/fedlearner_webconsole/notification/template_test.py new file mode 100644 index 000000000..7c79788d8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/notification/template_test.py @@ -0,0 +1,45 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest + +from fedlearner_webconsole.notification.template import render, NotificationTemplateName +from fedlearner_webconsole.proto.notification_pb2 import Notification + + +class TemplateTest(unittest.TestCase): + + def test_render(self): + email = render(NotificationTemplateName.WORKFLOW_COMPLETE, + name='test workflow', + state='FAILED', + link='www.a.com') + self.assertEqual( + email, + Notification(subject='【隐私计算平台】工作流「test workflow」- 运行结束 - FAILED', + content='「工作流中心」:工作流「test workflow」- 运行结束 - FAILED,详情请见:www.a.com')) + # some variables are not passed + email = render(NotificationTemplateName.WORKFLOW_COMPLETE, name='test workflow', unknown_var='123') + self.assertEqual( + email, + Notification(subject='【隐私计算平台】工作流「test workflow」- 运行结束 - ${state}', + content='「工作流中心」:工作流「test workflow」- 运行结束 - ${state},详情请见:${link}')) + + def test_render_unknown(self): + self.assertEqual(render('unknown template', hello=123), Notification(subject='Unknown email',)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/participant/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/participant/BUILD.bazel new file mode 100644 index 000000000..1daa06058 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/participant/BUILD.bazel @@ -0,0 +1,128 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "k8s_utils_lib", + srcs = ["k8s_utils.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + ], +) + +py_test( + name = "k8s_utils_lib_test", + srcs = [ + "k8s_utils_test.py", + ], + imports = ["../.."], + main = "k8s_utils_test.py", + deps = [ + ":k8s_utils_lib", + "//web_console_v2/api/testing:helpers_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + 
+py_library( + name = "services_lib", + srcs = ["services.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + ], +) + +py_test( + name = "services_lib_test", + srcs = [ + "services_test.py", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_time_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":k8s_utils_lib", + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_time_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/participant/__init__.py b/web_console_v2/api/fedlearner_webconsole/participant/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/participant/apis.py b/web_console_v2/api/fedlearner_webconsole/participant/apis.py new file mode 100644 index 000000000..c7ff12619 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/participant/apis.py @@ -0,0 +1,426 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# coding: utf-8
+from http import HTTPStatus
+from typing import Optional
+from flask_restful import Api, Resource
+from webargs import fields, validate
+from google.protobuf.json_format import MessageToDict
+
+from fedlearner_webconsole.audit.decorators import emits_event
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.exceptions import InvalidArgumentException, ResourceConflictException, \
+    NotFoundException, MethodNotAllowedException
+from fedlearner_webconsole.participant.k8s_utils import get_host_and_port, get_valid_candidates, \
+    create_or_update_participant_in_k8s
+from fedlearner_webconsole.participant.models import Participant, ParticipantType
+from fedlearner_webconsole.participant.services import ParticipantService
+from fedlearner_webconsole.proto.common_pb2 import StatusCode
+from fedlearner_webconsole.rpc.client import RpcClient
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.utils.decorators.pp_flask import use_kwargs, input_validator, admin_required
+from fedlearner_webconsole.auth.third_party_sso import credentials_required
+from fedlearner_webconsole.utils.flask_utils import make_flask_response
+from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient
+
+
+def _get_empty_message_hint(field: str) -> str:
+    return f'{field} should not be empty'
+
+
+def _create_participant_in_k8s(domain_name: str, host: str, port: int, namespace: str):
+    # participants created manually must provide all the arguments
+    if host is None or port is None:
+        raise InvalidArgumentException('Do not have host or port.')
+    # create ingress and service
+    # TODO(taoyanting): validate url
+    create_or_update_participant_in_k8s(domain_name=domain_name, host=host, port=port, namespace=namespace)
+
+
+class ParticipantsApi(Resource):
+
+    @credentials_required
+    def get(self):
+        """Get all participant information ordered by `created_at` in descending order.
+        ---
+        tags:
+        - participant
+        description: Get all participant information ordered by `created_at` in descending order.
+        responses:
+          200:
+            description: list of participants
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.Participant'
+        """
+        with db.session_scope() as session:
+            participants = session.query(Participant). \
+                order_by(Participant.created_at.desc()).all()
+            participant_service = ParticipantService(session)
+            protos = []
+            for participant in participants:
+                # A trade-off to join project counts with participants
+                proto = participant.to_proto()
+                proto.num_project = participant_service.get_number_of_projects(participant.id)
+                protos.append(proto)
+            return make_flask_response(data=protos)
+
+    # TODO(taoyanting): refactor this api
+    @input_validator
+    @credentials_required
+    @emits_event(audit_fields=['is_manual_configured', 'type'])
+    @use_kwargs({
+        'name':
+            fields.Str(required=True),
+        'domain_name':
+            fields.Str(required=True),
+        'is_manual_configured':
+            fields.Bool(required=False, load_default=False),
+        'type':
+            fields.Str(required=False,
+                       load_default=ParticipantType.PLATFORM.name,
+                       validate=validate.OneOf([t.name for t in ParticipantType])),
+        'host':
+            fields.Str(required=False, load_default=None),
+        'port':
+            fields.Integer(required=False, load_default=None),
+        'comment':
+            fields.Str(required=False, load_default=None),
+    })
+    def post(
+            self,
+            name: str,
+            domain_name: str,
+            is_manual_configured: bool,
+            type: Optional[str],  # pylint: disable=redefined-builtin
+            host: Optional[str],
+            port: Optional[int],
+            comment: Optional[str]):
+        """Create a new participant
+        ---
+        tags:
+        - participant
+        description: Create a new participant
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  name:
+                    type: string
+                  domain_name:
+                    type: string
+                  is_manual_configured:
+                    type: boolean
+                  type:
+                    type: string
+                  host:
+                    type: string
+                  port:
+                    type: integer
+                  comment:
+                    type: string
+        responses:
+          200:
+            description: Participant that you created.
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.Participant'
+        """
+
+        extra = {}
+        extra['is_manual_configured'] = is_manual_configured
+        participant_type = ParticipantType[type]
+
+        with db.session_scope() as session:
+            if session.query(Participant). \
+                    filter_by(domain_name=domain_name).first() is not None:
+                raise ResourceConflictException(message='Participant domain name has been used')
+            service = ParticipantService(session)
+            if participant_type == ParticipantType.LIGHT_CLIENT:
+                participant = service.create_light_client_participant(name, domain_name, comment)
+            else:
+                if is_manual_configured:
+                    namespace = SettingService(session).get_namespace()
+                    _create_participant_in_k8s(domain_name, host, port, namespace)
+                else:
+                    host, port = get_host_and_port(domain_name)
+                participant = service.create_platform_participant(name, domain_name, host, port, extra, comment)
+            try:
+                session.commit()
+            except Exception as e:
+                raise InvalidArgumentException(details=str(e)) from e
+            return make_flask_response(data=participant.to_proto(), status=HTTPStatus.CREATED)
+
+
+class ParticipantApi(Resource):
+
+    @credentials_required
+    def get(self, participant_id: int):
+        """Get details of a participant
+        ---
+        tags:
+        - participant
+        description: Get details of a participant
+        parameters:
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        responses:
+          200:
+            description: the specified participant
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.Participant'
+        """
+        with db.session_scope() as session:
+            participant = session.query(Participant).filter_by(id=participant_id).first()
+            if participant is None:
+                raise NotFoundException(f'Failed to find participant: {participant_id}')
+            return make_flask_response(data=participant.to_proto())
+
+    @credentials_required
+    @input_validator
+    @emits_event()
+    @use_kwargs({
+        'name': fields.Str(required=False, load_default=None),
+        'domain_name': fields.Str(required=False, load_default=None),
+        'host': fields.Str(required=False, load_default=None),
+        'port': fields.Integer(required=False, load_default=None),
+        'comment': fields.Str(required=False, load_default=None),
+    })
+    def patch(self, participant_id: int, name: Optional[str], domain_name: Optional[str], host: Optional[str],
+              port: Optional[int], comment: Optional[str]):
+        """Partially update the given participant
+        ---
+        tags:
+        - participant
+        description: Partially update the given participant
+        parameters:
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  name:
+                    type: string
+                  domain_name:
+                    type: string
+                  host:
+                    type: string
+                  port:
+                    type: integer
+                  comment:
+                    type: string
+        responses:
+          200:
+            description: the updated participant
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.Participant'
+        """
+        with db.session_scope() as session:
+            participant: Participant = session.query(Participant).filter_by(id=participant_id).first()
+            if participant is None:
+                raise NotFoundException(f'Failed to find participant: {participant_id}')
+
+            participant.name = name or participant.name
+            participant.comment = comment or participant.comment
+
+            if domain_name is not None and domain_name != participant.domain_name:
+                if session.query(Participant).filter_by(domain_name=domain_name).first() is not None:
+                    raise ResourceConflictException(message='Participant domain name has been used')
+                participant.domain_name = domain_name
+            if participant.type == ParticipantType.PLATFORM:
+                extra = participant.get_extra_info()
+                if extra['is_manual_configured']:
+                    if domain_name or host or port:
+                        participant.host = host or participant.host
+                        participant.port = port or participant.port
+
+                        # TODO(taoyanting): validate url
+                        try:
+                            namespace = SettingService(session).get_namespace()
+                            create_or_update_participant_in_k8s(domain_name=participant.domain_name,
+                                                                host=participant.host,
+                                                                port=participant.port,
+                                                                namespace=namespace)
+                        except Exception as e:
+                            raise InvalidArgumentException(details=str(e)) from e
+                elif domain_name is not None:
+                    host, port = get_host_and_port(participant.domain_name)
+                    participant.host = host
+                    participant.port = port
+                participant.set_extra_info(extra)
+            try:
+                session.commit()
+                return make_flask_response(data=participant.to_proto())
+            except Exception as e:
+                raise InvalidArgumentException(details=str(e)) from e
+
+    @credentials_required
+    @admin_required
+    @emits_event()
+    def delete(self, participant_id: int):
+        """Delete a participant
+        ---
+        tags:
+        - participant
+        description: Delete a participant
+        parameters:
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        responses:
+          204:
+            description: Deleted successfully
+        """
+        with db.session_scope() as session:
+            participant = session.query(Participant).filter_by(id=participant_id).first()
+            if participant is None:
+                raise NotFoundException(f'Failed to find participant: {participant_id}')
+
+            service = ParticipantService(session)
+            num_project = service.get_number_of_projects(participant_id)
+            if num_project != 0:
+                raise MethodNotAllowedException(f'Failed to delete participant: {participant_id}, '
+                                                f'because it has related projects')
+            session.delete(participant)
+            session.commit()
+            return make_flask_response(status=HTTPStatus.NO_CONTENT)
+
+
+class ParticipantConnectionChecksApi(Resource):
+
+    @credentials_required
+    def get(self, participant_id: int):
+        """Check participant connection status
+        ---
+        tags:
+        - participant
+        description: Check participant connection status
+        parameters:
+        - in: path
+          name: participant_id
+          schema:
+            type: integer
+        responses:
+          200:
+            description: connection status
+            content:
+              application/json:
+                schema:
+                  type: object
+                  properties:
+                    success:
+                      type: boolean
+                    message:
+                      type: string
+                    application_version:
+                      $ref: '#/definitions/proto.ApplicationVersion'
+        """
+        with db.session_scope() as session:
+            participant = session.query(Participant).filter_by(id=participant_id).first()
+            if participant is None:
+                raise NotFoundException(f'Failed to find participant: {participant_id}')
+
+            client = RpcClient.from_participant(participant.domain_name)
+            result = client.check_peer_connection()
+            version = {}
+            if result.status.code == StatusCode.STATUS_SUCCESS:
+                version = MessageToDict(result.application_version, preserving_proto_field_name=True)
+            return make_flask_response({
+                'success': result.status.code == StatusCode.STATUS_SUCCESS,
+                'message': result.status.msg,
+                'application_version': version
+            })
+
+
+class ParticipantCandidatesApi(Resource):
+
+    @credentials_required
+    @admin_required
+    def get(self):
+        """Get candidate participants according to Kubernetes resources.
+        ---
+        tags:
+        - participant
+        description: Get candidate participants according to Kubernetes resources.
+        responses:
+          200:
+            description: list of candidate domain names
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    type: object
+                    properties:
+                      domain_name:
+                        type: string
+        """
+        return make_flask_response(get_valid_candidates())
+
+
+class ParticipantFlagsApi(Resource):
+
+    def get(self, participant_id: int):
+        """Get flags from participant
+        ---
+        tags:
+        - flag
+        responses:
+          200:
+            description: Participant's flags are returned
+            content:
+              application/json:
+                schema:
+                  type: object
+                  additionalProperties: true
+                example:
+                  FLAG_1: string_value
+                  FLAG_2: true
+                  FLAG_3: 1
+        """
+        with db.session_scope() as session:
+            participant: Participant = session.query(Participant).get(participant_id)
+            if participant is None:
+                raise NotFoundException(f'Failed to find participant: {participant_id}')
+            client = SystemServiceClient.from_participant(domain_name=participant.domain_name)
+            return make_flask_response(data=client.list_flags())
+
+
+def initialize_participant_apis(api: Api):
+    api.add_resource(ParticipantsApi, '/participants')
+    api.add_resource(ParticipantApi, '/participants/<int:participant_id>')
+    api.add_resource(ParticipantConnectionChecksApi, '/participants/<int:participant_id>/connection_checks')
+    api.add_resource(ParticipantCandidatesApi, '/participant_candidates')
+    api.add_resource(ParticipantFlagsApi, '/participants/<int:participant_id>/flags')
diff --git a/web_console_v2/api/fedlearner_webconsole/participant/apis_test.py b/web_console_v2/api/fedlearner_webconsole/participant/apis_test.py
new file mode 100644
index 000000000..d06a13dcf
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/participant/apis_test.py
@@ -0,0 +1,372 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
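With the `<int:participant_id>` converters in initialize_participant_apis above, the registered URL rules can be inspected with a minimal sketch (illustrative only; the real web console mounts the Api under its own prefix such as /api/v2, which this bare app omits):

from flask import Flask
from flask_restful import Api

from fedlearner_webconsole.participant.apis import initialize_participant_apis

app = Flask(__name__)
initialize_participant_apis(Api(app))
# expected rules include '/participants', '/participants/<int:participant_id>',
# '/participants/<int:participant_id>/connection_checks', '/participant_candidates'
# and '/participants/<int:participant_id>/flags'
print(sorted(str(rule) for rule in app.url_map.iter_rules()))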
+ +# coding: utf-8 +import unittest + +from http import HTTPStatus + +from unittest.mock import patch, MagicMock + +from fedlearner_webconsole.utils.pp_time import sleep +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant, ParticipantType +from testing.common import BaseTestCase + + +class ParticipantsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + self.fake_certs = {'test/test.certs': 'key'} + self.default_participant = Participant(name='test-particitant-name', + domain_name='fl-test.com', + host='1.1.1.1', + port=32443, + comment='test comment') + self.default_participant.set_extra_info({'is_manual_configured': False}) + + self.participant_manually = Participant(name='test-manual-participant', + domain_name='fl-test-manual.com', + host='1.1.1.2', + port=443) + self.participant_manually.set_extra_info({ + 'is_manual_configured': True, + }) + + with db.session_scope() as session: + session.add(self.default_participant) + session.flush() + sleep(1) + session.add(self.participant_manually) + session.commit() + + @patch('fedlearner_webconsole.participant.apis.get_host_and_port') + def test_post_participant_without_certificate(self, mock_get_host_and_port): + name = 'test-post-participant' + domain_name = 'fl-post-test.com' + comment = 'test post participant' + host = '120.0.0.20' + port = 20 + mock_get_host_and_port.return_value = (host, port) + + create_response = self.post_helper('/api/v2/participants', + data={ + 'name': name, + 'domain_name': domain_name, + 'is_manual_configured': False, + 'comment': comment + }) + self.assertEqual(HTTPStatus.CREATED, create_response.status_code) + participant = self.get_response_data(create_response) + # yapf: disable + self.assertPartiallyEqual(participant, { + 'id': 3, + 'comment': comment, + 'domain_name': 'fl-post-test.com', + 'pure_domain_name': 'post-test', + 'host': host, + 'name': name, + 'port': port, + 'extra': { + 'is_manual_configured': False, + }, + 'type': 'PLATFORM', + 'last_connected_at': 0, + 'num_project': 0, + }, ignore_fields=['created_at', 'updated_at']) + # yapf: enable + + @patch('fedlearner_webconsole.participant.apis.create_or_update_participant_in_k8s') + def test_post_participant_manually(self, mock_create_or_update_participant_in_k8s): + name = 'test-post-participant' + domain_name = 'fl-post-test.com' + comment = 'test post participant' + host = '120.0.0.20' + port = 20 + + create_response = self.post_helper('/api/v2/participants', + data={ + 'name': name, + 'domain_name': domain_name, + 'comment': comment, + 'is_manual_configured': True, + 'host': host, + 'port': port, + }) + + self.assertEqual(HTTPStatus.CREATED, create_response.status_code) + participant = self.get_response_data(create_response) + # yapf: disable + self.assertPartiallyEqual(participant, { + 'id': 3, + 'comment': comment, + 'domain_name': 'fl-post-test.com', + 'pure_domain_name': 'post-test', + 'host': host, + 'name': name, + 'port': port, + 'extra': { + 'is_manual_configured': True, + }, + 'type': 'PLATFORM', + 'last_connected_at': 0, + 'num_project': 0, + }, ignore_fields=['created_at', 'updated_at']) + # yapf: enable + mock_create_or_update_participant_in_k8s.assert_called_once_with(domain_name='fl-post-test.com', + host='120.0.0.20', + namespace='default', + port=20) + + def test_post_light_client_participant(self): + resp = self.post_helper('/api/v2/participants', + data={ + 'name': 'light-client', + 'domain_name': 'fl-light-client.com', + 'type': 'LIGHT_CLIENT', 
+    def test_post_light_client_participant(self):
+        resp = self.post_helper('/api/v2/participants',
+                                data={
+                                    'name': 'light-client',
+                                    'domain_name': 'fl-light-client.com',
+                                    'type': 'LIGHT_CLIENT',
+                                    'is_manual_configured': False,
+                                })
+        self.assertEqual(resp.status_code, HTTPStatus.CREATED)
+        with db.session_scope() as session:
+            participant = session.query(Participant).filter_by(name='light-client').first()
+            self.assertEqual(participant.domain_name, 'fl-light-client.com')
+            self.assertEqual(participant.type, ParticipantType.LIGHT_CLIENT)
+        self.assertResponseDataEqual(resp, {
+            'name': 'light-client',
+            'domain_name': 'fl-light-client.com',
+            'pure_domain_name': 'light-client',
+            'host': '',
+            'port': 0,
+            'type': 'LIGHT_CLIENT',
+            'comment': '',
+            'extra': {
+                'is_manual_configured': False
+            },
+            'last_connected_at': 0,
+            'num_project': 0,
+        },
+                                     ignore_fields=['id', 'created_at', 'updated_at'])
+
+    @patch('fedlearner_webconsole.participant.apis.get_host_and_port')
+    def test_post_conflict_domain_name_participant(self, mock_get_host_and_port):
+        mock_get_host_and_port.return_value = ('1.1.1.1', 1)
+        create_response = self.post_helper('/api/v2/participants',
+                                           data={
+                                               'name': 'test-post-conflict-participant',
+                                               'domain_name': 'fl-test.com',
+                                               'is_manual_configured': False,
+                                           })
+
+        self.assertEqual(HTTPStatus.CONFLICT, create_response.status_code)
+
+    def test_list_participant(self):
+        list_response = self.get_helper('/api/v2/participants')
+        participants = self.get_response_data(list_response)
+        self.assertPartiallyEqual(
+            participants,
+            [{
+                'comment': '',
+                'domain_name': 'fl-test-manual.com',
+                'pure_domain_name': 'test-manual',
+                'host': '1.1.1.2',
+                'id': 2,
+                'name': 'test-manual-participant',
+                'port': 443,
+                'extra': {
+                    'is_manual_configured': True,
+                },
+                'last_connected_at': 0,
+                'num_project': 0,
+                'type': 'PLATFORM',
+            }, {
+                'comment': 'test comment',
+                'domain_name': 'fl-test.com',
+                'pure_domain_name': 'test',
+                'host': '1.1.1.1',
+                'id': 1,
+                'name': 'test-participant-name',
+                'port': 32443,
+                'extra': {
+                    'is_manual_configured': False,
+                },
+                'last_connected_at': 0,
+                'num_project': 0,
+                'type': 'PLATFORM',
+            }],
+            ignore_fields=['created_at', 'updated_at'],
+        )
+
+    @patch('fedlearner_webconsole.participant.apis.get_host_and_port')
+    def test_update_participant(self, mock_get_host_and_port):
+        name = 'test-update-participant'
+        domain_name = 'fl-update-test.com'
+        comment = 'test update participant'
+        ip = '120.0.0.30'
+        port = 30
+        mock_get_host_and_port.return_value = (ip, port)
+
+        update_response = self.patch_helper('/api/v2/participants/1',
+                                            data={
+                                                'name': name,
+                                                'domain_name': domain_name,
+                                                'comment': comment,
+                                            })
+        participant = self.get_response_data(update_response)
+
+        self.assertEqual(update_response.status_code, HTTPStatus.OK)
+        # yapf: disable
+        self.assertPartiallyEqual(participant, {
+            'comment': comment,
+            'domain_name': 'fl-update-test.com',
+            'pure_domain_name': 'update-test',
+            'host': ip,
+            'id': 1,
+            'name': name,
+            'port': port,
+            'extra': {
+                'is_manual_configured': False,
+            },
+            'last_connected_at': 0,
+            'num_project': 0,
+            'type': 'PLATFORM'
+        }, ignore_fields=['created_at', 'updated_at'])
+        # yapf: enable
+
+    def test_update_participant_conflict_domain_name(self):
+        update_response = self.patch_helper('/api/v2/participants/1', data={
+            'domain_name': 'fl-test-manual.com',
+        })
+        self.assertEqual(update_response.status_code, HTTPStatus.CONFLICT)
+
+    @patch('fedlearner_webconsole.participant.apis.create_or_update_participant_in_k8s')
+    def test_update_host_and_port(self, mock_create_or_update_participant_in_k8s):
+        update_response = self.patch_helper('/api/v2/participants/2', data={
+            'host': '1.112.212.20',
+            'port': 9999,
+        })
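+        # Updating host/port should re-provision the participant's k8s service
+        # and ingress, hence the assert_called_once() below.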
+        self.assertEqual(update_response.status_code, HTTPStatus.OK)
+        self.assertEqual(self.get_response_data(update_response)['port'], 9999)
+
+        mock_create_or_update_participant_in_k8s.assert_called_once()
+
+    @patch('fedlearner_webconsole.participant.apis.create_or_update_participant_in_k8s')
+    def test_update_only_name(self, mock_create_or_update_participant_in_k8s):
+        update_response = self.patch_helper('/api/v2/participants/2', data={
+            'name': 'fl-test-only-name',
+        })
+
+        self.assertEqual(update_response.status_code, HTTPStatus.OK)
+        self.assertEqual(self.get_response_data(update_response)['name'], 'fl-test-only-name')
+
+        mock_create_or_update_participant_in_k8s.assert_not_called()
+
+    def test_update_light_client(self):
+        with db.session_scope() as session:
+            party = Participant(name='test-party', domain_name='fl-light-client.com', type=ParticipantType.LIGHT_CLIENT)
+            session.add(party)
+            session.commit()
+        resp = self.patch_helper(f'/api/v2/participants/{party.id}',
+                                 data={
+                                     'name': 'test-name',
+                                     'domain_name': 'fl-1.com',
+                                     'comment': 'comment'
+                                 })
+        self.assertEqual(resp.status_code, HTTPStatus.OK)
+        with db.session_scope() as session:
+            party = session.query(Participant).get(party.id)
+            self.assertEqual(party.name, 'test-name')
+            self.assertEqual(party.domain_name, 'fl-1.com')
+            self.assertEqual(party.comment, 'comment')
+
+    def test_get_participant(self):
+        with db.session_scope() as session:
+            relationship = ProjectParticipant(project_id=1, participant_id=1)
+            session.add(relationship)
+            session.commit()
+        get_response = self.get_helper('/api/v2/participants/1')
+        participant = self.get_response_data(get_response)
+        # yapf: disable
+        self.assertPartiallyEqual(participant, {
+            'comment': 'test comment',
+            'domain_name': 'fl-test.com',
+            'pure_domain_name': 'test',
+            'host': '1.1.1.1',
+            'id': 1,
+            'name': 'test-participant-name',
+            'port': 32443,
+            'extra': {
+                'is_manual_configured': False,
+            },
+            'type': 'PLATFORM',
+            'last_connected_at': 0,
+            'num_project': 0,
+        }, ignore_fields=['created_at', 'updated_at'])
+        # yapf: enable
+
+    def test_delete_participant(self):
+        self.signin_as_admin()
+        with db.session_scope() as session:
+            relationship = ProjectParticipant(project_id=1, participant_id=2)
+            session.add(relationship)
+            session.commit()
+        # test delete participant which does not exist
+        delete_response = self.delete_helper('/api/v2/participants/3')
+        self.assertEqual(delete_response.status_code, HTTPStatus.NOT_FOUND)
+
+        # test delete participant which has related projects
+        delete_response = self.delete_helper('/api/v2/participants/2')
+        self.assertEqual(delete_response.status_code, HTTPStatus.METHOD_NOT_ALLOWED)
+
+        # test delete participant successfully
+        delete_response = self.delete_helper('/api/v2/participants/1')
+        self.assertEqual(delete_response.status_code, HTTPStatus.NO_CONTENT)
+
+
+class ParticipantCandidatesApiTest(BaseTestCase):
+
+    def setUp(self):
+        super().setUp()
+        self.signin_as_admin()
+
+    def test_get_valid_candidates(self):
+        get_response = self.get_helper('/api/v2/participant_candidates')
+        data = self.get_response_data(get_response)
+        self.assertEqual(get_response.status_code, HTTPStatus.OK)
+        self.assertEqual(data, [{'domain_name': 'fl-aaa.com'}, {'domain_name': 'fl-ccc.com'}])
+
+
+class ParticipantFlagsApiTest(BaseTestCase):
+
+    def setUp(self):
+        super().setUp()
+        with db.session_scope() as session:
+            session.add(Participant(id=1, name='party', domain_name='fl-test.com'))
+            session.commit()
+
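+    # The test below stubs SystemServiceClient entirely, so no real RPC is
+    # made: from_participant() returns a mock whose list_flags() yields a
+    # canned payload.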
+    @patch('fedlearner_webconsole.participant.apis.SystemServiceClient')
+    def test_get_peer_flags(self, mock_client: MagicMock):
+        instance = mock_client.from_participant.return_value
+        instance.list_flags.return_value = {'key': 'value'}
+        # fails due to participant not found
+        resp = self.get_helper('/api/v2/participants/2/flags')
+        self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND)
+        resp = self.get_helper('/api/v2/participants/1/flags')
+        mock_client.from_participant.assert_called_with(domain_name='fl-test.com')
+        self.assertResponseDataEqual(resp, {'key': 'value'})
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/participant/k8s_utils.py b/web_console_v2/api/fedlearner_webconsole/participant/k8s_utils.py
new file mode 100644
index 000000000..90dcbf4c9
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/participant/k8s_utils.py
@@ -0,0 +1,134 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import re
+from typing import Tuple, List, Dict
+
+from envs import Envs
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.exceptions import InvalidArgumentException
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.k8s.k8s_client import k8s_client
+from fedlearner_webconsole.utils.domain_name import get_pure_domain_name
+
+_RE_INGRESS_NAME = re.compile(r'^(fl-).+(-client-auth)$')
+_RE_SERVICE_NAME = re.compile(r'^(fl-).+$')
+
+
+def get_valid_candidates() -> List[Dict[str, str]]:
+    with db.session_scope() as session:
+        namespace = SettingService(session).get_namespace()
+    ingresses = k8s_client.list_ingress(namespace)
+    ingress_names = []
+    for ingress in ingresses.items:
+        if hasattr(ingress, 'metadata') and hasattr(ingress.metadata, 'name'):
+            name = ingress.metadata.name
+            if _RE_INGRESS_NAME.fullmatch(name):
+                ingress_names.append(name)
+
+    services = k8s_client.list_service(namespace)
+    service_names = []
+    for service in services.items:
+        if hasattr(service, 'metadata') and hasattr(service.metadata, 'name'):
+            name = service.metadata.name
+            if _RE_SERVICE_NAME.fullmatch(name):
+                service_names.append(name)
+
+    # '-client-auth' is 12 characters, so name[:-12] strips that suffix.
+    candidates = [{'domain_name': f'{name[:-12]}.com'} for name in ingress_names if name[:-12] in service_names]
+    return candidates
+
+
+def get_host_and_port(domain_name: str) -> Tuple[str, int]:
+    with db.session_scope() as session:
+        namespace = SettingService(session).get_namespace()
+    service_name = domain_name.rpartition('.')[0]
+    ingress_name = f'{service_name}-client-auth'
+
+    try:
+        service = k8s_client.get_service(name=service_name, namespace=namespace)
+        ingress = k8s_client.get_ingress(name=ingress_name, namespace=namespace)
+        host = service.spec.external_name
+        port = ingress.spec.rules[0].http.paths[0].backend.service_port
+    except Exception as e:
+        raise InvalidArgumentException(details=f'cannot find host or port in ingress, {e}') from e
+
+    return host, port
+
+
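+# A hedged usage sketch (comments only, not executed; 'fl-demo.com', '1.2.3.4'
+# and port 32443 are hypothetical values):
+#
+#   create_or_update_participant_in_k8s(
+#       domain_name='fl-demo.com', host='1.2.3.4', port=32443, namespace='default')
+#
+# would upsert an ExternalName service 'fl-demo' pointing at '1.2.3.4' plus an
+# ingress 'fl-demo-client-auth' whose snippets route gRPC to
+# 'demo.fedlearner.net' (assuming get_pure_domain_name('fl-demo') == 'demo').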
+def _create_or_update_participant_ingress(name: str, service_port: int, namespace: str):
+    client_auth_ingress_name = f'{name}-client-auth'
+    pure_domain_name = get_pure_domain_name(name)
+    host = f'{pure_domain_name}.fedlearner.net'
+    configuration_snippet = f"""
+    grpc_next_upstream_tries 5;
+    grpc_set_header Host {host};
+    grpc_set_header Authority {host};"""
+    # TODO(wangsen.0914): remove this hack after we align the controller
+    is_tce = False  # TODO(lixiaoguang.01) hardcoded for now
+    secret_path = 'ingress-nginx/client' if not is_tce else 'tce_static/bdcert'
+    grpc_ssl_trusted_certificate = 'all.pem' if not is_tce else 'intermediate.pem'
+    server_snippet = f"""
+    grpc_ssl_verify on;
+    grpc_ssl_server_name on;
+    grpc_ssl_name {host};
+    grpc_ssl_trusted_certificate /etc/{secret_path}/{grpc_ssl_trusted_certificate};
+    grpc_ssl_certificate /etc/{secret_path}/client.pem;
+    grpc_ssl_certificate_key /etc/{secret_path}/client.key;"""
+    # yapf: disable
+    k8s_client.create_or_update_ingress(metadata={
+        'name': client_auth_ingress_name,
+        'namespace': namespace,
+        'annotations': {
+            'nginx.ingress.kubernetes.io/backend-protocol': 'GRPCS',
+            'nginx.ingress.kubernetes.io/http2-insecure-port': 'true',
+            'nginx.ingress.kubernetes.io/configuration-snippet': configuration_snippet,
+            'nginx.ingress.kubernetes.io/server-snippet': server_snippet
+        }
+    },
+                                        spec={
+                                            'rules': [{
+                                                'host': f'{client_auth_ingress_name}.com',
+                                                'http': {
+                                                    'paths': [{
+                                                        'pathType': 'ImplementationSpecific',
+                                                        'backend': {
+                                                            'serviceName': name,
+                                                            'servicePort': service_port
+                                                        }
+                                                    }]
+                                                }
+                                            }],
+                                            'ingressClassName': None
+                                        },
+                                        name=client_auth_ingress_name,
+                                        namespace=namespace)
+    # yapf: enable
+
+
+def create_or_update_participant_in_k8s(domain_name: str, host: str, port: int, namespace: str):
+    name = domain_name.rpartition('.')[0]
+    k8s_client.create_or_update_service(
+        metadata={
+            'name': name,
+            'namespace': namespace,
+        },
+        spec={
+            'externalName': host,
+            'type': 'ExternalName',
+        },
+        name=name,
+        namespace=namespace,
+    )
+    _create_or_update_participant_ingress(name=name, service_port=port, namespace=namespace)
diff --git a/web_console_v2/api/fedlearner_webconsole/participant/k8s_utils_test.py b/web_console_v2/api/fedlearner_webconsole/participant/k8s_utils_test.py
new file mode 100644
index 000000000..89b03b47c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/participant/k8s_utils_test.py
@@ -0,0 +1,130 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from unittest.mock import patch, MagicMock + +from fedlearner_webconsole.participant.k8s_utils import get_host_and_port, _create_or_update_participant_ingress, \ + create_or_update_participant_in_k8s +from testing.helpers import to_simple_namespace +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ParticipantsK8sUtilsTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.participant.k8s_utils.k8s_client') + @patch('fedlearner_webconsole.participant.k8s_utils.SettingService.get_namespace') + def test_get_host_and_port(self, mock_get_namespace, mock_k8s_client): + fake_service = to_simple_namespace({'spec': {'external_name': '127.0.0.10'}}) + fake_ingress = to_simple_namespace({ + 'spec': { + 'rules': [{ + 'http': { + 'paths': [{ + 'backend': { + 'service_name': 'fakeservice', + 'service_port': 32443 + } + }] + } + }] + } + }) + domain_name = 'test_domain_name.com' + mock_get_namespace.return_value = 'default' + mock_k8s_client.get_service = MagicMock(return_value=fake_service) + mock_k8s_client.get_ingress = MagicMock(return_value=fake_ingress) + + host, port = get_host_and_port(domain_name) + self.assertEqual(host, '127.0.0.10') + self.assertEqual(port, 32443) + mock_k8s_client.get_service.assert_called_once_with(name='test_domain_name', namespace='default') + mock_k8s_client.get_ingress.assert_called_once_with(name='test_domain_name-client-auth', namespace='default') + + @patch('fedlearner_webconsole.participant.k8s_utils.k8s_client') + def test_create_or_update_participant_ingress(self, mock_k8s_client: MagicMock): + mock_k8s_client.create_or_update_ingress = MagicMock() + _create_or_update_participant_ingress('fl-test', service_port=32443, namespace='fedlearner') + mock_k8s_client.create_or_update_ingress.assert_called_once_with( + name='fl-test-client-auth', + namespace='fedlearner', + metadata={ + 'name': 'fl-test-client-auth', + 'namespace': 'fedlearner', + 'annotations': { + 'nginx.ingress.kubernetes.io/backend-protocol': 'GRPCS', + 'nginx.ingress.kubernetes.io/http2-insecure-port': 'true', + 'nginx.ingress.kubernetes.io/configuration-snippet': + '\n' + ' grpc_next_upstream_tries 5;\n' + ' grpc_set_header Host test.fedlearner.net;\n' + ' grpc_set_header Authority test.fedlearner.net;', + 'nginx.ingress.kubernetes.io/server-snippet': + '\n' + ' grpc_ssl_verify on;\n' + ' grpc_ssl_server_name on;\n' + ' grpc_ssl_name test.fedlearner.net;\n' + ' grpc_ssl_trusted_certificate /etc/ingress-nginx/client/all.pem;\n' + ' grpc_ssl_certificate /etc/ingress-nginx/client/client.pem;\n' + ' grpc_ssl_certificate_key /etc/ingress-nginx/client/client.key;' + } + }, + spec={ + 'rules': [{ + 'host': 'fl-test-client-auth.com', + 'http': { + 'paths': [{ + 'pathType': 'ImplementationSpecific', + 'backend': { + 'serviceName': 'fl-test', + 'servicePort': 32443 + } + }] + } + }], + 'ingressClassName': None + }, + ) + + @patch('fedlearner_webconsole.participant.k8s_utils._create_or_update_participant_ingress') + @patch('fedlearner_webconsole.participant.k8s_utils.k8s_client') + def test_create_or_update_participant_in_k8s(self, mock_k8s_client: MagicMock, + mock_create_or_update_participant_ingress: MagicMock): + mock_k8s_client.create_or_update_service = MagicMock() + create_or_update_participant_in_k8s(domain_name='fl-a-test.com', + host='1.2.3.4', + port=32443, + namespace='fedlearner') + mock_k8s_client.create_or_update_service.assert_called_once_with( + name='fl-a-test', + namespace='fedlearner', + metadata={ + 'name': 'fl-a-test', + 'namespace': 
'fedlearner', + }, + spec={ + 'externalName': '1.2.3.4', + 'type': 'ExternalName', + }, + ) + mock_create_or_update_participant_ingress.assert_called_once_with( + name='fl-a-test', + service_port=32443, + namespace='fedlearner', + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/participant/models.py b/web_console_v2/api/fedlearner_webconsole/participant/models.py new file mode 100644 index 000000000..1f719bd31 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/participant/models.py @@ -0,0 +1,96 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +import json +from enum import Enum +from typing import Dict +from sqlalchemy import UniqueConstraint, Index +from sqlalchemy.sql import func + +from fedlearner_webconsole.proto import participant_pb2 +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.utils.domain_name import get_pure_domain_name +from fedlearner_webconsole.utils.base_model.review_ticket_model import ReviewTicketModel + + +class ParticipantType(Enum): + PLATFORM = 0 + LIGHT_CLIENT = 1 + + +class Participant(db.Model, ReviewTicketModel): + __tablename__ = 'participants_v2' + __table_args__ = (UniqueConstraint('domain_name', name='uniq_domain_name'), + default_table_args('This is webconsole participant table.')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='participant id') + name = db.Column(db.String(255), nullable=False, comment='participant name') + domain_name = db.Column(db.String(255), unique=True, nullable=False, comment='participant domain_name') + host = db.Column(db.String(255), comment='participant host') + port = db.Column(db.Integer, comment='host port') + type = db.Column('participant_type', + db.Enum(ParticipantType, native_enum=False, length=64, create_constraint=False), + default=ParticipantType.PLATFORM, + key='type', + comment='participant type') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment') + extra = db.Column(db.Text(), comment='extra_info') + last_connected_at = db.Column(db.DateTime(timezone=True), comment='last connected at') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at') + updated_at = db.Column(db.DateTime(timezone=True), + onupdate=func.now(), + server_default=func.now(), + comment='updated at') + + def set_extra_info(self, extra_info: Dict): + self.extra = json.dumps(extra_info) + + def get_extra_info(self) -> Dict: + if self.extra is not None: + return json.loads(self.extra) + return {} + + def get_type(self) -> ParticipantType: + return self.type if self.type else ParticipantType.PLATFORM + + def pure_domain_name(self): + return get_pure_domain_name(self.domain_name) + + def to_proto(self) -> participant_pb2.Participant: + extra_info = self.get_extra_info() + proto = 
participant_pb2.Participant(
+            id=self.id,
+            name=self.name,
+            domain_name=self.domain_name,
+            pure_domain_name=self.pure_domain_name(),
+            host=self.host,
+            port=self.port,
+            type=self.get_type().name,
+            comment=self.comment,
+            last_connected_at=to_timestamp(self.last_connected_at) if self.last_connected_at else 0,
+            created_at=to_timestamp(self.created_at),
+            updated_at=to_timestamp(self.updated_at) if self.updated_at else 0,
+            extra=participant_pb2.ParticipantExtra(is_manual_configured=extra_info.get('is_manual_configured', False)))
+        return proto
+
+
+class ProjectParticipant(db.Model):
+    __tablename__ = 'projects_participants_v2'
+    __table_args__ = (Index('idx_project_id', 'project_id'), Index('idx_participant_id', 'participant_id'),
+                      default_table_args('This is webconsole projects and participants relationship table.'))
+    id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='relationship id')
+    project_id = db.Column(db.Integer, nullable=False, comment='project id')
+    participant_id = db.Column(db.Integer, nullable=False, comment='participant id')
diff --git a/web_console_v2/api/fedlearner_webconsole/participant/models_test.py b/web_console_v2/api/fedlearner_webconsole/participant/models_test.py
new file mode 100644
index 000000000..ac648c270
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/participant/models_test.py
@@ -0,0 +1,54 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+from datetime import datetime, timezone
+
+from fedlearner_webconsole.participant.models import Participant, ParticipantType
+from fedlearner_webconsole.proto import participant_pb2
+from testing.no_web_server_test_case import NoWebServerTestCase
+
+
+class ParticipantTest(NoWebServerTestCase):
+
+    def test_to_proto(self):
+        created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc)
+        participant = Participant(id=123,
+                                  name='testp',
+                                  domain_name='fl-test.com',
+                                  host='test.fl.com',
+                                  port=32443,
+                                  type=ParticipantType.PLATFORM,
+                                  comment='c',
+                                  created_at=created_at,
+                                  updated_at=created_at,
+                                  extra='{"is_manual_configured":true}')
+        self.assertEqual(
+            participant.to_proto(),
+            participant_pb2.Participant(id=123,
+                                        name='testp',
+                                        domain_name='fl-test.com',
+                                        pure_domain_name='test',
+                                        host='test.fl.com',
+                                        port=32443,
+                                        type='PLATFORM',
+                                        comment='c',
+                                        created_at=int(created_at.timestamp()),
+                                        updated_at=int(created_at.timestamp()),
+                                        extra=participant_pb2.ParticipantExtra(is_manual_configured=True)))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/participant/services.py b/web_console_v2/api/fedlearner_webconsole/participant/services.py
new file mode 100644
index 000000000..c98738dbc
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/participant/services.py
@@ -0,0 +1,85 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+from typing import List, Optional
+from fedlearner_webconsole.auth.models import Session
+from fedlearner_webconsole.participant.models import ProjectParticipant, Participant, ParticipantType
+
+
+class ParticipantService(object):
+
+    def __init__(self, session: Session):
+        self._session = session
+
+    def get_participant_by_pure_domain_name(self, pure_domain_name: str) -> Optional[Participant]:
+        """Finds a specific participant by pure domain name, e.g. aliyun-test.
+
+        For compatibility reasons, there are two kinds of domain names, fl-xxx.com and xxx.fedlearner.net,
+        so a participant is identified globally by its pure domain name."""
+        participants = self._session.query(Participant).filter(
+            Participant.domain_name.like(f'%{pure_domain_name}%')).all()
+        for p in participants:
+            if p.pure_domain_name() == pure_domain_name:
+                return p
+        return None
+
+    def get_participants_by_project(self, project_id: int) -> List[Participant]:
+        # Note: created_at precision may be too coarse for a stable order; id
+        # would be a safer sort key.
+        participants = self._session.query(Participant).join(
+            ProjectParticipant, ProjectParticipant.participant_id == Participant.id).filter(
+                ProjectParticipant.project_id == project_id). \
+            order_by(Participant.created_at.desc()).all()
+        return participants
+
+    # Gets only platform participants, ignoring light-client participants.
+    def get_platform_participants_by_project(self, project_id: int) -> List[Participant]:
+        participants = self.get_participants_by_project(project_id)
+        platform_participants = []
+        for participant in participants:
+            # Compatibility hack: legacy rows may have a null participant_type,
+            # which get_type() maps to PLATFORM.
+            if participant.get_type() == ParticipantType.PLATFORM:
+                platform_participants.append(participant)
+        return platform_participants
+
+    def get_number_of_projects(self, participant_id: int) -> int:
+        return self._session.query(ProjectParticipant).filter_by(participant_id=participant_id).count()
+
+    def create_light_client_participant(self,
+                                        name: str,
+                                        domain_name: str,
+                                        comment: Optional[str] = None) -> Participant:
+        participant = Participant(name=name,
+                                  domain_name=domain_name,
+                                  type=ParticipantType.LIGHT_CLIENT,
+                                  comment=comment)
+        self._session.add(participant)
+        return participant
+
+    def create_platform_participant(self,
+                                    name: str,
+                                    domain_name: str,
+                                    host: str,
+                                    port: int,
+                                    extra: dict,
+                                    comment: Optional[str] = None) -> Participant:
+        participant = Participant(name=name,
+                                  domain_name=domain_name,
+                                  host=host,
+                                  port=port,
+                                  type=ParticipantType.PLATFORM,
+                                  comment=comment)
+        participant.set_extra_info(extra)
+        self._session.add(participant)
+        return participant
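+
+
+# A minimal usage sketch (comments only, not executed; 'demo' is a
+# hypothetical pure domain name):
+#
+#   with db.session_scope() as session:
+#       service = ParticipantService(session)
+#       participant = service.get_participant_by_pure_domain_name('demo')
+#       num_projects = service.get_number_of_projects(participant.id)
diff --git a/web_console_v2/api/fedlearner_webconsole/participant/services_test.py b/web_console_v2/api/fedlearner_webconsole/participant/services_test.py
new file mode 100644
index 000000000..4870857e7
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/participant/services_test.py
@@ -0,0 +1,109 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.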
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant, ParticipantType, ProjectParticipant +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.pp_time import sleep +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ParticipantServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.participant_1 = Participant(name='participant 1', domain_name='fl-participant-1.com') + self.participant_2 = Participant(name='participant 2', domain_name='participant-2.fedlearner.net') + self.participant_3 = Participant(name='participant 3', + domain_name='participant-3.fedlearner.net', + type=ParticipantType.LIGHT_CLIENT) + self.project_1 = Project(name='project 1') + self.project_2 = Project(name='project 2') + self.project_3 = Project(name='project 3') + self.relationship_11 = ProjectParticipant(project_id=1, participant_id=1) + self.relationship_12 = ProjectParticipant(project_id=1, participant_id=2) + self.relationship_22 = ProjectParticipant(project_id=2, participant_id=2) + self.relationship_33 = ProjectParticipant(project_id=3, participant_id=3) + + with db.session_scope() as session: + session.add(self.participant_1) + session.flush() + sleep(1) + session.add(self.participant_2) + session.add(self.project_1) + session.flush() + sleep(1) + session.add(self.project_2) + session.add(self.project_3) + session.add(self.participant_3) + session.add(self.relationship_11) + session.add(self.relationship_12) + session.add(self.relationship_22) + session.add(self.relationship_33) + session.commit() + + def test_get_participant_by_pure_domain_name(self): + with db.session_scope() as session: + service = ParticipantService(session) + + p = service.get_participant_by_pure_domain_name('participant-1') + self.assertEqual(p.id, self.participant_1.id) + p = service.get_participant_by_pure_domain_name('participant-2') + self.assertEqual(p.id, self.participant_2.id) + self.assertIsNone(service.get_participant_by_pure_domain_name('participant')) + self.assertIsNone(service.get_participant_by_pure_domain_name('none')) + + def test_get_participants_by_project_id(self): + with db.session_scope() as session: + service = ParticipantService(session) + participants = service.get_participants_by_project(1) + self.assertEqual(len(participants), 2) + self.assertEqual(participants[0].name, 'participant 2') + self.assertEqual(participants[1].name, 'participant 1') + + participants = service.get_participants_by_project(2) + self.assertEqual(len(participants), 1) + self.assertEqual(participants[0].name, 'participant 2') + + participants = service.get_participants_by_project(3) + self.assertEqual(len(participants), 1) + self.assertEqual(participants[0].name, 'participant 3') + + def test_get_platform_participants_by_project(self): + + with 
db.session_scope() as session: + service = ParticipantService(session) + participants = service.get_platform_participants_by_project(2) + self.assertEqual(len(participants), 1) + self.assertEqual(participants[0].name, 'participant 2') + + participants = service.get_platform_participants_by_project(3) + self.assertEqual(len(participants), 0) + + def test_get_number_of_projects(self): + with db.session_scope() as session: + service = ParticipantService(session) + num1 = service.get_number_of_projects(1) + self.assertEqual(num1, 1) + + num2 = service.get_number_of_projects(2) + self.assertEqual(num2, 2) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/project/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/project/BUILD.bazel new file mode 100644 index 000000000..f16d5a8ce --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/BUILD.bazel @@ -0,0 +1,188 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:client_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:iam_required_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:permission_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "controllers_lib", + srcs = [ + "controllers.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:project_service_client_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "controllers_lib_test", + size = "small", + srcs = [ + "controllers_test.py", + ], + imports = ["../.."], + main = "controllers_test.py", + deps = [ + ":controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "services_lib", + srcs = ["services.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "services_lib_test", + size = "small", + srcs = [ + "services_test.py", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "project_scheduler_lib", + srcs = [ + "project_scheduler.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/project:controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:services_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:project_service_client_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "project_scheduler_lib_test", + size = "small", + srcs = [ + "project_scheduler_test.py", + ], + imports = ["../.."], + main = "project_scheduler_test.py", + deps = [ + ":project_scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/project:controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/project/add_on.py b/web_console_v2/api/fedlearner_webconsole/project/add_on.py deleted file mode 100644 index c20453528..000000000 --- a/web_console_v2/api/fedlearner_webconsole/project/add_on.py +++ /dev/null @@ -1,342 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import tarfile -import io -import os -from base64 import b64encode, b64decode -from typing import Type, Dict -from OpenSSL import crypto, SSL -from fedlearner_webconsole.utils.k8s_client import K8sClient - -CA_SECRET_NAME = 'ca-secret' -OPERATOR_NAME = 'fedlearner-operator' -SERVER_SECRET_NAME = 'fedlearner-proxy-server' -INGRESS_NGINX_CONTROLLER_NAME = 'fedlearner-stack-ingress-nginx-controller' - - -def parse_certificates(encoded_gz): - """ - Parse certificates from base64-encoded string to a dict - Args: - encoded_gz: A base64-encoded string from a `.gz` file. - Returns: - dict: key is the file name, value is the content - """ - binary_gz = io.BytesIO(b64decode(encoded_gz)) - with tarfile.open(fileobj=binary_gz) as gz: - certificates = {} - for file in gz.getmembers(): - if file.isfile(): - # raw file name is like `fl-test.com/client/client.pem` - certificates[file.name.split('/', 1)[-1]] = \ - str(b64encode(gz.extractfile(file).read()), - encoding='utf-8') - return certificates - - -def verify_certificates(certificates: Dict[str, str]) -> (bool, str): - """ - Verify certificates from 4 aspects: - 1. The CN of all public keys are equal. - 2. All the CN are generic domain names. - 3. Public key match private key. - 4. Private key is signed by CA. 
- Args: - certificates: - Returns: - """ - try: - client_public_key = crypto.load_certificate( - crypto.FILETYPE_PEM, - b64decode(certificates.get('client/client.pem'))) - server_public_key = crypto.load_certificate( - crypto.FILETYPE_PEM, - b64decode(certificates.get('server/server.pem'))) - client_private_key = crypto.load_privatekey( - crypto.FILETYPE_PEM, - b64decode(certificates.get('client/client.key'))) - server_private_key = crypto.load_privatekey( - crypto.FILETYPE_PEM, - b64decode(certificates.get('server/server.key'))) - client_intermediate_ca = crypto.load_certificate( - crypto.FILETYPE_PEM, - b64decode(certificates.get('client/intermediate.pem'))) - server_intermediate_ca = crypto.load_certificate( - crypto.FILETYPE_PEM, - b64decode(certificates.get('server/intermediate.pem'))) - client_root_ca = crypto.load_certificate( - crypto.FILETYPE_PEM, - b64decode(certificates.get('client/root.pem'))) - server_root_ca = crypto.load_certificate( - crypto.FILETYPE_PEM, - b64decode(certificates.get('server/root.pem'))) - except crypto.Error as err: - return False, 'Format of key or CA is invalid: {}'.format(err) - - if client_public_key.get_subject().CN != server_public_key.get_subject().CN: - return False, 'Client and server public key CN mismatch' - if not client_public_key.get_subject().CN.startswith('*.'): - return False, 'CN of public key should be a generic domain name' - - try: - client_context = SSL.Context(SSL.TLSv1_METHOD) - client_context.use_certificate(client_public_key) - client_context.use_privatekey(client_private_key) - client_context.check_privatekey() - - server_context = SSL.Context(SSL.TLSv1_METHOD) - server_context.use_certificate(server_public_key) - server_context.use_privatekey(server_private_key) - server_context.check_privatekey() - except SSL.Error as err: - return False, 'Key pair mismatch: {}'.format(err) - - try: - client_store = crypto.X509Store() - client_store.add_cert(client_root_ca) - client_store.add_cert(client_intermediate_ca) - crypto.X509StoreContext(client_store, client_public_key)\ - .verify_certificate() - except crypto.X509StoreContextError as err: - return False, 'Client key and CA mismatch: {}'.format(err) - try: - server_store = crypto.X509Store() - server_store.add_cert(server_root_ca) - server_store.add_cert(server_intermediate_ca) - crypto.X509StoreContext(server_store, server_public_key)\ - .verify_certificate() - except crypto.X509StoreContextError as err: - return False, 'Server key and CA mismatch: {}'.format(err) - - return True, '' - - -def create_add_on(client: Type[K8sClient], domain_name: str, url: str, - certificates: Dict[str, str], custom_host: str = None): - """ - Idempotent - Create add on and upgrade nginx-ingress and operator. - If add on of domain_name exists, replace it. 
- - Args: - client: K8s client instance - domain_name: participant's domain name, used to create Ingress - url: participant's external ip, used to create ExternalName - Service - certificates: used for two-way tls authentication and to create one - server Secret, one client Secret and one CA - custom_host: used for case where participant is using an external - authentication gateway - """ - # url: xxx.xxx.xxx.xxx:xxxxx - ip = url.split(':')[0] - port = int(url.split(':')[1]) - client_all_pem = str(b64encode('{}\n{}'.format( - str(b64decode(certificates.get('client/intermediate.pem')), - encoding='utf-8').strip(), - str(b64decode(certificates.get('client/root.pem')), - encoding='utf-8').strip()).encode()), encoding='utf-8') - server_all_pem = str(b64encode('{}\n{}'.format( - str(b64decode(certificates.get('server/intermediate.pem')), - encoding='utf-8').strip(), - str(b64decode(certificates.get('server/root.pem')), - encoding='utf-8').strip()).encode()), encoding='utf-8') - name = domain_name.split('.')[0] - client_secret_name = '{}-client'.format(name) - client_auth_ingress_name = '-client-auth.'.join(domain_name.split('.')) - - # Create server certificate secret - # If users verify gRpc in external gateway, - # `AUTHORIZATION_MODE` should be set to `EXTERNAL`. - if os.environ.get('AUTHORIZATION_MODE') != 'EXTERNAL': - client.create_or_update_secret( - data={ - 'ca.crt': certificates.get('server/intermediate.pem'), - 'tls.crt': certificates.get('server/server.pem'), - 'tls.key': certificates.get('server/server.key') - }, - metadata={ - 'name': SERVER_SECRET_NAME, - 'namespace': 'default' - }, - secret_type='Opaque', - name=SERVER_SECRET_NAME - ) - client.create_or_update_secret( - data={ - 'ca.crt': server_all_pem - }, - metadata={ - 'name': CA_SECRET_NAME, - 'namespace': 'default' - }, - secret_type='Opaque', - name=CA_SECRET_NAME - ) - # TODO: Support multiple participants - operator = client.get_deployment(OPERATOR_NAME) - new_args = list(filter(lambda arg: not arg.startswith('--ingress'), - operator.spec.template.spec.containers[0].args)) - new_args.extend([ - '--ingress-extra-host-suffix=".{}"'.format(domain_name), - '--ingress-client-auth-secret-name="default/ca-secret"', - '--ingress-enabled-client-auth=true', - '--ingress-secret-name={}'.format(SERVER_SECRET_NAME)]) - operator.spec.template.spec.containers[0].args = new_args - client.create_or_update_deployment(metadata=operator.metadata, - spec=operator.spec, - name=OPERATOR_NAME) - - # Create client certificate secret - client.create_or_update_secret( - data={ - 'client.pem': certificates.get('client/intermediate.pem'), - 'client.key': certificates.get('client/client.key'), - 'all.pem': client_all_pem - }, - metadata={ - 'name': client_secret_name - }, - secret_type='Opaque', - name=client_secret_name - ) - - # Update ingress-nginx-controller to load client secret - ingress_nginx_controller = client.get_deployment( - INGRESS_NGINX_CONTROLLER_NAME - ) - volumes = ingress_nginx_controller.spec.template.spec.volumes or [] - volumes = list(filter(lambda volume: volume.name != client_secret_name, - volumes)) - volumes.append({ - 'name': client_secret_name, - 'secret': { - 'secretName': client_secret_name - } - }) - volume_mounts = ingress_nginx_controller.spec.template\ - .spec.containers[0].volume_mounts or [] - volume_mounts = list(filter( - lambda mount: mount.name != client_secret_name, volume_mounts)) - volume_mounts.append( - { - 'mountPath': '/etc/{}/client/'.format(name), - 'name': client_secret_name - }) - 
ingress_nginx_controller.spec.template.spec.volumes = volumes - ingress_nginx_controller.spec.template\ - .spec.containers[0].volume_mounts = volume_mounts - client.create_or_update_deployment( - metadata=ingress_nginx_controller.metadata, - spec=ingress_nginx_controller.spec, - name=INGRESS_NGINX_CONTROLLER_NAME - ) - # TODO: check ingress-nginx-controller's health - - # Create ingress to forward request to peer - client.create_or_update_service( - metadata={ - 'name': name, - 'namespace': 'default' - }, - spec={ - 'externalName': ip, - 'type': 'ExternalName' - }, - name=name - ) - configuration_snippet_template = 'grpc_next_upstream_tries 5;\n'\ - 'grpc_set_header Host {0};\n'\ - 'grpc_set_header Authority {0};' - configuration_snippet = \ - configuration_snippet_template.format(custom_host or '$http_x_host') - client.create_or_update_ingress( - metadata={ - 'name': domain_name, - 'namespace': 'default', - 'annotations': { - 'kubernetes.io/ingress.class': 'nginx', - 'nginx.ingress.kubernetes.io/backend-protocol': 'GRPCS', - 'nginx.ingress.kubernetes.io/http2-insecure-port': 't', - 'nginx.ingress.kubernetes.io/configuration-snippet': - configuration_snippet - } - }, - spec={ - 'rules': [{ - 'host': domain_name, - 'http': { - 'paths': [ - { - 'path': '/', - 'backend': { - 'serviceName': name, - 'servicePort': port - } - } - ] - } - }] - }, - name=domain_name - ) - # In most case with external authorization mode, - # secrets are created by helm charts (deploy/charts/fedlearner-add-on). - # So use `ingress-nginx` as default. - # FIXME: change when supporting multi-peer - secret_path = name if os.environ.get('AUTHORIZATION_MODE') != 'EXTERNAL' \ - else 'ingress-nginx' - server_snippet_template = \ - 'grpc_ssl_verify on;\n'\ - 'grpc_ssl_server_name on;\n'\ - 'grpc_ssl_name {0};\n'\ - 'grpc_ssl_trusted_certificate /etc/{1}/client/all.pem;\n'\ - 'grpc_ssl_certificate /etc/{1}/client/client.pem;\n'\ - 'grpc_ssl_certificate_key /etc/{1}/client/client.key;' - server_snippet = server_snippet_template.format( - custom_host or '$http_x_host', secret_path) - client.create_or_update_ingress( - metadata={ - 'name': client_auth_ingress_name, - 'namespace': 'default', - 'annotations': { - 'kubernetes.io/ingress.class': 'nginx', - 'nginx.ingress.kubernetes.io/backend-protocol': 'GRPCS', - 'nginx.ingress.kubernetes.io/http2-insecure-port': 't', - 'nginx.ingress.kubernetes.io/configuration-snippet': - configuration_snippet, - 'nginx.ingress.kubernetes.io/server-snippet': server_snippet - } - }, - spec={ - 'rules': [{ - 'host': client_auth_ingress_name, - 'http': { - 'paths': [ - { - 'path': '/', - 'backend': { - 'serviceName': name, - 'servicePort': port - } - } - ] - } - }] - }, - name=client_auth_ingress_name - ) diff --git a/web_console_v2/api/fedlearner_webconsole/project/apis.py b/web_console_v2/api/fedlearner_webconsole/project/apis.py index c13a6a3ea..97e00c83b 100644 --- a/web_console_v2/api/fedlearner_webconsole/project/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/project/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. @@ -13,282 +13,498 @@ # limitations under the License. 
# coding: utf-8 -# pylint: disable=raise-missing-from - -import re from enum import Enum -from uuid import uuid4 +from functools import partial +from http import HTTPStatus +from typing import Optional, Dict, Any, List -from sqlalchemy.sql import func -from flask import request -from flask_restful import Resource, Api, reqparse from google.protobuf.json_format import ParseDict +from flask_restful import Resource, Api +from marshmallow import Schema, fields, validate, post_load +from marshmallow.validate import Length +from envs import Envs +from fedlearner_webconsole.audit.decorators import emits_event from fedlearner_webconsole.db import db -from fedlearner_webconsole.project.models import Project -from fedlearner_webconsole.proto.common_pb2 import Variable, StatusCode -from fedlearner_webconsole.proto.project_pb2 \ - import Project as ProjectProto, CertificateStorage, \ - Participant as ParticipantProto -from fedlearner_webconsole.project.add_on \ - import parse_certificates, verify_certificates, create_add_on +from fedlearner_webconsole.iam.client import create_iams_for_resource +from fedlearner_webconsole.iam.iam_required import iam_required +from fedlearner_webconsole.iam.permission import Permission +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.participant.models import ProjectParticipant +from fedlearner_webconsole.project.controllers import PendingProjectRpcController +from fedlearner_webconsole.project.models import Project, PendingProjectState, ProjectRole, PendingProject +from fedlearner_webconsole.project.services import ProjectService, PendingProjectService +from fedlearner_webconsole.proto.common_pb2 import StatusCode, Variable from fedlearner_webconsole.exceptions \ - import InvalidArgumentException, NotFoundException + import InvalidArgumentException, NotFoundException, ResourceConflictException, InternalException +from fedlearner_webconsole.proto.project_pb2 import ProjectConfig +from fedlearner_webconsole.proto.review_pb2 import TicketType, TicketDetails +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.utils.decorators import jwt_required -from fedlearner_webconsole.utils.k8s_client import k8s_client -from fedlearner_webconsole.workflow.models import Workflow - -_CERTIFICATE_FILE_NAMES = [ - 'client/client.pem', 'client/client.key', 'client/intermediate.pem', - 'client/root.pem', 'server/server.pem', 'server/server.key', - 'server/intermediate.pem', 'server/root.pem' -] - -_URL_REGEX = r'(?:^((?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])(?:\.' \ - r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])){3})(?::+' \ - r'(\d+))?$)|(?:^\[((?:(?:[0-9a-fA-F:]){1,4}(?:(?::(?:[0-9a-fA-F]' \ - r'){1,4}|:)){2,7})+)\](?::+(\d+))?|((?:(?:[0-9a-fA-F:]){1,4}(?:(' \ - r'?::(?:[0-9a-fA-F]){1,4}|:)){2,7})+)$)' +from fedlearner_webconsole.rpc.v2.project_service_client import ProjectServiceClient +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator, use_args, use_kwargs +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.utils.flask_utils import get_current_user, make_flask_response, FilterExpField class ErrorMessage(Enum): PARAM_FORMAT_ERROR = 'Format of parameter {} is wrong: {}' - NAME_CONFLICT = 'Project name {} has been used.' 
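+# A behavior sketch for the _add_variable helper defined below (illustrative
+# values only, not executed):
+#   _add_variable({}, 'storage_root_path', '/data')
+#   -> {'variables': [{'name': 'storage_root_path', 'value': '/data'}]}
+# An existing variable with the same name is left untouched.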
+ + +def _add_variable(config: Optional[Dict], field: str, value: Any) -> Dict: + config = config or {} + config['variables'] = config.get('variables', []) + for item in config['variables']: + if item['name'] == field: + return config + config['variables'].append({'name': field, 'value': value}) + return config + + +class CreateProjectParameter(Schema): + name = fields.String(required=True) + config = fields.Dict(load_default={}) + # System does not support multiple participants now + participant_ids = fields.List(fields.Integer(), validate=Length(equal=1)) + comment = fields.String(load_default='') + + +class CreatePendingProjectParameter(Schema): + name = fields.String(required=True) + config = fields.Dict(load_default={}) + participant_ids = fields.List(fields.Integer(), validate=Length(min=1)) + comment = fields.String(load_default='') + + @post_load() + def make(self, data, **kwargs): + data['config'] = ParseDict(data['config'], ProjectConfig(), ignore_unknown_fields=True) + return data class ProjectsApi(Resource): - @jwt_required() - def post(self): - parser = reqparse.RequestParser() - parser.add_argument('name', - required=True, - type=str, - help=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'name', 'Empty')) - parser.add_argument('config', - required=True, - type=dict, - help=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'config', 'Empty')) - parser.add_argument('comment') - data = parser.parse_args() + + @input_validator + @credentials_required + @iam_required(Permission.PROJECTS_POST) + @emits_event(audit_fields=['participant_ids']) + @use_args(CreateProjectParameter()) + def post(self, data: Dict): + """Creates a new project. + --- + tags: + - project + description: Creates a new project + parameters: + - in: body + name: body + schema: + $ref: '#/definitions/CreateProjectParameter' + responses: + 201: + description: Created a project + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Project' + """ name = data['name'] config = data['config'] comment = data['comment'] - - if Project.query.filter_by(name=name).first() is not None: - raise InvalidArgumentException( - details=ErrorMessage.NAME_CONFLICT.value.format(name)) - - if config.get('participants') is None: - raise InvalidArgumentException( - details=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'participants', 'Empty')) - if len(config.get('participants')) != 1: - # TODO: remove limit after operator supports multiple participants - raise InvalidArgumentException( - details='Currently not support multiple participants.') - - # exact configuration from variables - # TODO: one custom host for one participant - grpc_ssl_server_host = None - egress_host = None - for variable in config.get('variables', []): - if variable.get('name') == 'GRPC_SSL_SERVER_HOST': - grpc_ssl_server_host = variable.get('value') - if variable.get('name') == 'EGRESS_HOST': - egress_host = variable.get('value') - - # parse participant - certificates = {} - for participant in config.get('participants'): - if 'name' not in participant.keys() or \ - 'domain_name' not in participant.keys(): - raise InvalidArgumentException( - details=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'participants', 'Participant must have name and ' - 'domain_name.')) - domain_name = participant.get('domain_name') - # Grpc spec - participant['grpc_spec'] = { - 'authority': - egress_host or '{}-client-auth.com'.format(domain_name[:-4]) - } - - if participant.get('certificates'): - # If users use web console to create add-on, - # 
peer url must be given - if 'url' not in participant.keys(): + participant_ids = data['participant_ids'] + with db.session_scope() as session: + if session.query(Project).filter_by(name=name).first() is not None: + raise ResourceConflictException(message=f'Project name {name} has been used.') + + with db.session_scope() as session: + try: + user = get_current_user() + # defensive programming, if user is none, wont query user.username + new_project = Project(name=name, comment=comment, creator=user and user.username) + config = _add_variable(config, 'storage_root_path', Envs.STORAGE_ROOT) + try: + new_project.set_config(ParseDict(config, ProjectConfig())) + except Exception as e: raise InvalidArgumentException( - details=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'participants', 'Participant must have url.')) - if re.match(_URL_REGEX, participant.get('url')) is None: - raise InvalidArgumentException('URL pattern is wrong') - - current_cert = parse_certificates( - participant.get('certificates')) - success, err = verify_certificates(current_cert) - if not success: - raise InvalidArgumentException(err) - certificates[domain_name] = {'certs': current_cert} - if 'certificates' in participant.keys(): - participant.pop('certificates') - - new_project = Project() - # generate token - # If users send a token, then use it instead. - # If `token` is None, generate a new one by uuid. - config['name'] = name - token = config.get('token', uuid4().hex) - config['token'] = token - - # check format of config - try: - new_project.set_config(ParseDict(config, ProjectProto())) - except Exception as e: - raise InvalidArgumentException( - details=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'config', e)) - new_project.set_certificate( - ParseDict({'domain_name_to_cert': certificates}, - CertificateStorage())) - new_project.name = name - new_project.token = token - new_project.comment = comment - - # create add on - for participant in new_project.get_config().participants: - if participant.domain_name in\ - new_project.get_certificate().domain_name_to_cert.keys(): - _create_add_on( - participant, - new_project.get_certificate().domain_name_to_cert[ - participant.domain_name], grpc_ssl_server_host) - try: - new_project = db.session.merge(new_project) - db.session.commit() - except Exception as e: - raise InvalidArgumentException(details=str(e)) - - return {'data': new_project.to_dict()} - - @jwt_required() + details=ErrorMessage.PARAM_FORMAT_ERROR.value.format('config', e)) from e + session.add(new_project) + session.flush() + + for participant_id in participant_ids: + # insert a relationship into the table + new_relationship = ProjectParticipant(project_id=new_project.id, participant_id=participant_id) + session.add(new_relationship) + + create_iams_for_resource(new_project, user) + session.commit() + except Exception as e: + raise InvalidArgumentException(details=str(e)) from e + return make_flask_response(data=new_project.to_proto(), status=HTTPStatus.CREATED) + + @credentials_required def get(self): - # TODO: Not count soft-deleted workflow - projects = db.session.query( - Project, func.count(Workflow.id).label('num_workflow'))\ - .join(Workflow, Workflow.project_id == Project.id, isouter=True)\ - .group_by(Project.id)\ - .all() - result = [] - for project in projects: - project_dict = project.Project.to_dict() - project_dict['num_workflow'] = project.num_workflow - result.append(project_dict) - return {'data': result} + """Gets all projects. + --- + tags: + - project + description: gets all projects. 
+ responses: + 200: + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.ProjectRef' + """ + with db.session_scope() as session: + service = ProjectService(session) + return make_flask_response(data=service.get_projects()) class ProjectApi(Resource): - @jwt_required() - def get(self, project_id): - project = Project.query.filter_by(id=project_id).first() - if project is None: - raise NotFoundException( - f'Failed to find project: {project_id}') - return {'data': project.to_dict()} - - @jwt_required() - def patch(self, project_id): - project = Project.query.filter_by(id=project_id).first() - if project is None: - raise NotFoundException( - f'Failed to find project: {project_id}') - config = project.get_config() - if request.json.get('token') is not None: - new_token = request.json.get('token') - config.token = new_token - project.token = new_token - if request.json.get('variables') is not None: - del config.variables[:] - config.variables.extend([ - ParseDict(variable, Variable()) - for variable in request.json.get('variables') - ]) - - # exact configuration from variables - grpc_ssl_server_host = None - egress_host = None - for variable in config.variables: - if variable.name == 'GRPC_SSL_SERVER_HOST': - grpc_ssl_server_host = variable.value - if variable.name == 'EGRESS_HOST': - egress_host = variable.value - - if request.json.get('participant_name'): - config.participants[0].name = request.json.get('participant_name') - - if request.json.get('comment'): - project.comment = request.json.get('comment') - - for participant in config.participants: - if participant.domain_name in\ - project.get_certificate().domain_name_to_cert.keys(): - _create_add_on( - participant, - project.get_certificate().domain_name_to_cert[ - participant.domain_name], grpc_ssl_server_host) - if egress_host: - participant.grpc_spec.authority = egress_host - project.set_config(config) - try: - db.session.commit() - except Exception as e: - raise InvalidArgumentException(details=e) - return {'data': project.to_dict()} + + @credentials_required + @iam_required(Permission.PROJECT_GET) + def get(self, project_id: int): + """Gets a project. + --- + tags: + - project + description: Gets a project + parameters: + - in: path + name: project_id + schema: + type: integer + responses: + 200: + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Project' + """ + with db.session_scope() as session: + project = session.query(Project).filter_by(id=project_id).first() + if project is None: + raise NotFoundException(f'Failed to find project: {project_id}') + return make_flask_response(data=project.to_proto(), status=HTTPStatus.OK) + + @input_validator + @credentials_required + @iam_required(Permission.PROJECT_PATCH) + @emits_event(audit_fields=['variables']) + @use_kwargs({ + 'comment': fields.String(load_default=None), + 'variables': fields.List(fields.Dict(), load_default=None), + 'config': fields.Dict(load_default=None) + }) + def patch(self, project_id: int, comment: Optional[str], variables: Optional[List[Dict]], config: Optional[Dict]): + """Patch a project. + --- + tags: + - project + description: Update a project. + parameters: + - in: path + name: project_id + schema: + type: integer + - in: body + name: body + schema: + type: object + properties: + comment: + type: string + variables: + description: A list of variables to override existing ones. 
+ type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.Variable' + config: + description: Config of project, include variables. + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ProjectConfig' + responses: + 200: + description: Updated project + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.Project' + """ + with db.session_scope() as session: + project = session.query(Project).filter_by(id=project_id).first() + if project is None: + raise NotFoundException(f'Failed to find project: {project_id}') + + if comment: + project.comment = comment + + if config is not None: + config_proto = ParseDict(config, ProjectConfig(), ignore_unknown_fields=True) + project.set_config(config_proto) + session.flush() + # TODO(xiangyuxuan.prs): remove variables parameter when pending project launch + if variables is not None: + # Overrides all variables + variables = [ParseDict(variable, Variable()) for variable in variables] + project.set_variables(variables) + try: + session.commit() + except Exception as e: + raise InvalidArgumentException(details=e) from e + + return make_flask_response(data=project.to_proto(), status=HTTPStatus.OK) class CheckConnectionApi(Resource): - @jwt_required() - def post(self, project_id): - project = Project.query.filter_by(id=project_id).first() - if project is None: - raise NotFoundException( - f'Failed to find project: {project_id}') - success = True - details = [] - # TODO: Concurrently check - for participant in project.get_config().participants: - result = self.check_connection(project.get_config(), participant) - success = success & (result.code == StatusCode.STATUS_SUCCESS) + + @credentials_required + def get(self, project_id: int): + """Checks the connection for a project. + --- + tags: + - project + description: Checks the connection for a project. + parameters: + - in: path + name: project_id + schema: + type: integer + responses: + 200: + content: + application/json: + schema: + type: object + properties: + success: + description: If the connection is established or not. + type: boolean + message: + type: string + """ + with db.session_scope() as session: + project = session.query(Project).filter_by(id=project_id).first() + if project is None: + raise NotFoundException(f'Failed to find project: {project_id}') + service = ParticipantService(session) + participants = service.get_platform_participants_by_project(project.id) + + error_messages = [] + for participant in participants: + client = RpcClient.from_project_and_participant(project.name, project.token, participant.domain_name) + result = client.check_connection().status if result.code != StatusCode.STATUS_SUCCESS: - details.append(result.msg) - return {'data': {'success': success, 'details': details}} + error_messages.append( + f'failed to validate {participant.domain_name}\'s workspace, result: {result.msg}') - def check_connection(self, project_config: ProjectProto, - participant_proto: ParticipantProto): - client = RpcClient(project_config, participant_proto) - return client.check_connection().status + return { + 'data': { + 'success': len(error_messages) == 0, + 'message': '\n'.join(error_messages) if len(error_messages) > 0 else 'validate project successfully!' + } + }, HTTPStatus.OK + + +class ProjectParticipantsApi(Resource): + + @credentials_required + def get(self, project_id: int): + """Gets participants of a project. + --- + tags: + - project + description: Gets participants of a project. 
+ parameters: + - in: path + name: project_id + schema: + type: integer + responses: + 200: + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.Participant' + """ + with db.session_scope() as session: + project = session.query(Project).filter_by(id=project_id).first() + if project is None: + raise NotFoundException(f'Failed to find project: {project_id}') + service = ParticipantService(session) + participants = service.get_participants_by_project(project_id) + return make_flask_response(data=[participant.to_proto() for participant in participants]) + + +class PendingProjectsApi(Resource): + + @input_validator + @credentials_required + @iam_required(Permission.PROJECTS_POST) + @use_args(CreatePendingProjectParameter()) + def post(self, data: Dict): + """Creates a new pending project. + --- + tags: + - project + description: Creates a new pending project + parameters: + - in: body + name: body + schema: + $ref: '#/definitions/CreatePendingProjectParameter' + responses: + 201: + description: Created a pending project + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.PendingProjectPb' + """ + with db.session_scope() as session: + # TODO(xiangyuxuan.prs): remove after using token instead of name to ensure consistency of project + if PendingProjectService(session).duplicated_name_exists(data['name']): + raise ResourceConflictException(f'{data["name"]} has already existed') + participants_info = PendingProjectService(session).build_participants_info(data['participant_ids']) + pending_project = PendingProjectService(session).create_pending_project(data['name'], + data['config'], + participants_info, + data['comment'], + get_current_user().username, + state=PendingProjectState.ACCEPTED, + role=ProjectRole.COORDINATOR) + session.flush() + ticket_helper = get_ticket_helper(session) + ticket_helper.create_ticket(TicketType.CREATE_PROJECT, TicketDetails(uuid=pending_project.uuid)) + session.commit() + return make_flask_response(data=pending_project.to_proto(), status=HTTPStatus.CREATED) + + @credentials_required + @use_args( + { + 'filter': FilterExpField( + required=False, + load_default=None, + ), + 'page': fields.Integer(required=False, load_default=1), + 'page_size': fields.Integer(required=False, load_default=10) + }, + location='query') + def get(self, params: dict): + """Gets all pending projects. + --- + tags: + - project + description: gets all pending projects. + responses: + 200: + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.PendingProjectPb' + """ + with db.session_scope() as session: + try: + pagination = PendingProjectService(session).list_pending_projects( + filter_exp=params['filter'], + page=params['page'], + page_size=params['page_size'], + ) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + data = [t.to_proto() for t in pagination.get_items()] + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + +class PendingProjectApi(Resource): + + @credentials_required + @use_kwargs({ + 'state': + fields.String(required=True, + validate=validate.OneOf([PendingProjectState.ACCEPTED.name, PendingProjectState.CLOSED.name])) + }) + def patch(self, pending_project_id: int, state: str): + """Accept or refuse a pending project. + --- + tags: + - project + description: Accept or refuse a pending project. 
+ parameters: + - in: path + name: pending_project_id + schema: + type: integer + - in: body + name: body + schema: + type: object + properties: + state: + type: string + responses: + 200: + description: a pending project + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.PendingProjectPb' + """ + with db.session_scope() as session: + pending_project = PendingProjectService(session).update_state_as_participant(pending_project_id, state) + resp = PendingProjectRpcController(pending_project).sync_pending_project_state_to_coordinator( + uuid=pending_project.uuid, state=PendingProjectState(state)) + if not resp.succeeded: + raise InternalException(f'connect to coordinator failed: {resp.msg}') + session.commit() + return make_flask_response(data=pending_project.to_proto()) + + def delete(self, pending_project_id: int): + """Delete pending project by id. + --- + tags: + - project + description: Delete pending project. + parameters: + - in: path + name: pending_project_id + required: true + schema: + type: integer + description: The ID of the pending project + responses: + 204: + description: No content. + """ + with db.session_scope() as session: + pending_project = session.query(PendingProject).get(pending_project_id) + if pending_project is None: + return make_flask_response(status=HTTPStatus.NO_CONTENT) + result = PendingProjectRpcController(pending_project).send_to_participants( + partial(ProjectServiceClient.delete_pending_project, uuid=pending_project.uuid)) + if not all(resp.succeeded for resp in result.values()): + raise InternalException(f'delete participants failed: {result}') + with db.session_scope() as session: + session.delete(pending_project) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) def initialize_project_apis(api: Api): api.add_resource(ProjectsApi, '/projects') api.add_resource(ProjectApi, '/projects/') - api.add_resource(CheckConnectionApi, - '/projects//connection_checks') - - -def _create_add_on(participant, certificate, grpc_ssl_server_host=None): - if certificate is None: - return - # check validation - for file_name in _CERTIFICATE_FILE_NAMES: - if certificate.certs.get(file_name) is None: - raise InvalidArgumentException( - details=ErrorMessage.PARAM_FORMAT_ERROR.value.format( - 'certificates', '{} not existed'.format(file_name))) - try: - create_add_on(k8s_client, participant.domain_name, participant.url, - certificate.certs, grpc_ssl_server_host) - except RuntimeError as e: - raise InvalidArgumentException(details=str(e)) + api.add_resource(ProjectParticipantsApi, '/projects//participants') + api.add_resource(CheckConnectionApi, '/projects//connection_checks') + + api.add_resource(PendingProjectsApi, '/pending_projects') + api.add_resource(PendingProjectApi, '/pending_project/') + + schema_manager.append(CreateProjectParameter) + schema_manager.append(CreatePendingProjectParameter) diff --git a/web_console_v2/api/fedlearner_webconsole/project/apis_test.py b/web_console_v2/api/fedlearner_webconsole/project/apis_test.py new file mode 100644 index 000000000..4e5b7d487 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/apis_test.py @@ -0,0 +1,392 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import json +import unittest +from datetime import datetime, timezone + +from http import HTTPStatus +from unittest.mock import patch, MagicMock + +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant, ParticipantType +from fedlearner_webconsole.project.apis import _add_variable +from fedlearner_webconsole.project.controllers import ParticipantResp +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.pp_time import sleep +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project, PendingProject, PendingProjectState, ProjectRole +from fedlearner_webconsole.proto.project_pb2 import ProjectConfig, ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.workflow.models import Workflow +from testing.common import BaseTestCase + + +class ProjectApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + self.signin_as_admin() + self.default_project = Project() + self.default_project.name = 'test-default_project' + self.default_project.set_config(ParseDict({'variables': [{'name': 'test', 'value': 'test'}]}, ProjectConfig())) + self.default_project.comment = 'test comment' + + workflow = Workflow(name='workflow_key_get1', project_id=1) + participant = Participant(name='test-participant', domain_name='fl-test.com', host='127.0.0.1', port=32443) + relationship = ProjectParticipant(project_id=1, participant_id=1) + with db.session_scope() as session: + session.add(self.default_project) + session.add(workflow) + session.add(participant) + session.add(relationship) + session.commit() + + def test_add_variable(self): + # test none + self.assertEqual(_add_variable(None, 'storage_root_path', '/data'), + {'variables': [{ + 'name': 'storage_root_path', + 'value': '/data' + }]}) + # test variables is [] + self.assertEqual(_add_variable({'variables': []}, 'storage_root_path', '/data'), + {'variables': [{ + 'name': 'storage_root_path', + 'value': '/data' + }]}) + # test has other variables + self.assertEqual( + _add_variable({'variables': [{ + 'name': 'test-post', + 'value': 'test' + }]}, 'storage_root_path', '/data'), + {'variables': [{ + 'name': 'test-post', + 'value': 'test' + }, { + 'name': 'storage_root_path', + 'value': '/data' + }]}) + # test already set storage_root_path + self.assertEqual( + _add_variable({'variables': [{ + 'name': 'storage_root_path', + 'value': '/fake_data' + }]}, 'storage_root_path', '/data'), {'variables': [{ + 'name': 'storage_root_path', + 'value': '/fake_data' + }]}) + + def test_get_project(self): + get_response = self.get_helper('/api/v2/projects/1') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + queried_project = json.loads(get_response.data).get('data') + with db.session_scope() as session: + project_in_db = session.query(Project).get(1) + self.assertEqual(queried_project, to_dict(project_in_db.to_proto())) + + def test_get_not_found_project(self): + get_response = 
self.get_helper(f'/api/v2/projects/{1000}') + self.assertEqual(get_response.status_code, HTTPStatus.NOT_FOUND) + + def test_post_project_by_new_participant(self): + name = 'test-post-project' + comment = 'test post project(by new participant)' + config = {'variables': [{'name': 'test-post', 'value': 'test'}]} + create_response = self.post_helper('/api/v2/projects', + data={ + 'name': name, + 'comment': comment, + 'config': config, + 'participant_ids': [2] + }) + + self.assertEqual(create_response.status_code, HTTPStatus.CREATED) + created_project = self.get_response_data(create_response) + + with db.session_scope() as session: + relationship = session.query(ProjectParticipant).all() + queried_project = session.query(Project).filter_by(name=name).first() + + self.assertEqual((relationship[1].project_id, relationship[1].participant_id), (2, 2)) + self.assertEqual(created_project, to_dict(queried_project.to_proto())) + + def test_post_conflict_name_project(self): + create_response = self.post_helper('/api/v2/projects', + data={ + 'name': self.default_project.name, + 'participant_ids': [1], + }) + self.assertEqual(create_response.status_code, HTTPStatus.CONFLICT) + + def test_list_project(self): + list_response = self.get_helper('/api/v2/projects') + project_list = self.get_response_data(list_response) + self.assertEqual(len(project_list), 1) + project_id = project_list[0]['id'] + with db.session_scope() as session: + queried_project = session.query(Project).get(project_id) + ref = queried_project.to_ref() + ref.num_workflow = 1 + self.assertEqual(project_list[0], to_dict(ref)) + + def test_update_project(self): + updated_comment = 'updated comment' + variables = [{'name': 'test-variables', 'value': 'variables'}] + update_response = self.patch_helper('/api/v2/projects/1', + data={ + 'comment': updated_comment, + 'variables': variables, + }) + self.assertEqual(update_response.status_code, HTTPStatus.OK) + # test response + project = self.get_response_data(update_response) + self.assertEqual(project['comment'], updated_comment) + self.assertEqual(project['variables'], [{ + 'access_mode': 'UNSPECIFIED', + 'name': 'test-variables', + 'value': 'variables', + 'value_type': 'STRING', + 'tag': '', + 'widget_schema': '' + }]) + # test database + get_response = self.get_helper('/api/v2/projects/1') + project = self.get_response_data(get_response) + self.assertEqual(project['comment'], updated_comment) + + def test_update_project_config(self): + config = {'variables': [{'name': 'test-variables', 'value': 'variables'}]} + update_response = self.patch_helper('/api/v2/projects/1', data={ + 'config': config, + }) + self.assertEqual(update_response.status_code, HTTPStatus.OK) + # test database + get_response = self.get_helper('/api/v2/projects/1') + project = self.get_response_data(get_response) + config = { + 'abilities': [], + 'action_rules': {}, + 'support_blockchain': + False, + 'variables': [{ + 'access_mode': 'UNSPECIFIED', + 'name': 'test-variables', + 'tag': '', + 'value': 'variables', + 'value_type': 'STRING', + 'widget_schema': '' + }] + } + self.assertEqual(project['config'], config) + + def test_update_not_found_project(self): + updated_comment = 'updated comment' + update_response = self.patch_helper(f'/api/v2/projects/{1000}', data={'comment': updated_comment}) + self.assertEqual(update_response.status_code, HTTPStatus.NOT_FOUND) + + def test_post_project_with_multiple_participants(self): + create_response = self.post_helper('/api/v2/projects', + data={ + 'name': 'test name', + 'comment': 'test 
comment', + 'participant_ids': [1, 2, 3] + }) + self.assertEqual(create_response.status_code, HTTPStatus.BAD_REQUEST) + + def test_post_project_with_light_client(self): + with db.session_scope() as session: + light_participant = Participant(name='light-client', + type=ParticipantType.LIGHT_CLIENT, + domain_name='fl-light-client.com', + host='127.0.0.1', + port=32443) + session.add(light_participant) + session.commit() + resp = self.post_helper('/api/v2/projects', + data={ + 'name': 'test-project', + 'comment': 'test comment', + 'participant_ids': [light_participant.id] + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + project = session.query(Project).filter_by(name='test-project').first() + self.assertEqual(project.participants[0].name, 'light-client') + self.assertEqual(project.get_participant_type(), ParticipantType.LIGHT_CLIENT) + + +class ProjectParticipantsApi(BaseTestCase): + + def setUp(self): + super().setUp() + + self.participant_1 = Participant(name='participant pro1', domain_name='fl-participant-1.com') + self.participant_2 = Participant(name='participant pro2', domain_name='fl-participant-2.com') + self.project_1 = Project(name='project 1') + self.relationship_11 = ProjectParticipant(project_id=1, participant_id=1) + self.relationship_12 = ProjectParticipant(project_id=1, participant_id=2) + with db.session_scope() as session: + session.add(self.participant_1) + session.flush() + sleep(1) + session.add(self.participant_2) + session.add(self.project_1) + session.add(self.relationship_11) + session.add(self.relationship_12) + session.commit() + + def test_get_project_participants(self): + get_response = self.get_helper('/api/v2/projects/1/participants') + participants = self.get_response_data(get_response) + self.assertEqual(len(participants), 2) + self.assertEqual(participants[0]['name'], 'participant pro2') + self.assertEqual(participants[0]['pure_domain_name'], 'participant-2') + self.assertEqual(participants[1]['name'], 'participant pro1') + self.assertEqual(participants[1]['pure_domain_name'], 'participant-1') + + +class PendingProjectsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + self.participant_1 = Participant(name='participant pro1', domain_name='fl-participant-1.com') + self.participant_2 = Participant(name='participant pro2', domain_name='fl-participant-2.com') + with db.session_scope() as session: + session.add(self.participant_1) + session.add(self.participant_2) + session.commit() + + def test_post_pending_projects(self): + resp = self.post_helper('/api/v2/pending_projects', + data={ + 'name': 'test-project', + 'comment': 'test comment', + 'config': { + 'variables': [] + }, + 'participant_ids': [self.participant_1.id, self.participant_2.id] + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + resp = self.post_helper('/api/v2/pending_projects', + data={ + 'name': 'test-project', + 'comment': 'test comment', + 'participant_ids': [] + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + with db.session_scope() as session: + pending_project: PendingProject = session.query(PendingProject).filter_by(name='test-project').first() + self.assertEqual(pending_project.role, ProjectRole.COORDINATOR) + self.assertEqual(pending_project.state, PendingProjectState.ACCEPTED) + self.assertEqual(pending_project.creator_username, 'ada') + expected_info = ParticipantsInfo( + participants_map={ + 'participant-1': + ParticipantInfo(name='participant pro1', + role=ProjectRole.PARTICIPANT.name, + 
state=PendingProjectState.PENDING.name, + type=ParticipantType.PLATFORM.name), + 'participant-2': + ParticipantInfo(name='participant pro2', + role=ProjectRole.PARTICIPANT.name, + state=PendingProjectState.PENDING.name, + type=ParticipantType.PLATFORM.name), + '': + ParticipantInfo(role=ProjectRole.COORDINATOR.name, + state=PendingProjectState.ACCEPTED.name, + type=ParticipantType.PLATFORM.name) + }) + self.assertEqual(pending_project.get_participants_info(), expected_info) + + @patch('fedlearner_webconsole.project.apis.PendingProjectService.list_pending_projects') + def test_get_pending_projects(self, mock_list): + created_at = datetime(2022, 5, 10, 0, 0, 0) + updated_at = datetime(2022, 5, 10, 0, 0, 0) + pending_proj = PendingProject(id=123, + name='test', + uuid='uuid', + state=PendingProjectState.ACCEPTED, + role=ProjectRole.PARTICIPANT, + comment='test', + created_at=created_at, + updated_at=updated_at, + ticket_status=TicketStatus.PENDING) + mock_list.return_value.get_items.return_value = [pending_proj] + mock_list.return_value.get_metadata.return_value = { + 'current_page': 1, + 'page_size': 1, + 'total_pages': 1, + 'total_items': 1 + } + resp = self.get_helper('/api/v2/pending_projects') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(len(self.get_response_data(resp)), 1) + + +class PendingProjectApiTest(BaseTestCase): + + @patch('fedlearner_webconsole.project.apis.PendingProjectService.update_state_as_participant') + @patch('fedlearner_webconsole.project.apis.PendingProjectRpcController.sync_pending_project_state_to_coordinator') + def test_patch_pending_project(self, mock_sync: MagicMock, mock_update: MagicMock): + pending_project = PendingProject(name='test', state=PendingProjectState.PENDING, role=ProjectRole.PARTICIPANT) + with db.session_scope() as session: + session.add(pending_project) + session.commit() + mock_sync.return_value = ParticipantResp(succeeded=True, resp=None, msg='') + mock_update.return_value = PendingProject(name='test', + state=PendingProjectState.PENDING, + role=ProjectRole.PARTICIPANT, + created_at=datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc), + updated_at=datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc), + ticket_status=TicketStatus.APPROVED) + resp = self.patch_helper(f'/api/v2/pending_project/{pending_project.id}', + data={ + 'state': PendingProjectState.ACCEPTED.name, + }) + mock_sync.assert_called_once_with(uuid=pending_project.uuid, state=PendingProjectState.ACCEPTED) + self.assertEqual(resp.status_code, HTTPStatus.OK) + mock_update.assert_called_once_with(pending_project.id, PendingProjectState.ACCEPTED.name) + + @patch('fedlearner_webconsole.project.apis.PendingProjectRpcController.send_to_participants') + def test_delete_pending_project(self, mock_sync_delete: MagicMock): + pending_project = PendingProject(name='test', state=PendingProjectState.PENDING, role=ProjectRole.PARTICIPANT) + with db.session_scope() as session: + session.add(pending_project) + session.commit() + mock_sync_delete.return_value = { + 'a': ParticipantResp(succeeded=True, resp=None, msg=''), + 'b': ParticipantResp(succeeded=False, resp=None, msg='aa') + } + resp = self.delete_helper(f'/api/v2/pending_project/{pending_project.id}') + self.assertEqual(resp.status_code, 500) + mock_sync_delete.assert_called_once() + with db.session_scope() as session: + self.assertIsNotNone(session.query(PendingProject).get(pending_project.id)) + mock_sync_delete.return_value = { + 'a': ParticipantResp(succeeded=True, resp=None, msg=''), + 'b': 
ParticipantResp(succeeded=True, resp=None, msg='aa')
+ }
+ self.delete_helper(f'/api/v2/pending_project/{pending_project.id}')
+
+ self.assertEqual(mock_sync_delete.call_count, 2)
+ with db.session_scope() as session:
+ self.assertIsNone(session.query(PendingProject).get(pending_project.id))
+
+
+if __name__ == '__main__':
+ unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/project/controllers.py b/web_console_v2/api/fedlearner_webconsole/project/controllers.py new file mode 100644 index 000000000..7398ed5b6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/controllers.py @@ -0,0 +1,77 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Callable, Dict, NamedTuple
+
+import grpc
+from google.protobuf.empty_pb2 import Empty
+from google.protobuf.message import Message
+
+from fedlearner_webconsole.participant.models import ParticipantType
+from fedlearner_webconsole.project.models import PendingProject, ProjectRole, PendingProjectState
+from fedlearner_webconsole.rpc.v2.project_service_client import ProjectServiceClient
+
+
+class ParticipantResp(NamedTuple):
+ succeeded: bool
+ resp: Message
+ msg: str
+
+
+def _get_domain_name(pure_domain_name: str) -> str:
+ """Gets the full domain name for a pure domain name.
+
+ Args:
+ pure_domain_name (str): pure domain name, e.g. ali-test
+
+ Returns:
+ str: domain name, like fl-ali-test.com
+ """
+ return f'fl-{pure_domain_name}.com'
+
+
+def _get_resp(pure_domain_name: str, method: Callable) -> ParticipantResp:
+ client = ProjectServiceClient.from_participant(_get_domain_name(pure_domain_name))
+ try:
+ resp = ParticipantResp(True, method(client), '')
+ except grpc.RpcError as e:
+ resp = ParticipantResp(False, Empty(), str(e))
+ return resp
+
+
+class PendingProjectRpcController(object):
+ """A helper to send gRPC requests to the peers in a pending project's participants_info."""
+
+ def __init__(self, pending_project: PendingProject = None):
+ self._pending_project = pending_project
+
+ def send_to_participants(self, method: Callable) -> Dict[str, ParticipantResp]:
+ if self._pending_project.role == ProjectRole.PARTICIPANT:
+ # While the project is still pending, the proxy that a participant
+ # would use to reach other participants via the coordinator is not available,
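+ # so fail fast instead of trying to fan out from a non-coordinator.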
+ raise ValueError('participant cant connect to participant in pending project') + resp_map = {} + for pure_domain_name, p_info in self._pending_project.get_participants_info().participants_map.items(): + if p_info.role == ProjectRole.COORDINATOR.name or p_info.type == ParticipantType.LIGHT_CLIENT.name: + continue + + resp_map[pure_domain_name] = _get_resp(pure_domain_name, method) + return resp_map + + def sync_pending_project_state_to_coordinator(self, uuid: str, state: PendingProjectState) -> ParticipantResp: + assert self._pending_project.role == ProjectRole.PARTICIPANT + pure_domain, _ = self._pending_project.get_coordinator_info() + return _get_resp(pure_domain, + lambda client: ProjectServiceClient.sync_pending_project_state(client, uuid, state)) diff --git a/web_console_v2/api/fedlearner_webconsole/project/controllers_test.py b/web_console_v2/api/fedlearner_webconsole/project/controllers_test.py new file mode 100644 index 000000000..f05d9496f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/controllers_test.py @@ -0,0 +1,96 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import Mock, call, patch + +import grpc +from google.protobuf.empty_pb2 import Empty + +from fedlearner_webconsole.participant.models import ParticipantType +from fedlearner_webconsole.project.controllers import PendingProjectRpcController, ParticipantResp, _get_domain_name +from fedlearner_webconsole.project.models import PendingProject, ProjectRole, PendingProjectState +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.rpc.v2.client_base import ParticipantRpcClient +from testing.no_web_server_test_case import NoWebServerTestCase + + +class FakeRpcClient(ParticipantRpcClient): + + def __init__(self): + super().__init__(None) + + def fake_method(self, request: str, succeeded: bool = True): + if succeeded: + return request + raise grpc.RpcError + + def sync_pending_project_state(self, uuid: str, state: PendingProjectState): + del uuid, state + return Empty() + + +class ProjectControllerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.pending_project = PendingProject(id=1, role=ProjectRole.COORDINATOR) + self.pending_project.set_participants_info( + ParticipantsInfo( + participants_map={ + 'coordinator': ParticipantInfo(role=ProjectRole.COORDINATOR.name), + 'part1': ParticipantInfo(role=ProjectRole.PARTICIPANT.name), + 'part2': ParticipantInfo(role=ProjectRole.PARTICIPANT.name), + 'part3': ParticipantInfo(role=ProjectRole.PARTICIPANT.name, type=ParticipantType.LIGHT_CLIENT.name), + })) + + @patch('fedlearner_webconsole.project.controllers.ProjectServiceClient.from_participant') + def test_send_to_all(self, mock_from_participant: Mock): + mock_from_participant.return_value = FakeRpcClient() + result = PendingProjectRpcController( + self.pending_project).send_to_participants(lambda client: 
FakeRpcClient.fake_method(client, request='test')) + mock_from_participant.assert_has_calls([call('fl-part1.com'), call('fl-part2.com')], any_order=True) + self.assertEqual(mock_from_participant.call_count, 2) + self.assertEqual( + result, { + 'part1': ParticipantResp(succeeded=True, resp='test', msg=''), + 'part2': ParticipantResp(succeeded=True, resp='test', msg='') + }) + # Failed case + result = PendingProjectRpcController(self.pending_project).send_to_participants( + lambda client: client.fake_method(request='test', succeeded=False)) + self.assertEqual( + result, { + 'part1': ParticipantResp(succeeded=False, resp=Empty(), msg=''), + 'part2': ParticipantResp(succeeded=False, resp=Empty(), msg='') + }) + + @patch('fedlearner_webconsole.project.controllers.ProjectServiceClient.from_participant') + @patch('fedlearner_webconsole.project.controllers.ProjectServiceClient.sync_pending_project_state', + FakeRpcClient.sync_pending_project_state) + def test_send_to_coordinator(self, mock_from_participant: Mock): + mock_from_participant.return_value = FakeRpcClient() + self.pending_project.role = ProjectRole.PARTICIPANT + result = PendingProjectRpcController(self.pending_project).sync_pending_project_state_to_coordinator( + uuid='test', state=PendingProjectState.ACCEPTED) + mock_from_participant.assert_called_once_with('fl-coordinator.com') + self.assertEqual(result, ParticipantResp(succeeded=True, resp=Empty(), msg='')) + + def test_get_domain_name(self): + self.assertEqual(_get_domain_name('bytedance'), 'fl-bytedance.com') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/project/models.py b/web_console_v2/api/fedlearner_webconsole/project/models.py index 464d2877d..3b054d22d 100644 --- a/web_console_v2/api/fedlearner_webconsole/project/models.py +++ b/web_console_v2/api/fedlearner_webconsole/project/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,67 +13,217 @@ # limitations under the License. 
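Before the models diff, one note on the controller behaviour exercised by the tests above: `send_to_participants` issues one RPC per platform peer, never raises, and folds each outcome into a `ParticipantResp` so callers can decide per peer. A standalone sketch of that fan-out shape (hypothetical names; a plain `RuntimeError` stands in for `grpc.RpcError`, and this is not the webconsole's API):

```python
from typing import Callable, Dict, NamedTuple, Optional


class PeerResp(NamedTuple):
    succeeded: bool
    resp: Optional[str]
    msg: str


def fan_out(peers: Dict[str, str], method: Callable[[str], str]) -> Dict[str, PeerResp]:
    results = {}
    for pure_domain, peer_type in peers.items():
        if peer_type == 'LIGHT_CLIENT':  # light clients are skipped, as in the controller
            continue
        try:
            results[pure_domain] = PeerResp(True, method(f'fl-{pure_domain}.com'), '')
        except RuntimeError as e:  # stand-in for grpc.RpcError
            results[pure_domain] = PeerResp(False, None, str(e))
    return results


# Only the platform peer gets a call; the light client is filtered out.
print(fan_out({'part1': 'PLATFORM', 'part3': 'LIGHT_CLIENT'}, lambda domain: f'ok from {domain}'))
```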
# coding: utf-8
+import enum
+from typing import Optional, List, Tuple
+from google.protobuf import text_format
 from sqlalchemy.sql import func
 from sqlalchemy.sql.schema import Index, UniqueConstraint
-from fedlearner_webconsole.utils.mixins import to_dict_mixin
-from fedlearner_webconsole.db import db
+
+from fedlearner_webconsole.proto.project_pb2 import ProjectRef, ParticipantsInfo, ProjectConfig, ParticipantInfo
+from fedlearner_webconsole.utils.base_model.review_ticket_model import ReviewTicketModel
+from fedlearner_webconsole.utils.base_model.softdelete_model import SoftDeleteModel
+from fedlearner_webconsole.utils.pp_datetime import to_timestamp
+from fedlearner_webconsole.proto.common_pb2 import Variable
+from fedlearner_webconsole.db import db, default_table_args
 from fedlearner_webconsole.proto import project_pb2
+from fedlearner_webconsole.participant.models import ParticipantType
+
+
+class PendingProjectState(enum.Enum):
+ PENDING = 'PENDING'
+ ACCEPTED = 'ACCEPTED'
+ FAILED = 'FAILED'
+ CLOSED = 'CLOSED'
+
+
+class ProjectRole(enum.Enum):
+ COORDINATOR = 'COORDINATOR'
+ PARTICIPANT = 'PARTICIPANT'
+
+
+class Action(enum.Enum):
+ ID_ALIGNMENT = 'ID_ALIGNMENT'
+ DATA_ALIGNMENT = 'DATA_ALIGNMENT'
+ HORIZONTAL_TRAIN = 'HORIZONTAL_TRAIN'
+ VERTICAL_TRAIN = 'VERTICAL_TRAIN'
+ VERTICAL_EVAL = 'VERTICAL_EVAL'
+ VERTICAL_PRED = 'VERTICAL_PRED'
+ VERTICAL_SERVING = 'VERTICAL_SERVING'
+ WORKFLOW = 'WORKFLOW'
+ TEE_SERVICE = 'TEE_SERVICE'
+ TEE_RESULT_EXPORT = 'TEE_RESULT_EXPORT'
-@to_dict_mixin(ignores=['certificate'],
- extras={'config': (lambda project: project.get_config())})
 class Project(db.Model):
 __tablename__ = 'projects_v2'
- __table_args__ = (UniqueConstraint('name', name='idx_name'),
- Index('idx_token', 'token'), {
- 'comment': 'webconsole projects',
- 'mysql_engine': 'innodb',
- 'mysql_charset': 'utf8mb4',
- })
- id = db.Column(db.Integer,
- primary_key=True,
- autoincrement=True,
- comment='id')
+ __table_args__ = (UniqueConstraint('name', name='idx_name'), Index('idx_token', 'token'), {
+ 'comment': 'webconsole projects',
+ 'mysql_engine': 'innodb',
+ 'mysql_charset': 'utf8mb4',
+ })
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id')
+ role = db.Column(db.Enum(ProjectRole, length=32, native_enum=False, create_constraint=False),
+ default=ProjectRole.PARTICIPANT,
+ comment='pending project role')
+ participants_info = db.Column(db.Text(), comment='participants info')
+
+ name = db.Column(db.String(255), comment='name')
 token = db.Column(db.String(64), comment='token')
 config = db.Column(db.LargeBinary(), comment='config')
- certificate = db.Column(db.LargeBinary(), comment='certificate')
 comment = db.Column('cmt', db.Text(), key='comment', comment='comment')
- created_at = db.Column(db.DateTime(timezone=True),
- server_default=func.now(),
- comment='created at')
+ creator = db.Column(db.String(255), comment='creator')
+ created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at')
 updated_at = db.Column(db.DateTime(timezone=True),
 onupdate=func.now(),
 server_default=func.now(),
 comment='updated at')
 deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted at')
+ participants = db.relationship('Participant',
+ secondary='projects_participants_v2',
+ primaryjoin='Project.id == foreign(ProjectParticipant.project_id)',
+ secondaryjoin='Participant.id == foreign(ProjectParticipant.participant_id)')

- def set_config(self, proto):
+ def set_config(self, proto: project_pb2.ProjectConfig):
 self.config =
proto.SerializeToString() - def get_config(self): - if self.config is None: - return None - proto = project_pb2.Project() - proto.ParseFromString(self.config) - return proto + def _get_config(self) -> project_pb2.ProjectConfig: + config = project_pb2.ProjectConfig() + if self.config: + config.ParseFromString(self.config) + return config + + def get_variables(self) -> List[Variable]: + return list(self._get_config().variables) + + def set_variables(self, variables: List[Variable]): + config = self._get_config() + del config.variables[:] + config.variables.extend(variables) + self.set_config(config) - def set_certificate(self, proto): - self.certificate = proto.SerializeToString() + def get_storage_root_path(self, dft_value: str) -> str: + variables = self.get_variables() + for variable in variables: + if variable.name == 'storage_root_path': + return variable.value + return dft_value - def get_certificate(self): - if self.certificate is None: + def get_participant_type(self) -> Optional[ParticipantType]: + if len(self.participants) == 0: return None - proto = project_pb2.CertificateStorage() - proto.ParseFromString(self.certificate) + return self.participants[0].get_type() + + def set_participants_info(self, proto: ParticipantsInfo): + self.participants_info = text_format.MessageToString(proto) + + def get_participants_info(self) -> ParticipantsInfo: + if self.participants_info is not None: + return text_format.Parse(self.participants_info, ParticipantsInfo()) + return ParticipantsInfo() + + def to_ref(self) -> ProjectRef: + participant_type = self.get_participant_type() + ref = ProjectRef(id=self.id, + name=self.name, + creator=self.creator, + created_at=to_timestamp(self.created_at), + participant_type=participant_type.name if participant_type else None, + participants_info=self.get_participants_info(), + role=self.role.name if self.role else None) + for participant in self.participants: + ref.participants.append(participant.to_proto()) + return ref + + def to_proto(self) -> project_pb2.Project: + participant_type = self.get_participant_type() + proto = project_pb2.Project(id=self.id, + name=self.name, + creator=self.creator, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + participant_type=participant_type.name if participant_type else None, + token=self.token, + comment=self.comment, + variables=self.get_variables(), + participants_info=self.get_participants_info(), + config=self._get_config(), + role=self.role.name if self.role else None) + for participant in self.participants: + proto.participants.append(participant.to_proto()) return proto - def get_namespace(self): - config = self.get_config() - if config is not None: - variables = self.get_config().variables - for variable in variables: - if variable.name == 'namespace': - return variable.value - return 'default' + +class PendingProject(db.Model, SoftDeleteModel, ReviewTicketModel): + __tablename__ = 'pending_projects_v2' + __table_args__ = (default_table_args('This is webconsole pending_project table')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + name = db.Column(db.String(255), comment='name') + uuid = db.Column(db.String(64), comment='uuid') + config = db.Column(db.Text(), comment='config') + state = db.Column(db.Enum(PendingProjectState, length=32, native_enum=False, create_constraint=False), + nullable=False, + default=PendingProjectState.PENDING, + comment='pending project stage state') + participants_info = db.Column(db.Text(), 
comment='participants info')
+ role = db.Column(db.Enum(ProjectRole, length=32, native_enum=False, create_constraint=False),
+ nullable=False,
+ default=ProjectRole.PARTICIPANT,
+ comment='pending project role')
+
+ comment = db.Column('cmt', db.Text(), key='comment', comment='comment')
+ creator_username = db.Column(db.String(255), comment='creator')
+ created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at')
+ updated_at = db.Column(db.DateTime(timezone=True),
+ onupdate=func.now(),
+ server_default=func.now(),
+ comment='updated at')
+
+ def set_participants_info(self, proto: ParticipantsInfo):
+ self.participants_info = text_format.MessageToString(proto)
+
+ def get_participants_info(self) -> ParticipantsInfo:
+ if self.participants_info is not None:
+ return text_format.Parse(self.participants_info, ParticipantsInfo())
+ return ParticipantsInfo()
+
+ def set_config(self, proto: ProjectConfig):
+ self.config = text_format.MessageToString(proto)
+
+ def get_config(self) -> ProjectConfig:
+ if self.config is not None:
+ return text_format.Parse(self.config, ProjectConfig())
+ return ProjectConfig()
+
+ def to_proto(self) -> project_pb2.PendingProjectPb:
+ return project_pb2.PendingProjectPb(id=self.id,
+ name=self.name,
+ uuid=self.uuid,
+ config=self.get_config(),
+ state=self.state.name,
+ participants_info=self.get_participants_info(),
+ role=self.role.name,
+ comment=self.comment,
+ creator_username=self.creator_username,
+ created_at=to_timestamp(self.created_at),
+ updated_at=to_timestamp(self.updated_at),
+ ticket_uuid=self.ticket_uuid,
+ ticket_status=self.ticket_status.name,
+ participant_type=self.get_participant_type())
+
+ def get_participant_info(self, pure_domain: str) -> Optional[ParticipantInfo]:
+ return self.get_participants_info().participants_map.get(pure_domain)
+
+ def get_coordinator_info(self) -> Tuple[str, ParticipantInfo]:
+ for pure_domain, p_info in self.get_participants_info().participants_map.items():
+ if p_info.role == ProjectRole.COORDINATOR.name:
+ return pure_domain, p_info
+ raise ValueError(f'coordinator not found in pending project {self.id}')
+
+ def get_participant_type(self) -> str:
+ # In the short term a project has only one type of participant, so as a
+ # shortcut the pending project's type is taken from its first participant.
+ for info in self.get_participants_info().participants_map.values():
+ if info.role == ProjectRole.PARTICIPANT.name:
+ return info.type
+ return ParticipantType.LIGHT_CLIENT.name diff --git a/web_console_v2/api/fedlearner_webconsole/project/models_test.py b/web_console_v2/api/fedlearner_webconsole/project/models_test.py new file mode 100644 index 000000000..c852a9db3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/models_test.py @@ -0,0 +1,187 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
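A note on the serialization choice visible above: `PendingProject` keeps its protos as `text_format` strings in `Text` columns (readable and diffable in the database), while `Project.config` stays a binary `SerializeToString` blob. A small round-trip sketch (standalone; `Struct` is only a stand-in message, not one of the webconsole protos):

```python
from google.protobuf import text_format
from google.protobuf.struct_pb2 import Struct

msg = Struct()
msg.fields['name'].string_value = 'demo'

text = text_format.MessageToString(msg)  # human-readable, fits a TEXT column
assert text_format.Parse(text, Struct()) == msg  # lossless round trip

binary = msg.SerializeToString()  # compact bytes, like Project.config
print(len(text), len(binary))  # the text form is larger but debuggable in place
```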
+# + +import unittest +from datetime import datetime, timezone + +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant, ParticipantType +from fedlearner_webconsole.project.models import Project, PendingProject, PendingProjectState, ProjectRole +from fedlearner_webconsole.proto import project_pb2 +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.project_pb2 import ProjectConfig, ProjectRef, ParticipantInfo, \ + PendingProjectPb, ParticipantsInfo +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ProjectTest(NoWebServerTestCase): + + def test_set_and_get_variables(self): + config = ProjectConfig(variables=[ + Variable(name='old_var', value='old', access_mode=Variable.PEER_READABLE), + ]) + with db.session_scope() as session: + project = Project(id=111, config=config.SerializeToString()) + session.add(project) + session.commit() + self.assertEqual(len(project.get_variables()), 1) + self.assertEqual(project.get_variables()[0].name, 'old_var') + project.set_variables([Variable(name='new_var', value='new', access_mode=Variable.PEER_WRITABLE)]) + session.commit() + with db.session_scope() as session: + project = session.query(Project).get(111) + self.assertEqual(len(project.get_variables()), 1) + self.assertEqual(project.get_variables()[0].name, 'new_var') + + def test_get_storage_root_path(self): + project = Project(id=111) + self.assertEqual(project.get_storage_root_path('not found'), 'not found') + project.set_variables( + [Variable(name='storage_root_path', value='root path', access_mode=Variable.PEER_READABLE)]) + self.assertEqual(project.get_storage_root_path('not found'), 'root path') + + def test_to_ref(self): + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + project_id = 66666 + participant_id = 2 + with db.session_scope() as session: + project = Project(id=project_id, name='test project', creator='test_user', created_at=created_at) + participant = Participant(id=participant_id, name='test part', domain_name='fl-test.com') + relation = ProjectParticipant(project_id=project.id, participant_id=participant.id) + session.add_all([project, participant, relation]) + session.commit() + with db.session_scope() as session: + project = session.query(Project).get(project_id) + participant = session.query(Participant).get(participant_id) + self.assertEqual( + project.to_ref(), + ProjectRef(id=project_id, + name='test project', + creator='test_user', + participant_type='PLATFORM', + created_at=int(created_at.timestamp()), + participants=[participant.to_proto()], + participants_info=ParticipantsInfo(), + role=ProjectRole.PARTICIPANT.name), + ) + + def test_to_proto(self): + created_at = datetime(2022, 5, 1, 10, 10, tzinfo=timezone.utc) + project_id = 12356 + participant_id = 22 + variable = Variable(name='test_var', access_mode=Variable.PEER_READABLE, typed_value=Value(string_value='jjjj')) + with db.session_scope() as session: + project = Project(id=project_id, + name='test project', + creator='test_user', + created_at=created_at, + comment='test comment', + token='test token') + project.set_variables([variable]) + participant = Participant(id=participant_id, name='test part', domain_name='fl-test.com') + relation = ProjectParticipant(project_id=project.id, participant_id=participant.id) + participants_info = 
ParticipantsInfo(participants_map={'test': ParticipantInfo(name='test part')}) + project.set_participants_info(participants_info) + session.add_all([project, participant, relation]) + session.commit() + with db.session_scope() as session: + project = session.query(Project).get(project_id) + participant = session.query(Participant).get(participant_id) + actual = project.to_proto() + self.assertEqual( + actual, + project_pb2.Project(id=project_id, + name='test project', + token='test token', + comment='test comment', + creator='test_user', + participant_type='PLATFORM', + created_at=int(created_at.timestamp()), + updated_at=actual.updated_at, + variables=[variable], + participants=[participant.to_proto()], + participants_info=participants_info, + config=ProjectConfig(variables=[variable]), + role=ProjectRole.PARTICIPANT.name), + ) + + +class PendingProjectTest(NoWebServerTestCase): + + def test_to_proto(self): + created_at = datetime(2022, 5, 10, 0, 0, 0) + updated_at = datetime(2022, 5, 10, 0, 0, 0) + pending_proj = PendingProject(id=123, + name='test', + uuid='uuid', + state=PendingProjectState.ACCEPTED, + role=ProjectRole.PARTICIPANT, + comment='test', + created_at=created_at, + updated_at=updated_at, + ticket_status=TicketStatus.PENDING) + pending_proj.set_config(ProjectConfig()) + participants_infos = ParticipantsInfo( + participants_map={'test': ParticipantInfo(name='test', role=PendingProjectState.ACCEPTED.name)}) + pending_proj.set_participants_info(participants_infos) + self.assertEqual( + pending_proj.to_proto(), + PendingProjectPb(id=123, + name='test', + uuid='uuid', + state=PendingProjectState.ACCEPTED.name, + role=ProjectRole.PARTICIPANT.name, + comment='test', + created_at=1652140800, + updated_at=1652140800, + config=ProjectConfig(), + participants_info=participants_infos, + ticket_status=TicketStatus.PENDING.name, + participant_type=ParticipantType.LIGHT_CLIENT.name)) + + def test_get_participant_info(self): + pending_proj = PendingProject(id=123, name='test', uuid='uuid') + pending_proj.set_config(ProjectConfig()) + participants_infos = ParticipantsInfo(participants_map={'test': ParticipantInfo(name='test')}) + pending_proj.set_participants_info(participants_infos) + self.assertEqual(pending_proj.get_participant_info('test'), ParticipantInfo(name='test')) + self.assertEqual(pending_proj.get_participant_info('test1'), None) + + def test_get_coordinator_info(self): + pending_proj = PendingProject(id=123, name='test', uuid='uuid') + pending_proj.set_config(ProjectConfig()) + participants_infos = ParticipantsInfo( + participants_map={'test': ParticipantInfo(name='test', role=ProjectRole.COORDINATOR.name)}) + pending_proj.set_participants_info(participants_infos) + self.assertEqual(pending_proj.get_coordinator_info(), + ('test', ParticipantInfo(name='test', role=ProjectRole.COORDINATOR.name))) + pending_proj.set_participants_info(ParticipantsInfo()) + with self.assertRaises(ValueError): + pending_proj.get_coordinator_info() + + def test_get_participant_type(self): + pending_proj = PendingProject(id=123, name='test', uuid='uuid') + participants_infos = ParticipantsInfo(participants_map={ + 'test': ParticipantInfo(name='test', role=ProjectRole.PARTICIPANT.name, type=ParticipantType.PLATFORM.name) + }) + pending_proj.set_participants_info(participants_infos) + self.assertEqual(pending_proj.get_participant_type(), ParticipantType.PLATFORM.name) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/project/project_scheduler.py 
b/web_console_v2/api/fedlearner_webconsole/project/project_scheduler.py
new file mode 100644
index 000000000..80065f6e4
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/project/project_scheduler.py
@@ -0,0 +1,127 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from functools import partial
+from typing import Tuple, List
+
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.composer.context import RunnerContext
+from fedlearner_webconsole.composer.interface import IRunnerV2
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.project.controllers import PendingProjectRpcController
+from fedlearner_webconsole.project.models import PendingProject, ProjectRole, PendingProjectState
+from fedlearner_webconsole.project.services import PendingProjectService
+from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput
+from fedlearner_webconsole.rpc.v2.project_service_client import ProjectServiceClient
+from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+
+
+def _get_ids_needed_schedule(session: Session) -> List[int]:
+    return [
+        p.id for p in session.query(PendingProject.id).filter_by(role=ProjectRole.COORDINATOR,
+                                                                 ticket_status=TicketStatus.APPROVED,
+                                                                 state=PendingProjectState.ACCEPTED).all()
+    ]
+
+
+def _if_pending_project_needed_create(p: PendingProject) -> bool:
+    part_info_list = p.get_participants_info().participants_map.values()
+    # The coordinator can move to the next step only after every participant has made its choice.
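+    # A pending project is only materialized when no participant is still
+    # PENDING and at least one invited participant has ACCEPTED; a mix of
+    # ACCEPTED and CLOSED still proceeds with the accepting subset.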
+    if any(part_info.state == PendingProjectState.PENDING.name for part_info in part_info_list):
+        return False
+    return any(part_info.state == PendingProjectState.ACCEPTED.name
+               for part_info in part_info_list
+               if part_info.role == ProjectRole.PARTICIPANT.name)
+
+
+def _if_all_participants_closed(p: PendingProject) -> bool:
+    closed_part_count = 0
+    part_info_list = p.get_participants_info().participants_map.values()
+    for part_info in part_info_list:
+        if part_info.role == ProjectRole.COORDINATOR.name:
+            continue
+        if part_info.state == PendingProjectState.CLOSED.name:
+            closed_part_count += 1
+    return closed_part_count > 0 and closed_part_count == len(part_info_list) - 1
+
+
+class ScheduleProjectRunner(IRunnerV2):
+
+    @staticmethod
+    def _create_pending_project(ids: List[int]) -> List[int]:
+        for pid in ids:
+            with db.session_scope() as session:
+                p = session.query(PendingProject).get(pid)
+                PendingProjectRpcController(p).send_to_participants(
+                    partial(ProjectServiceClient.create_pending_project, pending_project=p))
+        return ids
+
+    @staticmethod
+    def _update_all_participants(ids: List[int]) -> List[int]:
+        for pid in ids:
+            with db.session_scope() as session:
+                p = session.query(PendingProject).get(pid)
+                PendingProjectRpcController(p).send_to_participants(
+                    partial(ProjectServiceClient.update_pending_project,
+                            uuid=p.uuid,
+                            participants_map=p.get_participants_info().participants_map))
+        return ids
+
+    @staticmethod
+    def _create_project(ids: List[int]) -> List[str]:
+        p_needed_create = []
+        with db.session_scope() as session:
+            for pid in ids:
+                p = session.query(PendingProject).get(pid)
+                if _if_pending_project_needed_create(p):
+                    p_needed_create.append(p)
+        for p in p_needed_create:
+            result = PendingProjectRpcController(p).send_to_participants(
+                partial(ProjectServiceClient.create_project, uuid=p.uuid))
+            if all(resp.succeeded for resp in result.values()):
+                # The coordinator's project must be created last, after every
+                # participant has succeeded, so that the scheduler can retry if
+                # any participant failed.
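+                # NOTE: a SERIALIZABLE transaction is used here, presumably so
+                # that concurrent scheduler runs cannot both create the same
+                # project locally.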
+ with db.session_scope() as session: + session.connection(execution_options={'isolation_level': 'SERIALIZABLE'}) + PendingProjectService(session).create_project_locally(p.uuid) + session.commit() + else: + logging.error(f'create project {p.uuid} failed: {result}') + return [p.uuid for p in p_needed_create] + + @staticmethod + def _fail_pending_project(ids: List[int]): + failed_ids = [] + for p_id in ids: + with db.session_scope() as session: + p: PendingProject = session.query(PendingProject).get(p_id) + if _if_all_participants_closed(p): + p.state = PendingProjectState.FAILED + failed_ids.append(p.id) + session.commit() + return failed_ids + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + with db.session_scope() as session: + ids = _get_ids_needed_schedule(session) + output = RunnerOutput() + output.pending_project_scheduler_output.pending_project_created_ids.extend(self._create_pending_project(ids)) + output.pending_project_scheduler_output.pending_project_updated_ids.extend(self._update_all_participants(ids)) + output.pending_project_scheduler_output.projects_created_uuids.extend(self._create_project(ids)) + output.pending_project_scheduler_output.pending_project_failed_ids.extend(self._fail_pending_project(ids)) + return RunnerStatus.DONE, output diff --git a/web_console_v2/api/fedlearner_webconsole/project/project_scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/project/project_scheduler_test.py new file mode 100644 index 000000000..02b4102b9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/project_scheduler_test.py @@ -0,0 +1,214 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from unittest.mock import patch, MagicMock + +from google.protobuf.empty_pb2 import Empty + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.controllers import ParticipantResp +from fedlearner_webconsole.project.models import PendingProject, ProjectRole, PendingProjectState +from fedlearner_webconsole.project.project_scheduler import ScheduleProjectRunner, _get_ids_needed_schedule,\ + _if_pending_project_needed_create, _if_all_participants_closed +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, RunnerOutput, PendingProjectSchedulerOutput +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ProjectSchedulerTest(NoWebServerTestCase): + + def test_get_ids_needed_schedule(self): + p1 = PendingProject(uuid='a', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + p2 = PendingProject(uuid='b', + role=ProjectRole.PARTICIPANT, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + p3 = PendingProject(uuid='a', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.CLOSED) + with db.session_scope() as session: + session.add(p1) + session.add(p2) + session.add(p3) + session.commit() + with db.session_scope() as session: + self.assertEqual(_get_ids_needed_schedule(session), [p1.id]) # pylint: disable=protected-access + + def test_if_pending_project_needed_create(self): + p = PendingProject() + + # Test case: all accepted + part_infos = ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.PARTICIPANT.name), + 'b': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.PARTICIPANT.name) + }) + p.set_participants_info(part_infos) + self.assertEqual(_if_pending_project_needed_create(p), True) + + # Test case: one rejected one accepted + part_infos = ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.CLOSED.name, role=ProjectRole.PARTICIPANT.name), + 'b': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.PARTICIPANT.name) + }) + p.set_participants_info(part_infos) + self.assertEqual(_if_pending_project_needed_create(p), True) + + # Test case: one pending one accepted + part_infos = ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name), + 'b': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.PARTICIPANT.name) + }) + p.set_participants_info(part_infos) + self.assertEqual(_if_pending_project_needed_create(p), False) + + def test_if_all_participants_closed(self): + p = PendingProject() + only_coordinator = ParticipantsInfo(participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.COORDINATOR.name), + }) + p.set_participants_info(only_coordinator) + self.assertEqual(_if_all_participants_closed(p), False) + + part_all_accepted = ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.COORDINATOR.name), + 'b': ParticipantInfo(state=PendingProjectState.CLOSED.name, 
role=ProjectRole.PARTICIPANT.name), + 'c': ParticipantInfo(state=PendingProjectState.CLOSED.name, role=ProjectRole.PARTICIPANT.name), + }) + p.set_participants_info(part_all_accepted) + self.assertEqual(_if_all_participants_closed(p), True) + part_infos_one_pending = ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.COORDINATOR.name), + 'b': ParticipantInfo(state=PendingProjectState.CLOSED.name, role=ProjectRole.PARTICIPANT.name), + 'c': ParticipantInfo(state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name), + }) + p.set_participants_info(part_infos_one_pending) + self.assertEqual(_if_all_participants_closed(p), False) + + @patch('fedlearner_webconsole.project.project_scheduler.PendingProjectRpcController.send_to_participants') + def test_create_pending_project(self, mock_sent: MagicMock): + p1 = PendingProject(uuid='a', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + with db.session_scope() as session: + session.add(p1) + session.commit() + result = ScheduleProjectRunner()._create_pending_project([p1.id]) # pylint: disable=protected-access + mock_sent.assert_called_once() + self.assertEqual(result, [p1.id]) + + @patch('fedlearner_webconsole.project.project_scheduler.PendingProjectRpcController.send_to_participants') + def test_sync_all_participants(self, mock_sent: MagicMock): + p1 = PendingProject(uuid='a', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + with db.session_scope() as session: + session.add(p1) + session.commit() + result = ScheduleProjectRunner()._update_all_participants([p1.id]) # pylint: disable=protected-access + mock_sent.assert_called_once() + self.assertEqual(result, [p1.id]) + + @patch('fedlearner_webconsole.project.project_scheduler.PendingProjectRpcController.send_to_participants') + @patch('fedlearner_webconsole.project.project_scheduler.PendingProjectService.create_project_locally') + def test_create_project(self, mock_create: MagicMock, mock_sent: MagicMock): + p1 = PendingProject(uuid='a', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + p3 = PendingProject(uuid='c', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + part_infos = ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name), + 'b': ParticipantInfo(state=PendingProjectState.ACCEPTED.name) + }) + p1.set_participants_info(part_infos) + p3.set_participants_info( + ParticipantsInfo( + participants_map={ + 'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name), + 'b': ParticipantInfo(state=PendingProjectState.PENDING.name) + })) + with db.session_scope() as session: + session.add(p1) + session.add(p3) + session.commit() + mock_sent.return_value = { + 'a': ParticipantResp(succeeded=True, resp=Empty(), msg=''), + 'b': ParticipantResp(succeeded=True, resp=Empty(), msg='') + } + result = ScheduleProjectRunner()._create_project([p1.id, p3.id]) # pylint: disable=protected-access + mock_sent.assert_called_once() + mock_create.assert_called_once_with(p1.uuid) + self.assertEqual(result, [p1.uuid]) + + def test_fail_project(self): + p1 = PendingProject(uuid='a', + role=ProjectRole.COORDINATOR, + ticket_status=TicketStatus.APPROVED, + state=PendingProjectState.ACCEPTED) + p2 = PendingProject(uuid='b', + role=ProjectRole.COORDINATOR, 
+                            ticket_status=TicketStatus.APPROVED,
+                            state=PendingProjectState.ACCEPTED)
+        part_infos = ParticipantsInfo(participants_map={
+            'a': ParticipantInfo(state=PendingProjectState.ACCEPTED.name, role=ProjectRole.PARTICIPANT.name),
+        })
+        p1.set_participants_info(part_infos)
+        part_infos = ParticipantsInfo(participants_map={
+            'a': ParticipantInfo(state=PendingProjectState.CLOSED.name, role=ProjectRole.PARTICIPANT.name),
+        })
+        p2.set_participants_info(part_infos)
+        with db.session_scope() as session:
+            session.add(p1)
+            session.add(p2)
+            session.commit()
+        result = ScheduleProjectRunner()._fail_pending_project([p1.id, p2.id])  # pylint: disable=protected-access
+        self.assertEqual(result, [p2.id])
+        with db.session_scope() as session:
+            p2 = session.query(PendingProject).get(p2.id)
+            self.assertEqual(p2.state, PendingProjectState.FAILED)
+
+    def test_run(self):
+        p1 = PendingProject(uuid='a',
+                            role=ProjectRole.COORDINATOR,
+                            ticket_status=TicketStatus.APPROVED,
+                            state=PendingProjectState.ACCEPTED)
+        with db.session_scope() as session:
+            session.add(p1)
+            session.commit()
+        result = ScheduleProjectRunner().run(RunnerContext(1, RunnerInput()))
+        self.assertEqual(result[0], RunnerStatus.DONE)
+        self.assertEqual(
+            result[1].pending_project_scheduler_output,
+            PendingProjectSchedulerOutput(pending_project_created_ids=[p1.id],
+                                          pending_project_updated_ids=[p1.id],
+                                          projects_created_uuids=[p1.uuid]))
diff --git a/web_console_v2/api/fedlearner_webconsole/project/services.py b/web_console_v2/api/fedlearner_webconsole/project/services.py
new file mode 100644
index 000000000..e4f841f28
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/project/services.py
@@ -0,0 +1,197 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+from typing import List, Optional
+
+from google.protobuf.struct_pb2 import Value
+from sqlalchemy import func
+from sqlalchemy.orm import Session, joinedload
+
+from envs import Envs
+from fedlearner_webconsole.exceptions import ResourceConflictException
+from fedlearner_webconsole.participant.models import ProjectParticipant, Participant, ParticipantType
+from fedlearner_webconsole.participant.services import ParticipantService
+from fedlearner_webconsole.project.models import Project, PendingProject, PendingProjectState, ProjectRole
+from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp
+from fedlearner_webconsole.proto.common_pb2 import Variable
+from fedlearner_webconsole.proto.project_pb2 import ProjectRef, ProjectConfig, ParticipantsInfo, ParticipantInfo
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.utils.filtering import FilterBuilder, SupportedField, FieldType
+from fedlearner_webconsole.utils.paginate import Pagination, paginate
+from fedlearner_webconsole.utils.resource_name import resource_uuid
+from fedlearner_webconsole.workflow.models import Workflow
+
+
+class ProjectService(object):
+
+    def __init__(self, session: Session):
+        self._session = session
+
+    def get_projects_by_participant(self, participant_id: int) -> List[Project]:
+        projects = self._session.query(Project).join(
+            ProjectParticipant, ProjectParticipant.project_id == Project.id).filter(
+                ProjectParticipant.participant_id == participant_id). \
+            order_by(Project.created_at.desc()).all()
+        return projects
+
+    def get_projects(self) -> List[ProjectRef]:
+        """Gets all projects in the platform."""
+        # TODO(linfan.fine): Not count soft-deleted workflow
+        # Project left join workflow to get workflow counts
+        projects = self._session.query(
+            Project, func.count(Workflow.id).label('num_workflow')) \
+            .options(joinedload(Project.participants)) \
+            .outerjoin(Workflow, Workflow.project_id == Project.id) \
+            .group_by(Project.id) \
+            .order_by(Project.created_at.desc()) \
+            .all()
+        refs = []
+        for row in projects:
+            ref: ProjectRef = row.Project.to_ref()
+            ref.num_workflow = row.num_workflow
+            refs.append(ref)
+        return refs
+
+
+class PendingProjectService(object):
+    FILTER_FIELDS = {
+        'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}),
+        'role': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}),
+        'state': SupportedField(type=FieldType.STRING, ops={
+            FilterOp.EQUAL: None,
+            FilterOp.IN: None
+        }),
+    }
+
+    def __init__(self, session: Session):
+        self._session = session
+        self._filter_builder = FilterBuilder(model_class=PendingProject, supported_fields=self.FILTER_FIELDS)
+
+    def build_participants_info(self, participant_ids: List[int]) -> ParticipantsInfo:
+        participants_info = ParticipantsInfo()
+        for p_id in participant_ids:
+            participant = self._session.query(Participant).get(p_id)
+            assert participant is not None, f'participant with id {p_id} is not found'
+            # Platform participants must confirm explicitly (PENDING); light clients are auto-accepted.
+            p_info = ParticipantInfo(name=participant.name,
+                                     role=ProjectRole.PARTICIPANT.name,
+                                     state=PendingProjectState.PENDING.name if participant.get_type()
+                                     == ParticipantType.PLATFORM else PendingProjectState.ACCEPTED.name,
+                                     type=participant.get_type().name)
+            participants_info.participants_map[participant.pure_domain_name()].CopyFrom(p_info)
+
+        sys_info = SettingService(self._session).get_system_info()
+        coordinator_info = ParticipantInfo(name=sys_info.name,
+                                           state=PendingProjectState.ACCEPTED.name,
+                                           role=ProjectRole.COORDINATOR.name,
+                                           type=ParticipantType.PLATFORM.name)
+        participants_info.participants_map[sys_info.pure_domain_name].CopyFrom(coordinator_info)
+        return participants_info
+
+    def get_ids_from_participants_info(self, participants_info: ParticipantsInfo) -> List[int]:
+        participant_ids = []
+        for pure_domain, p_info in participants_info.participants_map.items():
+            if p_info.role == ProjectRole.COORDINATOR.name:
+                continue
+            participant = ParticipantService(self._session).get_participant_by_pure_domain_name(pure_domain)
+            if participant is not None:
+                participant_ids.append(participant.id)
+        return participant_ids
+
+    def create_pending_project(self,
+                               name: str,
+                               config: ProjectConfig,
+                               participants_info: ParticipantsInfo,
+                               comment: str,
+                               creator_username: str,
+                               uuid: Optional[str] = None,
+                               role: ProjectRole = ProjectRole.PARTICIPANT,
+                               state: PendingProjectState = PendingProjectState.PENDING) -> PendingProject:
+        pending_project = PendingProject(
+            name=name,
+            uuid=uuid if uuid else resource_uuid(),
+            comment=comment,
+            role=role,
+            state=state,
+            creator_username=creator_username,
+        )
+        pending_project.set_config(config)
+        pending_project.set_participants_info(participants_info)
+        self._session.add(pending_project)
+        self._session.flush()
+        return pending_project
+
+    def update_state_as_participant(self, pending_project_id: int, state: str) -> PendingProject:
+        pending_project: PendingProject = self._session.query(PendingProject).get(pending_project_id)
+        assert pending_project is not None, f'pending project with id {pending_project_id} is not found'
+        assert pending_project.role == ProjectRole.PARTICIPANT, 'only a participant can accept or refuse'
+
+        # TODO(xiangyuxuan.prs): remove after using token instead of name to ensure consistency of project
+        if PendingProjectState(state) == PendingProjectState.ACCEPTED and self.duplicated_name_exists(
+                pending_project.name):
+            raise ResourceConflictException(f'{pending_project.name} already exists')
+
+        pending_project.state = PendingProjectState(state)
+        return pending_project
+
+    def list_pending_projects(self,
+                              page: Optional[int] = None,
+                              page_size: Optional[int] = None,
+                              filter_exp: Optional[FilterExpression] = None) -> Pagination:
+        """Lists pending projects by filter expression with pagination.
+
+        Raises:
+            ValueError: if the expression is unsupported.
+        """
+        query = self._session.query(PendingProject)
+        if filter_exp:
+            query = self._filter_builder.build_query(query, filter_exp)
+        query = query.order_by(PendingProject.id.desc())
+        return paginate(query, page, page_size)
+
+    def create_project_locally(self, pending_project_uuid: str):
+        pending_project: PendingProject = self._session.query(PendingProject).filter_by(
+            uuid=pending_project_uuid).first()
+        project = Project(name=pending_project.name,
+                          token=pending_project.uuid,
+                          role=pending_project.role,
+                          creator=pending_project.creator_username,
+                          comment=pending_project.comment)
+        project.set_participants_info(pending_project.get_participants_info())
+        project_config: ProjectConfig = pending_project.get_config()
+        # Initialize the storage_root_path variable so that new projects pick up
+        # the environment's configuration by default.
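+        # Both `value` and `typed_value` are populated so that readers of either
+        # field observe the same storage root.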
+ project_config.variables.append( + Variable(name='storage_root_path', + typed_value=Value(string_value=Envs.STORAGE_ROOT), + value=Envs.STORAGE_ROOT)) + project.set_config(project_config) + self._session.add(project) + self._session.flush() + part_ids = self.get_ids_from_participants_info(pending_project.get_participants_info()) + for participant_id in part_ids: + # insert a relationship into the table + new_relationship = ProjectParticipant(project_id=project.id, participant_id=participant_id) + self._session.add(new_relationship) + self._session.flush() + pending_project.state = PendingProjectState.CLOSED + + def duplicated_name_exists(self, name: str) -> bool: + p = self._session.query(Project.id).filter_by(name=name).first() + if p is not None: + return True + pending_p = self._session.query(PendingProject.id).filter_by(name=name, + state=PendingProjectState.ACCEPTED).first() + if pending_p is not None: + return True + return False diff --git a/web_console_v2/api/fedlearner_webconsole/project/services_test.py b/web_console_v2/api/fedlearner_webconsole/project/services_test.py new file mode 100644 index 000000000..7a0c62f81 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/project/services_test.py @@ -0,0 +1,253 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import time +import unittest +from unittest.mock import patch + +from google.protobuf.struct_pb2 import Value + +from envs import Envs +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import ResourceConflictException +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant, ParticipantType +from fedlearner_webconsole.project.models import Project, ProjectRole, PendingProjectState, PendingProject +from fedlearner_webconsole.project.services import ProjectService, PendingProjectService +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterExpressionKind, SimpleExpression, FilterOp +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo, ProjectConfig +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.workflow.models import Workflow +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ProjectServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project_1 = Project(id=1, name='project 1', creator='user1') + participant_1 = Participant(id=1, name='participant 1', domain_name='fl-participant-1.com') + relation = ProjectParticipant(project_id=project_1.id, participant_id=participant_1.id) + session.add_all([project_1, participant_1, relation]) + session.commit() + time.sleep(1) + + project_2 = Project(id=2, name='project 2', creator='user2') + participant_2 = Participant(id=2, name='participant 2', domain_name='fl-participant-2.com') + relation_1 = ProjectParticipant(project_id=project_1.id, participant_id=participant_2.id) + relation_2 = ProjectParticipant(project_id=project_2.id, participant_id=participant_2.id) + session.add_all([project_2, participant_2, relation_1, relation_2]) + + session.commit() + + def test_get_projects_by_participant_id(self): + with db.session_scope() as session: + service = ProjectService(session) + projects = service.get_projects_by_participant(1) + self.assertEqual(len(projects), 1) + self.assertEqual(projects[0].name, 'project 1') + + projects = service.get_projects_by_participant(2) + self.assertEqual(len(projects), 2) + self.assertCountEqual([projects[0].name, projects[1].name], ['project 1', 'project 2']) + + def test_get_projects(self): + with db.session_scope() as session: + workflow_1 = Workflow(name='workflow 1', project_id=1) + workflow_2 = Workflow(name='workflow 2', project_id=1) + session.add_all([workflow_1, workflow_2]) + session.commit() + with db.session_scope() as session: + service = ProjectService(session) + projects = service.get_projects() + self.assertEqual(len(projects), 2) + + self.assertEqual(projects[0].name, 'project 2') + self.assertEqual(len(projects[0].participants), 1) + self.assertEqual(projects[0].num_workflow, 0) + + self.assertEqual(projects[1].name, 'project 1') + self.assertEqual(len(projects[1].participants), 2) + self.assertEqual(projects[1].num_workflow, 2) + + +class PendingProjectServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + participant_1 = Participant(id=1, + name='participant 1', + domain_name='fl-participant-1.com', + type=ParticipantType.LIGHT_CLIENT) + participant_2 = Participant(id=2, + name='participant 2', + domain_name='fl-participant-2.com', + type=ParticipantType.PLATFORM) + session.add_all([participant_1, participant_2]) + 
session.commit() + + @patch('fedlearner_webconsole.project.services.SettingService.get_system_info') + def test_build_participants_info(self, mock_system_info): + mock_system_info.return_value = SystemInfo(pure_domain_name='self', name='self_name') + with db.session_scope() as session: + info = PendingProjectService(session).build_participants_info([1, 2]) + self.assertEqual( + info, + ParticipantsInfo( + participants_map={ + 'participant-1': + ParticipantInfo(name='participant 1', + role=ProjectRole.PARTICIPANT.name, + state=PendingProjectState.ACCEPTED.name, + type=ParticipantType.LIGHT_CLIENT.name), + 'participant-2': + ParticipantInfo(name='participant 2', + role=ProjectRole.PARTICIPANT.name, + state=PendingProjectState.PENDING.name, + type=ParticipantType.PLATFORM.name), + 'self': + ParticipantInfo(name='self_name', + role=ProjectRole.COORDINATOR.name, + state=PendingProjectState.ACCEPTED.name, + type=ParticipantType.PLATFORM.name) + })) + + def test_get_ids_from_participants_info(self): + participants_info = ParticipantsInfo( + participants_map={ + 'participant-1': ParticipantInfo(name='participant 1'), + 'participant-2': ParticipantInfo(name='participant 2'), + 'self': ParticipantInfo(name='self', role=ProjectRole.COORDINATOR.name), + 'no connection': ParticipantInfo(name='no connection') + }) + with db.session_scope() as session: + ids = PendingProjectService(session).get_ids_from_participants_info(participants_info) + self.assertCountEqual(ids, [1, 2]) + + def test_create_pending_project(self): + with db.session_scope() as session: + pending_project = PendingProjectService(session).create_pending_project( + name='test', + config=ProjectConfig(variables=[Variable(name='test')]), + participants_info=ParticipantsInfo( + participants_map={'self': ParticipantInfo(name='self', role=ProjectRole.COORDINATOR.name)}), + comment='test', + creator_username='test', + uuid='uuid') + session.commit() + with db.session_scope() as session: + result: PendingProject = session.query(PendingProject).get(pending_project.id) + self.assertEqual(result.get_config(), ProjectConfig(variables=[Variable(name='test')])) + self.assertEqual( + result.get_participants_info(), + ParticipantsInfo( + participants_map={'self': ParticipantInfo(name='self', role=ProjectRole.COORDINATOR.name)})) + self.assertEqual(result.name, 'test') + self.assertEqual(result.uuid, 'uuid') + + @patch('fedlearner_webconsole.project.services.PendingProjectService.duplicated_name_exists') + def test_update_state_as_participant(self, mock_dup): + pending_project = PendingProject(name='test', state=PendingProjectState.PENDING, role=ProjectRole.PARTICIPANT) + + with db.session_scope() as session: + session.add(pending_project) + session.commit() + + mock_dup.return_value = True + with db.session_scope() as session: + with self.assertRaises(ResourceConflictException): + PendingProjectService(session).update_state_as_participant(pending_project.id, + PendingProjectState.ACCEPTED.name) + with db.session_scope() as session: + PendingProjectService(session).update_state_as_participant(pending_project.id, + PendingProjectState.CLOSED.name) + session.commit() + with db.session_scope() as session: + result: PendingProject = session.query(PendingProject).get(pending_project.id) + self.assertEqual(result.state, PendingProjectState.CLOSED) + + def test_create_project_locally(self): + with db.session_scope() as session: + pending_project = PendingProjectService(session).create_pending_project( + name='test', + 
config=ProjectConfig(variables=[Variable(name='test')]), + participants_info=ParticipantsInfo( + participants_map={'self': ParticipantInfo(name='self', role=ProjectRole.COORDINATOR.name)}), + comment='test', + creator_username='test', + uuid='uuid') + session.commit() + + with db.session_scope() as session: + PendingProjectService(session).create_project_locally(pending_project.uuid) + session.commit() + with db.session_scope() as session: + project = session.query(Project).filter_by(name=pending_project.name, token=pending_project.uuid).first() + self.assertEqual(project.get_variables(), [ + Variable(name='test'), + Variable(name='storage_root_path', + value=Envs.STORAGE_ROOT, + typed_value=Value(string_value=Envs.STORAGE_ROOT)) + ]) + pending_project = session.query(PendingProject).get(pending_project.id) + self.assertEqual( + project.get_participants_info(), + ParticipantsInfo( + participants_map={'self': ParticipantInfo(name='self', role=ProjectRole.COORDINATOR.name)})) + self.assertEqual(project.creator, 'test') + self.assertEqual(pending_project.state, PendingProjectState.CLOSED) + self.assertEqual(project.comment, 'test') + + def test_list_pending_projects(self): + pending_project = PendingProject(name='test', state=PendingProjectState.PENDING, role=ProjectRole.PARTICIPANT) + pending_project1 = PendingProject(name='test1', + state=PendingProjectState.ACCEPTED, + role=ProjectRole.COORDINATOR) + with db.session_scope() as session: + session.add(pending_project) + session.add(pending_project1) + session.commit() + with db.session_scope() as session: + result = PendingProjectService(session).list_pending_projects().get_items() + self.assertEqual(len(result), 2) + result = PendingProjectService(session).list_pending_projects(page=1, page_size=1).get_items() + self.assertEqual(len(result), 1) + self.assertEqual(result[0].id, pending_project1.id) + exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='role', + op=FilterOp.EQUAL, + string_value='COORDINATOR')) + result = PendingProjectService(session).list_pending_projects(filter_exp=exp).get_items() + self.assertEqual(len(result), 1) + self.assertEqual(result[0].id, pending_project1.id) + + def test_duplicated_name_exists(self): + p = Project(name='test') + pending_p = PendingProject(name='test1', state=PendingProjectState.ACCEPTED) + pending_p2 = PendingProject(name='test2', state=PendingProjectState.CLOSED) + with db.session_scope() as session: + session.add_all([p, pending_p, pending_p2]) + session.commit() + with db.session_scope() as session: + self.assertTrue(PendingProjectService(session).duplicated_name_exists(p.name)) + self.assertTrue(PendingProjectService(session).duplicated_name_exists(pending_p.name)) + self.assertFalse(PendingProjectService(session).duplicated_name_exists(pending_p2.name)) + self.assertFalse(PendingProjectService(session).duplicated_name_exists('test0')) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/proto/common/extension_pb2.pyi b/web_console_v2/api/fedlearner_webconsole/proto/common/extension_pb2.pyi new file mode 100644 index 000000000..bcc03e48f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/common/extension_pb2.pyi @@ -0,0 +1,9 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import google.protobuf.descriptor + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... + +secret: google.protobuf.descriptor.FieldDescriptor = ... 
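Taken together, the scheduler and service above implement the coordinator-side project lifecycle: create a `PendingProject`, fan it out to participants, sync their decisions, and materialize a `Project` once everyone has answered. The sketch below strings those calls together the way the tests above drive them. It is illustrative only: the participant ids, project name, and username are hypothetical, the pending project's review ticket is assumed to reach APPROVED, and a real deployment would rely on the composer to invoke `ScheduleProjectRunner` periodically rather than calling `run()` by hand.

```python
# Minimal sketch of the coordinator-side flow, assuming participants with ids
# 1 and 2 already exist and the review ticket gets APPROVED out of band.
from fedlearner_webconsole.composer.context import RunnerContext
from fedlearner_webconsole.db import db
from fedlearner_webconsole.project.models import PendingProjectState, ProjectRole
from fedlearner_webconsole.project.project_scheduler import ScheduleProjectRunner
from fedlearner_webconsole.project.services import PendingProjectService
from fedlearner_webconsole.proto.composer_pb2 import RunnerInput
from fedlearner_webconsole.proto.project_pb2 import ProjectConfig

with db.session_scope() as session:
    service = PendingProjectService(session)
    # Build the participants map (coordinator included) from the invited ids.
    participants_info = service.build_participants_info([1, 2])
    pending = service.create_pending_project(name='demo-project',
                                             config=ProjectConfig(),
                                             participants_info=participants_info,
                                             comment='',
                                             creator_username='admin',
                                             role=ProjectRole.COORDINATOR,
                                             state=PendingProjectState.ACCEPTED)
    session.commit()

# One composer tick: push the pending project to participants, sync decisions,
# create the project locally when everyone has answered, or fail it if all
# participants closed it.
status, output = ScheduleProjectRunner().run(RunnerContext(1, RunnerInput()))
```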
diff --git a/web_console_v2/api/fedlearner_webconsole/proto/jsonschemas/.gitignore b/web_console_v2/api/fedlearner_webconsole/proto/jsonschemas/.gitignore new file mode 100644 index 000000000..5e7d2734c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/jsonschemas/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +!.gitignore diff --git a/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/job_service_pb2.pyi b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/job_service_pb2.pyi new file mode 100644 index 000000000..0816a4b5d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/job_service_pb2.pyi @@ -0,0 +1,304 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import fedlearner_webconsole.proto.dataset_pb2 +import fedlearner_webconsole.proto.mmgr_pb2 +import google.protobuf.descriptor +import google.protobuf.message +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... + +class InformTrustedJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + AUTH_STATUS_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + auth_status: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + auth_status : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"auth_status",b"auth_status",u"uuid",b"uuid"]) -> None: ... +global___InformTrustedJobGroupRequest = InformTrustedJobGroupRequest + +class UpdateTrustedJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + ALGORITHM_UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + algorithm_uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + algorithm_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_uuid",b"algorithm_uuid",u"uuid",b"uuid"]) -> None: ... +global___UpdateTrustedJobGroupRequest = UpdateTrustedJobGroupRequest + +class DeleteTrustedJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___DeleteTrustedJobGroupRequest = DeleteTrustedJobGroupRequest + +class GetTrustedJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___GetTrustedJobGroupRequest = GetTrustedJobGroupRequest + +class GetTrustedJobGroupResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + AUTH_STATUS_FIELD_NUMBER: builtins.int + auth_status: typing.Text = ... + + def __init__(self, + *, + auth_status : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"auth_status",b"auth_status"]) -> None: ... 
+global___GetTrustedJobGroupResponse = GetTrustedJobGroupResponse + +class InformTrustedJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + AUTH_STATUS_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + auth_status: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + auth_status : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"auth_status",b"auth_status",u"uuid",b"uuid"]) -> None: ... +global___InformTrustedJobRequest = InformTrustedJobRequest + +class GetTrustedJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___GetTrustedJobRequest = GetTrustedJobRequest + +class GetTrustedJobResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + AUTH_STATUS_FIELD_NUMBER: builtins.int + auth_status: typing.Text = ... + + def __init__(self, + *, + auth_status : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"auth_status",b"auth_status"]) -> None: ... +global___GetTrustedJobResponse = GetTrustedJobResponse + +class CreateTrustedExportJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + EXPORT_COUNT_FIELD_NUMBER: builtins.int + PARENT_UUID_FIELD_NUMBER: builtins.int + TICKET_UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + name: typing.Text = ... + export_count: builtins.int = ... + parent_uuid: typing.Text = ... + ticket_uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + name : typing.Text = ..., + export_count : builtins.int = ..., + parent_uuid : typing.Text = ..., + ticket_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"export_count",b"export_count",u"name",b"name",u"parent_uuid",b"parent_uuid",u"ticket_uuid",b"ticket_uuid",u"uuid",b"uuid"]) -> None: ... +global___CreateTrustedExportJobRequest = CreateTrustedExportJobRequest + +class CreateModelJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + NAME_FIELD_NUMBER: builtins.int + UUID_FIELD_NUMBER: builtins.int + GROUP_UUID_FIELD_NUMBER: builtins.int + MODEL_JOB_TYPE_FIELD_NUMBER: builtins.int + ALGORITHM_TYPE_FIELD_NUMBER: builtins.int + GLOBAL_CONFIG_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + name: typing.Text = ... + uuid: typing.Text = ... + group_uuid: typing.Text = ... + model_job_type: typing.Text = ... + algorithm_type: typing.Text = ... + version: builtins.int = ... + + @property + def global_config(self) -> fedlearner_webconsole.proto.mmgr_pb2.ModelJobGlobalConfig: ... + + def __init__(self, + *, + name : typing.Text = ..., + uuid : typing.Text = ..., + group_uuid : typing.Text = ..., + model_job_type : typing.Text = ..., + algorithm_type : typing.Text = ..., + global_config : typing.Optional[fedlearner_webconsole.proto.mmgr_pb2.ModelJobGlobalConfig] = ..., + version : builtins.int = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal[u"global_config",b"global_config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_type",b"algorithm_type",u"global_config",b"global_config",u"group_uuid",b"group_uuid",u"model_job_type",b"model_job_type",u"name",b"name",u"uuid",b"uuid",u"version",b"version"]) -> None: ... +global___CreateModelJobRequest = CreateModelJobRequest + +class CreateDatasetJobStageRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DATASET_JOB_UUID_FIELD_NUMBER: builtins.int + DATASET_JOB_STAGE_UUID_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + EVENT_TIME_FIELD_NUMBER: builtins.int + dataset_job_uuid: typing.Text = ... + dataset_job_stage_uuid: typing.Text = ... + name: typing.Text = ... + event_time: builtins.int = ... + + def __init__(self, + *, + dataset_job_uuid : typing.Text = ..., + dataset_job_stage_uuid : typing.Text = ..., + name : typing.Text = ..., + event_time : builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"dataset_job_stage_uuid",b"dataset_job_stage_uuid",u"dataset_job_uuid",b"dataset_job_uuid",u"event_time",b"event_time",u"name",b"name"]) -> None: ... +global___CreateDatasetJobStageRequest = CreateDatasetJobStageRequest + +class GetDatasetJobStageRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DATASET_JOB_STAGE_UUID_FIELD_NUMBER: builtins.int + dataset_job_stage_uuid: typing.Text = ... + + def __init__(self, + *, + dataset_job_stage_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"dataset_job_stage_uuid",b"dataset_job_stage_uuid"]) -> None: ... +global___GetDatasetJobStageRequest = GetDatasetJobStageRequest + +class GetDatasetJobStageResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DATASET_JOB_STAGE_FIELD_NUMBER: builtins.int + + @property + def dataset_job_stage(self) -> fedlearner_webconsole.proto.dataset_pb2.DatasetJobStage: ... + + def __init__(self, + *, + dataset_job_stage : typing.Optional[fedlearner_webconsole.proto.dataset_pb2.DatasetJobStage] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"dataset_job_stage",b"dataset_job_stage"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"dataset_job_stage",b"dataset_job_stage"]) -> None: ... +global___GetDatasetJobStageResponse = GetDatasetJobStageResponse + +class CreateModelJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + NAME_FIELD_NUMBER: builtins.int + UUID_FIELD_NUMBER: builtins.int + ALGORITHM_TYPE_FIELD_NUMBER: builtins.int + DATASET_UUID_FIELD_NUMBER: builtins.int + ALGORITHM_PROJECT_LIST_FIELD_NUMBER: builtins.int + name: typing.Text = ... + uuid: typing.Text = ... + algorithm_type: typing.Text = ... + dataset_uuid: typing.Text = ... + + @property + def algorithm_project_list(self) -> fedlearner_webconsole.proto.mmgr_pb2.AlgorithmProjectList: ... + + def __init__(self, + *, + name : typing.Text = ..., + uuid : typing.Text = ..., + algorithm_type : typing.Text = ..., + dataset_uuid : typing.Text = ..., + algorithm_project_list : typing.Optional[fedlearner_webconsole.proto.mmgr_pb2.AlgorithmProjectList] = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal[u"algorithm_project_list",b"algorithm_project_list"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_project_list",b"algorithm_project_list",u"algorithm_type",b"algorithm_type",u"dataset_uuid",b"dataset_uuid",u"name",b"name",u"uuid",b"uuid"]) -> None: ... +global___CreateModelJobGroupRequest = CreateModelJobGroupRequest + +class GetModelJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___GetModelJobRequest = GetModelJobRequest + +class GetModelJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___GetModelJobGroupRequest = GetModelJobGroupRequest + +class InformModelJobGroupRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + AUTH_STATUS_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + auth_status: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + auth_status : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"auth_status",b"auth_status",u"uuid",b"uuid"]) -> None: ... +global___InformModelJobGroupRequest = InformModelJobGroupRequest + +class UpdateDatasetJobSchedulerStateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + SCHEDULER_STATE_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + scheduler_state: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + scheduler_state : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"scheduler_state",b"scheduler_state",u"uuid",b"uuid"]) -> None: ... +global___UpdateDatasetJobSchedulerStateRequest = UpdateDatasetJobSchedulerStateRequest diff --git a/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/project_service_pb2.pyi b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/project_service_pb2.pyi new file mode 100644 index 000000000..0352da8c0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/project_service_pb2.pyi @@ -0,0 +1,149 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import fedlearner_webconsole.proto.project_pb2 +import fedlearner_webconsole.proto.workflow_definition_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... + +class CreatePendingProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ NAME_FIELD_NUMBER: builtins.int + UUID_FIELD_NUMBER: builtins.int + PARTICIPANTS_INFO_FIELD_NUMBER: builtins.int + COMMENT_FIELD_NUMBER: builtins.int + CREATOR_USERNAME_FIELD_NUMBER: builtins.int + CONFIG_FIELD_NUMBER: builtins.int + TICKET_UUID_FIELD_NUMBER: builtins.int + name: typing.Text = ... + uuid: typing.Text = ... + comment: typing.Text = ... + creator_username: typing.Text = ... + ticket_uuid: typing.Text = ... + + @property + def participants_info(self) -> fedlearner_webconsole.proto.project_pb2.ParticipantsInfo: ... + + @property + def config(self) -> fedlearner_webconsole.proto.project_pb2.ProjectConfig: ... + + def __init__(self, + *, + name : typing.Text = ..., + uuid : typing.Text = ..., + participants_info : typing.Optional[fedlearner_webconsole.proto.project_pb2.ParticipantsInfo] = ..., + comment : typing.Text = ..., + creator_username : typing.Text = ..., + config : typing.Optional[fedlearner_webconsole.proto.project_pb2.ProjectConfig] = ..., + ticket_uuid : typing.Text = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"config",b"config",u"participants_info",b"participants_info"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"comment",b"comment",u"config",b"config",u"creator_username",b"creator_username",u"name",b"name",u"participants_info",b"participants_info",u"ticket_uuid",b"ticket_uuid",u"uuid",b"uuid"]) -> None: ... +global___CreatePendingProjectRequest = CreatePendingProjectRequest + +class UpdatePendingProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + class ParticipantsMapEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: typing.Text = ... + + @property + def value(self) -> fedlearner_webconsole.proto.project_pb2.ParticipantInfo: ... + + def __init__(self, + *, + key : typing.Text = ..., + value : typing.Optional[fedlearner_webconsole.proto.project_pb2.ParticipantInfo] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ... + + UUID_FIELD_NUMBER: builtins.int + PARTICIPANTS_MAP_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + @property + def participants_map(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, fedlearner_webconsole.proto.project_pb2.ParticipantInfo]: ... + + def __init__(self, + *, + uuid : typing.Text = ..., + participants_map : typing.Optional[typing.Mapping[typing.Text, fedlearner_webconsole.proto.project_pb2.ParticipantInfo]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"participants_map",b"participants_map",u"uuid",b"uuid"]) -> None: ... +global___UpdatePendingProjectRequest = UpdatePendingProjectRequest + +class SyncPendingProjectStateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + STATE_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + state: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + state : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"state",b"state",u"uuid",b"uuid"]) -> None: ... 
+global___SyncPendingProjectStateRequest = SyncPendingProjectStateRequest + +class CreateProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___CreateProjectRequest = CreateProjectRequest + +class DeletePendingProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___DeletePendingProjectRequest = DeletePendingProjectRequest + +class SendTemplateRevisionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + CONFIG_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + COMMENT_FIELD_NUMBER: builtins.int + KIND_FIELD_NUMBER: builtins.int + REVISION_INDEX_FIELD_NUMBER: builtins.int + name: typing.Text = ... + comment: typing.Text = ... + kind: typing.Text = ... + revision_index: builtins.int = ... + + @property + def config(self) -> fedlearner_webconsole.proto.workflow_definition_pb2.WorkflowDefinition: ... + + def __init__(self, + *, + config : typing.Optional[fedlearner_webconsole.proto.workflow_definition_pb2.WorkflowDefinition] = ..., + name : typing.Text = ..., + comment : typing.Text = ..., + kind : typing.Text = ..., + revision_index : builtins.int = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"config",b"config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"comment",b"comment",u"config",b"config",u"kind",b"kind",u"name",b"name",u"revision_index",b"revision_index"]) -> None: ... +global___SendTemplateRevisionRequest = SendTemplateRevisionRequest diff --git a/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/resource_service_pb2.pyi b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/resource_service_pb2.pyi new file mode 100644 index 000000000..0b67b9ec3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/resource_service_pb2.pyi @@ -0,0 +1,174 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import fedlearner_webconsole.proto.algorithm_pb2 +import fedlearner_webconsole.proto.dataset_pb2 +import fedlearner_webconsole.proto.filtering_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... + +class ListAlgorithmProjectsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + FILTER_EXP_FIELD_NUMBER: builtins.int + + @property + def filter_exp(self) -> fedlearner_webconsole.proto.filtering_pb2.FilterExpression: ... + + def __init__(self, + *, + filter_exp : typing.Optional[fedlearner_webconsole.proto.filtering_pb2.FilterExpression] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"filter_exp",b"filter_exp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"filter_exp",b"filter_exp"]) -> None: ... 
+global___ListAlgorithmProjectsRequest = ListAlgorithmProjectsRequest + +class ListAlgorithmProjectsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + ALGORITHM_PROJECTS_FIELD_NUMBER: builtins.int + + @property + def algorithm_projects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[fedlearner_webconsole.proto.algorithm_pb2.AlgorithmProjectPb]: ... + + def __init__(self, + *, + algorithm_projects : typing.Optional[typing.Iterable[fedlearner_webconsole.proto.algorithm_pb2.AlgorithmProjectPb]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_projects",b"algorithm_projects"]) -> None: ... +global___ListAlgorithmProjectsResponse = ListAlgorithmProjectsResponse + +class ListAlgorithmsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + ALGORITHM_PROJECT_UUID_FIELD_NUMBER: builtins.int + algorithm_project_uuid: typing.Text = ... + + def __init__(self, + *, + algorithm_project_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_project_uuid",b"algorithm_project_uuid"]) -> None: ... +global___ListAlgorithmsRequest = ListAlgorithmsRequest + +class ListAlgorithmsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + ALGORITHMS_FIELD_NUMBER: builtins.int + + @property + def algorithms(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[fedlearner_webconsole.proto.algorithm_pb2.AlgorithmPb]: ... + + def __init__(self, + *, + algorithms : typing.Optional[typing.Iterable[fedlearner_webconsole.proto.algorithm_pb2.AlgorithmPb]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithms",b"algorithms"]) -> None: ... +global___ListAlgorithmsResponse = ListAlgorithmsResponse + +class GetAlgorithmProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + ALGORITHM_PROJECT_UUID_FIELD_NUMBER: builtins.int + algorithm_project_uuid: typing.Text = ... + + def __init__(self, + *, + algorithm_project_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_project_uuid",b"algorithm_project_uuid"]) -> None: ... +global___GetAlgorithmProjectRequest = GetAlgorithmProjectRequest + +class GetAlgorithmRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + ALGORITHM_UUID_FIELD_NUMBER: builtins.int + algorithm_uuid: typing.Text = ... + + def __init__(self, + *, + algorithm_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_uuid",b"algorithm_uuid"]) -> None: ... +global___GetAlgorithmRequest = GetAlgorithmRequest + +class GetAlgorithmFilesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + ALGORITHM_UUID_FIELD_NUMBER: builtins.int + algorithm_uuid: typing.Text = ... + + def __init__(self, + *, + algorithm_uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"algorithm_uuid",b"algorithm_uuid"]) -> None: ... +global___GetAlgorithmFilesRequest = GetAlgorithmFilesRequest + +class GetAlgorithmFilesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ HASH_FIELD_NUMBER: builtins.int + CHUNK_FIELD_NUMBER: builtins.int + hash: typing.Text = ... + chunk: builtins.bytes = ... + + def __init__(self, + *, + hash : typing.Text = ..., + chunk : builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"chunk",b"chunk",u"hash",b"hash"]) -> None: ... +global___GetAlgorithmFilesResponse = GetAlgorithmFilesResponse + +class InformDatasetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + AUTH_STATUS_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + auth_status: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + auth_status : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"auth_status",b"auth_status",u"uuid",b"uuid"]) -> None: ... +global___InformDatasetRequest = InformDatasetRequest + +class ListDatasetsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + KIND_FIELD_NUMBER: builtins.int + STATE_FIELD_NUMBER: builtins.int + TIME_RANGE_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + kind: typing.Text = ... + state: typing.Text = ... + + @property + def time_range(self) -> fedlearner_webconsole.proto.dataset_pb2.TimeRange: ... + + def __init__(self, + *, + uuid : typing.Text = ..., + kind : typing.Text = ..., + state : typing.Text = ..., + time_range : typing.Optional[fedlearner_webconsole.proto.dataset_pb2.TimeRange] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"time_range",b"time_range"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"kind",b"kind",u"state",b"state",u"time_range",b"time_range",u"uuid",b"uuid"]) -> None: ... +global___ListDatasetsRequest = ListDatasetsRequest + +class ListDatasetsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + PARTICIPANT_DATASETS_FIELD_NUMBER: builtins.int + + @property + def participant_datasets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[fedlearner_webconsole.proto.dataset_pb2.ParticipantDatasetRef]: ... + + def __init__(self, + *, + participant_datasets : typing.Optional[typing.Iterable[fedlearner_webconsole.proto.dataset_pb2.ParticipantDatasetRef]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"participant_datasets",b"participant_datasets"]) -> None: ... +global___ListDatasetsResponse = ListDatasetsResponse diff --git a/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/review_service_pb2.pyi b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/review_service_pb2.pyi new file mode 100644 index 000000000..43120bc48 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/review_service_pb2.pyi @@ -0,0 +1,45 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import fedlearner_webconsole.proto.review_pb2 +import google.protobuf.descriptor +import google.protobuf.message +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... + +class CreateTicketRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ TTYPE_FIELD_NUMBER: builtins.int + CREATOR_USERNAME_FIELD_NUMBER: builtins.int + DETAILS_FIELD_NUMBER: builtins.int + ttype: fedlearner_webconsole.proto.review_pb2.TicketType.V = ... + creator_username: typing.Text = ... + + @property + def details(self) -> fedlearner_webconsole.proto.review_pb2.TicketDetails: ... + + def __init__(self, + *, + ttype : fedlearner_webconsole.proto.review_pb2.TicketType.V = ..., + creator_username : typing.Text = ..., + details : typing.Optional[fedlearner_webconsole.proto.review_pb2.TicketDetails] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"details",b"details"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"creator_username",b"creator_username",u"details",b"details",u"ttype",b"ttype"]) -> None: ... +global___CreateTicketRequest = CreateTicketRequest + +class GetTicketRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + UUID_FIELD_NUMBER: builtins.int + uuid: typing.Text = ... + + def __init__(self, + *, + uuid : typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"uuid",b"uuid"]) -> None: ... +global___GetTicketRequest = GetTicketRequest diff --git a/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/system_service_pb2.pyi b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/system_service_pb2.pyi new file mode 100644 index 000000000..fd4c5eca9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/proto/rpc/v2/system_service_pb2.pyi @@ -0,0 +1,82 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import fedlearner_webconsole.proto.common_pb2 +import google.protobuf.descriptor +import google.protobuf.message +import google.protobuf.struct_pb2 +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... + +class CheckHealthRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + def __init__(self, + ) -> None: ... +global___CheckHealthRequest = CheckHealthRequest + +class CheckHealthResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + APPLICATION_VERSION_FIELD_NUMBER: builtins.int + HEALTHY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + healthy: builtins.bool = ... + message: typing.Text = ... + + @property + def application_version(self) -> fedlearner_webconsole.proto.common_pb2.ApplicationVersion: ... + + def __init__(self, + *, + application_version : typing.Optional[fedlearner_webconsole.proto.common_pb2.ApplicationVersion] = ..., + healthy : builtins.bool = ..., + message : typing.Text = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"application_version",b"application_version"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"application_version",b"application_version",u"healthy",b"healthy",u"message",b"message"]) -> None: ... +global___CheckHealthResponse = CheckHealthResponse + +class ListFlagsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + def __init__(self, + ) -> None: ... +global___ListFlagsRequest = ListFlagsRequest + +class ListFlagsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ FLAGS_FIELD_NUMBER: builtins.int + + @property + def flags(self) -> google.protobuf.struct_pb2.Struct: ... + + def __init__(self, + *, + flags : typing.Optional[google.protobuf.struct_pb2.Struct] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal[u"flags",b"flags"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal[u"flags",b"flags"]) -> None: ... +global___ListFlagsResponse = ListFlagsResponse + +class CheckTeeEnabledRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + def __init__(self, + ) -> None: ... +global___CheckTeeEnabledRequest = CheckTeeEnabledRequest + +class CheckTeeEnabledResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + TEE_ENABLED_FIELD_NUMBER: builtins.int + tee_enabled: builtins.bool = ... + + def __init__(self, + *, + tee_enabled : builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal[u"tee_enabled",b"tee_enabled"]) -> None: ... +global___CheckTeeEnabledResponse = CheckTeeEnabledResponse diff --git a/web_console_v2/api/fedlearner_webconsole/review/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/review/BUILD.bazel new file mode 100644 index 000000000..9c1c6149c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/review/BUILD.bazel @@ -0,0 +1,86 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "common_lib", + srcs = [ + "common.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_library( + name = "ticket_helper_lib", + srcs = [ + "ticket_helper.py", + ], + imports = ["../.."], + deps = [ + ":common_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:review_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "ticket_helper_lib_test", + srcs = [ + "ticket_helper_test.py", + ], + imports = ["../.."], + main = "ticket_helper_test.py", + deps = [ + ":ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "runners_lib", + srcs = [ + "runners.py", + ], + imports = ["../.."], + deps = [ + ":common_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:review_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = 
"runners_test", + size = "small", + srcs = [ + "runners_test.py", + ], + imports = ["../.."], + deps = [ + ":runners_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/review/common.py b/web_console_v2/api/fedlearner_webconsole/review/common.py new file mode 100644 index 000000000..924cf7204 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/review/common.py @@ -0,0 +1,33 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from fedlearner_webconsole.project.models import PendingProject +from fedlearner_webconsole.proto import review_pb2 +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.mmgr.models import ModelJobGroup +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob + +REVIEW_ORM_MAPPER = { + review_pb2.TicketType.CREATE_PARTICIPANT: Participant, + review_pb2.TicketType.CREATE_PROJECT: PendingProject, + review_pb2.TicketType.PUBLISH_DATASET: Dataset, + review_pb2.TicketType.CREATE_PROCESSED_DATASET: Dataset, + review_pb2.TicketType.CREATE_MODELJOB_GROUP: ModelJobGroup, + review_pb2.TicketType.TK_CREATE_TRUSTED_JOB_GROUP: TrustedJobGroup, + review_pb2.TicketType.TK_CREATE_TRUSTED_EXPORT_JOB: TrustedJob, +} + +NO_CENTRAL_SERVER_UUID = 'no_central_server_uuid' diff --git a/web_console_v2/api/fedlearner_webconsole/review/runners.py b/web_console_v2/api/fedlearner_webconsole/review/runners.py new file mode 100644 index 000000000..138efad93 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/review/runners.py @@ -0,0 +1,70 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import logging
+from typing import Tuple
+from fedlearner_webconsole.db import db
+
+from fedlearner_webconsole.composer.context import RunnerContext
+from fedlearner_webconsole.composer.interface import IRunnerV2
+from fedlearner_webconsole.composer.models import RunnerStatus
+from fedlearner_webconsole.review.common import REVIEW_ORM_MAPPER
+from fedlearner_webconsole.rpc.v2.review_service_client import ReviewServiceClient
+from fedlearner_webconsole.proto import review_pb2
+from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, TicketHelperOutput
+from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+
+
+class TicketHelperRunner(IRunnerV2):
+    """Polls the review (center) server and syncs the status of pending tickets for all known ticket types."""
+
+    def __init__(self, domain_name: str):
+        self._domain_name = domain_name
+
+    def _update_ticket_status(self, resource) -> bool:
+        uuid = getattr(resource, 'ticket_uuid')
+        client = ReviewServiceClient.from_participant(domain_name=self._domain_name)
+        resp = client.get_ticket(uuid)
+        status = TicketStatus(review_pb2.ReviewStatus.Name(resp.status))
+        if status in [TicketStatus.APPROVED, TicketStatus.DECLINED]:
+            setattr(resource, 'ticket_status', status)
+            return True
+
+        return False
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        del context
+
+        ticket_output = TicketHelperOutput()
+        for ttype, orm in REVIEW_ORM_MAPPER.items():
+            ttype_name = review_pb2.TicketType.Name(ttype)
+            updated_ticket = ticket_output.updated_ticket[ttype_name].ids
+            unupdated_ticket = ticket_output.unupdated_ticket[ttype_name].ids
+            failed_ticket = ticket_output.failed_ticket[ttype_name].ids
+            with db.session_scope() as session:
+                resources = session.query(orm).filter_by(ticket_status=TicketStatus.PENDING).all()
+                for resource in resources:
+                    try:
+                        if self._update_ticket_status(resource):
+                            updated_ticket.append(resource.id)
+                        else:
+                            unupdated_ticket.append(resource.id)
+                    except Exception:  # pylint: disable=broad-except
+                        failed_ticket.append(resource.id)
+                session.commit()
+            logging.info(f'ticket routine for {ttype_name}:')
+            logging.info(f'  updated_ticket {updated_ticket}')
+            logging.info(f'  unupdated_ticket {unupdated_ticket}')
+            logging.info(f'  failed_ticket {failed_ticket}')
+
+        return (RunnerStatus.DONE, RunnerOutput(ticket_helper_output=ticket_output))
diff --git a/web_console_v2/api/fedlearner_webconsole/review/runners_test.py b/web_console_v2/api/fedlearner_webconsole/review/runners_test.py
new file mode 100644
index 000000000..230228eeb
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/review/runners_test.py
@@ -0,0 +1,75 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from unittest.mock import patch, MagicMock, call +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto import review_pb2 +from fedlearner_webconsole.review.runners import TicketHelperRunner +from fedlearner_webconsole.composer.context import RunnerContext, RunnerInput +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus + + +class TicketHelperRunnerTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.review.runners.ReviewServiceClient') + def test_run(self, mock_review_service_client: MagicMock): + with db.session_scope() as session: + session.add( + Participant(id=111, + name='p1', + domain_name='fl-test1.com', + ticket_status=TicketStatus.PENDING, + ticket_uuid='u12345')) + session.add( + Participant(id=222, + name='p2', + domain_name='fl-test2.com', + ticket_status=TicketStatus.APPROVED, + ticket_uuid='u22345')) + session.add( + Participant(id=333, + name='p3', + domain_name='fl-test3.com', + ticket_status=TicketStatus.DECLINED, + ticket_uuid='u32345')) + session.add( + Participant(id=444, + name='p1', + domain_name='fl-test4.com', + ticket_status=TicketStatus.PENDING, + ticket_uuid='u42345')) + session.commit() + + client = MagicMock() + mock_review_service_client.from_participant.return_value = client + client.get_ticket.side_effect = [ + review_pb2.Ticket(status=review_pb2.ReviewStatus.APPROVED), + review_pb2.Ticket(status=review_pb2.ReviewStatus.PENDING) + ] + + runner = TicketHelperRunner(domain_name='fl-central.com') + _, output = runner.run(RunnerContext(0, RunnerInput())) + + mock_review_service_client.from_participant.assert_called_with(domain_name='fl-central.com') + client.get_ticket.assert_has_calls(calls=[call('u12345'), call('u42345')]) + self.assertEqual(output.ticket_helper_output.updated_ticket['CREATE_PARTICIPANT'].ids, [111]) + self.assertEqual(output.ticket_helper_output.unupdated_ticket['CREATE_PARTICIPANT'].ids, [444]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/review/ticket_helper.py b/web_console_v2/api/fedlearner_webconsole/review/ticket_helper.py new file mode 100644 index 000000000..e610d39d0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/review/ticket_helper.py @@ -0,0 +1,122 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from abc import ABC, abstractmethod +import json +from typing import Callable +from sqlalchemy.orm import Session + +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.review import common +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.proto import review_pb2 +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.rpc.v2.review_service_client import ReviewServiceClient +from fedlearner_webconsole.utils.flask_utils import get_current_user + + +class ITicketHelper(ABC): + + @abstractmethod + def create_ticket(self, ticket_type: review_pb2.TicketType, details: review_pb2.TicketDetails) -> review_pb2.Ticket: + raise NotImplementedError() + + @abstractmethod + def get_ticket(self, uuid: str) -> review_pb2.Ticket: + raise NotImplementedError() + + @abstractmethod + def validate_ticket(self, uuid: str, validate_fn: Callable[[review_pb2.Ticket], bool]) -> bool: + raise NotImplementedError() + + +def _get_model_from_ticket_type(ticket_type: review_pb2.TicketType) -> db.Model: + model = common.REVIEW_ORM_MAPPER.get(ticket_type) + if model is None: + raise InvalidArgumentException(details=f'failed to get orm.Model for {review_pb2.TicketType.Name(ticket_type)}') + return model + + +class NoCenterServerTicketHelper(ITicketHelper): + + def __init__(self, session: Session): + self._session = session + + def create_ticket(self, ticket_type: review_pb2.TicketType, details: review_pb2.TicketDetails) -> review_pb2.Ticket: + model = _get_model_from_ticket_type(ticket_type) + # TODO(wangsen.0914): add extension for filter related resources other than uuid. + resource = self._session.query(model).filter_by(uuid=details.uuid).first() + if resource is None: + raise InvalidArgumentException(details=f'failed to get resource with {details.uuid}') + resource.ticket_status = TicketStatus(review_pb2.ReviewStatus.Name(review_pb2.ReviewStatus.APPROVED)) + resource.ticket_uuid = common.NO_CENTRAL_SERVER_UUID + return review_pb2.Ticket(type=ticket_type, + details=details, + uuid=common.NO_CENTRAL_SERVER_UUID, + status=review_pb2.ReviewStatus.APPROVED, + review_strategy=review_pb2.ReviewStrategy.AUTO) + + def get_ticket(self, uuid: str) -> review_pb2.Ticket: + return review_pb2.Ticket(uuid=uuid, + type=review_pb2.TicketType.UNKOWN_TYPE, + status=review_pb2.ReviewStatus.APPROVED, + review_strategy=review_pb2.ReviewStrategy.AUTO) + + def validate_ticket(self, uuid: str, validate_fn: Callable[[review_pb2.Ticket], bool]) -> bool: + del validate_fn # ignore validate_fn, because center server is not configured. + + if uuid != common.NO_CENTRAL_SERVER_UUID: + return False + return True + + +class CenterServerTicketHelper(ITicketHelper): + + def __init__(self, session: Session, domain_name: str): + self._session = session + self._domain_name = domain_name + + def create_ticket(self, ticket_type: review_pb2.TicketType, details: review_pb2.TicketDetails) -> review_pb2.Ticket: + model = _get_model_from_ticket_type(ticket_type) + # TODO(wangsen.0914): add extension for filter related resources other than uuid. 
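+        # The ticket's target resource must already exist locally; it is looked up
+        # by uuid so that its ticket_status and ticket_uuid can be synced from the
+        # center server's reply below.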
+ resource = self._session.query(model).filter_by(uuid=details.uuid).first() + if resource is None: + raise InvalidArgumentException(details=f'failed to get resource with {details.uuid}') + client = ReviewServiceClient.from_participant(domain_name=self._domain_name) + current_user = get_current_user() + creator_username = current_user.username if current_user else None + ticket = client.create_ticket(ticket_type, creator_username, details) + resource.ticket_status = TicketStatus(review_pb2.ReviewStatus.Name(ticket.status)) + resource.ticket_uuid = ticket.uuid + return ticket + + def get_ticket(self, uuid: str) -> review_pb2.Ticket: + client = ReviewServiceClient.from_participant(domain_name=self._domain_name) + return client.get_ticket(uuid) + + def validate_ticket(self, uuid: str, validate_fn: Callable[[review_pb2.Ticket], bool]) -> bool: + ticket = self.get_ticket(uuid) + if ticket.uuid != uuid: + return False + return validate_fn(ticket) + + +def get_ticket_helper(session: Session) -> ITicketHelper: + configuration = json.loads(Flag.REVIEW_CENTER_CONFIGURATION.value) + if not configuration: + return NoCenterServerTicketHelper(session) + domain_name = configuration['domain_name'] + return CenterServerTicketHelper(session, domain_name) diff --git a/web_console_v2/api/fedlearner_webconsole/review/ticket_helper_test.py b/web_console_v2/api/fedlearner_webconsole/review/ticket_helper_test.py new file mode 100644 index 000000000..a9d4dd6e4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/review/ticket_helper_test.py @@ -0,0 +1,170 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +import unittest +from unittest.mock import call, patch, MagicMock +from fedlearner_webconsole.exceptions import InvalidArgumentException +from fedlearner_webconsole.review.common import NO_CENTRAL_SERVER_UUID +from fedlearner_webconsole.review.ticket_helper import (NoCenterServerTicketHelper, CenterServerTicketHelper, + get_ticket_helper) +from fedlearner_webconsole.proto import review_pb2 +from fedlearner_webconsole.db import db +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.utils.base_model.review_ticket_model import ReviewTicketModel, TicketStatus +from testing.common import NoWebServerTestCase + + +class FakeModel(db.Model, ReviewTicketModel): + __tablename__ = 'fake_model' + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + uuid = db.Column(db.String(255), nullable=True, comment='uuid') + + +class NoCenterServerTicketHelperTest(NoWebServerTestCase): + + def test_get_ticket(self): + with db.session_scope() as session: + ticket = NoCenterServerTicketHelper(session).get_ticket(uuid='u12345') + self.assertEqual( + ticket, + review_pb2.Ticket(uuid='u12345', + type=review_pb2.TicketType.UNKOWN_TYPE, + status=review_pb2.ReviewStatus.APPROVED, + review_strategy=review_pb2.ReviewStrategy.AUTO)) + + @patch('fedlearner_webconsole.review.common.REVIEW_ORM_MAPPER', + {review_pb2.TicketType.CREATE_PARTICIPANT: FakeModel}) + def test_create_ticket(self): + with db.session_scope() as session: + self.assertRaises(InvalidArgumentException, + NoCenterServerTicketHelper(session).create_ticket, + ticket_type=review_pb2.TicketType.CREATE_NODE, + details=review_pb2.TicketDetails(uuid='u1234')) + + with db.session_scope() as session: + fake_data = FakeModel(uuid='u1234') + session.add(fake_data) + session.flush() + ticket = NoCenterServerTicketHelper(session).create_ticket( + ticket_type=review_pb2.TicketType.CREATE_PARTICIPANT, details=review_pb2.TicketDetails(uuid='u1234')) + session.flush() + self.assertEqual(fake_data.ticket_status, TicketStatus.APPROVED) + session.commit() + with db.session_scope() as session: + resource = session.query(FakeModel).filter_by(uuid='u1234').first() + self.assertEqual(resource.ticket_status, TicketStatus.APPROVED) + self.assertEqual(resource.ticket_uuid, NO_CENTRAL_SERVER_UUID) + self.assertEqual( + ticket, + review_pb2.Ticket(uuid=NO_CENTRAL_SERVER_UUID, + type=review_pb2.TicketType.CREATE_PARTICIPANT, + details=review_pb2.TicketDetails(uuid='u1234'), + status=review_pb2.ReviewStatus.APPROVED, + review_strategy=review_pb2.ReviewStrategy.AUTO)) + + def test_validate_ticket(self): + with db.session_scope() as session: + # ignore validate_fn, because center server is not configured. 
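+            # This validate_fn always fails on purpose: without a center server,
+            # only the sentinel NO_CENTRAL_SERVER_UUID check should decide the result.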
+ validate_fn = lambda _: False + self.assertFalse(NoCenterServerTicketHelper(session).validate_ticket('u1234', validate_fn)) + self.assertTrue(NoCenterServerTicketHelper(session).validate_ticket(NO_CENTRAL_SERVER_UUID, validate_fn)) + + +class CenterServerTicketHelperTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.review.ticket_helper.ReviewServiceClient') + def test_validate_ticket(self, mock_review_service_client: MagicMock): + client = MagicMock() + mock_review_service_client.from_participant.return_value = client + client.get_ticket.side_effect = [ + review_pb2.Ticket(uuid='u1234', + status=review_pb2.ReviewStatus.APPROVED, + details=review_pb2.TicketDetails(uuid='u1234')), + review_pb2.Ticket(uuid='u234', + status=review_pb2.ReviewStatus.APPROVED, + details=review_pb2.TicketDetails(uuid='u2345')), + review_pb2.Ticket(uuid='u1234', + status=review_pb2.ReviewStatus.APPROVED, + details=review_pb2.TicketDetails(uuid='u2345')), + ] + + with db.session_scope() as session: + validate_fn = lambda t: t.details.uuid == 'u2345' + self.assertFalse(CenterServerTicketHelper(session, 'fl-central.com').validate_ticket('u1234', validate_fn)) + self.assertFalse(CenterServerTicketHelper(session, 'fl-central.com').validate_ticket('u1234', validate_fn)) + self.assertTrue(CenterServerTicketHelper(session, 'fl-central.com').validate_ticket('u1234', validate_fn)) + + mock_review_service_client.from_participant.assert_called_with(domain_name='fl-central.com') + client.get_ticket.assert_has_calls(calls=[call('u1234'), call('u1234'), call('u1234')]) + + @patch('fedlearner_webconsole.review.common.REVIEW_ORM_MAPPER', + {review_pb2.TicketType.CREATE_PARTICIPANT: FakeModel}) + @patch('fedlearner_webconsole.review.ticket_helper.get_current_user', lambda: User(username='creator')) + @patch('fedlearner_webconsole.review.ticket_helper.ReviewServiceClient') + def test_create_ticket(self, mock_review_service_client: MagicMock): + with db.session_scope() as session: + self.assertRaises(InvalidArgumentException, + CenterServerTicketHelper(session, 'fl-central').create_ticket, + ticket_type=review_pb2.TicketType.CREATE_NODE, + details=review_pb2.TicketDetails(uuid='u1234')) + + client = MagicMock() + mock_review_service_client.from_participant.return_value = client + client.create_ticket.return_value = review_pb2.Ticket(uuid='u4321', + status=review_pb2.ReviewStatus.PENDING, + details=review_pb2.TicketDetails(uuid='u1234')) + + with db.session_scope() as session: + fake_data = FakeModel(uuid='u1234') + session.add(fake_data) + session.flush() + CenterServerTicketHelper(session, 'fl-central.com').create_ticket( + ticket_type=review_pb2.TicketType.CREATE_PARTICIPANT, details=review_pb2.TicketDetails(uuid='u1234')) + session.flush() + self.assertEqual(fake_data.ticket_status, TicketStatus.PENDING) + session.commit() + + with db.session_scope() as session: + resource = session.query(FakeModel).filter_by(uuid='u1234').first() + self.assertEqual(resource.ticket_status, TicketStatus.PENDING) + self.assertEqual(resource.ticket_uuid, 'u4321') + + mock_review_service_client.from_participant.assert_called_with(domain_name='fl-central.com') + self.assertEqual([call[0][1] for call in client.create_ticket.call_args_list][0], 'creator') + + +class GetTicketHelperTest(unittest.TestCase): + + @patch('fedlearner_webconsole.flag.models.Flag.REVIEW_CENTER_CONFIGURATION.value', '{}') + def test_no_center_server(self): + with db.session_scope() as session: + self.assertIsInstance(get_ticket_helper(session), 
NoCenterServerTicketHelper)
+
+    @patch('fedlearner_webconsole.flag.models.Flag.REVIEW_CENTER_CONFIGURATION.value',
+           '{"domain_name": "fl-central.com"}')
+    def test_with_center_server(self):
+        with db.session_scope() as session:
+            self.assertIsInstance(get_ticket_helper(session), CenterServerTicketHelper)
+
+    @patch('fedlearner_webconsole.flag.models.Flag.REVIEW_CENTER_CONFIGURATION.value', '{"dom_name": "fl-central.com"}')
+    def test_with_invalid_center_server(self):
+        with db.session_scope() as session:
+            self.assertRaises(KeyError, get_ticket_helper, session)
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.DEBUG)
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/rpc/BUILD.bazel
new file mode 100644
index 000000000..fac35558a
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/rpc/BUILD.bazel
@@ -0,0 +1,152 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+package(default_visibility = ["//web_console_v2/api:console_api_package"])
+
+py_library(
+    name = "auth_lib",
+    srcs = ["auth.py"],
+    imports = ["../.."],
+)
+
+py_test(
+    name = "auth_lib_test",
+    srcs = [
+        "auth_test.py",
+    ],
+    imports = ["../../.."],
+    main = "auth_test.py",
+    deps = [
+        ":auth_lib",
+    ],
+)
+
+py_library(
+    name = "client_interceptor_lib",
+    srcs = ["client_interceptor.py"],
+    imports = ["../.."],
+    deps = [
+        "//web_console_v2/api/fedlearner_webconsole/middleware:request_id_lib",
+        "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio",
+    ],
+)
+
+py_library(
+    name = "client_lib",
+    srcs = ["client.py"],
+    imports = ["../.."],
+    deps = [
+        ":client_interceptor_lib",
+        "//web_console_v2/api:envs_lib",
+        "//web_console_v2/api/fedlearner_webconsole:exceptions_lib",
+        "//web_console_v2/api/fedlearner_webconsole/rpc/v2:client_base_lib",
+        "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib",
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+        "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio",
+        "@com_google_protobuf//:protobuf_python",
+    ],
+)
+
+py_test(
+    name = "client_lib_test",
+    # TODO(liuhehan): change it back to small when the dataset model is lightweight enough.
+ size = "medium", + srcs = [ + "client_test.py", + ], + imports = ["../../.."], + main = "client_test.py", + deps = [ + ":client_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:request_id_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "server_lib", + srcs = ["server.py"], + imports = ["../.."], + deps = [ + ":auth_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:service_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:request_id_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:service_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:auth_server_interceptor_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:project_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:resource_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole/serving:services_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:handlers_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:es_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:kibana_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_grpcio_reflection//:pkg", + ], +) + +py_test( + name = "server_lib_test", + size = "small", + srcs = [ + "server_test.py", + ], + imports = ["../../.."], + main = "server_test.py", + deps = [ + ":auth_lib", + ":server_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:request_id_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/auth.py b/web_console_v2/api/fedlearner_webconsole/rpc/auth.py new file mode 100644 index 000000000..93412c039 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/auth.py @@ -0,0 +1,41 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Optional + +X_HOST_HEADER = 'x-host' +SSL_CLIENT_SUBJECT_DN_HEADER = 'ssl-client-subject-dn' +PROJECT_NAME_HEADER = 'project-name' + + +def get_common_name(subject_dn: str) -> Optional[str]: + """Gets common name from x.509 + + Args: + subject_dn (str): ssl-client-subject-dn from header + + Returns: + Optional[str]: common name if exists + """ + + # ssl-client-subject-dn example: + # CN=*.fl-xxx.com,OU=security,O=security,L=beijing,ST=beijing,C=CN + for s in subject_dn.split(','): + if s.find('=') == -1: + return None + k, v = s.split('=', maxsplit=1) + if k == 'CN': + return v + return None diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/auth_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/auth_test.py new file mode 100644 index 000000000..59c3b6f98 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/auth_test.py @@ -0,0 +1,34 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from fedlearner_webconsole.rpc.auth import get_common_name
+
+
+class AuthTest(unittest.TestCase):
+
+    def test_get_common_name(self):
+        self.assertIsNone(get_common_name('invalid'))
+        self.assertIsNone(get_common_name('CN*.fl-xxx.com,C=CN'))
+        self.assertEqual(get_common_name('CN=*.fl-xxx.com,OU=security,O=security,L=beijing,ST=beijing,C=CN'),
+                         '*.fl-xxx.com')
+        self.assertEqual(get_common_name('CN=aaa.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN'),
+                         'aaa.fedlearner.net')
+        self.assertEqual(get_common_name('CN==*.fl-xxx.com,C=CN'), '=*.fl-xxx.com')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/client.py b/web_console_v2/api/fedlearner_webconsole/rpc/client.py
index 726568ad6..829778325 100644
--- a/web_console_v2/api/fedlearner_webconsole/rpc/client.py
+++ b/web_console_v2/api/fedlearner_webconsole/rpc/client.py
@@ -1,4 +1,4 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,159 +17,212 @@
 import logging
 from functools import wraps
+from typing import Optional
 import grpc
+from google.protobuf import empty_pb2
 from envs import Envs
-from fedlearner_webconsole.exceptions import (
-    UnauthorizedException, InvalidArgumentException
-)
-from fedlearner_webconsole.proto import (
-    service_pb2, service_pb2_grpc, common_pb2
-)
-from fedlearner_webconsole.utils.decorators import retry_fn
-
-
-def _build_channel(url, authority):
-    """A helper function to build gRPC channel for easy testing."""
-    return grpc.insecure_channel(
-        target=url,
+from fedlearner_webconsole.utils.decorators.lru_cache import lru_cache
+from fedlearner_webconsole.utils.decorators.retry import retry_fn
+from fedlearner_webconsole.exceptions import (UnauthorizedException, InvalidArgumentException)
+from fedlearner_webconsole.proto import (dataset_pb2, service_pb2, service_pb2_grpc, common_pb2)
+from fedlearner_webconsole.proto.service_pb2_grpc import WebConsoleV2ServiceStub
+from fedlearner_webconsole.proto.serving_pb2 import ServingServiceType
+from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction, TransactionData
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition
+from fedlearner_webconsole.rpc.client_interceptor import ClientInterceptor
+from fedlearner_webconsole.rpc.v2.client_base import get_nginx_controller_url
+
+
+@lru_cache(timeout=60, maxsize=100)
+def _build_grpc_stub(egress_url: str, authority: str) -> WebConsoleV2ServiceStub:
+    """A helper function to build a gRPC stub with caching.
+
+    Note that because the stub is cached, the channel may break if the nginx controller gets restarted.
+    Caching stubs this way follows the official gRPC performance best practices: https://grpc.io/docs/guides/performance/
+
+    Args:
+        egress_url: nginx controller url in current cluster.
+        authority: ingress domain in current cluster.
+
+    Returns:
+        A gRPC service stub for calling the API.
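+
+    A minimal usage sketch (the authority value below is illustrative; the egress url is
+    the default used by the legacy client):
+
+        stub = _build_grpc_stub('fedlearner-stack-ingress-nginx-controller.default.svc:80',
+                                'fl-demo-client-auth.com')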
+ """ + channel = grpc.insecure_channel( + target=egress_url, # options defined at # https://github.com/grpc/grpc/blob/master/include/grpc/impl/codegen/grpc_types.h options=[('grpc.default_authority', authority)]) + channel = grpc.intercept_channel(channel, ClientInterceptor()) + return service_pb2_grpc.WebConsoleV2ServiceStub(channel) +# TODO(linfan.fine): refactor catch_and_fallback def catch_and_fallback(resp_class): + def decorator(f): + @wraps(f) def wrapper(*args, **kwargs): try: return f(*args, **kwargs) except grpc.RpcError as e: - return resp_class(status=common_pb2.Status( - code=common_pb2.STATUS_UNKNOWN_ERROR, msg=repr(e))) + return resp_class(status=common_pb2.Status(code=common_pb2.STATUS_UNKNOWN_ERROR, msg=repr(e))) return wrapper return decorator +def _need_retry_for_get(err: Exception) -> bool: + if not isinstance(err, grpc.RpcError): + return False + # No need to retry for NOT_FOUND + return err.code() != grpc.StatusCode.NOT_FOUND + + +def _default_need_retry(err: Exception) -> bool: + return isinstance(err, grpc.RpcError) + + class RpcClient(object): - def __init__(self, project_config, receiver_config): - self._project = project_config - self._receiver = receiver_config - self._auth_info = service_pb2.ProjAuthInfo( - project_name=self._project.name, - target_domain=self._receiver.domain_name, - auth_token=self._project.token) - - egress_url = 'fedlearner-stack-ingress-nginx-controller.default.svc:80' - for variable in self._project.variables: - if variable.name == 'EGRESS_URL': - egress_url = variable.value - break - self._client = service_pb2_grpc.WebConsoleV2ServiceStub( - _build_channel(egress_url, self._receiver.grpc_spec.authority)) + + def __init__(self, + egress_url: str, + authority: str, + x_host: str, + project_auth_info: Optional[service_pb2.ProjAuthInfo] = None): + """Inits rpc client. + + Args: + egress_url: nginx controller url in current cluster. + authority: ingress domain in current cluster. + x_host: ingress domain in target cluster, nginx will handle the + rewriting. + project_auth_info: info for project level authentication. 
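+
+        Prefer the classmethod constructors below over calling __init__ directly;
+        a hypothetical example (the domain name is illustrative):
+
+            client = RpcClient.from_participant(domain_name='fl-demo.com')
+            resp = client.check_connection()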
+ """ + self._x_host = x_host + self._project_auth_info = project_auth_info + + self._client = _build_grpc_stub(egress_url, authority) + + @classmethod + def from_project_and_participant(cls, project_name: str, project_token: str, domain_name: str): + # Builds auth info from project and receiver + auth_info = service_pb2.ProjAuthInfo(project_name=project_name, + target_domain=domain_name, + auth_token=project_token) + return cls(egress_url=get_nginx_controller_url(), + authority=gen_egress_authority(domain_name), + x_host=gen_x_host(domain_name), + project_auth_info=auth_info) + + @classmethod + def from_participant(cls, domain_name: str): + return cls(egress_url=get_nginx_controller_url(), + authority=gen_egress_authority(domain_name), + x_host=gen_x_host(domain_name)) def _get_metadata(self): - metadata = [] - x_host_prefix = 'fedlearner-webconsole-v2' - for variable in self._project.variables: - if variable.name == 'X_HOST': - x_host_prefix = variable.value - break - metadata.append(('x-host', '{}.{}'.format(x_host_prefix, - self._receiver.domain_name))) - for key, value in self._receiver.grpc_spec.extra_headers.items(): - metadata.append((key, value)) # metadata is a tuple of tuples - return tuple(metadata) + return tuple([('x-host', self._x_host)]) @catch_and_fallback(resp_class=service_pb2.CheckConnectionResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) + @retry_fn(retry_times=3, need_retry=_default_need_retry) def check_connection(self): - msg = service_pb2.CheckConnectionRequest(auth_info=self._auth_info) - response = self._client.CheckConnection( - request=msg, - metadata=self._get_metadata(), - timeout=Envs.GRPC_CLIENT_TIMEOUT) + msg = service_pb2.CheckConnectionRequest(auth_info=self._project_auth_info) + response = self._client.CheckConnection(request=msg, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('check_connection request error: %s', - response.status.msg) + logging.debug('check_connection request error: %s', response.status.msg) + return response + + @catch_and_fallback(resp_class=service_pb2.CheckPeerConnectionResponse) + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def check_peer_connection(self): + # TODO(taoyanting): double check + msg = service_pb2.CheckPeerConnectionRequest() + response = self._client.CheckPeerConnection(request=msg, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) + if response.status.code != common_pb2.STATUS_SUCCESS: + logging.debug('check_connection request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.UpdateWorkflowStateResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) - def update_workflow_state(self, name, state, target_state, - transaction_state, uuid, forked_from_uuid, - extra=''): - msg = service_pb2.UpdateWorkflowStateRequest( - auth_info=self._auth_info, - workflow_name=name, - state=state.value, - target_state=target_state.value, - transaction_state=transaction_state.value, - uuid=uuid, - forked_from_uuid=forked_from_uuid, - extra=extra - ) - response = self._client.UpdateWorkflowState( - request=msg, metadata=self._get_metadata(), - timeout=Envs.GRPC_CLIENT_TIMEOUT) + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def update_workflow_state(self, name, state, target_state, transaction_state, uuid, forked_from_uuid, extra=''): + msg = service_pb2.UpdateWorkflowStateRequest(auth_info=self._project_auth_info, + 
workflow_name=name, + state=state.value, + target_state=target_state.value, + transaction_state=transaction_state.value, + uuid=uuid, + forked_from_uuid=forked_from_uuid, + extra=extra) + response = self._client.UpdateWorkflowState(request=msg, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('update_workflow_state request error: %s', - response.status.msg) + logging.debug('update_workflow_state request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.GetWorkflowResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) - def get_workflow(self, name): - msg = service_pb2.GetWorkflowRequest(auth_info=self._auth_info, - workflow_name=name) + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def get_workflow(self, uuid, name): + msg = service_pb2.GetWorkflowRequest(auth_info=self._project_auth_info, workflow_name=name, workflow_uuid=uuid) response = self._client.GetWorkflow(request=msg, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('get_workflow request error: %s', - response.status.msg) + logging.debug('get_workflow request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.UpdateWorkflowResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) - def update_workflow(self, name, config): - msg = service_pb2.UpdateWorkflowRequest(auth_info=self._auth_info, + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def update_workflow(self, uuid, name, config): + msg = service_pb2.UpdateWorkflowRequest(auth_info=self._project_auth_info, workflow_name=name, + workflow_uuid=uuid, config=config) response = self._client.UpdateWorkflow(request=msg, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('update_workflow request error: %s', - response.status.msg) + logging.debug('update_workflow request error: %s', response.status.msg) + return response + + @catch_and_fallback(resp_class=service_pb2.InvalidateWorkflowResponse) + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def invalidate_workflow(self, uuid: str): + msg = service_pb2.InvalidateWorkflowRequest(auth_info=self._project_auth_info, workflow_uuid=uuid) + response = self._client.InvalidateWorkflow(request=msg, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) + if response.status.code != common_pb2.STATUS_SUCCESS: + logging.debug('invalidate_workflow request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.GetJobMetricsResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) + @retry_fn(retry_times=3, need_retry=_default_need_retry) def get_job_metrics(self, job_name): - msg = service_pb2.GetJobMetricsRequest(auth_info=self._auth_info, - job_name=job_name) + msg = service_pb2.GetJobMetricsRequest(auth_info=self._project_auth_info, job_name=job_name) response = self._client.GetJobMetrics(request=msg, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('get_job_metrics request error: %s', - response.status.msg) + logging.debug('get_job_metrics request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.GetJobMetricsResponse) - @retry_fn(retry_times=3, 
needed_exceptions=[grpc.RpcError]) + @retry_fn(retry_times=3, need_retry=_default_need_retry) def get_job_kibana(self, job_name, json_args): - msg = service_pb2.GetJobKibanaRequest(auth_info=self._auth_info, - job_name=job_name, - json_args=json_args) + msg = service_pb2.GetJobKibanaRequest(auth_info=self._project_auth_info, job_name=job_name, json_args=json_args) response = self._client.GetJobKibana(request=msg, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) @@ -179,14 +232,13 @@ def get_job_kibana(self, job_name, json_args): raise UnauthorizedException(status.msg) if status.code == common_pb2.STATUS_INVALID_ARGUMENT: raise InvalidArgumentException(status.msg) - logging.debug('get_job_kibana request error: %s', - response.status.msg) + logging.debug('get_job_kibana request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.GetJobEventsResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) + @retry_fn(retry_times=3, need_retry=_default_need_retry) def get_job_events(self, job_name, start_time, max_lines): - msg = service_pb2.GetJobEventsRequest(auth_info=self._auth_info, + msg = service_pb2.GetJobEventsRequest(auth_info=self._project_auth_info, job_name=job_name, start_time=start_time, max_lines=max_lines) @@ -195,21 +247,135 @@ def get_job_events(self, job_name, start_time, max_lines): timeout=Envs.GRPC_CLIENT_TIMEOUT) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('get_job_events request error: %s', - response.status.msg) + logging.debug('get_job_events request error: %s', response.status.msg) return response @catch_and_fallback(resp_class=service_pb2.CheckJobReadyResponse) - @retry_fn(retry_times=3, needed_exceptions=[grpc.RpcError]) + @retry_fn(retry_times=3, need_retry=_default_need_retry) def check_job_ready(self, job_name: str) \ -> service_pb2.CheckJobReadyResponse: - msg = service_pb2.CheckJobReadyRequest(auth_info=self._auth_info, - job_name=job_name) + msg = service_pb2.CheckJobReadyRequest(auth_info=self._project_auth_info, job_name=job_name) response = self._client.CheckJobReady(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT, metadata=self._get_metadata()) if response.status.code != common_pb2.STATUS_SUCCESS: - logging.debug('check_job_ready request error: %s', - response.status.msg) + logging.debug('check_job_ready request error: %s', response.status.msg) + return response + + @catch_and_fallback(resp_class=service_pb2.TwoPcResponse) + @retry_fn(retry_times=3, need_retry=_default_need_retry, delay=200, backoff=2) + def run_two_pc(self, transaction_uuid: str, two_pc_type: TwoPcType, action: TwoPcAction, + data: TransactionData) -> service_pb2.TwoPcResponse: + msg = service_pb2.TwoPcRequest(auth_info=self._project_auth_info, + transaction_uuid=transaction_uuid, + type=two_pc_type, + action=action, + data=data) + response = self._client.Run2Pc(request=msg, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) + return response + + @catch_and_fallback(resp_class=service_pb2.ServingServiceResponse) + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def operate_serving_service(self, operation_type: ServingServiceType, serving_model_uuid: str, model_uuid: str, + name: str): + msg = service_pb2.ServingServiceRequest(auth_info=self._project_auth_info, + operation_type=operation_type, + serving_model_uuid=serving_model_uuid, + model_uuid=model_uuid, + serving_model_name=name) + response = self._client.ServingServiceManagement(request=msg, + 
metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) + + if response.status.code != common_pb2.STATUS_SUCCESS: + logging.debug('serving_service request error: %s', response.status.msg) + return response + + @catch_and_fallback(resp_class=service_pb2.ServingServiceInferenceResponse) + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def inference_serving_service(self, serving_model_uuid: str, example_id: str): + msg = service_pb2.ServingServiceInferenceRequest(auth_info=self._project_auth_info, + serving_model_uuid=serving_model_uuid, + example_id=example_id) + response = self._client.ServingServiceInference(request=msg, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) + + if response.status.code != common_pb2.STATUS_SUCCESS: + logging.debug('serving_service request error: %s', response.status.msg) return response + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def get_model_job(self, model_job_uuid: str, need_metrics: bool = False) -> service_pb2.GetModelJobResponse: + request = service_pb2.GetModelJobRequest(auth_info=self._project_auth_info, + uuid=model_job_uuid, + need_metrics=need_metrics) + return self._client.GetModelJob(request, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def get_model_job_group(self, model_job_group_uuid: str) -> service_pb2.GetModelJobGroupResponse: + request = service_pb2.GetModelJobGroupRequest(auth_info=self._project_auth_info, uuid=model_job_group_uuid) + return self._client.GetModelJobGroup(request, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def update_model_job_group(self, model_job_group_uuid: str, + config: WorkflowDefinition) -> service_pb2.UpdateModelJobGroupResponse: + request = service_pb2.UpdateModelJobGroupRequest(auth_info=self._project_auth_info, + uuid=model_job_group_uuid, + config=config) + return self._client.UpdateModelJobGroup(request, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def list_participant_datasets(self, + kind: Optional[str] = None, + uuid: Optional[str] = None) -> service_pb2.ListParticipantDatasetsResponse: + request = service_pb2.ListParticipantDatasetsRequest(auth_info=self._project_auth_info) + if kind is not None: + request.kind = kind + if uuid is not None: + request.uuid = uuid + return self._client.ListParticipantDatasets(request, + metadata=self._get_metadata(), + timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_dataset_job(self, uuid: str) -> service_pb2.GetDatasetJobResponse: + request = service_pb2.GetDatasetJobRequest(auth_info=self._project_auth_info, uuid=uuid) + return self._client.GetDatasetJob(request, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def create_dataset_job(self, dataset_job: dataset_pb2.DatasetJob, ticket_uuid: str, + dataset: dataset_pb2.Dataset) -> empty_pb2.Empty: + request = service_pb2.CreateDatasetJobRequest(auth_info=self._project_auth_info, + dataset_job=dataset_job, + ticket_uuid=ticket_uuid, + dataset=dataset) + return self._client.CreateDatasetJob(request, metadata=self._get_metadata(), timeout=Envs.GRPC_CLIENT_TIMEOUT) + + +def gen_egress_authority(domain_name: str) -> str: + """generate egress host + Args: + domain_name: + ex: 'test-1.com' 
+ Returns: + authority: + ex:'test-1-client-auth.com' + """ + domain_name_prefix = domain_name.rpartition('.')[0] + return f'{domain_name_prefix}-client-auth.com' + + +def gen_x_host(domain_name: str) -> str: + """generate x host + Args: + domain_name: + ex: 'test-1.com' + Returns: + x-host: + ex:'fedlearner-webconsole-v2.test-1.com' + """ + return f'fedlearner-webconsole-v2.{domain_name}' diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/client_interceptor.py b/web_console_v2/api/fedlearner_webconsole/rpc/client_interceptor.py new file mode 100644 index 000000000..9b9719c77 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/client_interceptor.py @@ -0,0 +1,61 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +from typing import NamedTuple, Optional, Sequence, Tuple, Union + +import grpc + +from fedlearner_webconsole.middleware.request_id import GrpcRequestIdMiddleware + + +# pylint: disable=line-too-long +# Ref: https://github.com/d5h-foss/grpc-interceptor/blob/master/src/grpc_interceptor/client.py#L9 +class _ClientCallDetailsFields(NamedTuple): + method: str + timeout: Optional[float] + metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] + credentials: Optional[grpc.CallCredentials] + wait_for_ready: Optional[bool] + compression: Optional[grpc.Compression] + + +class ClientCallDetails(_ClientCallDetailsFields, grpc.ClientCallDetails): + pass + + +class ClientInterceptor(grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, grpc.StreamStreamClientInterceptor): + + def _intercept_call(self, continuation, client_call_details, request_or_iterator): + metadata = [] + if client_call_details.metadata is not None: + metadata = list(client_call_details.metadata) + # Metadata of ClientCallDetails can not be set directly + new_details = ClientCallDetails(client_call_details.method, client_call_details.timeout, + GrpcRequestIdMiddleware.add_header(metadata), client_call_details.credentials, + client_call_details.wait_for_ready, client_call_details.compression) + return continuation(new_details, request_or_iterator) + + def intercept_unary_unary(self, continuation, client_call_details, request): + return self._intercept_call(continuation, client_call_details, request) + + def intercept_unary_stream(self, continuation, client_call_details, request): + return self._intercept_call(continuation, client_call_details, request) + + def intercept_stream_unary(self, continuation, client_call_details, request_iterator): + return self._intercept_call(continuation, client_call_details, request_iterator) + + def intercept_stream_stream(self, continuation, client_call_details, request_iterator): + return self._intercept_call(continuation, client_call_details, request_iterator) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/client_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/client_test.py new file mode 100644 index 000000000..e772d49bb 
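A client interceptor of this shape takes effect only once it is wrapped around a channel. A minimal sketch of the wiring, assuming a hypothetical target address (only `ClientInterceptor` and the generated stub come from this patch):

```python
# Illustrative only: attaching the client interceptor to a channel.
import grpc

from fedlearner_webconsole.proto import service_pb2_grpc
from fedlearner_webconsole.rpc.client_interceptor import ClientInterceptor

channel = grpc.insecure_channel('localhost:1990')  # hypothetical target
intercepted_channel = grpc.intercept_channel(channel, ClientInterceptor())
stub = service_pb2_grpc.WebConsoleV2ServiceStub(intercepted_channel)
# Every unary/stream call through `stub` now carries the request-id header
# added by GrpcRequestIdMiddleware.add_header.
```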
--- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/client_test.py @@ -0,0 +1,281 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from unittest.mock import patch + +import grpc_testing +from grpc import StatusCode +from datetime import datetime +from google.protobuf.wrappers_pb2 import BoolValue +from google.protobuf import empty_pb2 + +from fedlearner_webconsole.middleware.request_id import GrpcRequestIdMiddleware +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction, TransactionData, CreateModelJobData +from fedlearner_webconsole.participant.models import Participant + +from fedlearner_webconsole.proto.service_pb2 import DESCRIPTOR, CheckPeerConnectionRequest, \ + CheckPeerConnectionResponse, CreateDatasetJobRequest, TwoPcRequest, TwoPcResponse +from fedlearner_webconsole.rpc.client import RpcClient, _build_grpc_stub +from fedlearner_webconsole.project.models import Project as ProjectModel +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.proto.common_pb2 import (Status, StatusCode as FedLearnerStatusCode) +from fedlearner_webconsole.proto.dataset_pb2 import ParticipantDatasetRef +from fedlearner_webconsole.proto.service_pb2 import (CheckConnectionRequest, ProjAuthInfo) +from fedlearner_webconsole.proto.service_pb2 import CheckConnectionResponse, \ + CheckJobReadyResponse, CheckJobReadyRequest, ListParticipantDatasetsRequest, ListParticipantDatasetsResponse, \ + GetModelJobRequest, GetModelJobResponse +from fedlearner_webconsole.proto import dataset_pb2, project_pb2 +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.dataset.models import DatasetFormat, DatasetKindV2 +from testing.rpc.client import RpcClientTestCase +from testing.fake_time_patcher import FakeTimePatcher + +TARGET_SERVICE = DESCRIPTOR.services_by_name['WebConsoleV2Service'] + + +class RpcClientTest(RpcClientTestCase): + _TEST_PROJECT_NAME = 'test-project' + _TEST_RECEIVER_NAME = 'test-receiver' + _TEST_URL = 'localhost:123' + _X_HOST_HEADER_KEY = 'x-host' + _TEST_X_HOST = 'fedlearner-webconsole-v2.fl-test.com' + _TEST_SELF_DOMAIN_NAME = 'fl-test-self.com' + + @classmethod + def setUpClass(cls): + super().setUpClass() + participant = Participant(name=cls._TEST_RECEIVER_NAME, domain_name='fl-test.com') + job = Job(name='test-job') + + cls._participant = participant + cls._project = ProjectModel(name=cls._TEST_PROJECT_NAME) + cls._job = job + + # Builds a testing channel + cls._fake_channel = grpc_testing.channel(DESCRIPTOR.services_by_name.values(), grpc_testing.strict_real_time()) + cls._fake_channel_patcher = patch('fedlearner_webconsole.rpc.client.grpc.insecure_channel') + cls._mock_build_channel = cls._fake_channel_patcher.start() + cls._mock_build_channel.return_value = cls._fake_channel + + @classmethod + def tearDownClass(cls): + 
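# Undo the grpc.insecure_channel patch installed in setUpClass.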
+        cls._fake_channel_patcher.stop()
+        super().tearDownClass()
+
+    def setUp(self):
+        super().setUp()
+        self._client = RpcClient.from_project_and_participant(self._project.name, self._project.token,
+                                                              self._participant.domain_name)
+
+    def test_build_grpc_stub(self):
+        fake_timer = FakeTimePatcher()
+        fake_timer.start()
+        authority = 'fl-test-client-auth.com'
+
+        # Verifies the channel that was built implicitly when setUp() constructed the client
+        self._mock_build_channel.assert_called_once_with(
+            options=[('grpc.default_authority', 'fl-test-client-auth.com')],
+            target='fedlearner-stack-ingress-nginx-controller.default.svc:80')
+        self._mock_build_channel.reset_mock()
+
+        _build_grpc_stub(self._TEST_URL, authority)
+        self._mock_build_channel.assert_called_once_with(options=[('grpc.default_authority', authority)],
+                                                         target=self._TEST_URL)
+        _build_grpc_stub(self._TEST_URL, authority)
+        self.assertEqual(self._mock_build_channel.call_count, 1)
+        # Ticks 61 seconds forward so the cached stub times out
+        fake_timer.interrupt(61)
+        _build_grpc_stub(self._TEST_URL, authority)
+        self.assertEqual(self._mock_build_channel.call_count, 2)
+        fake_timer.stop()
+
+    @patch('fedlearner_webconsole.middleware.request_id.get_current_request_id')
+    def test_request_id_in_metadata(self, mock_get_current_request_id):
+        mock_get_current_request_id.return_value = 'test-request-id'
+
+        call = self.client_execution_pool.submit(self._client.check_connection)
+        invocation_metadata, request, rpc = self._fake_channel.take_unary_unary(
+            TARGET_SERVICE.methods_by_name['CheckConnection'])
+        self.assertIn((GrpcRequestIdMiddleware.REQUEST_HEADER_NAME, 'test-request-id'), invocation_metadata)
+        # We don't care about the result
+        rpc.terminate(response=CheckConnectionResponse(), code=StatusCode.OK, trailing_metadata=(), details=None)
+
+    def test_check_connection(self):
+        call = self.client_execution_pool.submit(self._client.check_connection)
+
+        invocation_metadata, request, rpc = self._fake_channel.take_unary_unary(
+            TARGET_SERVICE.methods_by_name['CheckConnection'])
+
+        self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata)
+        self.assertEqual(
+            request,
+            CheckConnectionRequest(auth_info=ProjAuthInfo(project_name=self._project.name,
+                                                          target_domain=self._participant.domain_name,
+                                                          auth_token=self._project.token)))
+
+        expected_status = Status(code=FedLearnerStatusCode.STATUS_SUCCESS, msg='test')
+        rpc.terminate(response=CheckConnectionResponse(status=expected_status),
+                      code=StatusCode.OK,
+                      trailing_metadata=(),
+                      details=None)
+        self.assertEqual(call.result().status, expected_status)
+
+    def test_check_peer_connection(self):
+        call = self.client_execution_pool.submit(self._client.check_peer_connection)
+
+        invocation_metadata, request, rpc = self._fake_channel.take_unary_unary(
+            TARGET_SERVICE.methods_by_name['CheckPeerConnection'])
+
+        self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata)
+
+        self.assertEqual(request, CheckPeerConnectionRequest())
+
+        expected_status = Status(code=FedLearnerStatusCode.STATUS_SUCCESS, msg='received check request successfully!')
+        rpc.terminate(response=CheckPeerConnectionResponse(status=expected_status,
+                                                           application_version={'version': '2.0.1.5'}),
+                      code=StatusCode.OK,
+                      trailing_metadata=(),
+                      details=None)
+        self.assertEqual(call.result().status, expected_status)
+        self.assertEqual(call.result().application_version.version, '2.0.1.5')
+
+    def test_check_job_ready(self):
+        call = self.client_execution_pool.submit(self._client.check_job_ready, self._job.name)
+
+        invocation_metadata, request, rpc = 
self._fake_channel.take_unary_unary( + TARGET_SERVICE.methods_by_name['CheckJobReady']) + + self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata) + self.assertEqual( + request, + CheckJobReadyRequest(job_name=self._job.name, + auth_info=ProjAuthInfo(project_name=self._project.name, + target_domain=self._participant.domain_name, + auth_token=self._project.token))) + + expected_status = Status(code=FedLearnerStatusCode.STATUS_SUCCESS, msg='test') + rpc.terminate(response=CheckJobReadyResponse(status=expected_status), + code=StatusCode.OK, + trailing_metadata=(), + details=None) + self.assertEqual(call.result().status, expected_status) + + def test_run_two_pc(self): + transaction_data = TransactionData(create_model_job_data=CreateModelJobData(model_job_name='test model name')) + call = self.client_execution_pool.submit(self._client.run_two_pc, + transaction_uuid='test-id', + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=transaction_data) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + TARGET_SERVICE.methods_by_name['Run2Pc']) + + self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata) + self.assertEqual( + request, + TwoPcRequest(auth_info=ProjAuthInfo(project_name=self._project.name, + target_domain=self._participant.domain_name, + auth_token=self._project.token), + transaction_uuid='test-id', + type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=transaction_data)) + + expected_status = Status(code=FedLearnerStatusCode.STATUS_SUCCESS, msg='test run two pc') + rpc.terminate(response=TwoPcResponse(status=expected_status), + code=StatusCode.OK, + trailing_metadata=(), + details=None) + self.assertEqual(call.result().status, expected_status) + + def test_list_participant_datasets(self): + call = self.client_execution_pool.submit(self._client.list_participant_datasets) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + TARGET_SERVICE.methods_by_name['ListParticipantDatasets']) + + self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata) + self.assertEqual( + request, + ListParticipantDatasetsRequest(auth_info=ProjAuthInfo(project_name=self._project.name, + target_domain=self._participant.domain_name, + auth_token=self._project.token))) + + dataref = ParticipantDatasetRef(uuid='1', + name='dataset', + format=DatasetFormat.TABULAR.name, + file_size=0, + updated_at=to_timestamp(datetime(2012, 1, 14, 12, 0, 5)), + dataset_kind=DatasetKindV2.RAW.name) + rpc.terminate(response=ListParticipantDatasetsResponse(participant_datasets=[dataref]), + code=StatusCode.OK, + trailing_metadata=(), + details=None) + self.assertEqual(call.result(), ListParticipantDatasetsResponse(participant_datasets=[dataref])) + + def test_get_model_job(self): + call = self.client_execution_pool.submit(self._client.get_model_job, model_job_uuid='uuid', need_metrics=True) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + TARGET_SERVICE.methods_by_name['GetModelJob']) + + self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata) + self.assertEqual( + request, + GetModelJobRequest(auth_info=ProjAuthInfo(project_name=self._project.name, + target_domain=self._participant.domain_name, + auth_token=self._project.token), + uuid='uuid', + need_metrics=True)) + + expected_metric = BoolValue(value=True) + resp = GetModelJobResponse(metric_is_public=expected_metric) + rpc.terminate(response=resp, 
code=StatusCode.OK, trailing_metadata=(), details=None) + self.assertEqual(call.result(), GetModelJobResponse(metric_is_public=expected_metric)) + + def test_create_dataset_job(self): + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_participant_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_participant_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + call = self.client_execution_pool.submit(self._client.create_dataset_job, + dataset_job=dataset_pb2.DatasetJob(uuid='test'), + ticket_uuid='test ticket_uuid', + dataset=dataset_pb2.Dataset(participants_info=participants_info)) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + TARGET_SERVICE.methods_by_name['CreateDatasetJob']) + + self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), invocation_metadata) + self.assertEqual( + request, + CreateDatasetJobRequest(auth_info=ProjAuthInfo(project_name=self._project.name, + target_domain=self._participant.domain_name, + auth_token=self._project.token), + dataset_job=dataset_pb2.DatasetJob(uuid='test'), + ticket_uuid='test ticket_uuid', + dataset=dataset_pb2.Dataset(participants_info=participants_info))) + + resp = empty_pb2.Empty() + rpc.terminate(response=resp, code=StatusCode.OK, trailing_metadata=(), details=None) + self.assertEqual(call.result(), empty_pb2.Empty()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/server.py b/web_console_v2/api/fedlearner_webconsole/rpc/server.py index 19b9ac285..99d154b75 100644 --- a/web_console_v2/api/fedlearner_webconsole/rpc/server.py +++ b/web_console_v2/api/fedlearner_webconsole/rpc/server.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,47 +13,115 @@ # limitations under the License. 
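Earlier in this patch the client switches `retry_fn` from a `needed_exceptions` list to a `need_retry` predicate (`_default_need_retry`, with optional `delay` and `backoff` in `run_two_pc`). The real decorator is defined elsewhere in the repository; a minimal sketch of the contract those call sites imply, assuming `delay` is in milliseconds and `backoff` multiplies it per attempt:

```python
# Minimal sketch of a predicate-style retry decorator; not the project's
# actual implementation. The millisecond unit for `delay` and the
# exponential `backoff` semantics are assumptions.
import time
from functools import wraps
from typing import Callable, Optional


def retry_fn(retry_times: int = 3,
             need_retry: Optional[Callable[[Exception], bool]] = None,
             delay: int = 0,
             backoff: int = 1):

    def decorator(fn):

        @wraps(fn)
        def wrapper(*args, **kwargs):
            wait_ms = delay
            for attempt in range(1, retry_times + 1):
                try:
                    return fn(*args, **kwargs)
                except Exception as e:  # pylint: disable=broad-except
                    # Give up on the last attempt, or when the predicate says
                    # this exception is not retriable.
                    if attempt == retry_times or (need_retry is not None and not need_retry(e)):
                        raise
                    if wait_ms > 0:
                        time.sleep(wait_ms / 1000.0)
                        wait_ms *= backoff

        return wrapper

    return decorator
```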
# coding: utf-8 -# pylint: disable=broad-except, cyclic-import +# pylint: disable=broad-except +from datetime import timedelta +import inspect import time import logging import json -import os import sys import threading import traceback from concurrent import futures +from functools import wraps +from envs import Envs + import grpc from grpc_reflection.v1alpha import reflection -from fedlearner_webconsole.proto import ( - service_pb2, service_pb2_grpc, - common_pb2, workflow_definition_pb2 -) +from google.protobuf import empty_pb2 +from google.protobuf.wrappers_pb2 import BoolValue +from fedlearner_webconsole.middleware.request_id import GrpcRequestIdMiddleware +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.proto import (dataset_pb2, service_pb2, service_pb2_grpc, common_pb2, + workflow_definition_pb2) +from fedlearner_webconsole.proto.review_pb2 import ReviewStatus +from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2_grpc, system_service_pb2, project_service_pb2_grpc +from fedlearner_webconsole.proto.service_pb2 import (TwoPcRequest, TwoPcResponse) +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper +from fedlearner_webconsole.review.common import NO_CENTRAL_SERVER_UUID +from fedlearner_webconsole.rpc.auth import get_common_name +from fedlearner_webconsole.rpc.v2.system_service_server import SystemGrpcService +from fedlearner_webconsole.serving.services import NegotiatorServingService +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.two_pc.handlers import run_two_pc_action +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.domain_name import get_pure_domain_name from fedlearner_webconsole.utils.es import es -from fedlearner_webconsole.db import db, get_session +from fedlearner_webconsole.db import db from fedlearner_webconsole.utils.kibana import Kibana from fedlearner_webconsole.project.models import Project -from fedlearner_webconsole.workflow.models import ( - Workflow, WorkflowState, TransactionState, - _merge_workflow_config -) - +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.workflow.models import (Workflow, WorkflowState, TransactionState) +from fedlearner_webconsole.workflow.resource_manager import \ + merge_workflow_config, ResourceManager +from fedlearner_webconsole.workflow.service import WorkflowService +from fedlearner_webconsole.workflow.workflow_controller import invalidate_workflow_locally +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.proto import to_json, to_dict from fedlearner_webconsole.job.models import Job from fedlearner_webconsole.job.service import JobService from fedlearner_webconsole.job.metrics import JobMetricsBuilder -from fedlearner_webconsole.exceptions import ( - UnauthorizedException, InvalidArgumentException -) -from envs import Envs - - +from fedlearner_webconsole.mmgr.models import ModelJobGroup +from fedlearner_webconsole.exceptions import (UnauthorizedException, InvalidArgumentException) +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.mmgr.models import ModelJob +from fedlearner_webconsole.mmgr.service import ModelJobService +from fedlearner_webconsole.dataset.services import 
DatasetService, DatasetJobService, BatchService
+from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobKind, \
+    Dataset, ProcessedDataset, DatasetKindV2, DatasetFormat, ResourceState
+from fedlearner_webconsole.dataset.auth_service import AuthService
+from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger
+from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2_grpc, job_service_pb2, resource_service_pb2_grpc,\
+    resource_service_pb2
+from fedlearner_webconsole.rpc.v2.auth_server_interceptor import AuthServerInterceptor
+from fedlearner_webconsole.rpc.v2.job_service_server import JobServiceServicer
+from fedlearner_webconsole.rpc.v2.resource_service_server import ResourceServiceServicer
+from fedlearner_webconsole.rpc.v2.project_service_server import ProjectGrpcService
+from fedlearner_webconsole.flag.models import Flag
+from fedlearner_webconsole.audit.decorators import emits_rpc_event, get_two_pc_request_uuid
+
+
+def _set_request_id_for_all_methods():
+    """A hacky decorator that wraps all gRPC methods to set the request id in the context.
+
+    Why not a service interceptor?
+    The request id is attached to thread-local state, but an interceptor does not
+    run on the same thread as the service handler, so a request id set in an
+    interceptor would not be visible inside the handler."""
+
+    def set_request_id_in_context(fn):
+
+        @wraps(fn)
+        def wrapper(self, request, context):
+            GrpcRequestIdMiddleware.set_request_id_in_context(context)
+            return fn(self, request, context)
+
+        return wrapper
+
+    def decorate(cls):
+        # A hack to get all methods
+        grpc_methods = service_pb2.DESCRIPTOR.services_by_name['WebConsoleV2Service'].methods_by_name
+        for name, fn in inspect.getmembers(cls, inspect.isfunction):
+            # If this is a gRPC method
+            if name in grpc_methods:
+                setattr(cls, name, set_request_id_in_context(fn))
+        return cls
+
+    return decorate
+
+
+@_set_request_id_for_all_methods()
 class RPCServerServicer(service_pb2_grpc.WebConsoleV2ServiceServicer):
+
     def __init__(self, server):
         self._server = server

     def _secure_exc(self):
         exc_type, exc_obj, exc_tb = sys.exc_info()
         # filter out exc_obj to protect sensitive info
-        secure_exc = 'Error %s at '%exc_type
+        secure_exc = f'Error {exc_type} at '
         secure_exc += ''.join(traceback.format_tb(exc_tb))
         return secure_exc

@@ -61,199 +129,254 @@ def _try_handle_request(self, func, request, context, resp_class):
         try:
             return func(request, context)
         except UnauthorizedException as e:
-            return resp_class(
-                status=common_pb2.Status(
-                    code=common_pb2.STATUS_UNAUTHORIZED,
-                    msg='Invalid auth: %s'%repr(request.auth_info)))
+            return resp_class(status=common_pb2.Status(code=common_pb2.STATUS_UNAUTHORIZED,
+                                                       msg=f'Invalid auth: {repr(request.auth_info)}'))
         except Exception as e:
             logging.error('%s rpc server error: %s', func.__name__, repr(e))
-            return resp_class(
-                status=common_pb2.Status(
-                    code=common_pb2.STATUS_UNKNOWN_ERROR,
-                    msg=self._secure_exc()))
+            return resp_class(status=common_pb2.Status(code=common_pb2.STATUS_UNKNOWN_ERROR, msg=self._secure_exc()))

     def CheckConnection(self, request, context):
-        return self._try_handle_request(
-            self._server.check_connection, request, context,
-            service_pb2.CheckConnectionResponse)
+        return self._try_handle_request(self._server.check_connection, request, context,
+                                        service_pb2.CheckConnectionResponse)

-    def Ping(self, request, context):
-        return self._try_handle_request(
-            self._server.ping, request, context,
-            service_pb2.PingResponse)
+    def 
CheckPeerConnection(self, request, context): + return self._try_handle_request(self._server.check_peer_connection, request, context, + service_pb2.CheckPeerConnectionResponse) + @emits_rpc_event(resource_type=Event.ResourceType.WORKFLOW, + op_type=Event.OperationType.UPDATE_STATE, + resource_name_fn=lambda request: request.uuid) def UpdateWorkflowState(self, request, context): - return self._try_handle_request( - self._server.update_workflow_state, request, context, - service_pb2.UpdateWorkflowStateResponse) + return self._try_handle_request(self._server.update_workflow_state, request, context, + service_pb2.UpdateWorkflowStateResponse) def GetWorkflow(self, request, context): - return self._try_handle_request( - self._server.get_workflow, request, context, - service_pb2.GetWorkflowResponse) + return self._try_handle_request(self._server.get_workflow, request, context, service_pb2.GetWorkflowResponse) + @emits_rpc_event(resource_type=Event.ResourceType.WORKFLOW, + op_type=Event.OperationType.UPDATE, + resource_name_fn=lambda request: request.workflow_uuid) def UpdateWorkflow(self, request, context): - return self._try_handle_request( - self._server.update_workflow, request, context, - service_pb2.UpdateWorkflowResponse) + return self._try_handle_request(self._server.update_workflow, request, context, + service_pb2.UpdateWorkflowResponse) + + @emits_rpc_event(resource_type=Event.ResourceType.WORKFLOW, + op_type=Event.OperationType.INVALIDATE, + resource_name_fn=lambda request: request.workflow_uuid) + def InvalidateWorkflow(self, request, context): + return self._try_handle_request(self._server.invalidate_workflow, request, context, + service_pb2.InvalidateWorkflowResponse) def GetJobMetrics(self, request, context): - return self._try_handle_request( - self._server.get_job_metrics, request, context, - service_pb2.GetJobMetricsResponse) + return self._try_handle_request(self._server.get_job_metrics, request, context, + service_pb2.GetJobMetricsResponse) def GetJobKibana(self, request, context): - return self._try_handle_request( - self._server.get_job_kibana, request, context, - service_pb2.GetJobKibanaResponse - ) + return self._try_handle_request(self._server.get_job_kibana, request, context, service_pb2.GetJobKibanaResponse) def GetJobEvents(self, request, context): - return self._try_handle_request( - self._server.get_job_events, request, context, - service_pb2.GetJobEventsResponse) + return self._try_handle_request(self._server.get_job_events, request, context, service_pb2.GetJobEventsResponse) def CheckJobReady(self, request, context): - return self._try_handle_request( - self._server.check_job_ready, request, context, - service_pb2.CheckJobReadyResponse) - - + return self._try_handle_request(self._server.check_job_ready, request, context, + service_pb2.CheckJobReadyResponse) + + def _run_2pc(self, request: TwoPcRequest, context: grpc.ServicerContext) -> TwoPcResponse: + with db.session_scope() as session: + project, _ = self._server.check_auth_info(request.auth_info, context, session) + succeeded, message = run_two_pc_action(session=session, + tid=request.transaction_uuid, + two_pc_type=request.type, + action=request.action, + data=request.data) + session.commit() + return TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + transaction_uuid=request.transaction_uuid, + type=request.type, + action=request.action, + succeeded=succeeded, + message=message) + + @emits_rpc_event(resource_type=Event.ResourceType.UNKNOWN_RESOURCE_TYPE, + 
op_type=Event.OperationType.UNKNOWN_OPERATION_TYPE, + resource_name_fn=get_two_pc_request_uuid) + def Run2Pc(self, request: TwoPcRequest, context: grpc.ServicerContext): + return self._try_handle_request(self._run_2pc, request, context, service_pb2.TwoPcResponse) + + @emits_rpc_event(resource_type=Event.ResourceType.SERVING_SERVICE, + op_type=Event.OperationType.OPERATE, + resource_name_fn=lambda request: request.serving_model_uuid) + def ServingServiceManagement(self, request: service_pb2.ServingServiceRequest, + context: grpc.ServicerContext) -> service_pb2.ServingServiceResponse: + return self._try_handle_request(self._server.operate_serving_service, request, context, + service_pb2.ServingServiceResponse) + + @emits_rpc_event(resource_type=Event.ResourceType.SERVING_SERVICE, + op_type=Event.OperationType.INFERENCE, + resource_name_fn=lambda request: request.serving_model_uuid) + def ServingServiceInference(self, request: service_pb2.ServingServiceInferenceRequest, + context: grpc.ServicerContext) -> service_pb2.ServingServiceInferenceResponse: + return self._try_handle_request(self._server.inference_serving_service, request, context, + service_pb2.ServingServiceInferenceResponse) + + def ClientHeartBeat(self, request, context): + return self._server.client_heart_beat(request, context) + + def GetModelJob(self, request, context): + return self._server.get_model_job(request, context) + + def GetModelJobGroup(self, request, context): + return self._server.get_model_job_group(request, context) + + @emits_rpc_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP, + op_type=Event.OperationType.UPDATE, + resource_name_fn=lambda request: request.uuid) + def UpdateModelJobGroup(self, request, context): + return self._server.update_model_job_group(request, context) + + def ListParticipantDatasets(self, request, context): + return self._server.list_participant_datasets(request, context) + + def GetDatasetJob(self, request: service_pb2.GetDatasetJobRequest, + context: grpc.ServicerContext) -> service_pb2.GetDatasetJobResponse: + return self._server.get_dataset_job(request, context) + + @emits_rpc_event(resource_type=Event.ResourceType.DATASET_JOB, + op_type=Event.OperationType.CREATE, + resource_name_fn=lambda request: request.dataset_job.uuid) + def CreateDatasetJob(self, request: service_pb2.CreateDatasetJobRequest, + context: grpc.ServicerContext) -> empty_pb2.Empty: + return self._server.create_dataset_job(request, context) + + +# TODO(wangsen.0914): make the rpc server clean, move business logic out class RpcServer(object): + def __init__(self): + self.started = False self._lock = threading.Lock() - self._started = False self._server = None - self._app = None - def start(self, app): - assert not self._started, 'Already started' - self._app = app - listen_port = app.config.get('GRPC_LISTEN_PORT', 1999) + def start(self, port: int): + assert not self.started, 'Already started' with self._lock: - self._server = grpc.server( - futures.ThreadPoolExecutor(max_workers=20)) - service_pb2_grpc.add_WebConsoleV2ServiceServicer_to_server( - RPCServerServicer(self), self._server) - # reflection support server find the proto file path automatically - # when using grpcurl - reflection.enable_server_reflection( - service_pb2.DESCRIPTOR.services_by_name, self._server) - self._server.add_insecure_port('[::]:%d' % listen_port) + self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=30), + interceptors=[AuthServerInterceptor()]) + 
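# Each add_*Servicer_to_server call below registers one generated service implementation on this server; the AuthServerInterceptor passed above runs before every handler.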
+            service_pb2_grpc.add_WebConsoleV2ServiceServicer_to_server(RPCServerServicer(self), self._server)
+            system_service_pb2_grpc.add_SystemServiceServicer_to_server(SystemGrpcService(), self._server)
+            job_service_pb2_grpc.add_JobServiceServicer_to_server(JobServiceServicer(), self._server)
+            resource_service_pb2_grpc.add_ResourceServiceServicer_to_server(ResourceServiceServicer(), self._server)
+            project_service_pb2_grpc.add_ProjectServiceServicer_to_server(ProjectGrpcService(), self._server)
+            # Server reflection lets callers (e.g. grpcurl) locate a service by URL, e.g. /SystemService.CheckHealth
+            reflection.enable_server_reflection(service_pb2.DESCRIPTOR.services_by_name, self._server)
+            reflection.enable_server_reflection(system_service_pb2.DESCRIPTOR.services_by_name, self._server)
+            reflection.enable_server_reflection(job_service_pb2.DESCRIPTOR.services_by_name, self._server)
+            reflection.enable_server_reflection(resource_service_pb2.DESCRIPTOR.services_by_name, self._server)
+            self._server.add_insecure_port(f'[::]:{port}')
             self._server.start()
-            self._started = True
+            self.started = True

     def stop(self):
-        if not self._started:
+        if not self.started:
             return
         with self._lock:
             self._server.stop(None).wait()
             del self._server
-            self._started = False
+            self.started = False

-    def check_auth_info(self, auth_info, context):
+    def check_auth_info(self, auth_info, context, session):
         logging.debug('auth_info: %s', auth_info)
-        project = Project.query.filter_by(
-            name=auth_info.project_name).first()
+        project = session.query(Project).filter_by(name=auth_info.project_name).first()
         if project is None:
-            raise UnauthorizedException('Invalid project')
-        project_config = project.get_config()
+            raise UnauthorizedException(f'Invalid project {auth_info.project_name}')
         # TODO: fix token verification
         # if project_config.token != auth_info.auth_token:
         #     raise UnauthorizedException('Invalid token')
-        # Use first participant to mock for unit test
+        service = ParticipantService(session)
+        participants = service.get_participants_by_project(project.id)
         # TODO: Fix for multi-peer
-        source_party = project_config.participants[0]
-        if os.environ.get('FLASK_ENV') == 'production':
+        source_party = participants[0]
+        if Envs.FLASK_ENV == 'production':
+            source_party = None
             metadata = dict(context.invocation_metadata())
-            # ssl-client-subject-dn example:
-            # CN=*.fl-xxx.com,OU=security,O=security,L=beijing,ST=beijing,C=CN
-            cn = metadata.get('ssl-client-subject-dn').split(',')[0][5:]
-            for party in project_config.participants:
-                if party.domain_name == cn:
+            cn = get_common_name(metadata.get('ssl-client-subject-dn'))
+            if not cn:
+                raise UnauthorizedException('Failed to get domain name from certs')
+            pure_domain_name = get_pure_domain_name(cn)
+            for party in participants:
+                if get_pure_domain_name(party.domain_name) == pure_domain_name:
                     source_party = party
             if source_party is None:
-                raise UnauthorizedException('Invalid domain')
+                raise UnauthorizedException(f'Invalid domain {pure_domain_name}')
         return project, source_party

     def check_connection(self, request, context):
-        with self._app.app_context():
-            _, party = self.check_auth_info(request.auth_info, context)
-            logging.debug(
-                'received check_connection from %s', party.domain_name)
-            return service_pb2.CheckConnectionResponse(
-                status=common_pb2.Status(
-                    code=common_pb2.STATUS_SUCCESS))
-
-    def ping(self, request, context):
-        return service_pb2.PingResponse(
-            status=common_pb2.Status(
-                code=common_pb2.STATUS_SUCCESS),
-            msg='Pong!')
+        with db.session_scope() as session:
+            _, party = 
self.check_auth_info(request.auth_info, context, session) + logging.debug('received check_connection from %s', party.domain_name) + return service_pb2.CheckConnectionResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS)) + + def check_peer_connection(self, request, context): + logging.debug('received request: check peer connection') + with db.session_scope() as session: + service = SettingService(session) + version = service.get_application_version() + return service_pb2.CheckPeerConnectionResponse(status=common_pb2.Status( + code=common_pb2.STATUS_SUCCESS, msg='participant received check request successfully!'), + application_version=version.to_proto()) def update_workflow_state(self, request, context): - with self._app.app_context(): - project, party = self.check_auth_info(request.auth_info, context) - logging.debug( - 'received update_workflow_state from %s: %s', - party.domain_name, request) + with db.session_scope() as session: + project, party = self.check_auth_info(request.auth_info, context, session) + logging.debug('received update_workflow_state from %s: %s', party.domain_name, request) name = request.workflow_name uuid = request.uuid forked_from_uuid = request.forked_from_uuid - forked_from = Workflow.query.filter_by( + forked_from = session.query(Workflow).filter_by( uuid=forked_from_uuid).first().id if forked_from_uuid else None state = WorkflowState(request.state) target_state = WorkflowState(request.target_state) transaction_state = TransactionState(request.transaction_state) - workflow = Workflow.query.filter_by( - name=request.workflow_name, - project_id=project.id).first() + workflow = session.query(Workflow).filter_by(name=request.workflow_name, project_id=project.id).first() if workflow is None: assert state == WorkflowState.NEW assert target_state == WorkflowState.READY - workflow = Workflow( - name=name, - project_id=project.id, - state=state, target_state=target_state, - transaction_state=transaction_state, - uuid=uuid, - forked_from=forked_from, - extra=request.extra - ) - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - workflow.update_state( - state, target_state, transaction_state) - db.session.commit() - return service_pb2.UpdateWorkflowStateResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_SUCCESS), - state=workflow.state.value, - target_state=workflow.target_state.value, - transaction_state=workflow.transaction_state.value) + workflow = Workflow(name=name, + project_id=project.id, + state=state, + target_state=target_state, + transaction_state=transaction_state, + uuid=uuid, + forked_from=forked_from, + extra=request.extra) + session.add(workflow) + session.commit() + session.refresh(workflow) + + ResourceManager(session, workflow).update_state(state, target_state, transaction_state) + session.commit() + return service_pb2.UpdateWorkflowStateResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + state=workflow.state.value, + target_state=workflow.target_state.value, + transaction_state=workflow.transaction_state.value) def _filter_workflow(self, workflow, modes): # filter peer-readable and peer-writable variables if workflow is None: return None - new_wf = workflow_definition_pb2.WorkflowDefinition( - group_alias=workflow.group_alias, - is_left=workflow.is_left) + new_wf = workflow_definition_pb2.WorkflowDefinition(group_alias=workflow.group_alias) for var in workflow.variables: if var.access_mode in modes: new_wf.variables.append(var) for job_def in workflow.job_definitions: # keep yaml 
template private - new_jd = workflow_definition_pb2.JobDefinition( - name=job_def.name, - job_type=job_def.job_type, - is_federated=job_def.is_federated, - dependencies=job_def.dependencies) + new_jd = workflow_definition_pb2.JobDefinition(name=job_def.name, + job_type=job_def.job_type, + is_federated=job_def.is_federated, + dependencies=job_def.dependencies) for var in job_def.variables: if var.access_mode in modes: new_jd.variables.append(var) @@ -261,147 +384,300 @@ def _filter_workflow(self, workflow, modes): return new_wf def get_workflow(self, request, context): - with self._app.app_context(): - project, party = self.check_auth_info(request.auth_info, context) - workflow = Workflow.query.filter_by( - name=request.workflow_name, - project_id=project.id).first() + with db.session_scope() as session: + project, party = self.check_auth_info(request.auth_info, context, session) + # TODO(hangweiqiang): remove workflow name + # compatible method for previous version + if request.workflow_uuid: + workflow = session.query(Workflow).filter_by(uuid=request.workflow_uuid, project_id=project.id).first() + else: + workflow = session.query(Workflow).filter_by(name=request.workflow_name, project_id=project.id).first() assert workflow is not None, 'Workflow not found' config = workflow.get_config() - config = self._filter_workflow( - config, - [ - common_pb2.Variable.PEER_READABLE, - common_pb2.Variable.PEER_WRITABLE - ]) + config = self._filter_workflow(config, + [common_pb2.Variable.PEER_READABLE, common_pb2.Variable.PEER_WRITABLE]) # job details - jobs = [service_pb2.JobDetail( - name=job.name, - state=job.get_state_for_frontend(), - pods=json.dumps( - job.get_pods_for_frontend(include_private_info=False))) - for job in workflow.get_jobs()] + jobs = [ + service_pb2.JobDetail( + name=job.name, + state=job.state.name, + created_at=to_timestamp(job.created_at), + pods=json.dumps([to_dict(pod) + for pod in JobService.get_pods(job, include_private_info=False)])) + for job in workflow.get_jobs(session) + ] # fork info forked_from = '' if workflow.forked_from: - forked_from = Workflow.query.get(workflow.forked_from).name - return service_pb2.GetWorkflowResponse( - name=request.workflow_name, - status=common_pb2.Status( - code=common_pb2.STATUS_SUCCESS), - config=config, - jobs=jobs, - state=workflow.state.value, - target_state=workflow.target_state.value, - transaction_state=workflow.transaction_state.value, - forkable=workflow.forkable, - forked_from=forked_from, - create_job_flags=workflow.get_create_job_flags(), - peer_create_job_flags=workflow.get_peer_create_job_flags(), - fork_proposal_config=workflow.get_fork_proposal_config(), - uuid=workflow.uuid, - metric_is_public=workflow.metric_is_public) + forked_from = session.query(Workflow).get(workflow.forked_from).name + return service_pb2.GetWorkflowResponse(name=workflow.name, + status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + config=config, + jobs=jobs, + state=workflow.state.value, + target_state=workflow.target_state.value, + transaction_state=workflow.transaction_state.value, + forkable=workflow.forkable, + forked_from=forked_from, + create_job_flags=workflow.get_create_job_flags(), + peer_create_job_flags=workflow.get_peer_create_job_flags(), + fork_proposal_config=workflow.get_fork_proposal_config(), + uuid=workflow.uuid, + metric_is_public=workflow.metric_is_public, + is_finished=workflow.is_finished()) def update_workflow(self, request, context): - with self._app.app_context(): - project, party = 
self.check_auth_info(request.auth_info, context) - workflow = Workflow.query.filter_by( - name=request.workflow_name, - project_id=project.id).first() + with db.session_scope() as session: + project, party = self.check_auth_info(request.auth_info, context, session) + # TODO(hangweiqiang): remove workflow name + # compatible method for previous version + if request.workflow_uuid: + workflow = session.query(Workflow).filter_by(uuid=request.workflow_uuid, project_id=project.id).first() + else: + workflow = session.query(Workflow).filter_by(name=request.workflow_name, project_id=project.id).first() assert workflow is not None, 'Workflow not found' config = workflow.get_config() - _merge_workflow_config( - config, request.config, - [common_pb2.Variable.PEER_WRITABLE]) - workflow.set_config(config) - db.session.commit() - - config = self._filter_workflow( - config, - [ - common_pb2.Variable.PEER_READABLE, - common_pb2.Variable.PEER_WRITABLE - ]) - return service_pb2.UpdateWorkflowResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_SUCCESS), - workflow_name=request.workflow_name, - config=config) + merge_workflow_config(config, request.config, [common_pb2.Variable.PEER_WRITABLE]) + WorkflowService(session).update_config(workflow, config) + session.commit() + + config = self._filter_workflow(config, + [common_pb2.Variable.PEER_READABLE, common_pb2.Variable.PEER_WRITABLE]) + # compatible method for previous version + if request.workflow_uuid: + return service_pb2.UpdateWorkflowResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + workflow_uuid=request.workflow_uuid, + config=config) + return service_pb2.UpdateWorkflowResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + workflow_name=request.workflow_name, + config=config) + + def invalidate_workflow(self, request, context): + with db.session_scope() as session: + project, party = self.check_auth_info(request.auth_info, context, session) + workflow = session.query(Workflow).filter_by(uuid=request.workflow_uuid, project_id=project.id).first() + if workflow is None: + logging.error(f'Failed to find workflow: {request.workflow_uuid}') + return service_pb2.InvalidateWorkflowResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + succeeded=False) + invalidate_workflow_locally(session, workflow) + session.commit() + return service_pb2.InvalidateWorkflowResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + succeeded=True) def _check_metrics_public(self, request, context): - project, party = self.check_auth_info(request.auth_info, context) - job = db.session.query(Job).filter_by(name=request.job_name, - project_id=project.id).first() - assert job is not None, f'job {request.job_name} not found' - workflow = job.workflow - if not workflow.metric_is_public: - raise UnauthorizedException('Metric is private!') - return job + with db.session_scope() as session: + project, party = self.check_auth_info(request.auth_info, context, session) + job = session.query(Job).filter_by(name=request.job_name, project_id=project.id).first() + assert job is not None, f'job {request.job_name} not found' + workflow = job.workflow + if not workflow.metric_is_public: + raise UnauthorizedException('Metric is private!') + return job def get_job_metrics(self, request, context): - with self._app.app_context(): + with db.session_scope(): job = self._check_metrics_public(request, context) metrics = JobMetricsBuilder(job).plot_metrics() - return service_pb2.GetJobMetricsResponse( - status=common_pb2.Status( - 
code=common_pb2.STATUS_SUCCESS), - metrics=json.dumps(metrics)) + return service_pb2.GetJobMetricsResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + metrics=json.dumps(metrics)) def get_job_kibana(self, request, context): - with self._app.app_context(): + with db.session_scope(): job = self._check_metrics_public(request, context) try: - metrics = Kibana.remote_query(job, - json.loads(request.json_args)) + metrics = Kibana.remote_query(job, json.loads(request.json_args)) except UnauthorizedException as ua_e: return service_pb2.GetJobKibanaResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_UNAUTHORIZED, - msg=ua_e.message)) + status=common_pb2.Status(code=common_pb2.STATUS_UNAUTHORIZED, msg=ua_e.message)) except InvalidArgumentException as ia_e: return service_pb2.GetJobKibanaResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_INVALID_ARGUMENT, - msg=ia_e.message)) - return service_pb2.GetJobKibanaResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_SUCCESS), - metrics=json.dumps(metrics)) + status=common_pb2.Status(code=common_pb2.STATUS_INVALID_ARGUMENT, msg=ia_e.message)) + return service_pb2.GetJobKibanaResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + metrics=json.dumps(metrics)) def get_job_events(self, request, context): - with self._app.app_context(): - project, party = self.check_auth_info(request.auth_info, context) - job = Job.query.filter_by(name=request.job_name, - project_id=project.id).first() + with db.session_scope() as session: + project, party = self.check_auth_info(request.auth_info, context, session) + job = session.query(Job).filter_by(name=request.job_name, project_id=project.id).first() assert job is not None, \ f'Job {request.job_name} not found' - result = es.query_events('filebeat-*', job.name, - 'fedlearner-operator', - request.start_time, - int(time.time() * 1000), - Envs.OPERATOR_LOG_MATCH_PHRASE - )[:request.max_lines][::-1] + result = es.query_events('filebeat-*', job.name, 'fedlearner-operator', request.start_time, + int(time.time() * 1000), Envs.OPERATOR_LOG_MATCH_PHRASE)[:request.max_lines][::-1] - return service_pb2.GetJobEventsResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_SUCCESS), - logs=result) + return service_pb2.GetJobEventsResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + logs=result) def check_job_ready(self, request, context): - with self._app.app_context(): - project, _ = self.check_auth_info(request.auth_info, context) - job = db.session.query(Job).filter_by(name=request.job_name, - project_id=project.id).first() + with db.session_scope() as session: + project, _ = self.check_auth_info(request.auth_info, context, session) + job = session.query(Job).filter_by(name=request.job_name, project_id=project.id).first() assert job is not None, \ f'Job {request.job_name} not found' - with get_session(db.get_engine()) as session: - is_ready = JobService(session).is_ready(job) - return service_pb2.CheckJobReadyResponse( - status=common_pb2.Status( - code=common_pb2.STATUS_SUCCESS), - is_ready=is_ready) + is_ready = JobService(session).is_ready(job) + return service_pb2.CheckJobReadyResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + is_ready=is_ready) + + def operate_serving_service(self, request, context) -> service_pb2.ServingServiceResponse: + with db.session_scope() as session: + project, _ = self.check_auth_info(request.auth_info, context, session) + return NegotiatorServingService(session).handle_participant_request(request, 
project)
+
+    def inference_serving_service(self, request, context) -> service_pb2.ServingServiceInferenceResponse:
+        with db.session_scope() as session:
+            project, _ = self.check_auth_info(request.auth_info, context, session)
+            return NegotiatorServingService(session).handle_participant_inference_request(request, project)
+
+    def client_heart_beat(self, request: service_pb2.ClientHeartBeatRequest, context):
+        with db.session_scope() as session:
+            party: Participant = session.query(Participant).filter_by(domain_name=request.domain_name).first()
+            if party is None:
+                return service_pb2.ClientHeartBeatResponse(succeeded=False)
+            party.last_connected_at = now()
+            session.commit()
+            return service_pb2.ClientHeartBeatResponse(succeeded=True)
+
+    def get_model_job(self, request: service_pb2.GetModelJobRequest, context) -> service_pb2.GetModelJobResponse:
+        with db.session_scope() as session:
+            project, _ = self.check_auth_info(request.auth_info, context, session)
+            model_job: ModelJob = session.query(ModelJob).filter_by(uuid=request.uuid).first()
+            group_uuid = None
+            if model_job.group:
+                group_uuid = model_job.group.uuid
+            config = model_job.workflow.get_config()
+            config = self._filter_workflow(config,
+                                           [common_pb2.Variable.PEER_READABLE, common_pb2.Variable.PEER_WRITABLE])
+            metrics = None
+            if request.need_metrics and model_job.job is not None and model_job.metric_is_public:
+                metrics = to_json(ModelJobService(session).query_metrics(model_job))
+            return service_pb2.GetModelJobResponse(name=model_job.name,
+                                                   uuid=model_job.uuid,
+                                                   algorithm_type=model_job.algorithm_type.name,
+                                                   model_job_type=model_job.model_job_type.name,
+                                                   state=model_job.state.name,
+                                                   group_uuid=group_uuid,
+                                                   config=config,
+                                                   metrics=metrics,
+                                                   metric_is_public=BoolValue(value=model_job.metric_is_public))
+
+    def get_model_job_group(self, request: service_pb2.GetModelJobGroupRequest, context):
+        with db.session_scope() as session:
+            project, _ = self.check_auth_info(request.auth_info, context, session)
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(uuid=request.uuid).first()
+            return service_pb2.GetModelJobGroupResponse(name=group.name,
+                                                        uuid=group.uuid,
+                                                        role=group.role.name,
+                                                        authorized=group.authorized,
+                                                        algorithm_type=group.algorithm_type.name,
+                                                        config=group.get_config())
+
+    def update_model_job_group(self, request: service_pb2.UpdateModelJobGroupRequest, context):
+        with db.session_scope() as session:
+            project, _ = self.check_auth_info(request.auth_info, context, session)
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(uuid=request.uuid).first()
+            if not group.authorized:
+                raise UnauthorizedException(f'group {group.name} is not authorized for editing')
+            group.set_config(request.config)
+            session.commit()
+            return service_pb2.UpdateModelJobGroupResponse(uuid=group.uuid, config=group.get_config())
+
+    # TODO(liuhehan): delete after all participants support new rpc
+    def list_participant_datasets(self, request: service_pb2.ListParticipantDatasetsRequest, context):
+        kind = DatasetKindV2(request.kind) if request.kind else None
+        uuid = request.uuid if request.uuid else None
+        state = ResourceState.SUCCEEDED
+        with db.session_scope() as session:
+            project, _ = self.check_auth_info(request.auth_info, context, session)
+            datasets = DatasetService(session=session).get_published_datasets(project.id, kind, uuid, state)
+            return service_pb2.ListParticipantDatasetsResponse(participant_datasets=datasets)
+
+    def get_dataset_job(self, request: service_pb2.GetDatasetJobRequest,
+                        context: grpc.ServicerContext) 
-> service_pb2.GetDatasetJobResponse: + with db.session_scope() as session: + self.check_auth_info(request.auth_info, context, session) + dataset_job_model = session.query(DatasetJob).filter(DatasetJob.uuid == request.uuid).first() + if dataset_job_model is None: + context.abort(code=grpc.StatusCode.NOT_FOUND, details=f'could not find dataset {request.uuid}') + dataset_job = dataset_job_model.to_proto() + dataset_job.workflow_definition.MergeFrom( + DatasetJobConfiger.from_kind(dataset_job_model.kind, session).get_config()) + return service_pb2.GetDatasetJobResponse(dataset_job=dataset_job) + + def create_dataset_job(self, request: service_pb2.CreateDatasetJobRequest, + context: grpc.ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project, participant = self.check_auth_info(request.auth_info, context, session) + + # this is a hack to allow no ticket_uuid, delete it after all customers update + ticket_uuid = request.ticket_uuid if request.ticket_uuid else NO_CENTRAL_SERVER_UUID + ticket_helper = get_ticket_helper(session=session) + validate = ticket_helper.validate_ticket( + ticket_uuid, lambda ticket: ticket.details.uuid == request.dataset_job.result_dataset_uuid and ticket. + status == ReviewStatus.APPROVED) + if not validate: + message = f'[create_dataset_job]: ticket status is not approved, ticket_uuid: {request.ticket_uuid}' + logging.warning(message) + context.abort(code=grpc.StatusCode.PERMISSION_DENIED, details=message) + + processed_dataset = session.query(ProcessedDataset).filter_by( + uuid=request.dataset_job.result_dataset_uuid).first() + if processed_dataset is None: + # create processed dataset + domain_name = SettingService.get_system_info().pure_domain_name + dataset_job_config = request.dataset_job.global_configs.global_configs.get(domain_name) + dataset = session.query(Dataset).filter_by(uuid=dataset_job_config.dataset_uuid).first() + dataset_param = dataset_pb2.DatasetParameter( + name=request.dataset_job.result_dataset_name, + type=dataset.dataset_type.value, + project_id=project.id, + kind=DatasetKindV2.PROCESSED.value, + format=DatasetFormat(dataset.dataset_format).name, + uuid=request.dataset_job.result_dataset_uuid, + is_published=True, + creator_username=request.dataset.creator_username, + ) + participants_info = request.dataset.participants_info + if not Flag.DATASET_AUTH_STATUS_CHECK_ENABLED.value: + # auto set participant auth_status and cache to authorized if no need check + dataset_param.auth_status = AuthStatus.AUTHORIZED.name + participants_info.participants_map[domain_name].auth_status = AuthStatus.AUTHORIZED.name + processed_dataset = DatasetService(session=session).create_dataset(dataset_param) + processed_dataset.ticket_uuid = request.ticket_uuid + processed_dataset.ticket_status = TicketStatus.APPROVED + session.flush([processed_dataset]) + # old dataset job will create data_batch in grpc level + # new dataset job will create data_batch before create dataset_job_stage + if not request.dataset_job.has_stages: + batch_parameter = dataset_pb2.BatchParameter(dataset_id=processed_dataset.id) + BatchService(session).create_batch(batch_parameter) + + dataset_job = session.query(DatasetJob).filter_by(uuid=request.dataset_job.uuid).first() + if dataset_job is None: + time_range = timedelta(days=request.dataset_job.time_range.days, + hours=request.dataset_job.time_range.hours) + dataset_job = DatasetJobService(session=session).create_as_participant( + project_id=project.id, + kind=DatasetJobKind(request.dataset_job.kind), + 
global_configs=request.dataset_job.global_configs, + config=request.dataset_job.workflow_definition, + output_dataset_id=processed_dataset.id, + coordinator_id=participant.id, + uuid=request.dataset_job.uuid, + creator_username=request.dataset_job.creator_username, + time_range=time_range if time_range else None) + session.flush() + AuthService(session=session, dataset_job=dataset_job).initialize_participants_info_as_participant( + participants_info=request.dataset.participants_info) + + session.commit() + return empty_pb2.Empty() + + def wait_for_termination(self): + if not self.started: + logging.warning('gRPC service is not yet started, failed to wait') + return + self._server.wait_for_termination() rpc_server = RpcServer() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/server_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/server_test.py new file mode 100644 index 000000000..d43fb8fef --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/server_test.py @@ -0,0 +1,396 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import time +from typing import Optional +import unittest +import grpc +from datetime import datetime, timedelta +from concurrent import futures + +from unittest.mock import patch + +from google.protobuf.struct_pb2 import Value +from google.protobuf.wrappers_pb2 import BoolValue + +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.dataset import FakeDatasetJobConfiger +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2_grpc +from fedlearner_webconsole.proto.rpc.v2.system_service_pb2 import CheckHealthRequest +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto import dataset_pb2, project_pb2, service_pb2, service_pb2_grpc +from fedlearner_webconsole.proto.metrics_pb2 import ModelJobMetrics +from fedlearner_webconsole.auth.models import Session +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.rpc.v2.auth_server_interceptor import AuthServerInterceptor +from fedlearner_webconsole.rpc.server import RPCServerServicer, RpcServer, rpc_server +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobState, DatasetKindV2, \ + DatasetType, ProcessedDataset +from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobGroup, ModelJobType +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.utils.proto import to_json +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.review.common import NO_CENTRAL_SERVER_UUID + +_FAKE_SYSTEM_INFO = SystemInfo( + name='test', + domain_name='fl-participant.com', + pure_domain_name='participant', +) + + +def make_check_auth_info(self, auth_info: Optional[service_pb2.ProjAuthInfo], context: Optional[grpc.ServicerContext], + session: Session): + project = Project(id=1, name='test') + participant = Participant(id=1, name='participant', domain_name='test_domain') + return project, participant + + +class ServerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + listen_port = 1991 + self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=20), interceptors=[AuthServerInterceptor()]) + service_pb2_grpc.add_WebConsoleV2ServiceServicer_to_server(RPCServerServicer(RpcServer()), self._server) + self._server.add_insecure_port(f'[::]:{listen_port}') + self._server.start() + + self._stub = service_pb2_grpc.WebConsoleV2ServiceStub(grpc.insecure_channel(target=f'localhost:{listen_port}')) + + def tearDown(self): + self._server.stop(5) + return super().tearDown() + + @patch('fedlearner_webconsole.rpc.server.RpcServer.check_auth_info', lambda *args: ('test_project', 'party_1')) + def test_get_dataset_job_unexist(self): + with self.assertRaises(grpc.RpcError) as cm: + self._stub.GetDatasetJob(service_pb2.GetDatasetJobRequest(auth_info=None, uuid='u1234')) + + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + + @patch('fedlearner_webconsole.dataset.services.DatasetJobConfiger.from_kind', + lambda *args: FakeDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.rpc.server.RpcServer.check_auth_info', lambda *args: ('test_project', 'party_1')) + def test_get_dataset_job(self): + request = service_pb2.GetDatasetJobRequest(auth_info=None, uuid='dataset_job_uuid') + # no dataset_job + with self.assertRaises(grpc.RpcError): + self._stub.GetDatasetJob(request) + # check the response when output_dataset and workflow are missing + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job_uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=0, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.RUNNING, + coordinator_id=0, + workflow_id=0) + session.add(dataset_job) + session.commit() + resp = self._stub.GetDatasetJob(request) + self.assertEqual(resp.dataset_job.uuid, dataset_job.uuid) + self.assertEqual(resp.dataset_job.result_dataset_uuid, '') + self.assertEqual(resp.dataset_job.is_ready, False) + # check is_ready succeeds + with db.session_scope() as session: + dataset = Dataset(id=2, + name='output dataset', + uuid='result_dataset_uuid', + path='/data/dataset/321', + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.PROCESSED) + session.add(dataset) + workflow = Workflow(id=1, project_id=1, name='test workflow', uuid='dataset_job_uuid') + session.add(workflow) + dataset_job = session.query(DatasetJob).get(1) + dataset_job.workflow_id = 1 + dataset_job.output_dataset_id = 2 + session.commit() + resp = self._stub.GetDatasetJob(request) + self.assertEqual(resp.dataset_job.uuid, dataset_job.uuid) + self.assertEqual(resp.dataset_job.result_dataset_uuid, 'result_dataset_uuid') + self.assertEqual(resp.dataset_job.is_ready, True)
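+ + # The create_dataset_job cases below exercise the RPC end to end against the servicer above: it + # validates the ticket, creates the processed dataset (plus a data batch when the job has no stages), + # and registers the dataset job as a participant.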
+ + @patch('fedlearner_webconsole.rpc.server.RpcServer.check_auth_info', make_check_auth_info) + @patch('fedlearner_webconsole.rpc.server.SettingService.get_system_info', lambda: _FAKE_SYSTEM_INFO) + def test_create_dataset_job(self): + with db.session_scope() as session: + project = Project(id=1, name='test') + session.add(project) + dataset = Dataset(id=1, + name='input dataset', + uuid='raw_dataset_uuid', + path='/data/dataset/321', + is_published=True, + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.RAW) + session.add(dataset) + session.commit() + dataset_job = DatasetJob(id=1, + uuid='dataset_job_uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.RUNNING, + coordinator_id=0, + workflow_id=1) + dataset_job_parameter = dataset_pb2.DatasetJob( + uuid=dataset_job.uuid, + kind=dataset_job.kind.value, + global_configs=dataset_pb2.DatasetJobGlobalConfigs(global_configs={ + _FAKE_SYSTEM_INFO.pure_domain_name: dataset_pb2.DatasetJobConfig(dataset_uuid='raw_dataset_uuid') + }), + workflow_definition=WorkflowDefinition(group_alias='test'), + result_dataset_uuid='dataset_uuid', + result_dataset_name='dataset_name', + creator_username='test user') + request = service_pb2.CreateDatasetJobRequest(auth_info=None, + dataset_job=dataset_job_parameter, + ticket_uuid=NO_CENTRAL_SERVER_UUID) + self._stub.CreateDatasetJob(request) + with db.session_scope() as session: + dataset = session.query(ProcessedDataset).filter_by(uuid='dataset_uuid').first() + self.assertEqual(dataset.name, 'dataset_name') + self.assertEqual(dataset.dataset_kind, DatasetKindV2.PROCESSED) + self.assertEqual(len(dataset.data_batches), 1) + self.assertEqual(dataset.is_published, True) + dataset_job = session.query(DatasetJob).filter_by(uuid='dataset_job_uuid').first() + self.assertIsNotNone(dataset_job) + self.assertEqual(dataset_job.output_dataset_id, dataset.id) + self.assertEqual(dataset_job.creator_username, 'test user') + self.assertIsNone(dataset_job.time_range) + + # test with time_range + dataset_job = DatasetJob(id=2, + uuid='dataset_job_uuid with time_range', + project_id=1, + input_dataset_id=1, + output_dataset_id=2, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.RUNNING, + coordinator_id=0, + workflow_id=1, + time_range=timedelta(days=1)) + dataset_job_parameter = dataset_pb2.DatasetJob( + uuid=dataset_job.uuid, + kind=dataset_job.kind.value, + global_configs=dataset_pb2.DatasetJobGlobalConfigs(global_configs={ + _FAKE_SYSTEM_INFO.pure_domain_name: dataset_pb2.DatasetJobConfig(dataset_uuid='raw_dataset_uuid') + }), + workflow_definition=WorkflowDefinition(group_alias='test'), + result_dataset_uuid='dataset_uuid with time_range', + result_dataset_name='dataset_name', + creator_username='test user', + time_range=dataset_job.time_range_pb) + request = service_pb2.CreateDatasetJobRequest(auth_info=None, + dataset_job=dataset_job_parameter, + ticket_uuid=NO_CENTRAL_SERVER_UUID) + self._stub.CreateDatasetJob(request) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).filter_by(uuid='dataset_job_uuid with time_range').first() + self.assertEqual(dataset_job.time_range, timedelta(days=1)) + + @patch('fedlearner_webconsole.rpc.server.RpcServer.check_auth_info', make_check_auth_info) + @patch('fedlearner_webconsole.rpc.server.SettingService.get_system_info', lambda: _FAKE_SYSTEM_INFO) + def test_create_dataset_job_has_stage(self): + with db.session_scope() as session: + project = Project(id=1, name='test') + session.add(project) + streaming_dataset = Dataset(id=1, + name='input streaming_dataset', + uuid='raw_dataset_uuid', + path='/data/dataset/321', +
is_published=True, + project_id=1, + created_at=datetime(2012, 1, 14, 12, 0, 7), + dataset_kind=DatasetKindV2.RAW, + dataset_type=DatasetType.STREAMING.value) + session.add(streaming_dataset) + session.commit() + dataset_job_parameter = dataset_pb2.DatasetJob( + uuid='dataset_job_uuid', + kind=DatasetJobKind.DATA_ALIGNMENT.value, + global_configs=dataset_pb2.DatasetJobGlobalConfigs(global_configs={ + _FAKE_SYSTEM_INFO.pure_domain_name: dataset_pb2.DatasetJobConfig(dataset_uuid='raw_dataset_uuid') + }), + workflow_definition=WorkflowDefinition(group_alias='test'), + result_dataset_uuid='dataset_uuid', + result_dataset_name='dataset_name', + has_stages=True, + ) + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_participant_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_participant_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'participant': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name) + }) + dataset_parameter = dataset_pb2.Dataset(participants_info=participants_info, creator_username='test user') + request = service_pb2.CreateDatasetJobRequest(auth_info=None, + dataset_job=dataset_job_parameter, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + dataset=dataset_parameter) + self._stub.CreateDatasetJob(request) + with db.session_scope() as session: + dataset = session.query(ProcessedDataset).filter_by(uuid='dataset_uuid').first() + self.assertEqual(dataset.name, 'dataset_name') + self.assertEqual(dataset.dataset_kind, DatasetKindV2.PROCESSED) + self.assertEqual(len(dataset.data_batches), 0) + self.assertEqual(dataset.is_published, True) + self.assertEqual(dataset.dataset_type, DatasetType.STREAMING) + self.assertEqual(dataset.creator_username, 'test user') + dataset_job = session.query(DatasetJob).filter_by(uuid='dataset_job_uuid').first() + self.assertIsNotNone(dataset_job) + self.assertEqual(dataset_job.output_dataset_id, dataset.id) + self.assertEqual(dataset.ticket_uuid, NO_CENTRAL_SERVER_UUID) + self.assertEqual(dataset.ticket_status, TicketStatus.APPROVED) + expected_participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_participant_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_participant_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'participant': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + self.assertEqual(dataset.get_participants_info(), expected_participants_info) + + @patch('fedlearner_webconsole.rpc.server.RpcServer.check_auth_info', make_check_auth_info) + def test_update_workflow(self): + uuid = 'u1ff0ab5596bb487e96c' + with db.session_scope() as session: + var1 = Variable(name='hello', + value_type=Variable.NUMBER, + typed_value=Value(number_value=1), + access_mode=Variable.PEER_WRITABLE) + var2 = Variable(name='hello', + value_type=Variable.NUMBER, + typed_value=Value(number_value=1), + access_mode=Variable.PEER_READABLE) + jd = JobDefinition(name='test1', yaml_template='{}', variables=[var1, var2]) + wd = WorkflowDefinition(job_definitions=[jd]) + workflow = Workflow( + name='test-workflow', + uuid=uuid, + project_id=1, + config=wd.SerializeToString(), + ) + session.add(workflow) + session.flush() + job = Job(name='test_job', + config=jd.SerializeToString(), + workflow_id=workflow.id, + job_type=JobType(1), + project_id=1, + is_disabled=False) + session.add(job) + session.flush() + workflow.job_ids = str(job.id) + session.commit() + var1 = Variable(name='hello', 
+ value_type=Variable.NUMBER, + typed_value=Value(number_value=2), + access_mode=Variable.PEER_WRITABLE) + var2 = Variable(name='hello', + value_type=Variable.NUMBER, + typed_value=Value(number_value=2), + access_mode=Variable.PEER_READABLE) + jd = JobDefinition(name='test1', yaml_template='{}', variables=[var1, var2]) + wd = WorkflowDefinition(job_definitions=[jd]) + request = service_pb2.UpdateWorkflowRequest(auth_info=None, workflow_uuid=uuid, config=wd) + self._stub.UpdateWorkflow(request) + with db.session_scope() as session: + workflow = session.query(Workflow).filter_by(uuid=uuid).first() + self.assertEqual(workflow.get_config().job_definitions[0].variables[0].typed_value, Value(number_value=2)) + self.assertEqual(workflow.get_config().job_definitions[0].variables[1].typed_value, Value(number_value=1)) + jd = workflow.get_jobs(session)[0].get_config() + self.assertEqual(jd.variables[0].typed_value, Value(number_value=2)) + self.assertEqual(jd.variables[1].typed_value, Value(number_value=1)) + + @patch('fedlearner_webconsole.rpc.server.RpcServer.check_auth_info', make_check_auth_info) + @patch('fedlearner_webconsole.mmgr.service.ModelJobService.query_metrics') + def test_get_model_job(self, mock_query_metrics): + metrics = ModelJobMetrics() + metric = metrics.train.get_or_create('acc') + metric.steps.extend([1, 2, 3]) + metric.values.extend([1.0, 2.0, 3.0]) + mock_query_metrics.return_value = metrics + with db.session_scope() as session: + job = Job(name='uuid-job', + project_id=1, + workflow_id=1, + job_type=JobType.NN_MODEL_TRANINING, + state=JobState.COMPLETED) + workflow = Workflow(id=1, name='workflow', uuid='uuid') + group = ModelJobGroup(id=1, name='group', uuid='uuid', project_id=1) + model_job = ModelJob(id=1, + name='job', + uuid='uuid', + group_id=1, + project_id=1, + metric_is_public=False, + algorithm_type=AlgorithmType.NN_VERTICAL, + model_job_type=ModelJobType.TRAINING, + workflow_uuid='uuid', + job_name='uuid-job') + session.add_all([job, workflow, group, model_job]) + session.commit() + request = service_pb2.GetModelJobRequest(auth_info=None, uuid='uuid', need_metrics=True) + resp = self._stub.GetModelJob(request) + mock_query_metrics.assert_not_called() + expected_resp = service_pb2.GetModelJobResponse(name='job', + uuid='uuid', + algorithm_type='NN_VERTICAL', + model_job_type='TRAINING', + group_uuid='uuid', + state='INVALID', + metric_is_public=BoolValue(value=False)) + self.assertEqual(resp, expected_resp) + with db.session_scope() as session: + model_job: ModelJob = session.query(ModelJob).get(1) + model_job.metric_is_public = True + session.commit() + resp = self._stub.GetModelJob(request) + mock_query_metrics.assert_called() + expected_resp.metric_is_public.MergeFrom(BoolValue(value=True)) + expected_resp.metrics = to_json(metrics) + self.assertEqual(resp, expected_resp) + + +class RpcServerTest(NoWebServerTestCase): + + def test_smoke_test(self): + rpc_server.start(13546) + # Waits for server ready + time.sleep(2) + stub = system_service_pb2_grpc.SystemServiceStub(grpc.insecure_channel(target='localhost:13546')) + self.assertIsNotNone( + stub.CheckHealth(CheckHealthRequest(), + metadata=[('ssl-client-subject-dn', + 'CN=aaa.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN')])) + rpc_server.stop() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/rpc/v2/BUILD.bazel new file mode 100644 index 000000000..6dd8e5680 --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/rpc/v2/BUILD.bazel @@ -0,0 +1,483 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "auth_client_interceptor_lib", + srcs = ["auth_client_interceptor.py"], + imports = ["../../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:auth_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "auth_client_interceptor_lib_test", + srcs = [ + "auth_client_interceptor_test.py", + ], + imports = ["../../.."], + main = "auth_client_interceptor_test.py", + deps = [ + ":auth_client_interceptor_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:auth_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "auth_server_interceptor_lib", + srcs = ["auth_server_interceptor.py"], + imports = ["../../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:auth_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "auth_server_interceptor_lib_test", + srcs = [ + "auth_server_interceptor_test.py", + ], + imports = ["../../.."], + main = "auth_server_interceptor_test.py", + deps = [ + ":auth_server_interceptor_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "//web_console_v2/api/testing/rpc:service_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_library( + name = "client_base_lib", + srcs = ["client_base.py"], + imports = ["../../.."], + deps = [ + ":auth_client_interceptor_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "client_base_lib_test", + srcs = [ + "client_base_test.py", + ], + imports = ["../../.."], + main = "client_base_test.py", + deps = [ + ":client_base_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@common_grpcio_testing//:pkg", + ], +) + +py_library( + name = "system_service_client_lib", + srcs = ["system_service_client.py"], + imports = ["../../.."], + deps = [ + ":client_base_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", +
"//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "system_service_client_lib_test", + srcs = [ + "system_service_client_test.py", + ], + imports = ["../../.."], + main = "system_service_client_test.py", + deps = [ + ":system_service_client_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_grpcio_testing//:pkg", + ], +) + +py_library( + name = "system_service_server_lib", + srcs = ["system_service_server.py"], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:services_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "system_service_server_lib_test", + srcs = [ + "system_service_server_test.py", + ], + imports = ["../../.."], + main = "system_service_server_test.py", + deps = [ + ":system_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:app_version_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "job_service_client_lib", + srcs = ["job_service_client.py"], + imports = ["../../.."], + deps = [ + ":client_base_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "job_service_client_lib_test", + size = "medium", + srcs = [ + "job_service_client_test.py", + ], + imports = ["../../.."], + main = "job_service_client_test.py", + deps = [ + ":job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + 
"//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:auth_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_base64_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "utils_lib_test", + srcs = [ + "utils_test.py", + ], + imports = ["../../.."], + main = "utils_test.py", + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:auth_lib", + ], +) + +py_library( + name = "job_service_server_lib", + srcs = ["job_service_server.py"], + imports = ["../../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:local_controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/job_configer", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:service_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "job_service_server_lib_test", + size = "small", + srcs = [ + "job_service_server_test.py", + ], + imports = ["../../.."], + main = "job_service_server_test.py", + deps = [ + ":job_service_server_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:common_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:auth_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + 
"//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "resource_service_server_lib", + srcs = ["resource_service_server.py"], + imports = ["../../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:service_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm/transmit", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "resource_service_server_lib_test", + size = "small", + srcs = [ + "resource_service_server_test.py", + ], + imports = ["../../.."], + main = "resource_service_server_test.py", + deps = [ + ":resource_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "resource_service_client_lib", + srcs = ["resource_service_client.py"], + imports = ["../../.."], + deps = [ + ":client_base_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "resource_service_client_lib_test", + size = "small", + srcs = [ + "resource_service_client_test.py", + ], + imports = ["../../.."], + main = "resource_service_client_test.py", + deps = [ + ":resource_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + 
"//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_grpcio_testing//:pkg", + ], +) + +py_library( + name = "project_service_server_lib", + srcs = ["project_service_server.py"], + imports = ["../../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:services_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "project_service_server_lib_test", + size = "small", + srcs = [ + "project_service_server_test.py", + ], + imports = ["../../.."], + main = "project_service_server_test.py", + deps = [ + ":project_service_server_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "project_service_client_lib", + srcs = ["project_service_client.py"], + imports = ["../../.."], + deps = [ + ":client_base_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "project_service_client_lib_test", + size = "small", + srcs = [ + "project_service_client_test.py", + ], + imports = ["../../.."], + main = "project_service_client_test.py", + deps = [ + ":project_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_grpcio_testing//:pkg", + ], +) + +py_library( + name = "review_service_client_lib", + srcs = ["review_service_client.py"], + imports = ["../../.."], + deps = [ + ":client_base_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_test( + name = "review_service_client_lib_test", + srcs = [ + "review_service_client_test.py", + ], + imports = ["../../.."], + main = "review_service_client_test.py", 
+ deps = [ + ":review_service_client_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_grpcio_testing//:pkg", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_client_interceptor.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_client_interceptor.py new file mode 100644 index 000000000..b8d8d9310 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_client_interceptor.py @@ -0,0 +1,111 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, Callable, Iterator, NamedTuple, Optional, Sequence, Tuple, Union + +import grpc + +from fedlearner_webconsole.rpc.auth import PROJECT_NAME_HEADER, X_HOST_HEADER +from fedlearner_webconsole.rpc.v2.utils import encode_project_name + + +# Ref: https://github.com/d5h-foss/grpc-interceptor/blob/master/src/grpc_interceptor/client.py#L9 +class _ClientCallDetailsFields(NamedTuple): + method: str + timeout: Optional[float] + metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] + credentials: Optional[grpc.CallCredentials] + wait_for_ready: Optional[bool] + compression: Optional[grpc.Compression] + + +class ClientCallDetails(_ClientCallDetailsFields, grpc.ClientCallDetails): + pass + + +RequestOrIterator = Union[Any, Iterator[Any]] + + +class _RpcErrorOutcome(grpc.RpcError, grpc.Future): + + def __init__(self, rpc_error: grpc.RpcError): + super().__init__() + self._error = rpc_error + + def cancel(self): + return False + + def cancelled(self): + return False + + def running(self): + return False + + def done(self): + return True + + def result(self, timeout=None): + raise self._error + + def exception(self, timeout=None): + return self._error + + def traceback(self, timeout=None): + return '' + + def add_done_callback(self, fn): + fn(self) + + +class AuthClientInterceptor(grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, grpc.StreamStreamClientInterceptor): + + def __init__(self, x_host: str, project_name: Optional[str] = None): + super().__init__() + self.x_host = x_host + self.project_name = project_name + + def _intercept_call(self, continuation: Callable[[ClientCallDetails, RequestOrIterator], Any], + client_call_details: ClientCallDetails, request_or_iterator: RequestOrIterator): + metadata = [] + if client_call_details.metadata is not None: + metadata = list(client_call_details.metadata) + metadata.append((X_HOST_HEADER, self.x_host)) + if self.project_name is not None: + metadata.append((PROJECT_NAME_HEADER, encode_project_name(self.project_name))) + + # Metadata of ClientCallDetails can not be set directly + new_details = ClientCallDetails(client_call_details.method, 
client_call_details.timeout, metadata, + client_call_details.credentials, client_call_details.wait_for_ready, + client_call_details.compression) + response_future = continuation(new_details, request_or_iterator) + # This is a hack for testing only: a grpc_testing channel surfaces grpc errors as regular + # responses instead of raising exceptions, since its interface differs from a real channel's + # (behavior introduced in https://github.com/grpc/grpc/pull/17317), so wrap them to act like a failed future. + if isinstance(response_future, grpc.RpcError) and not isinstance(response_future, grpc.Future): + return _RpcErrorOutcome(response_future) + return response_future + + def intercept_unary_unary(self, continuation, client_call_details, request): + return self._intercept_call(continuation, client_call_details, request) + + def intercept_unary_stream(self, continuation, client_call_details, request): + return self._intercept_call(continuation, client_call_details, request) + + def intercept_stream_unary(self, continuation, client_call_details, request_iterator): + return self._intercept_call(continuation, client_call_details, request_iterator) + + def intercept_stream_stream(self, continuation, client_call_details, request_iterator): + return self._intercept_call(continuation, client_call_details, request_iterator) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_client_interceptor_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_client_interceptor_test.py new file mode 100644 index 000000000..552b667c9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_client_interceptor_test.py @@ -0,0 +1,109 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +from typing import Optional +import unittest + +import grpc +import grpc_testing + +from fedlearner_webconsole.proto.testing import service_pb2, service_pb2_grpc +from fedlearner_webconsole.rpc.auth import PROJECT_NAME_HEADER, X_HOST_HEADER +from fedlearner_webconsole.rpc.v2.auth_client_interceptor import AuthClientInterceptor +from google.protobuf.descriptor import ServiceDescriptor +from testing.rpc.client import RpcClientTestCase + +_TEST_SERVICE_DESCRIPTOR: ServiceDescriptor = service_pb2.DESCRIPTOR.services_by_name['TestService'] + + +class AuthClientInterceptorTest(RpcClientTestCase): + _X_HOST = 'fedlearner-webconsole-v2.fl-test.com' + + def set_up(self, project_name: Optional[str] = None) -> service_pb2_grpc.TestServiceStub: + self._fake_channel: grpc_testing.Channel = grpc_testing.channel([_TEST_SERVICE_DESCRIPTOR], + grpc_testing.strict_real_time()) + channel = grpc.intercept_channel(self._fake_channel, + AuthClientInterceptor(x_host=self._X_HOST, project_name=project_name)) + self._stub = service_pb2_grpc.TestServiceStub(channel) + return self._stub + + def test_x_host(self): + self.set_up() + call = self.client_execution_pool.submit(self._stub.FakeUnaryUnary, service_pb2.FakeUnaryUnaryRequest()) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _TEST_SERVICE_DESCRIPTOR.methods_by_name['FakeUnaryUnary']) + + self.assertIn((X_HOST_HEADER, self._X_HOST), invocation_metadata) + rpc.terminate(response=service_pb2.FakeUnaryUnaryResponse(), + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None) + # Waits for finish + call.result() + + def test_x_host_unauthenticated(self): + self.set_up() + + call = self.client_execution_pool.submit(self._stub.FakeUnaryUnary, service_pb2.FakeUnaryUnaryRequest()) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _TEST_SERVICE_DESCRIPTOR.methods_by_name['FakeUnaryUnary']) + + self.assertIn((X_HOST_HEADER, self._X_HOST), invocation_metadata) + rpc.terminate(response=service_pb2.FakeUnaryUnaryResponse(), + code=grpc.StatusCode.UNAUTHENTICATED, + trailing_metadata=(), + details=None) + with self.assertRaises(grpc.RpcError) as cm: + call.result() + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + + def test_project_name(self): + self.set_up(project_name='test-project-113') + + call = self.client_execution_pool.submit(self._stub.FakeUnaryUnary, service_pb2.FakeUnaryUnaryRequest()) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _TEST_SERVICE_DESCRIPTOR.methods_by_name['FakeUnaryUnary']) + + self.assertIn((X_HOST_HEADER, self._X_HOST), invocation_metadata) + self.assertIn((PROJECT_NAME_HEADER, 'test-project-113'), invocation_metadata) + rpc.terminate(response=service_pb2.FakeUnaryUnaryResponse(), + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None) + # Waits for finish + call.result() + + def test_project_name_unicode(self): + self.set_up(project_name='test中文') + + call = self.client_execution_pool.submit(self._stub.FakeUnaryUnary, service_pb2.FakeUnaryUnaryRequest()) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _TEST_SERVICE_DESCRIPTOR.methods_by_name['FakeUnaryUnary']) + + self.assertIn((X_HOST_HEADER, self._X_HOST), invocation_metadata) + self.assertIn((PROJECT_NAME_HEADER, 'dGVzdOS4reaWhw=='), invocation_metadata) + rpc.terminate(response=service_pb2.FakeUnaryUnaryResponse(), + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None) + # Waits for finish + call.result() + + +if __name__ ==
'__main__': unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_server_interceptor.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_server_interceptor.py new file mode 100644 index 000000000..5dbdc8cbe --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_server_interceptor.py @@ -0,0 +1,157 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Callable, Any, Optional, Tuple + +import grpc + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.rpc.auth import get_common_name, SSL_CLIENT_SUBJECT_DN_HEADER, PROJECT_NAME_HEADER +from fedlearner_webconsole.rpc.v2.utils import decode_project_name +from fedlearner_webconsole.utils.domain_name import get_pure_domain_name + +# Which services should disable the auth interceptors. +DISABLED_SERVICES = frozenset([ + # Skips the old gRPC service as it has a separate way to check auth. + 'fedlearner_webconsole.proto.WebConsoleV2Service', +]) +# Which services should use project-based auth interceptors. +PROJECT_BASED_SERVICES = frozenset([ + 'fedlearner_webconsole.proto.rpc.v2.JobService', +]) + + +class AuthException(Exception): + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + +def _get_handler_factory(handler: grpc.RpcMethodHandler) -> Callable: + if handler.unary_unary: + return grpc.unary_unary_rpc_method_handler + if handler.unary_stream: + return grpc.unary_stream_rpc_method_handler + if handler.stream_unary: + return grpc.stream_unary_rpc_method_handler + if handler.stream_stream: + return grpc.stream_stream_rpc_method_handler + raise RuntimeError(f'Unrecognized rpc handler: {handler}') + + +def _parse_method_name(method_full_name: str) -> Tuple[str, str]: + """Parses grpc method name in service interceptor. + + Args: + method_full_name: Full name of the method, e.g. /fedlearner_webconsole.proto.testing.TestService/FakeUnaryUnary + + Returns: + A tuple of service name and method name, e.g. 'fedlearner_webconsole.proto.testing.TestService' + and 'FakeUnaryUnary'. + """ + names = method_full_name.split('/') + return names[-2], names[-1] + + +class AuthServerInterceptor(grpc.ServerInterceptor): + """Auth-related handling on the server side, applied to every service that injects this + interceptor.
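+ + A request is expected to carry metadata like the following (values are illustrative, not part of this diff): + ('ssl-client-subject-dn', 'CN=aaa.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN') + ('project-name', '<base64-encoded project name>') # verified only for project-based services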
+ + Ref: https://github.com/grpc/grpc/blob/v1.40.x/examples/python/interceptors/headers/request_header_validator_interceptor.py # pylint:disable=line-too-long + """ + + def _build_rpc_terminator(self, message: str): + + def terminate(request_or_iterator: Any, context: grpc.ServicerContext): + context.abort(grpc.StatusCode.UNAUTHENTICATED, message) + + return terminate + + def _verify_domain_name(self, handler_call_details: grpc.HandlerCallDetails) -> str: + """Verifies if the traffic is secure by checking ssl-client-subject-dn header. + + Returns: + The pure domain name. + + Raises: + AuthException: if the traffic is insecure. + """ + ssl_client_subject_dn = None + for header, value in handler_call_details.invocation_metadata: + if header == SSL_CLIENT_SUBJECT_DN_HEADER: + ssl_client_subject_dn = value + break + if not ssl_client_subject_dn: + raise AuthException('No client subject dn found') + # If this header is set, it passed the TLS verification + common_name = get_common_name(ssl_client_subject_dn) + if not common_name: + logging.error('[gRPC auth] invalid subject dn: %s', ssl_client_subject_dn) + raise AuthException('Invalid subject dn') + # Extracts the pure domain name, e.g. bytedance-test + pure_domain_name = get_pure_domain_name(common_name) + if not pure_domain_name: + logging.error('[gRPC auth] no valid domain name found in %s', ssl_client_subject_dn) + raise AuthException('Invalid domain name') + return pure_domain_name + + def _verify_project_info(self, handler_call_details: grpc.HandlerCallDetails, pure_domain_name: str): + project_name = None + for header, value in handler_call_details.invocation_metadata: + if header == PROJECT_NAME_HEADER: + project_name = decode_project_name(value) + break + if not project_name: + raise AuthException('No project name found') + with db.session_scope() as session: + project = session.query(Project.id).filter_by(name=project_name).first() + if not project: + logging.error('[gRPC auth] invalid project: %s', project_name) + raise AuthException(f'Invalid project {project_name}') + project_id, = project + # Checks if the caller has the access to this project + service = ParticipantService(session) + participants = service.get_participants_by_project(project_id) + has_access = False + for p in participants: + if p.pure_domain_name() == pure_domain_name: + has_access = True + break + if not has_access: + raise AuthException(f'No access to {project_name}') + + def intercept_service(self, continuation: Callable[[grpc.HandlerCallDetails], grpc.RpcMethodHandler], + handler_call_details: grpc.HandlerCallDetails) -> Optional[grpc.RpcMethodHandler]: + next_handler = continuation(handler_call_details) + + package_service_name, _ = _parse_method_name(handler_call_details.method) + # Skips the interceptor if the service does not intend to use it + if package_service_name in DISABLED_SERVICES: + return next_handler + + try: + pure_domain_name = self._verify_domain_name(handler_call_details) + # Project based service + if package_service_name in PROJECT_BASED_SERVICES: + self._verify_project_info(handler_call_details, pure_domain_name) + # Go ahead! 
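+ # Both the domain-name check and, for project-based services, the project check have passed. + # A rough sketch of the wiring this interceptor assumes (mirroring server_test.py above; names are illustrative): + #     server = grpc.server(futures.ThreadPoolExecutor(max_workers=20), interceptors=[AuthServerInterceptor()]) + #     service_pb2_grpc.add_WebConsoleV2ServiceServicer_to_server(servicer, server)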
+ return next_handler + except AuthException as e: + handler_factory = _get_handler_factory(next_handler) + return handler_factory(self._build_rpc_terminator(e.message)) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_server_interceptor_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_server_interceptor_test.py new file mode 100644 index 000000000..61c68c9e9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/auth_server_interceptor_test.py @@ -0,0 +1,184 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import contextlib +import unittest +from unittest.mock import patch + +import grpc + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.testing import service_pb2_grpc +from fedlearner_webconsole.proto.testing.service_pb2 import FakeUnaryUnaryRequest, FakeStreamStreamRequest +from fedlearner_webconsole.rpc.v2.auth_server_interceptor import AuthServerInterceptor, _parse_method_name +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.rpc.client import testing_channel +from testing.rpc.service import TestService + + +class ParseMethodNameTest(unittest.TestCase): + + def test_parse_method_name(self): + self.assertEqual(_parse_method_name('/fedlearner_webconsole.proto.testing.TestService/FakeUnaryUnary'), + ('fedlearner_webconsole.proto.testing.TestService', 'FakeUnaryUnary')) + self.assertEqual(_parse_method_name('test-service/TestM'), ('test-service', 'TestM')) + + +class AuthServerInterceptorTest(NoWebServerTestCase): + + def set_up_client(self, is_project_based=False, skip=False) -> service_pb2_grpc.TestServiceStub: + if is_project_based: + project_based_patcher = patch( + 'fedlearner_webconsole.rpc.v2.auth_server_interceptor.PROJECT_BASED_SERVICES', + frozenset(['fedlearner_webconsole.proto.testing.TestService']), + ) + project_based_patcher.start() + if skip: + skip_patcher = patch( + 'fedlearner_webconsole.rpc.v2.auth_server_interceptor.DISABLED_SERVICES', + frozenset(['fedlearner_webconsole.proto.testing.TestService']), + ) + skip_patcher.start() + + def stop_patchers(): + if is_project_based: + project_based_patcher.stop() + if skip: + skip_patcher.stop() + + def register_service(server: grpc.Server): + service_pb2_grpc.add_TestServiceServicer_to_server(TestService(), server) + + with contextlib.ExitStack() as stack: + channel = stack.enter_context( + testing_channel( + register_service, + server_interceptors=[AuthServerInterceptor()], + )) + stub = service_pb2_grpc.TestServiceStub(channel) + # Cleans up for the server + self.addCleanup(stack.pop_all().close) + self.addCleanup(stop_patchers) + return stub + + def test_verify_domain_name(self): + stub = self.set_up_client() + valid_subject_dn = 'CN=aaa.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN' + # Normal 
unary-unary + resp = stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), + metadata=[('ssl-client-subject-dn', valid_subject_dn)]) + self.assertIsNotNone(resp) + + # Normal stream-stream + def generate_request(): + yield FakeStreamStreamRequest() + + # Makes sure the stream-stream request is executed + self.assertEqual( + len(list(stub.FakeStreamStream(generate_request(), + metadata=[('ssl-client-subject-dn', valid_subject_dn)]))), 1) + + with self.assertRaisesRegex(grpc.RpcError, 'No client subject dn found') as cm: + # No ssl header + stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest()) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + with self.assertRaisesRegex(grpc.RpcError, 'No client subject dn found') as cm: + # No ssl header + list(stub.FakeStreamStream(generate_request())) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + with self.assertRaisesRegex(grpc.RpcError, 'Invalid subject dn') as cm: + stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), + metadata=[('ssl-client-subject-dn', 'invalid subject dn')]) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + with self.assertRaisesRegex(grpc.RpcError, 'Invalid domain name') as cm: + stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), + metadata=[('ssl-client-subject-dn', + 'CN=test.net,OU=security,O=security,L=beijing,ST=beijing,C=CN')]) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + + def test_verify_project(self): + valid_subject_dn = 'CN=test.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN' + with db.session_scope() as session: + project = Project(id=123, name='test-project') + participant = Participant( + id=666, + name='test-participant', + domain_name='fl-test.com', + host='127.0.0.1', + port=32443, + ) + relationship = ProjectParticipant(project_id=project.id, participant_id=participant.id) + session.add_all([project, participant, relationship]) + session.commit() + stub = self.set_up_client(is_project_based=True) + + # Valid request + resp = stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), + metadata=[('ssl-client-subject-dn', valid_subject_dn), + ('project-name', 'test-project')]) + self.assertIsNotNone(resp) + + # No project name + with self.assertRaisesRegex(grpc.RpcError, 'No project name found') as cm: + stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), metadata=[('ssl-client-subject-dn', valid_subject_dn)]) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + # Invalid project + with self.assertRaisesRegex(grpc.RpcError, 'Invalid project hhh-project') as cm: + stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), + metadata=[('ssl-client-subject-dn', valid_subject_dn), ('project-name', 'hhh-project')]) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + # No access + with self.assertRaisesRegex(grpc.RpcError, 'No access to test-project') as cm: + stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(), + metadata=[ + ('ssl-client-subject-dn', + 'CN=another.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN'), + ('project-name', 'test-project') + ]) + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNAUTHENTICATED) + + def test_verify_project_unicode(self): + valid_subject_dn = 'CN=test.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN' + with db.session_scope() as session: + project = Project(id=123, name='测试工作区') + participant = Participant( + id=666, + name='test-participant', + domain_name='fl-test.com', + 
host='127.0.0.1',
+                port=32443,
+            )
+            relationship = ProjectParticipant(project_id=project.id, participant_id=participant.id)
+            session.add_all([project, participant, relationship])
+            session.commit()
+        stub = self.set_up_client(is_project_based=True)
+
+        # Valid request
+        resp = stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest(),
+                                   metadata=[('ssl-client-subject-dn', valid_subject_dn),
+                                             ('project-name', '5rWL6K+V5bel5L2c5Yy6')])
+        self.assertIsNotNone(resp)
+
+    def test_skip(self):
+        stub = self.set_up_client(skip=True)
+        # No auth-related info
+        resp = stub.FakeUnaryUnary(request=FakeUnaryUnaryRequest())
+        self.assertIsNotNone(resp)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/client_base.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/client_base.py
new file mode 100644
index 000000000..942f628c1
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/client_base.py
@@ -0,0 +1,100 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from abc import ABC
+from typing import Optional
+
+import grpc
+from envs import Envs
+
+from fedlearner_webconsole.rpc.v2.auth_client_interceptor import AuthClientInterceptor
+from fedlearner_webconsole.utils.decorators.lru_cache import lru_cache
+
+
+@lru_cache(timeout=60, maxsize=100)
+def build_grpc_channel(nginx_controller_url: str,
+                       peer_domain_name: str,
+                       project_name: Optional[str] = None) -> grpc.Channel:
+    """A helper function to build a gRPC channel, with caching.
+
+    Note that because the channel is cached, it may break if the nginx controller gets restarted.
+    This follows the official gRPC performance best practices: https://grpc.io/docs/guides/performance/
+
+    Args:
+        nginx_controller_url: Nginx controller url in the current cluster,
+            e.g. fedlearner-stack-ingress-nginx-controller.default.svc:80
+        peer_domain_name: Domain name of the peer which we want to connect to, e.g. fl-test.com
+        project_name: Project name which the client works on.
+
+    Returns:
+        A gRPC channel used to construct gRPC clients.
+    """
+    # Authority is used to route the traffic out of the cluster; specifically it will look like fl-test-client-auth.com
+    domain_name_prefix = peer_domain_name.rpartition('.')[0]
+    authority = f'{domain_name_prefix}-client-auth.com'
+
+    channel = grpc.insecure_channel(
+        target=nginx_controller_url,
+        # options defined at
+        # https://github.com/grpc/grpc/blob/master/include/grpc/impl/codegen/grpc_types.h
+        options=[('grpc.default_authority', authority)])
+
+    x_host = f'fedlearner-webconsole-v2.{peer_domain_name}'
+    # Adds auth client interceptor to auto-populate auth related headers
+    channel = grpc.intercept_channel(channel, AuthClientInterceptor(x_host=x_host, project_name=project_name))
+    return channel
+
+
+def get_nginx_controller_url() -> str:
+    """Generates the nginx controller url in the current cluster.
+
+    Basically our gRPC client talks to the nginx controller.
+ """ + if Envs.DEBUG and Envs.GRPC_SERVER_URL is not None: + return Envs.GRPC_SERVER_URL + return 'fedlearner-stack-ingress-nginx-controller.default.svc:80' + + +class ParticipantRpcClient(ABC): + """Abstract class for clients which only work on participant system level, e.g. system service to check health. + """ + + def __init__(self, channel: grpc.Channel): + pass + + @classmethod + def from_participant(cls, domain_name: str): + channel = build_grpc_channel( + get_nginx_controller_url(), + domain_name, + ) + return cls(channel) + + +class ParticipantProjectRpcClient(ABC): + """Abstract class for clients which work on participant's project level, e.g. model service to train/eval. + """ + + def __init__(self, channel: grpc.Channel): + pass + + @classmethod + def from_project_and_participant(cls, domain_name: str, project_name: str): + channel = build_grpc_channel( + get_nginx_controller_url(), + domain_name, + project_name, + ) + return cls(channel) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/client_base_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/client_base_test.py new file mode 100644 index 000000000..894792eaa --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/client_base_test.py @@ -0,0 +1,155 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import MagicMock, Mock, call, patch +import grpc +import grpc_testing + +from fedlearner_webconsole.proto.testing import service_pb2 +from fedlearner_webconsole.rpc.v2.client_base import (ParticipantProjectRpcClient, ParticipantRpcClient, + build_grpc_channel, get_nginx_controller_url) +from testing.fake_time_patcher import FakeTimePatcher +from testing.rpc.client import RpcClientTestCase + + +class GetNginxControllerUrlTest(unittest.TestCase): + + def test_prod(self): + self.assertEqual( + get_nginx_controller_url(), + 'fedlearner-stack-ingress-nginx-controller.default.svc:80', + ) + + @patch('envs.Envs.DEBUG', 'True') + @patch('envs.Envs.GRPC_SERVER_URL', 'xxx.default.svc:443') + def test_custom_url(self): + self.assertEqual( + get_nginx_controller_url(), + 'xxx.default.svc:443', + ) + + +class BuildGrpcChannelTest(RpcClientTestCase): + + def setUp(self): + super().setUp() + self._insecure_channel_patcher = patch('fedlearner_webconsole.rpc.v2.client_base.grpc.insecure_channel') + self._mock_insecure_channel: Mock = self._insecure_channel_patcher.start() + self._mock_insecure_channel.return_value = grpc_testing.channel( + service_pb2.DESCRIPTOR.services_by_name.values(), grpc_testing.strict_real_time()) + + def tearDown(self): + self._insecure_channel_patcher.stop() + super().tearDown() + + @patch('fedlearner_webconsole.rpc.v2.client_base.AuthClientInterceptor', spec=grpc.UnaryUnaryClientInterceptor) + def test_build_same_channel(self, mock_auth_client_interceptor: Mock): + fake_timer = FakeTimePatcher() + fake_timer.start() + nginx_controller_url = 'test-nginx.default.svc:80' + channel1 = build_grpc_channel(nginx_controller_url, 'fl-test1.com') + # Within 60s + channel2 = build_grpc_channel(nginx_controller_url, 'fl-test1.com') + # Checks if it is the same instance + self.assertTrue(channel1 is channel2) + self._mock_insecure_channel.assert_called_once_with( + target=nginx_controller_url, + options=[('grpc.default_authority', 'fl-test1-client-auth.com')], + ) + mock_auth_client_interceptor.assert_called_once_with( + x_host='fedlearner-webconsole-v2.fl-test1.com', + project_name=None, + ) + + # Ticks 62 seconds to timeout + fake_timer.interrupt(62) + channel3 = build_grpc_channel(nginx_controller_url, 'fl-test1.com') + self.assertTrue(channel3 is not channel1) + self.assertEqual(self._mock_insecure_channel.call_count, 2) + self.assertEqual(mock_auth_client_interceptor.call_count, 2) + + @patch('fedlearner_webconsole.rpc.v2.client_base.AuthClientInterceptor', spec=grpc.UnaryUnaryClientInterceptor) + def test_build_different_channels(self, mock_auth_client_interceptor: Mock): + nginx_controller_url = 'test.default.svc:80' + channel1 = build_grpc_channel(nginx_controller_url, 'fl-test1.com') + channel2 = build_grpc_channel(nginx_controller_url, 'fl-test1.com', project_name='test-project') + self.assertTrue(channel1 is not channel2) + + self.assertEqual(self._mock_insecure_channel.call_args_list, [ + call( + target=nginx_controller_url, + options=[('grpc.default_authority', 'fl-test1-client-auth.com')], + ), + call( + target=nginx_controller_url, + options=[('grpc.default_authority', 'fl-test1-client-auth.com')], + ), + ]) + self.assertEqual(mock_auth_client_interceptor.call_args_list, [ + call( + x_host='fedlearner-webconsole-v2.fl-test1.com', + project_name=None, + ), + call( + x_host='fedlearner-webconsole-v2.fl-test1.com', + project_name='test-project', + ), + ]) + + +class _FakeRpcClient(ParticipantRpcClient, ParticipantProjectRpcClient): + + def 
__init__(self, channel): + super().__init__(channel) + self.channel = channel + + +class ParticipantRpcClientTest(unittest.TestCase): + + @patch('fedlearner_webconsole.rpc.v2.client_base.build_grpc_channel') + def test_from_participant(self, mock_build_grpc_channel: Mock): + fake_channel = MagicMock() + mock_build_grpc_channel.return_value = fake_channel + + domain_name = 'fl-test.com' + fake_client = _FakeRpcClient.from_participant(domain_name=domain_name) + self.assertTrue(fake_client.channel is fake_channel) + mock_build_grpc_channel.assert_called_once_with( + 'fedlearner-stack-ingress-nginx-controller.default.svc:80', + domain_name, + ) + + +class ParticipantProjectRpcClientTest(unittest.TestCase): + + @patch('fedlearner_webconsole.rpc.v2.client_base.build_grpc_channel') + def test_from_project_and_participant(self, mock_build_grpc_channel: Mock): + fake_channel = MagicMock() + mock_build_grpc_channel.return_value = fake_channel + + domain_name = 'fl-test.com' + project_name = 'test-prrrr' + fake_client = _FakeRpcClient.from_project_and_participant(domain_name, project_name) + self.assertTrue(fake_client.channel is fake_channel) + mock_build_grpc_channel.assert_called_once_with( + 'fedlearner-stack-ingress-nginx-controller.default.svc:80', + domain_name, + project_name, + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_client.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_client.py new file mode 100644 index 000000000..64e414add --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_client.py @@ -0,0 +1,174 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
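
The channel tests above pin down the time-based eviction that build_grpc_channel depends on. As a rough illustration of those semantics (this is not the project's lru_cache implementation in utils.decorators.lru_cache, just the behavior the tests assert):

```python
import time

_cache = {}


def cached(key, factory, timeout=60):
    """Returns a cached value for key, rebuilding it once it is older than timeout seconds."""
    entry = _cache.get(key)
    now = time.monotonic()
    if entry is None or now - entry[0] > timeout:
        entry = (now, factory())
        _cache[key] = entry
    return entry[1]


# Two lookups within 60s return the same object (as in test_build_same_channel);
# after the timeout elapses, the factory runs again and a fresh object is returned.
```
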
+# + +import grpc +from datetime import datetime +from google.protobuf import empty_pb2 +from typing import Optional + +from envs import Envs +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.decorators.retry import retry_fn +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2_grpc import JobServiceStub +from fedlearner_webconsole.rpc.v2.client_base import ParticipantProjectRpcClient +from fedlearner_webconsole.mmgr.models import ModelJobType, AlgorithmType, GroupAutoUpdateStatus +from fedlearner_webconsole.dataset.models import DatasetJobSchedulerState +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, AlgorithmProjectList, ModelJobPb, ModelJobGroupPb +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import CreateModelJobRequest, InformTrustedJobGroupRequest, \ + UpdateTrustedJobGroupRequest, DeleteTrustedJobGroupRequest, GetTrustedJobGroupRequest, \ + GetTrustedJobGroupResponse, CreateDatasetJobStageRequest, GetDatasetJobStageRequest, GetDatasetJobStageResponse, \ + CreateModelJobGroupRequest, GetModelJobRequest, GetModelJobGroupRequest, InformModelJobGroupRequest, \ + InformTrustedJobRequest, GetTrustedJobRequest, GetTrustedJobResponse, CreateTrustedExportJobRequest, \ + UpdateDatasetJobSchedulerStateRequest, UpdateModelJobGroupRequest, InformModelJobRequest + + +def _need_retry_for_get(err: Exception) -> bool: + if not isinstance(err, grpc.RpcError): + return False + # No need to retry for NOT_FOUND + return err.code() != grpc.StatusCode.NOT_FOUND + + +def _need_retry_for_create(err: Exception) -> bool: + if not isinstance(err, grpc.RpcError): + return False + # No need to retry for INVALID_ARGUMENT + return err.code() != grpc.StatusCode.INVALID_ARGUMENT + + +def _default_need_retry(err: Exception) -> bool: + return isinstance(err, grpc.RpcError) + + +class JobServiceClient(ParticipantProjectRpcClient): + + def __init__(self, channel: grpc.Channel): + super().__init__(channel) + self._stub: JobServiceStub = JobServiceStub(channel) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def inform_trusted_job_group(self, uuid: str, auth_status: AuthStatus) -> empty_pb2.Empty: + msg = InformTrustedJobGroupRequest(uuid=uuid, auth_status=auth_status.name) + return self._stub.InformTrustedJobGroup(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def update_trusted_job_group(self, uuid: str, algorithm_uuid: str) -> empty_pb2.Empty: + msg = UpdateTrustedJobGroupRequest(uuid=uuid, algorithm_uuid=algorithm_uuid) + return self._stub.UpdateTrustedJobGroup(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def delete_trusted_job_group(self, uuid: str) -> empty_pb2.Empty: + msg = DeleteTrustedJobGroupRequest(uuid=uuid) + return self._stub.DeleteTrustedJobGroup(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_trusted_job_group(self, uuid: str) -> GetTrustedJobGroupResponse: + msg = GetTrustedJobGroupRequest(uuid=uuid) + return self._stub.GetTrustedJobGroup(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_create) + def create_trusted_export_job(self, uuid: str, name: str, export_count: int, parent_uuid: str, + ticket_uuid: str) -> empty_pb2.Empty: + msg = CreateTrustedExportJobRequest(uuid=uuid, + 
name=name, + export_count=export_count, + parent_uuid=parent_uuid, + ticket_uuid=ticket_uuid) + return self._stub.CreateTrustedExportJob(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_model_job(self, uuid: str) -> ModelJobPb: + return self._stub.GetModelJob(request=GetModelJobRequest(uuid=uuid), timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def create_model_job(self, name: str, uuid: str, group_uuid: str, model_job_type: ModelJobType, + algorithm_type: AlgorithmType, global_config: ModelJobGlobalConfig, + version: int) -> empty_pb2.Empty: + request = CreateModelJobRequest(name=name, + uuid=uuid, + group_uuid=group_uuid, + model_job_type=model_job_type.name, + algorithm_type=algorithm_type.name, + global_config=global_config, + version=version) + return self._stub.CreateModelJob(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def inform_model_job(self, uuid: str, auth_status: AuthStatus) -> empty_pb2.Empty: + msg = InformModelJobRequest(uuid=uuid, auth_status=auth_status.name) + return self._stub.InformModelJob(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_model_job_group(self, uuid: str) -> ModelJobGroupPb: + return self._stub.GetModelJobGroup(request=GetModelJobGroupRequest(uuid=uuid), timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def inform_model_job_group(self, uuid: str, auth_status: AuthStatus) -> empty_pb2.Empty: + msg = InformModelJobGroupRequest(uuid=uuid, auth_status=auth_status.name) + return self._stub.InformModelJobGroup(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def update_model_job_group(self, + uuid: str, + auto_update_status: Optional[GroupAutoUpdateStatus] = None, + start_dataset_job_stage_uuid: Optional[str] = None) -> empty_pb2.Empty: + msg = UpdateModelJobGroupRequest(uuid=uuid, + auto_update_status=auto_update_status.name if auto_update_status else None, + start_dataset_job_stage_uuid=start_dataset_job_stage_uuid) + return self._stub.UpdateModelJobGroup(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_create) + def create_dataset_job_stage(self, + dataset_job_uuid: str, + dataset_job_stage_uuid: str, + name: str, + event_time: Optional[datetime] = None) -> empty_pb2.Empty: + request = CreateDatasetJobStageRequest(dataset_job_uuid=dataset_job_uuid, + dataset_job_stage_uuid=dataset_job_stage_uuid, + name=name, + event_time=to_timestamp(event_time) if event_time else None) + return self._stub.CreateDatasetJobStage(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_dataset_job_stage(self, dataset_job_stage_uuid: str) -> GetDatasetJobStageResponse: + msg = GetDatasetJobStageRequest(dataset_job_stage_uuid=dataset_job_stage_uuid) + return self._stub.GetDatasetJobStage(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_create) + def update_dataset_job_scheduler_state(self, uuid: str, + scheduler_state: DatasetJobSchedulerState) -> empty_pb2.Empty: + request = UpdateDatasetJobSchedulerStateRequest(uuid=uuid, scheduler_state=scheduler_state.name) + return self._stub.UpdateDatasetJobSchedulerState(request=request, 
timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_create) + def create_model_job_group(self, name: str, uuid: str, algorithm_type: AlgorithmType, dataset_uuid: str, + algorithm_project_list: AlgorithmProjectList): + request = CreateModelJobGroupRequest(name=name, + uuid=uuid, + algorithm_type=algorithm_type.name, + dataset_uuid=dataset_uuid, + algorithm_project_list=algorithm_project_list) + return self._stub.CreateModelJobGroup(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def inform_trusted_job(self, uuid: str, auth_status: AuthStatus) -> empty_pb2.Empty: + msg = InformTrustedJobRequest(uuid=uuid, auth_status=auth_status.name) + return self._stub.InformTrustedJob(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_trusted_job(self, uuid: str) -> GetTrustedJobResponse: + msg = GetTrustedJobRequest(uuid=uuid) + return self._stub.GetTrustedJob(request=msg, timeout=Envs.GRPC_CLIENT_TIMEOUT) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_client_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_client_test.py new file mode 100644 index 000000000..d077dccfa --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_client_test.py @@ -0,0 +1,428 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
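
The need_retry predicates above classify gRPC errors into transient ones worth retrying and definitive ones (NOT_FOUND for reads, INVALID_ARGUMENT for creates) that should surface immediately. A minimal sketch of a decorator compatible with those predicates, assuming the project's retry_fn in utils.decorators.retry behaves roughly like this:

```python
import functools
from typing import Callable


def retry_fn_sketch(retry_times: int, need_retry: Callable[[Exception], bool]):
    """Retries the wrapped call up to retry_times while need_retry(err) is True."""

    def decorator(func):

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(retry_times):
                try:
                    return func(*args, **kwargs)
                except Exception as e:  # pylint: disable=broad-except
                    # Re-raise on the last attempt or when the error is definitive.
                    if attempt == retry_times - 1 or not need_retry(e):
                        raise

        return wrapper

    return decorator
```
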
+# + +from datetime import datetime +import unittest +import grpc +import grpc_testing +from google.protobuf.empty_pb2 import Empty +from google.protobuf.descriptor import ServiceDescriptor +from testing.rpc.client import RpcClientTestCase +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobConfig, DatasetJobGlobalConfigs, DatasetJobStage +from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2 +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.mmgr.models import ModelJobType, GroupAutoUpdateStatus +from fedlearner_webconsole.dataset.models import DatasetJobSchedulerState +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, AlgorithmProjectList, ModelJobPb, ModelJobGroupPb +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import CreateModelJobRequest, InformTrustedJobGroupRequest, \ + UpdateTrustedJobGroupRequest, DeleteTrustedJobGroupRequest, GetTrustedJobGroupRequest, \ + GetTrustedJobGroupResponse, CreateDatasetJobStageRequest, GetDatasetJobStageRequest, GetDatasetJobStageResponse, \ + CreateModelJobGroupRequest, GetModelJobRequest, GetModelJobGroupRequest, InformModelJobGroupRequest, \ + InformTrustedJobRequest, GetTrustedJobRequest, GetTrustedJobResponse, CreateTrustedExportJobRequest, \ + UpdateDatasetJobSchedulerStateRequest, UpdateModelJobGroupRequest, InformModelJobRequest + +_SERVICE_DESCRIPTOR: ServiceDescriptor = job_service_pb2.DESCRIPTOR.services_by_name['JobService'] + + +class JobServiceClientTest(RpcClientTestCase): + + def setUp(self): + super().setUp() + self._fake_channel: grpc_testing.Channel = grpc_testing.channel([_SERVICE_DESCRIPTOR], + grpc_testing.strict_real_time()) + self._client = JobServiceClient(self._fake_channel) + + def test_inform_trusted_job_group(self): + call = self.client_execution_pool.submit(self._client.inform_trusted_job_group, + uuid='uuid', + auth_status=AuthStatus.AUTHORIZED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['InformTrustedJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, InformTrustedJobGroupRequest(uuid='uuid', auth_status='AUTHORIZED')) + self.assertEqual(call.result(), expected_response) + + def test_update_trusted_job_group(self): + call = self.client_execution_pool.submit(self._client.update_trusted_job_group, + uuid='uuid', + algorithm_uuid='algorithm-uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['UpdateTrustedJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, UpdateTrustedJobGroupRequest(uuid='uuid', algorithm_uuid='algorithm-uuid')) + self.assertEqual(call.result(), expected_response) + + def test_delete_trusted_job_group(self): + call = self.client_execution_pool.submit(self._client.delete_trusted_job_group, uuid='uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + 
_SERVICE_DESCRIPTOR.methods_by_name['DeleteTrustedJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, DeleteTrustedJobGroupRequest(uuid='uuid')) + self.assertEqual(call.result(), expected_response) + + def test_get_trusted_job_group(self): + call = self.client_execution_pool.submit(self._client.get_trusted_job_group, uuid='uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['GetTrustedJobGroup']) + expected_response = GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetTrustedJobGroupRequest(uuid='uuid')) + self.assertEqual(call.result(), expected_response) + + def test_get_trusted_job(self): + call = self.client_execution_pool.submit(self._client.get_trusted_job, uuid='uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['GetTrustedJob']) + expected_response = GetTrustedJobResponse(auth_status=AuthStatus.WITHDRAW.name) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetTrustedJobRequest(uuid='uuid')) + self.assertEqual(call.result(), expected_response) + + def test_get_model_job(self): + call = self.client_execution_pool.submit(self._client.get_model_job, uuid='uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['GetModelJob']) + expected_response = ModelJobPb(name='name') + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetModelJobRequest(uuid='uuid')) + self.assertEqual(call.result(), expected_response) + + def test_create_model_job(self): + call = self.client_execution_pool.submit(self._client.create_model_job, + name='name', + uuid='uuid', + group_uuid='group_uuid', + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + global_config=ModelJobGlobalConfig(), + version=3) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CreateModelJob']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + CreateModelJobRequest(name='name', + uuid='uuid', + group_uuid='group_uuid', + model_job_type='TRAINING', + algorithm_type='NN_VERTICAL', + global_config=ModelJobGlobalConfig(), + version=3)) + self.assertEqual(call.result(), expected_response) + + def test_inform_model_job(self): + call = self.client_execution_pool.submit(self._client.inform_model_job, + uuid='uuid', + auth_status=AuthStatus.AUTHORIZED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['InformModelJob']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, InformModelJobRequest(uuid='uuid', auth_status=AuthStatus.AUTHORIZED.name)) + self.assertEqual(call.result(), expected_response) + + def test_create_dataset_job_stage(self): + event_time = datetime(2022, 1, 
1) + call = self.client_execution_pool.submit(self._client.create_dataset_job_stage, + dataset_job_uuid='dataset_job_uuid', + dataset_job_stage_uuid='dataset_job_stage_uuid', + name='20220101', + event_time=event_time) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CreateDatasetJobStage']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + CreateDatasetJobStageRequest( + dataset_job_uuid='dataset_job_uuid', + dataset_job_stage_uuid='dataset_job_stage_uuid', + name='20220101', + event_time=to_timestamp(event_time), + )) + self.assertEqual(call.result(), expected_response) + + # test event_time is None + call = self.client_execution_pool.submit(self._client.create_dataset_job_stage, + dataset_job_uuid='dataset_job_uuid', + dataset_job_stage_uuid='dataset_job_stage_uuid', + name='20220101', + event_time=None) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CreateDatasetJobStage']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + CreateDatasetJobStageRequest(dataset_job_uuid='dataset_job_uuid', + dataset_job_stage_uuid='dataset_job_stage_uuid', + name='20220101')) + self.assertEqual(call.result(), expected_response) + + def test_get_dataset_job_stage(self): + call = self.client_execution_pool.submit(self._client.get_dataset_job_stage, + dataset_job_stage_uuid='dataset_job_stage_uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['GetDatasetJobStage']) + dataset_job_stage = DatasetJobStage( + id=1, + uuid='fake stage uuid', + name='test_dataset_job_stage', + dataset_job_uuid='fake job uuid', + global_configs=DatasetJobGlobalConfigs( + global_configs={'test_domain': DatasetJobConfig(dataset_uuid='dataset uuid', variables=[])}), + workflow_definition=WorkflowDefinition(group_alias='fake template', variables=[], job_definitions=[])) + expected_response = GetDatasetJobStageResponse(dataset_job_stage=dataset_job_stage) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetDatasetJobStageRequest(dataset_job_stage_uuid='dataset_job_stage_uuid')) + self.assertEqual(call.result(), expected_response) + + def test_update_dataset_job_scheduler_state(self): + call = self.client_execution_pool.submit(self._client.update_dataset_job_scheduler_state, + uuid='dataset_job_uuid', + scheduler_state=DatasetJobSchedulerState.RUNNABLE) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['UpdateDatasetJobSchedulerState']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, + UpdateDatasetJobSchedulerStateRequest( + uuid='dataset_job_uuid', + scheduler_state='RUNNABLE', + )) + self.assertEqual(call.result(), expected_response) + + def test_get_model_job_group(self): + call = self.client_execution_pool.submit(self._client.get_model_job_group, uuid='uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['GetModelJobGroup']) + 
expected_response = ModelJobGroupPb(name='12') + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetModelJobGroupRequest(uuid='uuid')) + self.assertEqual(call.result(), expected_response) + + def test_inform_model_job_group(self): + call = self.client_execution_pool.submit(self._client.inform_model_job_group, + uuid='uuid', + auth_status=AuthStatus.AUTHORIZED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['InformModelJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, InformModelJobGroupRequest(uuid='uuid', auth_status=AuthStatus.AUTHORIZED.name)) + self.assertEqual(call.result(), expected_response) + + def test_update_model_job_group(self): + call = self.client_execution_pool.submit(self._client.update_model_job_group, + uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.ACTIVE, + start_dataset_job_stage_uuid='stage_uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['UpdateModelJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + UpdateModelJobGroupRequest(uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.ACTIVE.name, + start_dataset_job_stage_uuid='stage_uuid')) + self.assertEqual(call.result(), expected_response) + call = self.client_execution_pool.submit(self._client.update_model_job_group, + uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.STOPPED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['UpdateModelJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + UpdateModelJobGroupRequest(uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.STOPPED.name, + start_dataset_job_stage_uuid=None)) + self.assertEqual(call.result(), expected_response) + call = self.client_execution_pool.submit(self._client.update_model_job_group, uuid='uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['UpdateModelJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, UpdateModelJobGroupRequest(uuid='uuid', auto_update_status=None, + start_dataset_job_stage_uuid=None)) + self.assertEqual(call.result(), expected_response) + + def test_create_model_job_group(self): + call = self.client_execution_pool.submit( + self._client.create_model_job_group, + name='name', + uuid='uuid', + algorithm_type=AlgorithmType.NN_VERTICAL, + dataset_uuid='uuid', + algorithm_project_list=AlgorithmProjectList(algorithm_projects={'test': 'uuid'})) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CreateModelJobGroup']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + CreateModelJobGroupRequest( + name='name', + uuid='uuid', 
+ algorithm_type='NN_VERTICAL', + dataset_uuid='uuid', + algorithm_project_list=AlgorithmProjectList(algorithm_projects={'test': 'uuid'}))) + self.assertEqual(call.result(), expected_response) + + def test_inform_trusted_job(self): + call = self.client_execution_pool.submit(self._client.inform_trusted_job, + uuid='uuid', + auth_status=AuthStatus.AUTHORIZED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['InformTrustedJob']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, InformTrustedJobRequest(uuid='uuid', auth_status=AuthStatus.AUTHORIZED.name)) + self.assertEqual(call.result(), expected_response) + + def test_create_trusted_export_job(self): + call = self.client_execution_pool.submit(self._client.create_trusted_export_job, + uuid='uuid1', + name='V1-domain1-1', + export_count=1, + parent_uuid='uuid2', + ticket_uuid='ticket uuid') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CreateTrustedExportJob']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + CreateTrustedExportJobRequest(uuid='uuid1', + name='V1-domain1-1', + export_count=1, + parent_uuid='uuid2', + ticket_uuid='ticket uuid')) + self.assertEqual(call.result(), expected_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_server.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_server.py new file mode 100644 index 000000000..096034549 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_server.py @@ -0,0 +1,395 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
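
All of the client tests above follow the same grpc_testing handshake, which the server tests below trade for a real in-process server. The stub call blocks until the RPC terminates, so it runs on a worker thread while the test plays the server's role. A condensed outline, with names as in JobServiceClientTest above:

```python
# 1) Start the blocking client call on a worker thread.
call = self.client_execution_pool.submit(self._client.get_trusted_job, uuid='uuid')
# 2) Play the server: pull the in-flight RPC off the fake channel.
invocation_metadata, request, rpc = self._fake_channel.take_unary_unary(
    _SERVICE_DESCRIPTOR.methods_by_name['GetTrustedJob'])
# 3) Terminate it with a canned response, then assert on both request and result.
rpc.terminate(response=GetTrustedJobResponse(), code=grpc.StatusCode.OK,
              trailing_metadata=(), details=None)
self.assertEqual(request, GetTrustedJobRequest(uuid='uuid'))
self.assertEqual(call.result(), GetTrustedJobResponse())
```
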
+# + +import logging +import grpc +from grpc import ServicerContext +from google.protobuf import empty_pb2 +import sqlalchemy +from fedlearner_webconsole.dataset.auth_service import AuthService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2_grpc +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import CreateModelJobRequest, InformTrustedJobGroupRequest, \ + UpdateTrustedJobGroupRequest, DeleteTrustedJobGroupRequest, GetTrustedJobGroupRequest, \ + GetTrustedJobGroupResponse, CreateDatasetJobStageRequest, GetDatasetJobStageRequest, GetDatasetJobStageResponse, \ + CreateModelJobGroupRequest, GetModelJobGroupRequest, GetModelJobRequest, InformModelJobGroupRequest, \ + InformTrustedJobRequest, GetTrustedJobRequest, GetTrustedJobResponse, CreateTrustedExportJobRequest, \ + UpdateDatasetJobSchedulerStateRequest, UpdateModelJobGroupRequest, InformModelJobRequest +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobPb, ModelJobGroupPb +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.tee.services import TrustedJobGroupService, TrustedJobService +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobStatus, TrustedJobType +from fedlearner_webconsole.mmgr.models import ModelJobRole, ModelJobType, AlgorithmType, ModelJobGroup, ModelJob, \ + GroupCreateStatus, GroupAutoUpdateStatus +from fedlearner_webconsole.mmgr.service import ModelJobService, ModelJobGroupService +from fedlearner_webconsole.rpc.v2.utils import get_grpc_context_info +from fedlearner_webconsole.dataset.job_configer.dataset_job_configer import DatasetJobConfiger +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobSchedulerState, DatasetJobStage, Dataset +from fedlearner_webconsole.dataset.services import DatasetService +from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController +from fedlearner_webconsole.dataset.services import DatasetJobService +from fedlearner_webconsole.utils.pp_datetime import from_timestamp +from fedlearner_webconsole.utils.proto import remove_secrets +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.exceptions import NotFoundException +from fedlearner_webconsole.audit.decorators import emits_rpc_event +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper + + +class JobServiceServicer(job_service_pb2_grpc.JobServiceServicer): + + @emits_rpc_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, + op_type=Event.OperationType.INFORM, + resource_name_fn=lambda request: request.uuid) + def InformTrustedJobGroup(self, request: InformTrustedJobGroupRequest, context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + group: TrustedJobGroup = session.query(TrustedJobGroup).populate_existing().with_for_update().filter_by( + project_id=project_id, uuid=request.uuid).first() + if group is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'trusted job group {request.uuid} not found') + try: + auth_status = AuthStatus[request.auth_status] + except KeyError: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'auth_status {request.auth_status} is invalid') + unauth_set = set(group.get_unauth_participant_ids()) + if auth_status == 
AuthStatus.AUTHORIZED: + unauth_set.discard(client_id) + else: + unauth_set.add(client_id) + group.set_unauth_participant_ids(list(unauth_set)) + session.commit() + return empty_pb2.Empty() + + @emits_rpc_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, + op_type=Event.OperationType.UPDATE, + resource_name_fn=lambda request: request.uuid) + def UpdateTrustedJobGroup(self, request: UpdateTrustedJobGroupRequest, context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + group: TrustedJobGroup = session.query(TrustedJobGroup).filter_by(project_id=project_id, + uuid=request.uuid).first() + if group is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'trusted job group {request.uuid} not found') + if client_id != group.coordinator_id: + context.abort(grpc.StatusCode.PERMISSION_DENIED, 'only coordinator can update algorithm') + try: + algorithm = AlgorithmFetcher(project_id).get_algorithm(request.algorithm_uuid) + old_algorithm = AlgorithmFetcher(project_id).get_algorithm(group.algorithm_uuid) + if algorithm.algorithm_project_uuid != old_algorithm.algorithm_project_uuid: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, 'upstream algorithm project mismatch') + except NotFoundException as e: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, e.message) + group.algorithm_uuid = request.algorithm_uuid + session.commit() + return empty_pb2.Empty() + + @emits_rpc_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, + op_type=Event.OperationType.DELETE, + resource_name_fn=lambda request: request.uuid) + def DeleteTrustedJobGroup(self, request: DeleteTrustedJobGroupRequest, context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + group: TrustedJobGroup = session.query(TrustedJobGroup).filter_by(project_id=project_id, + uuid=request.uuid).first() + if group is None: + return empty_pb2.Empty() + if client_id != group.coordinator_id: + context.abort(grpc.StatusCode.PERMISSION_DENIED, 'only coordinator can delete the trusted job group') + if not group.is_deletable(): + context.abort(grpc.StatusCode.FAILED_PRECONDITION, 'trusted job is not deletable') + TrustedJobGroupService(session).delete(group) + session.commit() + return empty_pb2.Empty() + + def GetTrustedJobGroup(self, request: GetTrustedJobGroupRequest, + context: ServicerContext) -> GetTrustedJobGroupResponse: + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + group: TrustedJobGroup = session.query(TrustedJobGroup).filter_by(project_id=project_id, + uuid=request.uuid).first() + if group is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'trusted job group {request.uuid} not found') + return GetTrustedJobGroupResponse(auth_status=group.auth_status.name) + + def GetModelJob(self, request: GetModelJobRequest, context: ServicerContext) -> ModelJobPb: + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + model_job: ModelJob = session.query(ModelJob).filter_by(uuid=request.uuid).first() + if model_job is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'model job with uuid {request.uuid} is not found') + return remove_secrets(model_job.to_proto()) + + @emits_rpc_event(resource_type=Event.ResourceType.MODEL_JOB, + op_type=Event.OperationType.CREATE, + resource_name_fn=lambda request: request.uuid) + def CreateModelJob(self, request: CreateModelJobRequest, context: 
ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + if session.query(ModelJob).filter_by(uuid=request.uuid).first() is not None: + return empty_pb2.Empty() + if session.query(ModelJob).filter_by(name=request.name).first() is not None: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'model job {request.name} already exist') + group = session.query(ModelJobGroup).filter_by(uuid=request.group_uuid).first() + if group is None: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'model job group {request.group_uuid} not found') + model_job_type = ModelJobType[request.model_job_type] + if model_job_type in [ModelJobType.TRAINING] and group.latest_version >= request.version: + context.abort( + grpc.StatusCode.INVALID_ARGUMENT, f'the latest version of model group {group.name} ' + f'is larger than or equal to the given version') + service = ModelJobService(session) + algorithm_type = AlgorithmType[request.algorithm_type] + data_batch_id = None + dataset_job_stage_uuid = request.global_config.dataset_job_stage_uuid + if dataset_job_stage_uuid != '': + dataset_job_stage = session.query(DatasetJobStage).filter_by(uuid=dataset_job_stage_uuid).first() + data_batch_id = dataset_job_stage.data_batch_id + model_job = service.create_model_job(name=request.name, + uuid=request.uuid, + group_id=group.id, + project_id=project_id, + role=ModelJobRole.PARTICIPANT, + model_job_type=model_job_type, + algorithm_type=algorithm_type, + coordinator_id=client_id, + data_batch_id=data_batch_id, + global_config=request.global_config, + version=request.version) + if model_job_type in [ModelJobType.TRAINING]: + group.latest_version = model_job.version + session.commit() + return empty_pb2.Empty() + + @emits_rpc_event(resource_type=Event.ResourceType.MODEL_JOB, + op_type=Event.OperationType.INFORM, + resource_name_fn=lambda request: request.uuid) + def InformModelJob(self, request: InformModelJobRequest, context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + model_job: ModelJob = session.query(ModelJob).populate_existing().with_for_update().filter_by( + project_id=project_id, uuid=request.uuid).first() + if model_job is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'model job {request.uuid} is not found') + try: + auth_status = AuthStatus[request.auth_status] + except KeyError: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'auth_status {request.auth_status} is invalid') + pure_domain_name = session.query(Participant).get(client_id).pure_domain_name() + participants_info = model_job.get_participants_info() + participants_info.participants_map[pure_domain_name].auth_status = auth_status.name + model_job.set_participants_info(participants_info) + session.commit() + return empty_pb2.Empty() + + @emits_rpc_event(resource_type=Event.ResourceType.DATASET_JOB_STAGE, + op_type=Event.OperationType.CREATE, + resource_name_fn=lambda request: request.dataset_job_uuid) + def CreateDatasetJobStage(self, request: CreateDatasetJobStageRequest, context: ServicerContext) -> empty_pb2.Empty: + try: + with db.session_scope() as session: + # we set isolation_level to SERIALIZABLE to make sure state won't be changed within this session + session.connection(execution_options={'isolation_level': 'SERIALIZABLE'}) + _, client_id = get_grpc_context_info(session, context) + dataset_job: DatasetJob = session.query(DatasetJob).filter( + DatasetJob.uuid == 
request.dataset_job_uuid).first()
+                if dataset_job is None:
+                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
+                                  f'dataset_job {request.dataset_job_uuid} is not found')
+                if dataset_job.output_dataset is None:
+                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
+                                  f'output dataset is not found, dataset_job uuid: {request.dataset_job_uuid}')
+                # check authorization
+                if not AuthService(session=session, dataset_job=dataset_job).check_local_authorized():
+                    message = '[CreateDatasetJobStage] still waiting for authorization, ' \
+                              f'dataset_job_uuid: {request.dataset_job_uuid}'
+                    logging.warning(message)
+                    context.abort(grpc.StatusCode.PERMISSION_DENIED, message)
+                event_time = from_timestamp(request.event_time) if request.event_time else None
+                # check data_batch ready
+                data_batch = DatasetService(session).get_data_batch(dataset=dataset_job.input_dataset,
+                                                                    event_time=event_time)
+                if data_batch is None or not data_batch.is_available():
+                    message = '[CreateDatasetJobStage] input_dataset data_batch is not ready, ' \
+                              f'dataset_job uuid: {request.dataset_job_uuid}'
+                    logging.warning(message)
+                    context.abort(grpc.StatusCode.FAILED_PRECONDITION, message)
+                DatasetJobStageLocalController(session=session).create_data_batch_and_job_stage_as_participant(
+                    dataset_job_id=dataset_job.id,
+                    coordinator_id=client_id,
+                    uuid=request.dataset_job_stage_uuid,
+                    name=request.name,
+                    event_time=event_time)
+                session.commit()
+        except sqlalchemy.exc.OperationalError:
+            # catches deadlock-style conflicts; skip this round and let a later pass retry
+            logging.warning('[create dataset job stage rpc]: [SKIP] caught operational error in session', exc_info=True)
+        return empty_pb2.Empty()
+
+    def GetDatasetJobStage(self, request: GetDatasetJobStageRequest,
+                           context: ServicerContext) -> GetDatasetJobStageResponse:
+        with db.session_scope() as session:
+            dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).filter(
+                DatasetJobStage.uuid == request.dataset_job_stage_uuid).first()
+            if dataset_job_stage is None:
+                context.abort(code=grpc.StatusCode.NOT_FOUND,
+                              details=f'could not find dataset_job_stage {request.dataset_job_stage_uuid}')
+            dataset_job_stage_proto = dataset_job_stage.to_proto()
+            dataset_job_stage_proto.workflow_definition.MergeFrom(
+                DatasetJobConfiger.from_kind(dataset_job_stage.dataset_job.kind, session).get_config())
+            return GetDatasetJobStageResponse(dataset_job_stage=dataset_job_stage_proto)
+
+    def UpdateDatasetJobSchedulerState(self, request: UpdateDatasetJobSchedulerStateRequest,
+                                       context: ServicerContext) -> empty_pb2.Empty:
+        with db.session_scope() as session:
+            project_id, _ = get_grpc_context_info(session, context)
+            dataset_job: DatasetJob = session.query(DatasetJob).filter(DatasetJob.project_id == project_id).filter(
+                DatasetJob.uuid == request.uuid).first()
+            if dataset_job is None:
+                context.abort(code=grpc.StatusCode.NOT_FOUND, details=f'could not find dataset_job {request.uuid}')
+            if request.scheduler_state == DatasetJobSchedulerState.RUNNABLE.name:
+                DatasetJobService(session=session).start_cron_scheduler(dataset_job=dataset_job)
+            elif request.scheduler_state == DatasetJobSchedulerState.STOPPED.name:
+                DatasetJobService(session=session).stop_cron_scheduler(dataset_job=dataset_job)
+            else:
+                context.abort(code=grpc.StatusCode.INVALID_ARGUMENT,
+                              details='scheduler state must be in [RUNNABLE, STOPPED]')
+            session.commit()
+        return empty_pb2.Empty()
+
+    def GetModelJobGroup(self, request: GetModelJobGroupRequest, context: ServicerContext) -> ModelJobGroupPb:
+        with db.session_scope() as session:
+            project_id, _ =
get_grpc_context_info(session, context)
+            group = session.query(ModelJobGroup).filter_by(uuid=request.uuid).first()
+            if group is None:
+                context.abort(grpc.StatusCode.NOT_FOUND, f'model job group with uuid {request.uuid} is not found')
+            return remove_secrets(group.to_proto())
+
+    @emits_rpc_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP,
+                     op_type=Event.OperationType.INFORM,
+                     resource_name_fn=lambda request: request.uuid)
+    def InformModelJobGroup(self, request: InformModelJobGroupRequest, context: ServicerContext) -> empty_pb2.Empty:
+        with db.session_scope() as session:
+            project_id, client_id = get_grpc_context_info(session, context)
+            group: ModelJobGroup = session.query(ModelJobGroup).populate_existing().with_for_update().filter_by(
+                project_id=project_id, uuid=request.uuid).first()
+            if group is None:
+                context.abort(grpc.StatusCode.NOT_FOUND, f'model job group {request.uuid} is not found')
+            try:
+                auth_status = AuthStatus[request.auth_status]
+            except KeyError:
+                context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'auth_status {request.auth_status} is invalid')
+            pure_domain_name = session.query(Participant).get(client_id).pure_domain_name()
+            participants_info = group.get_participants_info()
+            participants_info.participants_map[pure_domain_name].auth_status = auth_status.name
+            group.set_participants_info(participants_info)
+            session.commit()
+        return empty_pb2.Empty()
+
+    def UpdateModelJobGroup(self, request: UpdateModelJobGroupRequest, context: ServicerContext) -> empty_pb2.Empty:
+        with db.session_scope() as session:
+            project_id, _ = get_grpc_context_info(session, context)
+            group: ModelJobGroup = session.query(ModelJobGroup).populate_existing().with_for_update().filter_by(
+                project_id=project_id, uuid=request.uuid).first()
+            if group is None:
+                context.abort(grpc.StatusCode.NOT_FOUND, f'model job group {request.uuid} is not found')
+            if request.auto_update_status != '':
+                try:
+                    auto_update_status = GroupAutoUpdateStatus[request.auto_update_status]
+                except KeyError:
+                    context.abort(grpc.StatusCode.INVALID_ARGUMENT,
+                                  f'auto_update_status {request.auto_update_status} is invalid')
+                group.auto_update_status = auto_update_status
+            if request.start_dataset_job_stage_uuid != '':
+                dataset_job_stage = session.query(DatasetJobStage).filter_by(
+                    uuid=request.start_dataset_job_stage_uuid).first()
+                group.start_data_batch_id = dataset_job_stage.data_batch_id
+            session.commit()
+        return empty_pb2.Empty()
+
+    @emits_rpc_event(resource_type=Event.ResourceType.MODEL_JOB_GROUP,
+                     op_type=Event.OperationType.CREATE,
+                     resource_name_fn=lambda request: request.uuid)
+    def CreateModelJobGroup(self, request: CreateModelJobGroupRequest, context: ServicerContext) -> empty_pb2.Empty:
+        with db.session_scope() as session:
+            project_id, client_id = get_grpc_context_info(session, context)
+            group = session.query(ModelJobGroup).filter_by(uuid=request.uuid).first()
+            if group is not None:
+                return empty_pb2.Empty()
+            dataset = session.query(Dataset).filter_by(uuid=request.dataset_uuid).first()
+            if dataset is None:
+                context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'dataset with uuid {request.dataset_uuid} is not found')
+            service = ModelJobGroupService(session)
+            algorithm_type = AlgorithmType[request.algorithm_type]
+            group = service.create_group(name=request.name,
+                                         uuid=request.uuid,
+                                         project_id=project_id,
+                                         role=ModelJobRole.PARTICIPANT,
+                                         dataset_id=dataset.id,
+                                         algorithm_type=algorithm_type,
+                                         algorithm_project_list=request.algorithm_project_list,
+                                         coordinator_id=client_id)
+
group.status = GroupCreateStatus.SUCCEEDED + session.add(group) + session.commit() + return empty_pb2.Empty() + + def InformTrustedJob(self, request: InformTrustedJobRequest, context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + trusted_job: TrustedJob = session.query(TrustedJob).filter_by(project_id=project_id, + uuid=request.uuid).first() + if trusted_job is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'trusted job {request.uuid} not found') + try: + auth_status = AuthStatus[request.auth_status] + except KeyError: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'auth_status {request.auth_status} is invalid') + pure_domain_name = session.query(Participant).get(client_id).pure_domain_name() + participants_info = trusted_job.get_participants_info() + participants_info.participants_map[pure_domain_name].auth_status = auth_status.name + trusted_job.set_participants_info(participants_info) + session.commit() + return empty_pb2.Empty() + + def GetTrustedJob(self, request: GetTrustedJobRequest, context: ServicerContext) -> GetTrustedJobResponse: + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + trusted_job: TrustedJob = session.query(TrustedJob).filter_by(project_id=project_id, + uuid=request.uuid).first() + if trusted_job is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'trusted job {request.uuid} not found') + return GetTrustedJobResponse(auth_status=trusted_job.auth_status.name) + + def CreateTrustedExportJob(self, request: CreateTrustedExportJobRequest, + context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, context) + validate = get_ticket_helper(session).validate_ticket(request.ticket_uuid, + lambda ticket: ticket.details.uuid == request.uuid) + if not validate: + context.abort(grpc.StatusCode.PERMISSION_DENIED, f'ticket {request.ticket_uuid} is not validated') + tee_analyze_job = session.query(TrustedJob).filter_by(project_id=project_id, + type=TrustedJobType.ANALYZE, + uuid=request.parent_uuid).first() + if tee_analyze_job is None or tee_analyze_job.get_status() != TrustedJobStatus.SUCCEEDED: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'tee_analyze_job {request.parent_uuid} invalid') + TrustedJobService(session).create_external_export(request.uuid, request.name, client_id, + request.export_count, request.ticket_uuid, + tee_analyze_job) + session.commit() + return empty_pb2.Empty() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_server_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_server_test.py new file mode 100644 index 000000000..0984fb3da --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/job_service_server_test.py @@ -0,0 +1,881 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
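
CreateDatasetJobStage above pins its session to SERIALIZABLE isolation and treats sqlalchemy.exc.OperationalError as a retryable conflict rather than a failure. A condensed sketch of that pattern, simplified from the real handler (db.session_scope is the project's session factory):

```python
import logging

import sqlalchemy

from fedlearner_webconsole.db import db

try:
    with db.session_scope() as session:
        # Pin this session's connection to SERIALIZABLE so that concurrent
        # writers conflict loudly instead of interleaving silently.
        session.connection(execution_options={'isolation_level': 'SERIALIZABLE'})
        # ... read, validate preconditions, create the job stage ...
        session.commit()
except sqlalchemy.exc.OperationalError:
    # Serialization failures and deadlocks surface as OperationalError;
    # log, skip this round, and let a later scheduler pass retry.
    logging.warning('skipped due to a serialization conflict', exc_info=True)
```
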
+# + +from datetime import datetime, timedelta +import unittest +from unittest.mock import patch, MagicMock +import grpc +from google.protobuf.empty_pb2 import Empty +from concurrent import futures +from testing.dataset import FakeDatasetJobConfiger +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.rpc.client import FakeRpcError +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.rpc.auth import SSL_CLIENT_SUBJECT_DN_HEADER, PROJECT_NAME_HEADER +from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2_grpc +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.rpc.v2.job_service_server import JobServiceServicer +from fedlearner_webconsole.rpc.v2.utils import get_grpc_context_info +from fedlearner_webconsole.dataset.models import DataBatch, Dataset, DatasetJob, DatasetJobKind, DatasetJobStage, \ + DatasetJobState, DatasetKindV2, DatasetType, DatasetJobSchedulerState +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobStatus, TrustedJobType +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, AlgorithmType +from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobGroup, ModelJobType, ModelJobRole, ModelJobStatus, \ + AuthStatus as ModelAuthStatus, GroupAutoUpdateStatus +from fedlearner_webconsole.job.models import Job, JobType +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, ModelJobConfig, AlgorithmProjectList, \ + ModelJobPb, ModelJobGroupPb +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import CreateModelJobRequest, InformTrustedJobGroupRequest, \ + UpdateTrustedJobGroupRequest, DeleteTrustedJobGroupRequest, GetTrustedJobGroupRequest, \ + GetTrustedJobGroupResponse, CreateDatasetJobStageRequest, GetDatasetJobStageRequest, CreateModelJobGroupRequest, \ + GetModelJobRequest, GetModelJobGroupRequest, InformModelJobGroupRequest, InformTrustedJobRequest, \ + GetTrustedJobRequest, GetTrustedJobResponse, CreateTrustedExportJobRequest, UpdateDatasetJobSchedulerStateRequest, \ + UpdateModelJobGroupRequest, InformModelJobRequest +from fedlearner_webconsole.review.common import NO_CENTRAL_SERVER_UUID + + +class FakeContext: + + def __init__(self, metadata): + self._metadata = metadata + + def invocation_metadata(self): + return self._metadata + + +class GetGrpcContextInfoTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='proj-name') + participant = Participant(id=1, name='part2', domain_name='fl-domain2.com') + session.add_all([project, participant]) + session.commit() + + def test_get_grpc_context_info(self): + metadata = ((SSL_CLIENT_SUBJECT_DN_HEADER, + 'CN=domain2.fedlearner.net,OU=security,O=security,L=beijing,ST=beijing,C=CN'), + (PROJECT_NAME_HEADER, 'proj-name')) + # since interceptor has already validated the info, only test happy case + with db.session_scope() as session: + project_id, client_id = get_grpc_context_info(session, FakeContext(metadata)) + self.assertEqual(project_id, 1) + self.assertEqual(client_id, 1) + + +class 
JobServiceTest(NoWebServerTestCase):
+    LISTEN_PORT = 2000
+
+    def setUp(self):
+        super().setUp()
+        self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=20))
+        job_service_pb2_grpc.add_JobServiceServicer_to_server(JobServiceServicer(), self._server)
+        self._server.add_insecure_port(f'[::]:{self.LISTEN_PORT}')
+        self._server.start()
+        self._channel = grpc.insecure_channel(target=f'localhost:{self.LISTEN_PORT}')
+        self._stub = job_service_pb2_grpc.JobServiceStub(self._channel)
+
+    def tearDown(self):
+        self._channel.close()
+        self._server.stop(5)
+        return super().tearDown()
+
+    @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info')
+    def test_inform_trusted_job_group(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        with db.session_scope() as session:
+            project = Project(id=1, name='proj-name')
+            participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com')
+            participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com')
+            group = TrustedJobGroup(id=1, name='group', uuid='uuid', project_id=1, coordinator_id=0)
+            group.set_unauth_participant_ids([1, 2])
+            session.add_all([project, participant1, participant2, group])
+            session.commit()
+        # authorize
+        self._stub.InformTrustedJobGroup(InformTrustedJobGroupRequest(uuid='uuid', auth_status='AUTHORIZED'))
+        with db.session_scope() as session:
+            group = session.query(TrustedJobGroup).filter_by(uuid='uuid').first()
+            self.assertCountEqual(group.get_unauth_participant_ids(), [2])
+        # pend
+        self._stub.InformTrustedJobGroup(InformTrustedJobGroupRequest(uuid='uuid', auth_status='PENDING'))
+        with db.session_scope() as session:
+            group = session.query(TrustedJobGroup).filter_by(uuid='uuid').first()
+            self.assertCountEqual(group.get_unauth_participant_ids(), [1, 2])
+        # fail due to group uuid not found
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.InformTrustedJobGroup(InformTrustedJobGroupRequest(uuid='not-exist', auth_status='AUTHORIZED'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND)
+        # fail due to invalid auth status
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.InformTrustedJobGroup(InformTrustedJobGroupRequest(uuid='uuid', auth_status='AUTHORIZE'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT)
+
+    @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant')
+    @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info')
+    def test_update_trusted_job_group(self, mock_get_grpc_context_info: MagicMock, mock_get_algorithm: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        mock_get_algorithm.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'not found')
+        with db.session_scope() as session:
+            project = Project(id=1, name='proj-name')
+            participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com')
+            algorithm_proj1 = AlgorithmProject(id=1, uuid='algorithm-proj-uuid1')
+            algorithm_proj2 = AlgorithmProject(id=2, uuid='algorithm-proj-uuid2')
+            algorithm1 = Algorithm(id=1, algorithm_project_id=1, uuid='algorithm-uuid1')
+            algorithm2 = Algorithm(id=2, algorithm_project_id=1, uuid='algorithm-uuid2')
+            algorithm3 = Algorithm(id=3, algorithm_project_id=2, uuid='algorithm-uuid3')
+            group1 = TrustedJobGroup(id=1,
+                                     name='group1',
+                                     uuid='uuid1',
+                                     project_id=1,
+                                     algorithm_uuid='algorithm-uuid1',
+                                     coordinator_id=1)
+            group2 = TrustedJobGroup(id=2,
+                                     name='group2',
+                                     uuid='uuid2',
+                                     
project_id=1, + algorithm_uuid='algorithm-uuid1', + coordinator_id=0) + session.add_all([ + project, participant1, algorithm_proj1, algorithm_proj2, algorithm1, algorithm2, algorithm3, group1, + group2 + ]) + session.commit() + self._stub.UpdateTrustedJobGroup(UpdateTrustedJobGroupRequest(uuid='uuid1', algorithm_uuid='algorithm-uuid2')) + with db.session_scope() as session: + group = session.query(TrustedJobGroup).filter_by(uuid='uuid1').first() + self.assertEqual(group.algorithm_uuid, 'algorithm-uuid2') + # fail due to group uuid not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateTrustedJobGroup( + UpdateTrustedJobGroupRequest(uuid='not-exist', algorithm_uuid='algorithm-uuid2')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + # fail due to client not coordinator + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateTrustedJobGroup( + UpdateTrustedJobGroupRequest(uuid='uuid2', algorithm_uuid='algorithm-uuid2')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED) + # fail due to algorithm not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateTrustedJobGroup( + UpdateTrustedJobGroupRequest(uuid='uuid1', algorithm_uuid='algorithm-not-exist')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + # fail due to mismatched algorithm project + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateTrustedJobGroup( + UpdateTrustedJobGroupRequest(uuid='uuid1', algorithm_uuid='algorithm-uuid3')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_delete_trusted_job_group(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + project = Project(id=1, name='proj-name') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + group1 = TrustedJobGroup(id=1, uuid='uuid1', project_id=1, coordinator_id=1) + group2 = TrustedJobGroup(id=2, uuid='uuid2', project_id=1, coordinator_id=0) + trusted_job1 = TrustedJob(id=1, + name='V1', + trusted_job_group_id=1, + job_id=1, + status=TrustedJobStatus.RUNNING) + job1 = Job(id=1, name='job-name1', job_type=JobType.CUSTOMIZED, workflow_id=0, project_id=1) + trusted_job2 = TrustedJob(id=2, + name='V2', + trusted_job_group_id=1, + job_id=2, + status=TrustedJobStatus.SUCCEEDED) + job2 = Job(id=2, name='job-name2', job_type=JobType.CUSTOMIZED, workflow_id=0, project_id=1) + session.add_all([project, participant1, group1, group2, trusted_job1, job1, trusted_job2, job2]) + session.commit() + # fail due to client is not coordinator + with self.assertRaises(grpc.RpcError) as cm: + self._stub.DeleteTrustedJobGroup(DeleteTrustedJobGroupRequest(uuid='uuid2')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED) + # delete group not exist + resp = self._stub.DeleteTrustedJobGroup(DeleteTrustedJobGroupRequest(uuid='not-exist')) + self.assertEqual(resp, Empty()) + # fail due to trusted job is still running + with self.assertRaises(grpc.RpcError) as cm: + self._stub.DeleteTrustedJobGroup(DeleteTrustedJobGroupRequest(uuid='uuid1')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.FAILED_PRECONDITION) + # successful + with db.session_scope() as session: + trusted_job1 = session.query(TrustedJob).get(1) + trusted_job1.status = TrustedJobStatus.FAILED + session.commit() + 
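# with no RUNNING trusted job left, deletion should cascade to the group,
+        # its trusted jobs, and their backing jobs (asserted below)
+        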
self._stub.DeleteTrustedJobGroup(DeleteTrustedJobGroupRequest(uuid='uuid1')) + with db.session_scope() as session: + self.assertIsNone(session.query(TrustedJobGroup).get(1)) + self.assertIsNone(session.query(TrustedJob).get(1)) + self.assertIsNone(session.query(TrustedJob).get(2)) + self.assertIsNone(session.query(Job).get(1)) + self.assertIsNone(session.query(Job).get(2)) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_get_trusted_job_group(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + group = TrustedJobGroup(id=1, name='group', uuid='uuid', project_id=1, auth_status=AuthStatus.AUTHORIZED) + session.add_all([group]) + session.commit() + resp = self._stub.GetTrustedJobGroup(GetTrustedJobGroupRequest(uuid='uuid')) + self.assertEqual(resp, GetTrustedJobGroupResponse(auth_status='AUTHORIZED')) + # fail due to not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.GetTrustedJobGroup(GetTrustedJobGroupRequest(uuid='uuid-not-exist')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_create_trusted_export_job(self, mock_get_grpc_context_info: MagicMock, mock_get_system_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + proj_part2 = ProjectParticipant(project_id=1, participant_id=2) + tee_analyze_job = TrustedJob(id=1, + uuid='uuid1', + type=TrustedJobType.ANALYZE, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=2, + status=TrustedJobStatus.SUCCEEDED) + session.add_all([project, participant1, participant2, proj_part1, proj_part2, tee_analyze_job]) + session.commit() + # successful + req = CreateTrustedExportJobRequest(uuid='uuid2', + name='V1-domain2-1', + export_count=1, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + parent_uuid='uuid1') + self._stub.CreateTrustedExportJob(req) + with db.session_scope() as session: + tee_export_job = session.query(TrustedJob).filter_by(uuid='uuid2').first() + self.assertEqual(tee_export_job.name, 'V1-domain2-1') + self.assertEqual(tee_export_job.type, TrustedJobType.EXPORT) + self.assertEqual(tee_export_job.export_count, 1) + self.assertEqual(tee_export_job.project_id, 1) + self.assertEqual(tee_export_job.trusted_job_group_id, 1) + self.assertEqual(tee_export_job.status, TrustedJobStatus.CREATED) + self.assertEqual(tee_export_job.auth_status, AuthStatus.PENDING) + participants_info = ParticipantsInfo() + participants_info.participants_map['domain1'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['domain2'].auth_status = AuthStatus.AUTHORIZED.name + participants_info.participants_map['domain3'].auth_status = AuthStatus.PENDING.name + self.assertEqual(tee_export_job.get_participants_info(), participants_info) + # failed due to tee_analyze_job not valid + with self.assertRaises(grpc.RpcError) as cm: + req.parent_uuid = 'not-exist' + self._stub.CreateTrustedExportJob(req) + 
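# the mutated request now points at a missing tee_analyze_job
+        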
self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + # failed due to ticket invalid + with self.assertRaises(grpc.RpcError) as cm: + req.ticket_uuid = 'invalid ticket' + self._stub.CreateTrustedExportJob(req) + self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED) + + @patch('fedlearner_webconsole.mmgr.service.ModelJobService.create_model_job') + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_create_model_job(self, mock_get_grpc_context_info: MagicMock, mock_create_model_job: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + # fail due to group not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.CreateModelJob(CreateModelJobRequest(group_uuid='uuid-not-exist')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + with db.session_scope() as session: + session.add(ModelJobGroup(id=2, name='name', uuid='group_uuid', project_id=1, latest_version=2)) + session.add(ModelJob(id=1, name='model-job', project_id=1)) + session.commit() + # fail due to model job name already exists + with self.assertRaises(grpc.RpcError) as cm: + self._stub.CreateModelJob(CreateModelJobRequest(group_uuid='group_uuid', uuid='uuid', name='model-job')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + # fail due to the model job version not larger than group's latest version + with self.assertRaises(grpc.RpcError) as cm: + self._stub.CreateModelJob( + CreateModelJobRequest(group_uuid='group_uuid', + name='name', + version=2, + model_job_type=ModelJobType.TRAINING.name, + algorithm_type=AlgorithmType.NN_VERTICAL.name)) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + # create training model job successfully + mock_create_model_job.return_value = ModelJob(name='haha', uuid='uuid', version=3) + global_config = ModelJobGlobalConfig(global_config={'test': ModelJobConfig()}) + self._stub.CreateModelJob( + CreateModelJobRequest(name='name', + uuid='uuid', + group_uuid='group_uuid', + model_job_type='TRAINING', + algorithm_type='NN_VERTICAL', + global_config=global_config, + version=3)) + mock_create_model_job.assert_called_with(name='name', + uuid='uuid', + group_id=2, + project_id=1, + role=ModelJobRole.PARTICIPANT, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + coordinator_id=1, + data_batch_id=None, + global_config=global_config, + version=3) + with db.session_scope() as session: + group = session.query(ModelJobGroup).filter_by(name='name').first() + self.assertEqual(group.latest_version, 3) + # create evaluation model job successfully + mock_create_model_job.return_value = ModelJob(name='haha', uuid='uuid', version=None) + global_config = ModelJobGlobalConfig(global_config={'test': ModelJobConfig()}) + self._stub.CreateModelJob( + CreateModelJobRequest(name='name', + uuid='uuid', + group_uuid='group_uuid', + model_job_type='EVALUATION', + algorithm_type='NN_VERTICAL', + global_config=global_config, + version=0)) + mock_create_model_job.assert_called_with(name='name', + uuid='uuid', + group_id=2, + project_id=1, + role=ModelJobRole.PARTICIPANT, + model_job_type=ModelJobType.EVALUATION, + algorithm_type=AlgorithmType.NN_VERTICAL, + coordinator_id=1, + data_batch_id=None, + global_config=global_config, + version=0) + with db.session_scope() as session: + group = session.query(ModelJobGroup).filter_by(name='name').first() + self.assertEqual(group.latest_version, 3) + # create auto update model job + with 
db.session_scope() as session: + data_batch = DataBatch(id=1, + name='0', + dataset_id=1, + path='/test_dataset/1/batch/0', + event_time=datetime(2021, 10, 28, 16, 37, 37)) + dataset_job_stage = DatasetJobStage(id=1, + name='data-join', + uuid='dataset-job-stage-uuid', + project_id=1, + state=DatasetJobState.SUCCEEDED, + dataset_job_id=1, + data_batch_id=1) + session.add_all([data_batch, dataset_job_stage]) + session.commit() + global_config = ModelJobGlobalConfig(dataset_job_stage_uuid='dataset-job-stage-uuid') + self._stub.CreateModelJob( + CreateModelJobRequest(name='name', + uuid='uuid', + group_uuid='group_uuid', + model_job_type='TRAINING', + algorithm_type='NN_VERTICAL', + global_config=global_config, + version=4)) + mock_create_model_job.assert_called_with(name='name', + uuid='uuid', + group_id=2, + project_id=1, + role=ModelJobRole.PARTICIPANT, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + coordinator_id=1, + data_batch_id=1, + global_config=global_config, + version=4) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_inform_model_job(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + project = Project(id=1, name='project') + participant = Participant(id=1, name='part1', domain_name='fl-demo1.com') + pro_part = ProjectParticipant(id=1, project_id=1, participant_id=1) + model_job = ModelJob(id=1, + name='model_job', + uuid='uuid', + project_id=1, + auth_status=ModelAuthStatus.AUTHORIZED) + participants_info = ParticipantsInfo() + participants_info.participants_map['demo1'].auth_status = AuthStatus.PENDING.name + model_job.set_participants_info(participants_info) + session.add_all([project, participant, pro_part, model_job]) + session.commit() + self._stub.InformModelJob(InformModelJobRequest(uuid='uuid', auth_status=AuthStatus.AUTHORIZED.name)) + # authorized + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + participants_info = model_job.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.AUTHORIZED.name) + # pending + self._stub.InformModelJob(InformModelJobRequest(uuid='uuid', auth_status=AuthStatus.PENDING.name)) + with db.session_scope() as session: + model_job = session.query(ModelJob).get(1) + participants_info = model_job.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.PENDING.name) + # fail due to model job not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.InformModelJob(InformModelJobRequest(uuid='uuid1', auth_status=AuthStatus.PENDING.name)) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + # fail due to auth_status invalid + with self.assertRaises(grpc.RpcError) as cm: + self._stub.InformModelJob(InformModelJobRequest(uuid='uuid', auth_status='aaaaa')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + + @patch('fedlearner_webconsole.dataset.models.DataBatch.is_available') + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_create_dataset_job_stage(self, mock_get_grpc_context_info: MagicMock, mock_is_available: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + mock_is_available.return_value = True + # test streaming + event_time = datetime(2022, 1, 1) + request = CreateDatasetJobStageRequest(dataset_job_uuid='dataset_job_123', + 
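# STREAMING output datasets derive the batch name from
+                                               # event_time (asserted below); PSI datasets use batch '0'
+                                               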
dataset_job_stage_uuid='dataset_job_stage_123', + name='test_stage', + event_time=to_timestamp(event_time)) + with self.assertRaises(grpc.RpcError) as cm: + self._stub.CreateDatasetJobStage(request) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + self.assertEqual(cm.exception.details(), 'dataset_job dataset_job_123 is not found') + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job_123', + project_id=1, + input_dataset_id=1, + output_dataset_id=10, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=1) + session.add(dataset_job) + dataset = Dataset(id=1, + uuid='dataset input', + name='default dataset input', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(dataset) + data_batch = DataBatch(id=1, + name='test_batch', + dataset_id=1, + latest_parent_dataset_job_stage_id=100, + latest_analyzer_dataset_job_stage_id=100) + session.add(data_batch) + session.commit() + with self.assertRaises(grpc.RpcError) as cm: + self._stub.CreateDatasetJobStage(request) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + self.assertEqual(cm.exception.details(), 'output dataset is not found, dataset_job uuid: dataset_job_123') + with db.session_scope() as session: + default_dataset = Dataset(id=10, + uuid='dataset_123', + name='default dataset', + dataset_type=DatasetType.STREAMING, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(default_dataset) + session.commit() + resp = self._stub.CreateDatasetJobStage(request) + self.assertEqual(resp, Empty()) + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).filter(DataBatch.event_time == event_time).first() + self.assertEqual(data_batch.dataset_id, 10) + self.assertEqual(data_batch.name, '20220101') + self.assertEqual(data_batch.path, '/data/dataset/123/batch/20220101') + dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).filter( + DatasetJobStage.uuid == 'dataset_job_stage_123').first() + self.assertEqual(dataset_job_stage.dataset_job_id, 1) + self.assertEqual(dataset_job_stage.data_batch_id, data_batch.id) + self.assertEqual(dataset_job_stage.event_time, event_time) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + # idempotent test + resp = self._stub.CreateDatasetJobStage(request) + self.assertEqual(resp, Empty()) + with db.session_scope() as session: + data_batches = session.query(DataBatch).filter(DataBatch.event_time == event_time).all() + self.assertEqual(len(data_batches), 1) + # test psi + with db.session_scope() as session: + dataset_job_2 = DatasetJob(id=2, + uuid='dataset_job_2', + project_id=1, + input_dataset_id=1, + output_dataset_id=11, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=1) + session.add(dataset_job_2) + dataset_2 = Dataset(id=11, + uuid='dataset_2', + name='default dataset', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(dataset_2) + session.commit() + request = CreateDatasetJobStageRequest(dataset_job_uuid='dataset_job_2', + dataset_job_stage_uuid='dataset_job_stage_2', + name='test_stage') + resp = 
self._stub.CreateDatasetJobStage(request) + self.assertEqual(resp, Empty()) + with db.session_scope() as session: + data_batch: DataBatch = session.query(DataBatch).filter(DataBatch.dataset_id == 11).first() + self.assertEqual(data_batch.name, '0') + self.assertEqual(data_batch.path, '/data/dataset/123/batch/0') + dataset_job_stage: DatasetJobStage = session.query(DatasetJobStage).filter( + DatasetJobStage.uuid == 'dataset_job_stage_2').first() + self.assertEqual(dataset_job_stage.dataset_job_id, 2) + self.assertEqual(dataset_job_stage.data_batch_id, data_batch.id) + self.assertIsNone(dataset_job_stage.event_time) + self.assertEqual(dataset_job_stage.project_id, 1) + self.assertEqual(dataset_job_stage.coordinator_id, 1) + # idempotent test + resp = self._stub.CreateDatasetJobStage(request) + self.assertEqual(resp, Empty()) + with db.session_scope() as session: + data_batches = session.query(DataBatch).filter(DataBatch.dataset_id == 11).all() + self.assertEqual(len(data_batches), 1) + + # test batch not ready + mock_is_available.reset_mock() + mock_is_available.return_value = False + with self.assertRaises(grpc.RpcError) as cm: + self._stub.CreateDatasetJobStage(request) + self.assertEqual(cm.exception.code(), grpc.StatusCode.FAILED_PRECONDITION) + + @patch('fedlearner_webconsole.dataset.services.DatasetJobConfiger.from_kind', + lambda *args: FakeDatasetJobConfiger(None)) + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_get_dataset_job_stage(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job_uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=0, + kind=DatasetJobKind.IMPORT_SOURCE, + state=DatasetJobState.RUNNING, + coordinator_id=0, + workflow_id=0) + session.add(dataset_job) + job_stage = DatasetJobStage(id=1, + uuid='job_stage_uuid', + name='default dataset job stage', + project_id=1, + workflow_id=1, + dataset_job_id=1, + data_batch_id=1, + event_time=datetime(2012, 1, 15), + state=DatasetJobState.PENDING) + session.add(job_stage) + session.commit() + request = GetDatasetJobStageRequest(dataset_job_stage_uuid='job_stage_uuid') + resp = self._stub.GetDatasetJobStage(request) + self.assertEqual(resp.dataset_job_stage.uuid, 'job_stage_uuid') + self.assertEqual(resp.dataset_job_stage.name, 'default dataset job stage') + self.assertEqual(resp.dataset_job_stage.dataset_job_id, 1) + self.assertEqual(resp.dataset_job_stage.event_time, to_timestamp(datetime(2012, 1, 15))) + self.assertEqual(resp.dataset_job_stage.is_ready, False) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_update_dataset_job_scheduler_state(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + dataset_job = DatasetJob(id=1, + uuid='dataset_job_uuid', + project_id=1, + input_dataset_id=1, + output_dataset_id=0, + kind=DatasetJobKind.IMPORT_SOURCE, + state=DatasetJobState.RUNNING, + coordinator_id=0, + workflow_id=0, + scheduler_state=DatasetJobSchedulerState.PENDING, + time_range=timedelta(days=1)) + session.add(dataset_job) + session.commit() + request = UpdateDatasetJobSchedulerStateRequest(uuid='dataset_job_uuid', + scheduler_state=DatasetJobSchedulerState.RUNNABLE.name) + resp = self._stub.UpdateDatasetJobSchedulerState(request) + self.assertEqual(resp, Empty()) + with db.session_scope() as session: + 
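# PENDING -> RUNNABLE is an allowed transition for this dataset job,
+            # so the new scheduler_state should have been persisted
+            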
dataset_job = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.RUNNABLE) + request = UpdateDatasetJobSchedulerStateRequest(uuid='dataset_job_uuid', + scheduler_state=DatasetJobSchedulerState.STOPPED.name) + resp = self._stub.UpdateDatasetJobSchedulerState(request) + self.assertEqual(resp, Empty()) + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(1) + self.assertEqual(dataset_job.scheduler_state, DatasetJobSchedulerState.STOPPED) + request = UpdateDatasetJobSchedulerStateRequest(uuid='dataset_job_uuid', + scheduler_state=DatasetJobSchedulerState.PENDING.name) + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateDatasetJobSchedulerState(request) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_get_model_job(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + request = GetModelJobRequest(uuid='uuid') + with self.assertRaises(grpc.RpcError) as cm: + self._stub.GetModelJob(request=request) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + self.assertEqual(cm.exception.details(), 'model job uuid is not found') + with db.session_scope() as session: + model_job = ModelJob(uuid='uuid', + role=ModelJobRole.PARTICIPANT, + model_job_type=ModelJobType.TRAINING, + algorithm_type=AlgorithmType.NN_VERTICAL, + auth_status=ModelAuthStatus.AUTHORIZED, + status=ModelJobStatus.CONFIGURED, + created_at=datetime(2022, 8, 16, 0, 0), + updated_at=datetime(2022, 8, 16, 0, 0)) + session.add(model_job) + session.commit() + resp = self._stub.GetModelJob(request=request) + self.assertEqual( + resp, + ModelJobPb(id=1, + uuid='uuid', + role='PARTICIPANT', + model_job_type='TRAINING', + algorithm_type='NN_VERTICAL', + state='PENDING_ACCEPT', + auth_status='AUTHORIZED', + status='CONFIGURED', + auth_frontend_status='ALL_AUTHORIZED', + participants_info=ParticipantsInfo(), + created_at=1660608000, + updated_at=1660608000)) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_get_model_job_group(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + request = GetModelJobGroupRequest(uuid='uuid') + with self.assertRaises(grpc.RpcError) as cm: + self._stub.GetModelJobGroup(request=request) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + self.assertEqual(cm.exception.details(), 'model job group with uuid uuid is not found') + with db.session_scope() as session: + group = ModelJobGroup(id=1, + role=ModelJobRole.PARTICIPANT, + uuid='uuid', + authorized=True, + algorithm_type=AlgorithmType.NN_VERTICAL, + created_at=datetime(2022, 8, 16, 0, 0), + updated_at=datetime(2022, 8, 16, 0, 0)) + session.add(group) + session.commit() + resp = self._stub.GetModelJobGroup(request=request) + self.assertEqual( + resp, + ModelJobGroupPb(id=1, + uuid='uuid', + role='PARTICIPANT', + algorithm_type='NN_VERTICAL', + authorized=True, + auth_frontend_status='ALL_AUTHORIZED', + auth_status='PENDING', + auto_update_status='INITIAL', + participants_info=ParticipantsInfo(), + algorithm_project_uuid_list=AlgorithmProjectList(), + created_at=1660608000, + updated_at=1660608000)) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_inform_model_job_group(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + project = 
Project(id=1, name='project') + participant1 = Participant(id=1, name='part1', domain_name='fl-demo1.com') + participant2 = Participant(id=2, name='part2', domain_name='fl-demo2.com') + group = ModelJobGroup(id=1, uuid='uuid', project_id=1) + participants_info = ParticipantsInfo() + participants_info.participants_map['demo1'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['demo2'].auth_status = AuthStatus.PENDING.name + group.set_participants_info(participants_info) + session.add_all([project, participant1, participant2, group]) + session.commit() + # authorized + self._stub.InformModelJobGroup(InformModelJobGroupRequest(uuid='uuid', auth_status=AuthStatus.AUTHORIZED.name)) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.AUTHORIZED.name) + # pending + self._stub.InformModelJobGroup(InformModelJobGroupRequest(uuid='uuid', auth_status=AuthStatus.PENDING.name)) + with db.session_scope() as session: + group = session.query(ModelJobGroup).get(1) + participants_info = group.get_participants_info() + self.assertEqual(participants_info.participants_map['demo1'].auth_status, AuthStatus.PENDING.name) + # fail due to group not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.InformModelJobGroup( + InformModelJobGroupRequest(uuid='uuid-1', auth_status=AuthStatus.PENDING.name)) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + # fail due to auth_status invalid + with self.assertRaises(grpc.RpcError) as cm: + self._stub.InformModelJobGroup(InformModelJobGroupRequest(uuid='uuid', auth_status='aaaaa')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_update_model_job_group(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + project = Project(id=1, name='project') + group = ModelJobGroup(id=1, + name='group', + uuid='group_uuid', + project_id=1, + auto_update_status=GroupAutoUpdateStatus.INITIAL) + dataset_job_stage = DatasetJobStage(id=1, + name='data_join', + uuid='stage_uuid', + project_id=1, + state=DatasetJobState.SUCCEEDED, + dataset_job_id=1, + data_batch_id=1) + session.add_all([project, group, dataset_job_stage]) + session.commit() + # fail due to group not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateModelJobGroup( + UpdateModelJobGroupRequest(uuid='uuid', + auto_update_status=GroupAutoUpdateStatus.ACTIVE.name, + start_dataset_job_stage_uuid='stage_uuid')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + # fail due to auto_update_status invalid + with self.assertRaises(grpc.RpcError) as cm: + self._stub.UpdateModelJobGroup( + UpdateModelJobGroupRequest(uuid='group_uuid', + auto_update_status='aaa', + start_dataset_job_stage_uuid='stage_uuid')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + # update auto_update_status and start_data_batch_id + self._stub.UpdateModelJobGroup( + UpdateModelJobGroupRequest(uuid='group_uuid', + auto_update_status=GroupAutoUpdateStatus.ACTIVE.name, + start_dataset_job_stage_uuid='stage_uuid')) + with db.session_scope() as session: + group = session.query(ModelJobGroup).filter_by(uuid='group_uuid', project_id=1).first() + 
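# a single UpdateModelJobGroup call updates both fields together
+            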
self.assertEqual(group.auto_update_status, GroupAutoUpdateStatus.ACTIVE) + self.assertEqual(group.start_data_batch_id, 1) + # only update auto_update_status + self._stub.UpdateModelJobGroup( + UpdateModelJobGroupRequest(uuid='group_uuid', auto_update_status=GroupAutoUpdateStatus.STOPPED.name)) + with db.session_scope() as session: + group = session.query(ModelJobGroup).filter_by(uuid='group_uuid', project_id=1).first() + self.assertEqual(group.auto_update_status, GroupAutoUpdateStatus.STOPPED) + + @patch('fedlearner_webconsole.mmgr.service.ModelJobGroupService.create_group') + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_create_model_job_group(self, mock_get_grpc_context_info: MagicMock, mock_create_group: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + request = CreateModelJobGroupRequest( + name='name', + uuid='uuid', + algorithm_type=AlgorithmType.NN_VERTICAL.name, + dataset_uuid='uuid', + algorithm_project_list=AlgorithmProjectList(algorithm_projects={'test': 'uuid'})) + with self.assertRaises(grpc.RpcError) as cm: + # test dataset not found + resp = self._stub.CreateModelJobGroup(request) + self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + self.assertEqual(cm.exception.details(), 'dataset with uuid uuid is not found') + with db.session_scope() as session: + session.add(Dataset(id=1, name='name', uuid='uuid')) + session.commit() + mock_create_group.return_value = ModelJobGroup(name='name', uuid='uuid') + resp = self._stub.CreateModelJobGroup(request) + # create group + mock_create_group.assert_called_with( + name='name', + uuid='uuid', + project_id=1, + role=ModelJobRole.PARTICIPANT, + dataset_id=1, + algorithm_type=AlgorithmType.NN_VERTICAL, + algorithm_project_list=AlgorithmProjectList(algorithm_projects={'test': 'uuid'}), + coordinator_id=1) + mock_create_group.reset_mock() + resp = self._stub.CreateModelJobGroup(request) + # create group not called if group is already created + mock_create_group.assert_not_called() + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_inform_trusted_job(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part1', domain_name='fl-domain2.com') + trusted_job = TrustedJob(id=1, uuid='uuid', project_id=1) + participants_info = ParticipantsInfo() + participants_info.participants_map['domain2'].auth_status = AuthStatus.PENDING.name + trusted_job.set_participants_info(participants_info) + session.add_all([project, participant1, trusted_job]) + session.commit() + self._stub.InformTrustedJob(InformTrustedJobRequest(uuid='uuid', auth_status=AuthStatus.AUTHORIZED.name)) + with db.session_scope() as session: + trusted_job = session.query(TrustedJob).get(1) + participants_info = trusted_job.get_participants_info() + self.assertEqual(participants_info.participants_map['domain2'].auth_status, AuthStatus.AUTHORIZED.name) + # fail due to group not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.InformTrustedJob(InformTrustedJobRequest(uuid='not-exist', auth_status=AuthStatus.WITHDRAW.name)) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + # fail due to auth_status invalid + with self.assertRaises(grpc.RpcError) as cm: + self._stub.InformTrustedJob(InformTrustedJobRequest(uuid='uuid', auth_status='AUTHORIZE')) + 
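# 'AUTHORIZE' is not a member of AuthStatus, hence INVALID_ARGUMENT
+        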
self.assertEqual(cm.exception.code(), grpc.StatusCode.INVALID_ARGUMENT) + + @patch('fedlearner_webconsole.rpc.v2.job_service_server.get_grpc_context_info') + def test_get_trusted_job(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + with db.session_scope() as session: + trusted_job = TrustedJob(id=1, name='name', uuid='uuid', project_id=1, auth_status=AuthStatus.AUTHORIZED) + session.add_all([trusted_job]) + session.commit() + resp = self._stub.GetTrustedJob(GetTrustedJobRequest(uuid='uuid')) + self.assertEqual(resp, GetTrustedJobResponse(auth_status='AUTHORIZED')) + # fail due to not found + with self.assertRaises(grpc.RpcError) as cm: + self._stub.GetTrustedJob(GetTrustedJobRequest(uuid='uuid-not-exist')) + self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_client.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_client.py new file mode 100644 index 000000000..3dd0a99c1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_client.py @@ -0,0 +1,86 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Dict + +import grpc +from google.protobuf import empty_pb2 + +from envs import Envs +from fedlearner_webconsole.project.models import PendingProject, PendingProjectState +from fedlearner_webconsole.workflow_template.models import WorkflowTemplateKind +from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo +from fedlearner_webconsole.proto.rpc.v2.project_service_pb2 import CreatePendingProjectRequest, \ + UpdatePendingProjectRequest, SyncPendingProjectStateRequest, CreateProjectRequest, DeletePendingProjectRequest, \ + SendTemplateRevisionRequest +from fedlearner_webconsole.proto.rpc.v2.project_service_pb2_grpc import ProjectServiceStub +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.rpc.v2.client_base import ParticipantRpcClient +from fedlearner_webconsole.utils.decorators.retry import retry_fn + + +# only unavailable is caused by network jitter. 
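+# (i.e. UNAVAILABLE is the only status treated as retryable here; other codes
+# signal deterministic failures that a retry would not fix)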
+def _retry_unavailable(err: Exception) -> bool: + if not isinstance(err, grpc.RpcError): + return False + return err.code() == grpc.StatusCode.UNAVAILABLE + + +class ProjectServiceClient(ParticipantRpcClient): + + def __init__(self, channel: grpc.Channel): + super().__init__(channel) + self._stub: ProjectServiceStub = ProjectServiceStub(channel) + + @retry_fn(retry_times=3, need_retry=_retry_unavailable) + def create_pending_project(self, pending_project: PendingProject) -> empty_pb2.Empty: + request = CreatePendingProjectRequest(uuid=pending_project.uuid, + name=pending_project.name, + participants_info=pending_project.get_participants_info(), + comment=pending_project.comment, + creator_username=pending_project.creator_username, + config=pending_project.get_config(), + ticket_uuid=pending_project.ticket_uuid) + return self._stub.CreatePendingProject(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_retry_unavailable) + def update_pending_project(self, uuid: str, participants_map: Dict[str, ParticipantInfo]) -> empty_pb2.Empty: + request = UpdatePendingProjectRequest(uuid=uuid, participants_map=participants_map) + return self._stub.UpdatePendingProject(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_retry_unavailable) + def create_project(self, uuid: str): + request = CreateProjectRequest(uuid=uuid) + return self._stub.CreateProject(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_retry_unavailable) + def sync_pending_project_state(self, uuid: str, state: PendingProjectState) -> empty_pb2.Empty: + request = SyncPendingProjectStateRequest(uuid=uuid, state=state.name) + return self._stub.SyncPendingProjectState(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_retry_unavailable) + def delete_pending_project(self, uuid: str): + request = DeletePendingProjectRequest(uuid=uuid) + return self._stub.DeletePendingProject(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_retry_unavailable) + def send_template_revision(self, config: WorkflowDefinition, name: str, comment: str, kind: WorkflowTemplateKind, + revision_index: int): + request = SendTemplateRevisionRequest(config=config, + name=name, + comment=comment, + kind=kind.name, + revision_index=revision_index) + return self._stub.SendTemplateRevision(request, timeout=Envs.GRPC_CLIENT_TIMEOUT) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_client_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_client_test.py new file mode 100644 index 000000000..5724c1b2c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_client_test.py @@ -0,0 +1,169 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest + +import grpc +import grpc_testing +from google.protobuf.empty_pb2 import Empty +from google.protobuf.descriptor import ServiceDescriptor + +from fedlearner_webconsole.project.models import PendingProject, PendingProjectState, ProjectRole +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.proto.rpc.v2.project_service_pb2 import CreatePendingProjectRequest, \ + UpdatePendingProjectRequest, SyncPendingProjectStateRequest, CreateProjectRequest, DeletePendingProjectRequest, \ + SendTemplateRevisionRequest +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.proto.rpc.v2 import project_service_pb2 +from fedlearner_webconsole.rpc.v2.project_service_client import ProjectServiceClient +from fedlearner_webconsole.workflow_template.models import WorkflowTemplateKind +from testing.rpc.client import RpcClientTestCase + +_SERVER_DESCRIPTOR: ServiceDescriptor = project_service_pb2.DESCRIPTOR.services_by_name['ProjectService'] + + +class ProjectServiceClientTest(RpcClientTestCase): + + def setUp(self): + super().setUp() + self._fake_channel: grpc_testing.Channel = grpc_testing.channel([_SERVER_DESCRIPTOR], + grpc_testing.strict_real_time()) + self._client = ProjectServiceClient(self._fake_channel) + + def test_create_pending_project(self): + pending_project = PendingProject(uuid='test', name='test-project', ticket_uuid='test') + + participants_info = ParticipantsInfo( + participants_map={ + 'test': + ParticipantInfo( + name='test', state=PendingProjectState.ACCEPTED.name, role=ProjectRole.COORDINATOR.name), + 'part': + ParticipantInfo( + name='part', state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name) + }) + pending_project.set_participants_info(participants_info) + call = self.client_execution_pool.submit(self._client.create_pending_project, pending_project=pending_project) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['CreatePendingProject']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + CreatePendingProjectRequest(uuid='test', + participants_info=participants_info, + name='test-project', + config=pending_project.get_config(), + ticket_uuid='test')) + self.assertEqual(call.result(), expected_response) + + def test_update_pending_project(self): + participants_map = { + 'part': + ParticipantInfo(name='part', state=PendingProjectState.ACCEPTED.name, role=ProjectRole.PARTICIPANT.name) + } + + call = self.client_execution_pool.submit(self._client.update_pending_project, + uuid='test', + participants_map=participants_map) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['UpdatePendingProject']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, UpdatePendingProjectRequest(uuid='test', participants_map=participants_map)) + self.assertEqual(call.result(), expected_response) + + def test_sync_pending_project_state(self): + call = self.client_execution_pool.submit(self._client.sync_pending_project_state, + uuid='test', + state=PendingProjectState.ACCEPTED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + 
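# grpc_testing hands the test the in-flight RPC so it can inspect the
+            # request and terminate it with a canned response
+            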
_SERVER_DESCRIPTOR.methods_by_name['SyncPendingProjectState']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, SyncPendingProjectStateRequest(uuid='test', state='ACCEPTED')) + self.assertEqual(call.result(), expected_response) + + def test_create_project(self): + call = self.client_execution_pool.submit(self._client.create_project, uuid='test') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['CreateProject']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, CreateProjectRequest(uuid='test')) + self.assertEqual(call.result(), expected_response) + + def test_delete_pending_project(self): + call = self.client_execution_pool.submit(self._client.delete_pending_project, uuid='test') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['DeletePendingProject']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, DeletePendingProjectRequest(uuid='test')) + self.assertEqual(call.result(), expected_response) + + def test_send_template_revision(self): + call = self.client_execution_pool.submit(self._client.send_template_revision, + config=WorkflowDefinition(), + name='test', + comment='test', + kind=WorkflowTemplateKind.PEER, + revision_index=1) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['SendTemplateRevision']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + SendTemplateRevisionRequest(config=WorkflowDefinition(), + name='test', + comment='test', + kind=WorkflowTemplateKind.PEER.name, + revision_index=1)) + self.assertEqual(call.result(), expected_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_server.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_server.py new file mode 100644 index 000000000..e2a73efd0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_server.py @@ -0,0 +1,181 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import logging
+from typing import Dict
+
+import grpc
+from google.protobuf import empty_pb2
+from grpc import ServicerContext
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.project.models import PendingProjectState, ProjectRole, PendingProject, Project
+from fedlearner_webconsole.project.services import PendingProjectService
+from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo
+from fedlearner_webconsole.proto.rpc.v2.project_service_pb2_grpc import ProjectServiceServicer
+from fedlearner_webconsole.proto.rpc.v2.project_service_pb2 import CreatePendingProjectRequest, \
+    UpdatePendingProjectRequest, SyncPendingProjectStateRequest, CreateProjectRequest, \
+    DeletePendingProjectRequest, SendTemplateRevisionRequest
+from fedlearner_webconsole.review.ticket_helper import get_ticket_helper
+from fedlearner_webconsole.rpc.v2.utils import get_pure_domain_from_context
+from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus
+from fedlearner_webconsole.utils.pp_datetime import now
+from fedlearner_webconsole.workflow_template.service import WorkflowTemplateRevisionService
+from fedlearner_webconsole.audit.decorators import emits_rpc_event
+from fedlearner_webconsole.proto.audit_pb2 import Event
+
+
+def _is_same_participants(participants_map: Dict[str, ParticipantInfo],
+                          new_participants_map: Dict[str, ParticipantInfo]) -> bool:
+    # NOTE: despite its name, this returns True when the two participant sets
+    # DIFFER; UpdatePendingProject uses it to reject membership changes.
+    return set(participants_map) != set(new_participants_map)
+
+
+class ProjectGrpcService(ProjectServiceServicer):
+
+    @emits_rpc_event(resource_type=Event.ResourceType.PENDING_PROJECT,
+                     op_type=Event.OperationType.CREATE,
+                     resource_name_fn=lambda request: request.uuid)
+    def CreatePendingProject(self, request: CreatePendingProjectRequest, context: ServicerContext):
+
+        with db.session_scope() as session:
+            existed = session.query(PendingProject).filter_by(uuid=request.uuid).first()
+            # make CreatePendingProject idempotent
+            if existed is not None:
+                return empty_pb2.Empty()
+            validate = get_ticket_helper(session).validate_ticket(request.ticket_uuid,
+                                                                  lambda ticket: ticket.details.uuid == request.uuid)
+            if not validate:
+                context.abort(grpc.StatusCode.PERMISSION_DENIED, f'ticket {request.ticket_uuid} is not validated')
+            pending_project = PendingProjectService(session).create_pending_project(
+                name=request.name,
+                config=request.config,
+                participants_info=request.participants_info,
+                comment=request.comment,
+                uuid=request.uuid,
+                creator_username=request.creator_username,
+                state=PendingProjectState.PENDING,
+                role=ProjectRole.PARTICIPANT)
+            pending_project.ticket_uuid = request.ticket_uuid
+            pending_project.ticket_status = TicketStatus.APPROVED
+            session.commit()
+        return empty_pb2.Empty()
+
+    @emits_rpc_event(resource_type=Event.ResourceType.PENDING_PROJECT,
+                     op_type=Event.OperationType.UPDATE,
+                     resource_name_fn=lambda request: request.uuid)
+    def UpdatePendingProject(self, request: UpdatePendingProjectRequest, context: ServicerContext):
+        peer_pure_domain = get_pure_domain_from_context(context)
+        with db.session_scope() as session:
+            # SERIALIZABLE isolation guarantees participants_info cannot be
+            # changed by a concurrent transaction while this session runs
+            session.connection(execution_options={'isolation_level': 'SERIALIZABLE'})
+            pending_project: PendingProject = session.query(PendingProject).filter_by(uuid=request.uuid).first()
+            if pending_project is None:
+                context.abort(grpc.StatusCode.NOT_FOUND, f'not found pending project uuid: {request.uuid}')
+            participants_map = 
pending_project.get_participants_info().participants_map + peer_info = pending_project.get_participant_info(peer_pure_domain) + if not peer_info or peer_info.role == ProjectRole.PARTICIPANT.name: + context.abort(grpc.StatusCode.PERMISSION_DENIED, + f'{peer_pure_domain} is not coordinator in pending project {request.uuid}') + if _is_same_participants(participants_map, request.participants_map): + context.abort(grpc.StatusCode.PERMISSION_DENIED, + f'can not change participants when the pending project {request.uuid} has been approved') + participants_map.MergeFrom(request.participants_map) + pending_project.set_participants_info(ParticipantsInfo(participants_map=participants_map)) + session.commit() + return empty_pb2.Empty() + + @emits_rpc_event(resource_type=Event.ResourceType.PENDING_PROJECT, + op_type=Event.OperationType.CONTROL_STATE, + resource_name_fn=lambda request: request.uuid) + def SyncPendingProjectState(self, request: SyncPendingProjectStateRequest, context: ServicerContext): + peer_pure_domain = get_pure_domain_from_context(context) + if request.state not in [PendingProjectState.ACCEPTED.name, PendingProjectState.CLOSED.name]: + context.abort(grpc.StatusCode.PERMISSION_DENIED, + f'participant can only sync ACCEPTED or CLOSED but got: {request.state}') + with db.session_scope() as session: + session.connection(execution_options={'isolation_level': 'SERIALIZABLE'}) + pending_project: PendingProject = session.query(PendingProject).filter_by(uuid=request.uuid).first() + if pending_project is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'not found pending project uuid: {request.uuid}') + participants_info = pending_project.get_participants_info() + if peer_pure_domain not in participants_info.participants_map: + context.abort(grpc.StatusCode.PERMISSION_DENIED, + f'{peer_pure_domain} is not in pending project {request.uuid}') + participants_info.participants_map[peer_pure_domain].state = request.state + pending_project.set_participants_info(participants_info) + session.commit() + return empty_pb2.Empty() + + @emits_rpc_event(resource_type=Event.ResourceType.PROJECT, + op_type=Event.OperationType.CREATE, + resource_name_fn=lambda request: request.uuid) + def CreateProject(self, request: CreateProjectRequest, context: ServicerContext): + with db.session_scope() as session: + pending_project: PendingProject = session.query(PendingProject).filter_by(uuid=request.uuid).first() + + if pending_project is None: + message = f'failed to find pending project, uuid is {request.uuid}' + logging.error(message) + context.abort(grpc.StatusCode.NOT_FOUND, message) + + if pending_project.state == PendingProjectState.CLOSED: + logging.info(f'{pending_project.uuid} pending project has closed') + return empty_pb2.Empty() + + if pending_project.state != PendingProjectState.ACCEPTED: + message = f'{pending_project.uuid} pending project has not been accepted' + logging.info(message) + context.abort(grpc.StatusCode.FAILED_PRECONDITION, message) + + project = session.query(Project).filter_by(name=pending_project.name).first() + if project is not None: + message = f'{pending_project.name} project has already existed, uuid is {pending_project.uuid}' + logging.error(message) + context.abort(grpc.StatusCode.ALREADY_EXISTS, message) + + with db.session_scope() as session: + session.connection(execution_options={'isolation_level': 'SERIALIZABLE'}) + PendingProjectService(session).create_project_locally(pending_project.uuid) + session.commit() + return empty_pb2.Empty() + + 
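# deletion below is coordinator-only and idempotent: unknown uuids return
+    # Empty, and the pending project is only soft-deleted via deleted_at
+    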
+    @emits_rpc_event(resource_type=Event.ResourceType.PROJECT,
+                     op_type=Event.OperationType.DELETE,
+                     resource_name_fn=lambda request: request.uuid)
+    def DeletePendingProject(self, request: DeletePendingProjectRequest, context: ServicerContext):
+        peer_pure_domain = get_pure_domain_from_context(context)
+        with db.session_scope() as session:
+            pending_project: PendingProject = session.query(PendingProject).filter_by(uuid=request.uuid).first()
+            # make DeletePendingProject idempotent
+            if pending_project is None:
+                return empty_pb2.Empty()
+            if peer_pure_domain != pending_project.get_coordinator_info()[0]:
+                context.abort(grpc.StatusCode.PERMISSION_DENIED,
+                              f'{peer_pure_domain} is not coordinator in pending project {request.uuid}')
+            pending_project.deleted_at = now()
+            session.commit()
+            return empty_pb2.Empty()
+
+    def SendTemplateRevision(self, request: SendTemplateRevisionRequest, context: ServicerContext):
+        peer_pure_domain = get_pure_domain_from_context(context)
+        with db.session_scope() as session:
+            WorkflowTemplateRevisionService(session).create_revision(
+                name=request.name,
+                kind=request.kind,
+                config=request.config,
+                revision_index=request.revision_index,
+                comment=request.comment,
+                peer_pure_domain=peer_pure_domain,
+            )
+            session.commit()
+            return empty_pb2.Empty()
diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_server_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_server_test.py
new file mode 100644
index 000000000..914d6f2e6
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/project_service_server_test.py
@@ -0,0 +1,268 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from concurrent import futures +from unittest.mock import patch, MagicMock + +import grpc +from google.protobuf.empty_pb2 import Empty + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import PendingProjectState, PendingProject, ProjectRole, Project +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.proto.rpc.v2 import project_service_pb2_grpc +from fedlearner_webconsole.proto.rpc.v2.project_service_pb2 import CreatePendingProjectRequest, \ + SyncPendingProjectStateRequest, UpdatePendingProjectRequest, CreateProjectRequest, DeletePendingProjectRequest, \ + SendTemplateRevisionRequest +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.review import common +from fedlearner_webconsole.rpc.v2.project_service_server import ProjectGrpcService, _is_same_participants +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.workflow_template.models import WorkflowTemplateKind, WorkflowTemplate, \ + WorkflowTemplateRevision +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ProjectServiceTest(NoWebServerTestCase): + LISTEN_PORT = 2001 + + def setUp(self): + super().setUp() + self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=20)) + project_service_pb2_grpc.add_ProjectServiceServicer_to_server(ProjectGrpcService(), self._server) + self._server.add_insecure_port(f'[::]:{self.LISTEN_PORT}') + self._server.start() + self._channel = grpc.insecure_channel(target=f'localhost:{self.LISTEN_PORT}') + self._stub = project_service_pb2_grpc.ProjectServiceStub(self._channel) + self.participants_map = { + 'test': + ParticipantInfo(name='test', state=PendingProjectState.ACCEPTED.name, + role=ProjectRole.COORDINATOR.name), + 'part1': + ParticipantInfo(name='part', state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name), + 'part2': + ParticipantInfo(name='part', state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name) + } + + def tearDown(self): + self._channel.close() + self._server.stop(5) + return super().tearDown() + + @patch('fedlearner_webconsole.rpc.v2.project_service_server.get_ticket_helper') + def test_create_pending_project(self, mock_get_ticket_helper): + participants_info = ParticipantsInfo( + participants_map={ + 'test': + ParticipantInfo( + name='test', state=PendingProjectState.ACCEPTED.name, role=ProjectRole.COORDINATOR.name), + 'part': + ParticipantInfo( + name='part', state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name) + }) + request = CreatePendingProjectRequest(name='test-project', + uuid='test', + participants_info=participants_info, + ticket_uuid=common.NO_CENTRAL_SERVER_UUID) + mock_get_ticket_helper.return_value.validate_ticket.return_value = True + resp = self._stub.CreatePendingProject(request, None) + with db.session_scope() as session: + pending_project: PendingProject = session.query(PendingProject).filter_by(uuid='test').first() + self.assertEqual(resp, Empty()) + self.assertEqual(pending_project.state, PendingProjectState.PENDING) + self.assertEqual(pending_project.role, ProjectRole.PARTICIPANT) + self.assertEqual(pending_project.get_participants_info(), participants_info) + self.assertEqual(pending_project.ticket_status, TicketStatus.APPROVED) + self.assertEqual(pending_project.ticket_uuid, 
common.NO_CENTRAL_SERVER_UUID) + + @patch('fedlearner_webconsole.rpc.v2.project_service_server.get_ticket_helper') + def test_create_pending_project_ticket_wrong(self, mock_get_ticket_helper): + mock_get_ticket_helper.return_value.validate_ticket.return_value = False + participants_info = ParticipantsInfo( + participants_map={ + 'test': + ParticipantInfo( + name='test', state=PendingProjectState.ACCEPTED.name, role=ProjectRole.COORDINATOR.name), + 'part': + ParticipantInfo( + name='part', state=PendingProjectState.PENDING.name, role=ProjectRole.PARTICIPANT.name) + }) + request = CreatePendingProjectRequest(name='test-project', + uuid='test', + participants_info=participants_info, + ticket_uuid='wrong') + with self.assertRaisesRegex(grpc.RpcError, 'ticket wrong is not validated') as cm: + self._stub.CreatePendingProject(request, None) + self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED) + with db.session_scope() as session: + pending_project: PendingProject = session.query(PendingProject).filter_by(uuid='test').first() + self.assertIsNone(pending_project) + + @patch('fedlearner_webconsole.rpc.v2.project_service_server.get_pure_domain_from_context') + def test_update_pending_project(self, mock_pure_domain): + participants_map = self.participants_map + participants_info = ParticipantsInfo(participants_map=participants_map) + pending_project = PendingProject(uuid='unique1', + name='test', + state=PendingProjectState.PENDING, + role=ProjectRole.PARTICIPANT) + pending_project.set_participants_info(participants_info) + with db.session_scope() as session: + session.add(pending_project) + session.commit() + mock_pure_domain.return_value = 'wrong coordinator' + participants_map['part2'].state = PendingProjectState.ACCEPTED.name + with self.assertRaisesRegex(grpc.RpcError, + 'wrong coordinator is not coordinator in pending project unique1') as cm: + self._stub.UpdatePendingProject( + UpdatePendingProjectRequest(uuid=pending_project.uuid, participants_map=participants_map)) + self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED) + mock_pure_domain.return_value = 'test' + resp = self._stub.UpdatePendingProject( + UpdatePendingProjectRequest(uuid=pending_project.uuid, participants_map=participants_map)) + with db.session_scope() as session: + self.assertEqual(resp, Empty()) + result: PendingProject = session.query(PendingProject).get(pending_project.id) + self.assertEqual(result.get_participants_info(), ParticipantsInfo(participants_map=participants_map)) + + @patch('fedlearner_webconsole.rpc.v2.project_service_server.get_pure_domain_from_context') + def test_sync_pending_project_state(self, mock_pure_domain): + # test participant sync to coordinator + participants_info = ParticipantsInfo(participants_map=self.participants_map) + pending_project = PendingProject(uuid='unique2', + name='test', + state=PendingProjectState.ACCEPTED, + role=ProjectRole.COORDINATOR) + pending_project.set_participants_info(participants_info) + with db.session_scope() as session: + session.add(pending_project) + session.commit() + mock_pure_domain.return_value = 'part1' + resp = self._stub.SyncPendingProjectState( + SyncPendingProjectStateRequest(uuid=pending_project.uuid, state='ACCEPTED')) + with db.session_scope() as session: + self.assertEqual(resp, Empty()) + result: PendingProject = session.query(PendingProject).get(pending_project.id) + participants_info.participants_map['part1'].state = PendingProjectState.ACCEPTED.name + self.assertEqual(result.get_participants_info(), 
participants_info)
+
+    def test_is_same_participants(self):
+        self.assertTrue(
+            _is_same_participants(self.participants_map, {
+                'test': ParticipantInfo(),
+                'part1': ParticipantInfo(),
+                'part2': ParticipantInfo()
+            }))
+        self.assertFalse(
+            _is_same_participants(self.participants_map, {
+                'test': ParticipantInfo(),
+                'part1': ParticipantInfo(),
+                'part3': ParticipantInfo()
+            }))
+
+    @patch('fedlearner_webconsole.rpc.v2.project_service_server.PendingProjectService.create_project_locally')
+    def test_create_project(self, mock_create: MagicMock):
+        with db.session_scope() as session:
+            pending_project1 = PendingProject(uuid='test1',
+                                              id=1,
+                                              name='test project',
+                                              state=PendingProjectState.ACCEPTED)
+            pending_project2 = PendingProject(uuid='pending',
+                                              id=2,
+                                              name='test project 2',
+                                              state=PendingProjectState.PENDING)
+            pending_project3 = PendingProject(uuid='dup',
+                                              id=3,
+                                              name='test project 3',
+                                              state=PendingProjectState.ACCEPTED)
+            project = Project(name='test project 3')
+            session.add_all([pending_project1, pending_project2, pending_project3, project])
+            session.commit()
+        # successful
+        resp = self._stub.CreateProject(CreateProjectRequest(uuid='test1'))
+        self.assertEqual(resp, Empty())
+        mock_create.assert_called_once_with('test1')
+        # reset so the following assert_not_called checks are meaningful
+        mock_create.reset_mock()
+        # fail due to pending project not found
+        with self.assertRaisesRegex(grpc.RpcError, 'failed to find pending project, uuid is nothing') as cm:
+            self._stub.CreateProject(CreateProjectRequest(uuid='nothing'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND)
+        mock_create.assert_not_called()
+        # fail due to state not valid
+        with self.assertRaisesRegex(grpc.RpcError, 'pending pending project has not been accepted') as cm:
+            self._stub.CreateProject(CreateProjectRequest(uuid='pending'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.FAILED_PRECONDITION)
+        mock_create.assert_not_called()
+        # fail due to name duplicate
+        with self.assertRaisesRegex(grpc.RpcError, 'test project 3 project already exists, uuid is dup') as cm:
+            self._stub.CreateProject(CreateProjectRequest(uuid='dup'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.ALREADY_EXISTS)
+        mock_create.assert_not_called()
+
+    @patch('fedlearner_webconsole.rpc.v2.project_service_server.get_pure_domain_from_context')
+    def test_delete_pending_project(self, mock_pure_domain: MagicMock):
+        with db.session_scope() as session:
+            pending_project1 = PendingProject(uuid='test1',
+                                              id=1,
+                                              name='test project',
+                                              state=PendingProjectState.ACCEPTED)
+            pending_project1.set_participants_info(ParticipantsInfo(participants_map=self.participants_map))
+            session.add(pending_project1)
+            session.commit()
+        mock_pure_domain.return_value = 'part1'
+        with self.assertRaisesRegex(grpc.RpcError, 'part1 is not coordinator in pending project test1') as cm:
+            self._stub.DeletePendingProject(DeletePendingProjectRequest(uuid='test1'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED)
+        mock_pure_domain.return_value = 'test'
+        resp = self._stub.DeletePendingProject(DeletePendingProjectRequest(uuid='test1'))
+        self.assertEqual(resp, Empty())
+        with db.session_scope() as session:
+            self.assertIsNone(session.query(PendingProject).get(pending_project1.id))
+        # deleting an already-deleted pending project is a no-op
+        resp = self._stub.DeletePendingProject(DeletePendingProjectRequest(uuid='test1'))
+        self.assertEqual(resp, Empty())
+
+    @patch('fedlearner_webconsole.rpc.v2.project_service_server.get_pure_domain_from_context')
+    def test_send_template_revision(self, mock_pure_domain: MagicMock):
+        mock_pure_domain.return_value = 'a'
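+        # Revisions are sent out of order (index 2, 3, then 1); the template is
+        # expected to end up with the config of the highest revision_index (3).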
+ self._stub.SendTemplateRevision( + SendTemplateRevisionRequest(config=WorkflowDefinition(group_alias='test'), + name='test', + revision_index=2, + comment='test comment', + kind=WorkflowTemplateKind.PEER.name)) + self._stub.SendTemplateRevision( + SendTemplateRevisionRequest(config=WorkflowDefinition(group_alias='test', variables=[Variable()]), + name='test', + revision_index=3, + comment='test comment', + kind=WorkflowTemplateKind.PEER.name)) + self._stub.SendTemplateRevision( + SendTemplateRevisionRequest(config=WorkflowDefinition(group_alias='test'), + name='test', + revision_index=1, + comment='test comment', + kind=WorkflowTemplateKind.PEER.name)) + with db.session_scope() as session: + tpl = session.query(WorkflowTemplate).filter_by(name='test').first() + self.assertEqual(tpl.get_config(), WorkflowDefinition(group_alias='test', variables=[Variable()])) + self.assertEqual(tpl.coordinator_pure_domain_name, 'a') + self.assertEqual(tpl.kind, 2) + revisions = session.query(WorkflowTemplateRevision).filter_by(template_id=tpl.id).all() + self.assertEqual(sorted([r.revision_index for r in revisions]), [1, 2, 3]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_client.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_client.py new file mode 100644 index 000000000..df4f9a848 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_client.py @@ -0,0 +1,99 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import grpc +from envs import Envs +from typing import Iterable, Optional +from google.protobuf import empty_pb2 + +from fedlearner_webconsole.rpc.v2.client_base import ParticipantProjectRpcClient +from fedlearner_webconsole.dataset.models import DatasetKindV2, ResourceState +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2_grpc import ResourceServiceStub +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import GetAlgorithmRequest, GetAlgorithmProjectRequest, \ + InformDatasetRequest, ListAlgorithmProjectsRequest, ListAlgorithmProjectsResponse, ListAlgorithmsRequest, \ + ListAlgorithmsResponse, GetAlgorithmFilesRequest, GetAlgorithmFilesResponse, ListDatasetsRequest, \ + ListDatasetsResponse +from fedlearner_webconsole.proto.dataset_pb2 import TimeRange +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmProjectPb, AlgorithmPb +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.decorators.retry import retry_fn + + +def _need_retry_for_get(err: Exception) -> bool: + if not isinstance(err, grpc.RpcError): + return False + # No need to retry for NOT_FOUND and PERMISSION_DENIED + if err.code() == grpc.StatusCode.NOT_FOUND or err.code() == grpc.StatusCode.PERMISSION_DENIED: + return False + return True + + +def _default_need_retry(err: Exception) -> bool: + return isinstance(err, grpc.RpcError) + + +class ResourceServiceClient(ParticipantProjectRpcClient): + + def __init__(self, channel: grpc.Channel): + super().__init__(channel) + self._stub: ResourceServiceStub = ResourceServiceStub(channel) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def list_algorithm_projects(self, filter_exp: Optional[FilterExpression] = None) -> ListAlgorithmProjectsResponse: + request = ListAlgorithmProjectsRequest(filter_exp=filter_exp) + return self._stub.ListAlgorithmProjects(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_algorithm_project(self, algorithm_project_uuid: str) -> AlgorithmProjectPb: + request = GetAlgorithmProjectRequest(algorithm_project_uuid=algorithm_project_uuid) + return self._stub.GetAlgorithmProject(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def list_algorithms(self, algorithm_project_uuid: str) -> ListAlgorithmsResponse: + request = ListAlgorithmsRequest(algorithm_project_uuid=algorithm_project_uuid) + return self._stub.ListAlgorithms(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_algorithm(self, algorithm_uuid: str) -> AlgorithmPb: + request = GetAlgorithmRequest(algorithm_uuid=algorithm_uuid) + return self._stub.GetAlgorithm(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_need_retry_for_get) + def get_algorithm_files(self, algorithm_uuid: str) -> Iterable[GetAlgorithmFilesResponse]: + request = GetAlgorithmFilesRequest(algorithm_uuid=algorithm_uuid) + return self._stub.GetAlgorithmFiles(request=request, timeout=Envs.GRPC_STREAM_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def inform_dataset(self, dataset_uuid: str, auth_status: AuthStatus) -> empty_pb2.Empty: + request = InformDatasetRequest(uuid=dataset_uuid, auth_status=auth_status.name) + return self._stub.InformDataset(request=request, 
timeout=Envs.GRPC_CLIENT_TIMEOUT) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def list_datasets(self, + kind: Optional[DatasetKindV2] = None, + uuid: Optional[str] = None, + state: Optional[ResourceState] = None, + time_range: Optional[TimeRange] = None) -> ListDatasetsResponse: + request = ListDatasetsRequest() + if kind is not None: + request.kind = kind.name + if uuid is not None: + request.uuid = uuid + if state is not None: + request.state = state.name + if time_range is not None: + request.time_range.MergeFrom(time_range) + return self._stub.ListDatasets(request=request, timeout=Envs.GRPC_CLIENT_TIMEOUT) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_client_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_client_test.py new file mode 100644 index 000000000..be6c725a5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_client_test.py @@ -0,0 +1,185 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import grpc +import unittest +import grpc_testing +from google.protobuf.descriptor import ServiceDescriptor +from google.protobuf.empty_pb2 import Empty + +from testing.rpc.client import RpcClientTestCase +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.dataset.models import DatasetKindV2, ResourceState +from fedlearner_webconsole.proto.rpc.v2 import resource_service_pb2 +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmPb, AlgorithmProjectPb +from fedlearner_webconsole.proto.dataset_pb2 import ParticipantDatasetRef, TimeRange +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import GetAlgorithmRequest, GetAlgorithmProjectRequest, \ + InformDatasetRequest, ListAlgorithmProjectsRequest, ListAlgorithmProjectsResponse, ListAlgorithmsRequest, \ + ListAlgorithmsResponse, GetAlgorithmFilesRequest, GetAlgorithmFilesResponse, ListDatasetsRequest, \ + ListDatasetsResponse +from fedlearner_webconsole.rpc.v2.resource_service_client import ResourceServiceClient + +_SERVER_DESCRIPTOR: ServiceDescriptor = resource_service_pb2.DESCRIPTOR.services_by_name['ResourceService'] + + +class ResourceServiceClientTest(RpcClientTestCase): + + def setUp(self): + super().setUp() + self._fake_channel: grpc_testing.Channel = grpc_testing.channel([_SERVER_DESCRIPTOR], + grpc_testing.strict_real_time()) + self._client = ResourceServiceClient(self._fake_channel) + + def test_list_algorithm_projects(self): + call = self.client_execution_pool.submit(self._client.list_algorithm_projects) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['ListAlgorithmProjects']) + algorithm_projects = [ + AlgorithmProjectPb(uuid='1', name='algo-project-1', type='NN_LOCAL', source='USER'), + AlgorithmProjectPb(uuid='2', name='algo-project-2', type='NN_VERTICAL', source='USER') + ] + expected_response = 
ListAlgorithmProjectsResponse(algorithm_projects=algorithm_projects) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, ListAlgorithmProjectsRequest()) + self.assertEqual(call.result(), expected_response) + + def test_list_algorithms(self): + call = self.client_execution_pool.submit(self._client.list_algorithms, algorithm_project_uuid='1') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['ListAlgorithms']) + algorithms = [ + AlgorithmPb(uuid='1', name='test-algo-1', version=1, type='NN_LOCAL', source='USER'), + AlgorithmPb(uuid='2', name='test-algo-2', version=2, type='NN_VERTICAL', source='USER') + ] + expected_response = ListAlgorithmsResponse(algorithms=algorithms) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, ListAlgorithmsRequest(algorithm_project_uuid='1')) + self.assertEqual(call.result(), expected_response) + + def test_get_algorithm_project(self): + call = self.client_execution_pool.submit(self._client.get_algorithm_project, algorithm_project_uuid='1') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['GetAlgorithmProject']) + expected_response = AlgorithmProjectPb(uuid='1', name='test-algo-project') + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetAlgorithmProjectRequest(algorithm_project_uuid='1')) + self.assertEqual(call.result(), expected_response) + + def test_get_algorithm(self): + call = self.client_execution_pool.submit(self._client.get_algorithm, algorithm_uuid='1') + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['GetAlgorithm']) + expected_response = AlgorithmPb(uuid='1', name='test-algo') + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetAlgorithmRequest(algorithm_uuid='1')) + self.assertEqual(call.result(), expected_response) + + def test_get_algorithm_files(self): + call = self.client_execution_pool.submit(self._client.get_algorithm_files, algorithm_uuid='1') + invocation_metadata, request, rpc = self._fake_channel.take_unary_stream( + _SERVER_DESCRIPTOR.methods_by_name['GetAlgorithmFiles']) + resp = GetAlgorithmFilesResponse(hash='ac3ee699961c58ef80a78c2434efe0d0', chunk=b'') + rpc.send_response(resp) + rpc.send_response(resp) + rpc.terminate( + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, GetAlgorithmFilesRequest(algorithm_uuid='1')) + resps = list(call.result()) + self.assertEqual(len(resps), 2) + for res in resps: + self.assertEqual(res, resp) + + def test_inform_dataset(self): + call = self.client_execution_pool.submit(self._client.inform_dataset, + dataset_uuid='test dataset uuid', + auth_status=AuthStatus.AUTHORIZED) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['InformDataset']) + expected_response = Empty() + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, InformDatasetRequest(uuid='test dataset uuid', + auth_status=AuthStatus.AUTHORIZED.name)) + self.assertEqual(call.result(), 
expected_response) + + def test_list_datasets(self): + # test no args + call = self.client_execution_pool.submit(self._client.list_datasets) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['ListDatasets']) + participant_datasets_ref = [ + ParticipantDatasetRef(uuid='dataset_1 uuid', project_id=1, name='dataset_1'), + ParticipantDatasetRef(uuid='dataset_2 uuid', project_id=1, name='dataset_2') + ] + expected_response = ListDatasetsResponse(participant_datasets=participant_datasets_ref) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(request, ListDatasetsRequest()) + self.assertEqual(call.result(), expected_response) + # test has args + call = self.client_execution_pool.submit(self._client.list_datasets, + kind=DatasetKindV2.RAW, + uuid='dataset_1 uuid', + state=ResourceState.SUCCEEDED, + time_range=TimeRange(days=1)) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVER_DESCRIPTOR.methods_by_name['ListDatasets']) + participant_datasets_ref = [ParticipantDatasetRef(uuid='dataset_1 uuid', project_id=1, name='dataset_1')] + expected_response = ListDatasetsResponse(participant_datasets=participant_datasets_ref) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual( + request, + ListDatasetsRequest(kind='RAW', uuid='dataset_1 uuid', state='SUCCEEDED', time_range=TimeRange(days=1))) + self.assertEqual(call.result(), expected_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_server.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_server.py new file mode 100644 index 000000000..1f03d1056 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_server.py @@ -0,0 +1,146 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import grpc +from grpc import ServicerContext +from google.protobuf import empty_pb2 +from typing import Iterable +from datetime import timedelta + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.proto import remove_secrets +from fedlearner_webconsole.proto.rpc.v2 import resource_service_pb2_grpc +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import GetAlgorithmRequest, GetAlgorithmProjectRequest, \ + InformDatasetRequest, ListAlgorithmProjectsRequest, ListAlgorithmProjectsResponse, ListAlgorithmsRequest, \ + ListAlgorithmsResponse, GetAlgorithmFilesRequest, GetAlgorithmFilesResponse, ListDatasetsRequest, \ + ListDatasetsResponse +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmProjectPb, AlgorithmPb +from fedlearner_webconsole.algorithm.service import AlgorithmProjectService, AlgorithmService +from fedlearner_webconsole.algorithm.models import AlgorithmProject, Algorithm, PublishStatus +from fedlearner_webconsole.algorithm.transmit.sender import AlgorithmSender +from fedlearner_webconsole.dataset.models import Dataset, DatasetKindV2, ResourceState +from fedlearner_webconsole.dataset.services import DatasetService +from fedlearner_webconsole.dataset.auth_service import AuthService +from fedlearner_webconsole.rpc.v2.utils import get_grpc_context_info, get_pure_domain_from_context +from fedlearner_webconsole.audit.decorators import emits_rpc_event +from fedlearner_webconsole.proto.audit_pb2 import Event + + +class ResourceServiceServicer(resource_service_pb2_grpc.ResourceServiceServicer): + + def ListAlgorithmProjects(self, request: ListAlgorithmProjectsRequest, + context: ServicerContext) -> ListAlgorithmProjectsResponse: + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + algo_projects = AlgorithmProjectService(session).get_published_algorithm_projects( + project_id=project_id, filter_exp=request.filter_exp) + algorithm_projects = [] + for algo_project in algo_projects: + algo_project.updated_at = AlgorithmProjectService(session).get_published_algorithms_latest_update_time( + algo_project.id) + algorithm_projects.append(remove_secrets(algo_project.to_proto())) + return ListAlgorithmProjectsResponse(algorithm_projects=algorithm_projects) + + def ListAlgorithms(self, request: ListAlgorithmsRequest, context: ServicerContext) -> ListAlgorithmsResponse: + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + algorithm_project: AlgorithmProject = session.query(AlgorithmProject). 
\ + filter_by(project_id=project_id, + uuid=request.algorithm_project_uuid).first() + if algorithm_project is None: + context.abort(grpc.StatusCode.NOT_FOUND, + f'algorithm_project uuid: {request.algorithm_project_uuid} not found') + algos = AlgorithmService(session).get_published_algorithms(project_id=project_id, + algorithm_project_id=algorithm_project.id) + algorithms = [] + for algo in algos: + algorithms.append(remove_secrets(algo.to_proto())) + return ListAlgorithmsResponse(algorithms=algorithms) + + def GetAlgorithmProject(self, request: GetAlgorithmProjectRequest, context: ServicerContext) -> AlgorithmProjectPb: + with db.session_scope() as session: + algo_project: AlgorithmProject = session.query(AlgorithmProject).filter_by( + uuid=request.algorithm_project_uuid).first() + if algo_project is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'algorithm project uuid:' + f' {request.algorithm_project_uuid} not found') + if algo_project.publish_status != PublishStatus.PUBLISHED: + context.abort(grpc.StatusCode.PERMISSION_DENIED, f'algorithm project uuid:' + f' {request.algorithm_project_uuid} is not published') + return remove_secrets(algo_project.to_proto()) + + def GetAlgorithm(self, request: GetAlgorithmRequest, context: ServicerContext) -> AlgorithmPb: + with db.session_scope() as session: + algorithm: Algorithm = session.query(Algorithm).filter_by(uuid=request.algorithm_uuid).first() + if algorithm is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'algorithm uuid: {request.algorithm_uuid} not found') + if algorithm.publish_status != PublishStatus.PUBLISHED: + context.abort(grpc.StatusCode.PERMISSION_DENIED, f'algorithm uuid: {request.algorithm_uuid} ' + f'is not published') + return remove_secrets(algorithm.to_proto()) + + def GetAlgorithmFiles(self, request: GetAlgorithmFilesRequest, + context: ServicerContext) -> Iterable[GetAlgorithmFilesResponse]: + with db.session_scope() as session: + algorithm: Algorithm = session.query(Algorithm).filter_by(uuid=request.algorithm_uuid).first() + if algorithm is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'algorithm uuid: {request.algorithm_uuid} not found') + if algorithm.publish_status != PublishStatus.PUBLISHED: + context.abort(grpc.StatusCode.PERMISSION_DENIED, f'algorithm uuid: {request.algorithm_uuid} ' + f'is not published') + yield from AlgorithmSender().make_algorithm_iterator(algorithm.path) + + @emits_rpc_event(resource_type=Event.ResourceType.DATASET, + op_type=Event.OperationType.INFORM, + resource_name_fn=lambda request: request.uuid) + def InformDataset(self, request: InformDatasetRequest, context: ServicerContext) -> empty_pb2.Empty: + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + participant_pure_domain = get_pure_domain_from_context(context) + dataset: Dataset = session.query(Dataset).populate_existing().with_for_update().filter_by( + project_id=project_id, uuid=request.uuid).first() + if dataset is None: + context.abort(grpc.StatusCode.NOT_FOUND, f'dataset {request.uuid} not found') + try: + AuthStatus[request.auth_status] + except KeyError: + context.abort(grpc.StatusCode.INVALID_ARGUMENT, f'auth_status {request.auth_status} is invalid') + + participants_info = dataset.get_participants_info() + if participant_pure_domain not in participants_info.participants_map: + context.abort(grpc.StatusCode.PERMISSION_DENIED, + f'{participant_pure_domain} is not participant of dataset {request.uuid}') + AuthService(session=session, 
dataset_job=dataset.parent_dataset_job).update_auth_status( + domain_name=participant_pure_domain, auth_status=AuthStatus[request.auth_status]) + session.commit() + return empty_pb2.Empty() + + def ListDatasets(self, request: ListDatasetsRequest, context: ServicerContext) -> ListDatasetsResponse: + kind = DatasetKindV2[request.kind] if request.kind else None + uuid = request.uuid if request.uuid else None + state = ResourceState[request.state] if request.state else None + time_range = timedelta(days=request.time_range.days, hours=request.time_range.hours) + # set time_range to None if time_range is empty + if not time_range: + time_range = None + with db.session_scope() as session: + project_id, _ = get_grpc_context_info(session, context) + datasets = DatasetService(session=session).get_published_datasets(project_id=project_id, + kind=kind, + uuid=uuid, + state=state, + time_range=time_range) + return ListDatasetsResponse(participant_datasets=datasets) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_server_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_server_test.py new file mode 100644 index 000000000..adfce842b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/resource_service_server_test.py @@ -0,0 +1,438 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import grpc +import unittest +from unittest.mock import ANY, patch, MagicMock +from concurrent import futures +from datetime import datetime, timedelta +from google.protobuf.json_format import ParseDict + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.rpc.v2.resource_service_server import ResourceServiceServicer +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.algorithm.models import AlgorithmProject, Algorithm, AlgorithmType, Source, PublishStatus,\ + AlgorithmParameter +from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobKind, DatasetJobState, DatasetKindV2,\ + DatasetType, ResourceState +from fedlearner_webconsole.utils.filtering import parse_expression +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.flask_utils import to_dict +from fedlearner_webconsole.proto.rpc.v2 import resource_service_pb2_grpc +from fedlearner_webconsole.proto.dataset_pb2 import TimeRange +from fedlearner_webconsole.proto.rpc.v2.resource_service_pb2 import InformDatasetRequest, \ + ListAlgorithmProjectsRequest, ListAlgorithmsRequest, GetAlgorithmRequest, GetAlgorithmFilesRequest, \ + GetAlgorithmProjectRequest, ListDatasetsRequest +from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo + + +class ResourceServiceTest(NoWebServerTestCase): + LISTEN_PORT = 1989 + + def setUp(self): + super().setUp() + self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=20)) + resource_service_pb2_grpc.add_ResourceServiceServicer_to_server(ResourceServiceServicer(), self._server) + self._server.add_insecure_port(f'[::]:{self.LISTEN_PORT}') + self._server.start() + self._channel = grpc.insecure_channel(target=f'localhost:{self.LISTEN_PORT}') + self._stub = resource_service_pb2_grpc.ResourceServiceStub(self._channel) + + with db.session_scope() as session: + project1 = Project(id=1, name='project-1') + project2 = Project(id=2, name='project-2') + algo_project1 = AlgorithmProject(id=1, + project_id=1, + uuid='algo-project-uuid-1', + name='algo-project-1', + type=AlgorithmType.NN_LOCAL, + source=Source.USER, + latest_version=1, + publish_status=PublishStatus.PUBLISHED, + comment='comment-1', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo_project2 = AlgorithmProject(id=2, + project_id=1, + uuid='algo-project-uuid-2', + name='algo-project-2', + type=AlgorithmType.NN_VERTICAL, + source=Source.THIRD_PARTY, + latest_version=2, + publish_status=PublishStatus.PUBLISHED, + comment='comment-2', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo_project3 = AlgorithmProject(id=3, + project_id=1, + uuid='algo-project-uuid-3', + name='algo-project-3', + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + latest_version=3, + publish_status=PublishStatus.UNPUBLISHED, + comment='comment-3', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo_project4 = AlgorithmProject(id=4, + project_id=2, + uuid='algo-project-uuid-4', + name='algo-project-4', + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + latest_version=4, + publish_status=PublishStatus.UNPUBLISHED, + comment='comment-4', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo1 
= Algorithm(id=1, + algorithm_project_id=1, + project_id=1, + name='algo-1', + uuid='algo-uuid-1', + version=1, + publish_status=PublishStatus.PUBLISHED, + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + comment='comment-1', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 15, 12, 0, 5)) + algo2 = Algorithm(id=2, + algorithm_project_id=1, + project_id=1, + name='algo-2', + uuid='algo-uuid-2', + version=2, + publish_status=PublishStatus.PUBLISHED, + type=AlgorithmType.NN_LOCAL, + source=Source.THIRD_PARTY, + comment='comment-2', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 16, 12, 0, 5)) + algo3 = Algorithm(id=3, + algorithm_project_id=1, + project_id=1, + name='algo-3', + uuid='algo-uuid-3', + version=3, + publish_status=PublishStatus.UNPUBLISHED, + type=AlgorithmType.TREE_VERTICAL, + source=Source.UNSPECIFIED, + comment='comment-3', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo4 = Algorithm(id=4, + algorithm_project_id=2, + project_id=1, + name='algo-4', + uuid='algo-uuid-4', + version=4, + publish_status=PublishStatus.PUBLISHED, + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + comment='comment-4', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo5 = Algorithm(id=5, + algorithm_project_id=3, + project_id=1, + name='algo-5', + uuid='algo-uuid-5', + version=5, + publish_status=PublishStatus.UNPUBLISHED, + type=AlgorithmType.NN_VERTICAL, + source=Source.USER, + comment='comment-5', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + algo6 = Algorithm(id=6, + algorithm_project_id=4, + project_id=2, + name='algo-6', + uuid='algo-uuid-6', + version=4, + publish_status=PublishStatus.PUBLISHED, + type=AlgorithmType.NN_LOCAL, + source=Source.THIRD_PARTY, + comment='comment-6', + created_at=datetime(2012, 1, 14, 12, 0, 5), + updated_at=datetime(2012, 1, 14, 12, 0, 5)) + parameter = ParseDict({'variables': [{'name': 'BATCH_SIZE', 'value': '128'}]}, AlgorithmParameter()) + algo1.set_parameter(parameter) + session.add_all([ + project1, project2, algo_project1, algo_project2, algo_project3, algo_project4, algo1, algo2, algo3, + algo4, algo5, algo6 + ]) + session.commit() + + def tearDown(self): + self._channel.close() + self._server.stop(5) + return super().tearDown() + + @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info') + def test_list_algorithm_projects(self, mock_get_grpc_context_info: MagicMock): + mock_get_grpc_context_info.return_value = 1, 1 + resp = self._stub.ListAlgorithmProjects(ListAlgorithmProjectsRequest()) + algorithm_projects = sorted(resp.algorithm_projects, key=lambda x: x.uuid) + self.assertEqual(len(algorithm_projects), 2) + self.assertEqual(algorithm_projects[0].uuid, 'algo-project-uuid-1') + self.assertEqual(algorithm_projects[0].type, 'NN_LOCAL') + self.assertEqual(algorithm_projects[0].source, 'USER') + self.assertEqual(algorithm_projects[0].updated_at, to_timestamp(datetime(2012, 1, 16, 12, 0, 5))) + self.assertEqual(algorithm_projects[1].uuid, 'algo-project-uuid-2') + self.assertEqual(algorithm_projects[1].type, 'NN_VERTICAL') + self.assertEqual(algorithm_projects[1].source, 'THIRD_PARTY') + self.assertEqual(algorithm_projects[1].updated_at, to_timestamp(datetime(2012, 1, 14, 12, 0, 5))) + + @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info') + def 
test_list_algorithm_project_with_filter_exp(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        filter_exp = parse_expression('(name~="1")')
+        resp = self._stub.ListAlgorithmProjects(ListAlgorithmProjectsRequest(filter_exp=filter_exp))
+        self.assertEqual(len(resp.algorithm_projects), 1)
+        algo_project = resp.algorithm_projects[0]
+        self.assertEqual(algo_project.uuid, 'algo-project-uuid-1')
+        self.assertEqual(algo_project.type, 'NN_LOCAL')
+        self.assertEqual(algo_project.source, 'USER')
+        filter_exp = parse_expression('(type:["NN_VERTICAL"])')
+        resp = self._stub.ListAlgorithmProjects(ListAlgorithmProjectsRequest(filter_exp=filter_exp))
+        self.assertEqual(len(resp.algorithm_projects), 1)
+        algo_project = resp.algorithm_projects[0]
+        self.assertEqual(algo_project.uuid, 'algo-project-uuid-2')
+        self.assertEqual(algo_project.source, 'THIRD_PARTY')
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_list_algorithms(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        resp = self._stub.ListAlgorithms(ListAlgorithmsRequest(algorithm_project_uuid='algo-project-uuid-1'))
+        algorithms = resp.algorithms
+        self.assertEqual(len(algorithms), 2)
+        self.assertEqual(algorithms[0].uuid, 'algo-uuid-1')
+        self.assertEqual(algorithms[1].uuid, 'algo-uuid-2')
+        self.assertEqual(algorithms[0].type, 'NN_VERTICAL')
+        self.assertEqual(algorithms[1].source, 'THIRD_PARTY')
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_list_algorithms_with_wrong_algorithm_project_uuid(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.ListAlgorithms(ListAlgorithmsRequest(algorithm_project_uuid='algo-project-uuid-5'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND)
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_get_algorithm_project(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        resp = self._stub.GetAlgorithmProject(GetAlgorithmProjectRequest(algorithm_project_uuid='algo-project-uuid-1'))
+        self.assertEqual(resp.type, 'NN_LOCAL')
+        self.assertEqual(resp.source, 'USER')
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_get_algorithm_project_with_wrong_uuid(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.GetAlgorithmProject(GetAlgorithmProjectRequest(algorithm_project_uuid='1'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND)
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_get_unpublished_algorithm_project(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.GetAlgorithmProject(
+                GetAlgorithmProjectRequest(algorithm_project_uuid='algo-project-uuid-3'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED)
+
+    def test_get_algorithm_files_with_wrong_algorithm_uuid(self):
+        with self.assertRaises(grpc.RpcError) as cm:
+            resp = self._stub.GetAlgorithmFiles(GetAlgorithmFilesRequest(algorithm_uuid='algo-uuid-7'))
+            # for a streaming rpc, the error is not raised until the response iterator is consumed
+            for _ in resp:
+                pass
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND)
+
+    def test_get_algorithm_with_unpublished_algorithm(self):
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.GetAlgorithm(GetAlgorithmRequest(algorithm_uuid='algo-uuid-3'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED)
+
+    def test_get_algorithm(self):
+        resp = self._stub.GetAlgorithm(GetAlgorithmRequest(algorithm_uuid='algo-uuid-1'))
+        self.assertEqual(resp.uuid, 'algo-uuid-1')
+        self.assertEqual(resp.name, 'algo-1')
+        self.assertEqual(resp.type, 'NN_VERTICAL')
+        self.assertEqual(resp.source, 'USER')
+        self.assertEqual(resp.version, 1)
+
+    def test_get_algorithm_files(self):
+        data_iterator = self._stub.GetAlgorithmFiles(GetAlgorithmFilesRequest(algorithm_uuid='algo-uuid-1'))
+        resps = list(data_iterator)
+        self.assertEqual(len(resps), 1)
+        self.assertEqual(resps[0].hash, 'd41d8cd98f00b204e9800998ecf8427e')
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_pure_domain_from_context')
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_inform_dataset(self, mock_get_grpc_context_info: MagicMock, mock_get_pure_domain_from_context: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        participant_domain_name = 'test participant'
+        mock_get_pure_domain_from_context.return_value = participant_domain_name
+        # test not_found
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.InformDataset(InformDatasetRequest(uuid='dataset uuid', auth_status='AUTHORIZED'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.NOT_FOUND)
+
+        # test participant not in participants_info
+        with db.session_scope() as session:
+            dataset = Dataset(id=1,
+                              uuid='dataset uuid',
+                              name='default dataset',
+                              dataset_type=DatasetType.PSI,
+                              comment='test comment',
+                              path='/data/dataset/123',
+                              project_id=1,
+                              dataset_kind=DatasetKindV2.RAW,
+                              is_published=True,
+                              auth_status=AuthStatus.PENDING)
+            session.add(dataset)
+            dataset_job = DatasetJob(id=1,
+                                     uuid='dataset_job uuid',
+                                     project_id=1,
+                                     input_dataset_id=0,
+                                     output_dataset_id=1,
+                                     kind=DatasetJobKind.RSA_PSI_DATA_JOIN,
+                                     state=DatasetJobState.PENDING,
+                                     coordinator_id=1)
+            session.add(dataset_job)
+            session.commit()
+        with self.assertRaises(grpc.RpcError) as cm:
+            self._stub.InformDataset(InformDatasetRequest(uuid='dataset uuid', auth_status='AUTHORIZED'))
+        self.assertEqual(cm.exception.code(), grpc.StatusCode.PERMISSION_DENIED)
+
+        # test pass
+        with db.session_scope() as session:
+            dataset: Dataset = session.query(Dataset).get(1)
+            participants_info = ParticipantsInfo(
+                participants_map={participant_domain_name: ParticipantInfo(auth_status='PENDING')})
+            dataset.set_participants_info(participants_info=participants_info)
+            session.commit()
+        self._stub.InformDataset(InformDatasetRequest(uuid='dataset uuid', auth_status='AUTHORIZED'))
+        with db.session_scope() as session:
+            dataset: Dataset = session.query(Dataset).get(1)
+            participants_info = dataset.get_participants_info()
+            self.assertEqual(participants_info.participants_map[participant_domain_name].auth_status, 'AUTHORIZED')
+
+    @patch('fedlearner_webconsole.rpc.v2.resource_service_server.get_grpc_context_info')
+    def test_list_datasets(self, mock_get_grpc_context_info: MagicMock):
+        mock_get_grpc_context_info.return_value = 1, 1
+        with db.session_scope() as session:
+            dataset_job_1 = DatasetJob(id=1,
+                                       uuid='dataset_job_1',
+                                       project_id=1,
+                                       input_dataset_id=0,
+                                       output_dataset_id=1,
kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.PENDING, + coordinator_id=1) + session.add(dataset_job_1) + dataset_1 = Dataset(id=1, + uuid='dataset_1 uuid', + name='default dataset 1', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.RAW, + is_published=True) + session.add(dataset_1) + dataset_job_2 = DatasetJob(id=2, + uuid='dataset_job_2', + project_id=1, + input_dataset_id=0, + output_dataset_id=2, + kind=DatasetJobKind.RSA_PSI_DATA_JOIN, + state=DatasetJobState.SUCCEEDED, + coordinator_id=1, + time_range=timedelta(days=1)) + session.add(dataset_job_2) + dataset_2 = Dataset(id=2, + uuid='dataset_2 uuid', + name='default dataset 2', + dataset_type=DatasetType.PSI, + comment='test comment', + path='/data/dataset/123', + project_id=1, + dataset_kind=DatasetKindV2.PROCESSED, + is_published=True) + session.add(dataset_2) + session.commit() + + # test no filter + expected_response = { + 'participant_datasets': [{ + 'uuid': 'dataset_2 uuid', + 'name': 'default dataset 2', + 'format': 'TABULAR', + 'updated_at': ANY, + 'dataset_kind': 'PROCESSED', + 'dataset_type': 'PSI', + 'auth_status': 'PENDING', + 'project_id': ANY, + 'participant_id': ANY, + 'file_size': 0, + 'value': 0 + }, { + 'uuid': 'dataset_1 uuid', + 'name': 'default dataset 1', + 'format': 'TABULAR', + 'updated_at': ANY, + 'dataset_kind': 'RAW', + 'dataset_type': 'PSI', + 'auth_status': 'PENDING', + 'project_id': ANY, + 'participant_id': ANY, + 'file_size': 0, + 'value': 0 + }] + } + resp = self._stub.ListDatasets(ListDatasetsRequest()) + self.assertEqual(to_dict(resp), expected_response) + + # test with filter + expected_response = { + 'participant_datasets': [{ + 'uuid': 'dataset_2 uuid', + 'name': 'default dataset 2', + 'format': 'TABULAR', + 'updated_at': ANY, + 'dataset_kind': 'PROCESSED', + 'dataset_type': 'PSI', + 'auth_status': 'PENDING', + 'project_id': ANY, + 'participant_id': ANY, + 'file_size': 0, + 'value': 0 + }] + } + resp = self._stub.ListDatasets( + ListDatasetsRequest(uuid='dataset_2 uuid', + kind=DatasetKindV2.PROCESSED.name, + state=ResourceState.SUCCEEDED.name, + time_range=TimeRange(days=1))) + self.assertEqual(to_dict(resp), expected_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/review_service_client.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/review_service_client.py new file mode 100644 index 000000000..9dee0dd2e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/review_service_client.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import grpc +from fedlearner_webconsole.proto.rpc.v2 import review_service_pb2 +from fedlearner_webconsole.proto.rpc.v2.review_service_pb2_grpc import ReviewServiceStub +from fedlearner_webconsole.proto import review_pb2 +from fedlearner_webconsole.rpc.v2.client_base import ParticipantRpcClient +from fedlearner_webconsole.utils.decorators.retry import retry_fn + + +def _default_need_retry(err: Exception) -> bool: + return isinstance(err, grpc.RpcError) + + +class ReviewServiceClient(ParticipantRpcClient): + + def __init__(self, channel: grpc.Channel): + super().__init__(channel) + self._stub: ReviewServiceStub = ReviewServiceStub(channel) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def create_ticket(self, ttype: review_pb2.TicketType, creator_username: str, + details: review_pb2.TicketDetails) -> review_pb2.Ticket: + return self._stub.CreateTicket( + review_service_pb2.CreateTicketRequest( + ttype=ttype, + creator_username=creator_username, + details=details, + )) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def get_ticket(self, uuid: str) -> review_pb2.Ticket: + return self._stub.GetTicket(review_service_pb2.GetTicketRequest(uuid=uuid)) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/review_service_client_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/review_service_client_test.py new file mode 100644 index 000000000..9604efee1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/review_service_client_test.py @@ -0,0 +1,82 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest + +import grpc +import grpc_testing + +from google.protobuf.descriptor import ServiceDescriptor +from fedlearner_webconsole.proto.rpc.v2 import review_service_pb2 +from fedlearner_webconsole.proto import review_pb2 + +from fedlearner_webconsole.rpc.v2.review_service_client import ReviewServiceClient +from testing.rpc.client import RpcClientTestCase + +_SERVICE_DESCRIPTOR: ServiceDescriptor = review_service_pb2.DESCRIPTOR.services_by_name['ReviewService'] + + +class ReviewServiceClientTest(RpcClientTestCase): + + def setUp(self): + super().setUp() + self._fake_channel: grpc_testing.Channel = grpc_testing.channel([_SERVICE_DESCRIPTOR], + grpc_testing.strict_real_time()) + self._client = ReviewServiceClient(self._fake_channel) + + def test_check_health(self): + call = self.client_execution_pool.submit(self._client.create_ticket, + ttype=review_pb2.TicketType.CREATE_PROJECT, + creator_username='fffff', + details=review_pb2.TicketDetails(uuid='u1234')) + + _, _, rpc = self._fake_channel.take_unary_unary(_SERVICE_DESCRIPTOR.methods_by_name['CreateTicket']) + + expected_response = review_pb2.Ticket( + type=review_pb2.TicketType.CREATE_PROJECT, + creator_username='fffff', + details=review_pb2.TicketDetails(uuid='u1234'), + uuid='u4321', + ) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(call.result(), expected_response) + + def test_get_ticket(self): + call = self.client_execution_pool.submit(self._client.get_ticket, uuid='u4321') + + _, _, rpc = self._fake_channel.take_unary_unary(_SERVICE_DESCRIPTOR.methods_by_name['GetTicket']) + + expected_response = review_pb2.Ticket( + type=review_pb2.TicketType.CREATE_PROJECT, + creator_username='fffff', + details=review_pb2.TicketDetails(uuid='u1234'), + uuid='u4321', + ) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(call.result(), expected_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_client.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_client.py new file mode 100644 index 000000000..c2b7a58ce --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_client.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import grpc +from fedlearner_webconsole.proto.rpc.v2.system_service_pb2 import (CheckHealthRequest, CheckHealthResponse, + ListFlagsRequest, ListFlagsResponse, + CheckTeeEnabledRequest, CheckTeeEnabledResponse) +from fedlearner_webconsole.proto.rpc.v2.system_service_pb2_grpc import SystemServiceStub +from fedlearner_webconsole.rpc.v2.client_base import ParticipantRpcClient +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.decorators.retry import retry_fn + + +def _default_need_retry(err: Exception) -> bool: + return isinstance(err, grpc.RpcError) + + +class SystemServiceClient(ParticipantRpcClient): + + def __init__(self, channel: grpc.Channel): + super().__init__(channel) + self._stub: SystemServiceStub = SystemServiceStub(channel) + + def check_health(self) -> CheckHealthResponse: + try: + return self._stub.CheckHealth(CheckHealthRequest()) + except grpc.RpcError as e: + # For health check, we don't throw grpc error directly + return CheckHealthResponse( + healthy=False, + message=e.details(), + ) + + def list_flags(self) -> dict: + response: ListFlagsResponse = self._stub.ListFlags(ListFlagsRequest()) + return to_dict(response.flags) + + @retry_fn(retry_times=3, need_retry=_default_need_retry) + def check_tee_enabled(self) -> CheckTeeEnabledResponse: + return self._stub.CheckTeeEnabled(CheckTeeEnabledRequest()) diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_client_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_client_test.py new file mode 100644 index 000000000..b1bf1e36d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_client_test.py @@ -0,0 +1,118 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
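A minimal usage sketch of `SystemServiceClient` (the address is hypothetical; in the console the channel comes from the participant RPC plumbing). Note that `check_health` deliberately swallows transport errors instead of raising:

```python
import grpc

from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient

# Hypothetical peer address; real channels are built by the client factories.
client = SystemServiceClient(grpc.insecure_channel('localhost:1990'))
resp = client.check_health()      # never raises grpc.RpcError
if not resp.healthy:
    print('peer unhealthy:', resp.message)
flags = client.list_flags()       # plain dict decoded from a protobuf Struct
tee = client.check_tee_enabled()  # retried up to 3 times on grpc.RpcError
```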
+# + +import unittest + +import grpc +import grpc_testing + +from google.protobuf.descriptor import ServiceDescriptor +from google.protobuf.struct_pb2 import Struct, Value +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2 + +from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient +from testing.rpc.client import RpcClientTestCase + +_SERVICE_DESCRIPTOR: ServiceDescriptor = system_service_pb2.DESCRIPTOR.services_by_name['SystemService'] + + +class SystemServiceClientTest(RpcClientTestCase): + + def setUp(self): + super().setUp() + self._fake_channel: grpc_testing.Channel = grpc_testing.channel([_SERVICE_DESCRIPTOR], + grpc_testing.strict_real_time()) + self._client = SystemServiceClient(self._fake_channel) + + def test_check_health(self): + call = self.client_execution_pool.submit(self._client.check_health) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CheckHealth']) + + expected_response = system_service_pb2.CheckHealthResponse( + application_version=common_pb2.ApplicationVersion( + revision='test rev', + branch_name='test branch', + version='1.0.0.1', + pub_date='20221212', + ), + healthy=True, + ) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(call.result(), expected_response) + + def test_check_health_rpc_error(self): + call = self.client_execution_pool.submit(self._client.check_health) + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CheckHealth']) + rpc.terminate( + response=None, + code=grpc.StatusCode.UNKNOWN, + trailing_metadata=(), + details='unknown server error', + ) + self.assertEqual(call.result(), + system_service_pb2.CheckHealthResponse( + healthy=False, + message='unknown server error', + )) + + def test_list_flags(self): + call = self.client_execution_pool.submit(self._client.list_flags) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['ListFlags']) + + expected_response = system_service_pb2.ListFlagsResponse(flags=Struct( + fields={ + 'flag1': Value(bool_value=True), + 'flag2': Value(string_value='string_value'), + })) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(call.result(), { + 'flag1': True, + 'flag2': 'string_value', + }) + + def test_check_tee_enabled(self): + call = self.client_execution_pool.submit(self._client.check_tee_enabled) + + invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( + _SERVICE_DESCRIPTOR.methods_by_name['CheckTeeEnabled']) + + expected_response = system_service_pb2.CheckTeeEnabledResponse(tee_enabled=True) + rpc.terminate( + response=expected_response, + code=grpc.StatusCode.OK, + trailing_metadata=(), + details=None, + ) + self.assertEqual(call.result(), expected_response) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_server.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_server.py new file mode 100644 index 000000000..387cf06d0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_server.py @@ -0,0 +1,42 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from grpc import ServicerContext
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.flag.models import get_flags
+from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2_grpc
+from fedlearner_webconsole.proto.rpc.v2.system_service_pb2 import (CheckHealthResponse, CheckHealthRequest,
+                                                                   ListFlagsRequest, ListFlagsResponse,
+                                                                   CheckTeeEnabledRequest, CheckTeeEnabledResponse)
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.tee.services import check_tee_enabled
+
+
+# TODO(linfan.fine): add request id decorator
+class SystemGrpcService(system_service_pb2_grpc.SystemServiceServicer):
+
+    def CheckHealth(self, request: CheckHealthRequest, context: ServicerContext):
+        with db.session_scope() as session:
+            version = SettingService(session).get_application_version()
+            return CheckHealthResponse(application_version=version.to_proto(), healthy=True)
+
+    def ListFlags(self, request: ListFlagsRequest, context: ServicerContext):
+        resp = ListFlagsResponse()
+        resp.flags.update(get_flags())
+        return resp
+
+    def CheckTeeEnabled(self, request: CheckTeeEnabledRequest, context: ServicerContext):
+        return CheckTeeEnabledResponse(tee_enabled=check_tee_enabled())
diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_server_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_server_test.py
new file mode 100644
index 000000000..83b85fedf
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/system_service_server_test.py
@@ -0,0 +1,105 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
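For orientation, `SystemGrpcService` is mounted like any generated gRPC servicer; the server test below does exactly this, and a standalone sketch looks like:

```python
from concurrent import futures

import grpc

from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2_grpc
from fedlearner_webconsole.rpc.v2.system_service_server import SystemGrpcService

server = grpc.server(futures.ThreadPoolExecutor(max_workers=20))
system_service_pb2_grpc.add_SystemServiceServicer_to_server(SystemGrpcService(), server)
server.add_insecure_port('[::]:1999')  # arbitrary port, as in the test below
server.start()
```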
+# + +import unittest +from concurrent import futures +from unittest.mock import patch, Mock + +import grpc + +from google.protobuf.struct_pb2 import Struct, Value + +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2_grpc +from fedlearner_webconsole.proto.rpc.v2.system_service_pb2 import CheckHealthRequest, CheckHealthResponse, \ + ListFlagsRequest, ListFlagsResponse, CheckTeeEnabledRequest, CheckTeeEnabledResponse +from fedlearner_webconsole.rpc.v2.system_service_server import SystemGrpcService +from fedlearner_webconsole.utils.app_version import ApplicationVersion +from testing.no_web_server_test_case import NoWebServerTestCase + + +class SystemServiceTest(NoWebServerTestCase): + LISTEN_PORT = 1999 + + def setUp(self): + super().setUp() + self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=20)) + system_service_pb2_grpc.add_SystemServiceServicer_to_server(SystemGrpcService(), self._server) + self._server.add_insecure_port(f'[::]:{self.LISTEN_PORT}') + self._server.start() + + self._stub = system_service_pb2_grpc.SystemServiceStub( + grpc.insecure_channel(target=f'localhost:{self.LISTEN_PORT}')) + + def tearDown(self): + self._server.stop(5) + return super().tearDown() + + @patch('fedlearner_webconsole.rpc.v2.system_service_server.SettingService.get_application_version') + def test_check_health(self, mock_get_application_version: Mock): + mock_get_application_version.return_value = ApplicationVersion( + revision='test rev', + branch_name='test branch', + version='1.0.0.1', + pub_date='20220101', + ) + + resp = self._stub.CheckHealth(CheckHealthRequest()) + self.assertEqual( + resp, + CheckHealthResponse( + application_version=common_pb2.ApplicationVersion( + revision='test rev', + branch_name='test branch', + version='1.0.0.1', + pub_date='20220101', + ), + healthy=True, + )) + + @patch('fedlearner_webconsole.rpc.v2.system_service_server.get_flags') + def test_list_flags(self, mock_flags: Mock): + mock_flags.return_value = { + 'flag1': True, + 'flag2': 'string_value', + 'flag3': { + 'key': 'value', + }, + } + + resp = self._stub.ListFlags(ListFlagsRequest()) + self.assertEqual( + resp, + ListFlagsResponse(flags=Struct( + fields={ + 'flag1': Value(bool_value=True), + 'flag2': Value(string_value='string_value'), + 'flag3': Value(struct_value=Struct(fields={ + 'key': Value(string_value='value'), + })) + }))) + + def test_check_tee_enabled(self): + Flag.TEE_MACHINE_DEPLOYED.value = True + resp = self._stub.CheckTeeEnabled(CheckTeeEnabledRequest()) + self.assertEqual(resp, CheckTeeEnabledResponse(tee_enabled=True)) + Flag.TEE_MACHINE_DEPLOYED.value = False + resp = self._stub.CheckTeeEnabled(CheckTeeEnabledRequest()) + self.assertEqual(resp, CheckTeeEnabledResponse(tee_enabled=False)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/utils.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/utils.py new file mode 100644 index 000000000..46aa498e9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/utils.py @@ -0,0 +1,63 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from grpc import ServicerContext
+from typing import Tuple
+from sqlalchemy.orm import Session
+from fedlearner_webconsole.rpc.auth import get_common_name, SSL_CLIENT_SUBJECT_DN_HEADER, PROJECT_NAME_HEADER
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.participant.services import ParticipantService
+from fedlearner_webconsole.utils.domain_name import get_pure_domain_name
+from fedlearner_webconsole.utils.pp_base64 import base64decode, base64encode
+
+
+def get_grpc_context_info(session: Session, context: ServicerContext) -> Tuple[int, int]:
+    metadata = dict(context.invocation_metadata())
+    project_name = decode_project_name(metadata.get(PROJECT_NAME_HEADER))
+    project_id, *_ = session.query(Project.id).filter_by(name=project_name).first()
+    cn = get_common_name(metadata.get(SSL_CLIENT_SUBJECT_DN_HEADER))
+    client_id = ParticipantService(session).get_participant_by_pure_domain_name(get_pure_domain_name(cn)).id
+    return project_id, client_id
+
+
+def get_pure_domain_from_context(context: ServicerContext) -> str:
+    metadata = dict(context.invocation_metadata())
+    cn = get_common_name(metadata.get(SSL_CLIENT_SUBJECT_DN_HEADER))
+    return get_pure_domain_name(cn)
+
+
+def _is_ascii(s: str) -> bool:
+    return all(ord(c) < 128 for c in s)
+
+
+def encode_project_name(project_name: str) -> str:
+    """Encodes project name to a gRPC-acceptable format.
+
+    gRPC does not accept unicode in headers, and for historical
+    reasons we have to stay compatible with ASCII strings; otherwise
+    an old gRPC server cannot read the project name correctly."""
+    if _is_ascii(project_name):
+        return project_name
+    return base64encode(project_name)
+
+
+def decode_project_name(encoded: str) -> str:
+    try:
+        return base64decode(encoded)
+    except Exception:  # pylint: disable=broad-except
+        # Not a base64 encoded string
+        pass
+    # Encoded as raw, see details in `encode_project_name`
+    return encoded
diff --git a/web_console_v2/api/fedlearner_webconsole/rpc/v2/utils_test.py b/web_console_v2/api/fedlearner_webconsole/rpc/v2/utils_test.py
new file mode 100644
index 000000000..02c68c426
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/rpc/v2/utils_test.py
@@ -0,0 +1,50 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
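One caveat on the pair above: `decode_project_name` is heuristic, so an ASCII name that happens to be valid base64 of UTF-8 text will not round-trip. A sketch (assuming `pp_base64.base64decode` decodes strict UTF-8):

```python
from fedlearner_webconsole.rpc.v2.utils import decode_project_name, encode_project_name

name = 'aGVsbG8='                 # ASCII, so the encoder keeps it raw...
assert encode_project_name(name) == name
print(decode_project_name(name))  # ...but the decoder yields 'hello'
```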
+#
+
+import unittest
+from unittest.mock import MagicMock
+
+from fedlearner_webconsole.rpc.auth import SSL_CLIENT_SUBJECT_DN_HEADER
+from fedlearner_webconsole.rpc.v2.utils import decode_project_name, encode_project_name, get_pure_domain_from_context
+
+
+class UtilsTest(unittest.TestCase):
+
+    def test_get_pure_domain_from_context(self):
+        mock_context = MagicMock(invocation_metadata=MagicMock(
+            return_value={
+                SSL_CLIENT_SUBJECT_DN_HEADER: 'CN=*.fl-xxx.com,OU=security,O=security,L=beijing,ST=beijing,C=CN'
+            }))
+        self.assertEqual(get_pure_domain_from_context(mock_context), 'xxx')
+
+    def test_encode_project_name_ascii(self):
+        self.assertEqual(encode_project_name('hello world'), 'hello world')
+        self.assertEqual(encode_project_name('-h%20w'), '-h%20w')
+
+    def test_encode_project_name_unicode(self):
+        self.assertEqual(encode_project_name('这是一个测试的名字'), '6L+Z5piv5LiA5Liq5rWL6K+V55qE5ZCN5a2X')
+        self.assertEqual(encode_project_name('中文 & en'), '5Lit5paHICYgZW4=')
+
+    def test_decode_project_name_ascii(self):
+        self.assertEqual(decode_project_name('hello world'), 'hello world')
+        self.assertEqual(decode_project_name('-h%20w'), '-h%20w')
+
+    def test_decode_project_name_unicode(self):
+        self.assertEqual(decode_project_name('6L+Z5piv5LiA5Liq5rWL6K+V55qE5ZCN5a2X'), '这是一个测试的名字')
+        self.assertEqual(decode_project_name('5Lit5paHICYgZW4='), '中文 & en')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/scheduler/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/scheduler/BUILD.bazel
new file mode 100644
index 000000000..9b2ca40ca
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/scheduler/BUILD.bazel
@@ -0,0 +1,59 @@
+package(default_visibility = ["//web_console_v2/api:console_api_package"])
+
+py_library(
+    name = "scheduler_lib",
+    srcs = [
+        "scheduler.py",
+        "transaction.py",
+    ],
+    imports = ["../.."],
+    deps = [
+        "//web_console_v2/api:envs_lib",
+        "//web_console_v2/api/fedlearner_webconsole:db_lib",
+        "//web_console_v2/api/fedlearner_webconsole/participant:services_lib",
+        "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib",
+        "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib",
+        "//web_console_v2/api/fedlearner_webconsole/workflow:resource_manager_lib",
+        "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib",
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+    ],
+)
+
+py_test(
+    name = "scheduler_lib_test",
+    size = "medium",
+    srcs = [
+        "scheduler_test.py",
+    ],
+    flaky = True,
+    imports = ["../.."],
+    main = "scheduler_test.py",
+    deps = [
+        ":scheduler_lib",
+        "//web_console_v2/api/fedlearner_webconsole/rpc:server_lib",
+        "//web_console_v2/api/testing:common_lib",
+    ],
+)
+
+py_test(
+    name = "workflow_commit_lib_test",
+    size = "small",
+    srcs = [
+        "workflow_commit_test.py",
+    ],
+    imports = ["../.."],
+    main = "workflow_commit_test.py",
+    deps = [
+        ":scheduler_lib",
+        "//web_console_v2/api/fedlearner_webconsole:db_lib",
+        "//web_console_v2/api/fedlearner_webconsole/job:models_lib",
+        "//web_console_v2/api/fedlearner_webconsole/job:yaml_formatter_lib",
+        "//web_console_v2/api/fedlearner_webconsole/participant:models_lib",
+        "//web_console_v2/api/fedlearner_webconsole/project:models_lib",
+        "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib",
+        "//web_console_v2/api/fedlearner_webconsole/workflow:service_lib",
+        "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto",
+        "//web_console_v2/api/testing:common_lib",
"//web_console_v2/api/testing/workflow_template", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler.py b/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler.py index d3e15aa1d..d7b649585 100644 --- a/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler.py +++ b/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,52 +14,37 @@ # coding: utf-8 # pylint: disable=broad-except - -import os import threading import logging import traceback -from fedlearner_webconsole.job.yaml_formatter import generate_job_run_yaml +from queue import Queue, Empty +from envs import Envs from fedlearner_webconsole.db import db -from fedlearner_webconsole.dataset.import_handler import ImportHandler -from fedlearner_webconsole.utils.k8s_client import k8s_client from fedlearner_webconsole.workflow.models import Workflow, WorkflowState -from fedlearner_webconsole.job.models import Job, JobState from fedlearner_webconsole.scheduler.transaction import TransactionManager -from fedlearner_webconsole.db import get_session -from fedlearner_webconsole.job.service import JobService class Scheduler(object): + def __init__(self): self._condition = threading.Condition(threading.RLock()) self._running = False self._terminate = False self._thread = None - self._pending_workflows = [] - self._pending_jobs = [] - #TODO: remove app - self._app = None - self._db_engine = None - self._import_handler = ImportHandler() - - def start(self, app, force=False): + self.workflow_queue = Queue() + + def start(self, force=False): if self._running: if not force: raise RuntimeError('Scheduler is already started') self.stop() - self._app = app - with self._app.app_context(): - self._db_engine = db.get_engine() - with self._condition: self._running = True self._terminate = False self._thread = threading.Thread(target=self._routine) self._thread.daemon = True self._thread.start() - self._import_handler.init(app) logging.info('Scheduler started') def stop(self): @@ -68,62 +53,37 @@ def stop(self): with self._condition: self._terminate = True - self._condition.notify_all() + # Interrupt the block of workflow_queue.get to stop immediately. 
+ self.workflow_queue.put(None) print('stopping') self._thread.join() self._running = False logging.info('Scheduler stopped') - def wakeup(self, workflow_ids=None, - job_ids=None, - data_batch_ids=None): - with self._condition: - if workflow_ids: - if isinstance(workflow_ids, int): - workflow_ids = [workflow_ids] - self._pending_workflows.extend(workflow_ids) - if job_ids: - if isinstance(job_ids, int): - job_ids = [job_ids] - self._pending_jobs.extend(job_ids) - if data_batch_ids: - self._import_handler.schedule_to_handle(data_batch_ids) - self._condition.notify_all() + def wakeup(self, workflow_id=None): + self.workflow_queue.put(workflow_id) def _routine(self): - self._app.app_context().push() - interval = int(os.environ.get( - 'FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL', 60)) + interval = float(Envs.SCHEDULER_POLLING_INTERVAL) while True: - with self._condition: - notified = self._condition.wait(interval) - - # TODO(wangsen): use Sqlalchemy insdtead of flask-Sqlalchemy - # refresh a new session to catch the update of db - db.session.remove() - if self._terminate: - return - if notified: - workflow_ids = self._pending_workflows - self._pending_workflows = [] - self._poll_workflows(workflow_ids) - - job_ids = self._pending_jobs - self._pending_jobs = [] - job_ids.extend(_get_waiting_jobs()) - self._poll_jobs(job_ids) - - self._import_handler.handle(pull=False) - continue - - workflows = db.session.query(Workflow.id).filter( - Workflow.target_state != WorkflowState.INVALID).all() + try: + try: + pending_workflow = self.workflow_queue.get(timeout=interval) + except Empty: + pending_workflow = None + with self._condition: + if self._terminate: + return + if pending_workflow: + self._poll_workflows([pending_workflow]) + + with db.session_scope() as session: + workflows = session.query(Workflow.id).filter(Workflow.target_state != WorkflowState.INVALID).all() self._poll_workflows([wid for wid, in workflows]) - - self._poll_jobs(_get_waiting_jobs()) - - self._import_handler.handle(pull=True) + # make the scheduler routine run forever. 
+ except Exception as e: + logging.error(f'Scheduler routine wrong: {str(e)}') def _poll_workflows(self, workflow_ids): logging.info(f'Scheduler polling {len(workflow_ids)} workflows...') @@ -131,58 +91,13 @@ def _poll_workflows(self, workflow_ids): try: self._schedule_workflow(workflow_id) except Exception as e: - logging.warning( - 'Error while scheduling workflow ' - f'{workflow_id}:\n{traceback.format_exc()}') - - def _poll_jobs(self, job_ids): - logging.info(f'Scheduler polling {len(job_ids)} jobs...') - for job_id in job_ids: - try: - self._schedule_job(job_id) - except Exception as e: - logging.warning( - 'Error while scheduling job ' - f'{job_id}:\n{traceback.format_exc()}') + logging.warning('Error while scheduling workflow ' f'{workflow_id}:\n{traceback.format_exc()}') def _schedule_workflow(self, workflow_id): logging.debug(f'Scheduling workflow {workflow_id}') - tm = TransactionManager(workflow_id) - return tm.process() - - def _schedule_job(self, job_id): - job = Job.query.get(job_id) - assert job is not None, f'Job {job_id} not found' - if job.state != JobState.WAITING: - return job.state - - with get_session(self._db_engine) as session: - job_service = JobService(session) - if not job_service.is_ready(job): - return job.state - config = job.get_config() - if config.is_federated: - if not job_service.is_peer_ready(job): - return job.state - - try: - yaml = generate_job_run_yaml(job) - k8s_client.create_flapp(yaml) - except Exception as e: - logging.error(f'Start job {job_id} has error msg: {e.args}') - job.error_message = str(e) - db.session.commit() - return job.state - job.error_message = None - job.start() - db.session.commit() - - return job.state - - -def _get_waiting_jobs(): - return [jid for jid, in db.session.query( - Job.id).filter(Job.state == JobState.WAITING)] + with db.session_scope() as session: + tm = TransactionManager(workflow_id, session) + return tm.process() scheduler = Scheduler() diff --git a/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler_test.py new file mode 100644 index 000000000..356b5d011 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/scheduler/scheduler_test.py @@ -0,0 +1,244 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
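The rewrite above replaces condition-variable polling with a queue: `Queue.get(timeout=...)` doubles as the wakeup channel and the polling interval, and a `None` sentinel unblocks a pending `get()` so `stop()` returns promptly. The pattern in isolation, as a self-contained sketch (not the console's code):

```python
import threading
from queue import Empty, Queue

workflow_queue: Queue = Queue()
terminate = threading.Event()


def routine(interval: float = 0.1) -> None:
    while True:
        try:
            pending = workflow_queue.get(timeout=interval)  # wakeup or timeout
        except Empty:
            pending = None
        if terminate.is_set():
            return
        if pending is not None:
            print(f'polling workflow {pending}')
        # ...the real routine also polls every non-INVALID workflow here...


worker = threading.Thread(target=routine, daemon=True)
worker.start()
workflow_queue.put(20)    # Scheduler.wakeup(20)
terminate.set()
workflow_queue.put(None)  # Scheduler.stop(): the sentinel unblocks get()
worker.join()
```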
+ +# coding: utf-8 + +from unittest.mock import patch + +import time +import copy +import unittest +import secrets +import logging +from http import HTTPStatus + +from uuid import uuid4 +from envs import Envs +from testing.common import BaseTestCase +from testing.common import multi_process_test +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.common_pb2 import CreateJobFlag +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.rpc.server import rpc_server +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate + + +class LeaderConfig(object): + SQLALCHEMY_DATABASE_URI = f'sqlite:///{Envs.BASE_DIR}/{uuid4()}-leader.db' + SQLALCHEMY_TRACK_MODIFICATIONS = False + JWT_SECRET_KEY = secrets.token_urlsafe(64) + PROPAGATE_EXCEPTIONS = True + LOGGING_LEVEL = logging.DEBUG + GRPC_LISTEN_PORT = 3990 + + +class FollowerConfig(object): + SQLALCHEMY_DATABASE_URI = f'sqlite:///{Envs.BASE_DIR}/{uuid4()}-follower.db' + SQLALCHEMY_TRACK_MODIFICATIONS = False + JWT_SECRET_KEY = secrets.token_urlsafe(64) + PROPAGATE_EXCEPTIONS = True + LOGGING_LEVEL = logging.DEBUG + GRPC_LISTEN_PORT = 4990 + + +class WorkflowTest(BaseTestCase): + + class Config(LeaderConfig): + pass + + @classmethod + def setUpClass(cls): + cls._patcher = patch('envs.Envs.SCHEDULER_POLLING_INTERVAL', '0.1') + cls._patcher.start() + + @classmethod + def tearDownClass(cls): + cls._patcher.stop() + + def setUp(self): + super().setUp() + self.signin_as_admin() + template1 = WorkflowTemplate(name='t1', comment='comment for t1', group_alias='g1') + template1.set_config(WorkflowDefinition(group_alias='g1',)) + with db.session_scope() as session: + session.add(template1) + session.commit() + # This is actually an integration test, so we need to start the rpc + rpc_server.stop() + rpc_server.start(self.Config.GRPC_LISTEN_PORT) + self._wf_template = { + 'group_alias': + 'test-template', + 'job_definitions': [{ + 'is_federated': True, + 'name': 'job1', + 'variables': [{ + 'name': 'x', + 'value': '1', + 'access_mode': 3 + }] + }, { + 'is_federated': True, + 'name': 'job2', + 'variables': [{ + 'name': 'y', + 'value': '2', + 'access_mode': 2 + }] + }] + } + + def leader_test_workflow(self): + self.setup_project('leader', FollowerConfig.GRPC_LISTEN_PORT) + cwf_resp = self.post_helper('/api/v2/projects/1/workflows', + data={ + 'name': 'test-workflow', + 'project_id': 1, + 'forkable': True, + 'config': self._wf_template, + 'template_id': 1 + }) + self.assertEqual(cwf_resp.status_code, HTTPStatus.CREATED) + cwf_data = self.get_response_data(cwf_resp) + self.assertEqual(cwf_data['job_ids'], [1, 2]) + + self._check_workflow_state(1, 'READY_TO_RUN') + + # test update + patch_config = copy.deepcopy(self._wf_template) + patch_config['job_definitions'][1]['variables'][0]['value'] = '4' + resp = self.patch_helper('/api/v2/projects/1/workflows/1', data={'config': patch_config, 'template_id': 1}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + + resp = self.get_helper('/api/v2/projects/1/workflows/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + ret_wf = resp.json['data']['config'] + self.assertEqual(ret_wf['job_definitions'][1]['variables'][0]['value'], '4') + + # test update remote + patch_config['job_definitions'][0]['variables'][0]['value'] = '5' + resp = self.patch_helper('/api/v2/projects/1/workflows/1/peer_workflows', + data={ + 'config': patch_config, + 'template_id': 1 + }) + 
self.assertEqual(resp.status_code, HTTPStatus.OK) + + resp = self.get_helper('/api/v2/projects/1/workflows/1/peer_workflows') + self.assertEqual(resp.status_code, HTTPStatus.OK) + ret_wf = list(resp.json['data'].values())[0]['config'] + self.assertEqual(ret_wf['job_definitions'][0]['variables'][0]['value'], '5') + + # test fork + cwf_resp = self.post_helper('/api/v2/projects/1/workflows', + data={ + 'name': 'test-workflow2', + 'project_id': 1, + 'forkable': True, + 'forked_from': 1, + 'create_job_flags': [ + CreateJobFlag.REUSE, + CreateJobFlag.NEW, + ], + 'peer_create_job_flags': [ + CreateJobFlag.REUSE, + CreateJobFlag.NEW, + ], + 'template_id': 1, + 'config': self._wf_template, + 'fork_proposal_config': { + 'job_definitions': [{ + 'variables': [{ + 'name': 'x', + 'value': '2' + }] + }, { + 'variables': [{ + 'name': 'y', + 'value': '3' + }] + }] + } + }) + self.assertEqual(cwf_resp.status_code, HTTPStatus.CREATED) + cwf_data = self.get_response_data(cwf_resp) + self.assertEqual(cwf_data['job_ids'], [1, 3]) + self._check_workflow_state(2, 'READY_TO_RUN') + time.sleep(2) + resp = self.post_helper('/api/v2/projects/1/workflows/2:invalidate') + self._check_workflow_state(2, 'INVALID') + + def follower_test_workflow(self): + self.setup_project('follower', LeaderConfig.GRPC_LISTEN_PORT) + self._check_workflow_state(1, 'PENDING_ACCEPT') + + cwf_resp = self.put_helper('/api/v2/projects/1/workflows/1', + data={ + 'forkable': True, + 'config': self._wf_template, + 'template_id': 1 + }) + self.assertEqual(cwf_resp.status_code, HTTPStatus.OK) + cwf_data = self.get_response_data(cwf_resp) + self.assertEqual(cwf_data['job_ids'], [1, 2]) + self._check_workflow_state(1, 'READY_TO_RUN') + with db.session_scope() as session: + self.assertEqual(len(session.query(Job).filter_by(workflow_id=1).all()), 2) + + # test fork + json = self._check_workflow_state(2, 'READY_TO_RUN') + with db.session_scope() as session: + self.assertEqual(len(session.query(Job).all()), 3) + self.assertEqual(json['data']['create_job_flags'], [ + CreateJobFlag.REUSE, + CreateJobFlag.NEW, + ]) + self.assertEqual(json['data']['peer_create_job_flags'], [ + CreateJobFlag.REUSE, + CreateJobFlag.NEW, + ]) + jobs = json['data']['config']['job_definitions'] + self.assertEqual(jobs[0]['variables'][0]['value'], '2') + self.assertEqual(jobs[1]['variables'][0]['value'], '2') + time.sleep(2) + resp = self.post_helper('/api/v2/projects/1/workflows/2:invalidate') + self._check_workflow_state(2, 'INVALID') + + def _check_workflow_state(self, workflow_id, state, max_retries=10): + cnt = 0 + while True: + time.sleep(0.1) + cnt = cnt + 1 + if cnt > max_retries: + self.fail(f'workflow [{workflow_id}] state is unexpected') + resp = self.get_helper(f'/api/v2/projects/1/workflows/{workflow_id}') + if resp.status_code != HTTPStatus.OK: + logging.info(f'get workflow {workflow_id} failed: {resp.json}') + continue + if resp.json['data']['state'] == state: + return resp.json + + +if __name__ == '__main__': + multi_process_test([{ + 'class': WorkflowTest, + 'method': 'leader_test_workflow', + 'config': LeaderConfig + }, { + 'class': WorkflowTest, + 'method': 'follower_test_workflow', + 'config': FollowerConfig + }]) + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/scheduler/transaction.py b/web_console_v2/api/fedlearner_webconsole/scheduler/transaction.py index aa605e157..706e1a75a 100644 --- a/web_console_v2/api/fedlearner_webconsole/scheduler/transaction.py +++ b/web_console_v2/api/fedlearner_webconsole/scheduler/transaction.py @@ -1,4 
+1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,18 +13,21 @@ # limitations under the License. # coding: utf-8 - -from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.services import ParticipantService from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.workflow.models import ( - Workflow, WorkflowState, TransactionState, VALID_TRANSITIONS -) +from fedlearner_webconsole.workflow.models import (Workflow, WorkflowState, TransactionState, VALID_TRANSITIONS) from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.workflow.resource_manager import ResourceManager +from fedlearner_webconsole.workflow.workflow_controller import invalidate_workflow_locally + class TransactionManager(object): - def __init__(self, workflow_id): + + def __init__(self, workflow_id, session): self._workflow_id = workflow_id - self._workflow = Workflow.query.get(workflow_id) + self._session = session + # TODO(hangweiqiang): remove workflow, project from __init__ + self._workflow = session.query(Workflow).get(workflow_id) assert self._workflow is not None self._project = self._workflow.project assert self._project is not None @@ -39,15 +42,14 @@ def project(self): def process(self): # process local workflow + manager = ResourceManager(self._session, self._workflow) if self._workflow.is_local(): - self._workflow.update_local_state() + manager.update_local_state() self._reload() return self._workflow # reload workflow and resolve -ing states - self._workflow.update_state( - self._workflow.state, self._workflow.target_state, - self._workflow.transaction_state) + manager.update_state(self._workflow.state, self._workflow.target_state, self._workflow.transaction_state) self._reload() if not self._recover_from_abort(): @@ -56,77 +58,67 @@ def process(self): if self._workflow.target_state == WorkflowState.INVALID: return self._workflow - if self._workflow.state == WorkflowState.INVALID: - raise RuntimeError( - f'Cannot process invalid workflow {self._workflow.name}') + if self._workflow.is_invalid(): + raise RuntimeError(f'Cannot process invalid workflow {self._workflow.name}') assert (self._workflow.state, self._workflow.target_state) \ - in VALID_TRANSITIONS + in VALID_TRANSITIONS if self._workflow.transaction_state == TransactionState.READY: # prepare self as coordinator - self._workflow.update_state( - self._workflow.state, - self._workflow.target_state, - TransactionState.COORDINATOR_PREPARE) + manager.update_state(self._workflow.state, self._workflow.target_state, + TransactionState.COORDINATOR_PREPARE) self._reload() if self._workflow.transaction_state == \ TransactionState.COORDINATOR_COMMITTABLE: # prepare self succeeded. 
Tell participants to prepare - states = self._broadcast_state( - self._workflow.state, self._workflow.target_state, - TransactionState.PARTICIPANT_PREPARE) + states = self._broadcast_state(self._workflow.state, self._workflow.target_state, + TransactionState.PARTICIPANT_PREPARE) committable = True for state in states: if state != TransactionState.PARTICIPANT_COMMITTABLE: committable = False if state == TransactionState.ABORTED: # abort as coordinator if some participants aborted - self._workflow.update_state( - None, None, TransactionState.COORDINATOR_ABORTING) + manager.update_state(None, None, TransactionState.COORDINATOR_ABORTING) self._reload() break # commit as coordinator if participants all committable if committable: - self._workflow.update_state( - None, None, TransactionState.COORDINATOR_COMMITTING) + manager.update_state(None, None, TransactionState.COORDINATOR_COMMITTING) self._reload() if self._workflow.transaction_state == \ TransactionState.COORDINATOR_COMMITTING: # committing as coordinator. tell participants to commit - if self._broadcast_state_and_check( - self._workflow.state, self._workflow.target_state, - TransactionState.PARTICIPANT_COMMITTING, - TransactionState.READY): + if self._broadcast_state_and_check(self._workflow.state, self._workflow.target_state, + TransactionState.PARTICIPANT_COMMITTING, TransactionState.READY): # all participants committed. finish. - self._workflow.commit() + manager.commit() self._reload() self._recover_from_abort() return self._workflow def _reload(self): - db.session.commit() - db.session.refresh(self._workflow) + self._session.commit() + self._session.refresh(self._workflow) - def _broadcast_state( - self, state, target_state, transaction_state): - project_config = self._project.get_config() + def _broadcast_state(self, state, target_state, transaction_state): + service = ParticipantService(self._session) + participants = service.get_platform_participants_by_project(self._project.id) states = [] - for party in project_config.participants: - client = RpcClient(project_config, party) - forked_from_uuid = Workflow.query.filter_by( - id=self._workflow.forked_from - ).first().uuid if self._workflow.forked_from else None - resp = client.update_workflow_state( - self._workflow.name, state, target_state, transaction_state, - self._workflow.uuid, - forked_from_uuid, self._workflow.extra) + for participant in participants: + client = RpcClient.from_project_and_participant(self._project.name, self._project.token, + participant.domain_name) + forked_from_uuid = self._session.query(Workflow).filter_by( + id=self._workflow.forked_from).first().uuid if self._workflow.forked_from else None + resp = client.update_workflow_state(self._workflow.name, state, target_state, transaction_state, + self._workflow.uuid, forked_from_uuid, self._workflow.extra) if resp.status.code == common_pb2.STATUS_SUCCESS: - if resp.state == WorkflowState.INVALID: - self._workflow.invalidate() + if WorkflowState(resp.state) == WorkflowState.INVALID: + invalidate_workflow_locally(self._session, self._workflow) self._reload() raise RuntimeError('Peer workflow invalidated. 
Abort.') states.append(TransactionState(resp.transaction_state)) @@ -134,8 +126,7 @@ def _broadcast_state( states.append(None) return states - def _broadcast_state_and_check(self, - state, target_state, transaction_state, target_transaction_state): + def _broadcast_state_and_check(self, state, target_state, transaction_state, target_transaction_state): states = self._broadcast_state(state, target_state, transaction_state) for i in states: if i != target_transaction_state: @@ -145,13 +136,10 @@ def _broadcast_state_and_check(self, def _recover_from_abort(self): if self._workflow.transaction_state == \ TransactionState.COORDINATOR_ABORTING: - if not self._broadcast_state_and_check( - self._workflow.state, WorkflowState.INVALID, - TransactionState.PARTICIPANT_ABORTING, - TransactionState.ABORTED): + if not self._broadcast_state_and_check(self._workflow.state, WorkflowState.INVALID, + TransactionState.PARTICIPANT_ABORTING, TransactionState.ABORTED): return False - self._workflow.update_state( - None, WorkflowState.INVALID, TransactionState.ABORTED) + self._workflow.update_state(None, WorkflowState.INVALID, TransactionState.ABORTED, self._session) self._reload() if self._workflow.transaction_state != TransactionState.ABORTED: @@ -159,10 +147,9 @@ def _recover_from_abort(self): assert self._workflow.target_state == WorkflowState.INVALID - if not self._broadcast_state_and_check( - self._workflow.state, WorkflowState.INVALID, - TransactionState.READY, TransactionState.READY): + if not self._broadcast_state_and_check(self._workflow.state, WorkflowState.INVALID, TransactionState.READY, + TransactionState.READY): return False - self._workflow.update_state(None, None, TransactionState.READY) + self._workflow.update_state(None, None, TransactionState.READY, self._session) self._reload() return True diff --git a/web_console_v2/api/fedlearner_webconsole/scheduler/workflow_commit_test.py b/web_console_v2/api/fedlearner_webconsole/scheduler/workflow_commit_test.py new file mode 100644 index 000000000..2095a8cda --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/scheduler/workflow_commit_test.py @@ -0,0 +1,110 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
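The refactor above threads an explicit session through `TransactionManager` instead of the module-level `db.session`; one reconciliation step is now driven like this (mirroring `Scheduler._schedule_workflow`):

```python
from fedlearner_webconsole.db import db
from fedlearner_webconsole.scheduler.transaction import TransactionManager

with db.session_scope() as session:
    # One 2PC reconciliation step; the scheduler repeats it until the
    # workflow's target_state settles.
    workflow = TransactionManager(workflow_id=20, session=session).process()
```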
+ +# coding: utf-8 +import os +import time +import unittest +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.workflow.service import WorkflowService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.models import JobState +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.scheduler.transaction import TransactionState +from fedlearner_webconsole.scheduler.scheduler import \ + scheduler +from fedlearner_webconsole.proto import project_pb2 +from fedlearner_webconsole.job.yaml_formatter import YamlFormatterService +from testing.workflow_template.test_template_left import make_workflow_template +from testing.no_web_server_test_case import NoWebServerTestCase + + +class WorkflowsCommitTest(NoWebServerTestCase): + + @classmethod + def setUpClass(cls): + os.environ['FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL'] = '0.1' + + def setUp(self): + super().setUp() + # Inserts project + config = { + 'variables': [{ + 'name': 'namespace', + 'value': 'leader' + }, { + 'name': 'basic_envs', + 'value': '{}' + }, { + 'name': 'storage_root_path', + 'value': '/' + }] + } + + project = Project(name='test', config=ParseDict(config, project_pb2.ProjectConfig()).SerializeToString()) + participant = Participant(name='party_leader', host='127.0.0.1', port=5000, domain_name='fl-leader.com') + relationship = ProjectParticipant(project_id=1, participant_id=1) + with db.session_scope() as session: + session.add(project) + session.add(participant) + session.add(relationship) + session.commit() + + @staticmethod + def _wait_until(cond, retry_times: int = 5): + for _ in range(retry_times): + time.sleep(0.1) + with db.session_scope() as session: + if cond(session): + return + + def test_workflow_commit(self): + # test the committing stage for workflow creating + workflow_def = make_workflow_template() + workflow = Workflow(id=20, + name='job_test1', + comment='这是一个测试工作流', + config=workflow_def.SerializeToString(), + project_id=1, + forkable=True, + state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.PARTICIPANT_COMMITTING, + creator='test_creator') + with db.session_scope() as session: + session.add(workflow) + session.commit() + WorkflowService(session).setup_jobs(workflow) + session.commit() + + scheduler.wakeup(20) + self._wait_until(lambda session: session.query(Workflow).get(20).state == WorkflowState.READY) + with db.session_scope() as session: + workflow = session.query(Workflow).get(20) + jobs = workflow.get_jobs(session) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0].state, JobState.NEW) + self.assertEqual(jobs[1].state, JobState.NEW) + # test generate job run yaml + job_loaded_json = YamlFormatterService(session).generate_job_run_yaml(jobs[0]) + self.assertEqual(job_loaded_json['metadata']['name'], jobs[0].name) + self.assertEqual(job_loaded_json['metadata']['labels']['owner'], workflow.creator) + session.commit() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/serving/BUILD.bazel new file mode 100644 index 000000000..f8cb1f555 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/BUILD.bazel @@ -0,0 +1,276 @@ +load("@rules_python//python:defs.bzl", "py_library") + 
+package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "database_fetcher_lib", + srcs = ["database_fetcher.py"], + imports = ["../.."], +) + +py_library( + name = "metrics_lib", + srcs = ["metrics.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "services_lib", + srcs = ["services.py"], + imports = ["../.."], + deps = [ + ":database_fetcher_lib", + ":metrics_lib", + ":models_lib", + ":remote_lib", + ":serving_yaml_template_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:service_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:sorting_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + "@common_kubernetes//:pkg", + "@common_tensorflow//:pkg", + "@common_tensorflow_serving_api//:pkg", + ], +) + +py_library( + name = "serving_yaml_template_lib", + srcs = ["serving_yaml_template.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_yaml_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "runners_lib", + srcs = [ + "runners.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_time_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:process_utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "utils_lib", + srcs = [ + "utils.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "remote_lib", + srcs 
= [ + "remote.py", + ], + imports = ["../.."], + visibility = [ + "//visibility:public", + ], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":metrics_lib", + ":models_lib", + ":participant_fetcher_lib", + ":remote_lib", + ":runners_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + "@common_tensorflow//:pkg", + ], +) + +py_library( + name = "participant_fetcher_lib", + srcs = ["participant_fetcher.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":services_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + ], +) + +py_test( + name = "apis_inference_lib_test", + size = "medium", + srcs = [ + "apis_inference_test.py", + ], + imports = ["../.."], + main = "apis_inference_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "apis_runner_lib_test", + size = "medium", + srcs = [ + "apis_runner_test.py", + ], + imports = ["../.."], + main = "apis_runner_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "services_lib_test", + size = "medium", + srcs = [ + "services_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "services_test.py", + deps = [ + ":services_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + ], +) + +py_test( + name = "serving_yaml_template_lib_test", + size = "small", + srcs = [ + "serving_yaml_template_test.py", + ], + imports = ["../.."], + main = "serving_yaml_template_test.py", + deps = [ + ":serving_yaml_template_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "utils_lib_test", + size = "small", + srcs = [ + "utils_test.py", + ], + imports = ["../.."], + main = "utils_test.py", + 
deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/serving/__init__.py b/web_console_v2/api/fedlearner_webconsole/serving/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/serving/apis.py b/web_console_v2/api/fedlearner_webconsole/serving/apis.py new file mode 100644 index 000000000..16d752eb2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/apis.py @@ -0,0 +1,556 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +from http import HTTPStatus +from typing import Optional + +from flask_restful import Resource +from google.protobuf import json_format +from google.protobuf.text_format import Parse +from marshmallow import Schema, fields, post_load +from sqlalchemy.orm import joinedload +from sqlalchemy.sql.elements import ColumnElement +from tensorflow.core.example.example_pb2 import Example + +from fedlearner_webconsole.proto.serving_pb2 import ServingServiceRemotePlatform +from fedlearner_webconsole.serving import remote +from fedlearner_webconsole.utils.decorators.pp_flask import use_args, use_kwargs + +from fedlearner_webconsole.composer.composer_service import ComposerService +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.proto import serving_pb2 +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, ModelSignatureParserInput +from fedlearner_webconsole.proto.filtering_pb2 import FilterOp, SimpleExpression, FilterExpression +from fedlearner_webconsole.serving.metrics import serving_metrics_emit_counter +from fedlearner_webconsole.serving.participant_fetcher import ParticipantFetcher +from fedlearner_webconsole.serving.runners import ModelSignatureParser, start_query_participant, start_update_model +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import NotFoundException, InvalidArgumentException, \ + InternalException +from fedlearner_webconsole.serving.models import ServingModel, ServingDeployment, ServingNegotiator +from fedlearner_webconsole.serving.services import TensorflowServingService, ServingDeploymentService, \ + ServingModelService +from fedlearner_webconsole.utils import filtering, sorting, flask_utils +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator +from fedlearner_webconsole.utils.flask_utils import make_flask_response, FilterExpField +from fedlearner_webconsole.utils.proto import to_dict + +SORT_SUPPORTED_COLUMN = ['created_at'] + + +class ResourceParams(Schema): + cpu = 
fields.Str(required=True) + memory = fields.Str(required=True) + replicas = fields.Integer(required=True) + + +class RemotePlatformParams(Schema): + platform = fields.Str(required=True) + payload = fields.Str(required=True) + + +class ServingCreateParams(Schema): + name = fields.Str(required=True) + comment = fields.Str(required=False) + model_id = fields.Integer(required=False) + model_group_id = fields.Integer(required=False) + is_local = fields.Boolean(required=False) + resource = fields.Nested(ResourceParams, required=False) + remote_platform = fields.Nested(RemotePlatformParams, required=False) + + @post_load + def make(self, data, **kwargs): + if 'resource' in data: + data['resource'] = json_format.ParseDict(data['resource'], serving_pb2.ServingServiceResource()) + if 'remote_platform' in data: + data['remote_platform'] = json_format.ParseDict(data['remote_platform'], + serving_pb2.ServingServiceRemotePlatform()) + return data + + +class ServingUpdateParams(Schema): + comment = fields.Str(required=False) + model_id = fields.Integer(required=False) + model_group_id = fields.Integer(required=False) + resource = fields.Nested(ResourceParams, required=False) + + @post_load + def make(self, data, **kwargs): + if 'resource' in data: + data['resource'] = json_format.ParseDict(data['resource'], serving_pb2.ServingServiceResource()) + return data + + +def _build_keyword_query(exp: SimpleExpression) -> ColumnElement: + return ServingModel.name.ilike(f'%{exp.string_value}%') + + +class ServingServicesApiV2(Resource): + + FILTER_FIELDS = { + 'name': + filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.EQUAL: None}), + 'keyword': + filtering.SupportedField(type=filtering.FieldType.STRING, ops={FilterOp.CONTAIN: _build_keyword_query}), + } + + SORTER_FIELDS = ['created_at'] + + def __init__(self): + self._filter_builder = filtering.FilterBuilder(model_class=ServingModel, supported_fields=self.FILTER_FIELDS) + self._sorter_builder = sorting.SorterBuilder(model_class=ServingModel, supported_fields=self.SORTER_FIELDS) + + @use_kwargs( + { + 'filter_exp': FilterExpField(data_key='filter', required=False, load_default=None), + 'sorter_exp': fields.String(data_key='order_by', required=False, load_default=None), + }, + location='query') + @credentials_required + def get(self, project_id: int, filter_exp: Optional[FilterExpression], sorter_exp: Optional[str]): + """Get serving services list + --- + tags: + - serving + description: get serving services list + parameters: + - in: path + name: project_id + schema: + type: integer + - in: query + name: filter + schema: + type: string + - in: query + name: order_by + schema: + type: string + responses: + 200: + description: list of service service information + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.ServingService' + """ + service_list = [] + with db.session_scope() as session: + query = session.query(ServingModel) + query = query.filter(ServingModel.project_id == project_id) + if filter_exp is not None: + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter_exp: {str(e)}') from e + if sorter_exp is not None: + try: + sorter_exp = sorting.parse_expression(sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + else: + sorter_exp = sorting.SortExpression(field='created_at', is_asc=False) + query = 
self._sorter_builder.build_query(query, sorter_exp) + query = query.outerjoin(ServingDeployment, + ServingDeployment.id == ServingModel.serving_deployment_id).options( + joinedload(ServingModel.serving_deployment)) + all_records = query.all() + for serving_model in all_records: + serving_service = serving_model.to_serving_service() + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model_service.set_resource_and_status_on_ref(serving_service, serving_model) + serving_model_service.set_is_local_on_ref(serving_service, serving_model) + service_list.append(serving_service) + return make_flask_response(data=service_list, status=HTTPStatus.OK) + + @use_args(ServingCreateParams(), location='json_or_form') + @input_validator + @credentials_required + @emits_event(resource_type=Event.SERVING_SERVICE, op_type=Event.CREATE) + def post(self, body: dict, project_id: int): + """Create one serving service + --- + tags: + - serving + description: create one serving service + parameters: + - in: path + name: project_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/ServingCreateParams' + responses: + 201: + description: detail of one serving service + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ServingServiceDetail' + """ + if 'remote_platform' in body: # need check sso for third-party serving + current_sso = flask_utils.get_current_sso() + if current_sso is None: + raise InvalidArgumentException('not a sso user') + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param( + project_id=project_id, + name=body['name'], + is_local=body['is_local'], + comment=body['comment'] if 'comment' in body else None, + model_id=body['model_id'] if 'model_id' in body else None, + model_group_id=body['model_group_id'] if 'model_group_id' in body else None, + resource=body['resource'] if 'resource' in body else None, + remote_platform=body['remote_platform'] if 'remote_platform' in body else None) + + # start async query participant serving status + if 'is_local' in body and not body['is_local']: + start_query_participant(session) + + # start async query signature + if 'remote_platform' not in body: + runner_item_name = ModelSignatureParser.generate_task_name(serving_model.id, serving_model.name) + runner_input = RunnerInput(model_signature_parser_input=ModelSignatureParserInput( + serving_model_id=serving_model.id)) + ComposerService(session).collect_v2(name=runner_item_name, + items=[(ItemType.SERVING_SERVICE_PARSE_SIGNATURE, runner_input)]) + + # start auto update model runner + if serving_model.model_group_id is not None: + start_update_model(session) + + session.commit() + serving_metrics_emit_counter('serving.create.success', serving_model) + return make_flask_response(data=serving_model.to_serving_service_detail(), status=HTTPStatus.CREATED) + + +class ServingServiceApiV2(Resource): + + @use_kwargs({ + 'sorter_exp': fields.String(data_key='order_by', required=False, load_default=None), + }, + location='query') + @credentials_required + def get(self, project_id: int, serving_model_id: int, sorter_exp: Optional[str]): + """Get one serving service + --- + tags: + - serving + description: get one serving service + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: serving_model_id + schema: + type: integer + - in: query 
+ name: order_by + schema: + type: string + responses: + 200: + description: detail of one serving service + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ServingServiceDetail' + """ + sorter = None + if sorter_exp is not None: + try: + sorter = sorting.parse_expression(sorter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid sorter: {str(e)}') from e + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + result = serving_model_service.get_serving_service_detail(serving_model_id, project_id, sorter) + return make_flask_response(data=result) + + @use_args(ServingUpdateParams(), location='json_or_form') + @credentials_required + @emits_event(resource_type=Event.SERVING_SERVICE, op_type=Event.UPDATE) + def patch(self, body: dict, project_id: int, serving_model_id: int): + """Modify one serving service + --- + tags: + - serving + description: modify one serving service + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: serving_model_id + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/ServingUpdateParams' + responses: + 200: + description: detail of one serving service + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.ServingServiceDetail' + """ + with db.session_scope() as session: + serving_model = session.query(ServingModel).filter_by(id=serving_model_id, project_id=project_id).options( + joinedload(ServingModel.serving_deployment)).one_or_none() + if not serving_model: + raise NotFoundException(f'Failed to find serving service: {serving_model_id}') + if 'comment' in body: + serving_model.comment = body['comment'] + if serving_model.serving_deployment.is_remote_serving(): # need check sso for third-party serving + current_sso = flask_utils.get_current_sso() + if current_sso is None: + raise InvalidArgumentException('not a sso user') + need_update_model = False + if 'model_id' in body: + need_update_model = ServingModelService(session).update_model(model_id=body['model_id'], + model_group_id=None, + serving_model=serving_model) + elif 'model_group_id' in body: + need_update_model = ServingModelService(session).update_model(model_id=None, + model_group_id=body['model_group_id'], + serving_model=serving_model) + start_update_model(session) + if 'resource' in body: + current_resource = json.loads(serving_model.serving_deployment.resource) + new_resource = to_dict(body['resource']) + if new_resource != current_resource: + ServingModelService(session).update_resource(new_resource, serving_model) + if need_update_model and not serving_model.serving_deployment.is_remote_serving(): + # start async query signature + runner_item_name = ModelSignatureParser.generate_task_name(serving_model.id, serving_model.name) + runner_input = RunnerInput(model_signature_parser_input=ModelSignatureParserInput( + serving_model_id=serving_model.id)) + ComposerService(session).collect_v2(name=runner_item_name, + items=[(ItemType.SERVING_SERVICE_PARSE_SIGNATURE, runner_input)]) + session.add(serving_model) + session.commit() + serving_metrics_emit_counter('serving.update.success', serving_model) + return make_flask_response(data=serving_model.to_serving_service_detail()) + + @credentials_required + @emits_event(resource_type=Event.SERVING_SERVICE, op_type=Event.DELETE) + def delete(self, project_id: int, serving_model_id: int): + """Delete one serving 
service + --- + tags: + - serving + description: delete one serving service + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: serving_model_id + schema: + type: integer + responses: + 204: + description: delete the serving service successfully + """ + with db.session_scope() as session: + serving_model = session.query(ServingModel).filter_by(id=serving_model_id, + project_id=project_id).one_or_none() + if not serving_model: + serving_metrics_emit_counter('serving.delete.db_error', serving_model) + raise NotFoundException(f'Failed to find serving model: {serving_model_id}') + ServingModelService(session).delete_serving_service(serving_model) + session.commit() + serving_metrics_emit_counter('serving.delete.success', serving_model) + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class ServingServiceInferenceApiV2(Resource): + + @use_args({'input_data': fields.String(required=True, help='serving input data')}, location='json') + @credentials_required + def post(self, body: dict, project_id: int, serving_model_id: int): + """Get inference result from a serving service + --- + tags: + - serving + description: get inference result from a serving service + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: serving_model_id + schema: + type: integer + requestBody: + required: true + description: input data to do inference + content: + application/json: + schema: + type: string + responses: + 200: + description: inference result + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.PredictResponse' + """ + try: + input_data = Parse(body['input_data'], Example()) + except Exception as err: + serving_metrics_emit_counter('serving.inference.invalid_arguments') + raise InvalidArgumentException(f'Failed to parse inference input: {serving_model_id}') from err + with db.session_scope() as session: + serving_model = session.query(ServingModel).filter_by(id=serving_model_id, + project_id=project_id).one_or_none() + if not serving_model: + serving_metrics_emit_counter('serving.inference.db_error') + raise NotFoundException(f'Failed to find serving model: {serving_model_id}') + deployment_name = serving_model.serving_deployment.deployment_name + tf_serving_service = TensorflowServingService(deployment_name) + extend_input = {} + with db.session_scope() as session: + serving_negotiator = session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model_id).one_or_none() + if serving_negotiator is not None: + extend_input.update(ParticipantFetcher(session).fetch(serving_negotiator, '1')) + output = tf_serving_service.get_model_inference_output(input_data, extend_input) + if 'Error' in output: + serving_metrics_emit_counter('serving.inference.rpc_error') + raise InternalException(f'Failed to do inference: {output}') + serving_metrics_emit_counter('serving.inference.success') + return make_flask_response(data=output) + + +class ServingServiceInstanceLogApiV2(Resource): + + @use_args({'tail_lines': fields.Integer(required=True, help='tail lines is required')}, location='query') + @credentials_required + def get(self, body: dict, project_id: int, serving_model_id: int, instance_name: str): + """Get logs of a serving service instance + --- + tags: + - serving + description: get logs of a serving service instance + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: serving_model_id + schema: + type: integer + - in: path + 
name: instance_name + schema: + type: string + - in: query + name: tail_lines + schema: + type: integer + description: lines of log + responses: + 200: + description: instance log + content: + application/json: + schema: + type: array + items: + type: string + """ + with db.session_scope() as session: + serving_model = session.query(ServingModel).filter_by(id=serving_model_id, + project_id=project_id).one_or_none() + if not serving_model: + serving_metrics_emit_counter('serving.logs.db_error') + raise NotFoundException(f'Failed to find serving model: {serving_model_id}') + tail_lines = body['tail_lines'] + result = ServingDeploymentService.get_pod_log(instance_name, tail_lines) + return make_flask_response(data=result) + + +class ServingServiceRemotePlatformsApi(Resource): + + @credentials_required + def get(self, project_id: int): + """Get supported third-party serving platform + --- + tags: + - serving + description: get supported third-party serving platform + parameters: + - in: path + name: project_id + schema: + type: integer + responses: + 200: + description: list of supported serving remote platform + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.ServingServiceRemotePlatform' + """ + result_list = [] + current_sso = flask_utils.get_current_sso() + if current_sso is None: + return make_flask_response(data=result_list) + for key in remote.supported_remote_serving: + support_platform = ServingServiceRemotePlatform(platform=key) + result_list.append(support_platform) + return make_flask_response(data=result_list) + + +def initialize_serving_services_apis(api): + api.add_resource(ServingServicesApiV2, '/projects/<int:project_id>/serving_services') + api.add_resource(ServingServiceApiV2, '/projects/<int:project_id>/serving_services/<int:serving_model_id>') + api.add_resource(ServingServiceInferenceApiV2, + '/projects/<int:project_id>/serving_services/<int:serving_model_id>/inference') + api.add_resource( + ServingServiceInstanceLogApiV2, '/projects/<int:project_id>/serving_services/<int:serving_model_id>/instances' + '/<string:instance_name>/log') + api.add_resource(ServingServiceRemotePlatformsApi, '/projects/<int:project_id>/serving_services/remote_platforms') + + # if a schema is used, one has to append it to schema_manager so Swagger knows there is a schema available + schema_manager.append(ServingCreateParams) + schema_manager.append(ServingUpdateParams) diff --git a/web_console_v2/api/fedlearner_webconsole/serving/apis_inference_test.py b/web_console_v2/api/fedlearner_webconsole/serving/apis_inference_test.py new file mode 100644 index 000000000..82f4eccd9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/apis_inference_test.py @@ -0,0 +1,313 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import json +import unittest +from http import HTTPStatus +from unittest.mock import MagicMock, patch + +import numpy as np + +from tensorflow.core.example.example_pb2 import Example +from tensorflow.core.example.feature_pb2 import Feature, Int64List, Features + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.mmgr.models import Model +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto import service_pb2, serving_pb2, common_pb2 +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput +from fedlearner_webconsole.serving.database_fetcher import DatabaseFetcher +from fedlearner_webconsole.serving.models import ServingModel, ServingModelStatus +from fedlearner_webconsole.serving.runners import QueryParticipantStatusRunner +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.serving.services import NegotiatorServingService +from testing.common import BaseTestCase + +TEST_SIGNATURE = { + 'inputs': [{ + 'name': 'example_id', + 'type': 'DT_STRING' + }, { + 'name': 'raw_id', + 'type': 'DT_STRING' + }, { + 'name': 'x0', + 'type': 'DT_FLOAT' + }, { + 'name': 'x1', + 'type': 'DT_INT64', + 'dim': [4] + }], + 'from_participants': { + 'act1_f': { + 'name': 'act1_f:0', + 'dtype': 'DT_FLOAT', + 'tensorShape': { + 'unknownRank': True + } + }, + 'act2_f': { + 'name': 'act2_f:0', + 'dtype': 'DT_DOUBLE' + }, + 'act3_f': { + 'name': 'act3_f:0', + 'dtype': 'DT_INT32' + }, + 'act4_f': { + 'name': 'act4_f:0', + 'dtype': 'DT_INT64' + }, + 'act5_f': { + 'name': 'act5_f:0', + 'dtype': 'DT_UINT32' + }, + 'act6_f': { + 'name': 'act6_f:0', + 'dtype': 'DT_UINT64' + }, + 'act7_f': { + 'name': 'act7_f:0', + 'dtype': 'DT_STRING' + }, + 'act8_f': { + 'name': 'act8_f:0', + 'dtype': 'DT_BOOL' + } + } +} + +TEST_OUTPUT = { + 'result': { + 'act1_f': { + 'dtype': 'DT_FLOAT', + 'floatVal': 0.1 + }, + 'act2_f': { + 'dtype': 'DT_DOUBLE', + 'doubleVal': 0.1 + }, + 'act3_f': { + 'dtype': 'DT_INT32', + 'intVal': -11 + }, + 'act4_f': { + 'dtype': 'DT_INT64', + 'int64Val': -12 + }, + 'act5_f': { + 'dtype': 'DT_UINT32', + 'uint32Val': 13 + }, + 'act6_f': { + 'dtype': 'DT_UINT64', + 'uint64Val': 14 + }, + 'act7_f': { + 'dtype': 'DT_STRING', + 'stringVal': 'test' + }, + 'act8_f': { + 'dtype': 'DT_BOOL', + 'boolVal': False + }, + } +} + + +def _get_create_serving_service_input(name, model_id: int): + res = { + 'name': name, + 'comment': 'test-comment-1', + 'model_id': model_id, + 'is_local': True, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + return res + + +class ServingServicesApiInferenceTest(BaseTestCase): + + def setUp(self): + self.maxDiff = None + super().setUp() + # insert project + with db.session_scope() as session: + project = Project() + project.name = 'test_project_name' + session.add(project) + session.flush([project]) + + participant = Participant() + participant.name = 'test_participant_name' + participant.domain_name = 'test_domain_name' + participant.project_id = project.id + session.add(participant) + session.flush([participant]) + + project_participant = ProjectParticipant() + project_participant.participant_id = participant.id + project_participant.project_id = project.id + session.add(project_participant) + + model = Model() + model.name = 'test_model_name' + model.model_path = '/test_path/' + model.group_id = 1 + model.uuid = 
'test_uuid_1' + model.project_id = project.id + + session.add(model) + session.commit() + self.project_id = project.id + self.model_id = model.id + self.model_uuid = model.uuid + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_post_serving_service_inference(self, mock_create_deployment: MagicMock): + # create + name = 'test-serving-service-1' + serving_service = _get_create_serving_service_input(name, model_id=self.model_id) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + serving_model_id = data['id'] # get id from create response + # make input + fake_data = { + 'raw': Feature(int64_list=Int64List(value=np.random.randint(low=0, high=255, size=(128 * 128 * 3)))), + 'label': Feature(int64_list=Int64List(value=[1])) + } + fake_input = { + 'input_data': str(Example(features=Features(feature=fake_data))), + } + # post + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services/{serving_model_id}/inference', + data=fake_input) + self.assertEqual(HTTPStatus.INTERNAL_SERVER_ERROR, response.status_code) + data = self.get_response_data(response) + self.assertIsNone(data) + + @patch('fedlearner_webconsole.rpc.client.RpcClient.operate_serving_service') + @patch('fedlearner_webconsole.rpc.client.RpcClient.inference_serving_service') + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_post_federal_serving_service_inference(self, mock_create_deployment: MagicMock, + mock_federal_inference: MagicMock, + mock_federal_operation: MagicMock): + mock_inference_response = service_pb2.ServingServiceInferenceResponse( + status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), code=serving_pb2.SERVING_SERVICE_SUCCESS) + mock_inference_response.data.update(TEST_OUTPUT) + mock_federal_inference.return_value = mock_inference_response + mock_federal_operation.return_value = service_pb2.ServingServiceResponse( + status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), code=serving_pb2.SERVING_SERVICE_SUCCESS) + # create serving service + name = 'test-serving-service-1' + serving_service = _get_create_serving_service_input(name, model_id=self.model_id) + serving_service['is_local'] = False + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + serving_model_id = data['id'] # get id from create response + + # mock signature runner + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + serving_model.signature = json.dumps(TEST_SIGNATURE) + session.commit() + + # mock query runner + runner = QueryParticipantStatusRunner() + test_context = RunnerContext(0, RunnerInput()) + runner_status, _ = runner.run(test_context) + self.assertEqual(runner_status, RunnerStatus.DONE) + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(serving_model.status, ServingModelStatus.LOADING) + + # inference, make input + fake_data = { + 'raw': Feature(int64_list=Int64List(value=np.random.randint(low=0, high=255, size=(128 * 128 * 3)))), + 'label': Feature(int64_list=Int64List(value=[1])) + } + fake_input = { + 'input_data': 
str(Example(features=Features(feature=fake_data))), + } + # post + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services/{serving_model_id}/inference', + data=fake_input) + self.assertEqual(HTTPStatus.INTERNAL_SERVER_ERROR, response.status_code) + data = self.get_response_data(response) + self.assertIsNone(data) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_federal_serving_service_inference_from_participant(self, mock_create_deployment: MagicMock): + mock_serving_uuid = 'test_uuid_1' + mock_serving_model_name = 'test_serving_model_name_1' + # create from participant + with db.session_scope() as session: + project = session.query(Project).get(self.project_id) + request = service_pb2.ServingServiceRequest() + request.operation_type = serving_pb2.ServingServiceType.SERVING_SERVICE_CREATE + request.serving_model_uuid = mock_serving_uuid + request.model_uuid = self.model_uuid + request.serving_model_name = mock_serving_model_name + NegotiatorServingService(session).handle_participant_request(request, project) + + # get list + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services') + data = self.get_response_data(response) + serving_model_id = data[0]['id'] + + # get one + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{serving_model_id}') + data = self.get_response_data(response) + self.assertEqual('WAITING_CONFIG', data['status']) + + # config + serving_service = { + 'comment': 'test-comment-1', + 'model_id': self.model_id, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + response = self.patch_helper(f'/api/v2/projects/{self.project_id}/serving_services/{serving_model_id}', + data=serving_service) + data = self.get_response_data(response) + + # get one + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{serving_model_id}') + data = self.get_response_data(response) + self.assertEqual('LOADING', data['status']) + + # inference from participant + query_key = 1 + test_signature = json.dumps(TEST_SIGNATURE) + data_record = DatabaseFetcher.fetch_by_int_key(query_key, test_signature) + self.assertEqual(len(data_record['x0']), 1) + self.assertEqual(data_record['x0'][0], 0.1) + self.assertEqual(len(data_record['x1']), 4) + self.assertEqual(data_record['x1'][0], 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/apis_runner_test.py b/web_console_v2/api/fedlearner_webconsole/serving/apis_runner_test.py new file mode 100644 index 000000000..772eaf2d8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/apis_runner_test.py @@ -0,0 +1,307 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import json +import unittest +from http import HTTPStatus +from multiprocessing import Queue +from unittest.mock import MagicMock, patch + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.mmgr.models import Model, ModelJobGroup +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto import service_pb2, serving_pb2, common_pb2 +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, ModelSignatureParserInput +from fedlearner_webconsole.serving.models import ServingModel, ServingNegotiator, ServingModelStatus +from fedlearner_webconsole.serving.runners import ModelSignatureParser, QueryParticipantStatusRunner, UpdateModelRunner +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from testing.common import BaseTestCase + + +def _get_create_serving_service_input(name, model_id: int): + res = { + 'name': name, + 'comment': 'test-comment-1', + 'model_id': model_id, + 'is_local': True, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + return res + + +def _fake_update_parsed_signature_sub_process(q: Queue, model_path: str): + mock_signature_type_dict = { + '4s_code_ctr': 'DT_FLOAT', + 'event_name_ctr': 'DT_FLOAT', + 'source_account_ctr': 'DT_FLOAT', + 'source_channel_ctr': 'DT_FLOAT', + 'example_id': 'DT_STRING', + 'raw_id': 'DT_INT64', + } + mock_parsed_example = serving_pb2.ServingServiceSignature() + for key, value in mock_signature_type_dict.items(): + mock_example_input = serving_pb2.ServingServiceSignatureInput(name=key, type=value) + mock_parsed_example.inputs.append(mock_example_input) + q.put(mock_parsed_example) + + +class ServingServicesApiRunnerTest(BaseTestCase): + + def setUp(self): + self.maxDiff = None + super().setUp() + # insert project + with db.session_scope() as session: + project = Project() + project.name = 'test_project_name' + session.add(project) + session.flush([project]) + + participant = Participant() + participant.name = 'test_participant_name' + participant.domain_name = 'test_domain_name' + participant.project_id = project.id + session.add(participant) + session.flush([participant]) + + project_participant = ProjectParticipant() + project_participant.participant_id = participant.id + project_participant.project_id = project.id + session.add(project_participant) + + model_job_group = ModelJobGroup() + session.add(model_job_group) + session.flush([model_job_group]) + + model = Model() + model.name = 'test_model_name' + model.model_path = '/test_path/' + model.group_id = model_job_group.id + model.uuid = 'test_uuid_1' + model.project_id = project.id + model.version = 1 + + session.add(model) + session.commit() + self.project_id = project.id + self.model_id = model.id + self.model_uuid = model.uuid + self.model_group_id = model_job_group.id + + @patch('fedlearner_webconsole.rpc.client.RpcClient.operate_serving_service') + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_query_participant_runner(self, mock_create_deployment: MagicMock, mock_federal_operation: MagicMock): + mock_federal_operation.return_value = service_pb2.ServingServiceResponse( + status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), code=serving_pb2.SERVING_SERVICE_SUCCESS) + # create federal serving service + name = 'test-serving-service-1' + serving_service = 
_get_create_serving_service_input(name, model_id=self.model_id) + serving_service['is_local'] = False + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + + # create another federal serving service + name = 'test-serving-service-2' + serving_service = _get_create_serving_service_input(name, model_id=self.model_id) + serving_service['is_local'] = False + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + + # create another local serving service + name = 'test-serving-service-3' + serving_service = _get_create_serving_service_input(name, model_id=self.model_id) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + + # check status + with db.session_scope() as session: + query = session.query(ServingNegotiator) + query = query.filter(ServingNegotiator.is_local.is_(False)) + query = query.outerjoin( + ServingNegotiator.serving_model).filter(ServingModel.status == ServingModelStatus.PENDING_ACCEPT) + all_records = query.all() + self.assertEqual(len(all_records), 2) + + # call query runner + runner = QueryParticipantStatusRunner() + test_context = RunnerContext(0, RunnerInput()) + runner_status, _ = runner.run(test_context) + self.assertEqual(runner_status, RunnerStatus.DONE) + + # check status again + with db.session_scope() as session: + query = session.query(ServingNegotiator) + query = query.filter(ServingNegotiator.is_local.is_(False)) + query = query.outerjoin( + ServingNegotiator.serving_model).filter(ServingModel.status == ServingModelStatus.PENDING_ACCEPT) + all_records = query.all() + self.assertEqual(len(all_records), 0) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_query_participant_runner_exception_branch(self, mock_create_deployment: MagicMock): + serving_model = ServingModel() + serving_model.project_id = self.project_id + serving_model.name = 'test_serving_model_name' + serving_model.status = ServingModelStatus.PENDING_ACCEPT + serving_negotiator = ServingNegotiator() + serving_negotiator.project_id = self.project_id + serving_negotiator.is_local = False + with db.session_scope() as session: + session.add(serving_model) + session.flush([serving_model]) + serving_negotiator.serving_model_id = serving_model.id + session.add(serving_negotiator) + session.commit() + + # call query runner + runner = QueryParticipantStatusRunner() + test_context = RunnerContext(0, RunnerInput()) + runner_status, _ = runner.run(test_context) + self.assertEqual(runner_status, RunnerStatus.DONE) + + @patch('fedlearner_webconsole.rpc.client.RpcClient.operate_serving_service') + @patch('fedlearner_webconsole.serving.services.TensorflowServingService.get_model_signature') + @patch('fedlearner_webconsole.serving.runners._update_parsed_signature', _fake_update_parsed_signature_sub_process) + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_parse_signature_runner(self, mock_create_deployment: MagicMock, get_model_signature: MagicMock, + mock_federal_operation: MagicMock): + mock_federal_operation.return_value = service_pb2.ServingServiceResponse( + status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), 
code=serving_pb2.SERVING_SERVICE_SUCCESS) + get_model_signature.return_value = { + 'inputs': { + 'act1_f': { + 'name': 'act1_f:0', + 'dtype': 'DT_FLOAT', + 'tensorShape': { + 'unknownRank': True + } + }, + 'examples': { + 'name': 'examples:0', + 'dtype': 'DT_STRING', + 'tensorShape': { + 'unknownRank': True + } + } + }, + 'outputs': { + 'output': { + 'name': 'Sigmoid:0', + 'dtype': 'DT_FLOAT', + 'tensorShape': { + 'unknownRank': True + } + } + }, + 'methodName': 'tensorflow/serving/predict' + } + # create serving service + name = 'test-serving-service-1' + serving_service = _get_create_serving_service_input(name, model_id=self.model_id) + serving_service['is_local'] = False + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + serving_model_id = data['id'] # get id from create response + + # call signature runner + runner = ModelSignatureParser() + runner_input = RunnerInput(model_signature_parser_input=ModelSignatureParserInput( + serving_model_id=serving_model_id)) + runner.run(RunnerContext(0, runner_input)) + + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(serving_model.name, name) + self.assertEqual(serving_model.status, ServingModelStatus.PENDING_ACCEPT) + signature_dict = json.loads(serving_model.signature) + self.assertEqual(len(signature_dict['inputs']), 6) + self.assertIn('from_participants', signature_dict) + self.assertIn('act1_f', signature_dict['from_participants']) + self.assertIn('outputs', signature_dict) + self.assertIn('output', signature_dict['outputs']) + serving_negotiator = session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model_id).one_or_none() + self.assertIsNotNone(serving_negotiator) + self.assertEqual(serving_negotiator.project_id, self.project_id) + self.assertEqual(serving_negotiator.with_label, True) + raw_signature_dict = json.loads(serving_negotiator.raw_signature) + self.assertIn('inputs', raw_signature_dict) + self.assertIn('outputs', raw_signature_dict) + + @patch('fedlearner_webconsole.rpc.client.RpcClient.operate_serving_service') + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_update_model_runner(self, mock_create_deployment: MagicMock, mock_federal_operation: MagicMock): + mock_federal_operation.return_value = service_pb2.ServingServiceResponse( + status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), code=serving_pb2.SERVING_SERVICE_SUCCESS) + # create local serving service bound to a model group + name = 'test-auto-update-1' + serving_service = { + 'name': name, + 'comment': 'test-comment-1', + 'model_group_id': self.model_group_id, + 'is_local': True, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + serving_model_id = data['id'] # get id from create response + + # create another model + with db.session_scope() as session: + model = Model() + model.name = 'test_model_name_2' + model.model_path = '/test_path_2/' + model.group_id = self.model_group_id + model.uuid = 'test_uuid_2' + model.project_id = self.project_id + model.version = 2 + session.add(model) + session.commit() + model_id_2 = model.id + + # 
check status + with db.session_scope() as session: + serving_model = session.query(ServingModel).filter_by(id=serving_model_id).one_or_none() + self.assertEqual(serving_model.model_group_id, self.model_group_id) + self.assertEqual(serving_model.model_id, self.model_id) + + # call update model runner + runner = UpdateModelRunner() + test_context = RunnerContext(0, RunnerInput()) + runner_status, _ = runner.run(test_context) + self.assertEqual(runner_status, RunnerStatus.DONE) + + # check status again + with db.session_scope() as session: + serving_model = session.query(ServingModel).filter_by(id=serving_model_id).one_or_none() + self.assertEqual(serving_model.model_group_id, self.model_group_id) + self.assertEqual(serving_model.model_id, model_id_2) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/apis_test.py b/web_console_v2/api/fedlearner_webconsole/serving/apis_test.py new file mode 100644 index 000000000..b514ff9ae --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/apis_test.py @@ -0,0 +1,449 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import unittest +import urllib.parse +from datetime import datetime, timezone +from http import HTTPStatus +from unittest.mock import MagicMock, patch + +from envs import Envs +from fedlearner_webconsole.composer.models import SchedulerItem +from fedlearner_webconsole.db import db +from fedlearner_webconsole.k8s.models import Pod, PodState +from fedlearner_webconsole.mmgr.models import Model, ModelJobGroup +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.serving.models import ServingModel, ServingNegotiator +from fedlearner_webconsole.serving.remote import register_remote_serving +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from testing.common import BaseTestCase +from testing.fake_remote_serving import FakeRemoteServing + + +def _get_create_serving_service_input(name, project_id: int): + res = { + 'name': name, + 'comment': 'test-comment-1', + 'cpu_per_instance': '2000m', + 'memory_per_instance': '2Gi', + 'instance_num': 3, + 'project_id': project_id, + 'is_local': True + } + return res + + +def _get_create_serving_service_input_v2(name, model_id: int): + res = { + 'name': name, + 'comment': 'test-comment-1', + 'model_id': model_id, + 'is_local': True, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + return res + + +class ServingServicesApiV2Test(BaseTestCase): + + def setUp(self): + super().setUp() + # insert project + with db.session_scope() as session: + project = Project() + project.name = 'test_project_name' + session.add(project) + session.flush([project]) + + participant = Participant() + participant.name = 'test_participant_name' + participant.domain_name = 'test_domain_name' + participant.project_id = project.id + 
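# add() then flush() (without committing) assigns participant.id so the ProjectParticipant link below can use it +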
session.add(participant) + session.flush([participant]) + + project_participant = ProjectParticipant() + project_participant.participant_id = participant.id + project_participant.project_id = project.id + session.add(project_participant) + + model_job_group = ModelJobGroup() + session.add(model_job_group) + session.flush([model_job_group]) + + model_1 = Model() + model_1.name = 'test_model_name_1' + model_1.model_path = '/test_path_1/' + model_1.uuid = 'test_uuid_1' + model_1.project_id = project.id + model_1.version = 1 + model_1.group_id = model_job_group.id + model_2 = Model() + model_2.name = 'test_model_name_2' + model_2.model_path = '/test_path_2/' + model_2.uuid = 'test_uuid_2' + model_2.project_id = project.id + model_2.version = 2 + model_2.group_id = model_job_group.id + session.add_all([model_1, model_2]) + + session.commit() + self.project_id = project.id + self.model_id_1 = model_1.id + self.model_group_id = model_job_group.id + self.model_id_2 = model_2.id + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_create_serving_service(self, mock_create_deployment: MagicMock): + # create serving service + name = 'test-serving-service-1' + serving_service = _get_create_serving_service_input_v2(name, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(HTTPStatus.CREATED, response.status_code) + data = self.get_response_data(response) + self.assertEqual(0, data['model_group_id']) + serving_model_id = data['id'] # get id from create response + + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(serving_model.name, name) + self.assertEqual('/test_path_1/exported_models', serving_model.model_path) + serving_deployment = serving_model.serving_deployment + deployment_name_substr = f'serving-{serving_model_id}-' + self.assertIn(deployment_name_substr, serving_deployment.deployment_name) + self.assertEqual(serving_deployment.resource, json.dumps({ + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + })) + serving_negotiator = session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model_id).one_or_none() + self.assertIsNotNone(serving_negotiator) + self.assertEqual(serving_negotiator.project_id, self.project_id) + + mock_create_deployment.assert_called_once() + + # create same name + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + data = self.get_response_data(response) + self.assertEqual(HTTPStatus.CONFLICT, response.status_code) + self.assertIsNone(data) + + # resource format error + serving_service['resource'] = { + 'cpu': 2, + 'memory': '2', + 'replicas': 3, + } + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(HTTPStatus.BAD_REQUEST, response.status_code) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_create_auto_update_serving_service(self, mock_create_deployment: MagicMock): + # create serving service + name = 'test-auto-update-1' + serving_service = { + 'name': name, + 'comment': 'test-comment-1', + 'model_group_id': self.model_group_id, + 'is_local': True, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', 
data=serving_service) + self.assertEqual(HTTPStatus.CREATED, response.status_code) + data = self.get_response_data(response) + self.assertEqual(self.model_id_2, data['model_id']) + serving_model_id = data['id'] # get id from create response + + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(serving_model.name, name) + self.assertEqual('/test_path_2/exported_models', serving_model.model_path) + + mock_create_deployment.assert_called_once() + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_get_serving_services(self, mock_create_deployment: MagicMock): + # create + name1 = 'test-get-services-1' + serving_service = _get_create_serving_service_input_v2(name1, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(HTTPStatus.CREATED, response.status_code) + name2 = 'test-get-services-2' + serving_service = _get_create_serving_service_input_v2(name2, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(HTTPStatus.CREATED, response.status_code) + # get list + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services') + data = self.get_response_data(response) + self.assertEqual(2, len(data)) + self.assertIn(data[0]['name'], [name1, name2]) + self.assertIn(data[1]['name'], [name1, name2]) + self.assertEqual(self.project_id, data[0]['project_id']) + self.assertEqual('LOADING', data[0]['status']) + self.assertEqual('UNKNOWN', data[0]['instance_num_status']) + + # get with filter + filter_param = urllib.parse.quote(f'(name="{name1}")') + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services?filter={filter_param}') + data = self.get_response_data(response) + self.assertEqual(1, len(data)) + + filter_param = urllib.parse.quote('(name="test-get-services-3")') # test not found + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services?filter={filter_param}') + data = self.get_response_data(response) + self.assertEqual(0, len(data)) + + filter_param = urllib.parse.quote('(keyword~="services-1")') + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services?filter={filter_param}') + data = self.get_response_data(response) + self.assertEqual(1, len(data)) + + sorter_param = urllib.parse.quote('created_at asc') + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services?order_by={sorter_param}') + data = self.get_response_data(response) + self.assertEqual(2, len(data)) + self.assertEqual([name1, name2], [data[0]['name'], data[1]['name']]) + + sorter_param = urllib.parse.quote('created_at desc') + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services?order_by={sorter_param}') + data = self.get_response_data(response) + self.assertEqual(2, len(data)) + self.assertEqual([name2, name1], [data[0]['name'], data[1]['name']]) + + sorter_param = urllib.parse.quote('something_unsupported desc') + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services?order_by={sorter_param}') + data = self.get_response_data(response) + self.assertIsNone(data) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.get_pods_info') + 
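# stacked @patch decorators inject mocks bottom-up: the patch on the next line maps to mock_create_deployment, get_pods_info above maps to mock_get_pods_info +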
@patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_get_serving_service(self, mock_create_deployment: MagicMock, mock_get_pods_info): + test_datetime = datetime(2022, 1, 1, 8, 8, 8, tzinfo=timezone.utc) + fake_pods = [ + Pod(name='pod0', state=PodState.FAILED, creation_timestamp=to_timestamp(test_datetime)), + Pod(name='pod1', state=PodState.RUNNING, creation_timestamp=to_timestamp(test_datetime) - 1), + Pod(name='pod2', state=PodState.SUCCEEDED, creation_timestamp=to_timestamp(test_datetime) + 1) + ] + mock_get_pods_info.return_value = fake_pods + # create + name1 = 'test-get-services-1' + serving_service = _get_create_serving_service_input_v2(name1, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(HTTPStatus.CREATED, response.status_code) + data = self.get_response_data(response) + serving_service_id = data['id'] + # get one + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{serving_service_id}') + data = self.get_response_data(response) + self.assertEqual(name1, data['name']) + self.assertEqual({'cpu': '2', 'memory': '2', 'replicas': 3}, data['resource']) + self.assertEqual(f'/api/v2/projects/{self.project_id}/serving_services/{serving_service_id}/inference', + data['endpoint']) + self.assertEqual('UNKNOWN', data['instance_num_status']) + self.assertEqual(['pod0', 'pod1', 'pod2'], [x['name'] for x in data['instances']]) + sorter_param = urllib.parse.quote('created_at asc') + response = self.get_helper( + f'/api/v2/projects/{self.project_id}/serving_services/{serving_service_id}?order_by={sorter_param}') + data = self.get_response_data(response) + self.assertEqual(3, len(data['instances'])) + self.assertEqual(['pod1', 'pod0', 'pod2'], [x['name'] for x in data['instances']]) + sorter_param = urllib.parse.quote('created_at desc') + response = self.get_helper( + f'/api/v2/projects/{self.project_id}/serving_services/{serving_service_id}?order_by={sorter_param}') + data = self.get_response_data(response) + self.assertEqual(3, len(data['instances'])) + self.assertEqual(['pod2', 'pod0', 'pod1'], [x['name'] for x in data['instances']]) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_update_serving_service(self, mock_create_deployment: MagicMock): + # create + name = 'test-update-service-1' + serving_service = _get_create_serving_service_input_v2(name, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + service_id = data['id'] # get id from create response + # update comments + new_comment = 'test-comment-2' + serving_service = { + 'comment': new_comment, + 'resource': { + 'cpu': '2', + 'memory': '2', + 'replicas': 3, + } + } + response = self.patch_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}', + data=serving_service) + data = self.get_response_data(response) + self.assertEqual(data['comment'], new_comment) + + # change from model_id to model_group_id + serving_service = { + 'model_group_id': self.model_group_id, + } + response = self.patch_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}', + data=serving_service) + data = self.get_response_data(response) + self.assertEqual(self.model_id_2, 
data['model_id']) + self.assertEqual(self.model_group_id, data['model_group_id']) + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(service_id) + self.assertEqual('/test_path_2/exported_models', serving_model.model_path) + + # change from model_group_id to model_id + serving_service = { + 'model_id': self.model_id_1, + } + response = self.patch_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}', + data=serving_service) + data = self.get_response_data(response) + self.assertEqual(0, data['model_group_id']) + self.assertEqual(self.model_id_1, data['model_id']) + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(service_id) + self.assertEqual('/test_path_1/exported_models', serving_model.model_path) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + @patch('fedlearner_webconsole.serving.services.k8s_client') + def test_delete_serving_service(self, mock_k8s_client: MagicMock, mock_create_deployment: MagicMock): + mock_k8s_client.delete_config_map = MagicMock() + mock_k8s_client.delete_deployment = MagicMock() + mock_k8s_client.delete_service = MagicMock() + # create + name = 'test-delete-service-1' + serving_service = _get_create_serving_service_input_v2(name, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + service_id = data['id'] # get id from create response + # delete + response = self.delete_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}') + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + # get + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}') + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + data = self.get_response_data(response) + self.assertIsNone(data) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + @patch('fedlearner_webconsole.serving.services.k8s_client.get_pod_log') + def test_get_serving_service_instance_log(self, mock_query_log: MagicMock, mock_create_deployment: MagicMock): + # create + name = 'test-get-service-instance-log-1' + serving_service = _get_create_serving_service_input_v2(name, model_id=self.model_id_1) + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + mock_create_deployment.assert_called_once() + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = self.get_response_data(response) + service_id = data['id'] # get id from create response + # get + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}') + data = self.get_response_data(response) + self.assertEqual(len(data['instances']), 1) + instance_name = data['instances'][0]['name'] # get instance name from response + # get log + mock_query_log.return_value = ['test', 'hello'] + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}' + f'/instances/{instance_name}/log?tail_lines={500}') + mock_query_log.assert_called_once_with(instance_name, namespace=Envs.K8S_NAMESPACE, tail_lines=500) + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertCountEqual(self.get_response_data(response), ['test', 'hello']) + + 
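# the test below patches get_current_sso with a fake SSO user: the remote-platform APIs are SSO-only, so the list API returns an empty list and creation is rejected without one +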
@patch('fedlearner_webconsole.utils.flask_utils.get_current_sso', MagicMock(return_value='test-sso')) + def test_remote_platform_serving_service(self): + reckon_remote_serving = FakeRemoteServing() + register_remote_serving(FakeRemoteServing.SERVING_PLATFORM, reckon_remote_serving) + # get + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/remote_platforms') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(1, len(data)) + self.assertEqual(FakeRemoteServing.SERVING_PLATFORM, data[0]['platform']) + self.assertEqual('', data[0]['payload']) + + # create serving service + name = 'test-remote-serving-1' + serving_service = { + 'name': name, + 'model_group_id': self.model_group_id, + 'is_local': True, + 'remote_platform': { + 'platform': FakeRemoteServing.SERVING_PLATFORM, + 'payload': 'test-payload', + } + } + response = self.post_helper(f'/api/v2/projects/{self.project_id}/serving_services', data=serving_service) + self.assertEqual(HTTPStatus.CREATED, response.status_code) + data = self.get_response_data(response) + self.assertEqual({'platform': 'unittest_mock', 'payload': 'test-payload'}, data['remote_platform']) + service_id = data['id'] + + # get list + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services') + data = self.get_response_data(response) + self.assertEqual(1, len(data)) + self.assertEqual('AVAILABLE', data[0]['status']) + self.assertTrue(data[0]['support_inference']) + + # get one + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}') + data = self.get_response_data(response) + self.assertEqual('test_deploy_url', data['endpoint']) + self.assertEqual('AVAILABLE', data['status']) + self.assertTrue(data['support_inference']) + self.assertEqual({'platform': 'unittest_mock', 'payload': 'test-payload'}, data['remote_platform']) + + # change from model_group_id to model_id + serving_service = { + 'model_id': self.model_id_1, + } + response = self.patch_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}', + data=serving_service) + data = self.get_response_data(response) + self.assertEqual(0, data['model_group_id']) + self.assertEqual(self.model_id_1, data['model_id']) + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(service_id) + self.assertEqual('/test_path_1/exported_models', serving_model.model_path) + item = session.query(SchedulerItem.id).filter(SchedulerItem.name.like(f'%{name}%')).first() + self.assertIsNone(item) + + # delete + response = self.delete_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}') + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + response = self.get_helper(f'/api/v2/projects/{self.project_id}/serving_services/{service_id}') + data = self.get_response_data(response) + self.assertIsNone(data) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/database_fetcher.py b/web_console_v2/api/fedlearner_webconsole/serving/database_fetcher.py new file mode 100644 index 000000000..08d1c7fb8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/database_fetcher.py @@ -0,0 +1,43 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +import json + + +class DatabaseFetcher: + + @staticmethod + def fetch_by_int_key(query_key: int, signature: str) -> dict: + result = {'raw_id': [query_key], 'example_id': [str(query_key)]} + signature_dict = json.loads(signature) + signature_input = signature_dict['inputs'] + for item in signature_input: + input_name = item['name'] + input_type = item['type'] + result[input_name] = [] + if 'dim' in item: + dim = int(item['dim'][0]) + else: + dim = 1 + for _ in range(0, dim): + # TODO(lixiaoguang.01) fetch from ABase, match name + if input_type == 'DT_STRING': + result[input_name].append('') + elif input_type in ('DT_FLOAT', 'DT_DOUBLE'): + result[input_name].append(0.1) + elif input_type in ('DT_INT64', 'DT_INT32'): + result[input_name].append(1) + return result diff --git a/web_console_v2/api/fedlearner_webconsole/serving/metrics.py b/web_console_v2/api/fedlearner_webconsole/serving/metrics.py new file mode 100644 index 000000000..9ad3e2077 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/metrics.py @@ -0,0 +1,30 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from fedlearner_webconsole.serving.models import ServingModel +from fedlearner_webconsole.utils import metrics + + +def serving_metrics_emit_counter(name: str, serving_model: ServingModel = None): + if serving_model is None: + metrics.emit_counter(name, 1) + return + metrics.emit_counter(name, + 1, + tags={ + 'project_id': str(serving_model.project_id), + 'serving_model_id': str(serving_model.id), + 'serving_model_name': serving_model.name, + }) diff --git a/web_console_v2/api/fedlearner_webconsole/serving/models.py b/web_console_v2/api/fedlearner_webconsole/serving/models.py new file mode 100644 index 000000000..64a138fa9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/models.py @@ -0,0 +1,155 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import enum +import json + +from sqlalchemy.sql.schema import UniqueConstraint, Index + +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.proto import serving_pb2 +from fedlearner_webconsole.utils.pp_datetime import now, to_timestamp +from fedlearner_webconsole.mmgr.models import ModelType + + +class ServingModelStatus(enum.Enum): + UNKNOWN = 0 + LOADING = 1 + AVAILABLE = 2 + UNLOADING = 3 + PENDING_ACCEPT = 4 + DELETED = 5 + WAITING_CONFIG = 6 + + +class ServingDeploymentStatus(enum.Enum): + UNAVAILABLE = 0 + AVAILABLE = 1 + + +class ServingModel(db.Model): + __tablename__ = 'serving_models_v2' + __table_args__ = (UniqueConstraint('name', name='uniq_name'), default_table_args('serving models')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + project_id = db.Column(db.Integer, nullable=False, comment='project id') + name = db.Column(db.String(255), comment='name') + serving_deployment_id = db.Column(db.Integer, comment='serving deployment db id') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment') + model_id = db.Column(db.Integer, comment='model id') + model_type = db.Column(db.Enum(ModelType, native_enum=False, length=64, create_constraint=False), + default=ModelType.NN_MODEL, + comment='model type') + model_path = db.Column(db.String(255), default=None, comment='model\'s path') + model_group_id = db.Column(db.Integer, comment='model group id for auto update scenario') + pending_model_id = db.Column(db.Integer, comment='model id when waiting for participants\' config') + pending_model_group_id = db.Column(db.Integer, comment='model group id when waiting for participants\' config') + signature = db.Column(db.Text(), default='', comment='model signature') + status = db.Column(db.Enum(ServingModelStatus, native_enum=False, length=64, create_constraint=False), + default=ServingModelStatus.UNKNOWN, + comment='status') + endpoint = db.Column(db.String(255), comment='endpoint') + created_at = db.Column(db.DateTime(timezone=True), comment='created_at', default=now) + updated_at = db.Column(db.DateTime(timezone=True), comment='updated_at', default=now, onupdate=now) + extra = db.Column(db.Text(), comment='extra') + + project = db.relationship('Project', primaryjoin='Project.id == foreign(ServingModel.project_id)') + serving_deployment = db.relationship('ServingDeployment', + primaryjoin='ServingDeployment.id == ' + 'foreign(ServingModel.serving_deployment_id)') + model = db.relationship('Model', primaryjoin='Model.id == foreign(ServingModel.model_id)') + pending_model = db.relationship('Model', primaryjoin='Model.id == foreign(ServingModel.pending_model_id)') + model_group = db.relationship('ModelJobGroup', + primaryjoin='ModelJobGroup.id == foreign(ServingModel.model_group_id)') + pending_model_group = db.relationship('ModelJobGroup', + primaryjoin='ModelJobGroup.id == ' + 'foreign(ServingModel.pending_model_group_id)') + + def to_serving_service(self) -> serving_pb2.ServingService: + return serving_pb2.ServingService(id=self.id, + project_id=self.project_id, + name=self.name, + comment=self.comment, + is_local=True, + status=self.status.name, + support_inference=False, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at)) + + def to_serving_service_detail(self) -> serving_pb2.ServingServiceDetail: + detail = serving_pb2.ServingServiceDetail(id=self.id, + project_id=self.project_id, + name=self.name, + comment=self.comment, + model_id=self.model_id, + 
model_group_id=self.model_group_id, + model_type=self.model_type.name, + is_local=True, + endpoint=self.endpoint, + signature=self.signature, + status=self.status.name, + support_inference=False, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at)) + if self.serving_deployment.is_remote_serving(): + platform_config: dict = json.loads(self.serving_deployment.deploy_platform) + detail.remote_platform.CopyFrom( + serving_pb2.ServingServiceRemotePlatform( + platform=platform_config['platform'], + payload=platform_config['payload'], + )) + return detail + + +class ServingDeployment(db.Model): + __tablename__ = 'serving_deployments_v2' + __table_args__ = (default_table_args('serving deployments in webconsole')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + project_id = db.Column(db.Integer, nullable=False, comment='project id') + deployment_name = db.Column(db.String(255), comment='deployment name') + resource = db.Column('rsc', db.String(255), comment='resource') + endpoint = db.Column(db.String(255), comment='endpoint') + deploy_platform = db.Column(db.Text(), comment='deploy platform. None means inside this platform') + status = db.Column(db.Enum(ServingDeploymentStatus, native_enum=False, length=64, create_constraint=False), + default=ServingDeploymentStatus.UNAVAILABLE, + comment='status') + created_at = db.Column(db.DateTime(timezone=True), comment='created_at', default=now) + extra = db.Column(db.Text(), comment='extra') + + project = db.relationship('Project', primaryjoin='Project.id == foreign(ServingDeployment.project_id)') + + def is_remote_serving(self) -> bool: + return self.deploy_platform is not None + + +class ServingNegotiator(db.Model): + __tablename__ = 'serving_negotiators_v2' + __table_args__ = (Index('idx_serving_model_uuid', + 'serving_model_uuid'), default_table_args('serving negotiators in webconsole')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id') + project_id = db.Column(db.Integer, nullable=False, comment='project id') + serving_model_id = db.Column(db.Integer, nullable=False, comment='serving model id') + is_local = db.Column(db.Boolean, comment='can serving locally') + with_label = db.Column(db.Boolean, comment='federal side with label or not') + serving_model_uuid = db.Column(db.String(255), comment='uuid for federal model') + feature_dataset_id = db.Column(db.Integer, comment='feature dataset id') + data_source_map = db.Column(db.Text(), comment='where to get model inference arguments') + raw_signature = db.Column(db.Text(), comment='save raw signature from tf serving') + created_at = db.Column(db.DateTime(timezone=True), comment='created_at', default=now) + extra = db.Column(db.Text(), comment='extra') + + project = db.relationship('Project', primaryjoin='Project.id == foreign(ServingNegotiator.project_id)') + serving_model = db.relationship('ServingModel', + primaryjoin='ServingModel.id == ' + 'foreign(ServingNegotiator.serving_model_id)') diff --git a/web_console_v2/api/fedlearner_webconsole/serving/models_test.py b/web_console_v2/api/fedlearner_webconsole/serving/models_test.py new file mode 100644 index 000000000..6bcff5367 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/models_test.py @@ -0,0 +1,100 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import unittest + +from google.protobuf.json_format import MessageToDict + +from fedlearner_webconsole.mmgr.models import ModelJobGroup, Model, ModelType +from fedlearner_webconsole.proto import serving_pb2 +from fedlearner_webconsole.serving.models import ServingModel, ServingModelStatus, ServingDeployment +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.db import db +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ServingModelTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + model_job_group = ModelJobGroup() + session.add(model_job_group) + session.flush([model_job_group]) + + model = Model() + model.name = 'test_model_name_1' + model.project_id = 1 + model.group_id = model_job_group.id + session.add(model) + session.flush([model]) + + deployment = ServingDeployment() + deployment.project_id = 1 + deploy_config = serving_pb2.RemoteDeployConfig(platform='test-platform', + payload='test-payload', + deploy_name='privacy-platform-test-serving', + model_src_path='') + deployment.deploy_platform = json.dumps(MessageToDict(deploy_config)) + session.add(deployment) + session.flush([deployment]) + + serving_model = ServingModel() + serving_model.project_id = 1 + serving_model.name = 'test-serving-model-1' + serving_model.model_id = model.id + serving_model.serving_deployment_id = deployment.id + session.add(serving_model) + session.commit() + + self.model_group_id = model_job_group.id + self.model_id = model.id + self.serving_model_id = serving_model.id + + def test_to_serving_service_detail(self): + with db.session_scope() as session: + serving_model: ServingModel = session.query(ServingModel).get(self.serving_model_id) + expected_detail = serving_pb2.ServingServiceDetail(id=self.serving_model_id, + project_id=1, + name='test-serving-model-1', + model_id=self.model_id, + model_type=ModelType.NN_MODEL.name, + is_local=True, + status=ServingModelStatus.UNKNOWN.name, + support_inference=False) + self.assertPartiallyEqual(to_dict(expected_detail), + to_dict(serving_model.to_serving_service_detail()), + ignore_fields=['created_at', 'updated_at', 'remote_platform']) + self.assertEqual(0, serving_model.to_serving_service_detail().model_group_id) + + serving_model.model_group_id = self.model_group_id + expected_detail.model_group_id = self.model_group_id + self.assertPartiallyEqual(to_dict(expected_detail), + to_dict(serving_model.to_serving_service_detail()), + ignore_fields=['created_at', 'updated_at', 'remote_platform']) + + expected_detail.remote_platform.CopyFrom( + serving_pb2.ServingServiceRemotePlatform( + platform='test-platform', + payload='test-payload', + )) + self.assertPartiallyEqual(to_dict(expected_detail), + to_dict(serving_model.to_serving_service_detail()), + ignore_fields=['created_at', 'updated_at']) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/participant_fetcher.py b/web_console_v2/api/fedlearner_webconsole/serving/participant_fetcher.py new file mode 100644 index
000000000..9a90e17db --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/participant_fetcher.py @@ -0,0 +1,76 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +import json + +from google.protobuf.json_format import MessageToDict + +from fedlearner_webconsole.db import Session +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.proto import serving_pb2 +from fedlearner_webconsole.serving.models import ServingNegotiator +from fedlearner_webconsole.serving.services import NegotiatorServingService + + +class ParticipantFetcher: + _TF_DT_INT_TYPE_SET = {'DT_INT32', 'DT_INT16', 'DT_UINT16', 'DT_INT8', 'DT_UINT8'} + + def __init__(self, session: Session = None): + self._session = session + + def fetch(self, serving_negotiator: ServingNegotiator, example_id: str) -> dict: + if serving_negotiator.is_local: + return {} + resp = NegotiatorServingService(self._session).participant_serving_service_inference( + serving_negotiator, example_id) + if resp.code != serving_pb2.SERVING_SERVICE_SUCCESS: + raise InternalException(resp.msg) + data = MessageToDict(resp.data) + participant_result = data['result'] + signature = serving_negotiator.serving_model.signature + signature_dict = json.loads(signature) + signature_extend = signature_dict['from_participants'] + assert len(signature_extend) == len(participant_result), \ + f'Dims do not match: need {len(signature_extend)}, got {len(participant_result)}' + result = {} + for item_key in participant_result: + if item_key not in signature_extend and len(participant_result) > 1: + continue + input_key = item_key + if len(participant_result) == 1: + input_key = list(signature_extend.keys())[0] + dtype = participant_result[item_key]['dtype'] + result[input_key] = self._get_value_by_dtype(dtype, participant_result[item_key]) + return result + + def _get_value_by_dtype(self, dtype: str, input_data: dict): + if dtype == 'DT_FLOAT': + return input_data['floatVal'] + if dtype == 'DT_DOUBLE': + return input_data['doubleVal'] + if dtype in self._TF_DT_INT_TYPE_SET:  # membership test: dtype is a single string; all narrow int types map to intVal + return input_data['intVal'] + if dtype == 'DT_INT64': + return input_data['int64Val'] + if dtype == 'DT_UINT32': + return input_data['uint32Val'] + if dtype == 'DT_UINT64': + return input_data['uint64Val'] + if dtype == 'DT_STRING': + return input_data['stringVal'] + if dtype == 'DT_BOOL': + return input_data['boolVal'] + return '' diff --git a/web_console_v2/api/fedlearner_webconsole/serving/remote.py b/web_console_v2/api/fedlearner_webconsole/serving/remote.py new file mode 100644 index 000000000..2a7252751 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/remote.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from abc import abstractmethod, ABCMeta +from typing import Optional + +from fedlearner_webconsole.proto.serving_pb2 import RemoteDeployConfig, RemoteDeployState + + +class IRemoteServing(metaclass=ABCMeta): + """Deploy model on remote third-party serving platform + + """ + + @abstractmethod + def deploy_model(self, creator: str, config: RemoteDeployConfig) -> Optional[int]: + pass + + @abstractmethod + def get_deploy_url(self, config: RemoteDeployConfig) -> str: + pass + + @abstractmethod + def validate_config(self, config: RemoteDeployConfig) -> bool: + pass + + @abstractmethod + def get_deploy_status(self, config: RemoteDeployConfig) -> RemoteDeployState: + return RemoteDeployState.REMOTE_DEPLOY_READY + + @abstractmethod + def undeploy_model(self, config: RemoteDeployConfig): + pass + + +supported_remote_serving = {} + + +def register_remote_serving(name: str, serving: IRemoteServing): + supported_remote_serving[name] = serving diff --git a/web_console_v2/api/fedlearner_webconsole/serving/runners.py b/web_console_v2/api/fedlearner_webconsole/serving/runners.py new file mode 100644 index 000000000..424950179 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/runners.py @@ -0,0 +1,194 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import hashlib +import json +import logging +import os +import traceback +from typing import Tuple + +from google.protobuf.json_format import MessageToDict +from multiprocessing import Queue +from sqlalchemy.orm import Session, joinedload + +from fedlearner_webconsole.composer.composer_service import ComposerService +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import ItemType, IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import NotFoundException +from fedlearner_webconsole.proto import serving_pb2 +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, RunnerOutput +from fedlearner_webconsole.proto.serving_pb2 import ServingServiceType +from fedlearner_webconsole.serving.models import ServingModel, ServingNegotiator, ServingModelStatus, ServingDeployment +from fedlearner_webconsole.serving.services import NegotiatorServingService, SavedModelService, \ + TensorflowServingService, ServingModelService +from fedlearner_webconsole.utils import pp_datetime +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.pp_time import sleep +from fedlearner_webconsole.utils.process_utils import get_result_by_sub_process +from fedlearner_webconsole.mmgr.service import ModelJobGroupService +from fedlearner_webconsole.project.models import Project + + +def _update_parsed_signature(q: Queue, model_path: str): + file_manager = FileManager() + exported_dirs = file_manager.ls(model_path, include_directory=True) + newest_version = max([int(os.path.basename(v.path)) for v in exported_dirs if os.path.basename(v.path).isnumeric()]) + pb_path = os.path.join(model_path, str(newest_version), 'saved_model.pb') + saved_model_bytes = file_manager.read_bytes(pb_path) + signature_from_saved_model = SavedModelService.get_parse_example_details(saved_model_bytes) + q.put(signature_from_saved_model) + + +class ModelSignatureParser(IRunnerV2): + """ Parse example from model saved path + """ + + def __init__(self) -> None: + self.PARSE_TIMES = 10 + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + serving_model_id = context.input.model_signature_parser_input.serving_model_id + try: + for num in range(0, self.PARSE_TIMES): + with db.session_scope() as session: + # update parsed signature in serving model + serving_model = session.query(ServingModel).filter_by(id=serving_model_id).one_or_none() + if not serving_model: + raise NotFoundException(f'Failed to find serving model: {serving_model_id}') + signature_from_saved_model = get_result_by_sub_process(name='serving parse signature', + target=_update_parsed_signature, + kwargs={ + 'model_path': serving_model.model_path, + }) + signature_dict = MessageToDict(signature_from_saved_model) + # update raw signature in serving negotiator + deployment_name = serving_model.serving_deployment.deployment_name + tf_serving_service = TensorflowServingService(deployment_name) + signature_from_tf = tf_serving_service.get_model_signature() + raw_signature = json.dumps(signature_from_tf) + if len(raw_signature) > 0 and raw_signature != '{}': + update_serving_negotiator = session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model.id).one_or_none() + update_serving_negotiator.raw_signature = raw_signature + # add outputs to parsed signature + signature_dict['outputs'] = signature_from_tf['outputs'] + 
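# 'outputs' comes from live TF Serving metadata, while the input dims were parsed offline from saved_model.pb; the stored signature then roughly holds {'inputs': [...], 'outputs': {...}, 'from_participants': {...}} (illustrative shape, not verbatim) +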
signature_dict['from_participants'] = signature_from_tf['inputs'] + if 'examples' in signature_dict['from_participants']: + signature_dict['from_participants'].pop('examples') + serving_model.signature = json.dumps(signature_dict) + session.commit() + return RunnerStatus.DONE, RunnerOutput() + sleep(3) + except Exception: # pylint: disable=broad-except + error_message = f'[ModelSignatureParser] failed to run, serving id={serving_model_id}' + logging.exception(error_message) + return RunnerStatus.FAILED, RunnerOutput(error_message='[ModelSignatureParser] failed to get signature from tf') + + @staticmethod + def generate_task_name(serving_model_id: int, name: str): + hash_value = hashlib.sha256(str(pp_datetime.now()).encode('utf8')) + return f'parse_signature_{serving_model_id}_{name}_{hash_value.hexdigest()[0:6]}' + + +class QueryParticipantStatusRunner(IRunnerV2): + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + self._auto_run_query() + return RunnerStatus.DONE, RunnerOutput() + + @staticmethod + def _auto_run_query(): + with db.session_scope() as session: + query = session.query(ServingNegotiator) + query = query.filter(ServingNegotiator.is_local.is_(False)) + query = query.outerjoin(ServingNegotiator.serving_model).options(joinedload( + ServingNegotiator.serving_model)).filter(ServingModel.status == ServingModelStatus.PENDING_ACCEPT) + query = query.outerjoin(Project, Project.id == ServingNegotiator.project_id).options( + joinedload(ServingNegotiator.project)) + all_records = query.all() + for serving_negotiator in all_records: + with db.session_scope() as session: + serving_model = serving_negotiator.serving_model + try: + result = NegotiatorServingService(session).operate_participant_serving_service( + serving_negotiator, ServingServiceType.SERVING_SERVICE_QUERY) + if result == serving_pb2.SERVING_SERVICE_SUCCESS: + serving_model.status = ServingModelStatus.LOADING + session.add(serving_model) + session.commit() + except Exception as e: # pylint: disable=broad-except + logging.warning(f'[QueryParticipantStatusRunner] auto run query participant' + f' for {serving_model.name} with error {e}, trace: {traceback.format_exc()}') + + +class UpdateModelRunner(IRunnerV2): + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + self._auto_run_update() + return RunnerStatus.DONE, RunnerOutput() + + @staticmethod + def _auto_run_update(): + with db.session_scope() as session: + all_records = session.query(ServingModel).filter(ServingModel.model_group_id.isnot(None)).outerjoin( + ServingDeployment, ServingDeployment.id == ServingModel.serving_deployment_id).options( + joinedload(ServingModel.serving_deployment)).all() + for serving_model in all_records: + with db.session_scope() as session: + try: + model = ModelJobGroupService(session).get_latest_model_from_model_group( + serving_model.model_group_id) + if serving_model.model_id == model.id: + # already serving the latest model + continue + serving_model.model_id = model.id + serving_model.model_path = model.get_exported_model_path() + if serving_model.serving_deployment.is_remote_serving(): + ServingModelService(session).update_remote_serving_model(serving_model) + session.add(serving_model) + session.commit() + except Exception as e: # pylint: disable=broad-except + logging.warning( + f'[UpdateModelRunner] auto run update model for {serving_model.name} with error {e}, ' + f'trace: {traceback.format_exc()}') + + +def start_query_participant(session: Session): + composer_service_name = 
'serving_model_query_participant_status_v2' + composer_service = ComposerService(session) + if composer_service.get_item_status(composer_service_name) is not None: + return + runner_input = RunnerInput() + composer_service.collect_v2( + name=composer_service_name, + items=[(ItemType.SERVING_SERVICE_QUERY_PARTICIPANT_STATUS, runner_input)], + # cron job at every 10 seconds + cron_config='* * * * * */10') + + +def start_update_model(session: Session): + composer_service_name = 'serving_model_update_model_v2' + composer_service = ComposerService(session) + if composer_service.get_item_status(composer_service_name) is not None: + return + runner_input = RunnerInput() + composer_service.collect_v2( + name=composer_service_name, + items=[(ItemType.SERVING_SERVICE_UPDATE_MODEL, runner_input)], + # cron job at every 30 seconds + cron_config='* * * * * */30') diff --git a/web_console_v2/api/fedlearner_webconsole/serving/services.py b/web_console_v2/api/fedlearner_webconsole/serving/services.py new file mode 100644 index 000000000..c4de6e47d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/services.py @@ -0,0 +1,779 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import hashlib +import json +import grpc +import logging + +from google.protobuf import json_format +from google.protobuf.json_format import MessageToDict +from tensorflow import make_tensor_proto +from tensorflow.core.protobuf import saved_model_pb2 # pylint: disable=no-name-in-module +from tensorflow.core.framework import graph_pb2, node_def_pb2, types_pb2 # pylint: disable=no-name-in-module +from tensorflow.core.example.example_pb2 import Example # pylint: disable=no-name-in-module +from tensorflow.core.example.feature_pb2 import Int64List, Feature, FloatList, BytesList, Features # pylint: disable=no-name-in-module +from tensorflow_serving.apis import (get_model_status_pb2, model_service_pb2_grpc, get_model_metadata_pb2, + prediction_service_pb2_grpc, predict_pb2) +from tensorflow_serving.apis.get_model_metadata_pb2 import SignatureDefMap +from tensorflow_serving.apis.get_model_status_pb2 import ModelVersionStatus +from typing import Dict, List, Tuple, Optional +from kubernetes.client.models.v1_pod_list import V1PodList +from envs import Envs +from fedlearner_webconsole.exceptions import InternalException, ResourceConflictException, NotFoundException, \ + InvalidArgumentException +from fedlearner_webconsole.mmgr.models import Model +from fedlearner_webconsole.mmgr.service import ModelJobGroupService +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto import common_pb2, service_pb2, serving_pb2 +from fedlearner_webconsole.proto.serving_pb2 import ServingServiceType, RemoteDeployState +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.serving import remote +from fedlearner_webconsole.serving.database_fetcher 
import DatabaseFetcher +from fedlearner_webconsole.serving.metrics import serving_metrics_emit_counter +from fedlearner_webconsole.serving.utils import get_model, get_serving_negotiator_by_serving_model_id +from fedlearner_webconsole.utils import pp_datetime, flask_utils +from fedlearner_webconsole.k8s.models import Pod, PodState +from fedlearner_webconsole.db import Session +from fedlearner_webconsole.k8s.k8s_client import k8s_client +from fedlearner_webconsole.serving.models import ServingModel, ServingNegotiator, ServingModelStatus, ServingDeployment +from fedlearner_webconsole.serving.serving_yaml_template import (generate_serving_yaml, DEPLOYMENT_TEMPLATE, + CONFIG_MAP_TEMPLATE, SERVICE_TEMPLATE) +from fedlearner_webconsole.utils.const import API_VERSION +from fedlearner_webconsole.proto.serving_pb2 import (ServingServiceInstance, ServingServiceSignature, + ServingServiceSignatureInput) +from fedlearner_webconsole.utils.sorting import SortExpression + + +class ServingDeploymentService: + + def __init__(self, session: Session = None): + self._session = session + + @staticmethod + def get_base_path(serving_model_id: int): + # TODO(wangsen.0914): should having a serving storage filesystem. /cc @lixiaoguang.01 + return f'test/{serving_model_id}' + + def _get_serving_object_definition(self, serving_model: ServingModel) -> Tuple[Dict, Dict, Dict]: + """get all kubernetes definition + + Returns: + configMap, Deployment, Service + """ + resource = json.loads(serving_model.serving_deployment.resource) + if serving_model.model_path is not None: + model_path = serving_model.model_path + else: + model_path = self.get_base_path(serving_model.id) + serving_config = { + 'project': serving_model.project, + 'model': { + 'base_path': model_path + }, + 'serving': { + 'name': serving_model.serving_deployment.deployment_name, + 'resource': { + 'resource': { + 'cpu': resource['cpu'], + 'memory': resource['memory'] + }, + 'replicas': resource['replicas'], + }, + } + } + config_map_object = generate_serving_yaml(serving_config, CONFIG_MAP_TEMPLATE, self._session) + deployment_object = generate_serving_yaml(serving_config, DEPLOYMENT_TEMPLATE, self._session) + service_object = generate_serving_yaml(serving_config, SERVICE_TEMPLATE, self._session) + + return config_map_object, deployment_object, service_object + + def create_or_update_deployment(self, serving_model: ServingModel): + """post a bunch of k8s resources. + + Raises: + Raises RuntimeError if k8s post ops failed. Then you should call `session.rollback()`. + """ + config_map_object, deployment_object, service_object = self._get_serving_object_definition(serving_model) + + # For core api, failed to use *_app method. + k8s_client.create_or_update_config_map(metadata=config_map_object['metadata'], + data=config_map_object['data'], + name=config_map_object['metadata']['name'], + namespace=Envs.K8S_NAMESPACE) + + k8s_client.create_or_update_app(app_yaml=deployment_object, + group='apps', + version='v1', + plural='deployments', + namespace=Envs.K8S_NAMESPACE) + + k8s_client.create_or_update_service(metadata=service_object['metadata'], + spec=service_object['spec'], + name=service_object['metadata']['name'], + namespace=Envs.K8S_NAMESPACE) + + def delete_deployment(self, serving_model: ServingModel): + """delete a bunch of k8s resources. + """ + try: + config_map_object, deployment_object, service_object = self._get_serving_object_definition(serving_model) + + # For core api, failed to use *_app method. 
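+ # ConfigMap and Service are core-API resources, so the dedicated delete_config_map/delete_service helpers are used; the generic *_app methods only cover group/version/plural objects such as Deployments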
+ k8s_client.delete_config_map(name=config_map_object['metadata']['name'], namespace=Envs.K8S_NAMESPACE) + + k8s_client.delete_app(app_name=deployment_object['metadata']['name'], + group='apps', + version='v1', + plural='deployments', + namespace=Envs.K8S_NAMESPACE) + + k8s_client.delete_service(name=service_object['metadata']['name'], namespace=Envs.K8S_NAMESPACE) + except RuntimeError as err: + logging.warning(f'Failed to delete serving k8s resources, {err}') + + @staticmethod + def get_pods_info(deployment_name: str) -> List[Pod]: + pods: V1PodList = k8s_client.get_pods(Envs.K8S_NAMESPACE, label_selector=f'app={deployment_name}') + + pods_info = [] + for p in pods.items: + pods_info.append(Pod.from_json(p.to_dict())) + return pods_info + + @staticmethod + def get_replica_status(deployment_name: str) -> str: + config = k8s_client.get_deployment(deployment_name) + if config is not None and config.status is not None: + if config.status.ready_replicas is None: + config.status.ready_replicas = 0 + return f'{config.status.ready_replicas}/{config.spec.replicas}' + return 'UNKNOWN' + + @classmethod + def get_pods_status(cls, deployment_name: str) -> List[ServingServiceInstance]: + pods = cls.get_pods_info(deployment_name) + result = [] + for pod in pods: + instance = ServingServiceInstance(name=pod.name, + cpu='UNKNOWN', + memory='UNKNOWN', + created_at=pod.creation_timestamp) + if pod.state in (PodState.RUNNING, PodState.SUCCEEDED): + instance.status = 'AVAILABLE' + else: + instance.status = 'UNAVAILABLE' + result.append(instance) + return result + + @staticmethod + def get_pod_log(pod_name: str, tail_lines: int) -> List[str]: + """get pod log + + Args: + pod_name (str): pod name that you want to query + tail_lines (int): lines you want to query + + Returns: + List[str]: list of logs + """ + return k8s_client.get_pod_log(pod_name, namespace=Envs.K8S_NAMESPACE, tail_lines=tail_lines) + + @staticmethod + def generate_deployment_name(serving_model_id: int) -> str: + hash_value = hashlib.sha256(str(pp_datetime.now()).encode('utf8')) + return f'serving-{serving_model_id}-{hash_value.hexdigest()[0:6]}' + + +class TensorflowServingService: + + def __init__(self, deployment_name): + self._deployment_name = deployment_name + model_server_address = f'{deployment_name}.{Envs.K8S_NAMESPACE}.svc:8500' + channel = grpc.insecure_channel(model_server_address) + self.model_service_stub = model_service_pb2_grpc.ModelServiceStub(channel) + self.prediction_service_stub = prediction_service_pb2_grpc.PredictionServiceStub(channel) + + def get_model_status(self) -> ModelVersionStatus.State: + """ ref: https://github.com/tensorflow/serving/blob/master/tensorflow_serving/apis/get_model_status.proto#L26 + """ + request = get_model_status_pb2.GetModelStatusRequest() + request.model_spec.name = self._deployment_name + try: + state = self.model_service_stub.GetModelStatus(request).model_version_status[0].state + except grpc.RpcError: + return ModelVersionStatus.State.UNKNOWN + if state == ModelVersionStatus.State.START: + state = ModelVersionStatus.State.LOADING + elif state == ModelVersionStatus.State.END: + state = ModelVersionStatus.State.UNKNOWN + return state + + def get_model_signature(self) -> dict: + request = get_model_metadata_pb2.GetModelMetadataRequest() + request.model_spec.name = self._deployment_name + request.metadata_field.append('signature_def') + try: + metadata = self.prediction_service_stub.GetModelMetadata(request) + except grpc.RpcError: + return {} + signature = SignatureDefMap() + 
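# the metadata map holds a protobuf Any, so it must be unpacked into a SignatureDefMap before the 'serving_default' entry can be read +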
metadata.metadata['signature_def'].Unpack(signature) + return MessageToDict(signature.signature_def['serving_default']) + + def get_model_inference_output(self, user_input: Example, extend_input: Optional[dict] = None) -> dict: + inputs = make_tensor_proto([user_input.SerializeToString()]) + request = predict_pb2.PredictRequest() + request.model_spec.name = self._deployment_name + request.inputs['examples'].CopyFrom(inputs) + if extend_input is not None: + for k in extend_input: + ext_inputs = make_tensor_proto([extend_input[k]]) + request.inputs[k].CopyFrom(ext_inputs) + try: + output = self.prediction_service_stub.Predict(request) + except grpc.RpcError as err: + logging.error(f'Failed to inference, {err}') + return {'Error': str(err)} + return MessageToDict(output) + + @staticmethod + def get_model_inference_endpoint(project_id: int, serving_model_id: int) -> str: + return f'{API_VERSION}/projects/{project_id}/serving_services/{serving_model_id}/inference' + + +class NegotiatorServingService: + + def __init__(self, session: Session = None): + self._session = session + + def _handle_participant_request_create(self, request: service_pb2.ServingServiceRequest, + project: Project) -> service_pb2.ServingServiceResponse: + response = service_pb2.ServingServiceResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + code=serving_pb2.SERVING_SERVICE_SUCCESS) + # check model existence + model = self._session.query(Model).filter_by(uuid=request.model_uuid, project_id=project.id).first() + if model is None: + response.status.code = common_pb2.STATUS_NOT_FOUND + response.code = serving_pb2.SERVING_SERVICE_MODEL_NOT_FOUND + response.msg = 'model not found' + serving_metrics_emit_counter('serving.from_participant.create.model_not_found') + return response + # check serving model name + serving_model = self._session.query(ServingModel).filter_by(name=request.serving_model_name).first() + if serving_model is not None: + response.status.code = common_pb2.STATUS_INVALID_ARGUMENT + response.code = serving_pb2.SERVING_SERVICE_NAME_DUPLICATED + response.msg = 'serving model name is duplicated' + serving_metrics_emit_counter('serving.from_participant.create.duplicated', serving_model) + return response + # create db records in 3 tables + serving_model = ServingModel() + if model is not None: + serving_model.model_id = model.id + serving_model.model_path = model.get_exported_model_path() + serving_model.name = request.serving_model_name + serving_model.project_id = project.id + serving_model.status = ServingModelStatus.WAITING_CONFIG + serving_deployment = ServingDeployment() + serving_deployment.project_id = project.id + serving_deployment.resource = json.dumps({'cpu': '1000m', 'memory': '1Gi', 'replicas': 0}) + serving_negotiator = ServingNegotiator() + serving_negotiator.project_id = project.id + serving_negotiator.is_local = False + try: + self._session.add(serving_model) + self._session.flush([serving_model]) + except Exception as err: + serving_metrics_emit_counter('serving.from_participant.create.db_error', serving_model) + raise ResourceConflictException( + f'create serving service fail! 
serving model name = {serving_model.name}, err = {err}') from err + serving_deployment.deployment_name = ServingDeploymentService.generate_deployment_name(serving_model.id) + self._session.add(serving_deployment) + self._session.flush([serving_deployment]) + serving_negotiator.serving_model_id = serving_model.id + serving_negotiator.serving_model_uuid = request.serving_model_uuid + serving_negotiator.with_label = False + self._session.add(serving_negotiator) + serving_model.endpoint = TensorflowServingService.get_model_inference_endpoint(project.id, serving_model.id) + serving_model.serving_deployment_id = serving_deployment.id + self._session.commit() + serving_metrics_emit_counter('serving.from_participant.create.success', serving_model) + return response + + def _handle_participant_request_query( + self, request: service_pb2.ServingServiceRequest) -> service_pb2.ServingServiceResponse: + response = service_pb2.ServingServiceResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + code=serving_pb2.SERVING_SERVICE_SUCCESS) + # check serving model status + serving_model = self._session.query(ServingModel).filter_by(name=request.serving_model_name).first() + if serving_model is None: + response.status.code = common_pb2.STATUS_NOT_FOUND + response.code = serving_pb2.SERVING_SERVICE_MODEL_NOT_FOUND + response.msg = f'serving model not found, name = {request.serving_model_name}' + serving_metrics_emit_counter('serving.from_participant.query.serving_not_found') + return response + if serving_model.status == ServingModelStatus.WAITING_CONFIG: + response.code = serving_pb2.SERVING_SERVICE_PENDING_ACCEPT + response.msg = 'serving model is waiting for config' + serving_metrics_emit_counter('serving.from_participant.query.waiting', serving_model) + return response + serving_metrics_emit_counter('serving.from_participant.query.success', serving_model) + return response + + def _handle_participant_request_destroy( + self, request: service_pb2.ServingServiceRequest) -> service_pb2.ServingServiceResponse: + response = service_pb2.ServingServiceResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + code=serving_pb2.SERVING_SERVICE_SUCCESS) + serving_negotiator = self._session.query(ServingNegotiator).filter_by( + serving_model_uuid=request.serving_model_uuid).one_or_none() + if serving_negotiator is None: + response.msg = 'serving negotiator is already deleted' + serving_metrics_emit_counter('serving.from_participant.delete.already_done') + return response + if serving_negotiator.serving_model.status == ServingModelStatus.WAITING_CONFIG: + self._session.delete(serving_negotiator.serving_model.serving_deployment) + self._session.delete(serving_negotiator.serving_model) + self._session.delete(serving_negotiator) + self._session.commit() + serving_metrics_emit_counter('serving.from_participant.delete.directly', serving_negotiator.serving_model) + return response + serving_negotiator.serving_model.status = ServingModelStatus.DELETED + serving_metrics_emit_counter('serving.from_participant.delete.success', serving_negotiator.serving_model) + self._session.commit() + return response + + @staticmethod + def generate_uuid(serving_model_id: int) -> str: + hash_value = hashlib.sha256(str(pp_datetime.now()).encode('utf8')) + return f'{serving_model_id}{hash_value.hexdigest()[0:6]}' + + def operate_participant_serving_service(self, serving_negotiator: ServingNegotiator, operation: ServingServiceType): + serving_model = self._session.query(ServingModel).filter_by( + 
id=serving_negotiator.serving_model_id).one_or_none() + # no need to notify participants when serving on third party platform + if serving_model.serving_deployment.is_remote_serving(): + return serving_pb2.SERVING_SERVICE_SUCCESS + service = ParticipantService(self._session) + participants = service.get_platform_participants_by_project(serving_negotiator.project.id) + for participant in participants: + client = RpcClient.from_project_and_participant(serving_negotiator.project.name, + serving_negotiator.project.token, participant.domain_name) + model_uuid = '' + if serving_model.model is not None: + model_uuid = '' or serving_model.model.uuid + resp = client.operate_serving_service(operation, serving_negotiator.serving_model_uuid, model_uuid, + serving_model.name) + if resp.status.code != common_pb2.STATUS_SUCCESS: + msg = f'operate participant fail! status code = {resp.status.code}, msg = {resp.msg}' + logging.error(msg) + raise InternalException(msg) + if operation == serving_pb2.SERVING_SERVICE_CREATE: + if resp.code != serving_pb2.SERVING_SERVICE_SUCCESS: + return resp.code + elif operation == serving_pb2.SERVING_SERVICE_QUERY: + if resp.code != serving_pb2.SERVING_SERVICE_SUCCESS: + return resp.code + else: # SERVING_SERVICE_DESTROY + pass + return serving_pb2.SERVING_SERVICE_SUCCESS + + def participant_serving_service_inference(self, serving_negotiator: ServingNegotiator, + example_id: str) -> service_pb2.ServingServiceInferenceResponse: + service = ParticipantService(self._session) + participants = service.get_platform_participants_by_project(serving_negotiator.project.id) + assert len(participants) == 1, f'support one participant only! num = {len(participants)}' + client = RpcClient.from_project_and_participant(serving_negotiator.project.name, + serving_negotiator.project.token, participants[0].domain_name) + resp = client.inference_serving_service(serving_negotiator.serving_model_uuid, example_id) + if resp.status.code != common_pb2.STATUS_SUCCESS: + logging.error(resp.status.msg) + raise InternalException(resp.status.msg) + return resp + + def handle_participant_request(self, request: service_pb2.ServingServiceRequest, + project: Project) -> service_pb2.ServingServiceResponse: + if request.operation_type == ServingServiceType.SERVING_SERVICE_CREATE: + return self._handle_participant_request_create(request, project) + if request.operation_type == ServingServiceType.SERVING_SERVICE_QUERY: + return self._handle_participant_request_query(request) + if request.operation_type == ServingServiceType.SERVING_SERVICE_DESTROY: + return self._handle_participant_request_destroy(request) + response = service_pb2.ServingServiceResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + code=serving_pb2.SERVING_SERVICE_SUCCESS) + return response + + def handle_participant_inference_request(self, request: service_pb2.ServingServiceInferenceRequest, + project: Project) -> service_pb2.ServingServiceInferenceResponse: + response = service_pb2.ServingServiceInferenceResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + code=serving_pb2.SERVING_SERVICE_SUCCESS) + serving_negotiator = self._session.query(ServingNegotiator).filter_by( + serving_model_uuid=request.serving_model_uuid, project_id=project.id).one_or_none() + if serving_negotiator is None: + response.rpc_status.code = common_pb2.STATUS_NOT_FOUND + response.code = serving_pb2.SERVING_SERVICE_NEGOTIATOR_NOT_FOUND + response.msg = 'serving negotiator not found' + return response + deployment_name = 
serving_negotiator.serving_model.serving_deployment.deployment_name + tf_serving_service = TensorflowServingService(deployment_name) + query_key = int(request.example_id) + data_record = DatabaseFetcher.fetch_by_int_key(query_key, serving_negotiator.serving_model.signature) + feature_input = {} + for k, item in data_record.items(): + if isinstance(item[0], int): + int_list = Int64List(value=item) + feature_input[k] = Feature(int64_list=int_list) + if isinstance(item[0], float): + float_list = FloatList(value=item) + feature_input[k] = Feature(float_list=float_list) + if isinstance(item[0], str): + data_record_bytes = [x.encode(encoding='utf-8') for x in item] + bytes_list = BytesList(value=data_record_bytes) + feature_input[k] = Feature(bytes_list=bytes_list) + input_data = Example(features=Features(feature=feature_input)) + output = tf_serving_service.get_model_inference_output(input_data) + response.data.update({'result': output['outputs']}) + return response + + +class SavedModelService: + PARSE_EXAMPLE_NAME = 'ParseExample/ParseExample' + INPUT_NODE_NAMES = [PARSE_EXAMPLE_NAME] + + @staticmethod + def get_nodes_from_graph(graph: graph_pb2.GraphDef, node_list: List[str]) -> Dict[str, node_def_pb2.NodeDef]: + """get nodes from graph by node names + + Args: + graph (graph_pb2.GraphDef): GraphDef + node_list (List[str]): node name list + + Returns: + Dict[str, node_def_pb2.NodeDef]: a mapping from node_name to NodeDef + + Raises: + AssertionError: when failed to get all nodes required by node_list + """ + result = {} + for n in graph.node: + if n.name in node_list: + result[n.name] = n + assert list(result.keys()) == node_list, f'Failed to get nodes: {node_list - result.keys()}' + return result + + @classmethod + def get_parse_example_details(cls, saved_model_binary: bytes) -> ServingServiceSignature: + saved_model_message = saved_model_pb2.SavedModel() + saved_model_message.ParseFromString(saved_model_binary) + graph = saved_model_message.meta_graphs[0].graph_def + + parse_example_op = cls.get_nodes_from_graph(graph, cls.INPUT_NODE_NAMES)[cls.PARSE_EXAMPLE_NAME] + assert parse_example_op.op == 'ParseExample', f'{parse_example_op} node is not a ParseExample op' + + dense_keys_inputs = [i for i in parse_example_op.input if 'dense_keys' in i] + assert len(dense_keys_inputs) == parse_example_op.attr['Ndense'].i, 'Consistency check failed' + + dense_keys_nodes = cls.get_nodes_from_graph(graph, dense_keys_inputs) + # Keep nodes in order + dense_keys_list = [dense_keys_nodes[i] for i in dense_keys_inputs] + signature = ServingServiceSignature() + # For more details on serving/examples/parse_graph.py + for n, t, s in zip(dense_keys_list, parse_example_op.attr['Tdense'].list.type, + parse_example_op.attr['dense_shapes'].list.shape): + signature_input = ServingServiceSignatureInput(name=n.attr['value'].tensor.string_val[0], + type=types_pb2.DataType.Name(t), + dim=[d.size for d in s.dim]) + signature.inputs.append(signature_input) + return signature + + +class ServingModelService(object): + + def __init__(self, session: Session = None): + self._session = session + + def create_from_param(self, + project_id: int, + name: str, + is_local: bool, + comment: Optional[str], + model_id: Optional[int], + model_group_id: Optional[int], + resource: serving_pb2.ServingServiceResource = None, + remote_platform: serving_pb2.ServingServiceRemotePlatform = None) -> ServingModel: + session = self._session + serving_model = ServingModel() + if model_id is not None: + serving_model.model_id = model_id + model = 
get_model(serving_model.model_id, self._session) + elif model_group_id is not None: + serving_model.model_group_id = model_group_id + model = ModelJobGroupService(self._session).get_latest_model_from_model_group(serving_model.model_group_id) + serving_model.model_id = model.id + else: + raise InvalidArgumentException('model_id and model_group_id need to fill one') + serving_model.name = name + serving_model.project_id = project_id + serving_model.comment = comment + serving_model.status = ServingModelStatus.LOADING + serving_model.model_path = model.get_exported_model_path() + + try: + session.add(serving_model) + session.flush([serving_model]) + except Exception as err: + serving_metrics_emit_counter('serving.create.db_fail', serving_model) + raise ResourceConflictException( + f'create serving service fail! serving model name = {serving_model.name}, err = {err}') from err + + serving_deployment = ServingDeployment() + serving_deployment.project_id = project_id + session.add(serving_deployment) + session.flush([serving_deployment]) + serving_model.serving_deployment_id = serving_deployment.id + + serving_negotiator = ServingNegotiator() + serving_negotiator.project_id = project_id + serving_negotiator.is_local = is_local + + if remote_platform is None: # serving inside this platform + serving_deployment.resource = json.dumps(MessageToDict(resource)) + serving_deployment.deployment_name = ServingDeploymentService.generate_deployment_name(serving_model.id) + serving_model.endpoint = TensorflowServingService.get_model_inference_endpoint(project_id, serving_model.id) + self._create_or_update_deployment(serving_model) + else: # remote serving + deploy_config: serving_pb2.RemoteDeployConfig = self._create_remote_serving(remote_platform, serving_model) + serving_deployment.deploy_platform = json.dumps(MessageToDict(deploy_config)) + serving_model.endpoint = self._get_remote_serving_url(remote_platform) + + # Notifying participants needs to be placed behind the k8s operation, + # because when the k8s operation fails, it avoids the participants from generating dirty data + serving_negotiator.serving_model_id = serving_model.id + serving_negotiator.serving_model_uuid = NegotiatorServingService.generate_uuid(serving_model.id) + serving_negotiator.with_label = True + session.add(serving_negotiator) + session.flush([serving_negotiator]) + if not serving_negotiator.is_local: + serving_model.status = ServingModelStatus.PENDING_ACCEPT + result = NegotiatorServingService(session).operate_participant_serving_service( + serving_negotiator, serving_pb2.SERVING_SERVICE_CREATE) + if result != serving_pb2.SERVING_SERVICE_SUCCESS: + raise InternalException(details=f'create participant serving service fail! 
result code = {result}') + return serving_model + + def get_serving_service_detail(self, + serving_model_id: int, + project_id: Optional[int] = None, + sorter: Optional[SortExpression] = None) -> serving_pb2.ServingServiceDetail: + serving_model = self._session.query(ServingModel).filter_by(id=serving_model_id).one_or_none() + if not serving_model: + raise NotFoundException(f'Failed to find serving model {serving_model_id}') + if project_id is not None: + if serving_model.project_id != project_id: + raise NotFoundException(f'Failed to find serving model {serving_model_id} in project {project_id}') + deployment_name = serving_model.serving_deployment.deployment_name + result = serving_model.to_serving_service_detail() + if serving_model.serving_deployment.is_remote_serving(): + result.status = self._get_remote_serving_status(serving_model).name + result.support_inference = (result.status == ServingModelStatus.AVAILABLE.name) + else: + status = TensorflowServingService(deployment_name).get_model_status() + if serving_model.status == ServingModelStatus.LOADING and status != ModelVersionStatus.State.UNKNOWN: + result.status = ModelVersionStatus.State.Name(status) + result.support_inference = (result.status == ServingModelStatus.AVAILABLE.name) + resource = json.loads(serving_model.serving_deployment.resource) + resource = serving_pb2.ServingServiceResource( + cpu=resource['cpu'], + memory=resource['memory'], + replicas=resource['replicas'], + ) + result.resource.CopyFrom(resource) + if result.resource.replicas > 0: + k8s_serving_service = ServingDeploymentService() + result.instance_num_status = k8s_serving_service.get_replica_status(deployment_name) + instances = k8s_serving_service.get_pods_status(deployment_name) + if sorter is not None: + if sorter.field == 'created_at': + reverse = not sorter.is_asc + instances = sorted(instances, key=lambda x: x.created_at, reverse=reverse) + result.instances.extend(instances) + else: + result.instance_num_status = 'UNKNOWN' + serving_negotiator = get_serving_negotiator_by_serving_model_id(serving_model_id, self._session) + if serving_negotiator is not None: + result.is_local = serving_negotiator.is_local + if not serving_negotiator.with_label: + result.support_inference = False + return result + + def set_resource_and_status_on_ref(self, single_res: serving_pb2.ServingService, serving_model: ServingModel): + if serving_model.serving_deployment.is_remote_serving(): + single_res.status = self._get_remote_serving_status(serving_model).name + single_res.support_inference = (single_res.status == ServingModelStatus.AVAILABLE.name) + return + deployment_name = serving_model.serving_deployment.deployment_name + resource = json.loads(serving_model.serving_deployment.resource) + tf_serving_service = TensorflowServingService(deployment_name) + status = tf_serving_service.get_model_status() + if serving_model.status == ServingModelStatus.LOADING and status != ModelVersionStatus.State.UNKNOWN: + single_res.status = ModelVersionStatus.State.Name(status) + single_res.support_inference = (single_res.status == ServingModelStatus.AVAILABLE.name) + if resource['replicas'] > 0: + single_res.instance_num_status = ServingDeploymentService.get_replica_status(deployment_name) + else: + single_res.instance_num_status = 'UNKNOWN' + single_res.resource.cpu = resource['cpu'] + single_res.resource.memory = resource['memory'] + single_res.resource.replicas = resource['replicas'] + + def set_is_local_on_ref(self, single_res: serving_pb2.ServingService, serving_model: ServingModel): + 
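# mirror the negotiator's is_local flag onto the list entry; a side that holds no labels cannot serve inference requests +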
serving_negotiator = self._session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model.id).one_or_none() + if serving_negotiator is not None: + single_res.is_local = serving_negotiator.is_local + if not serving_negotiator.with_label: + single_res.support_inference = False + + def update_model(self, model_id: Optional[int], model_group_id: Optional[int], serving_model: ServingModel) -> bool: + need_update = False + if model_id is not None: + if model_id != serving_model.model_id: + model = get_model(model_id, self._session) + serving_model.model_id = model.id + serving_model.model_path = model.get_exported_model_path() + need_update = True + serving_model.model_group_id = None # clear model group config + elif model_group_id is not None and model_group_id != serving_model.model_group_id: + model = ModelJobGroupService(self._session).get_latest_model_from_model_group(model_group_id) + if serving_model.model_id != model.id: + serving_model.model_id = model.id + serving_model.model_path = model.get_exported_model_path() + need_update = True + serving_model.model_group_id = model_group_id + + if not need_update: + return False + + if serving_model.serving_deployment.is_remote_serving(): + self.update_remote_serving_model(serving_model) + return True + + serving_negotiator = self._session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model.id).one_or_none() + if serving_negotiator is not None and not serving_negotiator.is_local: + # TODO(lixiaoguang.01) support update model for federal serving + raise InvalidArgumentException('update model is not supported for federal serving') + + self._create_or_update_deployment(serving_model) + return True + + def update_resource(self, resource: dict, serving_model: ServingModel): + serving_model.serving_deployment.resource = json.dumps(resource) + serving_model.status = ServingModelStatus.LOADING + self._create_or_update_deployment(serving_model) + self._session.add(serving_model.serving_deployment) + + def delete_serving_service(self, serving_model: ServingModel): + serving_negotiator = self._session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model.id).one_or_none() + if serving_negotiator is not None: + if not serving_negotiator.is_local: + NegotiatorServingService(self._session).operate_participant_serving_service( + serving_negotiator, serving_pb2.ServingServiceType.SERVING_SERVICE_DESTROY) + self._session.delete(serving_negotiator) + if serving_model.serving_deployment.is_remote_serving(): + self._undeploy_remote_serving(serving_model) + else: + try: + ServingDeploymentService(self._session).delete_deployment(serving_model) + except RuntimeError as err: + serving_metrics_emit_counter('serving.delete.deployment_error', serving_model) + raise ResourceConflictException( + f'delete deployment fail! serving model id = {serving_model.id}, err = {err}') from err + self._session.delete(serving_model.serving_deployment) + self._session.delete(serving_model) + + def _create_or_update_deployment(self, serving_model: ServingModel): + try: + ServingDeploymentService(self._session).create_or_update_deployment(serving_model) + except RuntimeError as err: + serving_metrics_emit_counter('serving.deployment_error', serving_model) + raise InternalException( + f'create or update deployment fail! 
serving model id = {serving_model.id}, err = {err}') from err + + @staticmethod + def _create_remote_serving(remote_platform: serving_pb2.ServingServiceRemotePlatform, + serving_model: ServingModel) -> serving_pb2.RemoteDeployConfig: + current_user = flask_utils.get_current_user() + if remote_platform.platform not in remote.supported_remote_serving: + raise InvalidArgumentException(f'platform {remote_platform.platform} not supported') + deploy_config = serving_pb2.RemoteDeployConfig(platform=remote_platform.platform, + payload=remote_platform.payload, + deploy_name=f'privacy-platform-{serving_model.name}', + model_src_path=serving_model.model_path) + remote_helper = remote.supported_remote_serving[remote_platform.platform] + try: + deploy_config.deploy_id = remote_helper.deploy_model(current_user.username, deploy_config) + except (FileNotFoundError, AttributeError) as err: + serving_metrics_emit_counter('serving.remote_deployment_error', serving_model) + raise InvalidArgumentException( + f'create remote deployment fail! serving model id = {serving_model.id}, err = {err}') from err + # not stored in db, fetch from serving_model when deploy + deploy_config.model_src_path = '' + return deploy_config + + def update_remote_serving_model(self, serving_model: ServingModel): + current_user = flask_utils.get_current_user() + if current_user is None: + username = 'robot' + else: + username = current_user.username + deploy_config = serving_pb2.RemoteDeployConfig() + json_format.Parse(serving_model.serving_deployment.deploy_platform, deploy_config) + if deploy_config.platform not in remote.supported_remote_serving: + raise InvalidArgumentException(f'platform {deploy_config.platform} not supported') + deploy_config.model_src_path = serving_model.model_path + remote_helper = remote.supported_remote_serving[deploy_config.platform] + remote_helper.deploy_model(username, deploy_config) + + @staticmethod + def _get_remote_serving_url(remote_platform: serving_pb2.ServingServiceRemotePlatform) -> str: + if remote_platform.platform not in remote.supported_remote_serving: + raise InvalidArgumentException(f'platform {remote_platform.platform} not supported') + deploy_config = serving_pb2.RemoteDeployConfig(payload=remote_platform.payload) + remote_helper = remote.supported_remote_serving[remote_platform.platform] + return remote_helper.get_deploy_url(deploy_config) + + @staticmethod + def _get_remote_serving_status(serving_model: ServingModel) -> ServingModelStatus: + deploy_config = serving_pb2.RemoteDeployConfig() + json_format.Parse(serving_model.serving_deployment.deploy_platform, deploy_config) + if deploy_config.platform not in remote.supported_remote_serving: + raise InvalidArgumentException(f'platform {deploy_config.platform} not supported') + remote_helper = remote.supported_remote_serving[deploy_config.platform] + deploy_status = remote_helper.get_deploy_status(deploy_config) + if deploy_status == RemoteDeployState.REMOTE_DEPLOY_READY: + return ServingModelStatus.AVAILABLE + return ServingModelStatus.LOADING + + @staticmethod + def _undeploy_remote_serving(serving_model: ServingModel): + deploy_config = serving_pb2.RemoteDeployConfig() + json_format.Parse(serving_model.serving_deployment.deploy_platform, deploy_config) + if deploy_config.platform not in remote.supported_remote_serving: + raise InvalidArgumentException(f'platform {deploy_config.platform} not supported') + remote_helper = remote.supported_remote_serving[deploy_config.platform] + remote_helper.undeploy_model(deploy_config) diff --git 
a/web_console_v2/api/fedlearner_webconsole/serving/services_test.py b/web_console_v2/api/fedlearner_webconsole/serving/services_test.py
new file mode 100644
index 000000000..8ec297594
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/serving/services_test.py
@@ -0,0 +1,511 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import json
+import os
+import unittest
+from unittest.mock import MagicMock, patch, call
+
+from google.protobuf import text_format
+from google.protobuf.json_format import MessageToDict
+from tensorflow.core.protobuf import saved_model_pb2
+from envs import Envs
+from fedlearner_webconsole.auth.models import User
+from fedlearner_webconsole.exceptions import NotFoundException, InvalidArgumentException
+from fedlearner_webconsole.initial_db import initial_db
+from fedlearner_webconsole.mmgr.models import Model, ModelJobGroup, ModelType
+from fedlearner_webconsole.proto import serving_pb2
+from fedlearner_webconsole.serving.remote import register_remote_serving
+from fedlearner_webconsole.utils.proto import to_dict
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.serving.models import ServingDeployment, ServingModel, ServingNegotiator, ServingModelStatus
+from fedlearner_webconsole.serving.services import SavedModelService, ServingDeploymentService, ServingModelService
+
+from testing.common import BaseTestCase
+from testing.fake_remote_serving import FakeRemoteServing
+from testing.no_web_server_test_case import NoWebServerTestCase
+
+
+class ServingDeploymentServiceTest(NoWebServerTestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        initial_db()
+        with db.session_scope() as session:
+
+            project = Project()
+            project.name = 'test_project_name'
+            session.add(project)
+            session.flush([project])
+
+            serving_deployment = ServingDeployment()
+            serving_deployment.deployment_name = 'test_deployment_name'
+            serving_deployment.project_id = project.id
+            serving_deployment.resource = json.dumps({'cpu': '4000m', 'memory': '8Gi', 'replicas': 3})
+            session.add(serving_deployment)
+            session.flush([serving_deployment])
+
+            serving_model = ServingModel()
+            serving_model.project_id = project.id
+            serving_model.serving_deployment_id = serving_deployment.id
+            session.add(serving_model)
+
+            session.commit()
+
+            self.serving_model_id = serving_model.id
+
+    @patch('fedlearner_webconsole.serving.services.k8s_client')
+    def test_create_deployment(self, mock_k8s_client: MagicMock):
+        mock_k8s_client.create_or_update_app = MagicMock()
+        mock_k8s_client.create_or_update_config_map = MagicMock()
+        mock_k8s_client.create_or_update_service = MagicMock()
+
+        with db.session_scope() as session:
+            # Best practice: use serving_model_id when interacting across two sessions.
+            serving_model = session.query(ServingModel).get(self.serving_model_id)
+            service = ServingDeploymentService(session)
+            service.create_or_update_deployment(serving_model)
+
+        mock_k8s_client.create_or_update_config_map.assert_called_once()
+        mock_k8s_client.create_or_update_service.assert_called_once()
+        mock_k8s_client.create_or_update_app.assert_called_once()
+
+    @patch('fedlearner_webconsole.serving.services.k8s_client')
+    def test_delete_deployment(self, mock_k8s_client: MagicMock):
+        mock_k8s_client.delete_app = MagicMock()
+        mock_k8s_client.delete_config_map = MagicMock()
+        mock_k8s_client.delete_service = MagicMock()
+
+        with db.session_scope() as session:
+            # Best practice: use serving_model_id when interacting across two sessions.
+            serving_model = session.query(ServingModel).get(self.serving_model_id)
+            service = ServingDeploymentService(session)
+            service.delete_deployment(serving_model)
+
+            mock_k8s_client.delete_app.assert_has_calls([
+                call(app_name=serving_model.serving_deployment.deployment_name,
+                     group='apps',
+                     version='v1',
+                     plural='deployments',
+                     namespace=Envs.K8S_NAMESPACE),
+            ])
+            mock_k8s_client.delete_config_map.assert_called_once_with(
+                name=f'{serving_model.serving_deployment.deployment_name}-config', namespace=Envs.K8S_NAMESPACE)
+            mock_k8s_client.delete_service.assert_called_once_with(
+                name=serving_model.serving_deployment.deployment_name, namespace=Envs.K8S_NAMESPACE)
+
+    def test_get_pods_info(self):
+        with db.session_scope() as session:
+            serving_model = session.query(ServingModel).filter_by(id=self.serving_model_id).one()
+            deployment_name = serving_model.serving_deployment.deployment_name
+
+        info = ServingDeploymentService.get_pods_info(deployment_name)
+        self.assertEqual(len(info), 1)
+
+
+class SavedModelServiceTest(NoWebServerTestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+
+        with open(os.path.join(Envs.BASE_DIR, 'testing/test_data/saved_model.pbtxt'), 'rt', encoding='utf-8') as f:
+            self.saved_model_text = f.read()
+
+        self.saved_model_message = text_format.Parse(self.saved_model_text, saved_model_pb2.SavedModel())
+        self.graph = self.saved_model_message.meta_graphs[0].graph_def
+
+    def test_get_nodes_from_graph(self):
+        parse_example_node = SavedModelService.get_nodes_from_graph(
+            self.graph, ['ParseExample/ParseExample'])['ParseExample/ParseExample']
+        self.assertEqual(parse_example_node.name, 'ParseExample/ParseExample')
+        self.assertEqual(parse_example_node.op, 'ParseExample')
+
+        dense_nodes = SavedModelService.get_nodes_from_graph(
+            self.graph, ['ParseExample/ParseExample/dense_keys_0', 'ParseExample/ParseExample/dense_keys_1'])
+        self.assertEqual(len(dense_nodes), 2)
+
+    def test_get_parse_example_details(self):
+        signatures = SavedModelService.get_parse_example_details(self.saved_model_message.SerializeToString())
+        self.assertCountEqual([i.name for i in signatures.inputs], ['example_id', 'x'])
+        self.assertCountEqual([i.type for i in signatures.inputs], ['DT_STRING', 'DT_FLOAT'])
+        self.assertCountEqual([i.dim for i in signatures.inputs], [[], [392]])
+
+
+# Use BaseTestCase instead of NoWebServerTestCase to get system.variables.labels when generating the deployment yaml
+class ServingModelServiceTest(BaseTestCase):
+
+    def setUp(self):
+        super().setUp()
+        # insert project
+        with db.session_scope() as session:
+            project = Project()
+            project.name = 'test_project_name'
+            session.add(project)
+            session.flush([project])
+
+            model_job_group = ModelJobGroup()
+            session.add(model_job_group)
+            session.flush([model_job_group])
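+            # the flushes above assign auto-increment ids, so project.id and
+            # model_job_group.id can be referenced when building the models below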
+ + model_1 = Model() + model_1.name = 'test_model_name_1' + model_1.model_path = '/test_path_1/' + model_1.uuid = 'test_uuid_1' + model_1.project_id = project.id + model_1.version = 1 + model_1.group_id = model_job_group.id + + model_2 = Model() + model_2.name = 'test_model_name_2' + model_2.model_path = '/test_path_2/' + model_2.project_id = project.id + model_2.version = 2 + model_2.group_id = model_job_group.id + + session.add_all([model_1, model_2]) + session.commit() + self.project_id = project.id + self.model_1_id = model_1.id + self.model_1_uuid = model_1.uuid + self.model_2_id = model_2.id + self.model_group_id = model_job_group.id + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_create_from_param(self, mock_create_deployment: MagicMock): + name = 'test-serving-service-1' + resource = serving_pb2.ServingServiceResource( + cpu='1', + memory='2', + replicas=3, + ) + param = { + 'model_id': self.model_1_id, + 'name': name, + 'comment': '', + 'resource': resource, + 'is_local': True, + } + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name=param['name'], + is_local=param['is_local'], + comment=param['comment'], + model_id=param['model_id'], + model_group_id=None, + resource=param['resource']) + session.commit() + serving_model_id = serving_model.id + + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(name, serving_model.name) + self.assertEqual('/test_path_1/exported_models', serving_model.model_path) + serving_deployment = serving_model.serving_deployment + deployment_name_substr = f'serving-{serving_model_id}-' + self.assertIn(deployment_name_substr, serving_deployment.deployment_name) + self.assertEqual( + serving_deployment.resource, + json.dumps({ + 'cpu': resource.cpu, + 'memory': resource.memory, + 'replicas': resource.replicas, + })) + serving_negotiator = session.query(ServingNegotiator).filter_by( + serving_model_id=serving_model_id).one_or_none() + self.assertIsNotNone(serving_negotiator) + self.assertEqual(serving_negotiator.project_id, self.project_id) + + mock_create_deployment.assert_called_once() + + name = 'test-auto-update-1' + param = { + 'model_group_id': self.model_group_id, + 'name': name, + 'resource': resource, + 'is_local': True, + } + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name=param['name'], + is_local=param['is_local'], + comment=None, + model_id=None, + model_group_id=param['model_group_id'], + resource=param['resource']) + session.commit() + serving_model_id = serving_model.id + + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(self.model_2_id, serving_model.model_id) + self.assertEqual('/test_path_2/exported_models', serving_model.model_path) + + @patch('fedlearner_webconsole.utils.flask_utils.get_current_user', MagicMock(return_value=User(username='test'))) + def test_create_remote_serving_from_param(self): + reckon_remote_serving = FakeRemoteServing() + register_remote_serving(FakeRemoteServing.SERVING_PLATFORM, reckon_remote_serving) + name = 'test-remote-serving-1' + remote_platform = 
serving_pb2.ServingServiceRemotePlatform(platform=FakeRemoteServing.SERVING_PLATFORM, + payload='test-payload') + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name=name, + is_local=True, + comment=None, + model_id=None, + model_group_id=self.model_group_id, + resource=None, + remote_platform=remote_platform) + session.commit() + serving_model_id = serving_model.id + + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertEqual(FakeRemoteServing.DEPLOY_URL, serving_model.endpoint) + deploy_platform = serving_pb2.RemoteDeployConfig( + platform=FakeRemoteServing.SERVING_PLATFORM, + payload='test-payload', + deploy_id=1, + deploy_name=f'privacy-platform-test-remote-serving-{serving_model_id}', + model_src_path='', + ) + self.assertEqual(json.dumps(MessageToDict(deploy_platform)), + serving_model.serving_deployment.deploy_platform) + + @patch('fedlearner_webconsole.utils.flask_utils.get_current_user', MagicMock(return_value=User(username='test'))) + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_get_detail(self, mock_create_deployment: MagicMock): + name = 'test-serving-service-1' + resource = serving_pb2.ServingServiceResource( + cpu='1', + memory='2', + replicas=3, + ) + param = { + 'model_id': self.model_1_id, + 'name': name, + 'comment': '', + 'resource': resource, + 'is_local': False, + } + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name=param['name'], + is_local=param['is_local'], + comment=param['comment'], + model_id=param['model_id'], + model_group_id=None, + resource=param['resource']) + detail = serving_model_service.get_serving_service_detail(serving_model.id, serving_model.project_id) + self.assertEqual(name, detail.name) + self.assertEqual(ServingModelStatus.PENDING_ACCEPT.name, detail.status) + try: + serving_model_service.get_serving_service_detail(serving_model.id + 1) + except NotFoundException: + pass + try: + serving_model_service.get_serving_service_detail(serving_model.id, serving_model.project_id + 1) + except NotFoundException: + pass + + # get remote serving detail + name = 'test-remote-serving-1' + reckon_remote_serving = FakeRemoteServing() + register_remote_serving(FakeRemoteServing.SERVING_PLATFORM, reckon_remote_serving) + remote_platform = serving_pb2.ServingServiceRemotePlatform(platform=FakeRemoteServing.SERVING_PLATFORM, + payload='test-payload') + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name=name, + is_local=False, + comment=None, + model_id=self.model_1_id, + model_group_id=None, + resource=None, + remote_platform=remote_platform) + detail = serving_model_service.get_serving_service_detail(serving_model.id, serving_model.project_id) + expected_detail = serving_pb2.ServingServiceDetail(id=serving_model.id, + project_id=self.project_id, + name=name, + model_id=self.model_1_id, + model_type=ModelType.NN_MODEL.name, + is_local=False, + endpoint='test_deploy_url', + instance_num_status='UNKNOWN', + status=ServingModelStatus.AVAILABLE.name, + support_inference=True) + expected_detail.remote_platform.CopyFrom( + 
serving_pb2.ServingServiceRemotePlatform( + platform='unittest_mock', + payload='test-payload', + )) + self.assertPartiallyEqual(to_dict(expected_detail), + to_dict(detail), + ignore_fields=['created_at', 'updated_at']) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_set_ref(self, mock_create_deployment: MagicMock): + name = 'test-serving-service-1' + resource = serving_pb2.ServingServiceResource( + cpu='1', + memory='2', + replicas=3, + ) + param = { + 'model_id': self.model_1_id, + 'name': name, + 'comment': '', + 'resource': resource, + 'is_local': False, + } + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name=param['name'], + is_local=param['is_local'], + comment=param['comment'], + model_id=param['model_id'], + model_group_id=None, + resource=param['resource']) + serving_service = serving_model.to_serving_service() + self.assertTrue(serving_service.is_local) # default value + serving_model_service = ServingModelService(session) + serving_model_service.set_resource_and_status_on_ref(serving_service, serving_model) + serving_model_service.set_is_local_on_ref(serving_service, serving_model) + self.assertEqual('UNKNOWN', serving_service.instance_num_status) + self.assertEqual('1', serving_service.resource.cpu) + self.assertEqual('2', serving_service.resource.memory) + self.assertEqual(3, serving_service.resource.replicas) + self.assertFalse(serving_service.is_local) + + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_update_model(self, mock_create_deployment: MagicMock): + resource = serving_pb2.ServingServiceResource( + cpu='1', + memory='2', + replicas=3, + ) + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name='test-serving-service-1', + is_local=True, + comment='', + model_id=self.model_1_id, + model_group_id=None, + resource=resource) + + need_update = serving_model_service.update_model(model_id=None, + model_group_id=self.model_group_id, + serving_model=serving_model) + self.assertEqual(True, need_update) + self.assertEqual(self.model_2_id, serving_model.model_id) + + need_update = serving_model_service.update_model(model_id=self.model_2_id, + model_group_id=self.model_group_id, + serving_model=serving_model) + self.assertEqual(False, need_update) + self.assertEqual(self.model_2_id, serving_model.model_id) + self.assertIsNone(serving_model.model_group_id) + + need_update = serving_model_service.update_model(model_id=self.model_1_id, + model_group_id=self.model_group_id, + serving_model=serving_model) + self.assertEqual(True, need_update) + self.assertEqual(self.model_1_id, serving_model.model_id) + self.assertIsNone(serving_model.model_group_id) + + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name='test-serving-service-2', + is_local=False, + comment='', + model_id=self.model_1_id, + model_group_id=None, + resource=resource) + with self.assertRaises(InvalidArgumentException): + serving_model_service.update_model(model_id=None, + model_group_id=self.model_group_id, + serving_model=serving_model) + + @patch('fedlearner_webconsole.utils.flask_utils.get_current_user', MagicMock(return_value=User(username='test'))) + 
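+    # get_current_user is patched so the remote-serving branch of this test can resolve a username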
@patch('fedlearner_webconsole.serving.services.k8s_client') + @patch('fedlearner_webconsole.serving.services.ServingDeploymentService.create_or_update_deployment') + def test_delete_serving(self, mock_create_deployment: MagicMock, mock_k8s_client: MagicMock): + mock_k8s_client.delete_config_map = MagicMock() + mock_k8s_client.delete_app = MagicMock() + mock_k8s_client.delete_service = MagicMock() + # delete serving inside platform + resource = serving_pb2.ServingServiceResource( + cpu='1', + memory='2', + replicas=3, + ) + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name='test-serving-service-1', + is_local=True, + comment='', + model_id=self.model_1_id, + model_group_id=None, + resource=resource) + serving_model_id = serving_model.id + serving_deployment_id = serving_model.serving_deployment_id + serving_model_service.delete_serving_service(serving_model) + mock_k8s_client.delete_config_map.assert_called_once() + mock_k8s_client.delete_app.assert_called_once() + mock_k8s_client.delete_service.assert_called_once() + # check db + with db.session_scope() as session: + serving_model = session.query(ServingModel).get(serving_model_id) + self.assertIsNone(serving_model) + serving_deployment = session.query(ServingDeployment).get(serving_deployment_id) + self.assertIsNone(serving_deployment) + negotiator = session.query(ServingNegotiator).filter_by(serving_model_id=serving_model_id).one_or_none() + self.assertIsNone(negotiator) + + # delete remote serving + reckon_remote_serving = FakeRemoteServing() + register_remote_serving(FakeRemoteServing.SERVING_PLATFORM, reckon_remote_serving) + remote_platform = serving_pb2.ServingServiceRemotePlatform(platform=FakeRemoteServing.SERVING_PLATFORM, + payload='test-payload') + with db.session_scope() as session: + serving_model_service = ServingModelService(session) + serving_model = serving_model_service.create_from_param(project_id=self.project_id, + name='test-remote-serving-1', + is_local=True, + comment=None, + model_id=None, + model_group_id=self.model_group_id, + resource=None, + remote_platform=remote_platform) + serving_model_service.delete_serving_service(serving_model) + # called times not increased + mock_k8s_client.delete_config_map.assert_called_once() + mock_k8s_client.delete_app.assert_called_once() + mock_k8s_client.delete_service.assert_called_once() + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/serving_yaml_template.py b/web_console_v2/api/fedlearner_webconsole/serving/serving_yaml_template.py new file mode 100644 index 000000000..bb7aadbf1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/serving_yaml_template.py @@ -0,0 +1,154 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Dict, Union +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.orm import Session +from fedlearner_webconsole.utils.flask_utils import get_current_user +from fedlearner_webconsole.utils.pp_yaml import compile_yaml_template,\ + add_username_in_label, GenerateDictService + +CONFIG_MAP_TEMPLATE = """{ + "apiVersion": "v1", + "kind": "ConfigMap", + "metadata": { + "name": self.name + "-config" + }, + "data": { + "config.pb": "model_config_list {\\n config {\\n name: '" + self.name + "'\\n base_path: '" + model.base_path + "'\\n model_platform: 'tensorflow'\\n }\\n}\\n" + } +}""" + +DEPLOYMENT_TEMPLATE = """{ + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "name": self.name, + "labels": system.variables.labels, + "annotations": { + "queue": "fedlearner", + "schedulerName": "batch", + "min-member": "1", + "resource-cpu": str(self.resource.resource.cpu), + "resource-mem": str(self.resource.resource.memory), + }, + }, + "spec": { + "selector": { + "matchLabels": { + "app": self.name + } + }, + "replicas": int(self.resource.replicas), + "template": { + "metadata": { + "labels": { + "app": self.name + } + }, + "spec": { + "volumes": [ + { + "name": self.name+ "-config", + "configMap": { + "name": self.name + "-config" + } + } + ] + list(system.variables.volumes_list), + "containers": [ + { + "name": self.name, + "image": system.variables.serving_image, + "resources": { + "limits": dict(self.resource.resource) + }, + "args": [ + "--port=8500", + "--rest_api_port=8501", + "--model_config_file=/app/config/config.pb" + ], + "env": system.basic_envs_list, + "ports": [ + { + "containerPort": 8500, + "name": "grpc", + }, + { + "containerPort": 8501, + "name": "restful", + } + ], + "volumeMounts": [ + { + "name": self.name + "-config", + "mountPath": "/app/config/" + } + ] + list(system.variables.volume_mounts_list) + } + ] + } + } + } +}""" + +SERVICE_TEMPLATE = """{ + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "name": self.name + }, + "spec": { + "selector": { + "app": self.name + }, + "ports": [ + { + "port": 8501, + "targetPort": "restful", + "name": "restful", + }, + { + "port": 8500, + "targetPort": "grpc", + "name": "grpc", + } + ] + } +}""" + + +def generate_self_dict(serving: Union[Dict, DeclarativeMeta]) -> Dict: + if not isinstance(serving, dict): + serving = serving.to_dict() + return serving + + +def generate_model_dict(model: Union[Dict, DeclarativeMeta]) -> Dict: + if not isinstance(model, dict): + model = model.to_dict() + return model + + +def generate_serving_yaml(serving: Dict[str, Union[Dict, DeclarativeMeta]], yaml_template: str, + session: Session) -> Dict: + result_dict = compile_yaml_template( + yaml_template, + post_processors=[ + lambda loaded_json: add_username_in_label(loaded_json, getattr(get_current_user(), 'username', None)) + ], + system=GenerateDictService(session).generate_system_dict(), + model=generate_model_dict(serving['model']), + self=generate_self_dict(serving['serving'])) + return result_dict diff --git a/web_console_v2/api/fedlearner_webconsole/serving/serving_yaml_template_test.py b/web_console_v2/api/fedlearner_webconsole/serving/serving_yaml_template_test.py new file mode 100644 index 000000000..89156d2cb --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/serving_yaml_template_test.py @@ -0,0 +1,91 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch + +from google.protobuf.text_format import Parse +from tensorflow_serving.config.model_server_config_pb2 import ModelServerConfig + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.serving.serving_yaml_template import (CONFIG_MAP_TEMPLATE, DEPLOYMENT_TEMPLATE, + SERVICE_TEMPLATE, generate_serving_yaml) + + +class ServingYamlTemplateTest(unittest.TestCase): + + def setUp(self): + + self.patcher_generate_system_dict = patch( + 'fedlearner_webconsole.serving.serving_yaml_template.GenerateDictService.generate_system_dict') + mock = self.patcher_generate_system_dict.start() + mock.return_value = { + 'basic_envs_list': { + 'name': 'HADOOP_HOME', + 'value': '/hadoop/' + }, + 'variables': { + 'labels': {}, + 'serving_image': 'dockerhub.com/fedlearner/serving:latest', + 'volumes_list': [{}], + 'volume_mounts_list': [{}], + } + } + + self.serving = { + 'project': None, + 'model': { + 'base_path': '/test', + }, + 'serving': { + 'name': 'serving-demo', + 'resource': { + 'resource': { + 'cpu': '4000m', + 'memory': '4Gi', + }, + 'replicas': 2, + }, + }, + } + return super().setUp() + + def tearDown(self): + self.patcher_generate_system_dict.stop() + return super().tearDown() + + def test_config_map(self): + with db.session_scope() as session: + config_map_object = generate_serving_yaml(self.serving, CONFIG_MAP_TEMPLATE, session) + config = Parse(config_map_object['data']['config.pb'], ModelServerConfig()) + self.assertEqual(config.model_config_list.config[0].base_path, '/test') + + def test_deployment(self): + with db.session_scope() as session: + deployment_object = generate_serving_yaml(self.serving, DEPLOYMENT_TEMPLATE, session) + self.assertEqual('4000m', + deployment_object['spec']['template']['spec']['containers'][0]['resources']['limits']['cpu']) + self.assertEqual('serving-demo', deployment_object['metadata']['name']) + self.assertEqual('4000m', deployment_object['metadata']['annotations']['resource-cpu']) + self.assertEqual('4Gi', deployment_object['metadata']['annotations']['resource-mem']) + + def test_service(self): + with db.session_scope() as session: + service_object = generate_serving_yaml(self.serving, SERVICE_TEMPLATE, session) + self.assertEqual('serving-demo', service_object['metadata']['name']) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/serving/utils.py b/web_console_v2/api/fedlearner_webconsole/serving/utils.py new file mode 100644 index 000000000..f055a28f0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/utils.py @@ -0,0 +1,34 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Optional + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.exceptions import NotFoundException +from fedlearner_webconsole.mmgr.models import Model +from fedlearner_webconsole.serving.models import ServingNegotiator + + +def get_model(model_id: int, session: Session) -> Model: + model = session.query(Model).get(model_id) + if model is None: + raise NotFoundException(f'[Serving] model {model_id} is not found') + return model + + +def get_serving_negotiator_by_serving_model_id(serving_model_id: int, session: Session) -> Optional[ServingNegotiator]: + serving_negotiator = session.query(ServingNegotiator).filter_by(serving_model_id=serving_model_id).one_or_none() + return serving_negotiator diff --git a/web_console_v2/api/fedlearner_webconsole/serving/utils_test.py b/web_console_v2/api/fedlearner_webconsole/serving/utils_test.py new file mode 100644 index 000000000..15caa0607 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/serving/utils_test.py @@ -0,0 +1,59 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.mmgr.models import Model +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.serving.utils import get_model +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ServingServicesUtilsTest(NoWebServerTestCase): + + def setUp(self): + self.maxDiff = None + super().setUp() + # insert project + with db.session_scope() as session: + project = Project() + project.name = 'test_project_name' + session.add(project) + session.flush([project]) + + model = Model() + model.name = 'test_model_name' + model.model_path = '/test_path/' + model.group_id = 1 + model.uuid = 'test_uuid_1' + model.project_id = project.id + + session.add(model) + session.commit() + self.project_id = project.id + self.model_id = model.id + self.model_uuid = model.uuid + + def test_get_model(self): + with db.session_scope() as session: + model = get_model(self.model_id, session) + self.assertEqual(self.project_id, model.project_id) + self.assertEqual(self.model_id, model.id) + self.assertEqual(self.model_uuid, model.uuid) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/setting/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/setting/BUILD.bazel new file mode 100644 index 000000000..56802f042 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/setting/BUILD.bazel @@ -0,0 +1,105 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_test", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:app_version_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "service_test", + srcs = [ + "service_test.py", + ], + imports = ["../.."], + main = "service_test.py", + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:app_version_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + 
"//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + ], +) + +py_test( + name = "apis_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + ":models_lib", + ":service_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/setting/apis.py b/web_console_v2/api/fedlearner_webconsole/setting/apis.py index 339406ae4..58aaaaabb 100644 --- a/web_console_v2/api/fedlearner_webconsole/setting/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/setting/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,99 +13,327 @@ # limitations under the License. # coding: utf-8 +import logging +from http import HTTPStatus from pathlib import Path +from flask_restful import Resource +from google.protobuf.json_format import ParseDict, ParseError +from marshmallow import fields -from flask_restful import Resource, reqparse - -from fedlearner_webconsole.utils.k8s_client import k8s_client -from fedlearner_webconsole.utils.decorators import jwt_required -from fedlearner_webconsole.utils.decorators import admin_required +from fedlearner_webconsole.k8s.k8s_client import k8s_client +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.proto.setting_pb2 import SystemVariables, SettingPb +from fedlearner_webconsole.utils.decorators.pp_flask import admin_required, use_kwargs, use_args +from fedlearner_webconsole.setting.service import DashboardService, SettingService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import (NotFoundException, NoAccessException, InvalidArgumentException) +from fedlearner_webconsole.utils.flask_utils import make_flask_response +from fedlearner_webconsole.flag.models import Flag _POD_NAMESPACE = 'default' # Ref: https://stackoverflow.com/questions/46046110/ # how-to-get-the-current-namespace-in-a-pod -_k8s_namespace_file = Path( - '/var/run/secrets/kubernetes.io/serviceaccount/namespace') +_k8s_namespace_file = Path('/var/run/secrets/kubernetes.io/serviceaccount/namespace') if _k8s_namespace_file.is_file(): - _POD_NAMESPACE = _k8s_namespace_file.read_text() + _POD_NAMESPACE = _k8s_namespace_file.read_text(encoding='utf-8') + +_SPECIAL_KEYS = ['webconsole_image', 'system_info', 'system_variables'] -class SettingsApi(Resource): - @jwt_required() +class SettingApi(Resource): + + @credentials_required @admin_required - def get(self): - deployment = k8s_client.get_deployment( - 
name='fedlearner-web-console-v2', namespace=_POD_NAMESPACE)
+    def _get_webconsole_image(self) -> SettingPb:
+        try:
+            deployment = k8s_client.get_deployment(name='fedlearner-web-console-v2')
+            image = deployment.spec.template.spec.containers[0].image
+        except Exception as e:  # pylint: disable=broad-except
+            logging.error(f'settings: get deployment: {str(e)}')
+            image = None
+        return SettingPb(
+            uniq_key='webconsole_image',
+            value=image,
+        )
+
+    @credentials_required
+    @admin_required
+    def _get_system_variables(self) -> SystemVariables:
+        with db.session_scope() as session:
+            return SettingService(session).get_system_variables()
+
+    def get(self, key: str):
+        """Gets a specific setting.
+        ---
+        tags:
+        - system
+        description: gets a specific setting.
+        parameters:
+        - in: path
+          name: key
+          schema:
+            type: string
+          required: true
+        responses:
+          200:
+            description: the setting
+            content:
+              application/json:
+                schema:
+                  oneOf:
+                  - $ref: '#/definitions/fedlearner_webconsole.proto.SettingPb'
+                  - $ref: '#/definitions/fedlearner_webconsole.proto.SystemVariables'
+                  - $ref: '#/definitions/fedlearner_webconsole.proto.SystemInfo'
+        """
+        if key == 'webconsole_image':
+            return make_flask_response(self._get_webconsole_image())
+
+        if key == 'system_variables':
+            return make_flask_response(self._get_system_variables())
+
+        if key == 'system_info':
+            return make_flask_response(SettingService.get_system_info())
+
+        setting = None
+        if key not in _SPECIAL_KEYS:
+            with db.session_scope() as session:
+                setting = SettingService(session).get_setting(key)
+        if setting is None:
+            raise NotFoundException(message=f'Failed to find setting {key}')
+        return make_flask_response(setting.to_proto())
+
+    @credentials_required
+    @admin_required
+    @use_kwargs({'value': fields.String(required=True)})
+    def put(self, key: str, value: str):
+        """Updates a specific setting.
+        ---
+        tags:
+        - system
+        description: updates a specific setting.
+        parameters:
+        - in: path
+          name: key
+          schema:
+            type: string
+          required: true
+        - in: body
+          name: body
+          schema:
+            type: object
+            properties:
+              value:
+                type: string
+          required: true
+        responses:
+          200:
+            description: the updated setting
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.SettingPb'
+        """
+        if key in _SPECIAL_KEYS:
+            raise NoAccessException(message=f'Not able to update {key}')
+
+        with db.session_scope() as session:
+            setting = SettingService(session).create_or_update_setting(key, value)
+            return make_flask_response(setting.to_proto())
 
-        return {
-            'data': {
-                'webconsole_image':
-                    deployment.spec.template.spec.containers[0].image
-            }
-        }
+
+class UpdateSystemVariablesApi(Resource):
 
-    @jwt_required()
+    @credentials_required
     @admin_required
-    def patch(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument('webconsole_image',
-                            type=str,
-                            required=False,
-                            default=None,
-                            help='image for webconsole')
-        data = parser.parse_args()
-
-        if data['webconsole_image']:
-            new_image = data['webconsole_image']
-            deployment = k8s_client.get_deployment('fedlearner-web-console-v2',
-                                                   _POD_NAMESPACE)
-            spec = deployment.spec
-            spec.template.spec.containers[0].image = new_image
-            metadata = deployment.metadata
-            k8s_client.create_or_update_deployment(
-                metadata=metadata,
-                spec=spec,
-                name=metadata.name,
-                namespace=metadata.namespace)
-
-        return {'data': {}}
+    @use_args({'variables': fields.List(fields.Dict())})
+    def post(self, params: dict):
+        """Updates system variables.
+        ---
+        tags:
+        - system
+        description: updates all system variables.
+        parameters:
+        - in: body
+          name: body
+          schema:
+            $ref: '#/definitions/fedlearner_webconsole.proto.SystemVariables'
+        responses:
+          200:
+            description: updated system variables
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.SystemVariables'
+        """
+        try:
+            system_variables = ParseDict(params, SystemVariables())
+        except ParseError as e:
+            raise InvalidArgumentException(details=str(e)) from e
+        with db.session_scope() as session:
+            # TODO(xiangyuxuan.prs): check fixed flag
+            SettingService(session).set_system_variables(system_variables)
+            session.commit()
+        return make_flask_response(system_variables)
+
+
+class UpdateImageApi(Resource):
+
+    @credentials_required
+    @admin_required
+    @use_kwargs({'webconsole_image': fields.String(required=True)})
+    def post(self, webconsole_image: str):
+        """Updates webconsole image.
+        ---
+        tags:
+        - system
+        description: updates webconsole image.
+        parameters:
+        - in: body
+          name: body
+          schema:
+            type: object
+            properties:
+              webconsole_image:
+                type: string
+          required: true
+        responses:
+          204:
+            description: updated successfully
+        """
+        deployment = k8s_client.get_deployment('fedlearner-web-console-v2')
+        spec = deployment.spec
+        spec.template.spec.containers[0].image = webconsole_image
+        metadata = deployment.metadata
+        k8s_client.create_or_update_deployment(metadata=metadata,
+                                               spec=spec,
+                                               name=metadata.name,
+                                               namespace=metadata.namespace)
+        return make_flask_response(status=HTTPStatus.NO_CONTENT)
 
 
 class SystemPodLogsApi(Resource):
-    @jwt_required()
+
+    @credentials_required
     @admin_required
-    def get(self, pod_name):
-        parser = reqparse.RequestParser()
-        parser.add_argument('tail_lines',
-                            type=int,
-                            location='args',
-                            required=True,
-                            help='tail lines is required')
-        data = parser.parse_args()
-        tail_lines = data['tail_lines']
-        return {
-            'data':
-                k8s_client.get_pod_log(name=pod_name,
-                                       namespace=_POD_NAMESPACE,
-                                       tail_lines=tail_lines).split('\n')
-        }
+    @use_kwargs({'tail_lines': fields.Integer(required=True)}, location='query')
+    def get(self, pod_name: str, tail_lines: int):
+        """Gets webconsole pod logs.
+        ---
+        tags:
+        - system
+        description: gets webconsole pod logs.
+        parameters:
+        - in: path
+          name: pod_name
+          schema:
+            type: string
+          required: true
+        - in: query
+          name: tail_lines
+          schema:
+            type: integer
+          required: true
+        responses:
+          200:
+            description: logs
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    type: string
+        """
+        return make_flask_response(
+            k8s_client.get_pod_log(name=pod_name, namespace=_POD_NAMESPACE, tail_lines=tail_lines).split('\n'))
 
 
 class SystemPodsApi(Resource):
-    @jwt_required()
+
+    @credentials_required
     @admin_required
     def get(self):
+        """Gets webconsole pods.
+        ---
+        tags:
+        - system
+        description: gets webconsole pods.
+        responses:
+          200:
+            description: name list of pods
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    type: string
+        """
         webconsole_v2_pod_list = list(
-            map(
-                lambda pod: pod.metadata.name,
-                k8s_client.get_pods(
-                    _POD_NAMESPACE,
-                    'app.kubernetes.io/instance=fedlearner-web-console-v2').
-                items))
-        return {'data': webconsole_v2_pod_list}
+            map(lambda pod: pod.metadata.name,
+                k8s_client.get_pods(_POD_NAMESPACE, 'app.kubernetes.io/instance=fedlearner-web-console-v2').items))
+        return make_flask_response(webconsole_v2_pod_list)
+
+
+class VersionsApi(Resource):
+    # This is a system-based api, no JWT-Token for now.
+    def get(self):
+        """Gets the version info.
+        ---
+        tags:
+        - system
+        description: gets the version info.
+        responses:
+          200:
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.ApplicationVersion'
+        """
+        return make_flask_response(SettingService.get_application_version().to_proto())
+
+
+class DashboardsApi(Resource):
+
+    @credentials_required
+    @admin_required
+    def get(self):
+        """Get dashboard information API
+        ---
+        tags:
+        - system
+        description: Get dashboard information API
+        responses:
+          200:
+            description: a list of dashboard information. Note that the following dashboard ['overview'] is available.
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.DashboardInformation'
+          500:
+            description: dashboard setup is wrong, please check.
+            content:
+              application/json:
+                schema:
+                  type: object
+                  properties:
+                    code:
+                      type: integer
+                    message:
+                      type: string
+        """
+        if not Flag.DASHBOARD_ENABLED.value:
+            raise NoAccessException('if you want to view dashboard, please enable flag `DASHBOARD_ENABLED`')
+        return make_flask_response(DashboardService().get_dashboards())
 
 
 def initialize_setting_apis(api):
-    api.add_resource(SettingsApi, '/settings')
+    api.add_resource(UpdateSystemVariablesApi, '/settings:update_system_variables')
+    api.add_resource(UpdateImageApi, '/settings:update_image')
+    api.add_resource(SettingApi, '/settings/<string:key>')
+    api.add_resource(VersionsApi, '/versions')
     api.add_resource(SystemPodLogsApi, '/system_pods/<string:pod_name>/logs')
     api.add_resource(SystemPodsApi, '/system_pods/name')
+    api.add_resource(DashboardsApi, '/dashboards')
diff --git a/web_console_v2/api/fedlearner_webconsole/setting/apis_test.py b/web_console_v2/api/fedlearner_webconsole/setting/apis_test.py
new file mode 100644
index 000000000..0e6ef20b2
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/setting/apis_test.py
@@ -0,0 +1,322 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +# coding: utf-8 +import json +import logging +import os +import unittest +from http import HTTPStatus +from types import SimpleNamespace +from unittest.mock import patch, MagicMock + +from google.protobuf.struct_pb2 import Value + +from envs import Envs +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.setting_pb2 import DashboardInformation, SystemVariables, SystemVariable, SystemInfo +from fedlearner_webconsole.setting.apis import _POD_NAMESPACE +from fedlearner_webconsole.setting.models import Setting +from fedlearner_webconsole.setting.service import SettingService + +from testing.common import BaseTestCase + + +class SettingApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + with db.session_scope() as session: + setting = Setting(uniq_key='key1', value='value 1') + session.add(setting) + session.commit() + + @patch('fedlearner_webconsole.setting.apis.k8s_client') + def test_get_webconsole_image(self, mock_k8s_client: MagicMock): + deployment = SimpleNamespace( + **{ + 'metadata': + SimpleNamespace(**{ + 'name': 'fedlearner-web-console-v2', + 'namespace': 'testns' + }), + 'spec': + SimpleNamespace( + **{ + 'template': + SimpleNamespace( + **{ + 'spec': + SimpleNamespace( + **{'containers': [SimpleNamespace(**{'image': 'fedlearner:test'})]}) + }) + }) + }) + mock_k8s_client.get_deployment = MagicMock(return_value=deployment) + resp = self.get_helper('/api/v2/settings/webconsole_image') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + self.signin_as_admin() + resp = self.get_helper('/api/v2/settings/webconsole_image') + self.assertResponseDataEqual(resp, { + 'uniq_key': 'webconsole_image', + 'value': 'fedlearner:test', + }) + + def test_get_system_variables(self): + system_variables = SystemVariables(variables=[ + SystemVariable(name='test1', value_type=SystemVariable.ValueType.INT, value=Value(number_value=1)) + ]) + with db.session_scope() as session: + SettingService(session).set_system_variables(system_variables) + session.commit() + resp = self.get_helper('/api/v2/settings/system_variables') + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + self.signin_as_admin() + resp = self.get_helper('/api/v2/settings/system_variables') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + resp, {'variables': [{ + 'name': 'test1', + 'value': 1.0, + 'value_type': 'INT', + 'fixed': False + }]}) + + def test_get(self): + resp = self.get_helper('/api/v2/settings/key1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp)['value'], 'value 1') + # Black list one + resp = self.get_helper('/api/v2/settings/variables') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + resp = self.get_helper('/api/v2/settings/key2') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + + def test_put(self): + resp = self.put_helper('/api/v2/settings/key1', data={'value': 'new value'}) + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + self.signin_as_admin() + resp = self.put_helper('/api/v2/settings/key1', data={'value': 'new value'}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp)['value'], 'new value') + with db.session_scope() as session: + setting = session.query(Setting).filter_by(uniq_key='key1').first() + self.assertEqual(setting.value, 'new value') + # Black 
list one + resp = self.put_helper('/api/v2/settings/system_variables', data={'value': 'new value'}) + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + + +class SettingsApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + self._system_pods = SimpleNamespace( + **{ + 'items': [ + SimpleNamespace(**{'metadata': SimpleNamespace(**{'name': 'fake-fedlearner-web-console-v2-1'})}), + SimpleNamespace(**{'metadata': SimpleNamespace(**{'name': 'fake-fedlearner-web-console-v2-2'})}), + ] + }) + self._system_pod_log = 'log1\nlog2' + self._mock_k8s_client = MagicMock() + self._mock_k8s_client.get_pods = MagicMock(return_value=self._system_pods) + self._mock_k8s_client.get_pod_log = MagicMock(return_value=self._system_pod_log) + self.signin_as_admin() + + def test_get_system_pods(self): + with patch('fedlearner_webconsole.setting.apis.k8s_client', self._mock_k8s_client): + resp = self.get_helper('/api/v2/system_pods/name') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp), + ['fake-fedlearner-web-console-v2-1', 'fake-fedlearner-web-console-v2-2']) + + def test_get_system_pods_log(self): + fake_pod_name = 'fake-fedlearner-web-console-v2-1' + with patch('fedlearner_webconsole.setting.apis.k8s_client', self._mock_k8s_client): + resp = self.get_helper(f'/api/v2/system_pods/{fake_pod_name}/logs?tail_lines=100') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp), ['log1', 'log2']) + self._mock_k8s_client.get_pod_log.assert_called_with(name=fake_pod_name, + namespace=_POD_NAMESPACE, + tail_lines=100) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info', + lambda: SystemInfo(name='hahaha', domain_name='fl-test.com', pure_domain_name='test')) + def test_get_own_info_api(self): + resp = self.get_helper('/api/v2/settings/system_info') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'name': 'hahaha', 'domain_name': 'fl-test.com', 'pure_domain_name': 'test'}) + + +class UpdateSystemVariablesApi(BaseTestCase): + + def test_post_no_permission(self): + resp = self.post_helper('/api/v2/settings:update_system_variables', data={'variables': []}) + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + + def test_post_invalid_argument(self): + system_variables = SystemVariables(variables=[ + SystemVariable(name='test1', value_type=SystemVariable.ValueType.INT, value=Value(number_value=1)) + ]) + with db.session_scope() as session: + SettingService(session).set_system_variables(system_variables) + session.commit() + + self.signin_as_admin() + resp = self.post_helper('/api/v2/settings:update_system_variables', data={'variables': [{'h': 'ff'}]}) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + self.assertIn('Failed to parse variables field', json.loads(resp.data).get('details')) + + with db.session_scope() as session: + self.assertEqual(system_variables, SettingService(session).get_system_variables()) + + def test_post_200(self): + self.signin_as_admin() + resp = self.post_helper('/api/v2/settings:update_system_variables', + data={'variables': [{ + 'name': 'new_var', + 'value': 2, + 'value_type': 'INT' + }]}) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + resp, {'variables': [{ + 'name': 'new_var', + 'value': 2.0, + 'value_type': 'INT', + 'fixed': False + }]}) + + expected_system_variables = 
SystemVariables(variables=[ + SystemVariable(name='new_var', value_type=SystemVariable.ValueType.INT, value=Value(number_value=2)) + ]) + with db.session_scope() as session: + self.assertEqual(expected_system_variables, SettingService(session).get_system_variables()) + + +class UpdateImageApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + @patch('fedlearner_webconsole.setting.apis.k8s_client') + def test_post(self, mock_k8s_client: MagicMock): + deployment = SimpleNamespace( + **{ + 'metadata': + SimpleNamespace(**{ + 'name': 'fedlearner-web-console-v2', + 'namespace': 'testns' + }), + 'spec': + SimpleNamespace( + **{ + 'template': + SimpleNamespace( + **{ + 'spec': + SimpleNamespace( + **{'containers': [SimpleNamespace(**{'image': 'fedlearner:test'})]}) + }) + }) + }) + mock_k8s_client.get_deployment = MagicMock(return_value=deployment) + mock_k8s_client.create_or_update_deployment = MagicMock() + + resp = self.post_helper('/api/v2/settings:update_image', data={'webconsole_image': 'test-new-image'}) + self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) + + self.signin_as_admin() + resp = self.post_helper('/api/v2/settings:update_image', data={'webconsole_image': 'test-new-image'}) + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + _, kwargs = mock_k8s_client.create_or_update_deployment.call_args + self.assertEqual(kwargs['spec'].template.spec.containers[0].image, 'test-new-image') + self.assertEqual(kwargs['name'], deployment.metadata.name) + self.assertEqual(kwargs['namespace'], deployment.metadata.namespace) + + +class VersionsApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def test_get_version_api(self): + resp = self.get_helper('/api/v2/versions') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(resp)['branch_name'], '') + + content = """ + revision:f09d681b4eda01f053cc1a645fa6fc0775852a48 + branch name:release-2.0.1 + version:2.0.1.5 + pub date:Fri Jul 16 12:23:19 CST 2021 + """ + application_version_path = os.path.join(Envs.BASE_DIR, '../current_revision') + with open(application_version_path, 'wt', encoding='utf-8') as f: + f.write(content) + + resp = self.get_helper('/api/v2/versions') + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + resp, { + 'pub_date': 'Fri Jul 16 12:23:19 CST 2021', + 'revision': 'f09d681b4eda01f053cc1a645fa6fc0775852a48', + 'branch_name': 'release-2.0.1', + 'version': '2.0.1.5', + }) + + os.remove(application_version_path) + + +class DashboardsApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def test_get_flag_disable(self): + self.signin_as_admin() + get_dashboard_response = self.get_helper('/api/v2/dashboards') + self.assertEqual(get_dashboard_response.status_code, HTTPStatus.FORBIDDEN) + + @patch('fedlearner_webconsole.flag.models.Flag.DASHBOARD_ENABLED.value', True) + @patch('fedlearner_webconsole.setting.apis.DashboardService.get_dashboards') + def test_get(self, mock_get_dashboards: MagicMock): + mock_get_dashboards.return_value = [DashboardInformation()] + get_dashboard_response = self.get_helper('/api/v2/dashboards') + self.assertEqual(get_dashboard_response.status_code, HTTPStatus.UNAUTHORIZED) + + mock_get_dashboards.reset_mock() + self.signin_as_admin() + mock_get_dashboards.return_value = [DashboardInformation()] + get_dashboard_response = self.get_helper('/api/v2/dashboards') + 
self.assertEqual(get_dashboard_response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(get_dashboard_response, [{'name': '', 'uuid': '', 'url': ''}]) + + mock_get_dashboards.reset_mock() + mock_get_dashboards.side_effect = InternalException('') + get_dashboard_response = self.get_helper('/api/v2/dashboards') + self.assertEqual(get_dashboard_response.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.DEBUG) + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/setting/models.py b/web_console_v2/api/fedlearner_webconsole/setting/models.py index 7d46db01f..f1b5bae1b 100644 --- a/web_console_v2/api/fedlearner_webconsole/setting/models.py +++ b/web_console_v2/api/fedlearner_webconsole/setting/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,16 +13,26 @@ # limitations under the License. # coding: utf-8 -# pylint: disable=raise-missing-from - -from sqlalchemy import UniqueConstraint +from sqlalchemy import UniqueConstraint, func from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.proto.setting_pb2 import SettingPb class Setting(db.Model): __tablename__ = 'settings_v2' - __table_args__ = (UniqueConstraint('key', name='uniq_key'), - default_table_args('this is webconsole settings table')) - id = db.Column(db.Integer, primary_key=True, comment='id') - key = db.Column(db.String(255), nullable=False, comment='key') + __table_args__ = (UniqueConstraint('uniq_key', + name='uniq_key'), default_table_args('this is webconsole settings table')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + uniq_key = db.Column(db.String(255), nullable=False, comment='uniq_key') value = db.Column(db.Text, comment='value') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created at') + updated_at = db.Column(db.DateTime(timezone=True), + onupdate=func.now(), + server_default=func.now(), + comment='updated at') + + def to_proto(self): + return SettingPb( + uniq_key=self.uniq_key, + value=self.value, + ) diff --git a/web_console_v2/api/fedlearner_webconsole/setting/models_test.py b/web_console_v2/api/fedlearner_webconsole/setting/models_test.py new file mode 100644 index 000000000..1fc037f0c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/setting/models_test.py @@ -0,0 +1,43 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
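Editor's note on the timestamp columns added to `Setting` above: `server_default=func.now()` plus `onupdate=func.now()` hands `created_at`/`updated_at` maintenance to the database. A minimal, self-contained sketch of the same pattern (the `Demo` model and in-memory SQLite engine are illustrative, not part of this patch):

```python
# Sketch only: the server-maintained timestamp pattern from Setting,
# shown on an illustrative Demo model with an in-memory SQLite engine.
from sqlalchemy import Column, DateTime, Integer, String, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Demo(Base):
    __tablename__ = 'demo'
    id = Column(Integer, primary_key=True, autoincrement=True)
    uniq_key = Column(String(255), nullable=False)
    # The database fills these in; application code never assigns them.
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Demo(uniq_key='k1'))
    session.commit()
    print(session.query(Demo).first().created_at)  # populated by the server default
```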
+# + +import unittest + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.setting_pb2 import SettingPb +from fedlearner_webconsole.setting.models import Setting +from testing.no_web_server_test_case import NoWebServerTestCase + + +class SettingTest(NoWebServerTestCase): + + def test_to_proto(self): + with db.session_scope() as session: + setting = Setting( + uniq_key='test', + value='test value', + ) + session.add(setting) + session.commit() + with db.session_scope() as session: + setting = session.query(Setting).filter_by(uniq_key='test').first() + self.assertEqual(setting.to_proto(), SettingPb( + uniq_key='test', + value='test value', + )) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/setting/service.py b/web_console_v2/api/fedlearner_webconsole/setting/service.py new file mode 100644 index 000000000..b34213a1b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/setting/service.py @@ -0,0 +1,141 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import logging +import os +import json +from typing import List, Optional +from google.protobuf import text_format +from google.protobuf.json_format import MessageToDict, Parse, ParseDict, ParseError +from sqlalchemy.orm.session import Session +from envs import Envs +from fedlearner_webconsole.proto import setting_pb2 +from fedlearner_webconsole.setting.models import Setting +from fedlearner_webconsole.proto.setting_pb2 import SystemVariables +from fedlearner_webconsole.utils.app_version import ApplicationVersion +from fedlearner_webconsole.utils.domain_name import get_pure_domain_name +from fedlearner_webconsole.exceptions import InternalException + + +def parse_application_version(content: str) -> ApplicationVersion: + revision, branch_name, version, pub_date = None, None, None, None + for line in content.split('\n'): + if line.find(':') == -1: + continue + key, value = line.split(':', 1) + key, value = key.strip(), value.strip() + if value == '': + continue + if key == 'revision': + revision = value + elif key == 'branch name': + branch_name = value + elif key == 'version': + version = value + elif key == 'pub date': + pub_date = value + + return ApplicationVersion(revision=revision, branch_name=branch_name, version=version, pub_date=pub_date) + + +class SettingService: + + def __init__(self, session: Session): + self._session = session + + def get_setting(self, key: str) -> Optional[Setting]: + return self._session.query(Setting).filter_by(uniq_key=key).first() + + def create_or_update_setting(self, key: str, value: str) -> Setting: + setting = self._session.query(Setting).filter_by(uniq_key=key).first() + if setting is None: + setting = Setting(uniq_key=key, value=value) + self._session.add(setting) + self._session.commit() + else: + setting.value = value + self._session.commit() + return setting + + def get_system_variables(self) -> SystemVariables: + result = 
SystemVariables() + setting = self.get_setting('system_variables') + if setting is None: + return result + text_format.Parse(setting.value, result) + return result + + def set_system_variables(self, system_variables: SystemVariables): + self.create_or_update_setting('system_variables', text_format.MessageToString(system_variables)) + + @staticmethod + def get_application_version() -> ApplicationVersion: + application_version_path = os.path.join(Envs.BASE_DIR, '../current_revision') + if not os.path.exists(application_version_path): + content = '' + else: + with open(application_version_path, 'r', encoding='utf-8') as f: + content = f.read() + return parse_application_version(content) + + def get_namespace(self) -> str: + return self.get_system_variables_dict().get('namespace', 'default') + + def get_system_variables_dict(self) -> dict: + variables = self.get_system_variables().variables + return { + var.name: MessageToDict(var.value, preserving_proto_field_name=True, including_default_value_fields=True) + for var in variables + } + + @staticmethod + def get_system_info() -> setting_pb2.SystemInfo: + system_info: setting_pb2.SystemInfo = Parse(Envs.SYSTEM_INFO, setting_pb2.SystemInfo()) + system_info.pure_domain_name = get_pure_domain_name(system_info.domain_name) or '' + return system_info + + +class DashboardService(object): + # Reference: https://discuss.elastic.co/t/kibana-g-and-a-parameters-in-the-dashboards-url-string/264642 + DASHBOARD_FMT_STR = '{kibana_address}/app/kibana#/dashboard/{object_uuid}?_a=(filters:!((query:(match_phrase:(service.environment:{cluster})))))' # pylint:disable=line-too-long + + REQUIRED_DASHBOARD = frozenset(['overview']) + + @staticmethod + def _validate_saved_object_uuid(saved_object_uuid: str) -> bool: + if not isinstance(saved_object_uuid, str) or not saved_object_uuid: + return False + return True + + def get_dashboards(self) -> List[setting_pb2.DashboardInformation]: + dashboard_list = json.loads(Envs.KIBANA_DASHBOARD_LIST) + if not DashboardService.REQUIRED_DASHBOARD.issubset({d['name'] for d in dashboard_list}): + raise InternalException( + f'failed to find required dashboard {list(DashboardService.REQUIRED_DASHBOARD)} uuid') + try: + dashboard_information_list = [] + for item in dashboard_list: + dashboard_information = ParseDict(item, setting_pb2.DashboardInformation(), ignore_unknown_fields=False) + if not self._validate_saved_object_uuid(dashboard_information.uuid): + raise InternalException(f'invalid uuid for dashboard {dashboard_information.name}') + + dashboard_information.url = DashboardService.DASHBOARD_FMT_STR.format( + kibana_address=Envs.KIBANA_ADDRESS, object_uuid=dashboard_information.uuid, cluster=Envs.CLUSTER) + dashboard_information_list.append(dashboard_information) + return dashboard_information_list + except ParseError as err: + msg = f'invalid `KIBANA_DASHBOARD_LIST`, details: {err}' + logging.warning(msg) + raise InternalException(msg) from err diff --git a/web_console_v2/api/fedlearner_webconsole/setting/service_test.py b/web_console_v2/api/fedlearner_webconsole/setting/service_test.py new file mode 100644 index 000000000..b774daeac --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/setting/service_test.py @@ -0,0 +1,156 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +# pylint: disable=protected-access +import json +import unittest +from unittest.mock import patch + +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.initial_db import initial_db +from fedlearner_webconsole.proto.setting_pb2 import DashboardInformation, SystemInfo, SystemVariables +from fedlearner_webconsole.setting.models import Setting +from fedlearner_webconsole.setting.service import DashboardService, parse_application_version, SettingService +from fedlearner_webconsole.utils.app_version import Version +from fedlearner_webconsole.db import db +from fedlearner_webconsole.exceptions import InternalException + +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + initial_db() + + def test_get_setting(self): + with db.session_scope() as session: + setting = Setting(uniq_key='test_key1', value='test value 1') + session.add(setting) + session.commit() + # A new session + with db.session_scope() as session: + setting = SettingService(session).get_setting('test_key1') + self.assertEqual(setting.value, 'test value 1') + setting = SettingService(session).get_setting('100') + self.assertIsNone(setting) + + def test_set_setting(self): + # A new setting + with db.session_scope() as session: + setting = SettingService(session).create_or_update_setting(key='k1', value='v1') + self.assertEqual(setting.uniq_key, 'k1') + self.assertEqual(setting.value, 'v1') + setting_in_db = \ + session.query(Setting).filter_by(uniq_key='k1').first() + self.assertEqual(setting_in_db.value, 'v1') + # Existing setting + with db.session_scope() as session: + SettingService(session).create_or_update_setting(key='k1', value='v2') + setting_in_db = \ + session.query(Setting).filter_by(uniq_key='k1').first() + self.assertEqual(setting_in_db.value, 'v2') + + def test_parse_application_version(self): + content = """ + revision:f09d681b4eda01f053cc1a645fa6fc0775852a48 + branch name:release-2.0.1 + version:2.0.1.5 + pub date:Fri Jul 16 12:23:19 CST 2021 + """ + application_version = parse_application_version(content) + self.assertEqual(application_version.revision, 'f09d681b4eda01f053cc1a645fa6fc0775852a48') + self.assertEqual(application_version.branch_name, 'release-2.0.1') + self.assertEqual(application_version.version, Version('2.0.1.5')) + self.assertEqual(application_version.pub_date, 'Fri Jul 16 12:23:19 CST 2021') + + content = """ + revision:f09d681b4eda01f053cc1a645fa6fc0775852a48 + branch name:master + version: + pub date:Fri Jul 16 12:23:19 CST 2021 + """ + application_version = parse_application_version(content) + self.assertEqual(application_version.revision, 'f09d681b4eda01f053cc1a645fa6fc0775852a48') + self.assertEqual(application_version.branch_name, 'master') + self.assertIsNone(application_version.version.version) + self.assertEqual(application_version.pub_date, 'Fri Jul 16 12:23:19 CST 2021') + + def test_get_variable_by_key(self): + with db.session_scope() as session: + self.assertEqual(SettingService(session).get_system_variables_dict()['namespace'], 
'default') + self.assertIsNone(SettingService(session).get_system_variables_dict().get('not-existed')) + + def test_get_system_variables_dict(self): + test_data = {'variables': [{'name': 'a', 'value': 2}, {'name': 'b', 'value': []}]} + with db.session_scope() as session: + SettingService(session).set_system_variables(ParseDict(test_data, SystemVariables())) + self.assertEqual(SettingService(session).get_system_variables_dict(), {'a': 2, 'b': []}) + + @patch('envs.Envs.SYSTEM_INFO', + json.dumps({ + 'name': 'hahaha', + 'domain_name': 'fl-test.com', + 'pure_domain_name': 'test' + })) + def test_get_system_info(self): + with db.session_scope() as session: + system_info = SettingService(session).get_system_info() + self.assertEqual(system_info, SystemInfo(name='hahaha', domain_name='fl-test.com', pure_domain_name='test')) + + +class DashboardServiceTest(unittest.TestCase): + + def test_validate_saved_object_uuid(self): + self.assertFalse(DashboardService._validate_saved_object_uuid('')) + self.assertFalse(DashboardService._validate_saved_object_uuid(None)) + self.assertFalse(DashboardService._validate_saved_object_uuid(1)) + self.assertTrue(DashboardService._validate_saved_object_uuid('c4c0af20-d03c-11ec-9be6-d5c22c92cd59')) + + def test_get_dashboards(self): + with patch('envs.Envs.KIBANA_DASHBOARD_LIST', '[]'): + with self.assertRaises(InternalException) as cm: + DashboardService().get_dashboards() + self.assertEqual(cm.exception.details, 'failed to find required dashboard [\'overview\'] uuid') + with patch('envs.Envs.KIBANA_DASHBOARD_LIST', json.dumps([{'name': 'overview', 'uuid': 1}])): + with self.assertRaises(InternalException) as cm: + DashboardService().get_dashboards() + self.assertEqual( + cm.exception.details, 'invalid `KIBANA_DASHBOARD_LIST`, ' + 'details: Failed to parse uuid field: expected string or bytes-like object.') + with patch('envs.Envs.KIBANA_DASHBOARD_LIST', json.dumps([{'name': 'overview', 'test': 1}])): + with self.assertRaises(InternalException) as cm: + DashboardService().get_dashboards() + self.assertIn( + 'invalid `KIBANA_DASHBOARD_LIST`, details: Message type "fedlearner_webconsole.proto.DashboardInformation" has no field named "test".', # pylint: disable=line-too-long + cm.exception.details) + with patch('envs.Envs.KIBANA_DASHBOARD_LIST', json.dumps([{'name': 'overview', 'uuid': '1'}])): + self.assertEqual( + DashboardService().get_dashboards(), + [ + DashboardInformation( + name='overview', + uuid='1', + # pylint: disable=line-too-long + url= + 'localhost:1993/app/kibana#/dashboard/1?_a=(filters:!((query:(match_phrase:(service.environment:default)))))', + ) + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/sparkapp/BUILD.bazel new file mode 100644 index 000000000..c796eb015 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sparkapp/BUILD.bazel @@ -0,0 +1,89 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "schema_lib", + srcs = ["schema.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:images_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = 
"schema_lib_test", + srcs = ["schema_test.py"], + imports = ["../.."], + main = "schema_test.py", + deps = [ + ":schema_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":schema_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole/k8s:k8s_client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:file_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "service_lib_test", + srcs = ["service_test.py"], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "service_test.py", + deps = [ + ":schema_lib", + ":service_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":schema_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + srcs = ["apis_test.py"], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/apis.py b/web_console_v2/api/fedlearner_webconsole/sparkapp/apis.py index 70dfc6339..feca6f55a 100644 --- a/web_console_v2/api/fedlearner_webconsole/sparkapp/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/sparkapp/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,54 +13,165 @@ # limitations under the License. 
# coding: utf-8 -import base64 from http import HTTPStatus +import logging -from flask import request from flask_restful import Api, Resource +from marshmallow import Schema, fields, post_load +from webargs.flaskparser import use_args, use_kwargs from fedlearner_webconsole.sparkapp.schema import SparkAppConfig -from fedlearner_webconsole.utils.decorators import jwt_required +from fedlearner_webconsole.auth.third_party_sso import credentials_required from fedlearner_webconsole.sparkapp.service import SparkAppService -from fedlearner_webconsole.exceptions import (InvalidArgumentException, - NotFoundException) +from fedlearner_webconsole.exceptions import (InternalException, NotFoundException) +from fedlearner_webconsole.utils.flask_utils import make_flask_response +from fedlearner_webconsole.swagger.models import schema_manager -class SparkAppsApi(Resource): - @jwt_required() - def post(self): - service = SparkAppService() - data = request.json +class SparkAppPodParameter(Schema): + cores = fields.Integer(required=True) + memory = fields.String(required=True) + instances = fields.Integer(required=False, load_default=1) + core_limit = fields.String(required=False) + volume_mounts = fields.List(fields.Dict(fields.String, fields.String), required=False) + envs = fields.Dict(fields.String, fields.String) - try: - config = SparkAppConfig.from_dict(data) - if config.files: - config.files = base64.b64decode(config.files) - except ValueError as err: - raise InvalidArgumentException(details=err) - res = service.submit_sparkapp(config=config) - return {'data': res.to_dict()}, HTTPStatus.CREATED +class SparkAppCreateParameter(Schema): + name = fields.String(required=True) + files = fields.String(required=False, load_default=None) + files_path = fields.String(required=False, load_default=None) + image_url = fields.String(required=False, load_default=None) + volumes = fields.List(fields.Dict(fields.String, fields.String), required=False, load_default=[]) + driver_config = fields.Nested(SparkAppPodParameter) + executor_config = fields.Nested(SparkAppPodParameter) + py_files = fields.List(fields.String, required=False, load_default=[]) + command = fields.List(fields.String, required=False, load_default=[]) + main_application = fields.String(required=True) + + @post_load + def make_spark_app_config(self, data, **kwargs): + del kwargs + return SparkAppConfig.from_dict(data) + + +class SparkAppsApi(Resource): + + @credentials_required + @use_args(SparkAppCreateParameter()) + def post(self, config: SparkAppConfig): + """Create sparkapp + --- + tags: + - sparkapp + description: Create sparkapp + parameters: + - in: body + name: body + schema: + $ref: '#/definitions/SparkAppCreateParameter' + responses: + 201: + description: The sparkapp is created + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.SparkAppInfo' + """ + service = SparkAppService() + return make_flask_response(data=service.submit_sparkapp(config=config), status=HTTPStatus.CREATED) class SparkAppApi(Resource): - @jwt_required() + + @credentials_required def get(self, sparkapp_name: str): + """Get sparkapp status + --- + tags: + - sparkapp + description: Get sparkapp status + parameters: + - in: path + name: sparkapp_name + schema: + type: string + responses: + 200: + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.SparkAppInfo' + """ service = SparkAppService() - return { - 'data': service.get_sparkapp_info(sparkapp_name).to_dict() - }, HTTPStatus.OK + return 
make_flask_response(data=service.get_sparkapp_info(sparkapp_name)) - @jwt_required() + @credentials_required def delete(self, sparkapp_name: str): + """Delete a sparkapp regardless of whether it exists + --- + tags: + - sparkapp + description: Delete a sparkapp regardless of whether it exists + parameters: + - in: path + name: sparkapp_name + schema: + type: string + responses: + 204: + description: the sparkapp is deleted + """ service = SparkAppService() try: - sparkapp_info = service.delete_sparkapp(sparkapp_name) - return {'data': sparkapp_info.to_dict()}, HTTPStatus.OK + service.delete_sparkapp(sparkapp_name) except NotFoundException: - return {'data': {'name': sparkapp_name}}, HTTPStatus.OK + logging.warning(f'[sparkapp] could not find sparkapp {sparkapp_name}') + + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class SparkAppLogApi(Resource): + + @credentials_required + @use_kwargs({'lines': fields.Integer(required=True, help='lines is required')}, location='query') + def get(self, sparkapp_name: str, lines: int): + """Get sparkapp logs + --- + tags: + - sparkapp + description: Get sparkapp logs + parameters: + - in: path + name: sparkapp_name + schema: + type: string + - in: query + name: lines + schema: + type: integer + responses: + 200: + content: + application/json: + schema: + type: array + items: + type: string + """ + max_limit = 10000 + if lines is None or lines > max_limit: + lines = max_limit + service = SparkAppService() + try: + return make_flask_response(data=service.get_sparkapp_log(sparkapp_name, lines)) + except Exception as e: # pylint: disable=broad-except + raise InternalException(details=f'error {e}') from e def initialize_sparkapps_apis(api: Api): api.add_resource(SparkAppsApi, '/sparkapps') api.add_resource(SparkAppApi, '/sparkapps/<string:sparkapp_name>') + api.add_resource(SparkAppLogApi, '/sparkapps/<string:sparkapp_name>/log') + + schema_manager.append(SparkAppCreateParameter) diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/apis_test.py b/web_console_v2/api/fedlearner_webconsole/sparkapp/apis_test.py new file mode 100644 index 000000000..27a0b332a --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sparkapp/apis_test.py @@ -0,0 +1,114 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
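Editor's note on the request-parsing flow in `sparkapp/apis.py` above: webargs validates the JSON body against a marshmallow schema, and the `@post_load` hook turns the validated dict into a config object before the view body runs. A standalone sketch under those assumptions (`DemoParameter` is made up; the PR's `SparkAppCreateParameter` returns `SparkAppConfig.from_dict(data)` instead):

```python
# Sketch only: DemoParameter is an illustrative schema, not the PR's class.
from marshmallow import Schema, fields, post_load


class DemoParameter(Schema):
    name = fields.String(required=True)
    instances = fields.Integer(required=False, load_default=1)

    @post_load
    def make_config(self, data, **kwargs):
        del kwargs
        # A real schema would build and return a config object here.
        return data


print(DemoParameter().load({'name': 'demo'}))
# -> {'name': 'demo', 'instances': 1}; load_default filled the omitted field.
```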
+ +# coding: utf-8 +from http import HTTPStatus +import os +import unittest +import base64 + +from unittest.mock import MagicMock, patch +from fedlearner_webconsole.proto import sparkapp_pb2 + +from testing.common import BaseTestCase +from envs import Envs + +BASE_DIR = Envs.BASE_DIR + + +class SparkAppApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + self._upload_path = os.path.join(BASE_DIR, 'test') + self._upload_path_patcher = patch('fedlearner_webconsole.sparkapp.service.UPLOAD_PATH', self._upload_path) + self._upload_path_patcher.start() + + def tearDown(self): + self._upload_path_patcher.stop() + super().tearDown() + + @patch('fedlearner_webconsole.sparkapp.service.SparkAppService.submit_sparkapp') + def test_submit_sparkapp(self, mock_submit_sparkapp: MagicMock): + mock_submit_sparkapp.return_value = sparkapp_pb2.SparkAppInfo() + tarball_file_path = os.path.join(BASE_DIR, 'testing/test_data/sparkapp.tar') + with open(tarball_file_path, 'rb') as f: + files_bin = f.read() + + post_response = self.post_helper( + '/api/v2/sparkapps', { + 'name': 'fl-transformer-yaml', + 'files': base64.b64encode(files_bin).decode(), + 'image_url': 'dockerhub.com', + 'driver_config': { + 'cores': 1, + 'memory': '200m', + 'core_limit': '4000m', + }, + 'executor_config': { + 'cores': 1, + 'memory': '200m', + 'instances': 5, + }, + 'command': ['data.csv', 'data.rd'], + 'main_application': '${prefix}/convertor.py' + }) + self.assertEqual(post_response.status_code, HTTPStatus.CREATED, post_response.json) + mock_submit_sparkapp.assert_called_once() + _, kwargs = mock_submit_sparkapp.call_args + self.assertEqual(kwargs['config'].name, 'fl-transformer-yaml') + + mock_submit_sparkapp.reset_mock() + mock_submit_sparkapp.return_value = sparkapp_pb2.SparkAppInfo() + post_response = self.post_helper( + '/api/v2/sparkapps', { + 'name': 'fl-transformer-yaml', + 'image_url': 'dockerhub.com', + 'driver_config': { + 'cores': 1, + 'memory': '200m', + 'core_limit': '4000m', + }, + 'executor_config': { + 'cores': 1, + 'memory': '200m', + 'instances': 5, + }, + 'command': ['data.csv', 'data.rd'], + 'main_application': '${prefix}/convertor.py' + }) + self.assertEqual(post_response.status_code, HTTPStatus.CREATED, post_response.json) + mock_submit_sparkapp.assert_called_once() + _, kwargs = mock_submit_sparkapp.call_args + self.assertEqual(kwargs['config'].name, 'fl-transformer-yaml') + + @patch('fedlearner_webconsole.sparkapp.service.SparkAppService.get_sparkapp_info') + def test_get_sparkapp_info(self, mock_get_sparkapp: MagicMock): + mock_get_sparkapp.return_value = sparkapp_pb2.SparkAppInfo() + + get_response = self.get_helper('/api/v2/sparkapps/fl-transformer-yaml') + self.assertEqual(get_response.status_code, HTTPStatus.OK) + + mock_get_sparkapp.assert_called_once_with('fl-transformer-yaml') + + @patch('fedlearner_webconsole.sparkapp.service.SparkAppService.delete_sparkapp') + def test_delete_sparkapp(self, mock_delete_sparkapp: MagicMock): + mock_delete_sparkapp.return_value = sparkapp_pb2.SparkAppInfo() + resp = self.delete_helper('/api/v2/sparkapps/fl-transformer-yaml') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + mock_delete_sparkapp.assert_called_once_with('fl-transformer-yaml') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/schema.py b/web_console_v2/api/fedlearner_webconsole/sparkapp/schema.py index 31d91f44a..9b65b04ce 100644 --- a/web_console_v2/api/fedlearner_webconsole/sparkapp/schema.py +++ 
b/web_console_v2/api/fedlearner_webconsole/sparkapp/schema.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,26 +13,28 @@ # limitations under the License. # coding: utf-8 -from envs import Envs +import base64 +import logging +from typing import Optional +from google.protobuf.json_format import ParseDict, MessageToDict +from fedlearner_webconsole.db import db +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.utils.images import generate_unified_version_image +from fedlearner_webconsole.proto import sparkapp_pb2 -from fedlearner_webconsole.utils.mixins import from_dict_mixin, to_dict_mixin -SPARK_POD_CONFIG_SERILIZE_FIELDS = [ - 'cores', 'memory', 'instances', 'core_limit', 'envs', 'volume_mounts' -] +class SparkPodConfig(object): + def __init__(self, spark_pod_config: sparkapp_pb2.SparkPodConfig): + self._spark_pod_config = spark_pod_config -@to_dict_mixin(to_dict_fields=SPARK_POD_CONFIG_SERILIZE_FIELDS, - ignore_none=True) -@from_dict_mixin(from_dict_fields=SPARK_POD_CONFIG_SERILIZE_FIELDS) -class SparkPodConfig(object): - def __init__(self): - self.cores = None - self.memory = None - self.instances = None - self.core_limit = None - self.volume_mounts = [] - self.envs = {} + @classmethod + def from_dict(cls, inputs: dict) -> 'SparkPodConfig': + spark_pod_config = sparkapp_pb2.SparkPodConfig() + envs = inputs.pop('envs', {}) + inputs['env'] = [{'name': k, 'value': v} for k, v in envs.items()] + spark_pod_config = ParseDict(inputs, spark_pod_config, ignore_unknown_fields=True) + return cls(spark_pod_config) def build_config(self) -> dict: """ build config for sparkoperator api @@ -41,171 +43,172 @@ def build_config(self) -> dict: Returns: dict: part of sparkoperator body """ - config = { - 'cores': self.cores, - 'memory': self.memory, - } - if self.instances: - config['instances'] = self.instances - if self.core_limit: - config['coreLimit'] = self.core_limit - if self.envs and len(self.envs) > 0: - config['env'] = [{ - 'name': k, - 'value': v - } for k, v in self.envs.items()] - if self.volume_mounts and len(self.volume_mounts) > 0: - config['volumeMounts'] = self.volume_mounts - - return config - - -SPARK_APP_CONFIG_SERILIZE_FIELDS = [ - 'name', 'files', 'files_path', 'volumes', 'image_url', 'driver_config', - 'executor_config', 'command', 'main_application', 'py_files' -] -SPARK_APP_CONFIG_REQUIRED_FIELDS = ['name', 'image_url'] - - -@to_dict_mixin(to_dict_fields=SPARK_APP_CONFIG_SERILIZE_FIELDS, - ignore_none=True) -@from_dict_mixin(from_dict_fields=SPARK_APP_CONFIG_SERILIZE_FIELDS, - required_fields=SPARK_APP_CONFIG_REQUIRED_FIELDS) + return MessageToDict(self._spark_pod_config, + including_default_value_fields=False, + preserving_proto_field_name=False) + + class SparkAppConfig(object): - def __init__(self): - self.name = None - # local files should be compressed to submit spark - self.files = None - # if nas/hdfs has those files, such as analyzer, only need files path \ - # to submit spark - self.files_path = None - self.image_url = None - self.volumes = [] - self.driver_config = SparkPodConfig() - self.executor_config = SparkPodConfig() - self.py_files = [] - self.command = [] - self.main_application = None - - def _replace_placeholder_with_real_path(self, exper: str, - sparkapp_path: str): + + def __init__(self, 
spark_app_config: sparkapp_pb2.SparkAppConfig): + self._spark_app_config = spark_app_config + self.files: Optional[bytes] = None + + @property + def files_path(self): + return self._spark_app_config.files_path + + @property + def name(self): + return self._spark_app_config.name + + @classmethod + def from_dict(cls, inputs: dict) -> 'SparkAppConfig': + self = cls(sparkapp_pb2.SparkAppConfig()) + if 'files' in inputs: + input_files = inputs.pop('files') + if isinstance(input_files, str): + self.files = base64.b64decode(input_files) + elif isinstance(input_files, (bytearray, bytes)): + self.files = input_files + else: + logging.debug(f'[SparkAppConfig]: ignoring files field, expected type is str or bytes, \ + actually is {type(input_files)}') + self._spark_app_config = ParseDict(inputs, self._spark_app_config, ignore_unknown_fields=True) + return self + + def _replace_placeholder_with_real_path(self, exper: str, sparkapp_path: str) -> str: """ replace ${prefix} with real path Args: + exper (str): sparkapp expression in body sparkapp_path (str): sparkapp real path + + Returns: + the expression with ${prefix} replaced by the real path """ return exper.replace('${prefix}', sparkapp_path) def build_config(self, sparkapp_path: str) -> dict: - return { - 'apiVersion': 'sparkoperator.k8s.io/v1beta2', - 'kind': 'SparkApplication', - 'metadata': { - 'name': self.name, - 'namespace': Envs.K8S_NAMESPACE, - 'labels': Envs.K8S_LABEL_INFO - }, - 'spec': { - 'type': - 'Python', - 'pythonVersion': - '3', - 'mode': - 'cluster', - 'image': - self.image_url, - 'imagePullPolicy': - 'Always', - 'volumes': - self.volumes, - 'mainApplicationFile': - self._replace_placeholder_with_real_path( - self.main_application, sparkapp_path), - 'arguments': [ - self._replace_placeholder_with_real_path(c, sparkapp_path) - for c in self.command - ], - 'deps': { - 'pyFiles': [ - self._replace_placeholder_with_real_path( - f, sparkapp_path) for f in self.py_files - ] - }, - 'sparkConf': { - 'spark.shuffle.service.enabled': 'false', - }, - 'sparkVersion': - '3.0.0', - 'restartPolicy': { - 'type': 'Never', - }, - 'dynamicAllocation': { - 'enabled': False, - }, - 'driver': { - **self.driver_config.build_config(), - 'labels': { - 'version': '3.0.0 + # sparkapp configuration limitation: executor instances are capped at 30; + # any requested excess becomes the dynamic-allocation maximum instead + if self._spark_app_config.executor_config.instances > 30: + self._spark_app_config.dynamic_allocation.max_executors = self._spark_app_config.executor_config.instances + self._spark_app_config.executor_config.instances = 30 + + with db.session_scope() as session: + setting_service = SettingService(session) + sys_variables = setting_service.get_system_variables_dict() + namespace = setting_service.get_namespace() + labels = sys_variables.get('labels') + if not self._spark_app_config.image_url: + self._spark_app_config.image_url = generate_unified_version_image(sys_variables.get('spark_image')) + for volume in sys_variables.get('volumes_list', []): + self._spark_app_config.volumes.append( + ParseDict(volume, sparkapp_pb2.Volume(), ignore_unknown_fields=True)) + for volume_mount in sys_variables.get('volume_mounts_list', []): + volume_mount_pb = ParseDict(volume_mount, sparkapp_pb2.VolumeMount(), ignore_unknown_fields=True) + self._spark_app_config.executor_config.volume_mounts.append(volume_mount_pb) + self._spark_app_config.driver_config.volume_mounts.append(volume_mount_pb) + envs_list = [] + for env in sys_variables.get('envs_list', []): + envs_list.append(ParseDict(env, sparkapp_pb2.Env())) + 
self._spark_app_config.driver_config.env.extend(envs_list) + self._spark_app_config.executor_config.env.extend(envs_list) + base_config = { + 'apiVersion': 'sparkoperator.k8s.io/v1beta2', + 'kind': 'SparkApplication', + 'metadata': { + 'name': self._spark_app_config.name, + 'namespace': namespace, + 'labels': labels, + # Aimed for resource queue management purpose. + # It should work fine on where there is no resource queue service. + 'annotations': { + 'queue': 'fedlearner-spark', + 'schedulerName': 'batch', }, - 'serviceAccount': 'spark', }, - 'executor': { - **self.executor_config.build_config(), - 'labels': { - 'version': '3.0.0' + 'spec': { + 'type': + 'Python', + 'timeToLiveSeconds': + 1800, + 'pythonVersion': + '3', + 'mode': + 'cluster', + 'image': + self._spark_app_config.image_url, + 'imagePullPolicy': + 'IfNotPresent', + 'volumes': [ + MessageToDict(volume, including_default_value_fields=False, preserving_proto_field_name=False) + for volume in self._spark_app_config.volumes + ], + 'arguments': [ + self._replace_placeholder_with_real_path(c, sparkapp_path) + for c in self._spark_app_config.command + ], + 'sparkConf': { + 'spark.shuffle.service.enabled': 'false', }, + 'sparkVersion': + '3.0.0', + 'restartPolicy': { + 'type': 'Never', + }, + 'dynamicAllocation': + MessageToDict(self._spark_app_config.dynamic_allocation, + including_default_value_fields=False, + preserving_proto_field_name=False), + 'driver': { + **SparkPodConfig(self._spark_app_config.driver_config).build_config(), + 'labels': { + 'version': '3.0.0' + }, + 'serviceAccount': 'spark', + }, + 'executor': { + **SparkPodConfig(self._spark_app_config.executor_config).build_config(), + 'labels': { + 'version': '3.0.0' + }, + } } } - } - - -SPARK_APP_INFO_SERILIZE_FIELDS = [ - 'name', 'namespace', 'command', 'driver', 'executor', 'image_url', - 'main_application', 'spark_version', 'type', 'state' -] - - -@to_dict_mixin(to_dict_fields=SPARK_APP_INFO_SERILIZE_FIELDS, ignore_none=True) -@from_dict_mixin(from_dict_fields=SPARK_APP_INFO_SERILIZE_FIELDS) -class SparkAppInfo(object): - @classmethod - def from_k8s_resp(cls, resp): - sparkapp_info = cls() - if 'name' in resp['metadata']: - sparkapp_info.name = resp['metadata']['name'] - elif 'name' in resp['details']: - sparkapp_info.name = resp['details']['name'] - sparkapp_info.namespace = resp['metadata'].get('namespace', None) - sparkapp_info.state = None - if 'status' in resp: - if isinstance(resp['status'], str): - sparkapp_info.state = None - elif isinstance(resp['status'], dict): - sparkapp_info.state = resp.get('status', - {}).get('applicationState', - {}).get('state', None) - sparkapp_info.command = resp.get('spec', {}).get('arguments', None) - sparkapp_info.executor = SparkPodConfig.from_dict( - resp.get('spec', {}).get('executor', {})) - sparkapp_info.driver = SparkPodConfig.from_dict( - resp.get('spec', {}).get('driver', {})) - sparkapp_info.image_url = resp.get('spec', {}).get('image', None) - sparkapp_info.main_application = resp.get('spec', {}).get( - 'mainApplicationFile', None) - sparkapp_info.spark_version = resp.get('spec', - {}).get('sparkVersion', None) - sparkapp_info.type = resp.get('spec', {}).get('type', None) - - return sparkapp_info - - def __init__(self): - self.name = None - self.state = None - self.namespace = None - self.command = None - self.driver = SparkPodConfig() - self.executor = SparkPodConfig() - self.image_url = None - self.main_application = None - self.spark_version = None - self.type = None + if self._spark_app_config.main_application: + 
base_config['spec']['mainApplicationFile'] = self._replace_placeholder_with_real_path( + self._spark_app_config.main_application, sparkapp_path) + if self._spark_app_config.py_files: + base_config['spec']['deps'] = { + 'pyFiles': [ + self._replace_placeholder_with_real_path(f, sparkapp_path) + for f in self._spark_app_config.py_files + ] + } + return base_config + + +def from_k8s_resp(resp: dict) -> sparkapp_pb2.SparkAppInfo: + sparkapp_info = sparkapp_pb2.SparkAppInfo() + if 'name' in resp['metadata']: + sparkapp_info.name = resp['metadata']['name'] + elif 'name' in resp['details']: + sparkapp_info.name = resp['details']['name'] + sparkapp_info.namespace = resp['metadata'].get('namespace', '') + if 'status' in resp: + if isinstance(resp['status'], str): + sparkapp_info.state = resp['status'] + elif isinstance(resp['status'], dict): + sparkapp_info.state = resp.get('status', {}).get('applicationState', {}).get('state', '') + sparkapp_info.command.extend(resp.get('spec', {}).get('arguments', [])) + sparkapp_info.executor.MergeFrom( + ParseDict(resp.get('spec', {}).get('executor', {}), sparkapp_pb2.SparkPodConfig(), ignore_unknown_fields=True)) + sparkapp_info.driver.MergeFrom( + ParseDict(resp.get('spec', {}).get('driver', {}), sparkapp_pb2.SparkPodConfig(), ignore_unknown_fields=True)) + sparkapp_info.image_url = resp.get('spec', {}).get('image', '') + sparkapp_info.main_application = resp.get('spec', {}).get('mainApplicationFile', '') + sparkapp_info.spark_version = resp.get('spec', {}).get('sparkVersion', '3') + sparkapp_info.type = resp.get('spec', {}).get('type', '') + + return sparkapp_info diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/schema_test.py b/web_console_v2/api/fedlearner_webconsole/sparkapp/schema_test.py new file mode 100644 index 000000000..3aef5a4d1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sparkapp/schema_test.py @@ -0,0 +1,202 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
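Editor's note on the `ParseDict`/`MessageToDict` round trip that the rewritten `schema.py` relies on: `ParseDict` fills a protobuf message from a plain dict (with `ignore_unknown_fields=True` silently dropping extra keys), and `MessageToDict` converts back, camel-casing field names unless `preserving_proto_field_name=True` is set. A tiny sketch using the well-known `Struct` type so it runs without the generated `sparkapp_pb2` modules:

```python
# Sketch only: Struct stands in for the generated sparkapp_pb2 messages.
from google.protobuf.json_format import MessageToDict, ParseDict
from google.protobuf.struct_pb2 import Struct

msg = ParseDict({'memory': '200m', 'coreLimit': '4000m'}, Struct())
print(MessageToDict(msg))  # -> {'memory': '200m', 'coreLimit': '4000m'}
```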
+ +# coding: utf-8 +import unittest +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.proto import sparkapp_pb2 +from fedlearner_webconsole.sparkapp.schema import SparkAppConfig, SparkPodConfig, from_k8s_resp + + +class SparkAppSchemaTest(NoWebServerTestCase): + + def test_spark_pod_config(self): + inputs = {'cores': 1, 'memory': '200m', 'core_limit': '4000m', 'envs': {'HELLO': '1'}} + spark_pod_config = SparkPodConfig.from_dict(inputs) + config = spark_pod_config.build_config() + self.assertDictEqual(config, { + 'cores': 1, + 'memory': '200m', + 'coreLimit': '4000m', + 'env': [{ + 'name': 'HELLO', + 'value': '1' + }] + }) + + def test_sparkapp_config(self): + inputs = { + 'name': 'test', + 'files': bytes(100), + 'image_url': 'dockerhub.com', + 'driver_config': { + 'cores': 1, + 'memory': '200m', + 'core_limit': '4000m', + 'envs': { + 'HELLO': '1' + }, + 'volumeMounts': [{ + 'mountPath': '/data', + 'name': 'data' + }] + }, + 'executor_config': { + 'cores': 1, + 'memory': '200m', + 'instances': 64, + 'envs': { + 'HELLO': '1' + }, + 'volumeMounts': [{ + 'mountPath': '/data', + 'name': 'data', + 'unknown': '2', + }] + }, + 'command': ['hhh', 'another'], + 'main_application': '${prefix}/main.py', + 'volumes': [{ + 'name': 'data', + 'hostPath': { + 'path': '/data', + }, + 'unknown': '1', + }] + } + sparkapp_config = SparkAppConfig.from_dict(inputs) + config = sparkapp_config.build_config('./test') + self.assertEqual(config['spec']['mainApplicationFile'], './test/main.py') + self.assertNotIn('instances', config['spec']['driver']) + self.assertEqual([{'name': 'data', 'hostPath': {'path': '/data',}}], config['spec']['volumes']) + self.assertEqual(config['spec']['executor']['instances'], 30) + self.assertEqual(config['spec']['dynamicAllocation']['maxExecutors'], 64) + + def test_sparkapp_dynamic_allocation(self): + inputs = { + 'name': 'test', + 'image_url': 'test.com/test/hhh:1', + 'dynamic_allocation': { + 'enabled': True, + 'initialExecutors': 2, + 'minExecutors': 2, + 'maxExecutors': 10 + } + } + sparkapp_config: SparkAppConfig = SparkAppConfig.from_dict(inputs) + config = sparkapp_config.build_config('./test') + self.assertEqual(len(config['spec']['dynamicAllocation']), 4) + self.assertTrue(config['spec']['dynamicAllocation']['enabled']) + + def test_sparkapp_info(self): + resp = { + 'apiVersion': 'sparkoperator.k8s.io/v1beta2', + 'kind': 'SparkApplication', + 'metadata': { + 'creationTimestamp': + '2021-05-18T08:59:16Z', + 'generation': + 1, + 'name': + 'fl-transformer-yaml', + 'namespace': + 'fedlearner', + 'resourceVersion': + '432649442', + 'selfLink': + '/apis/sparkoperator.k8s.io/v1beta2/namespaces/fedlearner/sparkapplications/fl-transformer-yaml', + 'uid': + '52d66d27-b7b7-11eb-b9df-b8599fdb0aac' + }, + 'spec': { + 'arguments': ['data.csv', 'data_tfrecords/'], + 'driver': { + 'coreLimit': '4000m', + 'cores': 1, + 'labels': { + 'version': '3.0.0' + }, + 'memory': '512m', + 'serviceAccount': 'spark', + 'volumeMounts': [{ + 'mountPath': '/data', + 'name': 'data', + 'readOnly': True + }], + }, + 'dynamicAllocation': { + 'enabled': False + }, + 'executor': { + 'cores': 1, + 'instances': 1, + 'labels': { + 'version': '3.0.0' + }, + 'memory': '512m', + 'volumeMounts': [{ + 'mountPath': '/data', + 'name': 'data', + 'readOnly': True + }], + }, + 'image': 'dockerhub.com', + 'imagePullPolicy': 'Always', + 'mainApplicationFile': 'transformer.py', + 'mode': 'cluster', + 'pythonVersion': '3', + 'restartPolicy': { + 'type': 'Never'
+ }, + 'sparkConf': { + 'spark.shuffle.service.enabled': 'false' + }, + 'sparkVersion': '3.0.0', + 'type': 'Python', + }, + 'status': { + 'applicationState': { + 'state': 'COMPLETED' + }, + 'driverInfo': { + 'podName': 'fl-transformer-yaml-driver', + 'webUIAddress': '11.249.131.12:4040', + 'webUIPort': 4040, + 'webUIServiceName': 'fl-transformer-yaml-ui-svc' + }, + 'executionAttempts': 1, + 'executorState': { + 'fl-transformer-yaml-bdc15979a314310b-exec-1': 'PENDING', + 'fl-transformer-yaml-bdc15979a314310b-exec-2': 'COMPLETED' + }, + 'lastSubmissionAttemptTime': '2021-05-18T10:31:13Z', + 'sparkApplicationId': 'spark-a380bfd520164d828a334bcb3a6404f9', + 'submissionAttempts': 1, + 'submissionID': '5bc7e2e7-cc0f-420c-8bc7-138b651a1dde', + 'terminationTime': '2021-05-18T10:32:08Z' + } + } + + sparkapp_info = from_k8s_resp(resp) + self.assertEqual(sparkapp_info.namespace, 'fedlearner') + self.assertEqual(sparkapp_info.name, 'fl-transformer-yaml') + self.assertEqual(sparkapp_info.driver.volume_mounts[0], + sparkapp_pb2.VolumeMount(mount_path='/data', name='data', read_only=True)) + self.assertEqual(sparkapp_info.executor.volume_mounts[0], + sparkapp_pb2.VolumeMount(mount_path='/data', name='data', read_only=True)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/service.py b/web_console_v2/api/fedlearner_webconsole/sparkapp/service.py index 21e612777..68a14f4be 100644 --- a/web_console_v2/api/fedlearner_webconsole/sparkapp/service.py +++ b/web_console_v2/api/fedlearner_webconsole/sparkapp/service.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
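Editor's note before the next hunk of `sparkapp/service.py`: the submit path below persists the uploaded bytes to a temporary tarball and extracts it into the sparkapp upload directory via the project's `FileOperator.copy_to(..., extract=True)`. A standard-library-only sketch of the same idea (`stage_files` is an illustrative name, not the PR's API):

```python
# Sketch only: standard-library equivalent of staging an uploaded tarball.
import os
import tarfile
import tempfile


def stage_files(files: bytes, target_dir: str) -> None:
    """Persist tarball bytes to a temp file, then extract into target_dir."""
    os.makedirs(target_dir, exist_ok=True)
    with tempfile.TemporaryDirectory() as temp_dir:
        tar_path = os.path.join(temp_dir, 'files.tar')
        with open(tar_path, 'wb') as fwrite:
            fwrite.write(files)
        with tarfile.open(tar_path) as tar:
            tar.extractall(target_dir)
```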
@@ -20,29 +20,23 @@ from typing import Tuple from envs import Envs +from fedlearner_webconsole.proto import sparkapp_pb2 from fedlearner_webconsole.utils.file_manager import FileManager -from fedlearner_webconsole.sparkapp.schema import SparkAppConfig, SparkAppInfo -from fedlearner_webconsole.utils.k8s_client import k8s_client -from fedlearner_webconsole.utils.tars import TarCli +from fedlearner_webconsole.sparkapp.schema import SparkAppConfig, from_k8s_resp +from fedlearner_webconsole.k8s.k8s_client import (SPARKOPERATOR_CUSTOM_GROUP, SPARKOPERATOR_CUSTOM_VERSION, CrdKind, + k8s_client, SPARKOPERATOR_NAMESPACE) +from fedlearner_webconsole.utils.file_operator import FileOperator UPLOAD_PATH = Envs.STORAGE_ROOT class SparkAppService(object): + def __init__(self) -> None: self._base_dir = os.path.join(UPLOAD_PATH, 'sparkapp') - self._file_client = FileManager() - - self._file_client.mkdir(self._base_dir) - - def _clear_and_make_an_empty_dir(self, dir_name: str): - try: - self._file_client.remove(dir_name) - except Exception as err: # pylint: disable=broad-except - logging.error('failed to remove %s with exception %s', dir_name, - err) - finally: - self._file_client.mkdir(dir_name) + self._file_manager = FileManager() + self._file_operator = FileOperator() + self._file_manager.mkdir(self._base_dir) def _get_sparkapp_upload_path(self, name: str) -> Tuple[bool, str]: """get upload path for specific sparkapp @@ -57,50 +51,10 @@ def _get_sparkapp_upload_path(self, name: str) -> Tuple[bool, str]: """ sparkapp_path = os.path.join(self._base_dir, name) - existable = False - try: - self._file_client.ls(sparkapp_path) - existable = True - except ValueError: - existable = False - + existable = self._file_manager.isdir(sparkapp_path) return existable, sparkapp_path - def _copy_files_to_target_filesystem(self, source_filesystem_path: str, - target_filesystem_path: str) -> bool: - """ copy files to remote filesystem - - untar if file is tared - - copy files to remote filesystem - - Args: - source_filesystem_path (str): local filesystem - target_filesystem_path (str): remote filesystem - - Returns: - bool: whether success - """ - temp_path = source_filesystem_path - if source_filesystem_path.find('.tar') != -1: - temp_path = os.path.abspath( - os.path.join(source_filesystem_path, '../tmp')) - os.makedirs(temp_path) - TarCli.untar_file(source_filesystem_path, temp_path) - - for root, dirs, files in os.walk(temp_path): - relative_path = os.path.relpath(root, temp_path) - for f in files: - file_path = os.path.join(root, f) - remote_file_path = os.path.join(target_filesystem_path, - relative_path, f) - self._file_client.copy(file_path, remote_file_path) - for d in dirs: - remote_dir_path = os.path.join(target_filesystem_path, - relative_path, d) - self._file_client.mkdir(remote_dir_path) - - return True - - def submit_sparkapp(self, config: SparkAppConfig) -> SparkAppInfo: + def submit_sparkapp(self, config: SparkAppConfig) -> sparkapp_pb2.SparkAppInfo: """submit sparkapp Args: @@ -112,25 +66,27 @@ def submit_sparkapp(self, config: SparkAppConfig) -> SparkAppInfo: Returns: SparkAppInfo: resp of sparkapp """ + logging.info(f'submit sparkapp with config:{config}') sparkapp_path = config.files_path - if config.files_path is None: + if not config.files_path: _, sparkapp_path = self._get_sparkapp_upload_path(config.name) - self._clear_and_make_an_empty_dir(sparkapp_path) + self._file_operator.clear_and_make_an_empty_dir(sparkapp_path) - with tempfile.TemporaryDirectory() as temp_dir: - tar_path = 
os.path.join(temp_dir, 'files.tar') - with open(tar_path, 'wb') as fwrite: - fwrite.write(config.files) - self._copy_files_to_target_filesystem( - source_filesystem_path=tar_path, - target_filesystem_path=sparkapp_path) + # In case there are no files + if config.files is not None: + with tempfile.TemporaryDirectory() as temp_dir: + tar_path = os.path.join(temp_dir, 'files.tar') + with open(tar_path, 'wb') as fwrite: + fwrite.write(config.files) + self._file_operator.copy_to(tar_path, sparkapp_path, extract=True) config_dict = config.build_config(sparkapp_path) logging.info(f'submit sparkapp, config: {config_dict}') - resp = k8s_client.create_sparkapplication(config_dict) - return SparkAppInfo.from_k8s_resp(resp) + resp = k8s_client.create_app(config_dict, SPARKOPERATOR_CUSTOM_GROUP, SPARKOPERATOR_CUSTOM_VERSION, + CrdKind.SPARK_APPLICATION.value) + return from_k8s_resp(resp) - def get_sparkapp_info(self, name: str) -> SparkAppInfo: + def get_sparkapp_info(self, name: str) -> sparkapp_pb2.SparkAppInfo: """ get sparkapp info Args: @@ -143,9 +99,21 @@ def get_sparkapp_info(self, name: str) -> SparkAppInfo: SparkAppInfo: resp of sparkapp """ resp = k8s_client.get_sparkapplication(name) - return SparkAppInfo.from_k8s_resp(resp) + return from_k8s_resp(resp) + + def get_sparkapp_log(self, name: str, lines: int) -> str: + """ get sparkapp log + + Args: + name (str): sparkapp name + lines (int): max lines of log + + Returns: + str: sparkapp log + """ + return k8s_client.get_pod_log(f'{name}-driver', SPARKOPERATOR_NAMESPACE, tail_lines=lines) - def delete_sparkapp(self, name: str) -> SparkAppInfo: + def delete_sparkapp(self, name: str) -> sparkapp_pb2.SparkAppInfo: """delete sparkapp - delete sparkapp. If failed, raise exception - delete the tmp filesystem @@ -162,9 +130,9 @@ def delete_sparkapp(self, name: str) -> SparkAppInfo: """ existable, sparkapp_path = self._get_sparkapp_upload_path(name) if existable: - self._file_client.remove(sparkapp_path) + self._file_manager.remove(sparkapp_path) resp = k8s_client.delete_sparkapplication(name) - sparkapp_info = SparkAppInfo.from_k8s_resp(resp) + sparkapp_info = from_k8s_resp(resp) return sparkapp_info diff --git a/web_console_v2/api/fedlearner_webconsole/sparkapp/service_test.py b/web_console_v2/api/fedlearner_webconsole/sparkapp/service_test.py new file mode 100644 index 000000000..a485547cd --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sparkapp/service_test.py @@ -0,0 +1,278 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
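Editor's note on `get_sparkapp_log` above: the Spark operator names the driver pod `<application>-driver`, hence the `f'{name}-driver'` lookup. A hedged sketch of the equivalent call with the official `kubernetes` Python client rather than the project's `k8s_client` wrapper (namespace and config loader are illustrative):

```python
# Sketch only: uses the official kubernetes client; the PR goes through
# the project's own k8s_client wrapper instead.
from kubernetes import client, config


def get_driver_log(app_name: str, namespace: str = 'default', lines: int = 100) -> str:
    config.load_incluster_config()  # use config.load_kube_config() off-cluster
    core = client.CoreV1Api()
    # Spark operator convention: the driver pod is named '<application>-driver'.
    return core.read_namespaced_pod_log(name=f'{app_name}-driver',
                                        namespace=namespace,
                                        tail_lines=lines)
```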
+
+# coding: utf-8
+import os
+import shutil
+import unittest
+
+from unittest.mock import MagicMock, patch
+
+from envs import Envs
+from fedlearner_webconsole.sparkapp.schema import SparkAppConfig
+from fedlearner_webconsole.sparkapp.service import SparkAppService
+from testing.common import NoWebServerTestCase
+
+BASE_DIR = Envs.BASE_DIR
+
+
+class SparkAppServiceTest(NoWebServerTestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        self._upload_path = os.path.join(BASE_DIR, 'test-spark')
+        os.makedirs(self._upload_path)
+        self._patch_upload_path = patch('fedlearner_webconsole.sparkapp.service.UPLOAD_PATH', self._upload_path)
+        self._patch_upload_path.start()
+        self._sparkapp_service = SparkAppService()
+
+    def tearDown(self) -> None:
+        self._patch_upload_path.stop()
+        shutil.rmtree(self._upload_path)
+        return super().tearDown()
+
+    def _get_tar_file_path(self) -> str:
+        return os.path.join(BASE_DIR, 'testing/test_data/sparkapp.tar')
+
+    def test_get_sparkapp_upload_path(self):
+        existable, sparkapp_path = self._sparkapp_service._get_sparkapp_upload_path('test')  # pylint: disable=protected-access
+        self.assertFalse(existable)
+
+        os.makedirs(sparkapp_path)
+        existable, _ = self._sparkapp_service._get_sparkapp_upload_path('test')  # pylint: disable=protected-access
+        self.assertTrue(existable)
+
+    @patch('fedlearner_webconsole.k8s.k8s_client.k8s_client.create_app')
+    def test_submit_sparkapp(self, mock_create_app: MagicMock):
+        mock_create_app.return_value = {
+            'apiVersion': 'sparkoperator.k8s.io/v1beta2',
+            'kind': 'SparkApplication',
+            'metadata': {
+                'creationTimestamp':
+                    '2021-05-18T08:59:16Z',
+                'generation':
+                    1,
+                'name':
+                    'fl-transformer-yaml',
+                'namespace':
+                    'fedlearner',
+                'resourceVersion':
+                    '432649442',
+                'selfLink':
+                    '/apis/sparkoperator.k8s.io/v1beta2/namespaces/fedlearner/sparkapplications/fl-transformer-yaml',
+                'uid':
+                    '52d66d27-b7b7-11eb-b9df-b8599fdb0aac'
+            },
+            'spec': {
+                'arguments': ['data.csv', 'data_tfrecords/'],
+                'driver': {
+                    'coreLimit': '4000m',
+                    'cores': 1,
+                    'labels': {
+                        'version': '3.0.0'
+                    },
+                    'memory': '512m',
+                    'serviceAccount': 'spark',
+                },
+                'dynamicAllocation': {
+                    'enabled': False
+                },
+                'executor': {
+                    'cores': 1,
+                    'instances': 1,
+                    'labels': {
+                        'version': '3.0.0'
+                    },
+                    'memory': '512m',
+                },
+                'image': 'dockerhub.com',
+                'imagePullPolicy': 'Always',
+                'mainApplicationFile': 'transformer.py',
+                'mode': 'cluster',
+                'pythonVersion': '3',
+                'restartPolicy': {
+                    'type': 'Never'
+                },
+                'sparkConf': {
+                    'spark.shuffle.service.enabled': 'false'
+                },
+                'sparkVersion': '3.0.0',
+                'type': 'Python',
+            },
+            'status': {
+                'applicationState': {
+                    'state': 'COMPLETED'
+                },
+                'driverInfo': {
+                    'podName': 'fl-transformer-yaml-driver',
+                    'webUIAddress': '11.249.131.12:4040',
+                    'webUIPort': 4040,
+                    'webUIServiceName': 'fl-transformer-yaml-ui-svc'
+                },
+                'executionAttempts': 1,
+                'executorState': {
+                    'fl-transformer-yaml-bdc15979a314310b-exec-1': 'PENDING',
+                    'fl-transformer-yaml-bdc15979a314310b-exec-2': 'COMPLETED'
+                },
+                'lastSubmissionAttemptTime': '2021-05-18T10:31:13Z',
+                'sparkApplicationId': 'spark-a380bfd520164d828a334bcb3a6404f9',
+                'submissionAttempts': 1,
+                'submissionID': '5bc7e2e7-cc0f-420c-8bc7-138b651a1dde',
+                'terminationTime': '2021-05-18T10:32:08Z'
+            }
+        }
+
+        tarball_file_path = self._get_tar_file_path()
+        with open(tarball_file_path, 'rb') as f:
+            files_bin = f.read()
+
+        inputs = {
+            'name': 'fl-transformer-yaml',
+            'files': files_bin,
+            'image_url': 'dockerhub.com',
+            'driver_config': {
+                'cores': 1,
+                'memory': '200m',
+                'coreLimit': '4000m',
+            },
+            'executor_config': {
+                'cores': 1,
+                'memory': '200m',
+                'instances': 5,
+            },
+            'command': ['data.csv', 'data.rd'],
+            'main_application': '${prefix}/convertor.py'
+        }
+        config = SparkAppConfig.from_dict(inputs)
+        resp = self._sparkapp_service.submit_sparkapp(config)
+
+        self.assertTrue(
+            os.path.exists(os.path.join(self._upload_path, 'sparkapp', 'fl-transformer-yaml', 'convertor.py')))
+        mock_create_app.assert_called_once()
+        args = mock_create_app.call_args[0]
+        self.assertEqual(len(args), 4)
+        self.assertEqual(args[1:], ('sparkoperator.k8s.io', 'v1beta2', 'sparkapplications'))
+        self.assertEqual(resp.namespace, 'fedlearner')
+
+    @patch('fedlearner_webconsole.k8s.k8s_client.k8s_client.get_sparkapplication')
+    def test_get_sparkapp_info(self, mock_get_sparkapp: MagicMock):
+        mock_get_sparkapp.return_value = {
+            'apiVersion': 'sparkoperator.k8s.io/v1beta2',
+            'kind': 'SparkApplication',
+            'metadata': {
+                'creationTimestamp':
+                    '2021-05-18T08:59:16Z',
+                'generation':
+                    1,
+                'name':
+                    'fl-transformer-yaml',
+                'namespace':
+                    'fedlearner',
+                'resourceVersion':
+                    '432649442',
+                'selfLink':
+                    '/apis/sparkoperator.k8s.io/v1beta2/namespaces/fedlearner/sparkapplications/fl-transformer-yaml',
+                'uid':
+                    '52d66d27-b7b7-11eb-b9df-b8599fdb0aac'
+            },
+            'spec': {
+                'arguments': ['data.csv', 'data_tfrecords/'],
+                'driver': {
+                    'coreLimit': '4000m',
+                    'cores': 1,
+                    'labels': {
+                        'version': '3.0.0'
+                    },
+                    'memory': '512m',
+                    'serviceAccount': 'spark',
+                },
+                'dynamicAllocation': {
+                    'enabled': False
+                },
+                'executor': {
+                    'cores': 1,
+                    'instances': 1,
+                    'labels': {
+                        'version': '3.0.0'
+                    },
+                    'memory': '512m',
+                },
+                'image': 'dockerhub.com',
+                'imagePullPolicy': 'Always',
+                'mainApplicationFile': 'transformer.py',
+                'mode': 'cluster',
+                'pythonVersion': '3',
+                'restartPolicy': {
+                    'type': 'Never'
+                },
+                'sparkConf': {
+                    'spark.shuffle.service.enabled': 'false'
+                },
+                'sparkVersion': '3.0.0',
+                'type': 'Python',
+            },
+            'status': {
+                'applicationState': {
+                    'state': 'COMPLETED'
+                },
+                'driverInfo': {
+                    'podName': 'fl-transformer-yaml-driver',
+                    'webUIAddress': '11.249.131.12:4040',
+                    'webUIPort': 4040,
+                    'webUIServiceName': 'fl-transformer-yaml-ui-svc'
+                },
+                'executionAttempts': 1,
+                'executorState': {
+                    'fl-transformer-yaml-bdc15979a314310b-exec-1': 'PENDING',
+                    'fl-transformer-yaml-bdc15979a314310b-exec-2': 'COMPLETED'
+                },
+                'lastSubmissionAttemptTime': '2021-05-18T10:31:13Z',
+                'sparkApplicationId': 'spark-a380bfd520164d828a334bcb3a6404f9',
+                'submissionAttempts': 1,
+                'submissionID': '5bc7e2e7-cc0f-420c-8bc7-138b651a1dde',
+                'terminationTime': '2021-05-18T10:32:08Z'
+            }
+        }
+
+        resp = self._sparkapp_service.get_sparkapp_info('fl-transformer-yaml')
+
+        mock_get_sparkapp.assert_called_once()
+        self.assertEqual(resp.namespace, 'fedlearner')
+
+    @patch('fedlearner_webconsole.sparkapp.service.SparkAppService._get_sparkapp_upload_path')
+    @patch('fedlearner_webconsole.utils.file_manager.FileManager.remove')
+    @patch('fedlearner_webconsole.k8s.k8s_client.k8s_client.delete_sparkapplication')
+    def test_delete_sparkapp(self, mock_delete_sparkapp: MagicMock, mock_file_manager_remove: MagicMock,
+                             mock_upload_path: MagicMock):
+        mock_delete_sparkapp.return_value = {
+            'kind': 'Status',
+            'apiVersion': 'v1',
+            'metadata': {},
+            'status': 'Success',
+            'details': {
+                'name': 'fl-transformer-yaml',
+                'group': 'sparkoperator.k8s.io',
+                'kind': 'sparkapplications',
+                'uid': '52d66d27-b7b7-11eb-b9df-b8599fdb0aac'
+            }
+        }
+        mock_upload_path.return_value = (True, 'test')
+        resp = self._sparkapp_service.delete_sparkapp(name='fl-transformer-yaml')
+        mock_delete_sparkapp.assert_called_once()
+        mock_file_manager_remove.assert_called_once()
+        self.assertEqual(resp.name, 'fl-transformer-yaml')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/swagger/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/swagger/BUILD.bazel
new file mode 100644
index 000000000..38a8e3d24
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/swagger/BUILD.bazel
@@ -0,0 +1,12 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+package(default_visibility = ["//web_console_v2/api:console_api_package"])
+
+py_library(
+    name = "models_lib",
+    srcs = ["models.py"],
+    imports = ["../.."],
+    deps = [
+        "@common_marshmallow//:pkg",
+    ],
+)
diff --git a/web_console_v2/api/fedlearner_webconsole/swagger/__init__.py b/web_console_v2/api/fedlearner_webconsole/swagger/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/web_console_v2/api/fedlearner_webconsole/swagger/models.py b/web_console_v2/api/fedlearner_webconsole/swagger/models.py
new file mode 100644
index 000000000..b2eddbe52
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/swagger/models.py
@@ -0,0 +1,35 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
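+#
+# Usage sketch for the schema registry defined below (hedged: `ExampleSchema`
+# is an illustrative marshmallow schema, not part of this change):
+#
+#     from marshmallow import Schema, fields
+#
+#     class ExampleSchema(Schema):
+#         name = fields.String()
+#
+#     schema_manager.append(ExampleSchema())   # re-appending the same object is a no-op
+#     all_schemas = schema_manager.get_schemas()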
+# + +from typing import List + +from marshmallow import Schema + + +class _SchemaManager(object): + + def __init__(self): + self._schemas = [] + + def append(self, schema: Schema): + if schema in self._schemas: + return + self._schemas.append(schema) + + def get_schemas(self) -> List[Schema]: + return self._schemas + + +schema_manager = _SchemaManager() diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/BUILD.bazel new file mode 100644 index 000000000..68001c2c8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/BUILD.bazel @@ -0,0 +1,5 @@ +filegroup( + name = "sys_preset_templates", + srcs = glob(["**/*.json"]), + visibility = ["//visibility:public"], +) diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-fed-left.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-fed-left.json new file mode 100644 index 000000000..0165805ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-fed-left.json @@ -0,0 +1,1994 @@ +{ + "comment": "", + "config": { + "group_alias": "e2e-test", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job-streaming", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": \"Streaming\"\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(workflow.variables.input_base_dir)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(workflow.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n 
\"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(1024)\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": str(workflow.variables.input_data_format)\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job-psi", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": 
project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": \"PSI\"\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(workflow.variables.input_base_dir)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(workflow.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(1024)\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": str(workflow.variables.input_data_format)\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n 
\"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job-streaming" + }, + { + "source": "raw-data-job-psi" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "DATA_JOIN", + "name": "data-join-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-streaming'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n list(system.variables.volume_mounts_list)\n ,\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n list(system.variables.volumes_list)\n\n }\n },\n \"pair\": true,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n 
\"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-streaming'].name)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"MIN_MATCHING_WINDOW\",\n \"value\": str(1024)\n },\n {\n \"name\": \"MAX_MATCHING_WINDOW\",\n \"value\": str(4096)\n },\n {\n \"name\": \"RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n list(system.variables.volume_mounts_list)\n ,\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n list(system.variables.volumes_list)\n\n }\n },\n \"pair\": true,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job-streaming" + }, + { + "source": "raw-data-job-psi" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "PSI_DATA_JOIN", + "name": "psi-data-join-job", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_pem", + "tag": "", + "typed_value": "-----BEGIN RSA PUBLIC KEY-----\nMIGJAoGBAMZYpBzYDnROmrqC8LhDXhgW13E/JuTUHkHKsGwPScnp5TAueqo53ayu\nYzSlLrI+yQp206Kb/C+w/VdWJcLLAjAUBGqfZvCnsmpfOMt+s3JrNH24RCg282m/\nnIdpoVqb7SEDFlJPq3s0g/oZ5v0c74Yy5J/DuuaWcuU7URuYRbbnAgMBAAE=\n-----END RSA PUBLIC KEY-----", + "value": "-----BEGIN RSA PUBLIC KEY-----\nMIGJAoGBAMZYpBzYDnROmrqC8LhDXhgW13E/JuTUHkHKsGwPScnp5TAueqo53ayu\nYzSlLrI+yQp206Kb/C+w/VdWJcLLAjAUBGqfZvCnsmpfOMt+s3JrNH24RCg282m/\nnIdpoVqb7SEDFlJPq3s0g/oZ5v0c74Yy5J/DuuaWcuU7URuYRbbnAgMBAAE=\n-----END RSA PUBLIC KEY-----", + "value_type": "STRING", + "widget_schema": "{\"component\":\"TextArea\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_private_key_path", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + 
"access_mode": "PEER_WRITABLE", + "name": "rsa_key_path", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-psi'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": 
project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-psi'].name)\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": str(self.variables.rsa_key_pem)\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": str(self.variables.rsa_key_path)\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": str(self.variables.rsa_private_key_path)\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": \"data.aml.fl\"\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(None)\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(None)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": int(int(workflow.variables.num_partitions))\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "data-join-job" + } + ], + "easy_mode": false, + "is_federated": true, + "job_type": "NN_MODEL_TRANINING", + "name": "nn-train", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "epoch_num", + "tag": "", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "code_tar", + "tag": "", + "typed_value": { + "follower/config.py": "leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\nleader_label_name = ['label']\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\n", + "follower/main.py": "# Copyright 2020 The FedLearner Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding: utf-8\n# pylint: disable=no-else-return, inconsistent-return-statements\n\nimport os\nimport logging\nimport numpy as np\nimport tensorflow.compat.v1 as tf\nimport fedlearner.trainer as flt\nfrom config import *\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size',\n type=int,\n default=100,\n help='Training batch size.')\nargs = parser.parse_args()\n\n\ndef input_fn(bridge, trainer_master):\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\n trainer_master).make_dataset()\n\n def parse_fn(example):\n feature_map = dict()\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\n for name in follower_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n features = tf.parse_example(example, features=feature_map)\n return features, dict(y=tf.constant(0))\n\n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n return dataset\n\n\ndef serving_input_receiver_fn():\n feature_map = {\n \"example_id\": tf.FixedLenFeature([], tf.string),\n \"raw_id\": tf.FixedLenFeature([], tf.string),\n }\n for name in follower_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\n features = tf.parse_example(record_batch, features=feature_map)\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\n return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\n\n\ndef model_fn(model, features, labels, mode):\n logging.info('model_fn: mode %s', mode)\n x = [\n tf.expand_dims(features[name], axis=-1)\n for name in follower_feature_names\n ]\n x = tf.concat(x, axis=-1)\n\n w1f = tf.get_variable(\n 'w1l',\n shape=[len(follower_feature_names),\n len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable('b1l',\n shape=[len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.zeros_initializer())\n\n act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode=mode, predictions=act1_f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n gact1_f = model.send('act1_f', act1_f, require_grad=True)\n elif mode == tf.estimator.ModeKeys.EVAL:\n model.send('act1_f', act1_f, require_grad=False)\n\n #acc = model.recv('acc', tf.float32, require_grad=False)\n auc = model.recv('auc', tf.float32, require_grad=False)\n loss = model.recv('loss', tf.float32, require_grad=False)\n logging_hook = tf.train.LoggingTensorHook({\n 'auc': auc, 'loss': loss,\n }, every_n_iter=10)\n step_metric_hook = 
StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\n\n global_step = tf.train.get_or_create_global_step()\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.GradientDescentOptimizer(0.1)\n train_op = model.minimize(optimizer,\n act1_f,\n grad_loss=gact1_f,\n global_step=global_step)\n return model.make_spec(mode,\n loss=tf.math.reduce_mean(act1_f),\n train_op=train_op,\n training_hooks=[logging_hook, step_metric_hook])\n if mode == tf.estimator.ModeKeys.EVAL:\n fake_loss = tf.reduce_mean(act1_f)\n return model.make_spec(mode=mode, loss=fake_loss,\n evaluation_hooks=[logging_hook, step_metric_hook])\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\n serving_input_receiver_fn)\n", + "leader/config.py": "leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\nleader_label_name = ['label']\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\n", + "leader/main.py": "# Copyright 2020 The FedLearner Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding: utf-8\n# pylint: disable=no-else-return, inconsistent-return-statements\n\nimport os\nimport logging\nimport tensorflow.compat.v1 as tf\nimport fedlearner.trainer as flt\nfrom config import *\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\n\nROLE = 'leader'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size',\n type=int,\n default=100,\n help='Training batch size.')\nargs = parser.parse_args()\n\n\ndef input_fn(bridge, trainer_master):\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\n trainer_master).make_dataset()\n\n def parse_fn(example):\n feature_map = dict()\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\n for name in leader_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n label_map = {}\n for name in leader_label_name:\n label_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n features = tf.parse_example(example, features=feature_map)\n labels = tf.parse_example(example, features=label_map)\n return features, labels\n\n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n return dataset\n\n\ndef serving_input_receiver_fn():\n feature_map = {\n \"example_id\": tf.FixedLenFeature([], tf.string),\n \"raw_id\": tf.FixedLenFeature([], tf.string),\n }\n for name in leader_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\n features = tf.parse_example(record_batch, features=feature_map)\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\n return 
tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\n\n\ndef model_fn(model, features, labels, mode):\n logging.info('model_fn: mode %s', mode)\n x = [\n tf.expand_dims(features[name], axis=-1)\n for name in leader_feature_names\n ]\n x = tf.concat(x, axis=-1)\n\n w1l = tf.get_variable(\n 'w1l',\n shape=[len(leader_feature_names),\n len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1l = tf.get_variable('b1l',\n shape=[len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l)\n if mode == tf.estimator.ModeKeys.TRAIN:\n act1_f = model.recv('act1_f', tf.float32, require_grad=True)\n elif mode == tf.estimator.ModeKeys.EVAL:\n act1_f = model.recv('act1_f', tf.float32, require_grad=False)\n else:\n act1_f = features['act1_f']\n logits = act1_l + act1_f\n pred = tf.math.sigmoid(logits)\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode=mode, predictions=pred)\n\n y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name]\n y = tf.concat(y, axis=-1)\n\n loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n #correct = tf.nn.in_top_k(predictions=logits, targets=y, k=1)\n #acc = tf.reduce_mean(input_tensor=tf.cast(correct, tf.float32))\n logging_hook = tf.train.LoggingTensorHook({\n # 'acc': acc,\n 'auc': auc,\n 'loss': loss,\n }, every_n_iter=10)\n step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\n #model.send('acc', acc, require_grad=False)\n model.send('auc', auc, require_grad=False)\n model.send('loss', loss, require_grad=False)\n\n global_step = tf.train.get_or_create_global_step()\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.AdamOptimizer(1e-4)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode=mode,\n loss=loss,\n train_op=train_op,\n training_hooks=[logging_hook, step_metric_hook])\n\n if mode == tf.estimator.ModeKeys.EVAL:\n loss_pair = tf.metrics.mean(loss)\n return model.make_spec(mode=mode,\n loss=loss,\n eval_metric_ops={'loss': loss_pair},\n evaluation_hooks=[logging_hook, step_metric_hook])\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\n serving_input_receiver_fn)\n", + "main.py": "" + }, + "value": "{\"main.py\":\"\",\"leader/main.py\":\"# Copyright 2020 The FedLearner Authors. 
All Rights Reserved.\\n#\\n# Licensed under the Apache License, Version 2.0 (the \\\"License\\\");\\n# you may not use this file except in compliance with the License.\\n# You may obtain a copy of the License at\\n#\\n# http://www.apache.org/licenses/LICENSE-2.0\\n#\\n# Unless required by applicable law or agreed to in writing, software\\n# distributed under the License is distributed on an \\\"AS IS\\\" BASIS,\\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\\n# See the License for the specific language governing permissions and\\n# limitations under the License.\\n\\n# coding: utf-8\\n# pylint: disable=no-else-return, inconsistent-return-statements\\n\\nimport os\\nimport logging\\nimport tensorflow.compat.v1 as tf\\nimport fedlearner.trainer as flt\\nfrom config import *\\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\\n\\nROLE = 'leader'\\n\\nparser = flt.trainer_worker.create_argument_parser()\\nparser.add_argument('--batch-size',\\n type=int,\\n default=100,\\n help='Training batch size.')\\nargs = parser.parse_args()\\n\\n\\ndef input_fn(bridge, trainer_master):\\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\\n trainer_master).make_dataset()\\n\\n def parse_fn(example):\\n feature_map = dict()\\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\\n for name in leader_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n label_map = {}\\n for name in leader_label_name:\\n label_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n features = tf.parse_example(example, features=feature_map)\\n labels = tf.parse_example(example, features=label_map)\\n return features, labels\\n\\n dataset = dataset.map(map_func=parse_fn,\\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\\n return dataset\\n\\n\\ndef serving_input_receiver_fn():\\n feature_map = {\\n \\\"example_id\\\": tf.FixedLenFeature([], tf.string),\\n \\\"raw_id\\\": tf.FixedLenFeature([], tf.string),\\n }\\n for name in leader_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\\n features = tf.parse_example(record_batch, features=feature_map)\\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\\n return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\\n\\n\\ndef model_fn(model, features, labels, mode):\\n logging.info('model_fn: mode %s', mode)\\n x = [\\n tf.expand_dims(features[name], axis=-1)\\n for name in leader_feature_names\\n ]\\n x = tf.concat(x, axis=-1)\\n\\n w1l = tf.get_variable(\\n 'w1l',\\n shape=[len(leader_feature_names),\\n len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\\n b1l = tf.get_variable('b1l',\\n shape=[len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.zeros_initializer())\\n\\n act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l)\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n act1_f = model.recv('act1_f', tf.float32, require_grad=True)\\n elif mode == tf.estimator.ModeKeys.EVAL:\\n act1_f = model.recv('act1_f', tf.float32, require_grad=False)\\n else:\\n act1_f = features['act1_f']\\n logits = act1_l + act1_f\\n pred = tf.math.sigmoid(logits)\\n\\n if mode == tf.estimator.ModeKeys.PREDICT:\\n 
return model.make_spec(mode=mode, predictions=pred)\\n\\n y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name]\\n y = tf.concat(y, axis=-1)\\n\\n loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)\\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\\n #correct = tf.nn.in_top_k(predictions=logits, targets=y, k=1)\\n #acc = tf.reduce_mean(input_tensor=tf.cast(correct, tf.float32))\\n logging_hook = tf.train.LoggingTensorHook({\\n # 'acc': acc,\\n 'auc': auc,\\n 'loss': loss,\\n }, every_n_iter=10)\\n step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\\n #model.send('acc', acc, require_grad=False)\\n model.send('auc', auc, require_grad=False)\\n model.send('loss', loss, require_grad=False)\\n\\n global_step = tf.train.get_or_create_global_step()\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n optimizer = tf.train.AdamOptimizer(1e-4)\\n train_op = model.minimize(optimizer, loss, global_step=global_step)\\n return model.make_spec(mode=mode,\\n loss=loss,\\n train_op=train_op,\\n training_hooks=[logging_hook, step_metric_hook])\\n\\n if mode == tf.estimator.ModeKeys.EVAL:\\n loss_pair = tf.metrics.mean(loss)\\n return model.make_spec(mode=mode,\\n loss=loss,\\n eval_metric_ops={'loss': loss_pair},\\n evaluation_hooks=[logging_hook, step_metric_hook])\\n\\n\\nif __name__ == '__main__':\\n logging.basicConfig(level=logging.INFO)\\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\\n serving_input_receiver_fn)\\n\",\"follower/main.py\":\"# Copyright 2020 The FedLearner Authors. All Rights Reserved.\\n#\\n# Licensed under the Apache License, Version 2.0 (the \\\"License\\\");\\n# you may not use this file except in compliance with the License.\\n# You may obtain a copy of the License at\\n#\\n# http://www.apache.org/licenses/LICENSE-2.0\\n#\\n# Unless required by applicable law or agreed to in writing, software\\n# distributed under the License is distributed on an \\\"AS IS\\\" BASIS,\\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\\n# See the License for the specific language governing permissions and\\n# limitations under the License.\\n\\n# coding: utf-8\\n# pylint: disable=no-else-return, inconsistent-return-statements\\n\\nimport os\\nimport logging\\nimport numpy as np\\nimport tensorflow.compat.v1 as tf\\nimport fedlearner.trainer as flt\\nfrom config import *\\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\\n\\nROLE = 'follower'\\n\\nparser = flt.trainer_worker.create_argument_parser()\\nparser.add_argument('--batch-size',\\n type=int,\\n default=100,\\n help='Training batch size.')\\nargs = parser.parse_args()\\n\\n\\ndef input_fn(bridge, trainer_master):\\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\\n trainer_master).make_dataset()\\n\\n def parse_fn(example):\\n feature_map = dict()\\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\\n for name in follower_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n features = tf.parse_example(example, features=feature_map)\\n return features, dict(y=tf.constant(0))\\n\\n dataset = dataset.map(map_func=parse_fn,\\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\\n return dataset\\n\\n\\ndef serving_input_receiver_fn():\\n feature_map = {\\n \\\"example_id\\\": tf.FixedLenFeature([], tf.string),\\n \\\"raw_id\\\": tf.FixedLenFeature([], tf.string),\\n }\\n for name in 
follower_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\\n features = tf.parse_example(record_batch, features=feature_map)\\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\\n return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\\n\\n\\ndef model_fn(model, features, labels, mode):\\n logging.info('model_fn: mode %s', mode)\\n x = [\\n tf.expand_dims(features[name], axis=-1)\\n for name in follower_feature_names\\n ]\\n x = tf.concat(x, axis=-1)\\n\\n w1f = tf.get_variable(\\n 'w1l',\\n shape=[len(follower_feature_names),\\n len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\\n b1f = tf.get_variable('b1l',\\n shape=[len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.zeros_initializer())\\n\\n act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f)\\n\\n if mode == tf.estimator.ModeKeys.PREDICT:\\n return model.make_spec(mode=mode, predictions=act1_f)\\n\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n gact1_f = model.send('act1_f', act1_f, require_grad=True)\\n elif mode == tf.estimator.ModeKeys.EVAL:\\n model.send('act1_f', act1_f, require_grad=False)\\n\\n #acc = model.recv('acc', tf.float32, require_grad=False)\\n auc = model.recv('auc', tf.float32, require_grad=False)\\n loss = model.recv('loss', tf.float32, require_grad=False)\\n logging_hook = tf.train.LoggingTensorHook({\\n 'auc': auc, 'loss': loss,\\n }, every_n_iter=10)\\n step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\\n\\n global_step = tf.train.get_or_create_global_step()\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n optimizer = tf.train.GradientDescentOptimizer(0.1)\\n train_op = model.minimize(optimizer,\\n act1_f,\\n grad_loss=gact1_f,\\n global_step=global_step)\\n return model.make_spec(mode,\\n loss=tf.math.reduce_mean(act1_f),\\n train_op=train_op,\\n training_hooks=[logging_hook, step_metric_hook])\\n if mode == tf.estimator.ModeKeys.EVAL:\\n fake_loss = tf.reduce_mean(act1_f)\\n return model.make_spec(mode=mode, loss=fake_loss,\\n evaluation_hooks=[logging_hook, step_metric_hook])\\n\\n\\nif __name__ == '__main__':\\n logging.basicConfig(level=logging.INFO)\\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\\n serving_input_receiver_fn)\\n\",\"follower/config.py\":\"leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\\nleader_label_name = ['label']\\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\\n\",\"leader/config.py\":\"leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\\nleader_label_name = ['label']\\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\\n\"}", + "value_type": "CODE", + "widget_schema": "{\"component\":\"Code\",\"required\":true}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"activeDeadlineSeconds\": 1200, \n \"fedReplicaSpecs\": {\n \"Master\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n 
\"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(int(self.variables.epoch_num))\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(None)\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(None)\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": workflow.jobs['data-join-job'].name\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": \"\"\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": \"\"\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": self.variables.code_tar\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": \"\" and project.variables.storage_root_path + \"/job_output/\" + \"\" + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": \"\"\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n\n }\n },\n \"replicas\": int(1)\n },\n \"PS\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n 
\"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(1)\n },\n \"Worker\": {\n \"backoffLimit\": 6,\n \"mustSuccess\": True,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": \"train\"\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(1)\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": \"\"\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": self.variables.code_tar\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(1000)\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(None)\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(None)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export WORKER_RANK=$$INDEX && export PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/trainer/run_trainer_worker.sh\"],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(1)\n }\n }\n }\n}" + }, + { + "dependencies": [ + { + "source": "data-join-job" + }, + { + "source": "psi-data-join-job" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "TREE_MODEL_TRAINING", + "name": "tree-train", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "max_depth", + "tag": "", + "typed_value": "3", + "value": "3", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_iters", + "tag": "", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_ext", + "tag": "", + "typed_value": ".data", + "value": ".data", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_type", + "tag": "", + "typed_value": "tfrecord", + "value": "tfrecord", + "value_type": "STRING", + "widget_schema": 
"{\"component\":\"Select\",\"required\":true,\"enum\":[\"csv\",\"tfrecord\"]}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": \"train\"\n },\n {\n \"name\": \"LOSS_TYPE\",\n \"value\": \"logistic\"\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": str(workflow.jobs['psi-data-join-job'].name)\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"VALIDATION_DATA_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"NO_DATA\",\n \"value\": str(False)\n },\n {\n \"name\": \"FILE_EXT\",\n \"value\": str(self.variables.file_ext)\n },\n {\n \"name\": \"FILE_TYPE\",\n \"value\": str(self.variables.file_type)\n },\n {\n \"name\": \"LOAD_MODEL_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_MODEL_NAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(1)\n },\n {\n \"name\": \"LEARNING_RATE\",\n \"value\": str(0.3)\n },\n {\n \"name\": \"MAX_ITERS\",\n \"value\": str(int(self.variables.max_iters))\n },\n {\n \"name\": \"MAX_DEPTH\",\n \"value\": str(int(self.variables.max_depth))\n },\n {\n \"name\": \"MAX_BINS\",\n \"value\": str(33)\n },\n {\n \"name\": \"L2_REGULARIZATION\",\n \"value\": str(1.0)\n },\n {\n \"name\": \"NUM_PARALLEL\",\n \"value\": str(1)\n },\n {\n \"name\": \"VERIFY_EXAMPLE_IDS\",\n \"value\": str(False)\n },\n {\n \"name\": \"IGNORE_FIELDS\",\n \"value\": \"\"\n },\n {\n \"name\": \"CAT_FIELDS\",\n \"value\": \"\"\n },\n {\n \"name\": \"LABEL_FIELD\",\n \"value\": \"label\"\n },\n {\n \"name\": \"SEND_SCORES_TO_FOLLOWER\",\n \"value\": str(False)\n },\n {\n \"name\": \"SEND_METRICS_TO_FOLLOWER\",\n \"value\": str(False)\n },\n {\n \"name\": \"ENABLE_PACKING\",\n \"value\": str(True)\n },\n {\n \"name\": \"ES_BATCH_SIZE\",\n \"value\": str(10)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": 
\"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_tree_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"4Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"4Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": 1\n }\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "image", + "tag": "", + "typed_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "num_partitions", + "tag": "", + "typed_value": "1", + "value": "1", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "", + "typed_value": "Follower", + "value": "Follower", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"]}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "input_base_dir", + "tag": "", + "typed_value": "/app/deploy/integrated_test/credit_default", + "value": "/app/deploy/integrated_test/credit_default", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_wildcard", + "tag": "", + "typed_value": "*host.csv", + "value": "*host.csv", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "input_data_format", + "tag": "", + "typed_value": "CSV_DICT", + "value": "CSV_DICT", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"CSV_DICT\",\"TF_RECORD\"]}" + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "data-join-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": 
\"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n ${Slot_volume_mounts}\n ,\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n ${Slot_volumes}\n\n }\n },\n \"pair\": true,\n \"replicas\": ${Slot_master_replicas}\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"MIN_MATCHING_WINDOW\",\n \"value\": str(${Slot_min_matching_window})\n },\n {\n \"name\": \"MAX_MATCHING_WINDOW\",\n \"value\": str(${Slot_max_matching_window})\n },\n {\n \"name\": \"RAW_DATA_ITER\",\n \"value\": ${Slot_raw_data_iter}\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(${Slot_enable_negative_example_generator})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n 
\"volumeMounts\":\n ${Slot_volume_mounts}\n ,\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n ${Slot_volumes}\n\n }\n },\n \"pair\": true,\n \"replicas\": ${Slot_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + 
"reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Master Pods数量", + "label": "Master的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_max_matching_window": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,the max matching window for example join. <=0 means window size is infinite", + "label": "最大匹配滑窗", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_min_matching_window": { + "default": "", + "default_value": 1024.0, + "help": "建议不修改,the min matching window for example join ,<=0 means window size is infinite", + "label": "最小匹配滑窗", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw_data文件类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + "reference": "workflow.jobs['raw-data-job-streaming'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_start_time": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": 
"LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "nn-train": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(${Slot_epoch_num})\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(${Slot_start_date})\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(${Slot_end_date})\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": ${Slot_online_training}\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": ${Slot_checkpoint_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": ${Slot_load_checkpoint_filename}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": ${Slot_load_checkpoint_filename_with_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + \"/job_output/\" + ${Slot_load_checkpoint_from_job} + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": ${Slot_export_path}\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": 
${Slot_volumes}\n\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_master_replicas})\n },\n \"PS\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + ${Slot_ps_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_ps_replicas})\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(${Slot_save_checkpoint_steps})\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(${Slot_save_checkpoint_secs})\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(${Slot_summary_save_steps})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_trainer_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_worker_replicas})\n }\n }\n }\n}\n", + "slots": { + 
"Slot_checkpoint_path": { + "default": "", + "default_value": "", + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "label": "CHECKPOINT_PATH", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_key": { + "default": "", + "default_value": "", + "help": "代码tar包地址,如果为空则使用code tar", + "label": "模型代码路径", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_tar": { + "default": "", + "default_value": "", + "help": "代码包,variable中请使用代码类型", + "label": "代码", + "reference": "self.variables.code_tar", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_data_source": { + "default": "", + "default_value": "", + "help": "必须修改,求交任务的名字", + "label": "数据源", + "reference": "workflow.jobs['data-join-job'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_end_date": { + "default": "", + "default_value": null, + "help": "training data end date", + "label": "结束时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_epoch_num": { + "default": "", + "default_value": 1.0, + "help": "number of epoch for training, not support in online training", + "label": "epoch数量", + "reference": "self.variables.epoch_num", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_export_path": { + "default": "", + "default_value": "", + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "label": "EXPORT_PATH", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_load_checkpoint_filename": { + "default": "", + "default_value": "", + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "label": "LOAD_CHECKPOINT_FILENAME", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_filename_with_path": { + "default": "", + "default_value": "", + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "label": "从绝对路径加载checkpoint", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_from_job": { + "default": "", + "default_value": "", + "help": "指定任务名job_output下的latest checkpoint", + "label": "以任务名加载checkpoint", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_replicas": { + 
"default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Master Pods数量", + "label": "Master的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_mode": { + "default": "", + "default_value": "train", + "help": "choices:['train','eval'] 训练还是验证", + "label": "模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_online_training": { + "default": "", + "default_value": "", + "help": "['','--online_training'] 否 是,the train master run for online training", + "label": "是否在线训练", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "PS的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_envs": { + "default": "", + "default_value": [], + "help": "数组类型,ps pod额外的环境变量", + "label": "PS额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_ps_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "PS的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的PS Pods数量", + "label": "PS的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_save_checkpoint_secs": { + "default": "", + "default_value": null, + "help": "int,Number of secs between checkpoints.", + "label": "SAVE_CHECKPOINT_SECS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_save_checkpoint_steps": { + "default": "", + "default_value": 1000.0, + "help": "int, Number of steps between checkpoints.", + "label": "SAVE_CHECKPOINT_STEPS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_sparse_estimator": { + "default": "", + "default_value": false, + "help": "bool,default False Whether using sparse estimator.", + "label": "SPARSE_ESTIMATOR", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_start_date": { + "default": "", + "default_value": null, + "help": "training data start date", + "label": "开始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_suffle_data_block": { + "default": "", + "default_value": "", + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "label": "是否shuffle数据块", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_summary_save_steps": { + "default": "", + "default_value": null, + "help": "int, Number of steps to save summary files.", + "label": "SUMMARY_SAVE_STEPS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_verbosity": { + "default": "", + "default_value": 1.0, + "help": "int, Logging level", + "label": "日志等级", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + 
"Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Worker Pods数量", + "label": "Worker的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + } + }, + "variables": [] + }, + "psi-data-join-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": 
\"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": ${Slot_rsa_key_pem}\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": ${Slot_rsa_key_path}\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": ${Slot_rsa_private_key_path}\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": ${Slot_kms_key_name}\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": ${Slot_kms_client}\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": ${Slot_psi_raw_data_iter}\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": ${Slot_data_block_builder}\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": ${Slot_psi_output_builder}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(${Slot_psi_read_ahead_size})\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(${Slot_run_merger_read_ahead_buffer})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(${Slot_enable_negative_example_generator})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n 
\"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_partition_num})\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "data block output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_kms_client": { + "default": "", + "default_value": "data.aml.fl", + "help": "kms client", + "label": "kms client", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_kms_key_name": { + "default": "", + "default_value": "", + "help": "kms中的密钥名称,站内镜像需使用KMS", + "label": "密钥名称", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": 
"Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_psi_output_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "PSI output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw data数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_read_ahead_size": { + "default": "", + "default_value": null, + "help": "建议不填, the read ahead size for raw data", + "label": "psi_read_ahead_size", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + "reference": "workflow.jobs['raw-data-job-psi'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_rsa_key_path": { + "default": "", + "default_value": "", + "help": "RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填", + "label": "RSA钥匙地址", + "reference": "self.variables.rsa_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_key_pem": { + "default": "", + "default_value": "", + "help": "RSA公钥,follower需提供", + "label": "RSA公钥", + "reference": "self.variables.rsa_key_pem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_private_key_path": { + "default": "", + "default_value": "", + "help": "RSA私钥的地址, leader必填", + "label": "RSA私钥地址", + "reference": "self.variables.rsa_private_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_run_merger_read_ahead_buffer": { + "default": "", + "default_value": null, + "help": "建议不填, sort run merger read ahead buffer", + "label": "run_merger_read_ahead_buffer", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_start_time": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + 
"reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "raw-data-job-psi": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": 
${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed 
type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "PSI", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "workflow.variables.file_wildcard", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "workflow.variables.input_base_dir", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "workflow.variables.input_data_format", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. 
\"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "raw-data-job-streaming": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": 
self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n 
\"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "Streaming", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "workflow.variables.file_wildcard", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "workflow.variables.input_base_dir", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "workflow.variables.input_data_format", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + 
"Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. \"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "tree-train": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": 
\"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"LOSS_TYPE\",\n \"value\": ${Slot_loss_type}\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": ${Slot_data_path}\n },\n {\n \"name\": \"VALIDATION_DATA_PATH\",\n \"value\": ${Slot_validation_data_path}\n },\n {\n \"name\": \"NO_DATA\",\n \"value\": str(${Slot_no_data})\n },\n {\n \"name\": \"FILE_EXT\",\n \"value\": ${Slot_file_ext}\n },\n {\n \"name\": \"FILE_TYPE\",\n \"value\": ${Slot_file_type}\n },\n {\n \"name\": \"LOAD_MODEL_PATH\",\n \"value\": ${Slot_load_model_path}\n },\n {\n \"name\": \"LOAD_MODEL_NAME\",\n \"value\": ${Slot_load_model_name}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"LEARNING_RATE\",\n \"value\": str(${Slot_learning_rate})\n },\n {\n \"name\": \"MAX_ITERS\",\n \"value\": str(${Slot_max_iters})\n },\n {\n \"name\": \"MAX_DEPTH\",\n \"value\": str(${Slot_max_depth})\n },\n {\n \"name\": \"MAX_BINS\",\n \"value\": str(${Slot_max_bins})\n },\n {\n \"name\": \"L2_REGULARIZATION\",\n \"value\": str(${Slot_l2_regularization})\n },\n {\n \"name\": \"NUM_PARALLEL\",\n \"value\": str(${Slot_num_parallel})\n },\n {\n \"name\": \"VERIFY_EXAMPLE_IDS\",\n \"value\": str(${Slot_verify_example_ids})\n },\n {\n \"name\": \"IGNORE_FIELDS\",\n \"value\": ${Slot_ignore_fields}\n },\n {\n \"name\": \"CAT_FIELDS\",\n \"value\": ${Slot_cat_fields}\n },\n {\n \"name\": \"LABEL_FIELD\",\n \"value\": ${Slot_label_field}\n },\n {\n \"name\": \"SEND_SCORES_TO_FOLLOWER\",\n \"value\": str(${Slot_send_scores_to_follower})\n },\n {\n \"name\": \"SEND_METRICS_TO_FOLLOWER\",\n \"value\": str(${Slot_send_metrics_to_follower})\n },\n {\n \"name\": \"ENABLE_PACKING\",\n \"value\": str(${Slot_enable_packing})\n },\n {\n \"name\": \"ES_BATCH_SIZE\",\n \"value\": str(${Slot_es_batch_size})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n 
\"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_tree_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_mem}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_mem}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n }\n }\n }\n}\n", + "slots": { + "Slot_cat_fields": { + "default": "", + "default_value": "", + "help": "类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex", + "label": "类别类型特征", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_path": { + "default": "", + "default_value": "", + "help": "数据存放位置", + "label": "数据存放位置", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_source": { + "default": "", + "default_value": "", + "help": "求交数据集名称", + "label": "求交数据集名称", + "reference": "workflow.jobs['psi-data-join-job'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_enable_packing": { + "default": "", + "default_value": true, + "help": "是否开启优化", + "label": "是否开启优化", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_es_batch_size": { + "default": "", + "default_value": 10.0, + "help": "ES_BATCH_SIZE", + "label": "ES_BATCH_SIZE", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_file_ext": { + "default": "", + "default_value": ".data", + "help": "文件后缀", + "label": "文件后缀", + "reference": "self.variables.file_ext", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_file_type": { + "default": "", + "default_value": "csv", + "help": "文件类型,csv或tfrecord", + "label": "文件类型,csv或tfrecord", + "reference": "self.variables.file_type", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_ignore_fields": { + "default": "", + "default_value": "", + "help": "以逗号分隔如:name,age,sex", + "label": "不入模的特征", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_l2_regularization": { + "default": "", + "default_value": 1.0, + "help": "L2惩罚系数", + "label": "L2惩罚系数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_label_field": { + "default": "", + "default_value": "label", + "help": "label特征名", + "label": "label特征名", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_learning_rate": { + "default": "", + "default_value": 0.3, + "help": "学习率", + "label": "学习率", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_load_model_name": { + "default": "", + "default_value": "", + "help": "按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models", + "label": "模型任务名称", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_model_path": { + "default": "", + "default_value": "", + "help": 
"模型文件地址", + "label": "模型文件地址", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_loss_type": { + "default": "", + "default_value": "logistic", + "help": "损失函数类型,logistic或mse,默认logistic", + "label": "损失函数类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_max_bins": { + "default": "", + "default_value": 33.0, + "help": "最大分箱数", + "label": "最大分箱数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_max_depth": { + "default": "", + "default_value": 3.0, + "help": "最大深度", + "label": "最大深度", + "reference": "self.variables.max_depth", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_max_iters": { + "default": "", + "default_value": 5.0, + "help": "树的数量", + "label": "迭代数", + "reference": "self.variables.max_iters", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_mode": { + "default": "", + "default_value": "train", + "help": "任务类型,train或eval", + "label": "任务类型,train或eval", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_no_data": { + "default": "", + "default_value": false, + "help": "Leader是否没数据", + "label": "Leader是否没数据", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_num_parallel": { + "default": "", + "default_value": 1.0, + "help": "进程数量", + "label": "进程数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_send_metrics_to_follower": { + "default": "", + "default_value": false, + "help": "是否发送指标到follower", + "label": "是否发送指标到follower", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_send_scores_to_follower": { + "default": "", + "default_value": false, + "help": "是否发送结果到follower", + "label": "是否发送结果到follower", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_validation_data_path": { + "default": "", + "default_value": "", + "help": "验证数据集地址", + "label": "验证数据集地址", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_verbosity": { + "default": "", + "default_value": 1.0, + "help": "日志输出等级", + "label": "日志输出等级", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_verify_example_ids": { + "default": "", + "default_value": false, + "help": "是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower", + "label": "是否检查example_id对齐", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": 
"建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "所需CPU", + "label": "所需CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_mem": { + "default": "", + "default_value": "4Gi", + "help": "所需内存", + "label": "所需内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + } + } + }, + "group_alias": "e2e-test", + "name": "e2e-fed-left" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-fed-right.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-fed-right.json new file mode 100644 index 000000000..42d0fa7fc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-fed-right.json @@ -0,0 +1,1994 @@ +{ + "comment": "", + "config": { + "group_alias": "e2e-test", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job-streaming", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": \"Streaming\"\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(workflow.variables.input_base_dir)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(workflow.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": 
str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(1024)\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": str(workflow.variables.input_data_format)\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job-psi", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": 
\"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": \"PSI\"\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(workflow.variables.input_base_dir)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(workflow.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(1024)\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": str(workflow.variables.input_data_format)\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": 
{\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job-streaming" + }, + { + "source": "raw-data-job-psi" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "DATA_JOIN", + "name": "data-join-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-streaming'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n list(system.variables.volume_mounts_list)\n ,\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n list(system.variables.volumes_list)\n\n }\n },\n \"pair\": true,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": 
system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-streaming'].name)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"MIN_MATCHING_WINDOW\",\n \"value\": str(1024)\n },\n {\n \"name\": \"MAX_MATCHING_WINDOW\",\n \"value\": str(4096)\n },\n {\n \"name\": \"RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n list(system.variables.volume_mounts_list)\n ,\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n list(system.variables.volumes_list)\n\n }\n },\n \"pair\": true,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job-streaming" + }, + { + "source": "raw-data-job-psi" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "PSI_DATA_JOIN", + "name": "psi-data-join-job", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_pem", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"TextArea\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_private_key_path", + "tag": "", + "typed_value": "/app/deploy/integrated_test/rsa_private.key", + "value": "/app/deploy/integrated_test/rsa_private.key", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_path", + "tag": "", + "typed_value": "/app/deploy/integrated_test/rsa_private.key", + "value": "/app/deploy/integrated_test/rsa_private.key", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n 
\"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-psi'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": 
str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-psi'].name)\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": str(self.variables.rsa_key_pem)\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": str(self.variables.rsa_key_path)\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": str(self.variables.rsa_private_key_path)\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": \"data.aml.fl\"\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(None)\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(None)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": int(int(workflow.variables.num_partitions))\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "data-join-job" + } + ], + "easy_mode": false, + "is_federated": true, + "job_type": "NN_MODEL_TRANINING", + "name": "nn-train", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "epoch_num", + "tag": "", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "code_tar", + "tag": "", + "typed_value": { + "follower/config.py": "leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\nleader_label_name = ['label']\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\n", + "follower/main.py": "# Copyright 2020 The FedLearner Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding: utf-8\n# pylint: disable=no-else-return, inconsistent-return-statements\n\nimport os\nimport logging\nimport numpy as np\nimport tensorflow.compat.v1 as tf\nimport fedlearner.trainer as flt\nfrom config import *\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size',\n type=int,\n default=100,\n help='Training batch size.')\nargs = parser.parse_args()\n\n\ndef input_fn(bridge, trainer_master):\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\n trainer_master).make_dataset()\n\n def parse_fn(example):\n feature_map = dict()\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\n for name in follower_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n features = tf.parse_example(example, features=feature_map)\n return features, dict(y=tf.constant(0))\n\n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n return dataset\n\n\ndef serving_input_receiver_fn():\n feature_map = {\n \"example_id\": tf.FixedLenFeature([], tf.string),\n \"raw_id\": tf.FixedLenFeature([], tf.string),\n }\n for name in follower_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\n features = tf.parse_example(record_batch, features=feature_map)\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\n return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\n\n\ndef model_fn(model, features, labels, mode):\n logging.info('model_fn: mode %s', mode)\n x = [\n tf.expand_dims(features[name], axis=-1)\n for name in follower_feature_names\n ]\n x = tf.concat(x, axis=-1)\n\n w1f = tf.get_variable(\n 'w1l',\n shape=[len(follower_feature_names),\n len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable('b1l',\n shape=[len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.zeros_initializer())\n\n act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode=mode, predictions=act1_f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n gact1_f = model.send('act1_f', act1_f, require_grad=True)\n elif mode == tf.estimator.ModeKeys.EVAL:\n model.send('act1_f', act1_f, require_grad=False)\n\n #acc = model.recv('acc', tf.float32, require_grad=False)\n auc = model.recv('auc', tf.float32, require_grad=False)\n loss = model.recv('loss', tf.float32, require_grad=False)\n logging_hook = tf.train.LoggingTensorHook({\n 'auc': auc, 'loss': loss,\n }, every_n_iter=10)\n step_metric_hook = 
StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\n\n global_step = tf.train.get_or_create_global_step()\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.GradientDescentOptimizer(0.1)\n train_op = model.minimize(optimizer,\n act1_f,\n grad_loss=gact1_f,\n global_step=global_step)\n return model.make_spec(mode,\n loss=tf.math.reduce_mean(act1_f),\n train_op=train_op,\n training_hooks=[logging_hook, step_metric_hook])\n if mode == tf.estimator.ModeKeys.EVAL:\n fake_loss = tf.reduce_mean(act1_f)\n return model.make_spec(mode=mode, loss=fake_loss,\n evaluation_hooks=[logging_hook, step_metric_hook])\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\n serving_input_receiver_fn)\n", + "leader/config.py": "leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\nleader_label_name = ['label']\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\n", + "leader/main.py": "# Copyright 2020 The FedLearner Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding: utf-8\n# pylint: disable=no-else-return, inconsistent-return-statements\n\nimport os\nimport logging\nimport tensorflow.compat.v1 as tf\nimport fedlearner.trainer as flt\nfrom config import *\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\n\nROLE = 'leader'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size',\n type=int,\n default=100,\n help='Training batch size.')\nargs = parser.parse_args()\n\n\ndef input_fn(bridge, trainer_master):\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\n trainer_master).make_dataset()\n\n def parse_fn(example):\n feature_map = dict()\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\n for name in leader_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n label_map = {}\n for name in leader_label_name:\n label_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n features = tf.parse_example(example, features=feature_map)\n labels = tf.parse_example(example, features=label_map)\n return features, labels\n\n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n return dataset\n\n\ndef serving_input_receiver_fn():\n feature_map = {\n \"example_id\": tf.FixedLenFeature([], tf.string),\n \"raw_id\": tf.FixedLenFeature([], tf.string),\n }\n for name in leader_feature_names:\n feature_map[name] = tf.FixedLenFeature([],\n tf.float32,\n default_value=0.0)\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\n features = tf.parse_example(record_batch, features=feature_map)\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\n return 
tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\n\n\ndef model_fn(model, features, labels, mode):\n logging.info('model_fn: mode %s', mode)\n x = [\n tf.expand_dims(features[name], axis=-1)\n for name in leader_feature_names\n ]\n x = tf.concat(x, axis=-1)\n\n w1l = tf.get_variable(\n 'w1l',\n shape=[len(leader_feature_names),\n len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1l = tf.get_variable('b1l',\n shape=[len(leader_label_name)],\n dtype=tf.float32,\n initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l)\n if mode == tf.estimator.ModeKeys.TRAIN:\n act1_f = model.recv('act1_f', tf.float32, require_grad=True)\n elif mode == tf.estimator.ModeKeys.EVAL:\n act1_f = model.recv('act1_f', tf.float32, require_grad=False)\n else:\n act1_f = features['act1_f']\n logits = act1_l + act1_f\n pred = tf.math.sigmoid(logits)\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode=mode, predictions=pred)\n\n y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name]\n y = tf.concat(y, axis=-1)\n\n loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n #correct = tf.nn.in_top_k(predictions=logits, targets=y, k=1)\n #acc = tf.reduce_mean(input_tensor=tf.cast(correct, tf.float32))\n logging_hook = tf.train.LoggingTensorHook({\n # 'acc': acc,\n 'auc': auc,\n 'loss': loss,\n }, every_n_iter=10)\n step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\n #model.send('acc', acc, require_grad=False)\n model.send('auc', auc, require_grad=False)\n model.send('loss', loss, require_grad=False)\n\n global_step = tf.train.get_or_create_global_step()\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.AdamOptimizer(1e-4)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode=mode,\n loss=loss,\n train_op=train_op,\n training_hooks=[logging_hook, step_metric_hook])\n\n if mode == tf.estimator.ModeKeys.EVAL:\n loss_pair = tf.metrics.mean(loss)\n return model.make_spec(mode=mode,\n loss=loss,\n eval_metric_ops={'loss': loss_pair},\n evaluation_hooks=[logging_hook, step_metric_hook])\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\n serving_input_receiver_fn)\n", + "main.py": "" + }, + "value": "{\"main.py\":\"\",\"leader/main.py\":\"# Copyright 2020 The FedLearner Authors. 
All Rights Reserved.\\n#\\n# Licensed under the Apache License, Version 2.0 (the \\\"License\\\");\\n# you may not use this file except in compliance with the License.\\n# You may obtain a copy of the License at\\n#\\n# http://www.apache.org/licenses/LICENSE-2.0\\n#\\n# Unless required by applicable law or agreed to in writing, software\\n# distributed under the License is distributed on an \\\"AS IS\\\" BASIS,\\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\\n# See the License for the specific language governing permissions and\\n# limitations under the License.\\n\\n# coding: utf-8\\n# pylint: disable=no-else-return, inconsistent-return-statements\\n\\nimport os\\nimport logging\\nimport tensorflow.compat.v1 as tf\\nimport fedlearner.trainer as flt\\nfrom config import *\\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\\n\\nROLE = 'leader'\\n\\nparser = flt.trainer_worker.create_argument_parser()\\nparser.add_argument('--batch-size',\\n type=int,\\n default=100,\\n help='Training batch size.')\\nargs = parser.parse_args()\\n\\n\\ndef input_fn(bridge, trainer_master):\\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\\n trainer_master).make_dataset()\\n\\n def parse_fn(example):\\n feature_map = dict()\\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\\n for name in leader_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n label_map = {}\\n for name in leader_label_name:\\n label_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n features = tf.parse_example(example, features=feature_map)\\n labels = tf.parse_example(example, features=label_map)\\n return features, labels\\n\\n dataset = dataset.map(map_func=parse_fn,\\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\\n return dataset\\n\\n\\ndef serving_input_receiver_fn():\\n feature_map = {\\n \\\"example_id\\\": tf.FixedLenFeature([], tf.string),\\n \\\"raw_id\\\": tf.FixedLenFeature([], tf.string),\\n }\\n for name in leader_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\\n features = tf.parse_example(record_batch, features=feature_map)\\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\\n return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\\n\\n\\ndef model_fn(model, features, labels, mode):\\n logging.info('model_fn: mode %s', mode)\\n x = [\\n tf.expand_dims(features[name], axis=-1)\\n for name in leader_feature_names\\n ]\\n x = tf.concat(x, axis=-1)\\n\\n w1l = tf.get_variable(\\n 'w1l',\\n shape=[len(leader_feature_names),\\n len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\\n b1l = tf.get_variable('b1l',\\n shape=[len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.zeros_initializer())\\n\\n act1_l = tf.nn.bias_add(tf.matmul(x, w1l), b1l)\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n act1_f = model.recv('act1_f', tf.float32, require_grad=True)\\n elif mode == tf.estimator.ModeKeys.EVAL:\\n act1_f = model.recv('act1_f', tf.float32, require_grad=False)\\n else:\\n act1_f = features['act1_f']\\n logits = act1_l + act1_f\\n pred = tf.math.sigmoid(logits)\\n\\n if mode == tf.estimator.ModeKeys.PREDICT:\\n 
return model.make_spec(mode=mode, predictions=pred)\\n\\n y = [tf.expand_dims(labels[name], axis=-1) for name in leader_label_name]\\n y = tf.concat(y, axis=-1)\\n\\n loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)\\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\\n #correct = tf.nn.in_top_k(predictions=logits, targets=y, k=1)\\n #acc = tf.reduce_mean(input_tensor=tf.cast(correct, tf.float32))\\n logging_hook = tf.train.LoggingTensorHook({\\n # 'acc': acc,\\n 'auc': auc,\\n 'loss': loss,\\n }, every_n_iter=10)\\n step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\\n #model.send('acc', acc, require_grad=False)\\n model.send('auc', auc, require_grad=False)\\n model.send('loss', loss, require_grad=False)\\n\\n global_step = tf.train.get_or_create_global_step()\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n optimizer = tf.train.AdamOptimizer(1e-4)\\n train_op = model.minimize(optimizer, loss, global_step=global_step)\\n return model.make_spec(mode=mode,\\n loss=loss,\\n train_op=train_op,\\n training_hooks=[logging_hook, step_metric_hook])\\n\\n if mode == tf.estimator.ModeKeys.EVAL:\\n loss_pair = tf.metrics.mean(loss)\\n return model.make_spec(mode=mode,\\n loss=loss,\\n eval_metric_ops={'loss': loss_pair},\\n evaluation_hooks=[logging_hook, step_metric_hook])\\n\\n\\nif __name__ == '__main__':\\n logging.basicConfig(level=logging.INFO)\\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\\n serving_input_receiver_fn)\\n\",\"follower/main.py\":\"# Copyright 2020 The FedLearner Authors. All Rights Reserved.\\n#\\n# Licensed under the Apache License, Version 2.0 (the \\\"License\\\");\\n# you may not use this file except in compliance with the License.\\n# You may obtain a copy of the License at\\n#\\n# http://www.apache.org/licenses/LICENSE-2.0\\n#\\n# Unless required by applicable law or agreed to in writing, software\\n# distributed under the License is distributed on an \\\"AS IS\\\" BASIS,\\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\\n# See the License for the specific language governing permissions and\\n# limitations under the License.\\n\\n# coding: utf-8\\n# pylint: disable=no-else-return, inconsistent-return-statements\\n\\nimport os\\nimport logging\\nimport numpy as np\\nimport tensorflow.compat.v1 as tf\\nimport fedlearner.trainer as flt\\nfrom config import *\\nfrom fedlearner.trainer.trainer_worker import StepLossAucMetricsHook\\n\\nROLE = 'follower'\\n\\nparser = flt.trainer_worker.create_argument_parser()\\nparser.add_argument('--batch-size',\\n type=int,\\n default=100,\\n help='Training batch size.')\\nargs = parser.parse_args()\\n\\n\\ndef input_fn(bridge, trainer_master):\\n dataset = flt.data.DataBlockLoader(args.batch_size, ROLE, bridge,\\n trainer_master).make_dataset()\\n\\n def parse_fn(example):\\n feature_map = dict()\\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\\n feature_map['raw_id'] = tf.FixedLenFeature([], tf.string)\\n for name in follower_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n features = tf.parse_example(example, features=feature_map)\\n return features, dict(y=tf.constant(0))\\n\\n dataset = dataset.map(map_func=parse_fn,\\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\\n return dataset\\n\\n\\ndef serving_input_receiver_fn():\\n feature_map = {\\n \\\"example_id\\\": tf.FixedLenFeature([], tf.string),\\n \\\"raw_id\\\": tf.FixedLenFeature([], tf.string),\\n }\\n for name in 
follower_feature_names:\\n feature_map[name] = tf.FixedLenFeature([],\\n tf.float32,\\n default_value=0.0)\\n record_batch = tf.placeholder(dtype=tf.string, name='examples')\\n features = tf.parse_example(record_batch, features=feature_map)\\n features['act1_f'] = tf.placeholder(dtype=tf.float32, name='act1_f')\\n receiver_tensors = {'examples': record_batch, 'act1_f': features['act1_f']}\\n return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)\\n\\n\\ndef model_fn(model, features, labels, mode):\\n logging.info('model_fn: mode %s', mode)\\n x = [\\n tf.expand_dims(features[name], axis=-1)\\n for name in follower_feature_names\\n ]\\n x = tf.concat(x, axis=-1)\\n\\n w1f = tf.get_variable(\\n 'w1l',\\n shape=[len(follower_feature_names),\\n len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.random_uniform_initializer(-0.01, 0.01))\\n b1f = tf.get_variable('b1l',\\n shape=[len(leader_label_name)],\\n dtype=tf.float32,\\n initializer=tf.zeros_initializer())\\n\\n act1_f = tf.nn.bias_add(tf.matmul(x, w1f), b1f)\\n\\n if mode == tf.estimator.ModeKeys.PREDICT:\\n return model.make_spec(mode=mode, predictions=act1_f)\\n\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n gact1_f = model.send('act1_f', act1_f, require_grad=True)\\n elif mode == tf.estimator.ModeKeys.EVAL:\\n model.send('act1_f', act1_f, require_grad=False)\\n\\n #acc = model.recv('acc', tf.float32, require_grad=False)\\n auc = model.recv('auc', tf.float32, require_grad=False)\\n loss = model.recv('loss', tf.float32, require_grad=False)\\n logging_hook = tf.train.LoggingTensorHook({\\n 'auc': auc, 'loss': loss,\\n }, every_n_iter=10)\\n step_metric_hook = StepLossAucMetricsHook(loss_tensor=loss, auc_tensor=auc)\\n\\n global_step = tf.train.get_or_create_global_step()\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n optimizer = tf.train.GradientDescentOptimizer(0.1)\\n train_op = model.minimize(optimizer,\\n act1_f,\\n grad_loss=gact1_f,\\n global_step=global_step)\\n return model.make_spec(mode,\\n loss=tf.math.reduce_mean(act1_f),\\n train_op=train_op,\\n training_hooks=[logging_hook, step_metric_hook])\\n if mode == tf.estimator.ModeKeys.EVAL:\\n fake_loss = tf.reduce_mean(act1_f)\\n return model.make_spec(mode=mode, loss=fake_loss,\\n evaluation_hooks=[logging_hook, step_metric_hook])\\n\\n\\nif __name__ == '__main__':\\n logging.basicConfig(level=logging.INFO)\\n flt.trainer_worker.train(ROLE, args, input_fn, model_fn,\\n serving_input_receiver_fn)\\n\",\"follower/config.py\":\"leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\\nleader_label_name = ['label']\\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\\n\",\"leader/config.py\":\"leader_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10', 'x11', 'x12']\\nleader_label_name = ['label']\\nfollower_feature_names = ['x0', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9']\\n\"}", + "value_type": "CODE", + "widget_schema": "{\"component\":\"Code\",\"required\":true}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"activeDeadlineSeconds\": 1200, \n \"fedReplicaSpecs\": {\n \"Master\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n 
\"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(int(self.variables.epoch_num))\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(None)\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(None)\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": workflow.jobs['data-join-job'].name\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": \"\"\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": \"\"\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": self.variables.code_tar\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": \"\" and project.variables.storage_root_path + \"/job_output/\" + \"\" + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": \"\"\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n\n }\n },\n \"replicas\": int(1)\n },\n \"PS\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n 
\"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(1)\n },\n \"Worker\": {\n \"backoffLimit\": 6,\n \"mustSuccess\": True,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": \"train\"\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(1)\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": \"\"\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": self.variables.code_tar\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(1000)\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(None)\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(None)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export WORKER_RANK=$$INDEX && export PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/trainer/run_trainer_worker.sh\"],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(1)\n }\n }\n }\n}" + }, + { + "dependencies": [ + { + "source": "data-join-job" + }, + { + "source": "psi-data-join-job" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "TREE_MODEL_TRAINING", + "name": "tree-train", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "max_depth", + "tag": "", + "typed_value": "3", + "value": "3", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_iters", + "tag": "", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_ext", + "tag": "", + "typed_value": ".data", + "value": ".data", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_type", + "tag": "", + "typed_value": "tfrecord", + "value": "tfrecord", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + } + ], + "yaml_template": 
"{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": \"train\"\n },\n {\n \"name\": \"LOSS_TYPE\",\n \"value\": \"logistic\"\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": str(workflow.jobs['psi-data-join-job'].name)\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"VALIDATION_DATA_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"NO_DATA\",\n \"value\": str(False)\n },\n {\n \"name\": \"FILE_EXT\",\n \"value\": str(self.variables.file_ext)\n },\n {\n \"name\": \"FILE_TYPE\",\n \"value\": str(self.variables.file_type)\n },\n {\n \"name\": \"LOAD_MODEL_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_MODEL_NAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(1)\n },\n {\n \"name\": \"LEARNING_RATE\",\n \"value\": str(0.3)\n },\n {\n \"name\": \"MAX_ITERS\",\n \"value\": str(int(self.variables.max_iters))\n },\n {\n \"name\": \"MAX_DEPTH\",\n \"value\": str(int(self.variables.max_depth))\n },\n {\n \"name\": \"MAX_BINS\",\n \"value\": str(33)\n },\n {\n \"name\": \"L2_REGULARIZATION\",\n \"value\": str(1.0)\n },\n {\n \"name\": \"NUM_PARALLEL\",\n \"value\": str(1)\n },\n {\n \"name\": \"VERIFY_EXAMPLE_IDS\",\n \"value\": str(False)\n },\n {\n \"name\": \"IGNORE_FIELDS\",\n \"value\": \"\"\n },\n {\n \"name\": \"CAT_FIELDS\",\n \"value\": \"\"\n },\n {\n \"name\": \"LABEL_FIELD\",\n \"value\": \"label\"\n },\n {\n \"name\": \"SEND_SCORES_TO_FOLLOWER\",\n \"value\": str(False)\n },\n {\n \"name\": \"SEND_METRICS_TO_FOLLOWER\",\n \"value\": str(False)\n },\n {\n \"name\": \"ENABLE_PACKING\",\n \"value\": str(True)\n },\n {\n \"name\": \"ES_BATCH_SIZE\",\n \"value\": str(10)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n 
\"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_tree_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"4Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"4Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": 1\n }\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "image", + "tag": "", + "typed_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "num_partitions", + "tag": "", + "typed_value": "1", + "value": "1", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "", + "typed_value": "Leader", + "value": "Leader", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"]}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "input_base_dir", + "tag": "", + "typed_value": "/app/deploy/integrated_test/credit_default", + "value": "/app/deploy/integrated_test/credit_default", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_wildcard", + "tag": "", + "typed_value": "*guest.csv", + "value": "*guest.csv", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "input_data_format", + "tag": "", + "typed_value": "CSV_DICT", + "value": "CSV_DICT", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"CSV_DICT\",\"TF_RECORD\"]}" + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "data-join-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n 
},\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n ${Slot_volume_mounts}\n ,\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n ${Slot_volumes}\n\n }\n },\n \"pair\": true,\n \"replicas\": ${Slot_master_replicas}\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"MIN_MATCHING_WINDOW\",\n \"value\": str(${Slot_min_matching_window})\n },\n {\n \"name\": \"MAX_MATCHING_WINDOW\",\n \"value\": str(${Slot_max_matching_window})\n },\n {\n \"name\": \"RAW_DATA_ITER\",\n \"value\": ${Slot_raw_data_iter}\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(${Slot_enable_negative_example_generator})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n ${Slot_volume_mounts}\n ,\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n 
\"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n ${Slot_volumes}\n\n }\n },\n \"pair\": true,\n \"replicas\": ${Slot_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Master 
Pods数量", + "label": "Master的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_max_matching_window": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,the max matching window for example join. <=0 means window size is infinite", + "label": "最大匹配滑窗", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_min_matching_window": { + "default": "", + "default_value": 1024.0, + "help": "建议不修改,the min matching window for example join ,<=0 means window size is infinite", + "label": "最小匹配滑窗", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw_data文件类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + "reference": "workflow.jobs['raw-data-job-streaming'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_start_time": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", 
+ "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "nn-train": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(${Slot_epoch_num})\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(${Slot_start_date})\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(${Slot_end_date})\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": ${Slot_online_training}\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": ${Slot_checkpoint_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": ${Slot_load_checkpoint_filename}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": ${Slot_load_checkpoint_filename_with_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + \"/job_output/\" + ${Slot_load_checkpoint_from_job} + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": ${Slot_export_path}\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_master_replicas})\n },\n \"PS\": {\n \"template\": {\n \"spec\": {\n 
\"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + ${Slot_ps_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_ps_replicas})\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(${Slot_save_checkpoint_steps})\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(${Slot_save_checkpoint_secs})\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(${Slot_summary_save_steps})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_trainer_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_worker_replicas})\n }\n }\n }\n}\n", + "slots": { + "Slot_checkpoint_path": { + "default": "", + "default_value": "", + "help": 
"不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "label": "CHECKPOINT_PATH", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_key": { + "default": "", + "default_value": "", + "help": "代码tar包地址,如果为空则使用code tar", + "label": "模型代码路径", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_tar": { + "default": "", + "default_value": "", + "help": "代码包,variable中请使用代码类型", + "label": "代码", + "reference": "self.variables.code_tar", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_data_source": { + "default": "", + "default_value": "", + "help": "必须修改,求交任务的名字", + "label": "数据源", + "reference": "workflow.jobs['data-join-job'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_end_date": { + "default": "", + "default_value": null, + "help": "training data end date", + "label": "结束时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_epoch_num": { + "default": "", + "default_value": 1.0, + "help": "number of epoch for training, not support in online training", + "label": "epoch数量", + "reference": "self.variables.epoch_num", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_export_path": { + "default": "", + "default_value": "", + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "label": "EXPORT_PATH", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_load_checkpoint_filename": { + "default": "", + "default_value": "", + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "label": "LOAD_CHECKPOINT_FILENAME", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_filename_with_path": { + "default": "", + "default_value": "", + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "label": "从绝对路径加载checkpoint", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_from_job": { + "default": "", + "default_value": "", + "help": "指定任务名job_output下的latest checkpoint", + "label": "以任务名加载checkpoint", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Master Pods数量", 
+ "label": "Master的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_mode": { + "default": "", + "default_value": "train", + "help": "choices:['train','eval'] 训练还是验证", + "label": "模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_online_training": { + "default": "", + "default_value": "", + "help": "['','--online_training'] 否 是,the train master run for online training", + "label": "是否在线训练", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "PS的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_envs": { + "default": "", + "default_value": [], + "help": "数组类型,ps pod额外的环境变量", + "label": "PS额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_ps_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "PS的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的PS Pods数量", + "label": "PS的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_save_checkpoint_secs": { + "default": "", + "default_value": null, + "help": "int,Number of secs between checkpoints.", + "label": "SAVE_CHECKPOINT_SECS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_save_checkpoint_steps": { + "default": "", + "default_value": 1000.0, + "help": "int, Number of steps between checkpoints.", + "label": "SAVE_CHECKPOINT_STEPS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_sparse_estimator": { + "default": "", + "default_value": false, + "help": "bool,default False Whether using sparse estimator.", + "label": "SPARSE_ESTIMATOR", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_start_date": { + "default": "", + "default_value": null, + "help": "training data start date", + "label": "开始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_suffle_data_block": { + "default": "", + "default_value": "", + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "label": "是否shuffle数据块", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_summary_save_steps": { + "default": "", + "default_value": null, + "help": "int, Number of steps to save summary files.", + "label": "SUMMARY_SAVE_STEPS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_verbosity": { + "default": "", + "default_value": 1.0, + "help": "int, Logging level", + "label": "日志等级", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": 
"/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Worker Pods数量", + "label": "Worker的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + } + }, + "variables": [] + }, + "psi-data-join-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": 
${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": ${Slot_rsa_key_pem}\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": ${Slot_rsa_key_path}\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": ${Slot_rsa_private_key_path}\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": ${Slot_kms_key_name}\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": ${Slot_kms_client}\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": ${Slot_psi_raw_data_iter}\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": ${Slot_data_block_builder}\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": ${Slot_psi_output_builder}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(${Slot_psi_read_ahead_size})\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(${Slot_run_merger_read_ahead_buffer})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(${Slot_enable_negative_example_generator})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n 
\"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_partition_num})\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "data block output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_kms_client": { + "default": "", + "default_value": "data.aml.fl", + "help": "kms client", + "label": "kms client", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_kms_key_name": { + "default": "", + "default_value": "", + "help": "kms中的密钥名称,站内镜像需使用KMS", + "label": "密钥名称", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + 
"value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_psi_output_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "PSI output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw data数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_read_ahead_size": { + "default": "", + "default_value": null, + "help": "建议不填, the read ahead size for raw data", + "label": "psi_read_ahead_size", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + "reference": "workflow.jobs['raw-data-job-psi'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_rsa_key_path": { + "default": "", + "default_value": "", + "help": "RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填", + "label": "RSA钥匙地址", + "reference": "self.variables.rsa_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_key_pem": { + "default": "", + "default_value": "", + "help": "RSA公钥,follower需提供", + "label": "RSA公钥", + "reference": "self.variables.rsa_key_pem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_private_key_path": { + "default": "", + "default_value": "", + "help": "RSA私钥的地址, leader必填", + "label": "RSA私钥地址", + "reference": "self.variables.rsa_private_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_run_merger_read_ahead_buffer": { + "default": "", + "default_value": null, + "help": "建议不填, sort run merger read ahead buffer", + "label": "run_merger_read_ahead_buffer", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_start_time": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": 
"PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "raw-data-job-psi": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": 
\"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + 
"reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "PSI", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "workflow.variables.file_wildcard", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "workflow.variables.input_base_dir", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "workflow.variables.input_data_format", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. 
\"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "raw-data-job-streaming": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": 
self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n 
\"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "Streaming", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "workflow.variables.file_wildcard", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "workflow.variables.input_base_dir", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "workflow.variables.input_data_format", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + 
"Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. \"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "tree-train": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": 
\"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"LOSS_TYPE\",\n \"value\": ${Slot_loss_type}\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": ${Slot_data_path}\n },\n {\n \"name\": \"VALIDATION_DATA_PATH\",\n \"value\": ${Slot_validation_data_path}\n },\n {\n \"name\": \"NO_DATA\",\n \"value\": str(${Slot_no_data})\n },\n {\n \"name\": \"FILE_EXT\",\n \"value\": ${Slot_file_ext}\n },\n {\n \"name\": \"FILE_TYPE\",\n \"value\": ${Slot_file_type}\n },\n {\n \"name\": \"LOAD_MODEL_PATH\",\n \"value\": ${Slot_load_model_path}\n },\n {\n \"name\": \"LOAD_MODEL_NAME\",\n \"value\": ${Slot_load_model_name}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"LEARNING_RATE\",\n \"value\": str(${Slot_learning_rate})\n },\n {\n \"name\": \"MAX_ITERS\",\n \"value\": str(${Slot_max_iters})\n },\n {\n \"name\": \"MAX_DEPTH\",\n \"value\": str(${Slot_max_depth})\n },\n {\n \"name\": \"MAX_BINS\",\n \"value\": str(${Slot_max_bins})\n },\n {\n \"name\": \"L2_REGULARIZATION\",\n \"value\": str(${Slot_l2_regularization})\n },\n {\n \"name\": \"NUM_PARALLEL\",\n \"value\": str(${Slot_num_parallel})\n },\n {\n \"name\": \"VERIFY_EXAMPLE_IDS\",\n \"value\": str(${Slot_verify_example_ids})\n },\n {\n \"name\": \"IGNORE_FIELDS\",\n \"value\": ${Slot_ignore_fields}\n },\n {\n \"name\": \"CAT_FIELDS\",\n \"value\": ${Slot_cat_fields}\n },\n {\n \"name\": \"LABEL_FIELD\",\n \"value\": ${Slot_label_field}\n },\n {\n \"name\": \"SEND_SCORES_TO_FOLLOWER\",\n \"value\": str(${Slot_send_scores_to_follower})\n },\n {\n \"name\": \"SEND_METRICS_TO_FOLLOWER\",\n \"value\": str(${Slot_send_metrics_to_follower})\n },\n {\n \"name\": \"ENABLE_PACKING\",\n \"value\": str(${Slot_enable_packing})\n },\n {\n \"name\": \"ES_BATCH_SIZE\",\n \"value\": str(${Slot_es_batch_size})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n 
\"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_tree_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_mem}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_mem}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n }\n }\n }\n}\n", + "slots": { + "Slot_cat_fields": { + "default": "", + "default_value": "", + "help": "类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex", + "label": "类别类型特征", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_path": { + "default": "", + "default_value": "", + "help": "数据存放位置", + "label": "数据存放位置", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_source": { + "default": "", + "default_value": "", + "help": "求交数据集名称", + "label": "求交数据集名称", + "reference": "workflow.jobs['psi-data-join-job'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_enable_packing": { + "default": "", + "default_value": true, + "help": "是否开启优化", + "label": "是否开启优化", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_es_batch_size": { + "default": "", + "default_value": 10.0, + "help": "ES_BATCH_SIZE", + "label": "ES_BATCH_SIZE", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_file_ext": { + "default": "", + "default_value": ".csv", + "help": "文件后缀", + "label": "文件后缀", + "reference": "self.variables.file_ext", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_file_type": { + "default": "", + "default_value": "csv", + "help": "文件类型,csv或tfrecord", + "label": "文件类型,csv或tfrecord", + "reference": "self.variables.file_type", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_ignore_fields": { + "default": "", + "default_value": "", + "help": "以逗号分隔如:name,age,sex", + "label": "不入模的特征", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_l2_regularization": { + "default": "", + "default_value": 1.0, + "help": "L2惩罚系数", + "label": "L2惩罚系数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_label_field": { + "default": "", + "default_value": "label", + "help": "label特征名", + "label": "label特征名", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_learning_rate": { + "default": "", + "default_value": 0.3, + "help": "学习率", + "label": "学习率", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_load_model_name": { + "default": "", + "default_value": "", + "help": "按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models", + "label": "模型任务名称", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_model_path": { + "default": "", + "default_value": "", + "help": 
"模型文件地址", + "label": "模型文件地址", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_loss_type": { + "default": "", + "default_value": "logistic", + "help": "损失函数类型,logistic或mse,默认logistic", + "label": "损失函数类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_max_bins": { + "default": "", + "default_value": 33.0, + "help": "最大分箱数", + "label": "最大分箱数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_max_depth": { + "default": "", + "default_value": 3.0, + "help": "最大深度", + "label": "最大深度", + "reference": "self.variables.max_depth", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_max_iters": { + "default": "", + "default_value": 5.0, + "help": "树的数量", + "label": "迭代数", + "reference": "self.variables.max_iters", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_mode": { + "default": "", + "default_value": "train", + "help": "任务类型,train或eval", + "label": "任务类型,train或eval", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_no_data": { + "default": "", + "default_value": false, + "help": "Leader是否没数据", + "label": "Leader是否没数据", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_num_parallel": { + "default": "", + "default_value": 1.0, + "help": "进程数量", + "label": "进程数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_send_metrics_to_follower": { + "default": "", + "default_value": false, + "help": "是否发送指标到follower", + "label": "是否发送指标到follower", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_send_scores_to_follower": { + "default": "", + "default_value": false, + "help": "是否发送结果到follower", + "label": "是否发送结果到follower", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_validation_data_path": { + "default": "", + "default_value": "", + "help": "验证数据集地址", + "label": "验证数据集地址", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_verbosity": { + "default": "", + "default_value": 1.0, + "help": "日志输出等级", + "label": "日志输出等级", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_verify_example_ids": { + "default": "", + "default_value": false, + "help": "是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower", + "label": "是否检查example_id对齐", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": 
"建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "所需CPU", + "label": "所需CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_mem": { + "default": "", + "default_value": "4Gi", + "help": "所需内存", + "label": "所需内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + } + } + }, + "group_alias": "e2e-test", + "name": "e2e-fed-right" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-local.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-local.json new file mode 100644 index 000000000..fa5294234 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-local.json @@ -0,0 +1,285 @@ +{ + "comment": "", + "config": { + "group_alias": "e2e-test", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": \"Streaming\"\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(4)\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": \"/app/deploy/integrated_test/tfrecord_raw_data\"\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": \"*.rd\"\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"--check_success_tag\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 
50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(1024)\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 4\n }\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "image", + "typed_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "raw-data-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": 
\"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n 
\"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "--check_success_tag", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "Streaming", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. 
xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. 
\"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + } + } + }, + "group_alias": "e2e-test", + "name": "e2e-local" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-sparse-estimator-test-right.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-sparse-estimator-test-right.json new file mode 100644 index 000000000..742d28904 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/e2e-sparse-estimator-test-right.json @@ -0,0 +1,1127 @@ +{ + "comment": "", + "config": { + "group_alias": "e2e-test", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job-psi", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n 
\"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": \"PSI\"\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(workflow.variables.input_base_dir)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(workflow.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(1024)\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": str(workflow.variables.input_data_format)\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n 
\"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job-psi" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "PSI_DATA_JOIN", + "name": "psi-data-join-job", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_pem", + "tag": "", + "typed_value": "-----BEGIN RSA PUBLIC KEY-----\nMIGJAoGBAMZYpBzYDnROmrqC8LhDXhgW13E/JuTUHkHKsGwPScnp5TAueqo53ayu\nYzSlLrI+yQp206Kb/C+w/VdWJcLLAjAUBGqfZvCnsmpfOMt+s3JrNH24RCg282m/\nnIdpoVqb7SEDFlJPq3s0g/oZ5v0c74Yy5J/DuuaWcuU7URuYRbbnAgMBAAE=\n-----END RSA PUBLIC KEY-----\n", + "value": "-----BEGIN RSA PUBLIC KEY-----\nMIGJAoGBAMZYpBzYDnROmrqC8LhDXhgW13E/JuTUHkHKsGwPScnp5TAueqo53ayu\nYzSlLrI+yQp206Kb/C+w/VdWJcLLAjAUBGqfZvCnsmpfOMt+s3JrNH24RCg282m/\nnIdpoVqb7SEDFlJPq3s0g/oZ5v0c74Yy5J/DuuaWcuU7URuYRbbnAgMBAAE=\n-----END RSA PUBLIC KEY-----\n", + "value_type": "STRING", + "widget_schema": "{\"component\":\"TextArea\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_private_key_path", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_path", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_cpu", + "tag": "", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_mem", + "tag": "", + "typed_value": "3Gi", + "value": "3Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(workflow.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(workflow.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": 
str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-psi'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": \"2000m\",\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job-psi'].name)\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": str(self.variables.rsa_key_pem)\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": str(self.variables.rsa_key_path)\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": str(self.variables.rsa_private_key_path)\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": \"data.aml.fl\"\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n 
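The worker env above wires `RSA_KEY_PEM` / `RSA_PRIVATE_KEY_PATH` into the RSA-PSI join: the follower blinds hashed example ids, the leader signs them blindly with its private key, and the follower unblinds so both sides can compare signatures without exposing raw ids. A toy sketch of just the blinding arithmetic, with a deliberately tiny key; the real job uses the 1024-bit key carried in `rsa_key_pem` and fedlearner's own hashing and serialization:

```python
# Toy RSA blind-signature round-trip illustrating the PSI blinding step.
import hashlib
import math
import random

p, q = 61, 53                      # deliberately tiny toy primes
n, phi = p * q, (p - 1) * (q - 1)
e = 17                             # public exponent
d = pow(e, -1, phi)                # leader's private exponent

def h(example_id: str) -> int:
    return int.from_bytes(hashlib.sha256(example_id.encode()).digest(), "big") % n

example_id = "uid-42"

# Follower: blind H(id) with a random r coprime to n.
while True:
    r = random.randrange(2, n)
    if math.gcd(r, n) == 1:
        break
blinded = (h(example_id) * pow(r, e, n)) % n

# Leader: sign blindly without learning H(id).
blind_sig = pow(blinded, d, n)

# Follower: unblind; the result equals H(id)^d mod n on both sides.
sig = (blind_sig * pow(r, -1, n)) % n
assert sig == pow(h(example_id), d, n)
```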
{\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(None)\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(None)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": int(int(workflow.variables.num_partitions))\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "psi-data-join-job" + } + ], + "easy_mode": false, + "is_federated": true, + "job_type": "NN_MODEL_TRANINING", + "name": "nn-train-job", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "code_tar", + "tag": "", + "typed_value": { + "follower/main.py": "# encoding=utf8\nimport os\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\n# from byted_deepinsight import DeepInsight2Hook\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=16,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=False,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map[\"label\"] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'label': features.pop('label')}\n return features, labels\n\n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n return dataset\n\n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['logits'] = tf.placeholder(dtype=tf.float32, name='logits')\n return tf.estimator.export.build_raw_serving_input_receiver_fn(features)()\n\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n logits = model.recv('logits', tf.float32, require_grad=True)\n else:\n logits = features['logits']\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['label'], tf.float32)\n logits = tf.reshape(logits, y.shape)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n # send auc back to leader\n model.send('auc', auc, require_grad=False)\n model.send('loss', loss, require_grad=False)\n \n ## visulization with tensorboard\n # current_time = datetime.datetime.now().strftime(\"%Y%m%d_%H%M%S\")\n # train_log_dir = os.path.join(args.tensorboard_log, current_time)\n # loss_op = tf.summary.scalar('train_loss', loss)\n # auc_op = tf.summary.scalar('train_auc', auc)\n # summary_hook = tf.train.SummarySaverHook(\n # save_steps=5,\n # output_dir=train_log_dir,\n # summary_op=[loss_op, auc_op])\n \n ## visulization with deepinsight\n # uid_tensor = tf.reshape(features['uid'], shape=[-1])\n # req_time_tensor = tf.reshape(features['req_time'], shape=[-1])\n # score_tensor = tf.reshape(pred, shape=[args.batch_size])\n # label_tensor = tf.reshape(y, shape=[-1])\n # # logging.info(\"==> uid tensor : %s, req_time_tensor: %s, score_tensor: %s, label_tensor: %s\" % (uid_tensor, req_time_tensor, score_tensor, label_tensor))\n # deep_insight_hook = DeepInsight2Hook(uid_tensor, req_time_tensor, score_tensor, label_tensor)\n \n train_hooks = [logging_hook] #, summary_hook, deep_insight_hook]\n\n # optimizer = tf.train.GradientDescentOptimizer(0.1)\n # optimizer = tf.train.AdagradOptimizer(0.1)\n # optimizer = tf.train.AdamOptimizer()\n optimizer = tf.train.FtrlOptimizer(learning_rate=0.16921544485102483, \n l1_regularization_strength=1e-05, l2_regularization_strength=0.0005945795938393141,\n initial_accumulator_value=0.44352,\n learning_rate_power=-0.59496)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=train_hooks)\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\n\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n try:\n flt.trainer_worker.train(\n 
ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n except ValueError as err:\n logging.info('cannot save model as there is no parameters: details:{}'.format(err))\n\n\n", + "main.py": "" + }, + "value": "{\"main.py\":\"\",\"follower/main.py\":\"# encoding=utf8\\nimport os\\nimport logging\\nimport datetime\\n\\nimport tensorflow.compat.v1 as tf\\n\\nimport fedlearner.trainer as flt\\n# from byted_deepinsight import DeepInsight2Hook\\n\\nROLE = 'follower'\\n\\nparser = flt.trainer_worker.create_argument_parser()\\nparser.add_argument('--batch-size', type=int, default=16,\\n help='Training batch size.')\\nparser.add_argument('--clean-model', type=bool, default=False,\\n help='clean checkpoint and saved_model')\\nargs = parser.parse_args()\\n\\n\\ndef apply_clean():\\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\\n tf.logging.info(\\\"--clean_model flag set. Removing existing checkpoint_path dir:\\\"\\n \\\" {}\\\".format(args.checkpoint_path))\\n tf.io.gfile.rmtree(args.checkpoint_path)\\n\\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\\n tf.logging.info(\\\"--clean_model flag set. Removing existing savedmodel dir:\\\"\\n \\\" {}\\\".format(args.export_path))\\n tf.io.gfile.rmtree(args.export_path)\\n\\n\\ndef input_fn(bridge, trainer_master=None):\\n dataset = flt.data.DataBlockLoader(\\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\\n \\n def parse_fn(example):\\n feature_map = {}\\n feature_map[\\\"example_id\\\"] = tf.FixedLenFeature([], tf.string)\\n feature_map[\\\"label\\\"] = tf.FixedLenFeature([], tf.int64)\\n features = tf.parse_example(example, features=feature_map)\\n labels = {'label': features.pop('label')}\\n return features, labels\\n\\n dataset = dataset.map(map_func=parse_fn,\\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\\n return dataset\\n\\n\\ndef raw_serving_input_receiver_fn():\\n features = {}\\n features['logits'] = tf.placeholder(dtype=tf.float32, name='logits')\\n return tf.estimator.export.build_raw_serving_input_receiver_fn(features)()\\n\\n\\ndef model_fn(model, features, labels, mode):\\n global_step = tf.train.get_or_create_global_step()\\n \\n if mode == tf.estimator.ModeKeys.TRAIN:\\n logits = model.recv('logits', tf.float32, require_grad=True)\\n else:\\n logits = features['logits']\\n\\n if mode == tf.estimator.ModeKeys.TRAIN:\\n y = tf.dtypes.cast(labels['label'], tf.float32)\\n logits = tf.reshape(logits, y.shape)\\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\\n labels=y, logits=logits)\\n loss = tf.math.reduce_mean(loss)\\n\\n # cala auc\\n pred = tf.math.sigmoid(logits)\\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\\n\\n logging_hook = tf.train.LoggingTensorHook(\\n {\\\"loss\\\": loss, \\\"auc\\\": auc}, every_n_iter=10)\\n\\n # send auc back to leader\\n model.send('auc', auc, require_grad=False)\\n model.send('loss', loss, require_grad=False)\\n \\n ## visulization with tensorboard\\n # current_time = datetime.datetime.now().strftime(\\\"%Y%m%d_%H%M%S\\\")\\n # train_log_dir = os.path.join(args.tensorboard_log, current_time)\\n # loss_op = tf.summary.scalar('train_loss', loss)\\n # auc_op = tf.summary.scalar('train_auc', auc)\\n # summary_hook = tf.train.SummarySaverHook(\\n # save_steps=5,\\n # output_dir=train_log_dir,\\n # summary_op=[loss_op, auc_op])\\n \\n ## visulization with deepinsight\\n # uid_tensor = tf.reshape(features['uid'], shape=[-1])\\n # req_time_tensor = 
tf.reshape(features['req_time'], shape=[-1])\\n # score_tensor = tf.reshape(pred, shape=[args.batch_size])\\n # label_tensor = tf.reshape(y, shape=[-1])\\n # # logging.info(\\\"==> uid tensor : %s, req_time_tensor: %s, score_tensor: %s, label_tensor: %s\\\" % (uid_tensor, req_time_tensor, score_tensor, label_tensor))\\n # deep_insight_hook = DeepInsight2Hook(uid_tensor, req_time_tensor, score_tensor, label_tensor)\\n \\n train_hooks = [logging_hook] #, summary_hook, deep_insight_hook]\\n\\n # optimizer = tf.train.GradientDescentOptimizer(0.1)\\n # optimizer = tf.train.AdagradOptimizer(0.1)\\n # optimizer = tf.train.AdamOptimizer()\\n optimizer = tf.train.FtrlOptimizer(learning_rate=0.16921544485102483, \\n l1_regularization_strength=1e-05, l2_regularization_strength=0.0005945795938393141,\\n initial_accumulator_value=0.44352,\\n learning_rate_power=-0.59496)\\n train_op = model.minimize(optimizer, loss, global_step=global_step)\\n return model.make_spec(mode, loss=loss, train_op=train_op,\\n training_hooks=train_hooks)\\n\\n if mode == tf.estimator.ModeKeys.PREDICT:\\n return model.make_spec(mode, predictions=logits)\\n\\n\\n\\nif __name__ == '__main__':\\n logging.basicConfig(\\n level=logging.INFO,\\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\\n )\\n apply_clean()\\n try:\\n flt.trainer_worker.train(\\n ROLE, args, input_fn,\\n model_fn, raw_serving_input_receiver_fn)\\n except ValueError as err:\\n logging.info('cannot save model as there is no parameters: details:{}'.format(err))\\n\\n\\n\"}", + "value_type": "CODE", + "widget_schema": "{\"component\":\"Code\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "epoch_num", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "pod_cpu", + "tag": "", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "pod_mem", + "tag": "", + "typed_value": "8Gi", + "value": "8Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_pod_num", + "tag": "", + "typed_value": "1", + "value": "1", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "sparse_estimator", + "tag": "", + "typed_value": "false", + "value": "false", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"true\",\"false\"]}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"activeDeadlineSeconds\": 1200, \n \"fedReplicaSpecs\": {\n \"Master\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + 
\"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(int(self.variables.epoch_num))\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(None)\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(None)\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": workflow.jobs['psi-data-join-job'].name\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": \"\"\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(bool(self.variables.sparse_estimator))\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": \"\"\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": self.variables.code_tar\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": \"\" and project.variables.storage_root_path + \"/job_output/\" + \"\" + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": \"\"\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": self.variables.pod_cpu,\n \"memory\": self.variables.pod_mem\n },\n \"requests\": {\n \"cpu\": self.variables.pod_cpu,\n \"memory\": self.variables.pod_mem\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n\n }\n },\n \"replicas\": int(1)\n },\n \"PS\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": self.variables.pod_cpu,\n \"memory\": self.variables.pod_mem\n },\n \"requests\": {\n \"cpu\": self.variables.pod_cpu,\n \"memory\": self.variables.pod_mem\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": 
int(1)\n },\n \"Worker\": {\n \"backoffLimit\": 6,\n \"mustSuccess\": True,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": \"train\"\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(1)\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": \"\"\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": self.variables.code_tar\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(1000)\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(None)\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(bool(self.variables.sparse_estimator))\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(None)\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": workflow.variables.image,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export WORKER_RANK=$$INDEX && export PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/trainer/run_trainer_worker.sh\"],\n \"resources\": {\n \"limits\": {\n \"cpu\": self.variables.pod_cpu,\n \"memory\": \"3Gi\"\n },\n \"requests\": {\n \"cpu\": self.variables.pod_cpu,\n \"memory\": \"3Gi\"\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(int(self.variables.worker_pod_num))\n }\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "image", + "tag": "", + "typed_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "num_partitions", + "tag": "", + "typed_value": "4", + "value": "4", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "", + "typed_value": "Follower", + "value": "Follower", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"]}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "input_base_dir", + "tag": "", + "typed_value": "/data/upload/test_sparse_estimator", + "value": "/data/upload/test_sparse_estimator", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_wildcard", + "tag": "", + "typed_value": "*part-r-*", + "value": "*part-r-*", + "value_type": "STRING", 
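The follower code embedded in `code_tar` above computes `tf.nn.sigmoid_cross_entropy_with_logits` on logits received from the leader, then averages it with `tf.math.reduce_mean`. TF's documented numerically stable form of that loss can be verified in plain NumPy; this is a worked check of the math, not fedlearner code:

```python
# NumPy check of the loss the follower computes from the leader's logits.
# tf.nn.sigmoid_cross_entropy_with_logits uses the stable form
# max(x, 0) - x*z + log(1 + exp(-|x|)) for logits x and labels z.
import numpy as np

def sigmoid_xent(logits, labels):
    x, z = np.asarray(logits, float), np.asarray(labels, float)
    return np.maximum(x, 0) - x * z + np.log1p(np.exp(-np.abs(x)))

x = np.array([-2.0, 0.5, 3.0])
z = np.array([0.0, 1.0, 1.0])
s = 1 / (1 + np.exp(-x))
naive = -(z * np.log(s) + (1 - z) * np.log(1 - s))
assert np.allclose(sigmoid_xent(x, z), naive)
print(sigmoid_xent(x, z).mean())  # matches tf.math.reduce_mean(loss)
```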
+ "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "input_data_format", + "tag": "", + "typed_value": "TF_RECORD", + "value": "TF_RECORD", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"CSV_DICT\",\"TF_RECORD\"]}" + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "nn-train-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(${Slot_epoch_num})\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(${Slot_start_date})\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(${Slot_end_date})\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": ${Slot_online_training}\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": ${Slot_checkpoint_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": ${Slot_load_checkpoint_filename}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": ${Slot_load_checkpoint_filename_with_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + \"/job_output/\" + ${Slot_load_checkpoint_from_job} + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": ${Slot_export_path}\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": 
${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_master_replicas})\n },\n \"PS\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + ${Slot_ps_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_ps_replicas})\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(${Slot_save_checkpoint_steps})\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(${Slot_save_checkpoint_secs})\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(${Slot_summary_save_steps})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_trainer_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n 
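The `meta_yaml` here is the template from which the concrete `yaml_template` strings earlier in the file are materialized. Comparing the two suggests that `DEFAULT` slots inline a literal value while `WORKFLOW`/`SELF`/`SYSTEM` slots inline their `reference` expression; the sketch below renders a placeholder string under that assumption. The real webconsole formatter may differ, and numeric coercion is glossed over:

```python
# Hedged sketch of ${Slot_*} substitution; the slot-handling rules are
# inferred from comparing meta_yaml with the rendered yaml_template.
import json
import re

meta = '{"replicas": int(${Slot_worker_replicas}), "image": ${Slot_image}}'
slots = {
    "Slot_worker_replicas": {"reference_type": "DEFAULT", "default_value": 1},
    "Slot_image": {"reference_type": "WORKFLOW",
                   "reference": "workflow.variables.image"},
}

def fill(slot: dict) -> str:
    # DEFAULT slots become literals; other reference types become the
    # reference expression itself (e.g. workflow.variables.image).
    if slot["reference_type"] == "DEFAULT":
        return json.dumps(slot["default_value"])
    return slot["reference"]

rendered = re.sub(r"\$\{(Slot_\w+)\}", lambda m: fill(slots[m.group(1)]), meta)
print(rendered)
# {"replicas": int(1), "image": workflow.variables.image}
```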
\"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_worker_replicas})\n }\n }\n }\n}\n", + "slots": { + "Slot_checkpoint_path": { + "default": "", + "default_value": "", + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "label": "CHECKPOINT_PATH", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_key": { + "default": "", + "default_value": "", + "help": "代码tar包地址,如果为空则使用code tar", + "label": "模型代码路径", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_tar": { + "default": "", + "default_value": "", + "help": "代码包,variable中请使用代码类型", + "label": "代码", + "reference": "self.variables.code_tar", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_data_source": { + "default": "", + "default_value": "", + "help": "必须修改,求交任务的名字", + "label": "数据源", + "reference": "workflow.jobs['psi-data-join-job'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_end_date": { + "default": "", + "default_value": null, + "help": "training data end date", + "label": "结束时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_epoch_num": { + "default": "", + "default_value": 1.0, + "help": "number of epoch for training, not support in online training", + "label": "epoch数量", + "reference": "self.variables.epoch_num", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_export_path": { + "default": "", + "default_value": "", + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "label": "EXPORT_PATH", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_load_checkpoint_filename": { + "default": "", + "default_value": "", + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "label": "LOAD_CHECKPOINT_FILENAME", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_filename_with_path": { + "default": "", + "default_value": "", + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "label": "从绝对路径加载checkpoint", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_from_job": { + "default": "", + "default_value": "", + "help": "指定任务名job_output下的latest checkpoint", + "label": "以任务名加载checkpoint", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "self.variables.pod_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 
所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "self.variables.pod_mem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Master Pods数量", + "label": "Master的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_mode": { + "default": "", + "default_value": "train", + "help": "choices:['train','eval'] 训练还是验证", + "label": "模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_online_training": { + "default": "", + "default_value": "", + "help": "['','--online_training'] 否 是,the train master run for online training", + "label": "是否在线训练", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "PS的CPU", + "reference": "self.variables.pod_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_ps_envs": { + "default": "", + "default_value": [], + "help": "数组类型,ps pod额外的环境变量", + "label": "PS额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_ps_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "PS的内存", + "reference": "self.variables.pod_mem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_ps_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的PS Pods数量", + "label": "PS的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_save_checkpoint_secs": { + "default": "", + "default_value": null, + "help": "int,Number of secs between checkpoints.", + "label": "SAVE_CHECKPOINT_SECS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_save_checkpoint_steps": { + "default": "", + "default_value": 1000.0, + "help": "int, Number of steps between checkpoints.", + "label": "SAVE_CHECKPOINT_STEPS", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_sparse_estimator": { + "default": "", + "default_value": false, + "help": "bool,default False Whether using sparse estimator.", + "label": "SPARSE_ESTIMATOR", + "reference": "self.variables.sparse_estimator", + "reference_type": "SELF", + "value_type": "BOOL" + }, + "Slot_start_date": { + "default": "", + "default_value": null, + "help": "training data start date", + "label": "开始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_suffle_data_block": { + "default": "", + "default_value": "", + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "label": "是否shuffle数据块", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_summary_save_steps": { + "default": "", + "default_value": null, + "help": "int, Number of steps to save summary files.", + "label": "SUMMARY_SAVE_STEPS", + "reference": "", + 
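Several rendered values in this template lean on Python short-circuiting. In particular, `LOAD_CHECKPOINT_PATH` is built as `${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + "/job_output/" + ... + "/checkpoints"`, so an empty job name yields `""` (no checkpoint loaded) while a non-empty one yields the derived path. A tiny demonstration of that idiom:

```python
# The LOAD_CHECKPOINT_PATH expression relies on `and` short-circuiting:
# an empty string is falsy and is returned as-is.
storage_root_path = "/data"  # stands in for project.variables.storage_root_path

def load_checkpoint_path(from_job: str) -> str:
    return from_job and storage_root_path + "/job_output/" + from_job + "/checkpoints"

assert load_checkpoint_path("") == ""
assert load_checkpoint_path("nn-train-job") == "/data/job_output/nn-train-job/checkpoints"
```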
"reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_verbosity": { + "default": "", + "default_value": 1.0, + "help": "int, Logging level", + "label": "日志等级", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "self.variables.pod_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Worker Pods数量", + "label": "Worker的Pod个数", + "reference": "self.variables.worker_pod_num", + "reference_type": "SELF", + "value_type": "INT" + } + }, + "variables": [] + }, + "psi-data-join-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": 
\"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": ${Slot_rsa_key_pem}\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": ${Slot_rsa_key_path}\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": ${Slot_rsa_private_key_path}\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": ${Slot_kms_key_name}\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": ${Slot_kms_client}\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": ${Slot_psi_raw_data_iter}\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": ${Slot_data_block_builder}\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": ${Slot_psi_output_builder}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(${Slot_psi_read_ahead_size})\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(${Slot_run_merger_read_ahead_buffer})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": 
str(${Slot_enable_negative_example_generator})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_partition_num})\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "data block output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_kms_client": { + "default": "", + "default_value": "data.aml.fl", + "help": "kms client", + "label": "kms client", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_kms_key_name": { + "default": "", + "default_value": "", + "help": "kms中的密钥名称,站内镜像需使用KMS", + "label": "密钥名称", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { 
+ "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_psi_output_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "PSI output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw data数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_read_ahead_size": { + "default": "", + "default_value": null, + "help": "建议不填, the read ahead size for raw data", + "label": "psi_read_ahead_size", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + "reference": "workflow.jobs['raw-data-job-psi'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "workflow.variables.role", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_rsa_key_path": { + "default": "", + "default_value": "", + "help": "RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填", + "label": "RSA钥匙地址", + "reference": "self.variables.rsa_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_key_pem": { + "default": "", + "default_value": "", + "help": "RSA公钥,follower需提供", + "label": "RSA公钥", + "reference": "self.variables.rsa_key_pem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_private_key_path": { + "default": "", + "default_value": "", + "help": "RSA私钥的地址, leader必填", + "label": "RSA私钥地址", + "reference": "self.variables.rsa_private_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_run_merger_read_ahead_buffer": { + "default": "", + "default_value": null, + "help": "建议不填, sort run merger read ahead buffer", + "label": "run_merger_read_ahead_buffer", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_start_time": { + "default": "", 
+ "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "self.variables.worker_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "self.variables.worker_mem", + "reference_type": "SELF", + "value_type": "STRING" + } + }, + "variables": [] + }, + "raw-data-job-psi": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": 
${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": 
"choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "PSI", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "workflow.variables.file_wildcard", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "workflow.variables.input_base_dir", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "workflow.variables.input_data_format", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + 
"Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. \"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + } + } + }, + "group_alias": "e2e-test", + "name": "e2e-sparse-estimator-test-right" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-alignment-task.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-alignment-task.json new file mode 100644 index 000000000..faebee076 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-alignment-task.json @@ -0,0 +1,196 @@ +{ + "comment": "", + "config": { + "group_alias": "sys_preset_data_alignment", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "alignment-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": 
dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/dataset_alignment.py\",\n \"arguments\": [\n \"--input_dataset_path=\" + workflow.variables.input_dataset_path,\n \"--input_batch_path=\" + workflow.variables.input_batch_path,\n \"--json_schema=\" + workflow.variables.json_schema,\n \"--wildcard=\" + workflow.variables.wildcard,\n \"--output_dataset_path=\" + workflow.variables.output_dataset_path,\n \"--output_batch_path=\" + workflow.variables.output_batch_path,\n \"--output_error_path=\" + workflow.variables.output_dataset_path + \"/errors\",\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": int(workflow.variables.driver_cores),\n \"coreLimit\": workflow.variables.driver_cores_limit,\n \"memory\": workflow.variables.driver_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": int(workflow.variables.initial_executors),\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executors),\n \"maxExecutors\": int(workflow.variables.max_executors),\n \"minExecutors\": int(workflow.variables.min_executors)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "alignment-job" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n workflow.variables.data_type,\n \"--data_path=\" + workflow.variables.output_dataset_path,\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--buckets_num=\" + workflow.variables.buckets_num,\n \"--thumbnail_path=\" + workflow.variables.thumbnail_path,\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n 
\"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list\n },\n \"executor\": {\n \"cores\": 2,\n \"instances\": 2,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": 2,\n \"maxExecutors\": 64,\n \"minExecutors\": 2,\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "buckets_num", + "tag": "OPERATING_PARAM", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "thumbnail_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_dataset_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "data_type", + "tag": "OPERATING_PARAM", + "typed_value": "tabular", + "value": "tabular", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "wildcard", + "tag": "INPUT_PATH", + "typed_value": "**", + "value": "**", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "json_schema", + "tag": "OPERATING_PARAM", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "driver_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "1", + "value": "1", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "driver_cores_limit", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4000m", + "value": "4000m", + 
"value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "driver_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "initial_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "128", + "value": "128", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "min_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OUTPUT_PATH\",\"hidden\":true}" + } + ] + }, + "editor_info": {}, + "name": "sys-preset-alignment-task" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-analyzer.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-analyzer.json new file mode 100644 index 000000000..2bee220ba --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-analyzer.json @@ -0,0 +1,148 @@ +{ + "comment": "", + "config": { + "group_alias": "sys_preset_analyzer", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n workflow.variables.data_type,\n \"--data_path=\" + workflow.variables.input_batch_path,\n \"--file_wildcard=\" + 
\"batch/**/**\",\n \"--buckets_num=\" + workflow.variables.buckets_num,\n \"--thumbnail_path=\" + workflow.variables.thumbnail_path,\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": int(workflow.variables.driver_cores),\n \"coreLimit\": workflow.variables.driver_cores_limit,\n \"memory\": workflow.variables.driver_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": int(workflow.variables.executor_nums),\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executors),\n \"maxExecutors\": int(workflow.variables.max_executors),\n \"minExecutors\": int(workflow.variables.min_executors)\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\",\"tooltip\":\"输入batch路径\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "data_type", + "tag": "OPERATING_PARAM", + "typed_value": "tabular", + "value": "tabular", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "buckets_num", + "tag": "OPERATING_PARAM", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"the number of buckets for hist\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "thumbnail_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"dir path to save the thumbnails\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "driver_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "1", + "value": "1", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "driver_cores_limit", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "driver_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + 
"widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "executor_nums", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "initial_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "max_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "64", + "value": "64", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "min_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + } + ] + }, + "editor_info": {}, + "group_alias": "sys_preset_analyzer", + "name": "sys-preset-analyzer" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-converter-analyzer.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-converter-analyzer.json new file mode 100644 index 000000000..dc3387407 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-converter-analyzer.json @@ -0,0 +1,246 @@ +{ + "comment": "", + "config": { + "group_alias": "sys_preset_converter_analyzer", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "format-checker", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/dataset_format_checker.py\",\n \"arguments\": [\n workflow.variables.data_type,\n \"--input_batch_path=\" + workflow.variables.input_batch_path,\n \"--format=\" + workflow.variables.file_format,\n \"--checkers=\" + workflow.variables.checkers,\n ],\n \"sparkVersion\": 
\"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": int(workflow.variables.driver_cores),\n \"coreLimit\": workflow.variables.driver_cores_limit,\n \"memory\": workflow.variables.driver_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": int(workflow.variables.executor_nums),\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executors),\n \"maxExecutors\": int(workflow.variables.max_executors),\n \"minExecutors\": int(workflow.variables.min_executors)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "format-checker" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "converter", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/converter_v2.py\",\n \"arguments\": [\n workflow.variables.data_type,\n \"--output_dataset_path=\" + workflow.variables.dataset_path, \n \"--output_batch_path=\" + workflow.variables.batch_path,\n \"--input_batch_path=\" + workflow.variables.input_batch_path,\n \"--format=\" + workflow.variables.file_format,\n \"--manifest_name=\" + workflow.variables.manifest_name,\n \"--images_dir_name=\" + workflow.variables.images_dir_name,\n \"--import_type=\" + workflow.variables.import_type,\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": int(workflow.variables.driver_cores),\n \"coreLimit\": workflow.variables.driver_cores_limit,\n \"memory\": workflow.variables.driver_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": int(workflow.variables.executor_nums),\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": 
int(workflow.variables.initial_executors),\n \"maxExecutors\": int(workflow.variables.max_executors),\n \"minExecutors\": int(workflow.variables.min_executors)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "converter" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n workflow.variables.data_type,\n \"--data_path=\" + workflow.variables.dataset_path,\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--buckets_num=\" + workflow.variables.buckets_num,\n \"--thumbnail_path=\" + workflow.variables.thumbnail_path,\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n \"--skip\" if str(workflow.variables.skip_analyzer)==\"true\" else ''\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": int(workflow.variables.driver_cores),\n \"coreLimit\": workflow.variables.driver_cores_limit,\n \"memory\": workflow.variables.driver_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": int(workflow.variables.executor_nums),\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executors),\n \"maxExecutors\": int(workflow.variables.max_executors),\n \"minExecutors\": int(workflow.variables.min_executors)\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "file_format", + "tag": "OPERATING_PARAM", + "typed_value": "csv", + "value": "csv", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + 
"value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "data_type", + "tag": "OPERATING_PARAM", + "typed_value": "tabular", + "value": "tabular", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "manifest_name", + "tag": "OPERATING_PARAM", + "typed_value": "manifest.json", + "value": "manifest.json", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"manifest file name in image dataset directory\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "images_dir_name", + "tag": "OPERATING_PARAM", + "typed_value": "images", + "value": "images", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"images directory name in image dataset directory\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "buckets_num", + "tag": "OPERATING_PARAM", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"the number of buckets for hist\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "thumbnail_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"dir path to save the thumbnails\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "driver_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "1", + "value": "1", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "driver_cores_limit", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "driver_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "executor_nums", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4", + "value": "4", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "initial_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": 
"{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "max_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "64", + "value": "64", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "min_executors", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "checkers", + "tag": "OPERATING_PARAM", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"OPERATING_PARAM\",\"tooltip\":\"数据集导入检查项\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "skip_analyzer", + "tag": "OPERATING_PARAM", + "typed_value": "false", + "value": "false", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OPERATING_PARAM\",\"hidden\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "import_type", + "tag": "OPERATING_PARAM", + "typed_value": "COPY", + "value": "COPY", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OUTPUT_PATH\",\"hidden\":true}" + } + ] + }, + "editor_info": {}, + "group_alias": "sys_preset_converter_analyzer", + "name": "sys-preset-converter-analyzer" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-export-dataset.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-export-dataset.json new file mode 100644 index 000000000..6e57e1fc3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-export-dataset.json @@ -0,0 +1,277 @@ +{ + "name": "sys-preset-export-dataset", + "group_alias": "sys_preset_export_dataset", + "config": { + "group_alias": "sys_preset_export_dataset", + "variables": [ + { + "name": "dataset_path", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"待倒出数据集路径\",\"hidden\":true,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "export_path", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"导出路径\",\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}", + "typed_value": "", + "tag": "OUTPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "file_wildcard", + "value": "batch/**/**", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"INPUT_PATH\"}", + "typed_value": "batch/**/**", + "tag": "INPUT_PATH", + "value_type": "STRING" + }, + { + "name": "driver_cores", + "value": "1", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "1", + "tag": "RESOURCE_ALLOCATION", + 
"value_type": "STRING" + }, + { + "name": "driver_cores_limit", + "value": "4000m", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "driver_mem", + "value": "4g", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4g", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "executor_cores", + "value": "2", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "2", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "executor_mem", + "value": "4g", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4g", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "initial_executors", + "value": "2", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "2", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "max_executors", + "value": "128", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "128", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "min_executors", + "value": "2", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "2", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "file_format", + "value": "tfrecords", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OPERATING_PARAM\",\"tooltip\":\"文件存储格式\"}", + "typed_value": "tfrecords", + "tag": "OPERATING_PARAM", + "value_type": "STRING" + }, + { + "name": "batch_name", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"数据批次名\",\"tag\":\"OPERATING_PARAM\"}", + "typed_value": "", + "tag": "OPERATING_PARAM", + "value": "", + "value_type": "STRING" + } + ], + "job_definitions": [ + { + "name": "export-dataset", + "job_type": "TRANSFORMER", + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/export_dataset.py\",\n \"arguments\": [\n \"--data_path=\" + str(workflow.variables.dataset_path),\n \"--file_wildcard=\" + str(workflow.variables.file_wildcard),\n 
\"--export_path=\" + str(workflow.variables.export_path),\n \"--batch_name=\" + str(workflow.variables.batch_name),\n \"--file_format=\" + str(workflow.variables.file_format)\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": int(workflow.variables.driver_cores),\n \"coreLimit\": workflow.variables.driver_cores_limit,\n \"memory\": workflow.variables.driver_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": int(workflow.variables.initial_executors),\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executors),\n \"maxExecutors\": int(workflow.variables.max_executors),\n \"minExecutors\": int(workflow.variables.min_executors)\n }\n }\n}\n", + "is_federated": false, + "variables": [], + "dependencies": [], + "easy_mode": false + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "export-dataset": { + "slots": { + "Slot_labels": { + "reference": "system.variables.labels", + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息", + "default_value": {}, + "value_type": "OBJECT", + "default": "" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_driver_cores": { + "reference": "self.variables.undefined", + "help": "driver核心数", + "reference_type": "SELF", + "label": "driver核心数", + "default_value": "1000m", + "default": "", + "value_type": "STRING" + }, + "Slot_configs": { + "help": "使用特征选择组件", + "label": "配置", + "default_value": {}, + "value_type": "OBJECT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_executor_instances": { + "reference": "self.variables.undefined", + "help": "excutor实例数", + "reference_type": "SELF", + "label": "excutor实例数", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_executor_cores": { + "reference": "self.variables.undefined", + "help": "excutor核心数", + "reference_type": "SELF", + "label": "excutor核心数", + "default_value": "1000m", + "default": "", + "value_type": "STRING" + }, + "Slot_image": { + "reference": "system.variables.spark_image", + "help": "特征工程时选用的镜像", + "reference_type": "SYSTEM", + "label": "镜像", + "default_value": "artifact.bytedance.com/tce/spark_tfrecords_base:a3b2965430074bce316b13ec98ba8856", + "default": "", + "value_type": "STRING" + }, + "Slot_spark_transformer_file": { + "label": "特征工程脚本文件", + "default_value": "aaaaaa", + "reference": "", + "default": "", + "help": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_driver_core_limit": { + "reference": "self.variables.undefined", + "help": "driver核心数限制", + "reference_type": "SELF", + "label": "driver核心数限制", + 
"default_value": "1200m", + "default": "", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_dataset": { + "label": "输入数据集", + "default_value": "", + "reference": "", + "default": "", + "help": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_driver_memory": { + "reference": "self.variables.undefined", + "help": "driver内存", + "reference_type": "SELF", + "label": "driver内存", + "default_value": "1024m", + "default": "", + "value_type": "STRING" + }, + "Slot_executor_memory": { + "reference": "self.variables.undefined", + "help": "excutor内存", + "reference_type": "SELF", + "label": "excutor内存", + "default_value": "512m", + "default": "", + "value_type": "STRING" + } + }, + "meta_yaml": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": ${Slot_labels},\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": ${Slot_image},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": ${Slot_volumes},\n \"mainApplicationFile\": ${Slot_spark_transformer_file},\n \"arguments\": [\n ${Slot_dataset},\n \"rds/**\",\n str(${Slot_configs})\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": ${Slot_driver_cores},\n \"coreLimit\": ${Slot_driver_core_limit},\n \"memory\": ${Slot_driver_memory},\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": ${Slot_volume_mounts}\n },\n \"executor\": {\n \"cores\": ${Slot_executor_cores},\n \"instances\": ${Slot_executor_instances},\n \"memory\": ${Slot_executor_memory},\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": ${Slot_volume_mounts}\n }\n }\n}\n", + "variables": [] + } + } + }, + "comment": "" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-hash-data-join-analyzer.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-hash-data-join-analyzer.json new file mode 100644 index 000000000..10a073159 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-hash-data-join-analyzer.json @@ -0,0 +1,205 @@ +{ + "comment": "", + "config": { + "group_alias": "hash-data-join", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "partition-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": 
(system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/partition.py\",\n \"arguments\": [\n \"--input_path=\" + workflow.variables.input_batch_path + '/' + 'part*',\n \"--file_format=\" + 'tfrecords',\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--output_file_format=\" + 'tfrecords',\n \"--output_dir=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1]\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 2,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "partition-job" + } + ], + "easy_mode": false, + "is_federated": true, + "job_type": "PSI_DATA_JOIN", + "name": "hash-data-join", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"fedReplicaSpecs\": {\n \"Worker\": {\n \"backoffLimit\": 5,\n \"port\": {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\"\n },\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"name\": \"psi\",\n \"image\": system.variables.image_repo + \"/pp_lite:\" + system.version,\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role\n },\n {\n \"name\": \"JOB_TYPE\",\n \"value\": \"psi-hash\"\n },\n {\n \"name\": \"PEER_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"AUTHORITY\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"PEER_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"INPUT_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/ids'\n },\n {\n \"name\": \"OUTPUT_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined'\n },\n {\n \"name\": \"KEY_COLUMN\",\n \"value\": workflow.variables.part_key\n }\n ],\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"resources\": {\n \"limits\": {\n \"cpu\": workflow.variables.worker_cpu,\n \"memory\": workflow.variables.worker_mem\n },\n \"requests\": {\n \"cpu\": 
workflow.variables.worker_cpu,\n \"memory\": workflow.variables.worker_mem\n }\n },\n \"ports\": [\n {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50051,\n \"name\": \"server-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tunnel-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 1212,\n \"name\": \"joiner-port\",\n \"protocol\": \"TCP\"\n }\n ],\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list),\n }\n },\n \"pair\": True,\n \"replicas\": int(workflow.variables.replicas)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "hash-data-join" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "feature-extraction", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/feature_extraction_v2.py\",\n \"arguments\": [\n \"--original_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/raw',\n \"--joined_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined',\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--file_format=\" + 'tfrecords',\n \"--output_file_format=\" + 'tfrecords',\n \"--output_batch_name=\" + workflow.variables.output_batch_path.split('/')[-1],\n \"--output_dataset_path=\" + workflow.variables.output_dataset_path\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 2,\n \"memory\": '4g',\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "feature-extraction" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": 
\"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n \"tabular\",\n \"--data_path=\"+ (str(workflow.variables.output_dataset_path) or str(project.variables.storage_root_path) + \"/\" + \"dataset\" + \"/\" + \"\"),\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--buckets_num=\" + str(10),\n \"--thumbnail_path=\" + \"\",\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.basic_envs_list + system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "input_dataset_path", + "tag": "INPUT_PATH", + "typed_value": "undefined", + "value": "undefined", + "value_type": "STRING", + "widget_schema": "{\"component\":\"DatasetPath\",\"required\":true,\"tooltip\":\"输入数据地址\",\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_key", + "tag": "INPUT_PARAM", + "typed_value": "raw_id", + "value": "raw_id", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交的key\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_num", + "tag": "INPUT_PARAM", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"分区数量\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输出数据batch地址\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输出数据集地址\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + 
"access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "OPERATING_PARAM", + "typed_value": "undefined", + "value": "undefined", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"tooltip\":\"OtPsi角色\",\"enum\":[\"client\",\"server\"],\"hidden\":false,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "replicas", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交worker数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4Gi", + "value": "4Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输入batch地址\",\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor核数\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor内存\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "initial_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务初始化executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "min_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最小executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "64", + "value": "64", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最大executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + } + ] + }, + "editor_info": {}, + "group_alias": "hash-data-join", + "name": "sys-preset-hash-data-join-analyzer" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-light-ot-data-join.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-light-ot-data-join.json new file mode 100644 
index 000000000..a8a6c5d32 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-light-ot-data-join.json @@ -0,0 +1,214 @@ +{ + "comment": "", + "config": { + "group_alias": "light-ot-psi", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "partition-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/partition.py\",\n \"arguments\": [\n \"--input_path=\" + workflow.variables.input_batch_path + '/' + 'part*',\n \"--file_format=\" + workflow.variables.file_format,\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--output_file_format=\" + workflow.variables.file_format,\n \"--output_dir=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1]\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 2,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "partition-job" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "PSI_DATA_JOIN", + "name": "lc-start-server", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\",\n \"min-member\": \"1\",\n \"resource-cpu\": str(workflow.variables.worker_cpu),\n \"resource-mem\": str(workflow.variables.worker_mem),\n },\n },\n \"spec\": {\n \"fedReplicaSpecs\": {\n \"Worker\": {\n \"backoffLimit\": 5,\n \"port\": {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\"\n },\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"name\": \"psi\",\n \"image\": system.variables.image_repo + \"/pp_lite:\" + 
system.version,\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"ROLE\",\n \"value\": \"server\"\n },\n {\n \"name\": \"JOB_TYPE\",\n \"value\": \"psi-ot\"\n },\n {\n \"name\": \"PEER_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"AUTHORITY\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"PEER_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"INPUT_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/ids'\n },\n {\n \"name\": \"OUTPUT_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined'\n },\n {\n \"name\": \"KEY_COLUMN\",\n \"value\": workflow.variables.part_key\n }\n ],\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"resources\": {\n \"limits\": {\n \"cpu\": workflow.variables.worker_cpu,\n \"memory\": workflow.variables.worker_mem\n },\n \"requests\": {\n \"cpu\": workflow.variables.worker_cpu,\n \"memory\": workflow.variables.worker_mem\n }\n },\n \"ports\": [\n {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50051,\n \"name\": \"server-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tunnel-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 1212,\n \"name\": \"joiner-port\",\n \"protocol\": \"TCP\"\n }\n ],\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list),\n }\n },\n \"pair\": True,\n \"replicas\": int(workflow.variables.replicas)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "lc-start-server" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "feature-extraction", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/feature_extraction_v2.py\",\n \"arguments\": [\n \"--original_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/raw',\n \"--joined_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined',\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--file_format=\" + workflow.variables.file_format,\n \"--output_file_format=\" + workflow.variables.output_file_format,\n \"--output_batch_name=\" + workflow.variables.output_batch_path.split('/')[-1],\n \"--output_dataset_path=\" + workflow.variables.output_dataset_path\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n 
\"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 2,\n \"memory\": '4g',\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "feature-extraction" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n \"tabular\",\n \"--data_path=\"+ (str(workflow.variables.output_dataset_path) or str(project.variables.storage_root_path) + \"/\" + \"dataset\" + \"/\" + \"\"),\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--buckets_num=\" + str(10),\n \"--thumbnail_path=\" + \"\",\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.basic_envs_list + system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "input_dataset_path", + "tag": "INPUT_PATH", + "typed_value": "undefined", + "value": "undefined", + "value_type": "STRING", + "widget_schema": 
"{\"component\":\"DatasetPath\",\"required\":true,\"tooltip\":\"输入数据地址\",\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_key", + "tag": "INPUT_PARAM", + "typed_value": "raw_id", + "value": "raw_id", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交的key\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_num", + "tag": "INPUT_PARAM", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"分区数量\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "0", + "value": "0", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输出数据batch地址\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输出数据集地址\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "replicas", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交worker数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4Gi", + "value": "4Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输入batch地址\",\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor核数\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor内存\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "initial_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务初始化executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "min_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最小executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_executor_num", + 
"tag": "RESOURCE_ALLOCATION", + "typed_value": "64", + "value": "64", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最大executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_format", + "tag": "OPERATING_PARAM", + "typed_value": "tfrecords", + "value": "tfrecords", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"输入数据格式,支持csv或tfrecords\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_file_format", + "tag": "OPERATING_PARAM", + "typed_value": "tfrecords", + "value": "tfrecords", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"输出数据格式,支持csv或tfrecords\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + } + ] + }, + "editor_info": {}, + "group_alias": "ot-psi", + "name": "sys-preset-light-ot-data-join" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-light-psi-data-join.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-light-psi-data-join.json new file mode 100644 index 000000000..1fce367bc --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-light-psi-data-join.json @@ -0,0 +1,1107 @@ +{ + "comment": "", + "config": { + "group_alias": "light-psi", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "lc-sign-raw-data", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"Always\",\n \"volumes\": list(system.variables.volumes_list),\n \"arguments\": ['/opt/spark/work-dir/psi.py'],\n \"sparkVersion\": \"3.0.0\",\n \"sparkConf\":{\n \"spark.shuffle.service.enabled\": \"false\"\n },\n \"restartPolicy\": {\n \"type\": \"Never\"\n },\n \"driver\": {\n \"cores\": 1,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [\n { \n \"name\": \"INPUT_DIR\",\n \"value\": workflow.variables.input_batch_path\n },\n { \n \"name\": \"OUTPUT_DIR\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1]\n },\n { \n \"name\": \"RSA_KEY_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/rsa_private.key'\n },\n { \n \"name\": \"PART_NUM\",\n \"value\": workflow.variables.part_num\n },\n {\n \"name\": \"PART_KEY\",\n \"value\": 
workflow.variables.part_key\n },\n { \n \"name\": \"FILE_FORMAT\",\n \"value\": \"tfrecords\"\n },\n { \n \"name\": \"RSA_KEY_BITS\",\n \"value\": workflow.variables.rsa_key_bits\n },\n { \n \"name\": \"WILDCARD\",\n \"value\": \"part*\"\n }\n ]\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "lc-sign-raw-data" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "lc-start-server", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": system.variables.labels,\n \"annotations\": {\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\",\n \"min-member\": \"1\",\n \"resource-cpu\": str(workflow.variables.server_cpu),\n \"resource-mem\": str(workflow.variables.server_mem),\n },\n },\n \"spec\": {\n \"activeDeadlineSeconds\": int(workflow.variables.active_deadline_seconds),\n \"cleanPodPolicy\": \"All\",\n \"fedReplicaSpecs\": {\n \"Worker\": {\n \"replicas\": 1,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"image\": system.variables.image_repo + \"/pp_lite:\" + system.version,\n \"ports\": [\n {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\"\n },\n {\n \"containerPort\": 50051,\n \"name\": \"server-port\"\n },\n ],\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"JOB_TYPE\",\n \"value\": \"psi-rsa\"\n },\n {\n \"name\": \"ROLE\",\n \"value\": \"server\"\n },\n {\n \"name\": \"PRIVATE_KEY_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/rsa_private.key'\n },\n {\n \"name\": \"INPUT_DIR\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/signed'\n },\n {\n \"name\": \"OUTPUT_DIR\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1]\n },\n {\n \"name\": \"KEY_COLUMN\",\n \"value\": workflow.variables.part_key\n },\n {\n \"name\": \"SIGNED_COLUMN\",\n \"value\": \"signed\"\n },\n {\n \"name\": \"NUM_SIGN_PARALLEL\",\n \"value\": workflow.variables.sign_number_workers\n },\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": \"4096\"\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": \"pplite_psi\"\n },\n ],\n \"volumeMounts\": system.variables.volume_mounts_list,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\"\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": system.variables.volumes_list,\n \"restartPolicy\": \"Never\"\n }\n }\n }\n },\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ]\n }\n}\n " + }, + { + "dependencies": [ + { + "source": "lc-start-server" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "DATA_JOIN", + "name": 
"lc-feature-extraction", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/feature_extraction_v2.py\",\n \"arguments\": [\n \"--original_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/raw',\n \"--joined_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined',\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--file_format=\" + 'csv',\n \"--output_file_format=\" + 'tfrecords',\n \"--output_batch_name=\" + workflow.variables.output_batch_path.split('/')[-1],\n \"--output_dataset_path=\" + workflow.variables.output_dataset_path\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"memory\": '4g',\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [\n { \n \"name\": \"ORIGINAL_DATA_PATH\",\n \"value\": workflow.variables.input_dataset_path\n },\n { \n \"name\": \"ORIGINAL_FILE_WILDCARD\",\n \"value\": \"batch/*/part*\"\n },\n { \n \"name\": \"JOINED_DATA_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined'\n },\n { \n \"name\": \"DATABLOCK_PATH\",\n \"value\": workflow.variables.output_batch_path\n },\n { \n \"name\": \"PART_KEY\",\n \"value\": workflow.variables.part_key\n },\n {\n \"name\": \"FILE_FORMAT\",\n \"value\": 'tfrecords'\n }\n ]\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "lc-feature-extraction" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n 
\"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n \"tabular\",\n \"--data_path=\"+ workflow.variables.output_dataset_path,\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--buckets_num=\" + str(10),\n \"--thumbnail_path=\" + \"\",\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.basic_envs_list + system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 2,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.basic_envs_list + system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "input_dataset_path", + "tag": "INPUT_PATH", + "typed_value": "undefined", + "value": "undefined", + "value_type": "STRING", + "widget_schema": "{\"component\":\"DatasetPath\",\"required\":true,\"tooltip\":\"输入数据\",\"hidden\":false,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_num", + "tag": "INPUT_PARAM", + "typed_value": "4", + "value": "4", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"分区数量\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_key", + "tag": "INPUT_PARAM", + "typed_value": "raw_id", + "value": "raw_id", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"分区以及求交的key\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_bits", + "tag": "INPUT_PARAM", + "typed_value": "1024", + "value": "1024", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"RSA密钥长度\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "min_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最小executor数量\",\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "64", + "value": "64", + "value_type": "STRING", + "widget_schema": 
"{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最大executor数量\",\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "initial_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务初始化executor数量\",\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor核数\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor内存\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "server_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "16000m", + "value": "16000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交服务端cpu数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "server_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "20Gi", + "value": "20Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交服务端内存容量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "sign_number_workers", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "30", + "value": "30", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交服务端进程数量\",\"hidden\":false,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "active_deadline_seconds", + "tag": "INPUT_PARAM", + "typed_value": "86400", + "value": "86400", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交任务最长运行时间,超过该运行时间求交任务会自动停止。\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输入batch路径\",\"hidden\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OUTPUT_PATH\",\"hidden\":true}" + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "analyzer": { + "meta_yaml": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n 
\"namespace\": system.variables.namespace,\n \"labels\": ${Slot_labels},\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": ${Slot_image} or system.variables.image_repo + \"/pp_data_inspection:\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": ${Slot_volumes},\n \"mainApplicationFile\": ${Slot_spark_main_file},\n \"arguments\": [\n \"--data_path=\"+ (${Slot_dataset_path} or ${Slot_storage_root_path} + \"/\" + ${Slot_inner_folder_name} + \"/\" + ${Slot_input_job_name}),\n \"--file_wildcard=\" + ${Slot_wildcard},\n \"--buckets_num=\" + str(${Slot_buckets_num}),\n \"--thumbnail_path=\" + ${Slot_thumbnail_path},\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": ${Slot_driver_cores},\n \"coreLimit\": ${Slot_driver_core_limit},\n \"memory\": ${Slot_driver_memory},\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"env\": system.basic_envs_list + system.variables.envs_list + ${Slot_drvier_envs}\n },\n \"executor\": {\n \"cores\": ${Slot_executor_cores},\n \"instances\": ${Slot_executor_instances},\n \"memory\": ${Slot_executor_memory},\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": ${Slot_volume_mounts},\n \"env\": system.basic_envs_list + system.variables.envs_list + ${Slot_executor_envs}\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": ${Slot_initial_executors},\n \"maxExecutors\": ${Slot_max_executors},\n \"minExecutors\": ${Slot_min_executors},\n }\n }\n}\n", + "slots": { + "Slot_buckets_num": { + "default": "", + "default_value": 10.0, + "help": "用于数据探查时统计直方图的分通数", + "label": "直方图分桶数", + "reference": "", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_dataset_path": { + "default": "", + "default_value": "", + "help": "用于数据集存储的路径", + "label": "数据集存储路径", + "reference": "", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_driver_core_limit": { + "default": "", + "default_value": "1200m", + "help": "driver核心数限制", + "label": "driver核心数限制", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_driver_cores": { + "default": "", + "default_value": 1.0, + "help": "driver核心数", + "label": "driver核心数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_driver_memory": { + "default": "", + "default_value": "4g", + "help": "driver内存", + "label": "driver内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_drvier_envs": { + "default": "", + "default_value": [], + "help": "driver环境变量", + "label": "driver环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_executor_cores": { + "default": "", + "default_value": 2.0, + "help": "executor核心数", + "label": "executor核心数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_executor_envs": { + "default": "", + "default_value": [], + "help": "executor环境变量", + "label": "executor环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_executor_instances": { + "default": "", + "default_value": 2.0, + "help": 
"executor实例数", + "label": "executor实例数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_executor_memory": { + "default": "", + "default_value": "4g", + "help": "executor内存", + "label": "executor内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "", + "help": "镜像地址,建议不填写,默认会使用system.variables.image_repo + '/pp_data_inspection:' + system.version", + "label": "镜像", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_initial_executors": { + "default": "", + "default_value": 2.0, + "help": "初始化executor数量", + "label": "初始化executor数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_inner_folder_name": { + "default": "", + "default_value": "dataset", + "help": "为了兼容老的路径的临时Slot,['dataset', 'datasource']", + "label": "中间文件夹名", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_input_job_name": { + "default": "", + "default_value": "", + "help": "必须修改,求交任务名或数据集名称", + "label": "数据集名", + "reference": "", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_max_executors": { + "default": "", + "default_value": 64.0, + "help": "初始化executor数量", + "label": "最大executor数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_min_executors": { + "default": "", + "default_value": 2.0, + "help": "初始化executor数量", + "label": "最小executor数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_spark_main_file": { + "default": "", + "default_value": "/opt/spark/work-dir/analyzer_v2.py", + "help": "spark入口脚本", + "label": "入口脚本文件", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_thumbnail_path": { + "default": "", + "default_value": "", + "help": "用于存放预览图像的位置", + "label": "预览图像位置", + "reference": "", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_wildcard": { + "default": "", + "default_value": "batch/**/*.data", + "help": "文件通配符", + "label": "文件通配符", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "lc-feature-extraction": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": 
\"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n ${Slot_volume_mounts}\n ,\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n ${Slot_volumes}\n\n }\n },\n \"pair\": true,\n \"replicas\": ${Slot_master_replicas}\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": 
\"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"MIN_MATCHING_WINDOW\",\n \"value\": str(${Slot_min_matching_window})\n },\n {\n \"name\": \"MAX_MATCHING_WINDOW\",\n \"value\": str(${Slot_max_matching_window})\n },\n {\n \"name\": \"RAW_DATA_ITER\",\n \"value\": ${Slot_raw_data_iter}\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": ${Slot_data_block_builder}\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(${Slot_enable_negative_example_generator})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\":\n ${Slot_volume_mounts}\n ,\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/data_join/run_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\":\n ${Slot_volumes}\n\n }\n },\n \"pair\": true,\n \"replicas\": ${Slot_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "data block output数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example 
id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump一次example id,小于等于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image_version": { + "default": "", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "label": "容器镜像版本", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "default": "", + "default_value": 1.0, + "help": "同时运行的完全相同的Master Pods数量", + "label": "Master的Pod个数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_max_matching_window": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,the max matching window for example join. 
<=0 means window size is infinite", + "label": "最大匹配滑窗", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_min_matching_window": { + "default": "", + "default_value": 1024.0, + "help": "建议不修改,the min matching window for example join ,<=0 means window size is infinite", + "label": "最小匹配滑窗", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw_data文件类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + "reference": "", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_start_time": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + }, + "lc-sign-raw-data": { + "meta_yaml": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": 
${Slot_labels},\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": ${Slot_image} or system.variables.image_repo + \"/pp_data_inspection:\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": ${Slot_volumes},\n \"mainApplicationFile\": ${Slot_spark_transformer_file},\n \"arguments\": [\n ${Slot_dataset},\n \"rds/**\",\n str(${Slot_configs})\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": ${Slot_driver_cores},\n \"coreLimit\": ${Slot_driver_core_limit},\n \"memory\": ${Slot_driver_memory},\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": ${Slot_volume_mounts}\n },\n \"executor\": {\n \"cores\": ${Slot_executor_cores},\n \"instances\": ${Slot_executor_instances},\n \"memory\": ${Slot_executor_memory},\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": ${Slot_volume_mounts}\n }\n }\n}\n", + "slots": { + "Slot_configs": { + "default": "", + "default_value": {}, + "help": "使用特征选择组件", + "label": "配置", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "OBJECT" + }, + "Slot_dataset": { + "default": "", + "default_value": "", + "help": "", + "label": "输入数据集", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_driver_core_limit": { + "default": "", + "default_value": "1200m", + "help": "driver核心数限制", + "label": "driver核心数限制", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_driver_cores": { + "default": "", + "default_value": "1000m", + "help": "driver核心数", + "label": "driver核心数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_driver_memory": { + "default": "", + "default_value": "1024m", + "help": "driver内存", + "label": "driver内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_executor_cores": { + "default": "", + "default_value": "1000m", + "help": "executor核心数", + "label": "executor核心数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_executor_instances": { + "default": "", + "default_value": 1.0, + "help": "executor实例数", + "label": "executor实例数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_executor_memory": { + "default": "", + "default_value": "512m", + "help": "executor内存", + "label": "executor内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_image": { + "default": "", + "default_value": "", + "help": "镜像地址,建议不填写,默认会使用system.variables.image_repo + '/pp_data_inspection:' + system.version", + "label": "镜像", + "reference": "system.variables.spark_image", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_spark_transformer_file": { + "default": "", + "default_value": "transformer.py", + "help": "特征工程的脚本", + "label": "特征工程脚本文件", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + 
"default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + } + }, + "variables": [] + }, + "lc-start-server": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(${Slot_raw_data_metrics_sample_rate})\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n 
\"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(${Slot_raw_data_metrics_sample_rate})\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "Streaming", + "help": "运行过一次后修改无效!! 
the type of data portal, choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所有满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image_version": { + "default": "", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "label": "容器镜像版本", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. 
\"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_raw_data_metrics_sample_rate": { + "default": "", + "default_value": "1", + "help": "建议不修改,es metrics 取样比例", + "label": "metrics_sample_rate", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + } + }, + "variables": [] + } + } + }, + "name": "sys-preset-light-psi-data-join" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-horizontal-eval-model.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-horizontal-eval-model.json new file mode 100644 index 000000000..de56968ba --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-horizontal-eval-model.json @@ -0,0 +1,451 @@ +{ + "name": "sys-preset-nn-horizontal-eval-model", + "group_alias": "sys_preset_nn_horizontal_model", + "config": { + "group_alias": "sys_preset_nn_horizontal_model", + "job_definitions": [ + { + "name": "train-job", + "job_type": "NN_MODEL_TRANINING", + "variables": [ + { + "name": "algorithm", + "value": "{\"path\":[],\"config\":[]}", + "access_mode": "PEER_READABLE", + "widget_schema": 
"{\"component\":\"AlgorithmSelect\",\"required\":true,\"tag\":\"OPERATING_PARAM\"}", + "value_type": "OBJECT", + "typed_value": { + "config": [], + "path": [] + }, + "tag": "OPERATING_PARAM" + }, + { + "name": "image_version", + "value": "50a6945", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "50a6945", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "data_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "load_model_name", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "steps_per_sync", + "value": "10", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\",\"tooltip\":\"每隔几步同步一次\"}", + "typed_value": "10", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "worker_cpu", + "value": "4000m", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "worker_mem", + "value": "8Gi", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "8Gi", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "load_checkpoint_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\",\n \"min-member\": \"1\",\n \"resource-cpu\": str(self.variables.worker_cpu),\n \"resource-mem\": str(self.variables.worker_mem),\n },\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": \"follower\"\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": self.variables.algorithm.path\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": self.variables.data_path\n },\n {\n \"name\": 
\"LOAD_MODEL_FROM\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.variables.load_model_name + \"/checkpoints\" if self.variables.load_model_name else self.variables.load_checkpoint_path\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name + \"/exported_models\"\n },\n {\n \"name\": \"MODE\",\n \"value\": \"eval\"\n },\n {\n \"name\": \"FL_STEPS_PER_SYNC\",\n \"value\": self.variables.steps_per_sync\n }\n ] + list(self.variables.algorithm.config),\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": self.variables.worker_cpu,\n \"memory\": self.variables.worker_mem\n },\n \"requests\": {\n \"cpu\": self.variables.worker_cpu,\n \"memory\": self.variables.worker_mem\n }\n },\n \"command\": [\n \"/app/deploy/scripts/trainer/run_fedavg.sh\"\n ],\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n\n }\n },\n \"pair\": False,\n \"replicas\": int(1)\n }\n }\n }\n}\n", + "is_federated": false, + "dependencies": [], + "easy_mode": false + } + ], + "variables": [] + }, + "editor_info": { + "yaml_editor_infos": { + "train-job": { + "slots": { + "Slot_worker_cpu": { + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "default_value": "2000m", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_memory": { + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "PS的内存", + "default_value": "3Gi", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_mode": { + "help": "choices:['train','eval'] 训练还是验证", + "label": "模式", + "default_value": "train", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_summary_save_steps": { + "help": "int, Number of steps to save summary files.", + "label": "SUMMARY_SAVE_STEPS", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_start_date": { + "help": "training data start date", + "label": "开始时间", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息", + "default_value": {}, + "value_type": "OBJECT", + "default": "" + }, + "Slot_epoch_num": { + "help": "number of epoch for training, not support in online training", + "label": "epoch数量", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录", + "default_value": "/data", + 
"default": "", + "value_type": "STRING" + }, + "Slot_master_envs": { + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "default_value": [], + "value_type": "LIST", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_master_memory": { + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "default_value": "3Gi", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷", + "default_value": [ + { + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + }, + "name": "data" + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_load_checkpoint_from_job": { + "help": "指定任务名job_output下的latest checkpoint", + "label": "以任务名加载checkpoint", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_tar": { + "help": "代码包,variable中请使用代码类型", + "label": "代码", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_end_date": { + "help": "training data end date", + "label": "结束时间", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_load_checkpoint_filename": { + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "label": "LOAD_CHECKPOINT_FILENAME", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_checkpoint_path": { + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "label": "CHECKPOINT_PATH", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_suffle_data_block": { + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "label": "是否shuffle数据块", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "help": "同时运行的完全相同的Master Pods数量", + "label": "Master的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_code_key": { + "reference": "self.variables.algorithm.path", + "help": "代码tar包地址,如果为空则使用code tar", + "reference_type": "SELF", + "label": "模型代码路径", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_export_path": { + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "label": "EXPORT_PATH", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_envs": { + "help": "数组类型,ps pod额外的环境变量", + "label": "PS额外环境变量", + "default_value": [], + "value_type": "LIST", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_worker_memory": { + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "default_value": "3Gi", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_filename_with_path": { + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "label": "从绝对路径加载checkpoint", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + 
"value_type": "STRING" + }, + "Slot_ps_replicas": { + "help": "同时运行的完全相同的PS Pods数量", + "label": "PS的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_save_checkpoint_secs": { + "help": "int,Number of secs between checkpoints.", + "label": "SAVE_CHECKPOINT_SECS", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_ps_cpu": { + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "PS的CPU", + "default_value": "2000m", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_source": { + "help": "必须修改,求交任务的名字", + "label": "数据源", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "default_value": "2000m", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "reference": "self.variables.algorithm.config", + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "SELF", + "label": "Worker额外环境变量", + "default_value": [], + "value_type": "LIST", + "default": "" + }, + "Slot_worker_replicas": { + "help": "同时运行的完全相同的Worker Pods数量", + "label": "Worker的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_sparse_estimator": { + "help": "bool,default False Whether using sparse estimator.", + "label": "SPARSE_ESTIMATOR", + "default_value": false, + "value_type": "BOOL", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_verbosity": { + "help": "int, Logging level", + "label": "日志等级", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_role": { + "reference": "self.variables.undefined", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "SELF", + "label": "Flapp通讯时角色", + "default_value": "Leader", + "default": "", + "value_type": "STRING" + }, + "Slot_image": { + "reference": "self.variables.image_version", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "reference_type": "SELF", + "label": "容器镜像", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "default": "", + "value_type": "STRING" + }, + "Slot_online_training": { + "help": "['','--online_training'] 否 是,the train master run for online training", + "label": "是否在线训练", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_save_checkpoint_steps": { + "help": "int, Number of steps between checkpoints.", + "label": "SAVE_CHECKPOINT_STEPS", + "default_value": 1000.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + } + }, + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": 
project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(${Slot_epoch_num})\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(${Slot_start_date})\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(${Slot_end_date})\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": ${Slot_online_training}\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": ${Slot_checkpoint_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": ${Slot_load_checkpoint_filename}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": ${Slot_load_checkpoint_filename_with_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + \"/job_output/\" + ${Slot_load_checkpoint_from_job} + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": ${Slot_export_path}\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_master_replicas})\n },\n \"PS\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + ${Slot_ps_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n 
\"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_ps_replicas})\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(${Slot_save_checkpoint_steps})\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(${Slot_save_checkpoint_secs})\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(${Slot_summary_save_steps})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_trainer_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_worker_replicas})\n }\n }\n }\n}\n", + "variables": [] + } + } + }, + "comment": "" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-horizontal-model.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-horizontal-model.json new file mode 100644 index 000000000..fa318b43f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-horizontal-model.json @@ -0,0 +1,460 @@ +{ + "name": "sys-preset-nn-horizontal-model", + "group_alias": "sys_preset_nn_horizontal_model", + "config": { + "group_alias": "sys_preset_nn_horizontal_model", + "job_definitions": [ + { + "name": "train-job", + "job_type": "NN_MODEL_TRANINING", + "variables": [ + { + "name": "algorithm", + "value": "{\"path\":[],\"config\":[]}", + "access_mode": "PEER_READABLE", + 
"widget_schema": "{\"component\":\"AlgorithmSelect\",\"required\":true,\"tag\":\"OPERATING_PARAM\"}", + "value_type": "OBJECT", + "typed_value": { + "path": [], + "config": [] + }, + "tag": "OPERATING_PARAM" + }, + { + "name": "image_version", + "value": "50a6945", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "50a6945", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "role", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"],\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "data_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "steps_per_sync", + "value": "10", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "10", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "worker_cpu", + "value": "4000m", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "worker_mem", + "value": "8Gi", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "8Gi", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "epoch_num", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "1", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "verbosity", + "value": "0", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"enum\":[],\"tooltip\":\"\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "0", + "tag": "INPUT_PARAM", + "value_type": "STRING" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\",\n \"min-member\": \"1\",\n \"resource-cpu\": str(self.variables.worker_cpu),\n \"resource-mem\": str(self.variables.worker_mem),\n },\n },\n \"spec\": {\n \"role\": self.variables.role,\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if self.variables.role==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"ROLE\",\n \"value\": self.variables.role.lower()\n },\n {\n \"name\": \"MODE\",\n \"value\": \"train\"\n },\n {\n \"name\": 
\"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": self.variables.algorithm.path\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": self.variables.data_path\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name + \"/exported_models\"\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": self.variables.epoch_num\n },\n {\n \"name\": \"FL_STEPS_PER_SYNC\",\n \"value\": self.variables.steps_per_sync\n }\n ] + list(self.variables.algorithm.config),\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": self.variables.worker_cpu,\n \"memory\": self.variables.worker_mem\n },\n \"requests\": {\n \"cpu\": self.variables.worker_cpu,\n \"memory\": self.variables.worker_mem\n }\n },\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_fedavg.sh\"\n ]\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n\n }\n },\n \"pair\": True,\n \"replicas\": int(1)\n }\n }\n }\n}\n", + "is_federated": false, + "dependencies": [], + "easy_mode": false + } + ], + "variables": [] + }, + "editor_info": { + "yaml_editor_infos": { + "train-job": { + "slots": { + "Slot_end_date": { + "help": "training data end date", + "label": "结束时间", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_role": { + "reference": "self.variables.role", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "SELF", + "label": "Flapp通讯时角色", + "default_value": "Leader", + "default": "", + "value_type": "STRING" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息", + "default_value": {}, + "value_type": "OBJECT", + "default": "" + }, + "Slot_save_checkpoint_steps": { + "help": "int, Number of steps between checkpoints.", + "label": "SAVE_CHECKPOINT_STEPS", + "default_value": 1000.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_code_tar": { + "help": "代码包,variable中请使用代码类型", + "label": "代码", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_filename": { + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "label": "LOAD_CHECKPOINT_FILENAME", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_envs": { + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "default_value": [], + "value_type": "LIST", + "reference": "", + "default": "", + 
"reference_type": "DEFAULT" + }, + "Slot_image": { + "reference": "self.variables.image_version", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "reference_type": "SELF", + "label": "容器镜像", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "default": "", + "value_type": "STRING" + }, + "Slot_data_source": { + "help": "必须修改,求交任务的名字", + "label": "数据源", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_load_checkpoint_filename_with_path": { + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "label": "从绝对路径加载checkpoint", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_mode": { + "help": "choices:['train','eval'] 训练还是验证", + "label": "模式", + "default_value": "train", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_code_key": { + "reference": "self.variables.algorithm.path", + "help": "代码tar包地址,如果为空则使用code tar", + "reference_type": "SELF", + "label": "模型代码路径", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "help": "同时运行的完全相同的Master Pods数量", + "label": "Master的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_worker_envs": { + "reference": "self.variables.algorithm.config", + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "SELF", + "label": "Worker额外环境变量", + "default_value": [], + "value_type": "LIST", + "default": "" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录", + "default_value": "/data", + "default": "", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "default_value": "2000m", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_suffle_data_block": { + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "label": "是否shuffle数据块", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_cpu": { + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "default_value": "2000m", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_replicas": { + "help": "同时运行的完全相同的Worker Pods数量", + "label": "Worker的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置", + "default_value": [ + { + "name": "data", + "mountPath": "/data" + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_ps_replicas": { + "help": "同时运行的完全相同的PS Pods数量", + "label": "PS的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_start_date": { + "help": "training data start date", + "label": "开始时间", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "help": 
"建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_load_checkpoint_from_job": { + "help": "指定任务名job_output下的latest checkpoint", + "label": "以任务名加载checkpoint", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_sparse_estimator": { + "help": "bool,default False Whether using sparse estimator.", + "label": "SPARSE_ESTIMATOR", + "default_value": false, + "value_type": "BOOL", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_verbosity": { + "help": "int, Logging level", + "label": "日志等级", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_ps_envs": { + "help": "数组类型,ps pod额外的环境变量", + "label": "PS额外环境变量", + "default_value": [], + "value_type": "LIST", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_export_path": { + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "label": "EXPORT_PATH", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_memory": { + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "PS的内存", + "default_value": "3Gi", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_memory": { + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "default_value": "3Gi", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_online_training": { + "help": "['','--online_training'] 否 是,the train master run for online training", + "label": "是否在线训练", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_cpu": { + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "PS的CPU", + "default_value": "2000m", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_summary_save_steps": { + "help": "int, Number of steps to save summary files.", + "label": "SUMMARY_SAVE_STEPS", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_save_checkpoint_secs": { + "help": "int,Number of secs between checkpoints.", + "label": "SAVE_CHECKPOINT_SECS", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_worker_memory": { + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "default_value": "3Gi", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_checkpoint_path": { + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "label": "CHECKPOINT_PATH", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_epoch_num": { + "help": "number of epoch for training, not support in online training", + "label": "epoch数量", + "default_value": 1.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + } + }, + "meta_yaml": "{\n \"apiVersion\": 
\"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(${Slot_epoch_num})\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(${Slot_start_date})\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(${Slot_end_date})\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": ${Slot_online_training}\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": ${Slot_checkpoint_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": ${Slot_load_checkpoint_filename}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": ${Slot_load_checkpoint_filename_with_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + \"/job_output/\" + ${Slot_load_checkpoint_from_job} + \"/checkpoints\"\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": ${Slot_export_path}\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_master_replicas})\n },\n \"PS\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n 
},\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + ${Slot_ps_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_ps_replicas})\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(${Slot_save_checkpoint_steps})\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(${Slot_save_checkpoint_secs})\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(${Slot_summary_save_steps})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_trainer_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_worker_replicas})\n }\n }\n }\n}\n", + "variables": [] + } + } + }, + "comment": "" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-model.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-model.json new file mode 100644 index 000000000..2d5b5a582 --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-nn-model.json @@ -0,0 +1,667 @@ +{ + "name": "sys-preset-nn-model", + "group_alias": "sys_preset_nn_model", + "config": { + "group_alias": "sys_preset_nn_model", + "job_definitions": [ + { + "name": "nn-model", + "job_type": "NN_MODEL_TRANINING", + "is_federated": true, + "variables": [ + { + "name": "master_cpu", + "value": "3000m", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "3000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "master_mem", + "value": "4Gi", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4Gi", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "worker_cpu", + "value": "2000m", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "2000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "worker_mem", + "value": "4Gi", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4Gi", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "ps_replicas", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "1", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "master_replicas", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "1", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "ps_cpu", + "value": "2000m", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "2000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "ps_mem", + "value": "4Gi", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "4Gi", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "worker_replicas", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "1", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "data_source", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "epoch_num", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "1", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "shuffle_data_block", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "verbosity", + "value": "1", + "access_mode": 
"PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"enum\":[\"0\",\"1\",\"2\"],\"tag\":\"INPUT_PARAM\"}", + "typed_value": "1", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "mode", + "value": "train", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"train\",\"eval\"],\"tag\":\"OPERATING_PARAM\"}", + "typed_value": "train", + "tag": "OPERATING_PARAM", + "value_type": "STRING" + }, + { + "name": "save_checkpoint_secs", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "save_checkpoint_steps", + "value": "1000", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "1000", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "load_checkpoint_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "load_checkpoint_filename_with_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "role", + "value": "Leader", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"],\"tag\":\"INPUT_PARAM\"}", + "typed_value": "Leader", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "load_model_name", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "algorithm", + "value": "{\"config\":[],\"path\":\"\"}", + "access_mode": "PEER_READABLE", + "widget_schema": "{\"component\":\"AlgorithmSelect\",\"required\":true,\"tag\":\"OPERATING_PARAM\"}", + "value_type": "OBJECT", + "typed_value": { + "config": [], + "path": "" + }, + "tag": "OPERATING_PARAM" + }, + { + "name": "image_version", + "value": "50a6945", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"镜像版本\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "50a6945", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "start_date", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"数据开始时间\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "end_date", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"数据结束时间\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "export_model", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"可不填,或输入true或false。如果不填,那么训练任务会export模型,如果填,则根据输入值决定是否export\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "data_path", + 
"access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"和 data_source 二选一,输入输入数据集所在的路径。\",\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "data_path_wildcard", + "value": "**/part*", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"配合 data_path 使用,输入数据集文件的 wildcard,默认是 **/part*。\",\"tag\":\"INPUT_PATH\"}", + "typed_value": "**/part*", + "tag": "INPUT_PATH", + "value_type": "STRING" + }, + { + "name": "fedapp_active_ttl", + "value": "86400", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OPERATING_PARAM\",\"tooltip\":\"运行最大时长,单位秒\"}", + "typed_value": "86400", + "tag": "OPERATING_PARAM", + "value_type": "STRING" + }, + { + "name": "local_worker_replicas", + "value": "0", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\",\"tooltip\":\"local worker数量\"}", + "typed_value": "0", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "local_data_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "sparse_estimator", + "value": "false", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"是否使用sparse_estimator;true为使用,false为不使用。\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "false", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "worker_backoff_limit", + "value": "6", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"worker最大重试次数\"}", + "typed_value": "6", + "tag": "INPUT_PARAM", + "value_type": "STRING" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\",\n \"min-member\": \"1\",\n \"resource-cpu\": str(self.variables.worker_cpu),\n \"resource-mem\": str(self.variables.worker_mem),\n },\n },\n \"spec\": {\n \"activeDeadlineSeconds\": int(self.variables.fedapp_active_ttl),\n \"fedReplicaSpecs\": {\n \"Master\": {\n \"backoffLimit\": 1,\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(self.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": str(self.variables.mode)\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(int(self.variables.epoch_num))\n },\n {\n 
\"name\": \"START_DATE\",\n \"value\": str(int(self.variables.start_date))\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(int(self.variables.end_date))\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": str(self.variables.data_source)\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": str(self.variables.data_path)\n },\n {\n \"name\": \"DATA_PATH_WILDCARD\",\n \"value\": str(self.variables.data_path_wildcard)\n },\n {\n \"name\": \"EXPORT_MODEL\",\n \"value\": str(self.variables.export_model)\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": \"\"\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(bool(self.variables.sparse_estimator))\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": str(self.variables.algorithm.path)\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": str(self.variables.load_checkpoint_filename_with_path)\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": str(project.variables.storage_root_path) + \"/job_output/\" + str(self.variables.load_model_name) + \"/checkpoints\" if str(self.variables.load_model_name) else str(self.variables.load_checkpoint_path)\n },\n {\n \"name\": \"EXPORT_PATH\",\n \"value\": \"\"\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(int(self.variables.save_checkpoint_steps))\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(int(self.variables.save_checkpoint_secs))\n },\n {\n \"name\": \"METRIC_COLLECTOR_ENABLE\",\n \"value\": str(True)\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": \"fedlearner_model\"\n },\n {\n \"name\": \"LOCAL_DATA_PATH\",\n \"value\": self.variables.local_data_path\n },\n {\n \"name\": \"SHUFFLE_IN_DAY\",\n \"value\": \"true\"\n }\n ] + list(self.variables.algorithm.config),\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.master_cpu),\n \"memory\": str(self.variables.master_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.master_cpu),\n \"memory\": str(self.variables.master_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n\n }\n },\n \"replicas\": int(int(self.variables.master_replicas))\n },\n \"PS\": {\n \"mustSuccess\": False,\n \"backoffLimit\": 1,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"METRIC_COLLECTOR_ENABLE\",\n \"value\": str(True)\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": 
\"fedlearner_model\"\n },\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.ps_cpu),\n \"memory\": str(self.variables.ps_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.ps_cpu),\n \"memory\": str(self.variables.ps_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(int(self.variables.ps_replicas))\n },\n \"Worker\": {\n \"mustSuccess\": True,\n \"backoffLimit\": int(self.variables.worker_backoff_limit),\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(self.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": str(self.variables.mode)\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(int(self.variables.verbosity))\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": str(self.variables.algorithm.path)\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": \"\"\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(bool(self.variables.sparse_estimator))\n },\n {\n \"name\": \"SUMMARY_SAVE_STEPS\",\n \"value\": str(None)\n },\n {\n \"name\": \"METRIC_COLLECTOR_ENABLE\",\n \"value\": str(True)\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": \"fedlearner_model\"\n },\n ] + list(self.variables.algorithm.config),\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export WORKER_RANK=$$INDEX && export PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/trainer/run_trainer_worker.sh\"],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(int(self.variables.worker_replicas))\n },\n 
\"LocalWorker\": {\n \"backoffLimit\": 6,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"name\": \"localworker\",\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"command\": [\"/app/deploy/scripts/trainer/run_trainer_local_worker.sh\"],\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\":\"FL_STATS_URL\",\n \"value\":\"udp://statsd-v1-service.fedlearner:8125\"\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(self.variables.role).lower()\n },\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": project.variables.storage_root_path\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": project.variables.storage_root_path + \"/job_output/\" + self.name\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": str(self.variables.algorithm.path)\n },\n {\n \"name\": \"MODE\",\n \"value\": self.variables.mode\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": self.variables.sparse_estimator\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(bool(self.variables.sparse_estimator))\n },\n {\n \"name\": \"METRIC_COLLECTOR_ENABLE\",\n \"value\": str(True)\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": \"fedlearner_model\"\n },\n ] + system.variables.envs_list + list(self.variables.algorithm.config),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": self.variables.worker_cpu,\n \"memory\": self.variables.worker_mem\n },\n \"requests\": {\n \"cpu\": self.variables.worker_cpu,\n \"memory\": self.variables.worker_mem\n }\n }\n },\n ],\n \"imagePullSecrets\": [{\"name\": \"regcred\"}],\n \"volumes\": list(system.variables.volumes_list) + []\n }\n },\n \"replicas\": int(self.variables.local_worker_replicas)\n }\n }\n }\n}\n", + "dependencies": [], + "easy_mode": false + } + ], + "variables": [] + }, + "editor_info": { + "yaml_editor_infos": { + "nn-model": { + "slots": { + "Slot_start_date": { + "reference": "self.variables.start_date", + "help": "training data start date", + "reference_type": "SELF", + "label": "开始时间", + "default_value": null, + "value_type": "INT", + "default": "" + }, + "Slot_worker_replicas": { + "reference": "self.variables.worker_replicas", + "help": "同时运行的完全相同的Worker Pods数量", + "reference_type": "SELF", + "label": "Worker的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_load_checkpoint_filename": { + "reference": "self.variables.load_checkpoint_path", + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "reference_type": "SELF", + "label": "LOAD_CHECKPOINT_FILENAME", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_checkpoint_path": { + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "label": "CHECKPOINT_PATH", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": 
"STRING" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_ps_envs": { + "help": "数组类型,ps pod额外的环境变量", + "label": "PS额外环境变量", + "default_value": [], + "value_type": "LIST", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_role": { + "reference": "self.variables.role", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "SELF", + "label": "Flapp通讯时角色", + "default_value": "Leader", + "default": "", + "value_type": "STRING" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息", + "default_value": {}, + "value_type": "OBJECT", + "default": "" + }, + "Slot_mode": { + "reference": "self.variables.mode", + "help": "choices:['train','eval'] 训练还是验证", + "reference_type": "SELF", + "label": "模式", + "default_value": "train", + "default": "", + "value_type": "STRING" + }, + "Slot_ps_cpu": { + "reference": "self.variables.ps_cpu", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "SELF", + "label": "PS的CPU", + "default_value": "2000m", + "default": "", + "value_type": "STRING" + }, + "Slot_epoch_num": { + "reference": "self.variables.epoch_num", + "help": "number of epoch for training, not support in online training", + "reference_type": "SELF", + "label": "epoch数量", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_code_tar": { + "help": "代码包,variable中请使用代码类型", + "label": "代码", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_worker_memory": { + "reference": "self.variables.worker_mem", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "reference_type": "SELF", + "label": "Worker的内存", + "default_value": "3Gi", + "default": "", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录", + "default_value": "/data", + "default": "", + "value_type": "STRING" + }, + "Slot_ps_memory": { + "reference": "self.variables.ps_mem", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "reference_type": "SELF", + "label": "PS的内存", + "default_value": "3Gi", + "default": "", + "value_type": "STRING" + }, + "Slot_save_checkpoint_secs": { + "reference": "self.variables.save_checkpoint_secs", + "help": "int,Number of secs between checkpoints.", + "reference_type": "SELF", + "label": "SAVE_CHECKPOINT_SECS", + "default_value": null, + "value_type": "INT", + "default": "" + }, + "Slot_online_training": { + "help": "['','--online_training'] 否 是,the train master run for online training", + "label": "是否在线训练", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "reference": "self.variables.master_cpu", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "SELF", + "label": "Master的CPU", + "default_value": "2000m", + "default": "", + "value_type": "STRING" + }, + "Slot_master_replicas": { + "reference": "self.variables.master_replicas", + "help": "同时运行的完全相同的Master Pods数量", + "reference_type": "SELF", + "label": "Master的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, 
+ "Slot_export_path": { + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "label": "EXPORT_PATH", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_ps_replicas": { + "reference": "self.variables.ps_replicas", + "help": "同时运行的完全相同的PS Pods数量", + "reference_type": "SELF", + "label": "PS的Pod个数", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_verbosity": { + "reference": "self.variables.verbosity", + "help": "int, Logging level", + "reference_type": "SELF", + "label": "日志等级", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_summary_save_steps": { + "help": "int, Number of steps to save summary files.", + "label": "SUMMARY_SAVE_STEPS", + "default_value": null, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_load_checkpoint_filename_with_path": { + "reference": "self.variables.load_checkpoint_filename_with_path", + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "reference_type": "SELF", + "label": "从绝对路径加载checkpoint", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_load_checkpoint_from_job": { + "reference": "self.variables.load_model_name", + "help": "指定任务名job_output下的latest checkpoint", + "reference_type": "SELF", + "label": "以任务名加载checkpoint", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_worker_cpu": { + "reference": "self.variables.worker_cpu", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "SELF", + "label": "Worker的CPU", + "default_value": "2000m", + "default": "", + "value_type": "STRING" + }, + "Slot_sparse_estimator": { + "reference": "self.variables.sparse_estimator", + "help": "bool,default False Whether using sparse estimator.", + "reference_type": "SELF", + "label": "SPARSE_ESTIMATOR", + "default_value": false, + "value_type": "BOOL", + "default": "" + }, + "Slot_image_version": { + "reference": "self.variables.image_version", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "SELF", + "label": "容器镜像版本", + "default_value": "882310f", + "default": "", + "value_type": "STRING" + }, + "Slot_data_source": { + "reference": "self.variables.data_source", + "help": "必须修改,求交任务的名字", + "reference_type": "SELF", + "label": "数据源", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_suffle_data_block": { + "reference": "self.variables.shuffle_data_block", + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "reference_type": "SELF", + "label": "是否shuffle数据块", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_code_key": { + "reference": "self.variables.algorithm.path", + "help": "代码tar包地址,如果为空则使用code tar", + "reference_type": "SELF", + "label": "模型代码路径", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "reference": "self.variables.algorithm.config", + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "SELF", + "label": "Worker额外环境变量", + "default_value": [], + "value_type": "LIST", + "default": "" + }, + "Slot_end_date": { + "reference": "self.variables.end_date", + "help": "training data end date", + "reference_type": "SELF", + "label": "结束时间", + "default_value": null, + "value_type": "INT", + "default": "" + }, + "Slot_master_memory": { + "reference": "self.variables.master_mem", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + 
"reference_type": "SELF", + "label": "Master的内存", + "default_value": "3Gi", + "default": "", + "value_type": "STRING" + }, + "Slot_save_checkpoint_steps": { + "reference": "self.variables.save_checkpoint_steps", + "help": "int, Number of steps between checkpoints.", + "reference_type": "SELF", + "label": "SAVE_CHECKPOINT_STEPS", + "default_value": 1000.0, + "value_type": "INT", + "default": "" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_master_envs": { + "reference": "self.variables.algorithm.config", + "help": "数组类型,master pod额外的环境变量", + "reference_type": "SELF", + "label": "Master额外环境变量", + "default_value": [], + "value_type": "LIST", + "default": "" + } + }, + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"EPOCH_NUM\",\n \"value\": str(${Slot_epoch_num})\n },\n {\n \"name\": \"START_DATE\",\n \"value\": str(${Slot_start_date})\n },\n {\n \"name\": \"END_DATE\",\n \"value\": str(${Slot_end_date})\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"ONLINE_TRAINING\",\n \"value\": ${Slot_online_training}\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"CHECKPOINT_PATH\",\n \"value\": ${Slot_checkpoint_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME\",\n \"value\": ${Slot_load_checkpoint_filename}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_FILENAME_WITH_PATH\",\n \"value\": ${Slot_load_checkpoint_filename_with_path}\n },\n {\n \"name\": \"LOAD_CHECKPOINT_PATH\",\n \"value\": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + \"/job_output/\" + ${Slot_load_checkpoint_from_job} + \"/checkpoints\"\n },\n {\n 
\"name\": \"EXPORT_PATH\",\n \"value\": ${Slot_export_path}\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_master_replicas})\n },\n \"PS\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n }\n\n ] + ${Slot_ps_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/trainer/run_trainer_ps.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_ps_cpu},\n \"memory\": ${Slot_ps_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": int(${Slot_ps_replicas})\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"CODE_KEY\",\n \"value\": ${Slot_code_key}\n },\n {\n \"name\": \"CODE_TAR\",\n \"value\": ${Slot_code_tar}\n },\n {\n \"name\": \"SAVE_CHECKPOINT_STEPS\",\n \"value\": str(${Slot_save_checkpoint_steps})\n },\n {\n \"name\": \"SAVE_CHECKPOINT_SECS\",\n \"value\": str(${Slot_save_checkpoint_secs})\n },\n {\n \"name\": \"SPARSE_ESTIMATOR\",\n \"value\": str(${Slot_sparse_estimator})\n },\n {\n \"name\": 
\"SUMMARY_SAVE_STEPS\",\n \"value\": str(${Slot_summary_save_steps})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_trainer_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_worker_replicas})\n }\n }\n }\n}\n", + "variables": [] + } + } + }, + "comment": "" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-ot-psi-analyzer.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-ot-psi-analyzer.json new file mode 100644 index 000000000..d60d6626f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-ot-psi-analyzer.json @@ -0,0 +1,223 @@ +{ + "comment": "", + "config": { + "group_alias": "ot-psi", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "partition-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/partition.py\",\n \"arguments\": [\n \"--input_path=\" + workflow.variables.input_batch_path + '/' + 'part*',\n \"--file_format=\" + workflow.variables.file_format,\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--output_file_format=\" + workflow.variables.file_format,\n \"--output_dir=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1]\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 2,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": 
system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "partition-job" + } + ], + "easy_mode": false, + "is_federated": true, + "job_type": "PSI_DATA_JOIN", + "name": "ot-psi", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"fedReplicaSpecs\": {\n \"Worker\": {\n \"backoffLimit\": 5,\n \"port\": {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\"\n },\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"name\": \"psi\",\n \"image\": system.variables.image_repo + \"/pp_lite:\" + system.version,\n \"env\": system.variables.envs_list + system.basic_envs_list + [\n {\n \"name\": \"ROLE\",\n \"value\": workflow.variables.role\n },\n {\n \"name\": \"JOB_TYPE\",\n \"value\": \"psi-ot\"\n },\n {\n \"name\": \"PEER_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"AUTHORITY\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"PEER_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"INPUT_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/ids'\n },\n {\n \"name\": \"OUTPUT_PATH\",\n \"value\": workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined'\n },\n {\n \"name\": \"KEY_COLUMN\",\n \"value\": workflow.variables.part_key\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": \"pplite_psi\"\n }\n ],\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"resources\": {\n \"limits\": {\n \"cpu\": workflow.variables.worker_cpu,\n \"memory\": workflow.variables.worker_mem\n },\n \"requests\": {\n \"cpu\": workflow.variables.worker_cpu,\n \"memory\": workflow.variables.worker_mem\n }\n },\n \"ports\": [\n {\n \"containerPort\": 32443,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50051,\n \"name\": \"server-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tunnel-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 1212,\n \"name\": \"joiner-port\",\n \"protocol\": \"TCP\"\n }\n ],\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list),\n }\n },\n \"pair\": True,\n \"replicas\": int(workflow.variables.replicas)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "ot-psi" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "feature-extraction", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": 
\"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/feature_extraction_v2.py\",\n \"arguments\": [\n \"--original_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/raw',\n \"--joined_data_path=\" + workflow.variables.output_dataset_path + '/side_output/' + workflow.variables.output_batch_path.split('/')[-1] + '/joined',\n \"--part_key=\" + workflow.variables.part_key,\n \"--part_num=\" + workflow.variables.part_num,\n \"--file_format=\" + workflow.variables.file_format,\n \"--output_file_format=\" + workflow.variables.output_file_format,\n \"--output_batch_name=\" + workflow.variables.output_batch_path.split('/')[-1],\n \"--output_dataset_path=\" + workflow.variables.output_dataset_path\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 2,\n \"memory\": '4g',\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "feature-extraction" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n \"tabular\",\n \"--data_path=\"+ (str(workflow.variables.output_dataset_path) or str(project.variables.storage_root_path) + \"/\" + \"dataset\" + \"/\" + \"\"),\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--buckets_num=\" + str(10),\n \"--thumbnail_path=\" + \"\",\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n 
\"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.basic_envs_list + system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": int(workflow.variables.executor_cores),\n \"instances\": 1,\n \"memory\": workflow.variables.executor_mem,\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": int(workflow.variables.initial_executor_num),\n \"minExecutors\": int(workflow.variables.min_executor_num),\n \"maxExecutors\": int(workflow.variables.max_executor_num)\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "input_dataset_path", + "tag": "INPUT_PATH", + "typed_value": "undefined", + "value": "undefined", + "value_type": "STRING", + "widget_schema": "{\"component\":\"DatasetPath\",\"required\":true,\"tooltip\":\"输入数据地址\",\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_key", + "tag": "INPUT_PARAM", + "typed_value": "raw_id", + "value": "raw_id", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交的key\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "part_num", + "tag": "INPUT_PARAM", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"分区数量\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输出数据batch地址\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "output_dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输出数据集地址\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "OPERATING_PARAM", + "typed_value": "undefined", + "value": "undefined", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"tooltip\":\"OtPsi角色\",\"enum\":[\"client\",\"server\"],\"hidden\":false,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "replicas", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "10", + "value": "10", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"求交worker数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4Gi", + "value": "4Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "input_batch_path", + "tag": "INPUT_PATH", + 
"typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"输入batch地址\",\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_cores", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor核数\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "executor_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4g", + "value": "4g", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"executor内存\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "initial_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务初始化executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "min_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2", + "value": "2", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最小executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "max_executor_num", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "64", + "value": "64", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"spark任务最大executor数量\",\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_format", + "tag": "OPERATING_PARAM", + "typed_value": "tfrecords", + "value": "tfrecords", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"输入数据格式,支持csv或tfrecords\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_file_format", + "tag": "OPERATING_PARAM", + "typed_value": "tfrecords", + "value": "tfrecords", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"输出数据格式,支持csv或tfrecords\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true,\"tag\":\"OUTPUT_PATH\"}" + } + ] + }, + "editor_info": {}, + "name": "sys-preset-ot-psi-analyzer", + "revision_index": 32 +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-psi-data-join-analyzer.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-psi-data-join-analyzer.json new file mode 100644 index 000000000..e7fc038be --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-psi-data-join-analyzer.json @@ -0,0 +1,240 @@ +{ + "comment": "", + "config": { + "group_alias": "sys_preset_psi_data_join", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": false, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": 
dict(system.variables.labels)\n },\n \"spec\": {\n \"activeDeadlineSeconds\": int(workflow.variables.fedapp_active_ttl),\n \"fedReplicaSpecs\": {\n \"Master\": {\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": str(workflow.variables.data_portal_type)\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(workflow.variables.dataset)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(workflow.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + workflow.variables.fedlearner_image_version,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(workflow.variables.master_cpu),\n \"memory\": str(workflow.variables.master_mem)\n },\n \"requests\": {\n \"cpu\": str(workflow.variables.master_cpu),\n \"memory\": str(workflow.variables.master_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": 1\n },\n \"Worker\": {\n \"backoffLimit\": 6,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(int(workflow.variables.batch_size))\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n 
\"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + workflow.variables.fedlearner_image_version,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(workflow.variables.raw_worker_cpu),\n \"memory\": str(workflow.variables.raw_worker_mem)\n },\n \"requests\": {\n \"cpu\": str(workflow.variables.raw_worker_cpu),\n \"memory\": str(workflow.variables.raw_worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job" + } + ], + "easy_mode": false, + "is_federated": true, + "job_type": "TRANSFORMER", + "name": "psi-data-join-job", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"activeDeadlineSeconds\": int(workflow.variables.fedapp_active_ttl),\n \"fedReplicaSpecs\": {\n \"Master\": {\n \"mustSuccess\": False,\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"DATA_JOIN_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + workflow.variables.fedlearner_image_version,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": 
\"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"],\n \n \"resources\": {\n \"limits\": {\n \"cpu\": str(workflow.variables.master_cpu),\n \"memory\": str(workflow.variables.master_mem)\n },\n \"requests\": {\n \"cpu\": str(workflow.variables.master_cpu),\n \"memory\": str(workflow.variables.master_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": 1\n },\n \"Worker\": {\n \"backoffLimit\": 6,\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(workflow.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job'].name)\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": str(workflow.variables.rsa_key_pem)\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": str(workflow.variables.rsa_key_path)\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": str(workflow.variables.rsa_key_path)\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": \"\"\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": \"data.aml.fl\"\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": str(workflow.variables.output_type)\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": str(workflow.variables.output_type)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(None)\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(None)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"DATA_JOIN_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + workflow.variables.fedlearner_image_version,\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export WORKER_RANK=$$INDEX && export 
PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(workflow.variables.psi_worker_cpu),\n \"memory\": str(workflow.variables.psi_worker_mem)\n },\n \"requests\": {\n \"cpu\": str(workflow.variables.psi_worker_cpu),\n \"memory\": str(workflow.variables.psi_worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": int(int(workflow.variables.num_partitions))\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "psi-data-join-job" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "TRANSFORMER", + "name": "converter", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/converter_v2.py\",\n \"arguments\": [\n \"tabular\",\n \"--output_dataset_path=\" + workflow.variables.output_dataset_path, \n \"--output_batch_path=\" + workflow.variables.output_batch_path,\n \"--input_batch_path=\" + str(project.variables.storage_root_path) + \"/data_source/\" + workflow.jobs['psi-data-join-job'].name + \"/data_block/**/*.data\",\n \"--format=tfrecords\",\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20,\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"executor\": {\n \"cores\": 2,\n \"instances\": 2,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + [],\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": 2,\n \"maxExecutors\": 64,\n \"minExecutors\": 2,\n },\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "converter" + } + ], + "easy_mode": false, + "is_federated": false, + "job_type": "ANALYZER", + "name": "analyzer", + "variables": [], + "yaml_template": "{\n \"apiVersion\": \"sparkoperator.k8s.io/v1beta2\",\n \"kind\": \"SparkApplication\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner-spark\",\n \"schedulerName\": \"batch\",\n },\n },\n \"spec\": {\n \"type\": \"Python\",\n \"pythonVersion\": \"3\",\n \"mode\": \"cluster\",\n \"image\": (system.variables.get(\"spark_image_repo\") or str(system.variables.image_repo + \"/pp_data_inspection\")) + \":\" + system.version,\n \"imagePullPolicy\": \"IfNotPresent\",\n \"volumes\": 
list(system.variables.volumes_list),\n \"mainApplicationFile\": \"/opt/spark/work-dir/analyzer_v2.py\",\n \"arguments\": [\n \"tabular\",\n \"--data_path=\" + workflow.variables.output_dataset_path,\n \"--file_wildcard=\" + \"batch/**/**\",\n \"--batch_name=\" + str(workflow.variables.output_batch_name),\n ],\n \"sparkVersion\": \"3.0.0\",\n \"restartPolicy\": {\n \"type\": \"OnFailure\",\n \"onFailureRetries\": 3,\n \"onFailureRetryInterval\": 10,\n \"onSubmissionFailureRetries\": 5,\n \"onSubmissionFailureRetryInterval\": 20\n },\n \"driver\": {\n \"cores\": 1,\n \"coreLimit\": \"1200m\",\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"serviceAccount\": \"spark\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"executor\": {\n \"cores\": 2,\n \"instances\": 2,\n \"memory\": \"4g\",\n \"labels\": {\n \"version\": \"3.0.0\"\n },\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"env\": system.variables.envs_list + []\n },\n \"dynamicAllocation\": {\n \"enabled\": True,\n \"initialExecutors\": 2,\n \"maxExecutors\": 64,\n \"minExecutors\": 2,\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "fedlearner_image_version", + "tag": "INPUT_PARAM", + "typed_value": "50a6945", + "value": "50a6945", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"镜像版本不建议修改,如若修改请使用新于此版本的镜像\",\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "num_partitions", + "tag": "INPUT_PARAM", + "typed_value": 2.0, + "value": "2", + "value_type": "NUMBER", + "widget_schema": "{\"component\":\"NumberPicker\",\"required\":true,\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_READABLE", + "name": "dataset", + "tag": "INPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"DatasetPath\",\"required\":true,\"hidden\":false,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "raw_worker_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "raw_worker_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "8Gi", + "value": "8Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_wildcard", + "tag": "INPUT_PATH", + "typed_value": "*part*", + "value": "*part*", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "batch_size", + "tag": "INPUT_PARAM", + "typed_value": "102400", + "value": "102400", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "data_portal_type", + "tag": "OPERATING_PARAM", + "typed_value": "PSI", + "value": "PSI", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"PSI\"],\"hidden\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "OPERATING_PARAM", + "typed_value": "Leader", + "value": "Leader", + "value_type": "STRING", + 
"widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"],\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_path", + "tag": "OPERATING_PARAM", + "typed_value": "/app/deploy/integrated_test/rsa_private.key", + "value": "/app/deploy/integrated_test/rsa_private.key", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填,私钥需要同时填写rsa_key_path和rsa_private_key_path,且内容一致\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PRIVATE", + "name": "rsa_key_pem", + "tag": "OPERATING_PARAM", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"TextArea\",\"required\":false,\"tooltip\":\"直接输入RSA公钥和私钥,Leader会从中读取私钥,Follower会从中读取公钥。如果为空会使用path读取。\",\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_type", + "tag": "OPERATING_PARAM", + "typed_value": "TF_RECORD", + "value": "TF_RECORD", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"TF_RECORD\"],\"tooltip\":\"输出的datablock的格式,支持csv和tfrecord两种格式\",\"hidden\":true,\"tag\":\"OPERATING_PARAM\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "psi_worker_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "psi_worker_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "8Gi", + "value": "8Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "master_cpu", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "master_mem", + "tag": "RESOURCE_ALLOCATION", + "typed_value": "4Gi", + "value": "4Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_dataset_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"结果数据集的路径\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_path", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"结果数据集的 batch 的路径\",\"tag\":\"OUTPUT_PATH\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "fedapp_active_ttl", + "tag": "", + "typed_value": "259200", + "value": "259200", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"单个分片或求交任务运行最大时间\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_batch_name", + "tag": "OUTPUT_PATH", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"OUTPUT_PATH\",\"hidden\":true}" + } + ] + }, + "editor_info": {}, + "name": "sys-preset-psi-data-join-analyzer" +} \ No newline at end of file diff --git 
a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-psi-data-join.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-psi-data-join.json new file mode 100644 index 000000000..3d862f073 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-psi-data-join.json @@ -0,0 +1,778 @@ +{ + "comment": null, + "config": { + "group_alias": "sys_preset_psi_data_join", + "job_definitions": [ + { + "dependencies": [], + "easy_mode": true, + "is_federated": false, + "job_type": "RAW_DATA", + "name": "raw-data-job", + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "dataset", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"DatasetPath\",\"required\":true,\"hidden\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "raw_worker_cpu", + "tag": "", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "raw_worker_mem", + "tag": "", + "typed_value": "8Gi", + "value": "8Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "file_wildcard", + "tag": "", + "typed_value": "*part*", + "value": "*part*", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"hidden\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "batch_size", + "tag": "", + "typed_value": "102400", + "value": "102400", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "data_portal_type", + "tag": "", + "typed_value": "PSI", + "value": "PSI", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"enum\":[\"PSI\"],\"hidden\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "raw_master_cpu", + "tag": "", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "raw_master_mem", + "tag": "", + "typed_value": "4Gi", + "value": "4Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n 
},\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": str(self.variables.data_portal_type)\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": str(self.variables.dataset)\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(self.variables.file_wildcard)\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": \"\"\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": \"\"\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(None)\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": \"\"\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.raw_master_cpu),\n \"memory\": str(self.variables.raw_master_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.raw_master_cpu),\n \"memory\": str(self.variables.raw_master_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(int(self.variables.batch_size))\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": \"\"\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(70)\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": \"\"\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n\n\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.raw_worker_cpu),\n \"memory\": 
str(self.variables.raw_worker_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.raw_worker_cpu),\n \"memory\": str(self.variables.raw_worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": False,\n \"replicas\": int(workflow.variables.num_partitions)\n }\n }\n }\n}\n" + }, + { + "dependencies": [ + { + "source": "raw-data-job" + } + ], + "easy_mode": true, + "is_federated": true, + "job_type": "PSI_DATA_JOIN", + "name": "psi-data-join-job", + "variables": [ + { + "access_mode": "PEER_WRITABLE", + "name": "role", + "tag": "", + "typed_value": "Leader", + "value": "Leader", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"],\"tooltip\":\"\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_path", + "tag": "", + "typed_value": "/app/deploy/integrated_test/rsa_private.key", + "value": "/app/deploy/integrated_test/rsa_private.key", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填,私钥需要同时填写rsa_key_path和rsa_private_key_path,且内容一致\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "rsa_key_pem", + "tag": "", + "typed_value": "", + "value": "", + "value_type": "STRING", + "widget_schema": "{\"component\":\"TextArea\",\"required\":false,\"tooltip\":\"直接输入RSA公钥和私钥,Leader会从中读取私钥,Follower会从中读取公钥。如果为空会使用path读取。\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "output_type", + "tag": "", + "typed_value": "TF_RECORD", + "value": "TF_RECORD", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"TF_RECORD\"],\"tooltip\":\"输出的datablock的格式,支持csv和tfrecord两种格式\",\"hidden\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_cpu", + "tag": "", + "typed_value": "4000m", + "value": "4000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "worker_mem", + "tag": "", + "typed_value": "8Gi", + "value": "8Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "master_cpu", + "tag": "", + "typed_value": "2000m", + "value": "2000m", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "master_mem", + "tag": "", + "typed_value": "4Gi", + "value": "4Gi", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":false}" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": dict(system.variables.labels)\n },\n \"spec\": {\n \"role\": str(self.variables.role),\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if str(self.variables.role)==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n 
\"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(self.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(0)\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(999999999999)\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job'].name)\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"DATA_JOIN_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.master_cpu),\n \"memory\": str(self.variables.master_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.master_cpu),\n \"memory\": str(self.variables.master_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(self.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": \"--batch_mode\"\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(int(workflow.variables.num_partitions))\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + str(workflow.jobs['raw-data-job'].name)\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": str(self.variables.rsa_key_pem)\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": str(self.variables.rsa_key_path)\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": str(self.variables.rsa_key_path)\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": \"\"\n },\n {\n 
\"name\": \"KMS_CLIENT\",\n \"value\": \"data.aml.fl\"\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": \"TF_RECORD\"\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": str(self.variables.output_type)\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n \"value\": str(self.variables.output_type)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(-1)\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(4096)\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(None)\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(None)\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(0.0)\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(False)\n },\n {\n \"name\": \"DATA_JOIN_METRICS_SAMPLE_RATE\",\n \"value\": str(\"0\")\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": str(workflow.variables.image),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"pair\": True,\n \"replicas\": int(int(workflow.variables.num_partitions))\n }\n }\n }\n}\n" + } + ], + "variables": [ + { + "access_mode": "PEER_READABLE", + "name": "image", + "tag": "", + "typed_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "value_type": "STRING", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"镜像版本不建议修改,如若修改请使用新于此版本的镜像\"}" + }, + { + "access_mode": "PEER_WRITABLE", + "name": "num_partitions", + "tag": "", + "typed_value": 2.0, + "value": "2", + "value_type": "NUMBER", + "widget_schema": "{\"component\":\"NumberPicker\",\"required\":true}" + } + ] + }, + "editor_info": { + "yaml_editor_infos": { + "psi-data-join-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n 
\"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"START_TIME\",\n \"value\": str(${Slot_start_time})\n },\n {\n \"name\": \"END_TIME\",\n \"value\": str(${Slot_end_time})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n # not work, remove it after prepare_launch_data_join_cli been removed\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n {\n \"name\": \"DATA_JOIN_METRICS_SAMPLE_RATE\",\n \"value\": str(${Slot_data_join_metrics_sample_rate})\n }\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"ROLE\",\n \"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"BATCH_MODE\",\n \"value\": ${Slot_batch_mode}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"PARTITION_NUM\",\n \"value\": str(${Slot_partition_num})\n },\n {\n \"name\": \"RAW_DATA_SUB_DIR\",\n \"value\": \"portal_publish_dir/\" + ${Slot_raw_data_name}\n },\n {\n \"name\": \"RSA_KEY_PEM\",\n \"value\": ${Slot_rsa_key_pem}\n },\n {\n \"name\": \"RSA_KEY_PATH\",\n \"value\": ${Slot_rsa_key_path}\n },\n {\n \"name\": \"RSA_PRIVATE_KEY_PATH\",\n \"value\": ${Slot_rsa_key_path}\n },\n {\n \"name\": \"KMS_KEY_NAME\",\n \"value\": ${Slot_kms_key_name}\n },\n {\n \"name\": \"KMS_CLIENT\",\n \"value\": ${Slot_kms_client}\n },\n {\n \"name\": \"PSI_RAW_DATA_ITER\",\n \"value\": ${Slot_psi_raw_data_iter}\n },\n {\n \"name\": \"DATA_BLOCK_BUILDER\",\n \"value\": ${Slot_data_block_builder}\n },\n {\n \"name\": \"PSI_OUTPUT_BUILDER\",\n 
\"value\": ${Slot_psi_output_builder}\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_INTERVAL\",\n \"value\": str(${Slot_data_block_dump_interval})\n },\n {\n \"name\": \"DATA_BLOCK_DUMP_THRESHOLD\",\n \"value\": str(${Slot_data_block_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_INTERVAL\",\n \"value\": str(${Slot_example_id_dump_interval})\n },\n {\n \"name\": \"EXAMPLE_ID_DUMP_THRESHOLD\",\n \"value\": str(${Slot_example_id_dump_threshold})\n },\n {\n \"name\": \"EXAMPLE_JOINER\",\n \"value\": \"SORT_RUN_JOINER\"\n },\n {\n \"name\": \"PSI_READ_AHEAD_SIZE\",\n \"value\": str(${Slot_psi_read_ahead_size})\n },\n {\n \"name\": \"SORT_RUN_MERGER_READ_AHEAD_BUFFER\",\n \"value\": str(${Slot_run_merger_read_ahead_buffer})\n },\n {\n \"name\": \"NEGATIVE_SAMPLING_RATE\",\n \"value\": str(${Slot_negative_sampling_rate})\n },\n {\n \"name\": \"ENABLE_NEGATIVE_EXAMPLE_GENERATOR\",\n \"value\": str(${Slot_enable_negative_example_generator})\n },\n {\n \"name\": \"DATA_JOIN_METRICS_SAMPLE_RATE\",\n \"value\": str(${Slot_data_join_metrics_sample_rate})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": int(${Slot_partition_num})\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_mode": { + "default": "", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "label": "是否为批处理模式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_block_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "data block output数据类型", + "reference": "self.variables.output_type", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_data_block_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "label": "数据dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_block_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "label": "数据dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_data_join_metrics_sample_rate": { + "default": "", + "default_value": "0", + "help": "建议不修改,es metrics 取样比例", + "label": "metrics_sample_rate", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_enable_negative_example_generator": { + "default": "", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "BOOL" + }, + "Slot_end_time": { + "default": "", + "default_value": 999999999999.0, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据末尾时间", + "reference": 
"", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_interval": { + "default": "", + "default_value": -1.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump时间间隔", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_example_id_dump_threshold": { + "default": "", + "default_value": 4096.0, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "label": "数据id dump临界点", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_kms_client": { + "default": "", + "default_value": "data.aml.fl", + "help": "kms client", + "label": "kms client", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_kms_key_name": { + "default": "", + "default_value": "", + "help": "kms中的密钥名称,站内镜像需使用KMS", + "label": "密钥名称", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "self.variables.master_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "self.variables.master_mem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_negative_sampling_rate": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。", + "label": "负采样比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "NUMBER" + }, + "Slot_partition_num": { + "default": "", + "default_value": 4.0, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_psi_output_builder": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "PSI output数据类型", + "reference": "self.variables.output_type", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_psi_raw_data_iter": { + "default": "", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "label": "raw data数据类型", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_psi_read_ahead_size": { + "default": "", + "default_value": null, + "help": "建议不填, the read ahead size for raw data", + "label": "psi_read_ahead_size", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_raw_data_name": { + "default": "", + "default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "label": "raw_data名字", + 
"reference": "workflow.jobs['raw-data-job'].name", + "reference_type": "JOB_PROPERTY", + "value_type": "STRING" + }, + "Slot_role": { + "default": "", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "label": "Flapp通讯时角色", + "reference": "self.variables.role", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_key_path": { + "default": "", + "default_value": "", + "help": "RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填", + "label": "RSA钥匙地址", + "reference": "self.variables.rsa_key_path", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_rsa_key_pem": { + "default": "", + "default_value": "", + "help": "直接输入RSA公钥和私钥,请使用Textarea,Leader会从中读取私钥,Follower会从中读取公钥。如果为空会使用path读取。", + "label": "RSA公钥", + "reference": "self.variables.rsa_key_pem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_run_merger_read_ahead_buffer": { + "default": "", + "default_value": null, + "help": "建议不填, sort run merger read ahead buffer", + "label": "run_merger_read_ahead_buffer", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_start_time": { + "default": "", + "default_value": 0.0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "label": "数据起始时间", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "self.variables.worker_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "self.variables.worker_mem", + "reference_type": "SELF", + "value_type": "STRING" + } + }, + "variables": [] + }, + "raw-data-job": { + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": \"Follower\",\n \"peerSpecs\": {\n \"Leader\": {\n \"peerURL\": \"\",\n \"authority\": \"\"\n }\n },\n \"flReplicaSpecs\": {\n \"Master\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": 
\"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_NAME\",\n \"value\": self.name\n },\n {\n \"name\": \"DATA_PORTAL_TYPE\",\n \"value\": ${Slot_data_portal_type}\n },\n {\n \"name\": \"OUTPUT_PARTITION_NUM\",\n \"value\": str(${Slot_output_partition_num})\n },\n {\n \"name\": \"INPUT_BASE_DIR\",\n \"value\": ${Slot_input_base_dir}\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/raw_data/\" + self.name\n },\n {\n \"name\": \"RAW_DATA_PUBLISH_DIR\",\n \"value\": \"portal_publish_dir/\" + self.name\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": ${Slot_file_wildcard}\n },\n {\n \"name\": \"LONG_RUNNING\",\n \"value\": ${Slot_long_running}\n },\n {\n \"name\": \"CHECK_SUCCESS_TAG\",\n \"value\": ${Slot_check_success_tag}\n },\n {\n \"name\": \"FILES_PER_JOB_LIMIT\",\n \"value\": str(${Slot_files_per_job_limit})\n },\n {\n \"name\": \"SINGLE_SUBFOLDER\",\n \"value\": ${Slot_single_subfolder}\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": str(${Slot_raw_data_metrics_sample_rate})\n }\n\n ] + ${Slot_master_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_master.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_master_cpu},\n \"memory\": ${Slot_master_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": 1\n },\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/data_source/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n\n {\n \"name\": \"BATCH_SIZE\",\n \"value\": str(${Slot_batch_size})\n },\n {\n \"name\": \"INPUT_DATA_FORMAT\",\n \"value\": ${Slot_input_data_format}\n },\n {\n \"name\": \"COMPRESSED_TYPE\",\n \"value\": ${Slot_compressed_type}\n },\n {\n \"name\": \"OUTPUT_DATA_FORMAT\",\n \"value\": ${Slot_output_data_format}\n },\n {\n \"name\": \"BUILDER_COMPRESSED_TYPE\",\n \"value\": ${Slot_builder_compressed_type}\n },\n {\n \"name\": \"MEMORY_LIMIT_RATIO\",\n \"value\": str(${Slot_memory_limit_ratio})\n },\n {\n \"name\": \"OPTIONAL_FIELDS\",\n \"value\": ${Slot_optional_fields}\n },\n {\n \"name\": \"RAW_DATA_METRICS_SAMPLE_RATE\",\n \"value\": 
str(${Slot_raw_data_metrics_sample_rate})\n }\n\n\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": ${Slot_image},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/data_portal/run_data_portal_worker.sh\"\n ],\n \"args\": [\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_memory}\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": False,\n \"replicas\": ${Slot_output_partition_num}\n }\n }\n }\n}\n", + "slots": { + "Slot_batch_size": { + "default": "", + "default_value": 1024.0, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "label": "Batch大小", + "reference": "self.variables.batch_size", + "reference_type": "SELF", + "value_type": "INT" + }, + "Slot_builder_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "label": "输出压缩格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_check_success_tag": { + "default": "", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "label": "是否检查成功标志", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_compressed_type": { + "default": "", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "label": "压缩方式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_data_portal_type": { + "default": "", + "default_value": "PSI", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "label": "数据入口类型", + "reference": "self.variables.data_portal_type", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_file_wildcard": { + "default": "", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. 
xxx.txt,意为读取文件名为xxx.txt的文件", + "label": "文件名称的通配符", + "reference": "self.variables.file_wildcard", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_files_per_job_limit": { + "default": "", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "label": "每个任务最多文件数", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_image": { + "default": "", + "default_value": "artifact.bytedance.com/fedlearner/fedlearner:50a6945", + "help": "建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模板不适用", + "label": "容器镜像", + "reference": "workflow.variables.image", + "reference_type": "WORKFLOW", + "value_type": "STRING" + }, + "Slot_input_base_dir": { + "default": "", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "label": "输入路径", + "reference": "self.variables.dataset", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_input_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "label": "输入数据格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_labels": { + "default": "", + "default_value": {}, + "help": "建议不修改,格式: {}", + "label": "FLAPP额外元信息", + "reference": "system.variables.labels", + "reference_type": "SYSTEM", + "value_type": "OBJECT" + }, + "Slot_long_running": { + "default": "", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "label": "是否常驻", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_master_cpu": { + "default": "", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "label": "Master的CPU", + "reference": "self.variables.raw_master_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_master_envs": { + "default": "", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "label": "Master额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_master_memory": { + "default": "", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "label": "Master的内存", + "reference": "self.variables.raw_master_mem", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_memory_limit_ratio": { + "default": "", + "default_value": 70.0, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "label": "内存限制比例", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "INT" + }, + "Slot_optional_fields": { + "default": "", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. 
\"label,rit\"Each field will be stripped", + "label": "可选字段", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_data_format": { + "default": "", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "label": "输出格式", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_output_partition_num": { + "default": "", + "default_value": 4.0, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "label": "数据分区的数量", + "reference": "workflow.variables.num_partitions", + "reference_type": "WORKFLOW", + "value_type": "INT" + }, + "Slot_raw_data_metrics_sample_rate": { + "default": "", + "default_value": "0", + "help": "建议不修改,es metrics 取样比例", + "label": "metrics_sample_rate", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_single_subfolder": { + "default": "", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "label": "是否单一子文件夹", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "default": "", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "label": "存储根目录", + "reference": "project.variables.storage_root_path", + "reference_type": "PROJECT", + "value_type": "STRING" + }, + "Slot_volume_mounts": { + "default": "", + "default_value": [ + { + "mountPath": "/data", + "name": "data" + } + ], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "label": "卷挂载位置", + "reference": "system.variables.volume_mounts_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_volumes": { + "default": "", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "label": "为Pod提供的卷", + "reference": "system.variables.volumes_list", + "reference_type": "SYSTEM", + "value_type": "LIST" + }, + "Slot_worker_cpu": { + "default": "", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "label": "Worker的CPU", + "reference": "self.variables.raw_worker_cpu", + "reference_type": "SELF", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "default": "", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "reference": "", + "reference_type": "DEFAULT", + "value_type": "LIST" + }, + "Slot_worker_memory": { + "default": "", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "label": "Worker的内存", + "reference": "self.variables.raw_worker_mem", + "reference_type": "SELF", + "value_type": "STRING" + } + }, + "variables": [] + } + } + }, + "group_alias": "sys_preset_psi_data_join", + "name": "sys-preset-psi-data-join" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-tree-model.json b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-tree-model.json new file mode 100644 index 000000000..9598943e7 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/sys_preset_templates/sys-preset-tree-model.json @@ -0,0 +1,592 @@ +{ + "name": "sys-preset-tree-model", + "group_alias": "sys_preset_tree_model", + "config": { + "group_alias": "sys_preset_tree_model", + "job_definitions": [ + { + "name": "tree-model", + "job_type": "TREE_MODEL_TRAINING", + "is_federated": true, + "variables": [ + { + "name": "image_version", + "value": "50a6945", + "access_mode": 
"PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tooltip\":\"镜像版本\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "50a6945", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "mode", + "value": "train", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"train\",\"eval\"],\"tag\":\"OPERATING_PARAM\"}", + "typed_value": "train", + "tag": "OPERATING_PARAM", + "value_type": "STRING" + }, + { + "name": "data_source", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"求交数据集名称\",\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "validation_data_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "file_type", + "value": "tfrecord", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"csv\",\"tfrecord\"],\"tooltip\":\"文件类型,csv或tfrecord\",\"tag\":\"INPUT_PATH\"}", + "typed_value": "tfrecord", + "tag": "INPUT_PATH", + "value_type": "STRING" + }, + { + "name": "load_model_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"模型文件地址\",\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "loss_type", + "value": "logistic", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"enum\":[\"logistic\",\"mse\"],\"tooltip\":\"损失函数类型,logistic或mse,默认logistic\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "logistic", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "learning_rate", + "value": "0.3", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "0.3", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "max_iters", + "value": "10", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"树的数量\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "10", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "max_depth", + "value": "5", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PARAM\"}", + "typed_value": "5", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "max_bins", + "value": "33", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"最大分箱数\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "33", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "l2_regularization", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"L2惩罚系数\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "1", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "num_parallel", + "value": "5", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"进程数量\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "5", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "enable_packing", + 
"value": "true", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"enum\":[\"true\",\"false\"],\"tooltip\":\"是否开启优化\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "true", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "ignore_fields", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"不入模特征,以逗号分隔如:name,age,sex\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "cat_fields", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "verify_example_ids", + "value": "false", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"tooltip\":\"是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower\",\"enum\":[\"false\",\"true\"],\"tag\":\"INPUT_PARAM\"}", + "typed_value": "false", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "verbosity", + "value": "1", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"enum\":[\"0\",\"1\",\"2\"],\"tooltip\":\"日志输出等级\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "1", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "no_data", + "value": "false", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":false,\"tooltip\":\"Leader是否没数据,不建议乱用\",\"enum\":[\"false\",\"true\"],\"tag\":\"INPUT_PARAM\"}", + "typed_value": "false", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "worker_cpu", + "value": "8000m", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "8000m", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "worker_mem", + "value": "16Gi", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"RESOURCE_ALLOCATION\"}", + "typed_value": "16Gi", + "tag": "RESOURCE_ALLOCATION", + "value_type": "STRING" + }, + { + "name": "role", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Select\",\"required\":true,\"enum\":[\"Leader\",\"Follower\"],\"tag\":\"INPUT_PARAM\"}", + "typed_value": "", + "tag": "INPUT_PARAM", + "value": "", + "value_type": "STRING" + }, + { + "name": "label_field", + "value": "label", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"label特征名\",\"tag\":\"INPUT_PARAM\"}", + "typed_value": "label", + "tag": "INPUT_PARAM", + "value_type": "STRING" + }, + { + "name": "load_model_name", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tooltip\":\"按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models\",\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "data_path", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":false,\"tag\":\"INPUT_PATH\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", 
+ "value_type": "STRING" + }, + { + "name": "file_wildcard", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true,\"tag\":\"INPUT_PATH\",\"tooltip\":\"*.data或**/part*\"}", + "typed_value": "", + "tag": "INPUT_PATH", + "value": "", + "value_type": "STRING" + }, + { + "name": "fedapp_active_ttl", + "value": "86400", + "access_mode": "PEER_WRITABLE", + "widget_schema": "{\"component\":\"Input\",\"required\":true}", + "typed_value": "86400", + "value_type": "STRING", + "tag": "" + } + ], + "yaml_template": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FedApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"labels\": dict(system.variables.labels),\n \"annotations\": {\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\",\n \"min-member\": \"1\",\n \"resource-cpu\": str(self.variables.worker_cpu),\n \"resource-mem\": str(self.variables.worker_mem),\n },\n },\n \"spec\": {\n \"activeDeadlineSeconds\": int(self.variables.fedapp_active_ttl),\n \"fedReplicaSpecs\": {\n \"Worker\": {\n \"backoffLimit\": 6,\n \"port\": # 可以没有,没有就是{containerPort: 50051, name: flapp-port, protocol: TCP}\n { \n \"containerPort\": 50051,\n \"name\": \"flapp-port\"\n },\n \"template\": {\n \"spec\": {\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": str(project.variables.storage_root_path)\n },\n {\n \"name\": \"ROLE\",\n \"value\": str(self.variables.role).lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": str(project.variables.storage_root_path) + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": str(self.variables.mode)\n },\n {\n \"name\": \"LOSS_TYPE\",\n \"value\": str(self.variables.loss_type)\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": str(self.variables.data_source)\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": str(self.variables.data_path)\n },\n {\n \"name\": \"VALIDATION_DATA_PATH\",\n \"value\": str(self.variables.validation_data_path)\n },\n {\n \"name\": \"NO_DATA\",\n \"value\": str(bool(self.variables.no_data))\n },\n {\n \"name\": \"FILE_WILDCARD\",\n \"value\": str(self.variables.file_wildcard)\n },\n {\n \"name\": \"FILE_TYPE\",\n \"value\": str(self.variables.file_type)\n },\n {\n \"name\": \"LOAD_MODEL_PATH\",\n \"value\": str(self.variables.load_model_path)\n },\n {\n \"name\": \"LOAD_MODEL_NAME\",\n \"value\": str(self.variables.load_model_name)\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(int(self.variables.verbosity))\n },\n {\n \"name\": \"LEARNING_RATE\",\n \"value\": str(float(self.variables.learning_rate))\n },\n {\n \"name\": \"MAX_ITERS\",\n \"value\": str(int(self.variables.max_iters))\n },\n {\n \"name\": \"MAX_DEPTH\",\n \"value\": str(int(self.variables.max_depth))\n },\n {\n \"name\": \"MAX_BINS\",\n \"value\": str(int(self.variables.max_bins))\n },\n {\n \"name\": \"L2_REGULARIZATION\",\n \"value\": str(float(self.variables.l2_regularization))\n },\n {\n \"name\": \"NUM_PARALLEL\",\n \"value\": str(int(self.variables.num_parallel))\n },\n {\n \"name\": \"VERIFY_EXAMPLE_IDS\",\n \"value\": 
str(bool(self.variables.verify_example_ids))\n },\n {\n \"name\": \"IGNORE_FIELDS\",\n \"value\": str(self.variables.ignore_fields)\n },\n {\n \"name\": \"CAT_FIELDS\",\n \"value\": str(self.variables.cat_fields)\n },\n {\n \"name\": \"LABEL_FIELD\",\n \"value\": str(self.variables.label_field)\n },\n {\n \"name\": \"SEND_SCORES_TO_FOLLOWER\",\n \"value\": str(False)\n },\n {\n \"name\": \"SEND_METRICS_TO_FOLLOWER\",\n \"value\": str(False)\n },\n {\n \"name\": \"ENABLE_PACKING\",\n \"value\": str(bool(self.variables.enable_packing))\n },\n {\n \"name\": \"ES_BATCH_SIZE\",\n \"value\": str(10)\n },\n {\n \"name\": \"METRIC_COLLECTOR_ENABLE\",\n \"value\": str(True)\n },\n {\n \"name\": \"METRIC_COLLECTOR_SERVICE_NAME\",\n \"value\": \"fedlearner_model\"\n }\n ] + [],\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": list(system.variables.volume_mounts_list),\n \"image\": system.variables.image_repo + \"/fedlearner:\" + str(self.variables.image_version),\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\":[\"/bin/bash\",\"-c\"],\n \"args\": [\"export WORKER_RANK=$$INDEX && export PEER_ADDR=$$SERVICE_ID && /app/deploy/scripts/trainer/run_tree_worker.sh\"],\n \"resources\": {\n \"limits\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n },\n \"requests\": {\n \"cpu\": str(self.variables.worker_cpu),\n \"memory\": str(self.variables.worker_mem)\n }\n }\n }\n ],\n \"imagePullSecrets\": [\n {\n \"name\": \"regcred\"\n }\n ],\n \"volumes\": list(system.variables.volumes_list)\n }\n },\n \"replicas\": 1\n }\n }\n }\n}\n", + "dependencies": [], + "easy_mode": false + } + ], + "variables": [] + }, + "editor_info": { + "yaml_editor_infos": { + "tree-model": { + "slots": { + "Slot_send_scores_to_follower": { + "help": "是否发送结果到follower", + "label": "是否发送结果到follower", + "default_value": false, + "value_type": "BOOL", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_l2_regularization": { + "reference": "self.variables.l2_regularization", + "help": "L2惩罚系数", + "reference_type": "SELF", + "label": "L2惩罚系数", + "default_value": 1.0, + "value_type": "NUMBER", + "default": "" + }, + "Slot_verify_example_ids": { + "reference": "self.variables.verify_example_ids", + "help": "是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower", + "reference_type": "SELF", + "label": "是否检查example_id对齐", + "default_value": false, + "value_type": "BOOL", + "default": "" + }, + "Slot_max_iters": { + "reference": "self.variables.max_iters", + "help": "树的数量", + "reference_type": "SELF", + "label": "迭代数", + "default_value": 5.0, + "value_type": "INT", + "default": "" + }, + "Slot_file_type": { + "reference": "self.variables.file_type", + "help": "文件类型,csv或tfrecord", + "reference_type": "SELF", + "label": "文件类型,csv或tfrecord", + "default_value": "tfrecord", + "default": "", + "value_type": "STRING" + }, + "Slot_load_model_name": { + "reference": "self.variables.load_model_name", + "help": "按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models", + "reference_type": "SELF", + "label": "模型任务名称", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_verbosity": { + "reference": "self.variables.verbosity", + "help": "日志输出等级", + "reference_type": 
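Note that the FedApp `yaml_template` above coerces every variable to a string (`str(...)`, `str(int(...))`, `str(float(...))`, `str(bool(...))`) before placing it in the container `env` list, since Kubernetes env values must be strings. A minimal illustration of the resulting shape; `to_env_list` and the sample variables are assumptions for the sketch:

```python
def to_env_list(variables: dict) -> list:
    # Kubernetes env values must be strings, hence the str(...) coercions
    # applied in the template to ints, floats and bools alike.
    return [{'name': name.upper(), 'value': str(value)}
            for name, value in variables.items()]

print(to_env_list({'mode': 'train', 'max_iters': 10, 'learning_rate': 0.3}))
# [{'name': 'MODE', 'value': 'train'},
#  {'name': 'MAX_ITERS', 'value': '10'},
#  {'name': 'LEARNING_RATE', 'value': '0.3'}]
```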
"SELF", + "label": "日志输出等级", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_load_model_path": { + "reference": "self.variables.load_model_path", + "help": "模型文件地址", + "reference_type": "SELF", + "label": "模型文件地址", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_max_bins": { + "reference": "self.variables.max_bins", + "help": "最大分箱数", + "reference_type": "SELF", + "label": "最大分箱数", + "default_value": 33.0, + "value_type": "INT", + "default": "" + }, + "Slot_data_path": { + "help": "数据存放位置", + "label": "数据存放位置", + "default_value": "", + "reference": "", + "default": "", + "reference_type": "DEFAULT", + "value_type": "STRING" + }, + "Slot_no_data": { + "reference": "self.variables.no_data", + "help": "Leader是否没数据", + "reference_type": "SELF", + "label": "Leader是否没数据", + "default_value": false, + "value_type": "BOOL", + "default": "" + }, + "Slot_file_ext": { + "reference": "self.variables.undefined", + "help": "文件后缀", + "reference_type": "SELF", + "label": "文件后缀", + "default_value": ".data", + "default": "", + "value_type": "STRING" + }, + "Slot_label_field": { + "reference": "self.variables.label_field", + "help": "label特征名", + "reference_type": "SELF", + "label": "label特征名", + "default_value": "label", + "default": "", + "value_type": "STRING" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息", + "default_value": {}, + "value_type": "OBJECT", + "default": "" + }, + "Slot_loss_type": { + "reference": "self.variables.loss_type", + "help": "损失函数类型,logistic或mse,默认logistic", + "reference_type": "SELF", + "label": "损失函数类型", + "default_value": "logistic", + "default": "", + "value_type": "STRING" + }, + "Slot_enable_packing": { + "reference": "self.variables.enable_packing", + "help": "是否开启优化", + "reference_type": "SELF", + "label": "是否开启优化", + "default_value": true, + "value_type": "BOOL", + "default": "" + }, + "Slot_worker_cpu": { + "reference": "self.variables.worker_cpu", + "help": "所需CPU", + "reference_type": "SELF", + "label": "所需CPU", + "default_value": "8000m", + "default": "", + "value_type": "STRING" + }, + "Slot_ignore_fields": { + "reference": "self.variables.ignore_fields", + "help": "以逗号分隔如:name,age,sex", + "reference_type": "SELF", + "label": "不入模的特征", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_image_version": { + "reference": "self.variables.image_version", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "SELF", + "label": "容器镜像版本", + "default_value": "882310f", + "default": "", + "value_type": "STRING" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录", + "default_value": "/data", + "default": "", + "value_type": "STRING" + }, + "Slot_num_parallel": { + "reference": "self.variables.num_parallel", + "help": "进程数量", + "reference_type": "SELF", + "label": "进程数量", + "default_value": 1.0, + "value_type": "INT", + "default": "" + }, + "Slot_validation_data_path": { + "reference": "self.variables.validation_data_path", + "help": "验证数据集地址", + "reference_type": "SELF", + "label": "验证数据集地址", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_worker_envs": { + "help": "数组类型,worker pod额外的环境变量", + "label": "Worker额外环境变量", + "default_value": [], + "value_type": "LIST", + "reference": "", + "default": "", + 
"reference_type": "DEFAULT" + }, + "Slot_send_metrics_to_follower": { + "help": "是否发送指标到follower", + "label": "是否发送指标到follower", + "default_value": false, + "value_type": "BOOL", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_max_depth": { + "reference": "self.variables.max_depth", + "help": "最大深度", + "reference_type": "SELF", + "label": "最大深度", + "default_value": 3.0, + "value_type": "INT", + "default": "" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置", + "default_value": [ + { + "name": "data", + "mountPath": "/data" + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_mode": { + "reference": "self.variables.mode", + "help": "任务类型,train或eval", + "reference_type": "SELF", + "label": "任务类型,train或eval", + "default_value": "train", + "default": "", + "value_type": "STRING" + }, + "Slot_learning_rate": { + "reference": "self.variables.learning_rate", + "help": "学习率", + "reference_type": "SELF", + "label": "学习率", + "default_value": 0.3, + "value_type": "NUMBER", + "default": "" + }, + "Slot_worker_mem": { + "reference": "self.variables.worker_mem", + "help": "所需内存", + "reference_type": "SELF", + "label": "所需内存", + "default_value": "16Gi", + "default": "", + "value_type": "STRING" + }, + "Slot_es_batch_size": { + "help": "ES_BATCH_SIZE", + "label": "ES_BATCH_SIZE", + "default_value": 10.0, + "value_type": "INT", + "reference": "", + "default": "", + "reference_type": "DEFAULT" + }, + "Slot_data_source": { + "reference": "self.variables.data_source", + "help": "求交数据集名称", + "reference_type": "SELF", + "label": "求交数据集名称", + "default_value": "", + "default": "", + "value_type": "STRING" + }, + "Slot_role": { + "reference": "self.variables.role", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "SELF", + "label": "Flapp通讯时角色", + "default_value": "Leader", + "default": "", + "value_type": "STRING" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷", + "default_value": [ + { + "name": "data", + "persistentVolumeClaim": { + "claimName": "pvc-fedlearner-default" + } + } + ], + "value_type": "LIST", + "default": "" + }, + "Slot_cat_fields": { + "reference": "self.variables.cat_fields", + "help": "类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex", + "reference_type": "SELF", + "label": "类别类型特征", + "default_value": "", + "default": "", + "value_type": "STRING" + } + }, + "meta_yaml": "{\n \"apiVersion\": \"fedlearner.k8s.io/v1alpha1\",\n \"kind\": \"FLApp\",\n \"metadata\": {\n \"name\": self.name,\n \"namespace\": system.variables.namespace,\n \"annotations\":{\n \"queue\": \"fedlearner\",\n \"schedulerName\": \"batch\"\n },\n \"labels\": ${Slot_labels}\n },\n \"spec\": {\n \"role\": ${Slot_role},\n \"cleanPodPolicy\": \"All\",\n \"peerSpecs\": {\n \"Leader\" if ${Slot_role}==\"Follower\" else \"Follower\": {\n \"peerURL\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\",\n \"authority\": project.participants[0].egress_host,\n \"extraHeaders\": {\n \"x-host\": \"fedlearner-operator.\" + project.participants[0].egress_domain\n }\n }\n },\n \"flReplicaSpecs\": {\n \"Worker\": {\n \"template\": {\n \"spec\": {\n \"restartPolicy\": \"Never\",\n \"containers\": [\n {\n \"env\": system.basic_envs_list + [\n {\n \"name\": \"STORAGE_ROOT_PATH\",\n \"value\": ${Slot_storage_root_path}\n },\n {\n \"name\": \"ROLE\",\n 
\"value\": ${Slot_role}.lower()\n },\n {\n \"name\": \"APPLICATION_ID\",\n \"value\": self.name\n },\n {\n \"name\": \"OUTPUT_BASE_DIR\",\n \"value\": ${Slot_storage_root_path} + \"/job_output/\" + self.name\n },\n {\n \"name\": \"EGRESS_URL\",\n \"value\": \"fedlearner-stack-ingress-nginx-controller.default.svc:80\"\n },\n {\n \"name\": \"EGRESS_HOST\",\n \"value\": project.participants[0].egress_host\n },\n {\n \"name\": \"EGRESS_DOMAIN\",\n \"value\": project.participants[0].egress_domain\n },\n {\n \"name\": \"MODE\",\n \"value\": ${Slot_mode}\n },\n {\n \"name\": \"LOSS_TYPE\",\n \"value\": ${Slot_loss_type}\n },\n {\n \"name\": \"DATA_SOURCE\",\n \"value\": ${Slot_data_source}\n },\n {\n \"name\": \"DATA_PATH\",\n \"value\": ${Slot_data_path}\n },\n {\n \"name\": \"VALIDATION_DATA_PATH\",\n \"value\": ${Slot_validation_data_path}\n },\n {\n \"name\": \"NO_DATA\",\n \"value\": str(${Slot_no_data})\n },\n {\n \"name\": \"FILE_EXT\",\n \"value\": ${Slot_file_ext}\n },\n {\n \"name\": \"FILE_TYPE\",\n \"value\": ${Slot_file_type}\n },\n {\n \"name\": \"LOAD_MODEL_PATH\",\n \"value\": ${Slot_load_model_path}\n },\n {\n \"name\": \"LOAD_MODEL_NAME\",\n \"value\": ${Slot_load_model_name}\n },\n {\n \"name\": \"VERBOSITY\",\n \"value\": str(${Slot_verbosity})\n },\n {\n \"name\": \"LEARNING_RATE\",\n \"value\": str(${Slot_learning_rate})\n },\n {\n \"name\": \"MAX_ITERS\",\n \"value\": str(${Slot_max_iters})\n },\n {\n \"name\": \"MAX_DEPTH\",\n \"value\": str(${Slot_max_depth})\n },\n {\n \"name\": \"MAX_BINS\",\n \"value\": str(${Slot_max_bins})\n },\n {\n \"name\": \"L2_REGULARIZATION\",\n \"value\": str(${Slot_l2_regularization})\n },\n {\n \"name\": \"NUM_PARALLEL\",\n \"value\": str(${Slot_num_parallel})\n },\n {\n \"name\": \"VERIFY_EXAMPLE_IDS\",\n \"value\": str(${Slot_verify_example_ids})\n },\n {\n \"name\": \"IGNORE_FIELDS\",\n \"value\": ${Slot_ignore_fields}\n },\n {\n \"name\": \"CAT_FIELDS\",\n \"value\": ${Slot_cat_fields}\n },\n {\n \"name\": \"LABEL_FIELD\",\n \"value\": ${Slot_label_field}\n },\n {\n \"name\": \"SEND_SCORES_TO_FOLLOWER\",\n \"value\": str(${Slot_send_scores_to_follower})\n },\n {\n \"name\": \"SEND_METRICS_TO_FOLLOWER\",\n \"value\": str(${Slot_send_metrics_to_follower})\n },\n {\n \"name\": \"ENABLE_PACKING\",\n \"value\": str(${Slot_enable_packing})\n },\n {\n \"name\": \"ES_BATCH_SIZE\",\n \"value\": str(${Slot_es_batch_size})\n }\n ] + ${Slot_worker_envs},\n \"imagePullPolicy\": \"IfNotPresent\",\n \"name\": \"tensorflow\",\n \"volumeMounts\": ${Slot_volume_mounts},\n \"image\": system.variables.image_repo + \"/fedlearner:\" + ${Slot_image_version},\n \"ports\": [\n {\n \"containerPort\": 50051,\n \"name\": \"flapp-port\",\n \"protocol\": \"TCP\"\n },\n {\n \"containerPort\": 50052,\n \"name\": \"tf-port\",\n \"protocol\": \"TCP\"\n }\n ],\n \"command\": [\n \"/app/deploy/scripts/wait4pair_wrapper.sh\"\n ],\n \"args\": [\n \"/app/deploy/scripts/trainer/run_tree_worker.sh\"\n ],\n \"resources\": {\n \"limits\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_mem}\n },\n \"requests\": {\n \"cpu\": ${Slot_worker_cpu},\n \"memory\": ${Slot_worker_mem}\n }\n }\n }\n ],\n \"volumes\": ${Slot_volumes}\n }\n },\n \"pair\": True,\n \"replicas\": 1\n }\n }\n }\n}\n", + "variables": [] + } + } + }, + "comment": "" +} \ No newline at end of file diff --git a/web_console_v2/api/fedlearner_webconsole/tee/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/tee/BUILD.bazel new file mode 100644 index 000000000..e67137801 --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/tee/BUILD.bazel @@ -0,0 +1,268 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "services_lib", + srcs = [ + "services.py", + "tee_job_template.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:data_path_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:controller_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "utils_lib", + srcs = [ + "utils.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "utils_lib_test", + size = "small", + srcs = [ + "utils_test.py", + ], + imports = ["../.."], + main = "utils_test.py", + deps = [ + ":models_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + 
"//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) + +py_library( + name = "controller_lib", + srcs = [ + "controller.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":services_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:job_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:system_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:transaction_manager_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + ], +) + +py_test( + name = "controller_lib_test", + size = "small", + srcs = [ + "controller_test.py", + ], + imports = ["../.."], + main = "controller_test.py", + deps = [ + ":controller_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "runners_lib", + srcs = [ + "runners.py", + ], + imports = ["../.."], + deps = [ + ":controller_lib", + ":models_lib", + ":utils_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "runners_lib_test", + size = "small", + srcs = [ + "runners_test.py", + ], + imports = ["../.."], + main = "runners_test.py", + deps = [ + ":models_lib", + ":runners_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + 
"//web_console_v2/api/fedlearner_webconsole/review:common_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":controller_lib", + ":models_lib", + ":services_lib", + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:data_path_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/flag:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2:py_proto", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/tee/__init__.py b/web_console_v2/api/fedlearner_webconsole/tee/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/tee/apis.py b/web_console_v2/api/fedlearner_webconsole/tee/apis.py new file mode 100644 index 000000000..8c46b9fd6 --- /dev/null +++ 
b/web_console_v2/api/fedlearner_webconsole/tee/apis.py @@ -0,0 +1,834 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Optional +from flask_restful import Resource +from http import HTTPStatus +from marshmallow import Schema, fields, post_load, validate +from webargs.flaskparser import use_kwargs +from google.protobuf.json_format import ParseDict +from fedlearner_webconsole.db import db +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator +from fedlearner_webconsole.utils.flask_utils import make_flask_response, FilterExpField, get_current_user +from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.proto.audit_pb2 import Event +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp +from fedlearner_webconsole.proto.review_pb2 import TicketType, TicketDetails +from fedlearner_webconsole.exceptions import ResourceConflictException, InvalidArgumentException, NoAccessException, \ + InternalException, NotFoundException +from fedlearner_webconsole.tee.models import TrustedJobGroup, GroupCreateStatus, TrustedJob, \ + TrustedJobStatus, TrustedJobType +from fedlearner_webconsole.tee.controller import TrustedJobGroupController, launch_trusted_job, stop_trusted_job, \ + get_tee_enabled_participants, TrustedJobController +from fedlearner_webconsole.tee.services import TrustedJobGroupService, TrustedJobService +from fedlearner_webconsole.tee.utils import get_project, get_algorithm, get_dataset, get_participant, \ + get_trusted_job_group, get_trusted_job, get_algorithm_with_uuid +from fedlearner_webconsole.proto.tee_pb2 import Resource as ResourcePb, ParticipantDatasetList +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher + + +class ParticipantDatasetParams(Schema): + participant_id = fields.Integer(required=True) + uuid = fields.Str(required=True) + name = fields.Str(required=True) + + +class ResourceParams(Schema): + cpu = fields.Integer(required=True) + memory = fields.Integer(required=True) + replicas = fields.Integer(required=True) + + +class CreateTrustedJobGroupParams(Schema): + name = fields.Str(required=True) + comment = fields.Str(required=False, load_default=None) + # TODO(liuledian): remove 
algorithm_id after frontend completed + algorithm_id = fields.Integer(required=False, load_default=None) + algorithm_uuid = fields.Str(required=False, load_default=None) + dataset_id = fields.Integer(required=False, load_default=None) + participant_datasets = fields.List(fields.Nested(ParticipantDatasetParams), required=False, load_default=None) + resource = fields.Nested(ResourceParams, required=True) + + @post_load() + def make(self, data, **kwargs): + data['resource'] = ParseDict(data['resource'], ResourcePb()) + data['participant_datasets'] = ParseDict({'items': data['participant_datasets']}, ParticipantDatasetList()) + return data + + +class ConfigTrustedJobGroupParams(Schema): + comment = fields.Str(required=False, load_default=None) + auth_status = fields.Str(required=False, + load_default=None, + validate=validate.OneOf([AuthStatus.PENDING.name, AuthStatus.AUTHORIZED.name])) + # TODO(liuledian): remove algorithm_id after frontend completed + algorithm_id = fields.Integer(required=False, load_default=None) + algorithm_uuid = fields.Str(required=False, load_default=None) + resource = fields.Nested(ResourceParams, required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + if data['resource'] is not None: + data['resource'] = ParseDict(data['resource'], ResourcePb()) + if data['auth_status'] is not None: + data['auth_status'] = AuthStatus[data['auth_status']] + return data + + +class TrustedJobGroupsApi(Resource): + + FILTER_FIELDS = { + 'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + } + + def __init__(self): + self._filter_builder = FilterBuilder(model_class=TrustedJobGroup, supported_fields=self.FILTER_FIELDS) + + @credentials_required + @use_kwargs( + { + 'page': fields.Integer(required=False, load_default=None), + 'page_size': fields.Integer(required=False, load_default=None), + 'filter_exp': FilterExpField(data_key='filter', required=False, load_default=None), + }, + location='query') + def get( + self, + page: Optional[int], + page_size: Optional[int], + filter_exp: Optional[FilterExpression], + project_id: int, + ): + """Get the list of trusted job groups + --- + tags: + - tee + description: get the list of trusted job groups + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: filter + schema: + type: string + responses: + 200: + description: the list of trusted job groups + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobGroupRef' + 400: + description: invalid argument + 403: + description: the trusted job group is forbidden to access + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + # TODO(liuledian): filter out groups in notification + query = session.query(TrustedJobGroup).filter(TrustedJobGroup.resource.isnot(None)).order_by( + TrustedJobGroup.created_at.desc()) + if project_id: + query = query.filter_by(project_id=project_id) + if filter_exp: + try: + query = self._filter_builder.build_query(query, filter_exp) + except ValueError as e: + raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e + pagination = paginate(query, page, page_size) + data = [d.to_ref() for d in pagination.get_items()] + session.commit() + return make_flask_response(data=data, 
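The `post_load` hooks in the schemas above convert validated JSON dicts into protobuf messages with `ParseDict` (into `Resource` and `ParticipantDatasetList` from `tee_pb2`). A self-contained sketch of the same conversion; `Struct` stands in for the repo's `tee_pb2` messages only so the snippet runs anywhere:

```python
from google.protobuf.json_format import ParseDict
from google.protobuf.struct_pb2 import Struct

# ParseDict fills a protobuf message from a plain dict, which is what the
# schemas' post_load hooks do with ResourcePb and ParticipantDatasetList.
payload = {'cpu': 2000, 'memory': 4, 'replicas': 1}
msg = ParseDict(payload, Struct())
print(msg['cpu'])  # 2000.0 (Struct stores all numbers as doubles)
```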
page_meta=pagination.get_metadata()) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, op_type=Event.OperationType.CREATE) + @use_kwargs(CreateTrustedJobGroupParams(), location='json') + def post(self, name: str, comment: Optional[str], algorithm_id: Optional[int], algorithm_uuid: Optional[str], + dataset_id: Optional[int], participant_datasets: ParticipantDatasetList, resource: ResourcePb, + project_id: int): + """Create a trusted job group + --- + tags: + - tee + description: create a trusted job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/CreateTrustedJobGroupParams' + responses: + 201: + description: the detail of the trusted job group + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobGroupPb' + 400: + description: invalid argument + 403: + description: the trusted job group is forbidden to create + 409: + description: the trusted job group already exists + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + if dataset_id is None and not participant_datasets.items: + raise InvalidArgumentException('dataset_id and participant_datasets are both missing') + with db.session_scope() as session: + project = get_project(session, project_id) + # TODO(liuledian): remove algorithm_id logic after frontend completed + if not algorithm_uuid: + algorithm_uuid = get_algorithm(session, algorithm_id).uuid + algorithm = get_algorithm_with_uuid(project_id, algorithm_uuid) + if algorithm.type != AlgorithmType.TRUSTED_COMPUTING.name: + raise InvalidArgumentException(f'algorithm {algorithm_uuid} invalid type') + if dataset_id is not None: + dataset = get_dataset(session, dataset_id) + if not dataset.is_published: + raise InvalidArgumentException(f'dataset {dataset.id} not published') + for pd in participant_datasets.items: + get_participant(session, pd.participant_id) + group = session.query(TrustedJobGroup).filter_by(name=name, project_id=project_id).first() + if group is not None: + raise ResourceConflictException(f'trusted job group {name} in project {project_id} already exists') + # TODO(liuledian): let creator assign analyzer id + enabled_pids = get_tee_enabled_participants(session, project_id) + if len(enabled_pids) != 1: + raise InternalException('tee enabled participants not valid') + analyzer_id = enabled_pids[0] + + with db.session_scope() as session: + group = TrustedJobGroup( + name=name, + uuid=resource_uuid(), + latest_version=0, + comment=comment, + project_id=project.id, + creator_username=get_current_user().username, + coordinator_id=0, + analyzer_id=analyzer_id, + auth_status=AuthStatus.AUTHORIZED, + algorithm_uuid=algorithm_uuid, + dataset_id=dataset_id, + ) + participants = ParticipantService(session).get_participants_by_project(project.id) + group.set_unauth_participant_ids([p.id for p in participants]) + group.set_resource(resource) + group.set_participant_datasets(participant_datasets) + session.add(group) + get_ticket_helper(session).create_ticket(TicketType.TK_CREATE_TRUSTED_JOB_GROUP, + TicketDetails(uuid=group.uuid)) + session.commit() + return make_flask_response(data=group.to_proto(), status=HTTPStatus.CREATED) + + +class TrustedJobGroupApi(Resource): + + @credentials_required + def get(self, project_id: int, group_id: int): + """Get the trusted job group + --- 
+ tags: + - tee + description: get the trusted job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the trusted job group + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobGroupPb' + 403: + description: the trusted job group is forbidden to access + 404: + description: trusted job group is not found + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + group = get_trusted_job_group(session, project_id, group_id) + try: + TrustedJobGroupController(session, project_id).update_unauth_participant_ids(group) + data = group.to_proto() + algorithm = AlgorithmFetcher(project_id).get_algorithm(group.algorithm_uuid) + data.algorithm_project_uuid = algorithm.algorithm_project_uuid + data.algorithm_participant_id = algorithm.participant_id + except InternalException: + logging.warning(f'[trusted-job-group] group {group_id} update unauth_participant_ids failed') + except NotFoundException: + logging.warning(f'[trusted-job-group] group {group_id} fetch algorithm {group.algorithm_uuid} failed') + session.commit() + return make_flask_response(data) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, op_type=Event.OperationType.UPDATE) + @use_kwargs(ConfigTrustedJobGroupParams(), location='json') + def put(self, comment: Optional[str], auth_status: Optional[AuthStatus], algorithm_id: Optional[int], + algorithm_uuid: Optional[str], resource: Optional[ResourcePb], project_id: int, group_id: int): + """Update the trusted job group + --- + tags: + - tee + description: update the trusted job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/ConfigTrustedJobGroupParams' + responses: + 200: + description: update the trusted job group successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobGroupPb' + 400: + description: invalid argument + 403: + description: the trusted job group is forbidden to update + 404: + description: trusted job group is not found + 409: + description: the trusted job group has not been fully created + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + group = get_trusted_job_group(session, project_id, group_id) + controller = TrustedJobGroupController(session, project_id) + if group.status != GroupCreateStatus.SUCCEEDED: + raise ResourceConflictException('the trusted job group has not been fully created') + if comment is not None: + group.comment = comment + if auth_status is not None and auth_status != group.auth_status: + controller.inform_trusted_job_group(group, auth_status) + if algorithm_uuid or algorithm_id: + if group.coordinator_id: + raise NoAccessException('only coordinator can update algorithm') + # TODO(liuledian): remove after frontend completed + if not algorithm_uuid: + algorithm_uuid = get_algorithm(session, algorithm_id).uuid + algorithm = get_algorithm_with_uuid(project_id, algorithm_uuid) + old_algorithm =
get_algorithm_with_uuid(project_id, group.algorithm_uuid) + if algorithm.algorithm_project_uuid != old_algorithm.algorithm_project_uuid: + raise InvalidArgumentException('algorithm project mismatch between old and new algorithm') + controller.update_trusted_job_group(group, algorithm_uuid) + if resource is not None: + group.set_resource(resource) + data = group.to_proto() + session.commit() + return make_flask_response(data) + + @credentials_required + @emits_event(resource_type=Event.ResourceType.TRUSTED_JOB_GROUP, op_type=Event.OperationType.DELETE) + def delete(self, project_id: int, group_id: int): + """Delete the trusted job group + --- + tags: + - tee + description: delete the trusted job group + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + responses: + 204: + description: delete the trusted job group successfully + 403: + description: the trusted job group is forbidden to delete + 409: + description: the trusted job group cannot be deleted + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + group = session.query(TrustedJobGroup).filter_by(project_id=project_id, id=group_id).first() + if group is not None: + if group.coordinator_id: + raise NoAccessException('only creator can delete the trusted job group') + if not group.is_deletable(): + raise ResourceConflictException('the trusted job group cannot be deleted') + TrustedJobGroupController(session, project_id).delete_trusted_job_group(group) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class LaunchTrustedJobApi(Resource): + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.TRUSTED_JOB, op_type=Event.OperationType.LAUNCH) + @use_kwargs({'comment': fields.Str(required=False, load_default=None)}, location='json') + def post(self, comment: Optional[str], project_id: int, group_id: int): + """Launch the trusted job + --- + tags: + - tee + description: launch the trusted job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: group_id + schema: + type: integer + required: true + requestBody: + required: False + content: + application/json: + schema: + type: object + properties: + comment: + type: string + responses: + 201: + description: launch the trusted job successfully + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobPb' + 403: + description: the trusted job is forbidden to launch + 404: + description: trusted job group is not found + 409: + description: the trusted job group is not fully created or authorized + 500: + description: internal exception + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + group = get_trusted_job_group(session, project_id, group_id) + if (group.status != GroupCreateStatus.SUCCEEDED or group.get_unauth_participant_ids() or + group.auth_status != AuthStatus.AUTHORIZED): + raise ResourceConflictException('the trusted job group is not fully created or authorized') + group = TrustedJobGroupService(session).lock_and_update_version(group_id) + session.commit() + succeeded, msg = launch_trusted_job(project_id, group.uuid, group.latest_version) + if not succeeded: + raise InternalException(f'launching trusted job 
failed with message: {msg}') + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).filter_by(trusted_job_group_id=group_id, + version=group.latest_version).first() + trusted_job.comment = comment + session.commit() + return make_flask_response(trusted_job.to_proto(), status=HTTPStatus.CREATED) + + +class GetTrustedJobsParams(Schema): + trusted_job_group_id = fields.Integer(required=True) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) + trusted_job_type = fields.Str(required=False, + data_key='type', + load_default=TrustedJobType.ANALYZE.name, + validate=validate.OneOf([TrustedJobType.ANALYZE.name, TrustedJobType.EXPORT.name])) + + @post_load() + def make(self, data, **kwargs): + if data['trusted_job_type'] is not None: + data['trusted_job_type'] = TrustedJobType[data['trusted_job_type']] + return data + + +class TrustedJobsApi(Resource): + + @credentials_required + @use_kwargs(GetTrustedJobsParams(), location='query') + def get(self, trusted_job_group_id: int, page: Optional[int], page_size: Optional[int], trusted_job_type: str, + project_id: int): + """Get the list of trusted jobs + --- + tags: + - tee + description: get the list of trusted jobs + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: query + name: group_id + schema: + type: integer + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: type + schema: + type: string + responses: + 200: + description: list of trusted jobs + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobRef' + 403: + description: trusted job list is forbidden to access + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + query = session.query(TrustedJob).filter_by(type=trusted_job_type) + # filter out trusted jobs in notification when getting the export type + if trusted_job_type == TrustedJobType.EXPORT: + query = query.filter(TrustedJob.auth_status != AuthStatus.PENDING) + if project_id: + query = query.filter_by(project_id=project_id) + if trusted_job_group_id: + query = query.filter_by(trusted_job_group_id=trusted_job_group_id) + if trusted_job_type == TrustedJobType.ANALYZE: + query = query.order_by(TrustedJob.version.desc()) + else: + # the version of tee export job equals to corresponding tee analyze job, so sort by creation time + query = query.order_by(TrustedJob.created_at.desc()) + pagination = paginate(query, page, page_size) + data = [d.to_ref() for d in pagination.get_items()] + session.commit() + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + +class UpdateTrustedJobParams(Schema): + comment = fields.Str(required=False, load_default=None) + auth_status = fields.Str(required=False, + load_default=None, + validate=validate.OneOf([AuthStatus.AUTHORIZED.name, AuthStatus.WITHDRAW.name])) + + @post_load() + def make(self, data, **kwargs): + if data['auth_status'] is not None: + data['auth_status'] = AuthStatus[data['auth_status']] + return data + + +class TrustedJobApi(Resource): + + @credentials_required + def get(self, project_id: int, trusted_job_id: int): + """Get the trusted job by id + --- + tags: + - tee + description: get the trusted job by id + parameters: + - in: path + name: project_id + schema: + type: 
integer + required: true + - in: path + name: trusted_job_id + schema: + type: integer + required: true + responses: + 200: + description: detail of the trusted job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobPb' + 403: + description: the trusted job is forbidden to access + 404: + description: the trusted job is not found + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + trusted_job = get_trusted_job(session, project_id, trusted_job_id) + if trusted_job.type == TrustedJobType.EXPORT: + TrustedJobController(session, project_id).update_participants_info(trusted_job) + data = trusted_job.to_proto() + session.commit() + return make_flask_response(data) + + @input_validator + @credentials_required + @emits_event(resource_type=Event.ResourceType.TRUSTED_JOB, op_type=Event.OperationType.UPDATE) + @use_kwargs(UpdateTrustedJobParams(), location='json') + def put(self, comment: str, auth_status: AuthStatus, project_id: int, trusted_job_id: int): + """Update the trusted job + --- + tags: + - tee + description: update the trusted job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: trusted_job_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/UpdateTrustedJobParams' + responses: + 200: + description: detail of the trusted job + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedJobPb' + 403: + description: the trusted job is forbidden to update + 404: + description: the trusted job is not found + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + trusted_job = get_trusted_job(session, project_id, trusted_job_id) + if comment is not None: + trusted_job.comment = comment + if auth_status is not None: + TrustedJobController(session, project_id).inform_auth_status(trusted_job, auth_status) + data = trusted_job.to_proto() + session.commit() + return make_flask_response(data) + + +class StopTrustedJobApi(Resource): + + @credentials_required + def post(self, project_id: int, trusted_job_id: int): + """Stop the trusted job + --- + tags: + - tee + description: stop the trusted job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: trusted_job_id + schema: + type: integer + required: true + responses: + 204: + description: stop the trusted job successfully + 403: + description: the trusted job is forbidden to stop + 404: + description: the trusted job is not found + 409: + description: the trusted job is not running + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + trusted_job = get_trusted_job(session, project_id, trusted_job_id) + if trusted_job.get_status() != TrustedJobStatus.RUNNING: + raise ResourceConflictException(f'the trusted job {trusted_job.id} is not running') + succeeded, msg = stop_trusted_job(project_id, trusted_job.uuid) + if not succeeded: + raise InternalException(f'stop trusted job failed with message: {msg}') + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +class TrustedNotificationsApi(Resource): + + @credentials_required + def get(self, project_id: int): + """Get 
the list of trusted notifications + --- + tags: + - tee + description: get the list of trusted notifications + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + responses: + 200: + description: list of trusted notifications + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.TrustedNotification' + 403: + description: trusted notification is forbidden to access + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + query = session.query(TrustedJobGroup).filter_by(resource=None) + if project_id: + query = query.filter_by(project_id=project_id) + data = [d.to_notification() for d in query.all()] + query = session.query(TrustedJob).filter_by(auth_status=AuthStatus.PENDING, type=TrustedJobType.EXPORT) + if project_id: + query = query.filter_by(project_id=project_id) + data += [d.to_notification() for d in query.all()] + data.sort(key=lambda x: x.created_at, reverse=True) + return make_flask_response(data) + + +class ExportTrustedJobApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.TRUSTED_EXPORT_JOB, op_type=Event.OperationType.CREATE) + def post(self, project_id: int, trusted_job_id: int): + """Export the trusted job + --- + tags: + - tee + description: export the trusted job + parameters: + - in: path + name: project_id + schema: + type: integer + required: true + - in: path + name: trusted_job_id + schema: + type: integer + required: true + responses: + 204: + description: export the trusted job successfully + 403: + description: the trusted job is forbidden to export + 404: + description: the trusted job is not found + 409: + description: the trusted job is not succeeded + """ + if not Flag.TRUSTED_COMPUTING_ENABLED.value: + raise NoAccessException('trusted computing is not enabled') + with db.session_scope() as session: + trusted_job = get_trusted_job(session, project_id, trusted_job_id) + if trusted_job.type != TrustedJobType.ANALYZE or trusted_job.get_status() != TrustedJobStatus.SUCCEEDED: + raise ResourceConflictException(f'the trusted job {trusted_job.id} is not valid') + trusted_job = TrustedJobService(session).lock_and_update_export_count(trusted_job_id) + session.commit() + with db.session_scope() as session: + uuid = resource_uuid() + TrustedJobService(session).create_internal_export(uuid, trusted_job) + get_ticket_helper(session).create_ticket(TicketType.TK_CREATE_TRUSTED_EXPORT_JOB, TicketDetails(uuid=uuid)) + session.commit() + return make_flask_response(status=HTTPStatus.NO_CONTENT) + + +def initialize_tee_apis(api): + api.add_resource(TrustedJobGroupsApi, '/projects/<int:project_id>/trusted_job_groups') + api.add_resource(TrustedJobGroupApi, '/projects/<int:project_id>/trusted_job_groups/<int:group_id>') + api.add_resource(LaunchTrustedJobApi, '/projects/<int:project_id>/trusted_job_groups/<int:group_id>:launch') + api.add_resource(TrustedJobsApi, '/projects/<int:project_id>/trusted_jobs') + api.add_resource(TrustedJobApi, '/projects/<int:project_id>/trusted_jobs/<int:trusted_job_id>') + api.add_resource(StopTrustedJobApi, '/projects/<int:project_id>/trusted_jobs/<int:trusted_job_id>:stop') + api.add_resource(TrustedNotificationsApi, '/projects/<int:project_id>/trusted_notifications') + api.add_resource(ExportTrustedJobApi, '/projects/<int:project_id>/trusted_jobs/<int:trusted_job_id>:export') + + schema_manager.append(CreateTrustedJobGroupParams) + schema_manager.append(ConfigTrustedJobGroupParams) + schema_manager.append(GetTrustedJobsParams) + schema_manager.append(UpdateTrustedJobParams) diff --git 
a/web_console_v2/api/fedlearner_webconsole/tee/apis_test.py b/web_console_v2/api/fedlearner_webconsole/tee/apis_test.py new file mode 100644 index 000000000..dc6a17869 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/apis_test.py @@ -0,0 +1,832 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +import urllib.parse +from unittest.mock import patch, MagicMock +from datetime import datetime + +import grpc +from google.protobuf.text_format import MessageToString +from google.protobuf.json_format import MessageToDict +from google.protobuf.empty_pb2 import Empty +from testing.common import BaseTestCase +from testing.rpc.client import FakeRpcError +from http import HTTPStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, AlgorithmType +from fedlearner_webconsole.dataset.models import Dataset, DataBatch +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobStatus, \ + GroupCreateStatus, TrustedJobType +from fedlearner_webconsole.proto.tee_pb2 import ParticipantDataset, ParticipantDatasetList, Resource +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.job.models import JobType, Job, JobState +from fedlearner_webconsole.proto.rpc.v2 import system_service_pb2 +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import GetTrustedJobGroupResponse +from fedlearner_webconsole.setting.service import SettingService + + +class TrustedJobGroupsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + proj_part2 = ProjectParticipant(project_id=1, participant_id=2) + dataset1 = Dataset(id=1, name='dataset-name1', uuid='dataset-uuid1', is_published=True) + dataset2 = Dataset(id=2, name='dataset-name2', uuid='dataset-uuid2', is_published=False) + algorithm = Algorithm(id=1, + uuid='algorithm-uuid1', + algorithm_project_id=1, + type=AlgorithmType.TRUSTED_COMPUTING) + algorithm_proj = AlgorithmProject(id=1, uuid='algorithm-proj-uuid') + resource = MessageToString(Resource(cpu=1, memory=1, replicas=1)) + group1 = TrustedJobGroup(name='g1', + project_id=1, + coordinator_id=0, + created_at=datetime(2021, 1, 1, 0, 0, 1), + 
resource=resource) + group2 = TrustedJobGroup(name='g2-filter', + project_id=1, + coordinator_id=0, + created_at=datetime(2021, 1, 1, 0, 0, 2), + resource=resource) + group3 = TrustedJobGroup(name='g3', + project_id=2, + coordinator_id=0, + created_at=datetime(2021, 1, 1, 0, 0, 3), + resource=resource) + group4 = TrustedJobGroup(name='g4-filter', + project_id=1, + coordinator_id=0, + created_at=datetime(2021, 1, 1, 0, 0, 4), + resource=resource) + with db.session_scope() as session: + session.add_all([ + project, participant1, participant2, proj_part1, proj_part2, dataset1, dataset2, algorithm, + algorithm_proj + ]) + session.add_all([group1, group2, group3, group4]) + session.commit() + + def test_get_trusted_groups(self): + # get with project id 1 + resp = self.get_helper('/api/v2/projects/1/trusted_job_groups') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g4-filter', 'g2-filter', 'g1']) + # get with project id 0 + resp = self.get_helper('/api/v2/projects/0/trusted_job_groups') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g4-filter', 'g3', 'g2-filter', 'g1']) + # get with filter + filter_param = urllib.parse.quote('(name~="filter")') + resp = self.get_helper(f'/api/v2/projects/1/trusted_job_groups?filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g4-filter', 'g2-filter']) + # get with page + resp = self.get_helper(f'/api/v2/projects/1/trusted_job_groups?page=2&page_size=1&filter={filter_param}') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['g2-filter']) + # get nothing for invalid project id + resp = self.get_helper('/api/v2/projects/2/trusted_job_groups?page=2&page_size=1') + self.assertEqual(self.get_response_data(resp), []) + + @patch('fedlearner_webconsole.rpc.v2.system_service_client.SystemServiceClient.check_tee_enabled') + def test_post_trusted_job_groups(self, mock_client: MagicMock): + mock_client.side_effect = [ + system_service_pb2.CheckTeeEnabledResponse(tee_enabled=True), + system_service_pb2.CheckTeeEnabledResponse(tee_enabled=False), + ] + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'comment': 'This is a comment.', + 'algorithm_uuid': 'algorithm-uuid1', + 'dataset_id': 1, + 'participant_datasets': [{ + 'participant_id': 1, + 'uuid': 'dataset-uuid3', + 'name': 'dataset-name3', + }], + 'resource': { + 'cpu': 2, + 'memory': 2, + 'replicas': 1, + }, + }) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).filter_by(name='group-name', project_id=1).first() + self.assertEqual(group.name, 'group-name') + self.assertEqual(group.latest_version, 0) + self.assertEqual(group.comment, 'This is a comment.') + self.assertEqual(group.project_id, 1) + self.assertEqual(group.coordinator_id, 0) + self.assertEqual(group.analyzer_id, 1) + self.assertEqual(group.ticket_status, TicketStatus.APPROVED) + self.assertEqual(group.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual(group.unauth_participant_ids, '1,2') + self.assertEqual(group.algorithm_uuid, 'algorithm-uuid1') + self.assertEqual(group.resource, MessageToString(Resource(cpu=2, memory=2, replicas=1))) + self.assertEqual(group.dataset_id, 1) + participant_datasets = ParticipantDatasetList( + items=[ParticipantDataset( + participant_id=1, + 
uuid='dataset-uuid3', + name='dataset-name3', + )]) + self.assertEqual(group.participant_datasets, MessageToString(participant_datasets)) + + @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant') + @patch('fedlearner_webconsole.tee.apis.get_tee_enabled_participants') + def test_post_trusted_job_groups_failed(self, mock_get_tee_enabled_participants: MagicMock, + mock_get_algorithm: MagicMock): + mock_get_algorithm.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'not found') + mock_get_tee_enabled_participants.return_value = [0] + resource = {'cpu': 2, 'memory': 2, 'replicas': 1} + # fail due to no dataset is provided + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'algorithm_uuid': 'algorithm-uuid1', + 'resource': resource, + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to dataset not found + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'algorithm_uuid': 'algorithm-uuid1', + 'dataset_id': 20, + 'resource': resource, + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to dataset not published + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'algorithm_uuid': 'algorithm-uuid1', + 'dataset_id': 2, + 'resource': resource, + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to participant not found + resp = self.post_helper( + '/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'algorithm_uuid': 'algorithm-uuid1', + 'resource': resource, + 'participant_datasets': [{ + 'participant_id': 10, + 'uuid': 'dataset-uuid3', + 'name': 'dataset-name3', + }], + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to algorithm not found + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'algorithm_uuid': 'algorithm-uuid10', + 'resource': resource, + 'dataset_id': 2, + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # fail due to duplicate name in project + with db.session_scope() as session: + group = TrustedJobGroup(name='group-name', project_id=1) + session.add(group) + session.commit() + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups', + data={ + 'name': 'group-name', + 'algorithm_uuid': 'algorithm-uuid1', + 'resource': resource, + 'dataset_id': 1, + }) + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + + +class TrustedJobGroupApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + with db.session_scope() as session: + project = Project(id=1, name='project') + algorithm_proj1 = AlgorithmProject(id=1, uuid='algo-proj-uuid1') + algorithm1 = Algorithm(id=1, uuid='algorithm-uuid1', algorithm_project_id=1) + participant = Participant(id=1, name='part2', domain_name='fl-domain2.com') + proj_part = ProjectParticipant(project_id=1, participant_id=1) + group1 = TrustedJobGroup(id=1, + name='group-name', + uuid='uuid', + comment='this is a comment', + project_id=1, + creator_username='admin', + coordinator_id=0, + created_at=datetime(2022, 7, 1, 0, 0, 0), + updated_at=datetime(2022, 7, 1, 0, 0, 0), + ticket_status=TicketStatus.APPROVED, + status=GroupCreateStatus.SUCCEEDED, + auth_status=AuthStatus.AUTHORIZED, + unauth_participant_ids='1,2', + algorithm_uuid='algorithm-uuid1') + group1.set_resource(Resource(cpu=2, memory=2, 
replicas=1)) + group1.set_participant_datasets( + ParticipantDatasetList( + items=[ParticipantDataset(participant_id=1, name='dataset-name', uuid='dataset-uuid')])) + group2 = TrustedJobGroup(id=2, + name='group-name2', + uuid='uuid2', + project_id=1, + coordinator_id=1, + ticket_status=TicketStatus.APPROVED, + status=GroupCreateStatus.SUCCEEDED, + auth_status=AuthStatus.AUTHORIZED, + algorithm_uuid='algorithm-uuid1') + session.add_all([project, group1, group2, algorithm_proj1, algorithm1, participant, proj_part]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_trusted_job_group') + def test_get_trusted_job_group(self, mock_client: MagicMock): + mock_client.return_value = GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + resp = self.get_helper('/api/v2/projects/1/trusted_job_groups/1') + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + data.pop('updated_at') + self.assertEqual( + data, { + 'id': 1, + 'name': 'group-name', + 'comment': 'this is a comment', + 'analyzer_id': 0, + 'coordinator_id': 0, + 'created_at': 1656633600, + 'creator_username': 'admin', + 'dataset_id': 0, + 'latest_job_status': 'NEW', + 'latest_version': 0, + 'project_id': 1, + 'resource': { + 'cpu': 2, + 'memory': 2, + 'replicas': 1 + }, + 'status': 'SUCCEEDED', + 'algorithm_id': 1, + 'algorithm_uuid': 'algorithm-uuid1', + 'algorithm_project_uuid': 'algo-proj-uuid1', + 'algorithm_participant_id': 0, + 'auth_status': 'AUTHORIZED', + 'ticket_auth_status': 'AUTH_PENDING', + 'ticket_status': 'APPROVED', + 'ticket_uuid': '', + 'unauth_participant_ids': [2], + 'uuid': 'uuid', + 'participant_datasets': { + 'items': [{ + 'participant_id': 1, + 'name': 'dataset-name', + 'uuid': 'dataset-uuid' + }] + }, + }) + # failed due to not found + resp = self.get_helper('/api/v2/projects/1/trusted_job_groups/10') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + # get nothing due to project invalid + resp = self.get_helper('/api/v2/projects/10/trusted_job_groups/1') + self.assertIsNone(self.get_response_data(resp)) + + @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.update_trusted_job_group') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_trusted_job_group') + def test_put_trusted_job_group(self, mock_inform: MagicMock, mock_update: MagicMock, mock_get_algorithm: MagicMock): + mock_get_algorithm.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'not found') + mock_inform.return_value = None + mock_update.return_value = None + with db.session_scope() as session: + algorithm_proj2 = AlgorithmProject(id=2, uuid='algo-proj-uuid2') + algorithm2 = Algorithm(id=2, uuid='algorithm-uuid2', algorithm_project_id=1) + algorithm3 = Algorithm(id=3, uuid='algorithm-uuid3', algorithm_project_id=2) + session.add_all([algorithm_proj2, algorithm2, algorithm3]) + session.commit() + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/1', + data={ + 'comment': 'new comment', + 'auth_status': 'PENDING', + 'algorithm_uuid': 'algorithm-uuid2', + 'resource': { + 'cpu': 4, + 'memory': 4, + 'replicas': 1 + } + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + self.assertEqual(group.comment, 'new comment') + self.assertEqual(group.auth_status, AuthStatus.PENDING) + 
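# note: the participant-facing inform/update RPCs are mocked out above, so the checks in this block only verify local DB state + 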
self.assertEqual(group.algorithm_uuid, 'algorithm-uuid2') + self.assertEqual(group.resource, MessageToString(Resource(cpu=4, memory=4, replicas=1))) + # failed due to group not found + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/10', data={'comment': 'new comment'}) + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + # failed due to not creator but update algorithm + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/2', data={'algorithm_uuid': 'algorithm-uuid2'}) + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + # failed due to algorithm not found + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/1', + data={'algorithm_uuid': 'algorithm-not-exist'}) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # failed due to algorithm project mismatch + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/1', data={'algorithm_uuid': 'algorithm-uuid3'}) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + # failed due to group not fully created + with db.session_scope() as session: + group3 = TrustedJobGroup(id=3, project_id=1, status=GroupCreateStatus.PENDING) + session.add(group3) + session.commit() + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/3', data={'comment': 'new comment'}) + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + # failed due to grpc error, inconsistency in participants + mock_update.side_effect = FakeRpcError(grpc.StatusCode.INVALID_ARGUMENT, 'mismatched algorithm project') + resp = self.put_helper('/api/v2/projects/1/trusted_job_groups/1', data={'algorithm_uuid': 'algorithm-uuid1'}) + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.delete_trusted_job_group') + def test_delete_trusted_job_group(self, mock_delete: MagicMock): + Flag.TRUSTED_COMPUTING_ENABLED.value = True + mock_delete.return_value = None + # fail due to trusted job is running + with db.session_scope() as session: + trusted_job1 = TrustedJob(id=1, + name='V1', + trusted_job_group_id=1, + job_id=1, + status=TrustedJobStatus.RUNNING) + job1 = Job(id=1, name='job-name1', job_type=JobType.CUSTOMIZED, workflow_id=0, project_id=1) + trusted_job2 = TrustedJob(id=2, + name='V2', + trusted_job_group_id=1, + job_id=2, + status=TrustedJobStatus.SUCCEEDED) + job2 = Job(id=2, name='job-name2', job_type=JobType.CUSTOMIZED, workflow_id=0, project_id=1) + session.add_all([trusted_job1, job1, trusted_job2, job2]) + session.commit() + resp = self.delete_helper('/api/v2/projects/1/trusted_job_groups/1') + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + # fail due to grpc err + with db.session_scope() as session: + session.query(TrustedJob).filter_by(id=1).update({'status': TrustedJobStatus.FAILED}) + session.commit() + mock_delete.side_effect = FakeRpcError(grpc.StatusCode.FAILED_PRECONDITION, 'trusted job is not deletable') + resp = self.delete_helper('/api/v2/projects/1/trusted_job_groups/1') + self.assertEqual(resp.status_code, HTTPStatus.INTERNAL_SERVER_ERROR) + # fail due to not creator + resp = self.delete_helper('/api/v2/projects/1/trusted_job_groups/2') + self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) + # successfully delete a group that does not exist + resp = self.delete_helper('/api/v2/projects/1/trusted_job_groups/3') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + # successfully delete + mock_delete.side_effect = None + resp = 
self.delete_helper('/api/v2/projects/1/trusted_job_groups/1') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + self.assertIsNone(session.query(TrustedJobGroup).get(1)) + self.assertIsNone(session.query(TrustedJob).get(1)) + self.assertIsNone(session.query(TrustedJob).get(2)) + self.assertIsNone(session.query(Job).get(1)) + self.assertIsNone(session.query(Job).get(2)) + + +class LaunchTrustedJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + with db.session_scope() as session: + project = Project(id=1, name='project-name') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + algorithm = Algorithm(id=1, + uuid='algorithm-uuid1', + path='file:///data/algorithm/test', + type=AlgorithmType.TRUSTED_COMPUTING) + dataset1 = Dataset(id=1, name='dataset-name1', uuid='dataset-uuid1', is_published=True) + data_batch1 = DataBatch(id=1, dataset_id=1) + group = TrustedJobGroup(id=1, + uuid='group-uuid', + project_id=1, + latest_version=1, + coordinator_id=0, + status=GroupCreateStatus.SUCCEEDED, + auth_status=AuthStatus.AUTHORIZED, + algorithm_uuid='algorithm-uuid1', + dataset_id=1, + resource=MessageToString(Resource(cpu=2000, memory=2, replicas=1))) + session.add_all([project, participant1, proj_part1, algorithm, dataset1, data_batch1, group]) + sys_var = SettingService(session).get_system_variables_dict() + session.commit() + sys_var['sgx_image'] = 'artifact.bytedance.com/fedlearner/pp_bioinformatics:e13eb8a1d96ad046ca7354b8197d41fd' + self.sys_var = sys_var + + @patch('fedlearner_webconsole.tee.services.get_batch_data_path') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_variables_dict') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_launch_trusted_job(self, mock_remote_do_two_pc: MagicMock, mock_get_system_info: MagicMock, + mock_get_system_variables_dict: MagicMock, mock_get_batch_data_path: MagicMock): + mock_remote_do_two_pc.return_value = True, '' + mock_get_system_info.return_value = SystemInfo(domain_name='domain1') + mock_get_system_variables_dict.return_value = self.sys_var + mock_get_batch_data_path.return_value = 'file:///data/test' + # successful + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups/1:launch', data={'comment': 'this is a comment'}) + self.assertEqual(resp.status_code, HTTPStatus.CREATED) + with db.session_scope() as session: + trusted_job = session.query(TrustedJob).filter_by(trusted_job_group_id=1, version=2).first() + self.assertIsNotNone(trusted_job) + self.assertEqual(trusted_job.comment, 'this is a comment') + self.assertEqual(trusted_job.coordinator_id, 0) + # fail due to not found group + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups/10:launch', data={}) + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + # fail due to not fully auth + with db.session_scope() as session: + session.query(TrustedJobGroup).filter_by(id=1).update({'coordinator_id': 0, 'unauth_participant_ids': '1'}) + session.commit() + resp = self.post_helper('/api/v2/projects/1/trusted_job_groups/1:launch', data={}) + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + + +class TrustedJobsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value 
= True + with db.session_scope() as session: + trusted_job1 = TrustedJob(id=1, + name='V1', + version=1, + project_id=1, + trusted_job_group_id=1, + job_id=1, + status=TrustedJobStatus.RUNNING) + trusted_job2 = TrustedJob(id=2, + name='V2', + version=2, + project_id=1, + trusted_job_group_id=1, + job_id=2, + status=TrustedJobStatus.SUCCEEDED) + trusted_job3 = TrustedJob(id=3, + name='V1', + version=1, + project_id=1, + trusted_job_group_id=2, + job_id=3, + status=TrustedJobStatus.RUNNING) + trusted_job4 = TrustedJob(id=4, + name='V1', + version=1, + project_id=2, + trusted_job_group_id=3, + job_id=4, + status=TrustedJobStatus.RUNNING) + trusted_job5 = TrustedJob(id=5, + name='V1-1', + type=TrustedJobType.EXPORT, + auth_status=AuthStatus.AUTHORIZED, + version=1, + project_id=1, + trusted_job_group_id=1, + job_id=5, + status=TrustedJobStatus.NEW, + created_at=datetime(2022, 11, 23, 12, 0, 0)) + trusted_job6 = TrustedJob(id=6, + name='V2-1', + type=TrustedJobType.EXPORT, + auth_status=AuthStatus.WITHDRAW, + version=2, + project_id=1, + trusted_job_group_id=1, + job_id=6, + status=TrustedJobStatus.CREATED, + created_at=datetime(2022, 11, 23, 12, 0, 1)) + job1 = Job(id=1, + name='job-name1', + job_type=JobType.CUSTOMIZED, + workflow_id=0, + project_id=1, + state=JobState.FAILED) + session.add_all([trusted_job1, trusted_job2, trusted_job3, trusted_job4, trusted_job5, trusted_job6, job1]) + session.commit() + + def test_get_trusted_job(self): + # successful and trusted job status is refreshed when api is called + resp = self.get_helper('/api/v2/projects/1/trusted_jobs?trusted_job_group_id=1') + data = self.get_response_data(resp) + self.assertEqual([(d['name'], d['status']) for d in data], [('V2', 'SUCCEEDED'), ('V1', 'FAILED')]) + + def test_get_export_trusted_job(self): + resp = self.get_helper('/api/v2/projects/1/trusted_jobs?trusted_job_group_id=1&type=EXPORT') + data = self.get_response_data(resp) + self.assertEqual([(d['name'], d['status']) for d in data], [('V2-1', 'CREATED'), ('V1-1', 'NEW')]) + + +class TrustedJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + self.participants_info = ParticipantsInfo(participants_map={ + 'domain1': ParticipantInfo(auth_status='AUTHORIZED'), + 'domain2': ParticipantInfo(auth_status='PENDING'), + }) + trusted_job1 = TrustedJob(id=1, + name='V1', + type=TrustedJobType.EXPORT, + job_id=1, + uuid='uuid1', + version=1, + comment='this is a comment', + project_id=1, + trusted_job_group_id=1, + status=TrustedJobStatus.PENDING, + auth_status=AuthStatus.AUTHORIZED, + algorithm_uuid='algorithm-uuid1', + created_at=datetime(2022, 6, 14, 0, 0, 0), + updated_at=datetime(2022, 6, 14, 0, 0, 1), + participants_info=MessageToString(self.participants_info)) + job1 = Job(id=1, + name='job-name1', + job_type=JobType.CUSTOMIZED, + workflow_id=0, + project_id=1, + state=JobState.STARTED) + session.add_all([project, participant1, proj_part1, trusted_job1, job1]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_trusted_job') + def test_get_trusted_job(self, mock_client: MagicMock): + mock_client.return_value = GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + # successful + resp = self.get_helper('/api/v2/projects/1/trusted_jobs/1') + data = 
self.get_response_data(resp) + part_info_dict = MessageToDict( + self.participants_info, + preserving_proto_field_name=True, + including_default_value_fields=True, + ) + part_info_dict['participants_map']['domain2']['auth_status'] = 'AUTHORIZED' + del data['updated_at'] + self.assertEqual( + data, { + 'algorithm_id': 0, + 'algorithm_uuid': 'algorithm-uuid1', + 'comment': 'this is a comment', + 'auth_status': 'AUTHORIZED', + 'export_dataset_id': 0, + 'finished_at': 0, + 'id': 1, + 'job_id': 1, + 'name': 'V1', + 'project_id': 1, + 'started_at': 0, + 'status': 'RUNNING', + 'ticket_status': 'APPROVED', + 'ticket_uuid': '', + 'trusted_job_group_id': 1, + 'coordinator_id': 0, + 'type': 'EXPORT', + 'uuid': 'uuid1', + 'version': 1, + 'created_at': 1655164800, + 'participants_info': part_info_dict, + 'ticket_auth_status': 'AUTHORIZED', + }) + # fail due to not found + resp = self.get_helper('/api/v2/projects/1/trusted_jobs/10') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_trusted_job') + def test_put_trusted_job(self, mock_client: MagicMock, mock_get_system_info: MagicMock): + mock_client.return_value = Empty() + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + # successful update comment + resp = self.put_helper('/api/v2/projects/1/trusted_jobs/1', data={'comment': 'new comment'}) + data = self.get_response_data(resp) + self.assertEqual(data['comment'], 'new comment') + # successful update auth_status + resp = self.put_helper('/api/v2/projects/1/trusted_jobs/1', data={'auth_status': 'WITHDRAW'}) + data = self.get_response_data(resp) + self.assertEqual(data['auth_status'], 'WITHDRAW') + self.assertEqual(data['participants_info']['participants_map']['domain1']['auth_status'], 'WITHDRAW') + # fail due to not found + resp = self.put_helper('/api/v2/projects/1/trusted_jobs/10', data={'comment': 'new comment'}) + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + + +class StopTrustedJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + with db.session_scope() as session: + project = Project(id=1, name='project-name') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + group = TrustedJobGroup(id=1, + uuid='group-uuid', + project_id=1, + latest_version=1, + coordinator_id=0, + status=GroupCreateStatus.SUCCEEDED, + auth_status=AuthStatus.AUTHORIZED, + algorithm_uuid='algorithm-uuid') + trusted_job1 = TrustedJob(id=1, + uuid='trusted-job-uuid1', + name='V1', + project_id=1, + trusted_job_group_id=1, + job_id=1, + status=TrustedJobStatus.PENDING) + job1 = Job(id=1, + name='job-name1', + job_type=JobType.CUSTOMIZED, + project_id=1, + workflow_id=0, + state=JobState.STARTED) + session.add_all([project, participant1, proj_part1, group, trusted_job1, job1]) + session.commit() + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_stop_trusted_job(self, mock_remote_do_two_pc): + mock_remote_do_two_pc.return_value = True, '' + # successful and trusted job status is refreshed to RUNNING before STOPPED + resp = self.post_helper('/api/v2/projects/1/trusted_jobs/1:stop') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + trusted_job1: TrustedJob = 
session.query(TrustedJob).get(1) + self.assertEqual(trusted_job1.status, TrustedJobStatus.STOPPED) + # fail due to not in RUNNING status since it is STOPPED before + resp = self.post_helper('/api/v2/projects/1/trusted_jobs/1:stop') + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + # fail due to trusted job not found + resp = self.post_helper('/api/v2/projects/10/trusted_jobs/1:stop') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + + +class TrustedNotificationsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + resource = MessageToString(Resource(cpu=1, memory=2, replicas=1)) + with db.session_scope() as session: + group1 = TrustedJobGroup(id=1, project_id=1, name='group1', resource=resource) + group2 = TrustedJobGroup(id=2, project_id=1, name='group2', created_at=datetime(2022, 10, 1, 0, 0, 0)) + group3 = TrustedJobGroup(id=3, project_id=1, name='group3', resource=resource) + group4 = TrustedJobGroup(id=4, project_id=2, name='group4', created_at=datetime(2022, 10, 1, 0, 0, 1)) + group5 = TrustedJobGroup(id=5, project_id=1, name='group5', created_at=datetime(2022, 10, 1, 0, 0, 4)) + trusted_job1 = TrustedJob(id=1, + name='V10-1', + auth_status=AuthStatus.PENDING, + type=TrustedJobType.EXPORT, + project_id=1, + trusted_job_group_id=1, + created_at=datetime(2022, 10, 1, 0, 0, 2)) + trusted_job2 = TrustedJob(id=2, + name='V10-2', + auth_status=AuthStatus.WITHDRAW, + type=TrustedJobType.EXPORT, + project_id=1, + trusted_job_group_id=1) + trusted_job3 = TrustedJob(id=3, + name='V10', + auth_status=AuthStatus.PENDING, + type=TrustedJobType.ANALYZE, + project_id=1, + trusted_job_group_id=1) + trusted_job4 = TrustedJob(id=4, + name='V9-1', + auth_status=AuthStatus.PENDING, + type=TrustedJobType.EXPORT, + project_id=1, + trusted_job_group_id=1, + created_at=datetime(2022, 10, 1, 0, 0, 3)) + trusted_job5 = TrustedJob(id=5, + name='V9-2', + auth_status=AuthStatus.PENDING, + type=TrustedJobType.EXPORT, + project_id=2, + trusted_job_group_id=4, + created_at=datetime(2022, 10, 1, 0, 0, 5)) + + session.add_all([ + group1, group2, group3, group4, group5, trusted_job1, trusted_job2, trusted_job3, trusted_job4, + trusted_job5 + ]) + session.commit() + + def test_get_trusted_notifications(self): + resp = self.get_helper('/api/v2/projects/1/trusted_notifications') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['group5', 'group1-V9-1', 'group1-V10-1', 'group2']) + resp = self.get_helper('/api/v2/projects/2/trusted_notifications') + data = self.get_response_data(resp) + self.assertEqual([d['name'] for d in data], ['group4-V9-2', 'group4']) + + +class ExportTrustedJobApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + Flag.TRUSTED_COMPUTING_ENABLED.value = True + with db.session_scope() as session: + project = Project(id=1, name='project-name') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + trusted_job1 = TrustedJob(id=1, + uuid='uuid1', + project_id=1, + status=TrustedJobStatus.SUCCEEDED, + version=1, + trusted_job_group_id=1, + resource=MessageToString(Resource(cpu=1, memory=1, replicas=1))) + trusted_job2 = TrustedJob(id=2, + uuid='uuid2', + project_id=1, + status=TrustedJobStatus.RUNNING, + version=2, + trusted_job_group_id=1) + session.add_all([project, participant1, proj_part1, trusted_job1, trusted_job2]) + session.commit() + + 
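# The assertions below exercise the expected naming scheme for tee export jobs: + # V{version}-{pure_domain_name}-{export_count} (e.g. 'V1-domain1-1'), with export_count + # incremented on the corresponding tee analyze job for each export request. + 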
@patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_export_trusted_job(self, mock_get_system_info: MagicMock): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + # successful + resp = self.post_helper('/api/v2/projects/1/trusted_jobs/1:export') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + resp = self.post_helper('/api/v2/projects/1/trusted_jobs/1:export') + self.assertEqual(resp.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + tee_export_job = session.query(TrustedJob).filter_by(type=TrustedJobType.EXPORT, + version=1, + project_id=1, + trusted_job_group_id=1, + export_count=1).first() + self.assertEqual(tee_export_job.name, 'V1-domain1-1') + self.assertEqual(tee_export_job.coordinator_id, 0) + self.assertEqual(tee_export_job.status, TrustedJobStatus.NEW) + self.assertIsNotNone(tee_export_job.ticket_uuid) + tee_export_job = session.query(TrustedJob).filter_by(type=TrustedJobType.EXPORT, + version=1, + project_id=1, + trusted_job_group_id=1, + export_count=2).first() + self.assertEqual(tee_export_job.name, 'V1-domain1-2') + tee_analyze_job = session.query(TrustedJob).get(1) + self.assertEqual(tee_analyze_job.export_count, 2) + # not found + resp = self.post_helper('/api/v2/projects/1/trusted_jobs/10:export') + self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) + # not succeeded + resp = self.post_helper('/api/v2/projects/1/trusted_jobs/2:export') + self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/tee/controller.py b/web_console_v2/api/fedlearner_webconsole/tee/controller.py new file mode 100644 index 000000000..da5f3bb96 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/controller.py @@ -0,0 +1,232 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Tuple, List +import logging +import grpc +from sqlalchemy.orm import Session + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.two_pc.transaction_manager import TransactionManager +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TransactionData, CreateTrustedJobGroupData, \ + LaunchTrustedJobData, StopTrustedJobData, LaunchTrustedExportJobData +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob +from fedlearner_webconsole.tee.services import TrustedJobGroupService, check_tee_enabled +from fedlearner_webconsole.tee.utils import get_participant +from fedlearner_webconsole.proto.tee_pb2 import DomainNameDataset +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.rpc.v2.job_service_client import JobServiceClient +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.utils.resource_name import resource_uuid +from fedlearner_webconsole.rpc.v2.system_service_client import SystemServiceClient + + +def _get_transaction_manager(project_id: int, two_pc_type: TwoPcType) -> TransactionManager: + with db.session_scope() as session: + project = session.query(Project).get(project_id) + participants = ParticipantService(session).get_platform_participants_by_project(project_id) + tm = TransactionManager(project_name=project.name, + project_token=project.token, + two_pc_type=two_pc_type, + participants=[participant.domain_name for participant in participants]) + return tm + + +def create_trusted_job_group(group: TrustedJobGroup) -> Tuple[bool, str]: + coordinator_pure_domain_name = SettingService.get_system_info().pure_domain_name + with db.session_scope() as session: + project = group.project + if project is None: + raise InternalException(f'project {group.project_id} of group {group.id} not found') + domain_name_datasets = [] + dataset_self = group.dataset + if dataset_self is not None: + domain_name_datasets.append( + DomainNameDataset(pure_domain_name=coordinator_pure_domain_name, + dataset_uuid=dataset_self.uuid, + dataset_name=dataset_self.name)) + participant_datasets = group.get_participant_datasets() + if participant_datasets is not None: + for pd in participant_datasets.items: + participant = get_participant(session, pd.participant_id) + domain_name_datasets.append( + DomainNameDataset(pure_domain_name=participant.pure_domain_name(), + dataset_uuid=pd.uuid, + dataset_name=pd.name)) + analyzer_id = group.analyzer_id + if analyzer_id: + analyzer_pure_domain_name = get_participant(session, analyzer_id).pure_domain_name() + else: + analyzer_pure_domain_name = coordinator_pure_domain_name + tm = _get_transaction_manager(project_id=project.id, two_pc_type=TwoPcType.CREATE_TRUSTED_JOB_GROUP) + create_trusted_job_group_data = CreateTrustedJobGroupData( + name=group.name, + uuid=group.uuid, + ticket_uuid=group.ticket_uuid, + project_name=project.name, + creator_username=group.creator_username, + algorithm_uuid=group.algorithm_uuid, + domain_name_datasets=domain_name_datasets, + coordinator_pure_domain_name=coordinator_pure_domain_name, + analyzer_pure_domain_name=analyzer_pure_domain_name, + ) + return 
tm.run(data=TransactionData(create_trusted_job_group_data=create_trusted_job_group_data)) + + +def launch_trusted_job(project_id: int, group_uuid: str, version: int): + initiator_pure_domain_name = SettingService.get_system_info().pure_domain_name + tm = _get_transaction_manager(project_id=project_id, two_pc_type=TwoPcType.LAUNCH_TRUSTED_JOB) + data = TransactionData( + launch_trusted_job_data=LaunchTrustedJobData(uuid=resource_uuid(), + group_uuid=group_uuid, + version=version, + initiator_pure_domain_name=initiator_pure_domain_name)) + return tm.run(data) + + +def stop_trusted_job(project_id: int, uuid: str): + tm = _get_transaction_manager(project_id=project_id, two_pc_type=TwoPcType.STOP_TRUSTED_JOB) + data = TransactionData(stop_trusted_job_data=StopTrustedJobData(uuid=uuid)) + return tm.run(data) + + +def launch_trusted_export_job(project_id: int, uuid: str): + tm = _get_transaction_manager(project_id=project_id, two_pc_type=TwoPcType.LAUNCH_TRUSTED_EXPORT_JOB) + data = TransactionData(launch_trusted_export_job_data=LaunchTrustedExportJobData(uuid=uuid)) + return tm.run(data) + + +def get_tee_enabled_participants(session: Session, project_id: int) -> List[int]: + enabled_pids = [] + if check_tee_enabled(): + enabled_pids.append(0) + participants = ParticipantService(session).get_platform_participants_by_project(project_id) + for p in participants: + client = SystemServiceClient.from_participant(p.domain_name) + try: + resp = client.check_tee_enabled() + if resp.tee_enabled: + enabled_pids.append(p.id) + except grpc.RpcError as e: + raise InternalException(f'failed to get participant {p.id}\'s tee enabled status ' + f'with grpc code {e.code()} and details {e.details()}') from e + return enabled_pids + + +class TrustedJobGroupController: + + def __init__(self, session: Session, project_id: int): + self._session = session + self._clients = [] + self._participant_ids = [] + project = session.query(Project).get(project_id) + participants = ParticipantService(session).get_platform_participants_by_project(project_id) + for p in participants: + self._clients.append(JobServiceClient.from_project_and_participant(p.domain_name, project.name)) + self._participant_ids.append(p.id) + + def inform_trusted_job_group(self, group: TrustedJobGroup, auth_status: AuthStatus): + group.auth_status = auth_status + for client, pid in zip(self._clients, self._participant_ids): + try: + client.inform_trusted_job_group(group.uuid, auth_status) + except grpc.RpcError as e: + logging.warning(f'[trusted-job-group] failed to inform participant {pid}\'s ' + f'trusted job group {group.uuid} with grpc code {e.code()} and details {e.details()}') + + def update_trusted_job_group(self, group: TrustedJobGroup, algorithm_uuid: str): + for client, pid in zip(self._clients, self._participant_ids): + try: + client.update_trusted_job_group(group.uuid, algorithm_uuid) + except grpc.RpcError as e: + raise InternalException(f'failed to update participant {pid}\'s trusted job group {group.uuid} ' + f'with grpc code {e.code()} and details {e.details()}') from e + group.algorithm_uuid = algorithm_uuid + + def delete_trusted_job_group(self, group: TrustedJobGroup): + + for client, pid in zip(self._clients, self._participant_ids): + try: + client.delete_trusted_job_group(group.uuid) + except grpc.RpcError as e: + raise InternalException(f'failed to delete participant {pid}\'s trusted job group {group.uuid} ' + f'with grpc code {e.code()} and details {e.details()}') from e + TrustedJobGroupService(self._session).delete(group) + + def 
update_unauth_participant_ids(self, group: TrustedJobGroup): + unauth_set = set(group.get_unauth_participant_ids()) + for client, pid in zip(self._clients, self._participant_ids): + try: + resp = client.get_trusted_job_group(group.uuid) + status = AuthStatus[resp.auth_status] + if status == AuthStatus.AUTHORIZED: + unauth_set.discard(pid) + else: + unauth_set.add(pid) + except grpc.RpcError as e: + logging.warning(f'[trusted-job-group] failed to get participant {pid}\'s ' + f'trusted job group {group.uuid} with grpc code {e.code()} and details {e.details()}') + group.set_unauth_participant_ids(list(unauth_set)) + + +class TrustedJobController: + + def __init__(self, session: Session, project_id: int): + self._session = session + self._clients = [] + self._participants = ParticipantService(session).get_platform_participants_by_project(project_id) + project = session.query(Project).get(project_id) + for p in self._participants: + self._clients.append(JobServiceClient.from_project_and_participant(p.domain_name, project.name)) + + def inform_auth_status(self, trusted_job: TrustedJob, auth_status: AuthStatus): + trusted_job.auth_status = auth_status + participants_info: ParticipantsInfo = trusted_job.get_participants_info() + self_pure_dn = SettingService.get_system_info().pure_domain_name + participants_info.participants_map[self_pure_dn].auth_status = auth_status.name + trusted_job.set_participants_info(participants_info) + for client, p in zip(self._clients, self._participants): + try: + client.inform_trusted_job(trusted_job.uuid, auth_status) + except grpc.RpcError as e: + logging.warning(f'[trusted-job] failed to inform participant {p.id}\'s ' + f'trusted job {trusted_job.uuid} with grpc code {e.code()} and details {e.details()}') + + def update_participants_info(self, trusted_job: TrustedJob): + participants_info = trusted_job.get_participants_info() + for client, p in zip(self._clients, self._participants): + try: + resp = client.get_trusted_job(trusted_job.uuid) + auth_status = AuthStatus[resp.auth_status] + participants_info.participants_map[p.pure_domain_name()].auth_status = auth_status.name + except grpc.RpcError as e: + logging.warning(f'[trusted-job] failed to get participant {p.id}\'s ' + f'trusted job {trusted_job.uuid} with grpc code {e.code()} and details {e.details()}') + trusted_job.set_participants_info(participants_info) + + def create_trusted_export_job(self, tee_export_job: TrustedJob, tee_analyze_job: TrustedJob): + # local trusted export job is already created by apis and this func is only used by runner + for client, p in zip(self._clients, self._participants): + try: + client.create_trusted_export_job(tee_export_job.uuid, tee_export_job.name, tee_export_job.export_count, + tee_analyze_job.uuid, tee_export_job.ticket_uuid) + except grpc.RpcError as e: + raise InternalException( + f'failed to create participant {p.id}\'s trusted export job {tee_export_job.uuid} ' + f'with grpc code {e.code()} and details {e.details()}') from e diff --git a/web_console_v2/api/fedlearner_webconsole/tee/controller_test.py b/web_console_v2/api/fedlearner_webconsole/tee/controller_test.py new file mode 100644 index 000000000..e1ba14752 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/controller_test.py @@ -0,0 +1,213 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, MagicMock +import grpc +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.rpc.client import FakeRpcError +from google.protobuf.empty_pb2 import Empty +from google.protobuf.text_format import MessageToString +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobType, TrustedJobStatus +from fedlearner_webconsole.tee.controller import TrustedJobGroupController, TrustedJobController +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import GetTrustedJobGroupResponse +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus + + +class TrustedJobGroupControllerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + proj_part2 = ProjectParticipant(project_id=1, participant_id=2) + group = TrustedJobGroup(id=1, uuid='uuid', auth_status=AuthStatus.AUTHORIZED, unauth_participant_ids='1,2') + session.add_all([project, participant1, participant2, proj_part1, proj_part2, group]) + session.commit() + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_trusted_job_group') + def test_inform_trusted_job_group(self, mock_client: MagicMock): + mock_client.return_value = Empty() + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).inform_trusted_job_group(group, AuthStatus.AUTHORIZED) + self.assertEqual(group.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual(mock_client.call_args_list, [(('uuid', AuthStatus.AUTHORIZED),), + (('uuid', AuthStatus.AUTHORIZED),)]) + # grpc abort with error + mock_client.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'trusted job group request.uuid not found') + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).inform_trusted_job_group(group, AuthStatus.AUTHORIZED) + self.assertEqual(group.auth_status, AuthStatus.AUTHORIZED) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.update_trusted_job_group') + def test_update_trusted_job_group(self, mock_client: MagicMock): + mock_client.return_value = Empty() + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).update_trusted_job_group(group, 
'algorithm-uuid') + self.assertEqual(group.algorithm_uuid, 'algorithm-uuid') + self.assertEqual(mock_client.call_args_list, [(('uuid', 'algorithm-uuid'),), (('uuid', 'algorithm-uuid'),)]) + # grpc abort with error + mock_client.side_effect = FakeRpcError(grpc.StatusCode.INVALID_ARGUMENT, 'mismatched algorithm project') + with self.assertRaises(InternalException): + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).update_trusted_job_group(group, 'algorithm-uuid') + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.delete_trusted_job_group') + def test_delete_trusted_job_group(self, mock_client: MagicMock): + mock_client.return_value = Empty() + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).delete_trusted_job_group(group) + self.assertEqual(mock_client.call_args_list, [(('uuid',),), (('uuid',),)]) + # grpc abort with err + mock_client.side_effect = FakeRpcError(grpc.StatusCode.INVALID_ARGUMENT, 'trusted job is not deletable') + with self.assertRaises(InternalException): + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).delete_trusted_job_group(group) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_trusted_job_group') + def test_update_unauth_participant_ids(self, mock_client: MagicMock): + mock_client.side_effect = [ + GetTrustedJobGroupResponse(auth_status='PENDING'), + GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + ] + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).update_unauth_participant_ids(group) + self.assertCountEqual(group.get_unauth_participant_ids(), [1]) + # grpc abort with err + mock_client.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'trusted job group uuid not found') + with db.session_scope() as session: + group = session.query(TrustedJobGroup).get(1) + TrustedJobGroupController(session, 1).update_unauth_participant_ids(group) + self.assertCountEqual(group.get_unauth_participant_ids(), [1, 2]) + + +class TrustedJobControllerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + proj_part2 = ProjectParticipant(project_id=1, participant_id=2) + participants_info = ParticipantsInfo() + participants_info.participants_map['domain1'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['domain2'].auth_status = AuthStatus.PENDING.name + participants_info.participants_map['domain3'].auth_status = AuthStatus.WITHDRAW.name + tee_export_job = TrustedJob(id=1, + uuid='uuid1', + name='V1-domain1-1', + type=TrustedJobType.EXPORT, + version=1, + trusted_job_group_id=1, + ticket_status=TicketStatus.APPROVED, + ticket_uuid='ticket-uuid', + auth_status=AuthStatus.PENDING, + status=TrustedJobStatus.NEW, + export_count=1, + participants_info=MessageToString(participants_info)) + tee_analyze_job = TrustedJob(id=2, + uuid='uuid2', + type=TrustedJobType.ANALYZE, + version=1, + trusted_job_group_id=1, + export_count=1, + status=TrustedJobStatus.SUCCEEDED) + session.add_all( + [project, 
participant1, participant2, proj_part1, proj_part2, tee_export_job, tee_analyze_job]) + session.commit() + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.inform_trusted_job') + def test_inform_auth_status(self, mock_client: MagicMock, mock_get_system_info: MagicMock): + mock_client.return_value = Empty() + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + with db.session_scope() as session: + trusted_job = session.query(TrustedJob).get(1) + TrustedJobController(session, 1).inform_auth_status(trusted_job, AuthStatus.AUTHORIZED) + self.assertEqual(trusted_job.auth_status, AuthStatus.AUTHORIZED) + self.assertEqual(trusted_job.get_participants_info().participants_map['domain1'].auth_status, 'AUTHORIZED') + self.assertEqual(mock_client.call_args_list, [(('uuid1', AuthStatus.AUTHORIZED),), + (('uuid1', AuthStatus.AUTHORIZED),)]) + # grpc abort with error + mock_client.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'not found') + with db.session_scope() as session: + group = session.query(TrustedJob).get(1) + TrustedJobController(session, 1).inform_auth_status(group, AuthStatus.AUTHORIZED) + self.assertEqual(group.auth_status, AuthStatus.AUTHORIZED) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_trusted_job') + def test_update_participants_info(self, mock_client: MagicMock): + mock_client.side_effect = [ + GetTrustedJobGroupResponse(auth_status='WITHDRAW'), + GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + ] + with db.session_scope() as session: + trusted_job = session.query(TrustedJob).get(1) + TrustedJobController(session, 1).update_participants_info(trusted_job) + self.assertEqual(trusted_job.get_participants_info().participants_map['domain2'].auth_status, 'WITHDRAW') + self.assertEqual(trusted_job.get_participants_info().participants_map['domain3'].auth_status, 'AUTHORIZED') + + # grpc abort with err + mock_client.side_effect = [ + FakeRpcError(grpc.StatusCode.NOT_FOUND, 'trusted job uuid not found'), + GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + ] + with db.session_scope() as session: + trusted_job = session.query(TrustedJob).get(1) + TrustedJobController(session, 1).update_participants_info(trusted_job) + self.assertEqual(trusted_job.get_participants_info().participants_map['domain2'].auth_status, 'PENDING') + self.assertEqual(trusted_job.get_participants_info().participants_map['domain3'].auth_status, 'AUTHORIZED') + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_trusted_export_job') + def test_create_trusted_export_job(self, mock_client: MagicMock): + mock_client.return_value = Empty() + with db.session_scope() as session: + tee_export_job = session.query(TrustedJob).get(1) + tee_analyze_job = session.query(TrustedJob).get(2) + TrustedJobController(session, 1).create_trusted_export_job(tee_export_job, tee_analyze_job) + self.assertEqual(mock_client.call_args_list, [(('uuid1', 'V1-domain1-1', 1, 'uuid2', 'ticket-uuid'),)] * 2) + # grpc abort with err + mock_client.side_effect = [ + FakeRpcError(grpc.StatusCode.INVALID_ARGUMENT, 'tee_analyze_job uuid2 invalid'), + Empty() + ] + with self.assertRaises(InternalException): + with db.session_scope() as session: + tee_export_job = session.query(TrustedJob).get(1) + tee_analyze_job = session.query(TrustedJob).get(2) + TrustedJobController(session, 1).create_trusted_export_job(tee_export_job, tee_analyze_job) + + +if 
__name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/tee/models.py b/web_console_v2/api/fedlearner_webconsole/tee/models.py new file mode 100644 index 000000000..6c08708d7 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/models.py @@ -0,0 +1,368 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum +from google.protobuf import text_format +from typing import List, Optional +from sqlalchemy.sql import func +from sqlalchemy.sql.schema import Index + +from fedlearner_webconsole.algorithm.models import Algorithm +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.tee_pb2 import TrustedJobGroupPb, TrustedJobGroupRef, TrustedJobPb, TrustedJobRef, \ + Resource, ParticipantDatasetList, TrustedNotification +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_and_auth_model import ReviewTicketAndAuthModel +from fedlearner_webconsole.job.models import JobState, Job + + +class GroupCreateStatus(enum.Enum): + PENDING = 'PENDING' + FAILED = 'FAILED' + SUCCEEDED = 'SUCCEEDED' + + +class TicketAuthStatus(enum.Enum): + TICKET_PENDING = 'TICKET_PENDING' + TICKET_DECLINED = 'TICKET_DECLINED' + CREATE_PENDING = 'CREATE_PENDING' + CREATE_FAILED = 'CREATE_FAILED' + AUTH_PENDING = 'AUTH_PENDING' + AUTHORIZED = 'AUTHORIZED' + + +class TrustedJobStatus(enum.Enum): + NEW = 'NEW' + CREATED = 'CREATED' + CREATE_FAILED = 'CREATE_FAILED' + PENDING = 'PENDING' + RUNNING = 'RUNNING' + SUCCEEDED = 'SUCCEEDED' + FAILED = 'FAILED' + STOPPED = 'STOPPED' + + +class TrustedJobType(enum.Enum): + ANALYZE = 'ANALYZE' + EXPORT = 'EXPORT' + + +class TrustedJobGroup(db.Model, ReviewTicketAndAuthModel): + __tablename__ = 'trusted_job_groups_v2' + __table_args__ = ( + Index('idx_trusted_group_name', 'name'), + Index('idx_trusted_group_project_id', 'project_id'), + default_table_args('trusted_job_groups_v2'), + ) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + name = db.Column(db.String(255), comment='name') + uuid = db.Column(db.String(64), comment='uuid') + latest_version = db.Column(db.Integer, default=0, comment='latest version') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment of trusted job group') + project_id = db.Column(db.Integer, comment='project id') + created_at = db.Column(db.DateTime(timezone=True), comment='created at', server_default=func.now()) + updated_at = db.Column(db.DateTime(timezone=True), + comment='updated at', + server_default=func.now(), + onupdate=func.now()) + creator_username = db.Column(db.String(255), comment='creator username') + 
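+    # a coordinator_id of 0 means this side created the group (see to_ref below);
+    # otherwise it holds the participant id of the peer that coordinates it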
coordinator_id = db.Column(db.Integer, comment='coordinator participant id') + analyzer_id = db.Column(db.Integer, comment='analyzer participant id') + status = db.Column(db.Enum(GroupCreateStatus, native_enum=False, length=32, create_constraint=False), + default=GroupCreateStatus.PENDING, + comment='create state') + unauth_participant_ids = db.Column(db.Text(), comment='unauth participant ids') + algorithm_uuid = db.Column(db.String(64), comment='algorithm uuid') + resource = db.Column('rsc', db.String(255), comment='resource') + dataset_id = db.Column(db.Integer, comment='dataset id') + participant_datasets = db.Column(db.Text(), comment='list of participant-to-dataset mapping') + # relationship to other tables + project = db.relationship(Project.__name__, primaryjoin='Project.id == foreign(TrustedJobGroup.project_id)') + algorithm = db.relationship(Algorithm.__name__, + primaryjoin='Algorithm.uuid == foreign(TrustedJobGroup.algorithm_uuid)') + trusted_jobs = db.relationship( + 'TrustedJob', + order_by='desc(TrustedJob.version)', + primaryjoin='TrustedJobGroup.id == foreign(TrustedJob.trusted_job_group_id)', + # To disable the warning of back_populates + overlaps='group') + dataset = db.relationship(Dataset.__name__, primaryjoin='Dataset.id == foreign(TrustedJobGroup.dataset_id)') + + def to_proto(self) -> TrustedJobGroupPb: + group = TrustedJobGroupPb( + id=self.id, + name=self.name, + uuid=self.uuid, + latest_version=self.latest_version, + comment=self.comment, + project_id=self.project_id, + created_at=to_timestamp(self.created_at), + updated_at=to_timestamp(self.updated_at), + creator_username=self.creator_username, + coordinator_id=self.coordinator_id, + analyzer_id=self.analyzer_id, + ticket_uuid=self.ticket_uuid, + ticket_status=self.ticket_status.name, + status=self.status.name, + auth_status=self.auth_status.name, + ticket_auth_status=self.get_ticket_auth_status().name, + latest_job_status=self.get_latest_job_status().name, + algorithm_id=self.algorithm.id if self.algorithm else 0, + algorithm_uuid=self.algorithm_uuid, + dataset_id=self.dataset_id, + ) + if self.unauth_participant_ids is not None: + group.unauth_participant_ids.extend(self.get_unauth_participant_ids()) + if self.resource is not None: + group.resource.MergeFrom(self.get_resource()) + if self.participant_datasets is not None: + group.participant_datasets.MergeFrom(self.get_participant_datasets()) + return group + + def to_ref(self) -> TrustedJobGroupRef: + group = TrustedJobGroupRef( + id=self.id, + name=self.name, + created_at=to_timestamp(self.created_at), + ticket_status=self.ticket_status.name, + status=self.status.name, + auth_status=self.auth_status.name, + ticket_auth_status=self.get_ticket_auth_status().name, + latest_job_status=self.get_latest_job_status().name, + is_configured=self.resource is not None, + ) + if self.coordinator_id == 0: + group.is_creator = True + else: + group.is_creator = False + group.creator_id = self.coordinator_id + group.unauth_participant_ids.extend(self.get_unauth_participant_ids()) + return group + + def get_latest_job_status(self) -> TrustedJobStatus: + for trusted_job in self.trusted_jobs: + if trusted_job.type == TrustedJobType.ANALYZE: + return trusted_job.get_status() + return TrustedJobStatus.NEW + + def get_ticket_auth_status(self) -> TicketAuthStatus: + if self.ticket_status == TicketStatus.PENDING: + return TicketAuthStatus.TICKET_PENDING + if self.ticket_status == TicketStatus.DECLINED: + return TicketAuthStatus.TICKET_DECLINED + if self.status == 
GroupCreateStatus.PENDING: + return TicketAuthStatus.CREATE_PENDING + if self.status == GroupCreateStatus.FAILED: + return TicketAuthStatus.CREATE_FAILED + if self.auth_status != AuthStatus.AUTHORIZED or len(self.get_unauth_participant_ids()) > 0: + return TicketAuthStatus.AUTH_PENDING + return TicketAuthStatus.AUTHORIZED + + def get_resource(self) -> Optional[Resource]: + if self.resource is not None: + return text_format.Parse(self.resource, Resource()) + return None + + def set_resource(self, resource: Optional[Resource] = None): + if resource is None: + resource = Resource() + self.resource = text_format.MessageToString(resource) + + def get_participant_datasets(self) -> Optional[ParticipantDatasetList]: + if self.participant_datasets is not None: + return text_format.Parse(self.participant_datasets, ParticipantDatasetList()) + return None + + def set_participant_datasets(self, participant_datasets: Optional[ParticipantDatasetList] = None): + if participant_datasets is None: + participant_datasets = ParticipantDatasetList() + self.participant_datasets = text_format.MessageToString(participant_datasets) + + def get_unauth_participant_ids(self) -> List[int]: + if self.unauth_participant_ids is not None and self.unauth_participant_ids: + sids = self.unauth_participant_ids.split(',') + return [int(s) for s in sids] + return [] + + def set_unauth_participant_ids(self, ids: List[int]): + if len(ids) > 0: + self.unauth_participant_ids = ','.join([str(i) for i in ids]) + else: + self.unauth_participant_ids = None + + def is_deletable(self) -> bool: + for trusted_job in self.trusted_jobs: + if trusted_job.get_status() in [TrustedJobStatus.PENDING, TrustedJobStatus.RUNNING]: + return False + return True + + def to_notification(self) -> TrustedNotification: + return TrustedNotification( + type=TrustedNotification.TRUSTED_JOB_GROUP_CREATE, + id=self.id, + name=self.name, + created_at=to_timestamp(self.created_at), + coordinator_id=self.coordinator_id, + ) + + +class TrustedJob(db.Model, ReviewTicketAndAuthModel): + __tablename__ = 'trusted_jobs_v2' + __table_args__ = ( + Index('idx_trusted_name', 'name'), + Index('idx_trusted_project_id', 'project_id'), + default_table_args('trusted_jobs_v2'), + ) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + name = db.Column(db.String(255), comment='name') + type = db.Column('trusted_job_type', + db.Enum(TrustedJobType, native_enum=False, length=32, create_constraint=False), + default=TrustedJobType.ANALYZE, + key='type', + comment='trusted job type') + job_id = db.Column(db.Integer, comment='job id') + uuid = db.Column(db.String(64), comment='uuid') + version = db.Column(db.Integer, comment='version') + export_count = db.Column(db.Integer, default=0, comment='export count') + comment = db.Column('cmt', db.Text(), key='comment', comment='comment of trusted job') + project_id = db.Column(db.Integer, comment='project id') + trusted_job_group_id = db.Column(db.Integer, comment='trusted job group id') + coordinator_id = db.Column(db.Integer, comment='coordinator participant id') + created_at = db.Column(db.DateTime(timezone=True), comment='created at', server_default=func.now()) + updated_at = db.Column(db.DateTime(timezone=True), + comment='updated at', + server_default=func.now(), + onupdate=func.now()) + started_at = db.Column(db.DateTime(timezone=True), comment='started_at') + finished_at = db.Column(db.DateTime(timezone=True), comment='finished_at') + status = db.Column(db.Enum(TrustedJobStatus, native_enum=False, length=32, 
create_constraint=False),
+                       default=TrustedJobStatus.NEW,
+                       comment='trusted job status')
+    algorithm_uuid = db.Column(db.String(64), comment='algorithm uuid')
+    resource = db.Column('rsc', db.String(255), comment='resource')
+    export_dataset_id = db.Column(db.Integer, comment='export dataset id')
+    result_key = db.Column(db.Text(), comment='result key')
+    # relationship to other tables
+    job = db.relationship(Job.__name__, primaryjoin='Job.id == foreign(TrustedJob.job_id)')
+    project = db.relationship(Project.__name__, primaryjoin='Project.id == foreign(TrustedJob.project_id)')
+    group = db.relationship('TrustedJobGroup',
+                            primaryjoin='TrustedJobGroup.id == foreign(TrustedJob.trusted_job_group_id)')
+    algorithm = db.relationship(Algorithm.__name__, primaryjoin='Algorithm.uuid == foreign(TrustedJob.algorithm_uuid)')
+    export_dataset = db.relationship(Dataset.__name__,
+                                     primaryjoin='Dataset.id == foreign(TrustedJob.export_dataset_id)')
+
+    def to_proto(self) -> TrustedJobPb:
+        trusted_job = TrustedJobPb(
+            id=self.id,
+            type=self.type.name,
+            name=self.name,
+            job_id=self.job_id,
+            uuid=self.uuid,
+            version=self.version,
+            comment=self.comment,
+            project_id=self.project_id,
+            trusted_job_group_id=self.trusted_job_group_id,
+            coordinator_id=self.coordinator_id,
+            status=self.get_status().name,
+            created_at=to_timestamp(self.created_at),
+            updated_at=to_timestamp(self.updated_at),
+            started_at=to_timestamp(self.started_at) if self.started_at is not None else None,
+            finished_at=to_timestamp(self.finished_at) if self.finished_at is not None else None,
+            algorithm_id=self.algorithm.id if self.algorithm else 0,
+            algorithm_uuid=self.algorithm_uuid,
+            ticket_uuid=self.ticket_uuid,
+            ticket_status=self.ticket_status.name,
+            auth_status=self.auth_status.name,
+            participants_info=self.get_participants_info(),
+            ticket_auth_status=self.get_ticket_auth_status().name,
+            export_dataset_id=self.export_dataset_id,
+        )
+        if self.resource is not None:
+            trusted_job.resource.MergeFrom(self.get_resource())
+        return trusted_job
+
+    def to_ref(self) -> TrustedJobRef:
+        return TrustedJobRef(
+            id=self.id,
+            type=self.type.name,
+            name=self.name,
+            coordinator_id=self.coordinator_id,
+            job_id=self.job_id,
+            comment=self.comment,
+            status=self.get_status().name,
+            participants_info=self.get_participants_info(),
+            ticket_auth_status=self.get_ticket_auth_status().name,
+            started_at=to_timestamp(self.started_at) if self.started_at is not None else None,
+            finished_at=to_timestamp(self.finished_at) if self.finished_at is not None else None,
+        )
+
+    def get_resource(self) -> Optional[Resource]:
+        if self.resource is not None:
+            return text_format.Parse(self.resource, Resource())
+        return None
+
+    def set_resource(self, resource: Optional[Resource] = None):
+        if resource is None:
+            resource = Resource()
+        self.resource = text_format.MessageToString(resource)
+
+    def update_status(self):
+        # terminal statuses are sticky; otherwise derive the status from the underlying job state
+        if self.status in [TrustedJobStatus.FAILED, TrustedJobStatus.STOPPED, TrustedJobStatus.SUCCEEDED]:
+            return
+        if self.job is None:
+            return
+        job_state = self.job.state
+        if job_state == JobState.FAILED:
+            self.status = TrustedJobStatus.FAILED
+        elif job_state == JobState.COMPLETED:
+            self.status = TrustedJobStatus.SUCCEEDED
+        elif job_state == JobState.STARTED:
+            self.status = TrustedJobStatus.RUNNING
+        elif job_state == JobState.STOPPED:
+            self.status = TrustedJobStatus.STOPPED
+        elif job_state in [JobState.NEW, JobState.WAITING]:
+            self.status = TrustedJobStatus.PENDING
+        if self.status in [TrustedJobStatus.FAILED,
TrustedJobStatus.STOPPED, TrustedJobStatus.SUCCEEDED]: + self.finished_at = self.job.updated_at + + def get_status(self) -> TrustedJobStatus: + self.update_status() + return self.status + + def to_notification(self) -> TrustedNotification: + return TrustedNotification( + type=TrustedNotification.TRUSTED_JOB_EXPORT, + id=self.id, + name=f'{self.group.name}-{self.name}', + created_at=to_timestamp(self.created_at), + coordinator_id=self.coordinator_id, + ) + + def get_ticket_auth_status(self) -> TicketAuthStatus: + if self.ticket_status == TicketStatus.PENDING: + return TicketAuthStatus.TICKET_PENDING + if self.ticket_status == TicketStatus.DECLINED: + return TicketAuthStatus.TICKET_DECLINED + if self.status == TrustedJobStatus.NEW: + return TicketAuthStatus.CREATE_PENDING + if self.status == TrustedJobStatus.CREATE_FAILED: + return TicketAuthStatus.CREATE_FAILED + if not self.is_all_participants_authorized(): + return TicketAuthStatus.AUTH_PENDING + return TicketAuthStatus.AUTHORIZED diff --git a/web_console_v2/api/fedlearner_webconsole/tee/models_test.py b/web_console_v2/api/fedlearner_webconsole/tee/models_test.py new file mode 100644 index 000000000..28c7c7646 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/models_test.py @@ -0,0 +1,404 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from datetime import datetime +from google.protobuf import text_format +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobStatus, \ + GroupCreateStatus, TrustedJobType, TicketAuthStatus +from fedlearner_webconsole.proto.tee_pb2 import TrustedJobGroupPb, TrustedJobGroupRef, TrustedJobPb, TrustedJobRef, \ + Resource, ParticipantDataset, ParticipantDatasetList, TrustedNotification +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.job.models import JobState, Job, JobType + + +class TrustedJobGroupTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + participant = Participant(id=1, name='p1', domain_name='test.domain.name') + group = TrustedJobGroup( + id=1, + name='trusted job group test', + latest_version=1, + comment='This is comment for test.', + project_id=2, + created_at=datetime(2022, 6, 15, 0, 0, 0), + updated_at=datetime(2022, 6, 15, 0, 0, 0), + creator_username='admin', + coordinator_id=1, + analyzer_id=1, + ticket_uuid='ticket-uuid', + ticket_status=TicketStatus.APPROVED, + status=GroupCreateStatus.SUCCEEDED, + auth_status=AuthStatus.AUTHORIZED, + unauth_participant_ids='1,2', + algorithm_uuid='algorithm-uuid3', + resource='cpu: 2\nmemory: 2\nreplicas: 1\n', + dataset_id=4, + participant_datasets=""" + items { + participant_id: 1 + uuid: "uuid1" + name: "name1" + } + items { + participant_id: 2 + uuid: "uuid2" + name: "name2" + } + """, + ) + session.add_all([group, participant]) + session.commit() + + def test_to_proto(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + pb = TrustedJobGroupPb(id=1, + name='trusted job group test', + latest_version=1, + comment='This is comment for test.', + project_id=2, + created_at=1655251200, + updated_at=1655251200, + creator_username='admin', + coordinator_id=1, + analyzer_id=1, + ticket_uuid='ticket-uuid', + ticket_status='APPROVED', + status='SUCCEEDED', + auth_status='AUTHORIZED', + latest_job_status='NEW', + ticket_auth_status='AUTH_PENDING', + unauth_participant_ids=[1, 2], + algorithm_id=0, + algorithm_uuid='algorithm-uuid3', + resource=Resource(cpu=2, memory=2, replicas=1), + dataset_id=4, + participant_datasets=ParticipantDatasetList(items=[ + ParticipantDataset(participant_id=1, uuid='uuid1', name='name1'), + ParticipantDataset(participant_id=2, uuid='uuid2', name='name2'), + ])) + self.assertEqual(pb, group.to_proto()) + + def test_to_ref(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + ref = TrustedJobGroupRef( + id=1, + name='trusted job group test', + created_at=1655251200, + is_creator=False, + creator_id=1, + ticket_status='APPROVED', + status='SUCCEEDED', + auth_status='AUTHORIZED', + latest_job_status='NEW', + ticket_auth_status='AUTH_PENDING', + unauth_participant_ids=[1, 2], + is_configured=True, + ) + self.assertEqual(ref, group.to_ref()) + + def test_get_latest_job_status(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + job_status = 
group.get_latest_job_status() + self.assertEqual(TrustedJobStatus.NEW, job_status) + with db.session_scope() as session: + new_job = TrustedJob(id=1, version=1, trusted_job_group_id=1, status=TrustedJobStatus.RUNNING) + session.add(new_job) + session.commit() + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + job_status = group.get_latest_job_status() + self.assertEqual(TrustedJobStatus.RUNNING, job_status) + + def test_get_ticket_auth_status(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + # AUTH_PENDING + self.assertEqual(group.get_ticket_auth_status(), TicketAuthStatus.AUTH_PENDING) + # AUTHORIZED + group.unauth_participant_ids = None + self.assertEqual(group.get_ticket_auth_status(), TicketAuthStatus.AUTHORIZED) + # CREATED_FAILED + group.status = GroupCreateStatus.FAILED + self.assertEqual(group.get_ticket_auth_status(), TicketAuthStatus.CREATE_FAILED) + # CREATED_PENDING + group.status = GroupCreateStatus.PENDING + self.assertEqual(group.get_ticket_auth_status(), TicketAuthStatus.CREATE_PENDING) + # TICKET_DECLINED + group.ticket_status = TicketStatus.DECLINED + self.assertEqual(group.get_ticket_auth_status(), TicketAuthStatus.TICKET_DECLINED) + # TICKET_PENDING + group.ticket_status = TicketStatus.PENDING + self.assertEqual(group.get_ticket_auth_status(), TicketAuthStatus.TICKET_PENDING) + + def test_get_resource(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + self.assertEqual(Resource(cpu=2, memory=2, replicas=1), group.get_resource()) + group.resource = None + self.assertIsNone(group.get_resource()) + + def test_set_resource(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + group.set_resource(Resource(cpu=4, memory=4, replicas=1)) + self.assertEqual('cpu: 4\nmemory: 4\nreplicas: 1\n', group.resource) + group.set_resource() + self.assertEqual('', group.resource) + + def test_get_participant_datasets(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + expected = ParticipantDatasetList(items=[ + ParticipantDataset(participant_id=1, uuid='uuid1', name='name1'), + ParticipantDataset(participant_id=2, uuid='uuid2', name='name2'), + ]) + self.assertEqual(expected, group.get_participant_datasets()) + group.participant_datasets = None + self.assertIsNone(group.get_participant_datasets()) + + def test_set_participant_datasets(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + pds = ParticipantDatasetList(items=[ParticipantDataset(participant_id=1, uuid='uuid1', name='name1')]) + group.set_participant_datasets(pds) + self.assertEqual('items {\n participant_id: 1\n uuid: "uuid1"\n name: "name1"\n}\n', + group.participant_datasets) + + def test_get_unauth_participant_ids(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + group.unauth_participant_ids = '2' + self.assertEqual([2], group.get_unauth_participant_ids()) + group.unauth_participant_ids = '2,3,4' + self.assertEqual([2, 3, 4], group.get_unauth_participant_ids()) + group.unauth_participant_ids = None + self.assertEqual([], group.get_unauth_participant_ids()) + + def test_set_unauth_participant_ids(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + 
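+            # an empty list is stored as NULL rather than an empty string: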
group.set_unauth_participant_ids([]) + self.assertIsNone(group.unauth_participant_ids) + group.set_unauth_participant_ids([1, 2, 3]) + self.assertEqual('1,2,3', group.unauth_participant_ids) + + def test_is_deletable(self): + with db.session_scope() as session: + trusted_job1 = TrustedJob(id=1, trusted_job_group_id=1, status=TrustedJobStatus.STOPPED) + trusted_job2 = TrustedJob(id=2, trusted_job_group_id=1, status=TrustedJobStatus.FAILED) + session.add_all([trusted_job1, trusted_job2]) + session.commit() + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + self.assertTrue(group.is_deletable()) + # not deletable + with db.session_scope() as session: + session.query(TrustedJob).filter_by(id=1).update({'status': TrustedJobStatus.RUNNING}) + session.commit() + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + self.assertFalse(group.is_deletable()) + + def test_to_notification(self): + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + notif = group.to_notification() + self.assertEqual(notif.type, TrustedNotification.TRUSTED_JOB_GROUP_CREATE) + self.assertEqual(notif.id, 1) + self.assertEqual(notif.name, 'trusted job group test') + self.assertEqual(notif.created_at, 1655251200) + self.assertEqual(notif.coordinator_id, 1) + + +class TrustedJobTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + participant = Participant(id=1, name='p1', domain_name='test.domain.name') + group = TrustedJobGroup(id=1, name='trusted job group name', project_id=2, coordinator_id=1) + self.participants_info = ParticipantsInfo(participants_map={ + 'self': ParticipantInfo(auth_status='AUTHORIZED'), + 'part1': ParticipantInfo(auth_status='WITHDRAW'), + }) + trusted_job = TrustedJob( + id=1, + type=TrustedJobType.ANALYZE, + name='V1', + job_id=1, + uuid='uuid test', + version=1, + comment='This is comment for test.', + project_id=2, + trusted_job_group_id=1, + coordinator_id=1, + auth_status=AuthStatus.AUTHORIZED, + ticket_status=TicketStatus.APPROVED, + participants_info=text_format.MessageToString(self.participants_info), + created_at=datetime(2022, 6, 14, 0, 0, 0), + updated_at=datetime(2022, 6, 14, 0, 0, 1), + started_at=datetime(2022, 6, 15, 0, 0, 0), + finished_at=datetime(2022, 6, 15, 0, 0, 1), + status=TrustedJobStatus.PENDING, + algorithm_uuid='algorithm-uuid3', + resource='cpu: 2\nmemory: 2\nreplicas: 1\n', + export_dataset_id=2, + ) + job = Job(id=1, + name='trusted-job-1-1-1-uuid', + state=JobState.NEW, + job_type=JobType.CUSTOMIZED, + workflow_id=0, + project_id=1) + session.add_all([group, trusted_job, job, participant]) + session.commit() + + def test_to_proto(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + pb = TrustedJobPb( + id=1, + type='ANALYZE', + name='V1', + job_id=1, + uuid='uuid test', + version=1, + comment='This is comment for test.', + project_id=2, + trusted_job_group_id=1, + coordinator_id=1, + ticket_status='APPROVED', + auth_status='AUTHORIZED', + participants_info=self.participants_info, + ticket_auth_status='AUTH_PENDING', + created_at=1655164800, + updated_at=1655164801, + started_at=1655251200, + finished_at=1655251201, + status='PENDING', + algorithm_id=0, + algorithm_uuid='algorithm-uuid3', + resource=Resource(cpu=2, memory=2, replicas=1), + export_dataset_id=2, + ) + self.assertEqual(pb, trusted_job.to_proto()) + + 
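+    # A hedged sketch (not called by any test): participants_info is persisted as a
+    # protobuf text string, so the proto comparisons in these tests rely on this
+    # round-trip. Only names already imported in this module are used.
+    def _participants_info_round_trip_sketch(self):
+        serialized = text_format.MessageToString(self.participants_info)
+        parsed = text_format.Parse(serialized, ParticipantsInfo())
+        assert parsed == self.participants_info
+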
def test_to_ref(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + ref = TrustedJobRef( + id=1, + type='ANALYZE', + name='V1', + coordinator_id=1, + job_id=1, + comment='This is comment for test.', + started_at=1655251200, + finished_at=1655251201, + status='PENDING', + participants_info=self.participants_info, + ticket_auth_status='AUTH_PENDING', + ) + self.assertEqual(ref, trusted_job.to_ref()) + + def test_get_resource(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + self.assertEqual(Resource(cpu=2, memory=2, replicas=1), trusted_job.get_resource()) + trusted_job.resource = None + self.assertIsNone(trusted_job.get_resource()) + + def test_set_resource(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + trusted_job.set_resource(Resource(cpu=4, memory=4, replicas=1)) + self.assertEqual('cpu: 4\nmemory: 4\nreplicas: 1\n', trusted_job.resource) + trusted_job.set_resource() + self.assertEqual('', trusted_job.resource) + + def test_update_status(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + job: Job = session.query(Job).get(1) + # case 1 + trusted_job.update_status() + self.assertEqual(TrustedJobStatus.PENDING, trusted_job.status) + # case 2 + job.state = JobState.FAILED + trusted_job.update_status() + self.assertEqual(TrustedJobStatus.FAILED, trusted_job.status) + # case 3 + trusted_job.status = TrustedJobStatus.RUNNING + job.state = JobState.COMPLETED + trusted_job.update_status() + self.assertEqual(TrustedJobStatus.SUCCEEDED, trusted_job.status) + # case 4 + job.state = JobState.WAITING + trusted_job.update_status() + self.assertEqual(TrustedJobStatus.SUCCEEDED, trusted_job.status) + + def test_get_status(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + job: Job = session.query(Job).get(1) + self.assertEqual(trusted_job.get_status(), TrustedJobStatus.PENDING) + job.state = JobState.STARTED + self.assertEqual(trusted_job.get_status(), TrustedJobStatus.RUNNING) + job.state = JobState.FAILED + self.assertEqual(trusted_job.get_status(), TrustedJobStatus.FAILED) + job.state = JobState.WAITING + self.assertEqual(trusted_job.get_status(), TrustedJobStatus.FAILED) + + def test_to_notification(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + notif = trusted_job.to_notification() + self.assertEqual(notif.type, TrustedNotification.TRUSTED_JOB_EXPORT) + self.assertEqual(notif.id, 1) + self.assertEqual(notif.name, 'trusted job group name-V1') + self.assertEqual(notif.created_at, 1655164800) + self.assertEqual(notif.coordinator_id, 1) + + def test_get_ticket_auth_status(self): + with db.session_scope() as session: + trusted_job: TrustedJob = session.query(TrustedJob).get(1) + self.assertEqual(trusted_job.get_ticket_auth_status(), TicketAuthStatus.AUTH_PENDING) + self.participants_info.participants_map['part1'].auth_status = 'AUTHORIZED' + trusted_job.set_participants_info(self.participants_info) + self.assertEqual(trusted_job.get_ticket_auth_status(), TicketAuthStatus.AUTHORIZED) + trusted_job.status = TrustedJobStatus.CREATE_FAILED + self.assertEqual(trusted_job.get_ticket_auth_status(), TicketAuthStatus.CREATE_FAILED) + trusted_job.status = TrustedJobStatus.NEW + self.assertEqual(trusted_job.get_ticket_auth_status(), TicketAuthStatus.CREATE_PENDING) + 
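+            # ticket status checks take precedence over trusted-job status: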
trusted_job.ticket_status = TicketStatus.PENDING + self.assertEqual(trusted_job.get_ticket_auth_status(), TicketAuthStatus.TICKET_PENDING) + trusted_job.ticket_status = TicketStatus.DECLINED + self.assertEqual(trusted_job.get_ticket_auth_status(), TicketAuthStatus.TICKET_DECLINED) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/tee/runners.py b/web_console_v2/api/fedlearner_webconsole/tee/runners.py new file mode 100644 index 000000000..db9811ae2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/runners.py @@ -0,0 +1,222 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Tuple + +from envs import Envs +from fedlearner_webconsole.db import db +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, TeeRunnerOutput +from fedlearner_webconsole.proto import dataset_pb2 +from fedlearner_webconsole.tee.models import TrustedJobGroup, GroupCreateStatus, TrustedJob, TrustedJobType, \ + TrustedJobStatus +from fedlearner_webconsole.tee.utils import get_pure_path +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.tee.controller import create_trusted_job_group, TrustedJobGroupController, \ + launch_trusted_job, TrustedJobController, launch_trusted_export_job +from fedlearner_webconsole.exceptions import WebConsoleApiException +from fedlearner_webconsole.dataset.services import DatasetService, BatchService +from fedlearner_webconsole.dataset.models import DatasetType, DatasetKindV2, DatasetFormat, ImportType, StoreFormat + + +class TeeCreateRunner(IRunnerV2): + + @staticmethod + def _create_trusted_job_group(): + # schedule all groups with ticket APPROVED, status PENDING and coordinator_id 0 + processed_groups = set() + with db.session_scope() as session: + groups_ids = session.query(TrustedJobGroup.id).filter_by(ticket_status=TicketStatus.APPROVED, + status=GroupCreateStatus.PENDING, + coordinator_id=0).all() + for group_id, *_ in groups_ids: + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).populate_existing().with_for_update().get( + group_id) + if group.status != GroupCreateStatus.PENDING: + continue + processed_groups.add(group.id) + try: + succeeded, msg = create_trusted_job_group(group) + except WebConsoleApiException as e: + succeeded = False + msg = e.details + if not succeeded: + group.status = GroupCreateStatus.FAILED + logging.warning(f'[create trusted job group scheduler]: group {group.id} failed, exception {msg}') + else: + group.status = GroupCreateStatus.SUCCEEDED + session.commit() + return processed_groups + + @staticmethod + def 
_launch_trusted_job():
+        # schedule all newly created trusted job groups satisfying
+        # status == SUCCEEDED and coordinator_id == 0 and latest_version == 0 and
+        # auth_status == AUTHORIZED and unauth_participant_ids is None
+        processed_groups = set()
+        with db.session_scope() as session:
+            groups_ids = session.query(TrustedJobGroup.id).filter_by(status=GroupCreateStatus.SUCCEEDED,
+                                                                     coordinator_id=0,
+                                                                     latest_version=0,
+                                                                     unauth_participant_ids=None,
+                                                                     auth_status=AuthStatus.AUTHORIZED).all()
+        for group_id, *_ in groups_ids:
+            with db.session_scope() as session:
+                group: TrustedJobGroup = session.query(TrustedJobGroup).populate_existing().with_for_update().get(
+                    group_id)
+                if group.latest_version or group.auth_status != AuthStatus.AUTHORIZED or group.unauth_participant_ids:
+                    continue
+                group.latest_version = 1
+                session.commit()
+                processed_groups.add(group.id)
+                succeeded, msg = launch_trusted_job(group.project_id, group.uuid, group.latest_version)
+                if not succeeded:
+                    logging.warning(f'[launch trusted job scheduler]: group {group.id} failed, exception {msg}')
+        return processed_groups
+
+    @staticmethod
+    def _create_trusted_export_job():
+        # create trusted export jobs remotely whose local ticket is APPROVED but status is still NEW
+        processed_ids = set()
+        with db.session_scope() as session:
+            trusted_jobs_ids = session.query(TrustedJob.id).filter_by(
+                type=TrustedJobType.EXPORT,
+                ticket_status=TicketStatus.APPROVED,
+                status=TrustedJobStatus.NEW,
+            ).all()
+        for trusted_job_id, *_ in trusted_jobs_ids:
+            with db.session_scope() as session:
+                tee_export_job = session.query(TrustedJob).populate_existing().with_for_update().get(trusted_job_id)
+                tee_analyze_job = session.query(TrustedJob).filter_by(
+                    type=TrustedJobType.ANALYZE,
+                    trusted_job_group_id=tee_export_job.trusted_job_group_id,
+                    version=tee_export_job.version).first()
+                processed_ids.add(tee_export_job.id)
+                try:
+                    TrustedJobController(session, tee_export_job.project_id).create_trusted_export_job(
+                        tee_export_job, tee_analyze_job)
+                    tee_export_job.status = TrustedJobStatus.CREATED
+                except WebConsoleApiException as e:
+                    tee_export_job.status = TrustedJobStatus.CREATE_FAILED
+                    logging.warning(
+                        f'[create trusted export job scheduler]: {tee_export_job.id} failed, exception {e.details}')
+                session.commit()
+        return processed_ids
+
+    @staticmethod
+    def _launch_trusted_export_job():
+        processed_ids = set()
+        with db.session_scope() as session:
+            trusted_jobs_ids = session.query(TrustedJob.id).filter_by(
+                type=TrustedJobType.EXPORT,
+                status=TrustedJobStatus.CREATED,
+                coordinator_id=0,
+            ).all()
+        for trusted_job_id, *_ in trusted_jobs_ids:
+            with db.session_scope() as session:
+                tee_export_job = session.query(TrustedJob).get(trusted_job_id)
+                if not tee_export_job.is_all_participants_authorized():
+                    continue
+                processed_ids.add(tee_export_job.id)
+            succeeded, msg = launch_trusted_export_job(tee_export_job.project_id, tee_export_job.uuid)
+            if not succeeded:
+                with db.session_scope() as session:
+                    tee_export_job = session.query(TrustedJob).get(trusted_job_id)
+                    tee_export_job.status = TrustedJobStatus.FAILED
+                    session.commit()
+                logging.warning(f'[launch trusted export job scheduler]: {tee_export_job.id} failed, exception {msg}')
+        return processed_ids
+
+    @staticmethod
+    def _create_export_dataset():
+        processed_ids = set()
+        with db.session_scope() as session:
+            trusted_jobs_ids = session.query(TrustedJob.id).filter_by(
+                type=TrustedJobType.EXPORT,
+                status=TrustedJobStatus.SUCCEEDED,
+                coordinator_id=0,
+                export_dataset_id=None,
+            ).all()
+        for trusted_job_id, *_ in trusted_jobs_ids:
+            with db.session_scope() as session:
+                tee_export_job = session.query(TrustedJob).populate_existing().with_for_update().get(trusted_job_id)
+                if tee_export_job.export_dataset_id:
+                    continue
+                processed_ids.add(tee_export_job.id)
+                dataset = DatasetService(session).create_dataset(
+                    dataset_pb2.DatasetParameter(
+                        name=f'{tee_export_job.group.name}-{tee_export_job.name}',
+                        is_published=False,
+                        type=DatasetType.PSI.value,
+                        project_id=tee_export_job.project_id,
+                        kind=DatasetKindV2.INTERNAL_PROCESSED.value,
+                        format=DatasetFormat.NONE_STRUCTURED.name,
+                        path=f'{get_pure_path(Envs.STORAGE_ROOT)}/job_output/{tee_export_job.job.name}/export',
+                        import_type=ImportType.COPY.value,
+                        store_format=StoreFormat.UNKNOWN.value,
+                        auth_status=AuthStatus.AUTHORIZED.name))
+                session.flush()
+                BatchService(session).create_batch(dataset_pb2.BatchParameter(dataset_id=dataset.id))
+                tee_export_job.export_dataset_id = dataset.id
+                session.commit()
+        return processed_ids
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        # high-frequency runner (compared to TeeResourceCheckRunner) that does the following 5 tasks
+        # 1. create trusted job groups whose status is PENDING and ticket_status is APPROVED
+        # 2. launch newly-self-created trusted jobs with version 0 and status SUCCEEDED when fully authed
+        # 3. create remotely for local trusted export jobs with ticket_status APPROVED and status NEW
+        # 4. launch trusted export jobs with coordinator_id 0 when fully created and authed
+        # 5. create export dataset for successful trusted export job
+        created_group_ids = self._create_trusted_job_group()
+        launched_group_ids = self._launch_trusted_job()
+        created_trusted_export_job_ids = self._create_trusted_export_job()
+        launched_trusted_export_job_ids = self._launch_trusted_export_job()
+        created_dataset_trusted_export_job_ids = self._create_export_dataset()
+        return RunnerStatus.DONE, RunnerOutput(tee_runner_output=TeeRunnerOutput(
+            created_group_ids=list(created_group_ids),
+            launched_group_ids=list(launched_group_ids),
+            created_trusted_export_job_ids=list(created_trusted_export_job_ids),
+            launched_trusted_export_job_ids=list(launched_trusted_export_job_ids),
+            created_dataset_trusted_export_job_ids=list(created_dataset_trusted_export_job_ids),
+        ))
+
+
+class TeeResourceCheckRunner(IRunnerV2):
+
+    @staticmethod
+    def _update_unauth_participant_ids():
+        processed_groups = set()
+        with db.session_scope() as session:
+            group_ids = session.query(TrustedJobGroup.id).filter_by(status=GroupCreateStatus.SUCCEEDED).all()
+        for group_id, *_ in group_ids:
+            with db.session_scope() as session:
+                group = session.query(TrustedJobGroup).populate_existing().with_for_update().get(group_id)
+                TrustedJobGroupController(session, group.project_id).update_unauth_participant_ids(group)
+                processed_groups.add(group_id)
+                session.commit()
+        return processed_groups
+
+    def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]:
+        # low-frequency runner (compared to TeeCreateRunner) that does the following 2 tasks
+        # 1. get auth_status of participants actively in case grpc InformTrustedJobGroup failed
+        # 2.
TODO(liuledian): get export_auth_status actively + checked_group_ids = self._update_unauth_participant_ids() + return RunnerStatus.DONE, RunnerOutput(tee_runner_output=TeeRunnerOutput( + checked_group_ids=list(checked_group_ids))) diff --git a/web_console_v2/api/fedlearner_webconsole/tee/runners_test.py b/web_console_v2/api/fedlearner_webconsole/tee/runners_test.py new file mode 100644 index 000000000..bd97c39e5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/runners_test.py @@ -0,0 +1,403 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, MagicMock +from google.protobuf.empty_pb2 import Empty +from google.protobuf.text_format import MessageToString +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.dataset.models import Dataset, DataBatch +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmProject, AlgorithmType +from fedlearner_webconsole.tee.models import TrustedJobGroup, GroupCreateStatus, TrustedJob, TrustedJobType, \ + TrustedJobStatus +from fedlearner_webconsole.tee.runners import TeeCreateRunner, TeeResourceCheckRunner +from fedlearner_webconsole.proto.tee_pb2 import ParticipantDatasetList, ParticipantDataset, Resource +from fedlearner_webconsole.proto.rpc.v2.job_service_pb2 import GetTrustedJobGroupResponse +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.review.common import NO_CENTRAL_SERVER_UUID + + +class TeeRunnerTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + proj_part2 = ProjectParticipant(project_id=1, participant_id=2) + dataset1 = Dataset(id=1, name='dataset-name1', uuid='dataset-uuid1', is_published=True) + data_batch1 = DataBatch(id=1, dataset_id=1) + dataset2 = Dataset(id=2, name='dataset-name2', uuid='dataset-uuid2', is_published=False) + algorithm = Algorithm(id=1, + uuid='algorithm-uuid1', + type=AlgorithmType.TRUSTED_COMPUTING, + algorithm_project_id=1) + algorithm_proj = AlgorithmProject(id=1, uuid='algorithm-proj-uuid') + session.add_all([ + project, participant1, proj_part1, participant2, proj_part2, dataset1, data_batch1, dataset2, algorithm, + algorithm_proj + ]) + session.commit() + + 
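+    # A hedged sketch (not called by any test) of the lock-then-recheck pattern the
+    # runners under test use: each id is re-fetched under a row lock and its state
+    # re-checked, so two concurrent runner instances cannot process the same group
+    # twice. Only names already imported in this module are used.
+    def _locked_recheck_sketch(self, group_id):
+        with db.session_scope() as session:
+            group = session.query(TrustedJobGroup).populate_existing().with_for_update().get(group_id)
+            if group.status != GroupCreateStatus.PENDING:
+                return None
+            return group.id
+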
@patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_create_trusted_job_group(self, mock_remote_do_two_pc, mock_get_system_info): + mock_remote_do_two_pc.return_value = True, '' + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + with db.session_scope() as session: + participant_datasets = ParticipantDatasetList( + items=[ParticipantDataset(participant_id=1, uuid='dataset-uuid3', name='dataset-name3')]) + # group in ticket_status APPROVED / status PENDING / valid params + group1 = TrustedJobGroup(id=1, + project_id=1, + ticket_status=TicketStatus.APPROVED, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + algorithm_uuid='algorithm-uuid1', + dataset_id=1, + coordinator_id=0, + analyzer_id=1, + uuid='uuid1') + group1.set_participant_datasets(participant_datasets) + # group in ticket_status APPROVED / status PENDING / invalid params + # error at controller run + group2 = TrustedJobGroup(id=2, + project_id=10, + ticket_status=TicketStatus.APPROVED, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + algorithm_uuid='algorithm-uuid2', + dataset_id=1, + coordinator_id=0, + analyzer_id=0, + uuid='uuid2') + # error at prepare + group3 = TrustedJobGroup(id=3, + project_id=1, + ticket_status=TicketStatus.APPROVED, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + algorithm_uuid='algorithm-uuid1', + dataset_id=2, + coordinator_id=0, + analyzer_id=0, + uuid='uuid3') + # status FAILED + group4 = TrustedJobGroup(id=4, + project_id=1, + ticket_status=TicketStatus.APPROVED, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + algorithm_uuid='algorithm-uuid1', + dataset_id=1, + coordinator_id=0, + analyzer_id=0, + status=GroupCreateStatus.FAILED, + uuid='uuid4') + # status SUCCEEDED + group5 = TrustedJobGroup(id=5, + project_id=1, + ticket_status=TicketStatus.APPROVED, + ticket_uuid=NO_CENTRAL_SERVER_UUID, + algorithm_uuid='algorithm-uuid1', + dataset_id=1, + coordinator_id=0, + analyzer_id=0, + status=GroupCreateStatus.SUCCEEDED, + uuid='uuid5') + session.add_all([group1, group2, group3, group4, group5]) + session.commit() + runner = TeeCreateRunner() + # first run + # pylint: disable=protected-access + processed_groups = runner._create_trusted_job_group() + self.assertEqual(processed_groups, set([1, 2, 3])) + with db.session_scope() as session: + group1: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + self.assertEqual(group1.status, GroupCreateStatus.SUCCEEDED) + group2: TrustedJobGroup = session.query(TrustedJobGroup).get(2) + self.assertEqual(group2.status, GroupCreateStatus.FAILED) + group3: TrustedJobGroup = session.query(TrustedJobGroup).get(3) + self.assertEqual(group3.status, GroupCreateStatus.FAILED) + # second run should do nothing + processed_groups = runner._create_trusted_job_group() + self.assertEqual(processed_groups, set()) + + @patch('fedlearner_webconsole.tee.services.get_batch_data_path') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager._remote_do_two_pc') + def test_launch_trusted_job(self, mock_remote_do_two_pc, mock_get_system_info, mock_get_batch_data_path): + mock_remote_do_two_pc.return_value = True, '' + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + mock_get_batch_data_path.return_value = 'file:///data/test' + with db.session_scope() as session: + # valid + group1 = TrustedJobGroup(id=1, + project_id=1, + 
uuid='uuid1', + status=GroupCreateStatus.SUCCEEDED, + coordinator_id=0, + latest_version=0, + algorithm_uuid='algorithm-uuid1', + dataset_id=1, + auth_status=AuthStatus.AUTHORIZED, + resource=MessageToString(Resource(cpu=1000, memory=1, replicas=1))) + # not fully authorized + group2 = TrustedJobGroup(id=2, + project_id=1, + uuid='uuid2', + status=GroupCreateStatus.SUCCEEDED, + coordinator_id=0, + latest_version=0, + algorithm_uuid='algorithm-uuid1', + auth_status=AuthStatus.AUTHORIZED, + unauth_participant_ids='1,2') + # not creator + group3 = TrustedJobGroup(id=3, + project_id=1, + uuid='uuid3', + status=GroupCreateStatus.SUCCEEDED, + coordinator_id=1, + latest_version=0, + algorithm_uuid='algorithm-uuid1', + auth_status=AuthStatus.AUTHORIZED) + session.add_all([group1, group2, group3]) + session.commit() + runner = TeeCreateRunner() + # first run + # pylint: disable=protected-access + processed_groups = runner._launch_trusted_job() + self.assertCountEqual(processed_groups, [1]) + with db.session_scope() as session: + group1: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + self.assertEqual(group1.latest_version, 1) + trusted_job: TrustedJob = session.query(TrustedJob).filter_by(trusted_job_group_id=1, version=1).first() + self.assertIsNotNone(trusted_job) + self.assertEqual(group2.latest_version, 0) + self.assertEqual(group3.latest_version, 0) + # second run + processed_groups = runner._launch_trusted_job() + self.assertEqual(processed_groups, set()) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.get_trusted_job_group') + def test_update_unauth_participant_ids(self, mock_client: MagicMock): + with db.session_scope() as session: + group1 = TrustedJobGroup(id=1, + uuid='uuid1', + project_id=1, + unauth_participant_ids='1,2', + status=GroupCreateStatus.SUCCEEDED) + group2 = TrustedJobGroup(id=2, + uuid='uuid2', + project_id=1, + unauth_participant_ids='2', + status=GroupCreateStatus.SUCCEEDED) + group3 = TrustedJobGroup(id=3, + uuid='uuid3', + project_id=1, + unauth_participant_ids='1,2', + status=GroupCreateStatus.FAILED) + session.add_all([group1, group2, group3]) + session.commit() + # for 2 participants and 2 valid groups, client should be called 4 times + # pylint: disable=protected-access + mock_client.side_effect = [ + GetTrustedJobGroupResponse(auth_status='PENDING'), + GetTrustedJobGroupResponse(auth_status='AUTHORIZED'), + GetTrustedJobGroupResponse(auth_status='AUTHORIZED'), + GetTrustedJobGroupResponse(auth_status='AUTHORIZED') + ] + runner = TeeResourceCheckRunner() + processed_groups = runner._update_unauth_participant_ids() + self.assertCountEqual(processed_groups, [1, 2]) + with db.session_scope() as session: + group1: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + self.assertCountEqual(group1.get_unauth_participant_ids(), [1]) + group2: TrustedJobGroup = session.query(TrustedJobGroup).get(2) + self.assertCountEqual(group2.get_unauth_participant_ids(), []) + + @patch('fedlearner_webconsole.rpc.v2.job_service_client.JobServiceClient.create_trusted_export_job') + def test_create_trusted_export_job(self, mock_client: MagicMock): + mock_client.return_value = Empty() + with db.session_scope() as session: + tee_analyze_job = TrustedJob(id=1, + uuid='uuid1', + type=TrustedJobType.ANALYZE, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=2, + status=TrustedJobStatus.SUCCEEDED) + tee_export_job1 = TrustedJob(id=2, + uuid='uuid2', + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, 
+ export_count=1, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.NEW) + tee_export_job2 = TrustedJob(id=3, + uuid='uuid3', + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=2, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.NEW) + tee_export_job3 = TrustedJob(id=4, + uuid='uuid4', + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=1, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.CREATED) + tee_export_job4 = TrustedJob(id=5, + uuid='uuid5', + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=3, + ticket_status=TicketStatus.PENDING, + status=TrustedJobStatus.NEW) + session.add_all([tee_analyze_job, tee_export_job1, tee_export_job2, tee_export_job3, tee_export_job4]) + session.commit() + runner = TeeCreateRunner() + # pylint: disable=protected-access + processed_ids = runner._create_trusted_export_job() + self.assertCountEqual(processed_ids, [2, 3]) + with db.session_scope() as session: + tee_export_job1 = session.query(TrustedJob).get(2) + self.assertEqual(tee_export_job1.status, TrustedJobStatus.CREATED) + tee_export_job2 = session.query(TrustedJob).get(3) + self.assertEqual(tee_export_job2.status, TrustedJobStatus.CREATED) + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager.run') + def test_launch_trusted_export_job(self, mock_run): + mock_run.return_value = True, '' + with db.session_scope() as session: + tee_export_job1 = TrustedJob(id=1, + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=1, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.CREATED, + coordinator_id=0) + tee_export_job2 = TrustedJob(id=2, + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=2, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.CREATED, + coordinator_id=0) + participants_info = tee_export_job2.get_participants_info() + participants_info.participants_map['domain1'].auth_status = 'WITHDRAW' + tee_export_job2.set_participants_info(participants_info) + tee_export_job3 = TrustedJob(id=3, + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=3, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.CREATED, + coordinator_id=1) + tee_export_job4 = TrustedJob(id=4, + type=TrustedJobType.EXPORT, + project_id=1, + version=2, + trusted_job_group_id=1, + export_count=1, + ticket_status=TicketStatus.APPROVED, + status=TrustedJobStatus.NEW, + coordinator_id=0) + session.add_all([tee_export_job1, tee_export_job2, tee_export_job3, tee_export_job4]) + session.commit() + runner = TeeCreateRunner() + # pylint: disable=protected-access + processed_ids = runner._launch_trusted_export_job() + self.assertCountEqual(processed_ids, [1]) + + def test_create_export_dataset(self): + with db.session_scope() as session: + group1 = TrustedJobGroup(id=1, name='group1', project_id=1, uuid='group-uuid1') + tee_export_job1 = TrustedJob(id=1, + name='V1-me-1', + type=TrustedJobType.EXPORT, + project_id=1, + job_id=1, + version=1, + trusted_job_group_id=1, + export_count=1, + status=TrustedJobStatus.SUCCEEDED, + coordinator_id=0) + tee_export_job2 = TrustedJob(id=2, + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=2, + status=TrustedJobStatus.RUNNING, + coordinator_id=0) + tee_export_job3 
= TrustedJob(id=3, + type=TrustedJobType.EXPORT, + project_id=1, + version=1, + trusted_job_group_id=1, + export_count=3, + status=TrustedJobStatus.SUCCEEDED, + coordinator_id=1) + tee_export_job4 = TrustedJob(id=4, + type=TrustedJobType.EXPORT, + project_id=1, + version=2, + trusted_job_group_id=1, + export_count=1, + status=TrustedJobStatus.SUCCEEDED, + coordinator_id=0, + export_dataset_id=1) + job1 = Job(id=1, + name='job-name1', + job_type=JobType.CUSTOMIZED, + workflow_id=0, + project_id=1, + state=JobState.COMPLETED) + session.add_all([group1, tee_export_job1, tee_export_job2, tee_export_job3, tee_export_job4, job1]) + session.commit() + runner = TeeCreateRunner() + # pylint: disable=protected-access + processed_ids = runner._create_export_dataset() + self.assertCountEqual(processed_ids, [1]) + with db.session_scope() as session: + tee_export_job1 = session.query(TrustedJob).get(1) + dataset = session.query(Dataset).get(tee_export_job1.export_dataset_id) + self.assertEqual(dataset.name, 'group1-V1-me-1') + self.assertEqual(len(dataset.data_batches), 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/tee/services.py b/web_console_v2/api/fedlearner_webconsole/tee/services.py new file mode 100644 index 000000000..28849626f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/services.py @@ -0,0 +1,262 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
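Note on the service module below: `tee/services.py` does not go through a workflow template; each TEE job is assembled directly as a `JobDefinition` proto from a flat variables dict. A minimal sketch of that pattern, reusing only calls visible in this diff (`make_variable` and the `JobDefinition` constructor); every value in the dict is an illustrative placeholder, not a real cluster setting:

```python
# Sketch only: mirrors how the job-definition builders below assemble their
# protos. All values in variables_dict are placeholders.
from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition
from fedlearner_webconsole.workflow_template.utils import make_variable


def build_demo_definition(job_name: str, yaml_template: str) -> JobDefinition:
    variables_dict = {
        'task_type': 'ANALYZE',  # 'EXPORT' for trusted export jobs
        'worker_cpu': '1000m',   # placeholder resource values
        'worker_mem': '4Gi',
        'worker_replicas': 1,    # ints are also accepted as typed values
    }
    # Each entry becomes a typed Variable proto consumed by the YAML template.
    variables = [make_variable(name=k, typed_value=v) for k, v in variables_dict.items()]
    return JobDefinition(name=job_name,
                         job_type=JobDefinition.CUSTOMIZED,
                         is_federated=False,
                         variables=variables,
                         yaml_template=yaml_template)
```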
+# + +from typing import Optional +from google.protobuf.text_format import MessageToString +from envs import Envs +from sqlalchemy.orm import Session +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobStatus, TrustedJobType +from fedlearner_webconsole.tee.tee_job_template import TEE_YAML_TEMPLATE +from fedlearner_webconsole.tee.utils import get_pure_path +from fedlearner_webconsole.utils.pp_datetime import now +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.flag.models import Flag +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.job.controller import create_job_without_workflow, stop_job, \ + schedule_job, start_job_if_ready +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.dataset.data_path import get_batch_data_path + + +def check_tee_enabled() -> bool: + # TODO(liuledian): call k8s api to check whether it has sgx machines instead of using system variables + return Flag.TEE_MACHINE_DEPLOYED.value + + +def creat_tee_analyze_job_definition(session: Session, job_name: str, trusted_job: TrustedJob, + group: TrustedJobGroup) -> JobDefinition: + domain_name = SettingService.get_system_info().domain_name + analyzer_domain = domain_name + if group.analyzer_id: + analyzer_domain = session.query(Participant).get(group.analyzer_id).domain_name + provider_domain_ls = [] + pds = group.get_participant_datasets() + if pds: + for pd in pds.items: + provider_domain_ls.append(session.query(Participant).get(pd.participant_id).domain_name) + input_data_path = '' + if group.dataset_id: + input_data_path = get_pure_path(get_batch_data_path(group.dataset.get_single_batch())) + provider_domain_ls.append(domain_name) + algorithm = AlgorithmFetcher(trusted_job.project_id).get_algorithm(trusted_job.algorithm_uuid) + variables_dict = { + 'project_name': group.project.name, + 'data_role': 'PROVIDER' if group.analyzer_id else 'ANALYZER', + 'task_type': 'ANALYZE', + 'input_data_path': input_data_path, + 'output_data_path': f'{get_pure_path(Envs.STORAGE_ROOT)}/job_output/{job_name}/output', + 'algorithm': { + 'path': get_pure_path(algorithm.path), + 'config': MessageToString(algorithm.parameter, as_one_line=True), + }, + 'domain_name': domain_name, + 'analyzer_domain': analyzer_domain, + 'providers_domain': ','.join(provider_domain_ls), + 'pccs_url': 'https://sgx-dcap-server.bytedance.com/sgx/certification/v3/', + 'sgx_mem': '' if group.analyzer_id else '100', + 'worker_cpu': f'{trusted_job.get_resource().cpu}m', + 'worker_mem': f'{trusted_job.get_resource().memory}Gi', + 'worker_replicas': 1, + } + variables = [make_variable(name=k, typed_value=v) for k, v in variables_dict.items()] + return JobDefinition(name=job_name, + job_type=JobDefinition.CUSTOMIZED, + is_federated=False, + variables=variables, + yaml_template=TEE_YAML_TEMPLATE) + + +def creat_tee_export_job_definition(session: Session, job_name: str, tee_export_job: TrustedJob, + tee_analyze_job: TrustedJob, group: 
TrustedJobGroup) -> JobDefinition: + domain_name = SettingService.get_system_info().domain_name + analyzer_domain = domain_name + if group.analyzer_id: + analyzer_domain = session.query(Participant).get(group.analyzer_id).domain_name + receiver_domain = domain_name + if tee_export_job.coordinator_id: + receiver_domain = session.query(Participant).get(tee_export_job.coordinator_id).domain_name + variables_dict = { + 'data_role': 'PROVIDER' if group.analyzer_id else 'ANALYZER', + 'task_type': 'EXPORT', + 'output_data_path': f'{get_pure_path(Envs.STORAGE_ROOT)}/job_output/{tee_analyze_job.job.name}/output', + 'export_data_path': f'{get_pure_path(Envs.STORAGE_ROOT)}/job_output/{job_name}/export/batch/0', + 'algorithm': { + 'path': '', + 'config': '', + }, + 'domain_name': domain_name, + 'receiver_domain': receiver_domain, + 'analyzer_domain': analyzer_domain, + 'worker_cpu': f'{tee_export_job.get_resource().cpu}m', + 'worker_mem': f'{tee_export_job.get_resource().memory}Gi', + 'worker_replicas': 1, + } + variables = [make_variable(name=k, typed_value=v) for k, v in variables_dict.items()] + return JobDefinition(name=job_name, + job_type=JobDefinition.CUSTOMIZED, + is_federated=False, + variables=variables, + yaml_template=TEE_YAML_TEMPLATE) + + +class TrustedJobGroupService: + + def __init__(self, session: Session): + self._session = session + + def delete(self, group: TrustedJobGroup): + for trusted_job in group.trusted_jobs: + self._session.delete(trusted_job.job) + self._session.query(TrustedJob).filter_by(trusted_job_group_id=group.id).delete() + self._session.delete(group) + + def lock_and_update_version(self, group_id: int, version: Optional[int] = None) -> TrustedJobGroup: + """ + If param version is None, increment the latest version by 1. + Otherwise, set the latest version as param version only if param version is larger + """ + group: TrustedJobGroup = self._session.query(TrustedJobGroup).populate_existing().with_for_update().get( + group_id) + if version is None: + group.latest_version = group.latest_version + 1 + elif version > group.latest_version: + group.latest_version = version + return group + + def launch_trusted_job(self, group: TrustedJobGroup, uuid: str, version: int, coordinator_id: int): + self.lock_and_update_version(group.id, version) + name = f'V{version}' + trusted_job = TrustedJob( + name=name, + uuid=uuid, + version=version, + coordinator_id=coordinator_id, + project_id=group.project_id, + trusted_job_group_id=group.id, + started_at=now(), + status=TrustedJobStatus.CREATED, + algorithm_uuid=group.algorithm_uuid, + resource=group.resource, + ) + job_name = f'trusted-job-{version}-{uuid}' + job_definition = creat_tee_analyze_job_definition(self._session, job_name, trusted_job, group) + job = create_job_without_workflow(self._session, job_definition, group.project_id, job_name) + schedule_job(self._session, job) + start_job_if_ready(self._session, job) + trusted_job.job_id = job.id + trusted_job.update_status() + self._session.add(trusted_job) + self._session.flush() + + +class TrustedJobService: + + def __init__(self, session: Session): + self._session = session + + def lock_and_update_export_count(self, trusted_job_id: int) -> TrustedJob: + trusted_job = self._session.query(TrustedJob).populate_existing().with_for_update().get(trusted_job_id) + if not trusted_job.export_count: + trusted_job.export_count = 1 + else: + trusted_job.export_count += 1 + return trusted_job + + def stop_trusted_job(self, trusted_job: TrustedJob): + if trusted_job.get_status() != 
TrustedJobStatus.RUNNING: + return + if trusted_job.job is not None: + stop_job(self._session, trusted_job.job) + self._session.flush() + + def create_external_export(self, uuid: str, name: str, coordinator_id: int, export_count: int, ticket_uuid: str, + tee_analyze_job: TrustedJob): + """Create trusted export job for non-coordinator, called by rpc server layer""" + tee_export_job = TrustedJob( + name=name, + type=TrustedJobType.EXPORT, + uuid=uuid, + version=tee_analyze_job.version, + export_count=export_count, + project_id=tee_analyze_job.project_id, + trusted_job_group_id=tee_analyze_job.trusted_job_group_id, + coordinator_id=coordinator_id, + ticket_uuid=ticket_uuid, + ticket_status=TicketStatus.APPROVED, + auth_status=AuthStatus.PENDING, + status=TrustedJobStatus.CREATED, + resource=tee_analyze_job.resource, + result_key=tee_analyze_job.result_key, + ) + participants = ParticipantService(self._session).get_participants_by_project(tee_analyze_job.project_id) + participants_info = ParticipantsInfo() + for p in participants: + participants_info.participants_map[p.pure_domain_name()].CopyFrom( + ParticipantInfo(auth_status=AuthStatus.PENDING.name)) + self_pure_dn = SettingService.get_system_info().pure_domain_name + participants_info.participants_map[self_pure_dn].auth_status = AuthStatus.PENDING.name + coordinator_pure_dn = self._session.query(Participant).get(coordinator_id).pure_domain_name() + participants_info.participants_map[coordinator_pure_dn].auth_status = AuthStatus.AUTHORIZED.name + tee_export_job.set_participants_info(participants_info) + self._session.add(tee_export_job) + self._session.flush() + + def create_internal_export(self, uuid: str, tee_analyze_job: TrustedJob): + """Create trusted export job for coordinator, called by api layer""" + self_pure_dn = SettingService.get_system_info().pure_domain_name + tee_export_job = TrustedJob( + name=f'V{tee_analyze_job.version}-{self_pure_dn}-{tee_analyze_job.export_count}', + type=TrustedJobType.EXPORT, + uuid=uuid, + version=tee_analyze_job.version, + export_count=tee_analyze_job.export_count, + project_id=tee_analyze_job.project_id, + trusted_job_group_id=tee_analyze_job.trusted_job_group_id, + coordinator_id=0, + auth_status=AuthStatus.AUTHORIZED, + status=TrustedJobStatus.NEW, + resource=tee_analyze_job.resource, + result_key=tee_analyze_job.result_key, + ) + participants = ParticipantService(self._session).get_participants_by_project(tee_analyze_job.project_id) + participants_info = ParticipantsInfo() + for p in participants: + participants_info.participants_map[p.pure_domain_name()].auth_status = AuthStatus.PENDING.name + participants_info.participants_map[self_pure_dn].auth_status = AuthStatus.AUTHORIZED.name + tee_export_job.set_participants_info(participants_info) + self._session.add(tee_export_job) + self._session.flush() + + def launch_trusted_export_job(self, tee_export_job: TrustedJob): + job_name = f'trusted-job-{tee_export_job.version}-{tee_export_job.uuid}' + tee_analyze_job = self._session.query(TrustedJob).filter_by( + type=TrustedJobType.ANALYZE, + trusted_job_group_id=tee_export_job.trusted_job_group_id, + version=tee_export_job.version).first() + job_definition = creat_tee_export_job_definition(self._session, job_name, tee_export_job, tee_analyze_job, + tee_export_job.group) + job = create_job_without_workflow(self._session, job_definition, tee_export_job.project_id, job_name) + tee_export_job.started_at = now() + schedule_job(self._session, job) + start_job_if_ready(self._session, job) + 
tee_export_job.job_id = job.id + tee_export_job.update_status() + self._session.flush() diff --git a/web_console_v2/api/fedlearner_webconsole/tee/tee_job_template.py b/web_console_v2/api/fedlearner_webconsole/tee/tee_job_template.py new file mode 100644 index 000000000..08d063a0a --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/tee_job_template.py @@ -0,0 +1,154 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +TEE_YAML_TEMPLATE = """{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FedApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "labels": dict(system.variables.labels), + "annotations": { + "queue": "fedlearner", + "schedulerName": "batch", + }, + }, + "spec": { + "activeDeadlineSeconds": 86400, + "fedReplicaSpecs": { + "Worker": { + "mustSuccess": True, + "port": { + "containerPort": 50051, + "name": "grpc-port", + "protocol": "TCP" + }, + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "name": "gene-analysis", + "image": str(system.variables.sgx_image), + "volumeMounts": list(system.variables.volume_mounts_list), + "command": [ + '/bin/bash' + ], + "args": [ + '/app/entrypoint.sh' + ], + "env": system.basic_envs_list + [ + { + "name": "PROJECT_NAME", + "value": str(self.variables.get("project_name", "")) + }, + { + "name": "DATA_ROLE", + "value": str(self.variables.get("data_role", "")) + }, + { + "name": "TASK_TYPE", + "value": str(self.variables.get("task_type", "")) + }, + { + "name": "INPUT_DATA_PATH", + "value": str(self.variables.get("input_data_path", "")) + }, + { + "name": "OUTPUT_DATA_PATH", + "value": str(self.variables.get("output_data_path", "")) + }, + { + "name": "EXPORT_DATA_PATH", + "value": str(self.variables.get("export_data_path", "")) + }, + { + "name": "ALGORITHM_PATH", + "value": str(self.variables.algorithm.path) + }, + { + "name": "ALGORITHM_PARAMETERS", + "value": str(self.variables.algorithm.config) + }, + { + "name": "DOMAIN_NAME", + "value": str(self.variables.get("domain_name", "")) + }, + { + "name": "ANALYZER_DOMAIN", + "value": str(self.variables.get("analyzer_domain", "")) + }, + { + "name": "PROVIDERS_DOMAIN", + "value": str(self.variables.get("providers_domain", "")) + }, + { + "name": "RECEIVER_DOMAIN", + "value": str(self.variables.get("receiver_domain", "")) + }, + { + "name": "PCCS_URL", + "value": str(self.variables.get("pccs_url", "")) + }, + { + "name": "RESULT_KEY", + "value": str(self.variables.get("result_key", "")) + } + ] + [], + "imagePullPolicy": "IfNotPresent", + "ports": [ + { + "containerPort": 50051, + "name": "grpc-port", + "protocol": "TCP" + } + ], + "resources": { + "limits": { + "cpu": self.variables.worker_cpu, + "memory": self.variables.worker_mem + } , + "requests": { + "cpu": self.variables.worker_cpu, + "memory": self.variables.worker_mem + } + } if not self.variables.get("sgx_mem", "") else { + "limits": { + "sgx_epc_mem": str(self.variables.sgx_mem), + "cpu": 
self.variables.worker_cpu, + "memory": self.variables.worker_mem + } , + "requests": { + "sgx_epc_mem": str(self.variables.sgx_mem), + "cpu": self.variables.worker_cpu, + "memory": self.variables.worker_mem + } + } + + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": list(system.variables.volumes_list) + } + }, + "replicas": self.variables.worker_replicas + } + } + } +}""" diff --git a/web_console_v2/api/fedlearner_webconsole/tee/utils.py b/web_console_v2/api/fedlearner_webconsole/tee/utils.py new file mode 100644 index 000000000..9421cca0c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/utils.py @@ -0,0 +1,78 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from urllib.parse import urlparse +from sqlalchemy.orm import Session +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.algorithm.models import Algorithm +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob +from fedlearner_webconsole.proto.algorithm_pb2 import AlgorithmPb +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher + + +def get_project(session: Session, project_id: int) -> Project: + project = session.query(Project).get(project_id) + if project is None: + raise InvalidArgumentException(f'project {project_id} is not found') + return project + + +def get_dataset(session: Session, dataset_id: int) -> Dataset: + dataset = session.query(Dataset).get(dataset_id) + if dataset is None: + raise InvalidArgumentException(f'dataset {dataset_id} is not found') + return dataset + + +def get_algorithm(session: Session, algorithm_id: int) -> Algorithm: + algorithm = session.query(Algorithm).get(algorithm_id) + if algorithm is None: + raise InvalidArgumentException(f'algorithm {algorithm_id} is not found') + return algorithm + + +def get_participant(session: Session, participant_id: int) -> Participant: + participant = session.query(Participant).get(participant_id) + if participant is None: + raise InvalidArgumentException(f'participant {participant_id} is not found') + return participant + + +def get_trusted_job_group(session: Session, project_id: int, group_id: int) -> TrustedJobGroup: + group = session.query(TrustedJobGroup).filter_by(id=group_id, project_id=project_id).first() + if group is None: + raise NotFoundException(f'trusted job group {group_id} is not found') + return group + + +def get_trusted_job(session: Session, project_id: int, trusted_job_id: int) -> TrustedJob: + trusted_job = session.query(TrustedJob).filter_by(id=trusted_job_id, project_id=project_id).first() + if trusted_job is None: + raise NotFoundException(f'trusted job {trusted_job_id} is not found') + return trusted_job + + +def get_algorithm_with_uuid(project_id: int, 
algorithm_uuid: str) -> AlgorithmPb: + try: + return AlgorithmFetcher(project_id).get_algorithm(algorithm_uuid) + except NotFoundException as e: + raise InvalidArgumentException(f'algorithm {algorithm_uuid} is not found') from e + + +def get_pure_path(path: str) -> str: + return urlparse(path).path diff --git a/web_console_v2/api/fedlearner_webconsole/tee/utils_test.py b/web_console_v2/api/fedlearner_webconsole/tee/utils_test.py new file mode 100644 index 000000000..8e2246bd4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/tee/utils_test.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, MagicMock +import grpc +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.rpc.client import FakeRpcError +from fedlearner_webconsole.tee.utils import get_project, get_dataset, get_algorithm, get_participant, \ + get_trusted_job_group, get_trusted_job, get_algorithm_with_uuid, get_pure_path +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.algorithm.models import Algorithm +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob + + +class UtilsTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project') + dataset = Dataset(id=1, name='dataset') + algorithm = Algorithm(id=1, name='algorithm', project_id=1, uuid='uuid1') + participant = Participant(id=1, name='part', domain_name='domain') + group = TrustedJobGroup(id=1, name='trusted-group', project_id=1) + trusted_job = TrustedJob(id=1, name='V1', version=1, project_id=1, trusted_job_group_id=1) + session.add_all([project, dataset, algorithm, participant, group, trusted_job]) + session.commit() + + def test_get_project(self): + with db.session_scope() as session: + project = get_project(session, 1) + self.assertEqual(project.name, 'project') + with self.assertRaises(InvalidArgumentException): + get_project(session, 2) + + def test_get_dataset(self): + with db.session_scope() as session: + dataset = get_dataset(session, 1) + self.assertEqual(dataset.name, 'dataset') + with self.assertRaises(InvalidArgumentException): + get_dataset(session, 2) + + def test_get_algorithm(self): + with db.session_scope() as session: + algorithm = get_algorithm(session, 1) + self.assertEqual(algorithm.name, 'algorithm') + with self.assertRaises(InvalidArgumentException): + get_algorithm(session, 2) + + def test_get_participant(self): + with db.session_scope() as session: + participant = get_participant(session, 1) + self.assertEqual(participant.name, 'part') + with self.assertRaises(InvalidArgumentException): + 
get_participant(session, 2) + + def test_get_trusted_job_group(self): + with db.session_scope() as session: + group = get_trusted_job_group(session, 1, 1) + self.assertEqual(group.name, 'trusted-group') + with self.assertRaises(NotFoundException): + get_trusted_job_group(session, 1, 2) + + def test_get_trusted_job(self): + with db.session_scope() as session: + trusted_job = get_trusted_job(session, 1, 1) + self.assertEqual(trusted_job.name, 'V1') + with self.assertRaises(NotFoundException): + get_trusted_job(session, 1, 2) + + @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant') + def test_get_algorithm_with_uuid(self, mock_get_algorithm: MagicMock): + mock_get_algorithm.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'not found') + algorithm = get_algorithm_with_uuid(1, 'uuid1') + self.assertEqual(algorithm.name, 'algorithm') + with self.assertRaises(InvalidArgumentException): + get_algorithm_with_uuid(1, 'not-exist') + + def test_get_pure_path(self): + self.assertEqual(get_pure_path('file:///data/test'), '/data/test') + self.assertEqual(get_pure_path('/data/test'), '/data/test') + self.assertEqual(get_pure_path('hdfs:///data/test'), '/data/test') + self.assertEqual(get_pure_path('hdfs://fl.net/data/test'), '/data/test') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/two_pc/BUILD.bazel new file mode 100644 index 000000000..f8d1585b0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/BUILD.bazel @@ -0,0 +1,519 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "dataset_job_launcher_lib", + srcs = ["dataset_job_launcher.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "dataset_job_launcher_lib_test", + size = "small", + srcs = [ + "dataset_job_launcher_test.py", + ], + imports = ["../.."], + main = "dataset_job_launcher_test.py", + deps = [ + ":dataset_job_launcher_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "dataset_job_stopper_lib", + srcs = ["dataset_job_stopper.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:services_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "dataset_job_stopper_lib_test", + size = "small", + srcs = [ + "dataset_job_stopper_test.py", + ], + imports = ["../.."], + main = "dataset_job_stopper_test.py", + deps = [ + ":dataset_job_stopper_lib", + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "handlers_lib", + srcs = ["handlers.py"], + imports = ["../.."], + deps = [ + ":dataset_job_launcher_lib", + ":dataset_job_stage_launcher_lib", + ":dataset_job_stage_stopper_lib", + ":dataset_job_stopper_lib", + ":model_job_creator_lib", + ":model_job_group_creator_lib", + ":model_job_launcher_lib", + ":models_lib", + ":trusted_export_job_launcher_lib", + ":trusted_job_group_creator_lib", + ":trusted_job_launcher_lib", + ":trusted_job_stopper_lib", + ":workflow_state_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "handlers_lib_test", + size = "small", + srcs = [ + "handlers_test.py", + ], + imports = ["../.."], + main = "handlers_test.py", + deps = [ + ":handlers_lib", + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "model_job_creator_lib", + srcs = ["model_job_creator.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + ":trusted_job_group_creator_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "model_job_creator_lib_test", + size = "small", + srcs = [ + "model_job_creator_test.py", + ], + imports = ["../.."], + main = "model_job_creator_test.py", + deps = [ + ":model_job_creator_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "model_job_group_creator_lib", + srcs = ["model_job_group_creator.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "model_job_group_creator_lib_test", + size = "small", + srcs = [ + "model_job_group_creator_test.py", + ], + imports = ["../.."], + main = "model_job_group_creator_test.py", + deps = [ + ":model_job_group_creator_lib", + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "model_job_launcher_lib", + srcs = ["model_job_launcher.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "model_job_launcher_lib_test", + size = "small", + srcs = [ + "model_job_launcher_test.py", + ], + imports = ["../.."], + main = "model_job_launcher_test.py", + deps = [ + ":model_job_launcher_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_library( + name = "resource_manager_lib", + srcs = ["resource_manager.py"], + imports = ["../.."], + deps = ["//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto"], +) + +py_library( + name = "trusted_job_group_creator_lib", + srcs = ["trusted_job_group_creator.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:ticket_helper_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + 
"@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "trusted_job_group_creator_lib_test", + size = "small", + srcs = [ + "trusted_job_group_creator_test.py", + ], + imports = ["../.."], + main = "trusted_job_group_creator_test.py", + deps = [ + ":trusted_job_group_creator_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/review:common_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/rpc:client_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "workflow_state_controller_lib", + srcs = ["workflow_state_controller.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:service_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:workflow_controller_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "transaction_manager_lib", + srcs = [ + "transaction_manager.py", + ], + imports = ["../.."], + deps = [ + ":handlers_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "transaction_manager_lib_test", + size = "small", + srcs = [ + "transaction_manager_test.py", + ], + imports = ["../.."], + main = "transaction_manager_test.py", + deps = [ + ":transaction_manager_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "trusted_job_launcher_lib", + srcs = [ + "trusted_job_launcher.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/algorithm:fetcher_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:services_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:domain_name_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "trusted_job_launcher_lib_test", + size = "small", + srcs = [ + "trusted_job_launcher_test.py", + ], + imports = ["../.."], + main = "trusted_job_launcher_test.py", + deps = [ + ":trusted_job_launcher_lib", + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/algorithm:models_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:data_path_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "trusted_job_stopper_lib", + srcs = [ + "trusted_job_stopper.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:services_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:resource_manager_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "trusted_job_stopper_lib_test", + size = "small", + srcs = [ + "trusted_job_stopper_test.py", + ], + imports = ["../.."], + main = "trusted_job_stopper_test.py", + deps = [ + ":trusted_job_stopper_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "trusted_export_job_launcher_lib", + srcs = [ + "trusted_export_job_launcher.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:services_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "trusted_export_job_launcher_lib_test", + size = "small", + srcs = [ + "trusted_export_job_launcher_test.py", + ], + imports = ["../.."], + main = "trusted_export_job_launcher_test.py", + deps = [ + ":trusted_export_job_launcher_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "//web_console_v2/api/fedlearner_webconsole/tee:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/base_model:base_model_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "dataset_job_stage_launcher_lib", + srcs = ["dataset_job_stage_launcher.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:local_controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "dataset_job_stage_launcher_lib_test", + size = "small", + srcs = [ + 
"dataset_job_stage_launcher_test.py", + ], + imports = ["../.."], + main = "dataset_job_stage_launcher_test.py", + deps = [ + ":dataset_job_stage_launcher_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "dataset_job_stage_stopper_lib", + srcs = ["dataset_job_stage_stopper.py"], + imports = ["../.."], + deps = [ + ":resource_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:local_controllers_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "dataset_job_stage_stopper_lib_test", + size = "small", + srcs = [ + "dataset_job_stage_stopper_test.py", + ], + imports = ["../.."], + main = "dataset_job_stage_stopper_test.py", + deps = [ + ":dataset_job_stage_stopper_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/__init__.py b/web_console_v2/api/fedlearner_webconsole/two_pc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_launcher.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_launcher.py new file mode 100644 index 000000000..55db153e5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_launcher.py @@ -0,0 +1,76 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +from typing import Tuple + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.workflow.workflow_controller import start_workflow_locally +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobState +from fedlearner_webconsole.dataset.services import DatasetJobService +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData +from fedlearner_webconsole.two_pc.resource_manager import ResourceManager + + +class DatasetJobLauncher(ResourceManager): + + def __init__(self, session: Session, tid: str, data: TransactionData): + super().__init__(tid, data) + assert data.launch_dataset_job_data is not None + self._data = data.launch_dataset_job_data + self._session = session + + def prepare(self) -> Tuple[bool, str]: + dataset_job = self._session.query(DatasetJob).populate_existing().with_for_update(read=True).filter_by( + uuid=self._data.dataset_job_uuid).first() + if dataset_job is None: + message = f'failed to find dataset_job, uuid is {self._data.dataset_job_uuid}' + logging.warning(f'[dataset_job launch 2pc] prepare: {message}, uuid: {self._data.dataset_job_uuid}') + return False, message + if dataset_job.state not in [DatasetJobState.PENDING, DatasetJobState.RUNNING]: + message = f'dataset_job state check failed! current: {dataset_job.state.value}, ' \ + f'expected: {DatasetJobState.PENDING.value} or {DatasetJobState.RUNNING.value}, ' \ + f'uuid is {self._data.dataset_job_uuid}' + logging.warning(f'[dataset_job launch 2pc] prepare: {message}, uuid: {self._data.dataset_job_uuid}') + return False, message + if dataset_job.workflow is None: + message = f'failed to find workflow, uuid is {self._data.dataset_job_uuid}' + logging.warning(f'[dataset_job launch 2pc] prepare: {message}, uuid: {self._data.dataset_job_uuid}') + return False, message + return True, '' + + def commit(self) -> Tuple[bool, str]: + dataset_job = self._session.query(DatasetJob).populate_existing().with_for_update().filter_by( + uuid=self._data.dataset_job_uuid).first() + if dataset_job.state == DatasetJobState.RUNNING: + return True, '' + if dataset_job.state != DatasetJobState.PENDING: + message = f'dataset_job state check failed! current: {dataset_job.state.value}, ' \ + f'expected: {DatasetJobState.PENDING.value} or {DatasetJobState.RUNNING.value}, ' \ + f'uuid is {self._data.dataset_job_uuid}' + logging.warning(f'[dataset_job launch 2pc] commit: {message}, uuid: {self._data.dataset_job_uuid}') + return False, message + try: + start_workflow_locally(self._session, dataset_job.workflow) + except RuntimeError as e: + logging.error(f'[dataset_job launch 2pc] commit: {e}, uuid: {self._data.dataset_job_uuid}') + raise + DatasetJobService(self._session).start_dataset_job(dataset_job) + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[dataset_job launch 2pc] abort') + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_launcher_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_launcher_test.py new file mode 100644 index 000000000..b28b9367a --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_launcher_test.py @@ -0,0 +1,163 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
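The `prepare`/`commit`/`abort` trio above implements the `ResourceManager` contract driven by the two-phase-commit `TransactionManager`. A hedged sketch of that control flow (illustration only; the actual manager in `two_pc/transaction_manager.py` coordinates remote participants over RPC rather than looping locally):

```python
# Hypothetical local driver for the ResourceManager contract, for illustration.
from typing import List, Tuple

from fedlearner_webconsole.two_pc.resource_manager import ResourceManager


def run_two_phase(managers: List[ResourceManager]) -> Tuple[bool, str]:
    # Phase 1: every participant must vote yes before anything is committed.
    for manager in managers:
        succeeded, message = manager.prepare()
        if not succeeded:
            # A single failed vote aborts the whole transaction.
            for voter in managers:
                voter.abort()
            return False, message
    # Phase 2: all votes passed, so commit on every participant.
    for manager in managers:
        succeeded, message = manager.commit()
        if not succeeded:
            return False, message
    return True, ''
```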
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, MagicMock, ANY + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobKind, DatasetJobState +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.two_pc.dataset_job_launcher import DatasetJobLauncher +from fedlearner_webconsole.proto.two_pc_pb2 import LaunchDatasetJobData, \ + TransactionData + + +class DatasetJobLauncherTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _DATASET_JOB_ID = 1 + _WORKFLOW_ID = 1 + _DATASET_JOB_UUID = 'test_uuid' + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test') + session.add(project) + workflow = Workflow(id=self._WORKFLOW_ID, uuid=self._DATASET_JOB_UUID) + session.add(workflow) + session.commit() + launch_dataset_job_data = LaunchDatasetJobData(dataset_job_uuid=self._DATASET_JOB_UUID) + self.data = TransactionData(launch_dataset_job_data=launch_dataset_job_data) + + def test_prepare_no_dataset_job(self): + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_illegal_state(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.FAILED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_no_related_workflow(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=0, + state=DatasetJobState.PENDING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_successfully(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.PENDING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.RUNNING 
+ session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + @patch('fedlearner_webconsole.two_pc.dataset_job_launcher.start_workflow_locally') + def test_commit(self, mock_start_workflow_locally: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.RUNNING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + session.commit() + mock_start_workflow_locally.assert_not_called() + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.RUNNING) + self.assertIsNone(dataset_job.started_at) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.PENDING + session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + workflow = session.query(Workflow).get(self._WORKFLOW_ID) + mock_start_workflow_locally.assert_called_once_with(ANY, workflow) + session.commit() + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.RUNNING) + self.assertIsNotNone(dataset_job.started_at) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.SUCCEEDED + session.commit() + with db.session_scope() as session: + creator = DatasetJobLauncher(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertFalse(flag) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_launcher.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_launcher.py new file mode 100644 index 000000000..feb4e092d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_launcher.py @@ -0,0 +1,76 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +from typing import Tuple + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.dataset.models import DatasetJobStage, DatasetJobState +from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData +from fedlearner_webconsole.two_pc.resource_manager import ResourceManager + + +class DatasetJobStageLauncher(ResourceManager): + + def __init__(self, session: Session, tid: str, data: TransactionData): + super().__init__(tid, data) + assert data.launch_dataset_job_stage_data is not None + self._data = data.launch_dataset_job_stage_data + self._session = session + + def prepare(self) -> Tuple[bool, str]: + dataset_job_stage: DatasetJobStage = self._session.query(DatasetJobStage).filter_by( + uuid=self._data.dataset_job_stage_uuid).first() + if dataset_job_stage is None: + message = 'failed to find dataset_job_stage' + logging.warning( + f'[dataset_job_stage launch 2pc] prepare: {message}, uuid: {self._data.dataset_job_stage_uuid}') + return False, message + if dataset_job_stage.state not in [DatasetJobState.PENDING, DatasetJobState.RUNNING]: + message = f'dataset_job_stage state check failed! current: {dataset_job_stage.state.value}, ' \ + f'expected: {DatasetJobState.PENDING.value} or {DatasetJobState.RUNNING.value}' + logging.warning( + f'[dataset_job_stage launch 2pc] prepare: {message}, uuid: {self._data.dataset_job_stage_uuid}') + return False, message + if dataset_job_stage.workflow is None: + message = 'failed to find workflow' + logging.warning( + f'[dataset_job_stage launch 2pc] prepare: {message}, uuid: {self._data.dataset_job_stage_uuid}') + return False, message + return True, '' + + def commit(self) -> Tuple[bool, str]: + # Use an exclusive (X) lock here: the query waits while another transaction holds the lock, until it is released or the wait times out. + # We don't use a shared (S) lock, since upgrading it later may raise a deadlock exception. + dataset_job_stage: DatasetJobStage = self._session.query(DatasetJobStage).populate_existing().with_for_update( + ).filter_by(uuid=self._data.dataset_job_stage_uuid).first() + if dataset_job_stage.state == DatasetJobState.RUNNING: + return True, '' + if dataset_job_stage.state != DatasetJobState.PENDING: + message = f'dataset_job_stage state check failed! current: {dataset_job_stage.state.value}, ' \ + f'expected: {DatasetJobState.PENDING.value} or {DatasetJobState.RUNNING.value}' + logging.warning( + f'[dataset_job_stage launch 2pc] commit: {message}, uuid: {self._data.dataset_job_stage_uuid}') + return False, message + try: + DatasetJobStageLocalController(session=self._session).start(dataset_job_stage) + except RuntimeError as e: + logging.error(f'[dataset_job_stage launch 2pc] commit: {e}, uuid: {self._data.dataset_job_stage_uuid}') + raise + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[dataset_job_stage launch 2pc] abort') + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_launcher_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_launcher_test.py new file mode 100644 index 000000000..a23e3e97b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_launcher_test.py @@ -0,0 +1,202 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
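As the comment in `commit()` above notes, concurrent launches of the same stage are serialized with an exclusive row lock. A minimal sketch of the same SQLAlchemy `SELECT ... FOR UPDATE` pattern, assuming only the models already imported in this diff:

```python
# populate_existing() forces a refresh of any already-loaded row, and
# with_for_update() holds an exclusive (X) lock until the session commits,
# so concurrent writers block instead of racing on the state transition.
from sqlalchemy.orm import Session

from fedlearner_webconsole.dataset.models import DatasetJobStage


def lock_stage_for_update(session: Session, uuid: str) -> DatasetJobStage:
    return session.query(DatasetJobStage) \
        .populate_existing() \
        .with_for_update() \
        .filter_by(uuid=uuid) \
        .first()
```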
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, MagicMock + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobKind, DatasetJobStage, DatasetJobState +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.two_pc.dataset_job_stage_launcher import DatasetJobStageLauncher +from fedlearner_webconsole.proto.two_pc_pb2 import LaunchDatasetJobStageData, \ + TransactionData + + +class DatasetJobStageLauncherTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _DATASET_JOB_ID = 1 + _DATA_BATCH_ID = 1 + _DATASET_JOB_STAGE_ID = 1 + _WORKFLOW_ID = 1 + _DATASET_JOB_UUID = 'dataset_job uuid' + _DATASET_JOB_STAGE_UUID = 'dataset_job_stage uuid' + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test') + session.add(project) + workflow = Workflow(id=self._WORKFLOW_ID, uuid=self._DATASET_JOB_UUID) + session.add(workflow) + session.commit() + launch_dataset_job_stage_data = LaunchDatasetJobStageData(dataset_job_stage_uuid=self._DATASET_JOB_STAGE_UUID) + self.data = TransactionData(launch_dataset_job_stage_data=launch_dataset_job_stage_data) + + def test_prepare_no_dataset_job_stage(self): + with db.session_scope() as session: + creator = DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_illegal_state(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=0, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.FAILED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + dataset_job_stage = DatasetJobStage(id=self._DATASET_JOB_STAGE_ID, + project_id=self._PROJECT_ID, + dataset_job_id=self._DATASET_JOB_ID, + uuid=self._DATASET_JOB_STAGE_UUID, + workflow_id=self._WORKFLOW_ID, + data_batch_id=self._DATA_BATCH_ID, + state=DatasetJobState.FAILED) + session.add(dataset_job_stage) + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_no_related_workflow(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=0, + state=DatasetJobState.PENDING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + dataset_job_stage = DatasetJobStage(id=self._DATASET_JOB_STAGE_ID, + project_id=self._PROJECT_ID, + dataset_job_id=self._DATASET_JOB_ID, + uuid=self._DATASET_JOB_STAGE_UUID, + workflow_id=100, + data_batch_id=self._DATA_BATCH_ID, + state=DatasetJobState.PENDING) + session.add(dataset_job_stage) + session.commit() + with db.session_scope() as session: + creator = 
DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_successfully(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.PENDING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + dataset_job_stage = DatasetJobStage(id=self._DATASET_JOB_STAGE_ID, + project_id=self._PROJECT_ID, + dataset_job_id=self._DATASET_JOB_ID, + uuid=self._DATASET_JOB_STAGE_UUID, + workflow_id=self._WORKFLOW_ID, + data_batch_id=self._DATA_BATCH_ID, + state=DatasetJobState.PENDING) + session.add(dataset_job_stage) + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.RUNNING + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + dataset_job_stage.state = DatasetJobState.RUNNING + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + @patch('fedlearner_webconsole.two_pc.dataset_job_stage_launcher.DatasetJobStageLocalController.start') + def test_commit(self, mock_start: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.RUNNING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + dataset_job_stage = DatasetJobStage(id=self._DATASET_JOB_STAGE_ID, + project_id=self._PROJECT_ID, + dataset_job_id=self._DATASET_JOB_ID, + uuid=self._DATASET_JOB_STAGE_UUID, + workflow_id=self._WORKFLOW_ID, + data_batch_id=self._DATA_BATCH_ID, + state=DatasetJobState.RUNNING) + session.add(dataset_job_stage) + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + session.commit() + mock_start.assert_not_called() + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.RUNNING) + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + self.assertEqual(dataset_job_stage.state, DatasetJobState.RUNNING) + self.assertIsNone(dataset_job_stage.started_at) + + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.PENDING + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + dataset_job_stage.state = DatasetJobState.PENDING + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageLauncher(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + mock_start.assert_called_once_with(dataset_job_stage) + session.commit() + + with db.session_scope() as session: + dataset_job = 
session.query(DatasetJob).get(self._DATASET_JOB_ID)
+            dataset_job.state = DatasetJobState.SUCCEEDED
+            dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID)
+            dataset_job_stage.state = DatasetJobState.SUCCEEDED
+            session.commit()
+        with db.session_scope() as session:
+            creator = DatasetJobStageLauncher(session, tid='1', data=self.data)
+            flag, _ = creator.commit()
+            self.assertFalse(flag)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_stopper.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_stopper.py
new file mode 100644
index 000000000..45aa7f516
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_stopper.py
@@ -0,0 +1,68 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from typing import Tuple
+
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.dataset.models import DatasetJobStage, DatasetJobState
+from fedlearner_webconsole.dataset.local_controllers import DatasetJobStageLocalController
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData
+from fedlearner_webconsole.two_pc.resource_manager import ResourceManager
+
+
+class DatasetJobStageStopper(ResourceManager):
+
+    def __init__(self, session: Session, tid: str, data: TransactionData):
+        super().__init__(tid, data)
+        assert data.stop_dataset_job_stage_data is not None
+        self._data = data.stop_dataset_job_stage_data
+        self._session = session
+
+    def prepare(self) -> Tuple[bool, str]:
+        dataset_job_stage: DatasetJobStage = self._session.query(DatasetJobStage).filter_by(
+            uuid=self._data.dataset_job_stage_uuid).first()
+        if dataset_job_stage is None:
+            message = 'dataset_job_stage not found'
+            logging.warning(
+                f'[dataset_job_stage stop 2pc] prepare: {message}, uuid: {self._data.dataset_job_stage_uuid}')
+            return False, message
+        if dataset_job_stage.state in [DatasetJobState.SUCCEEDED, DatasetJobState.FAILED]:
+            message = 'dataset_job_stage state check failed! ' \
+                      f'current state {dataset_job_stage.state.value} cannot stop'
+            logging.warning(
+                f'[dataset_job_stage stop 2pc] prepare: {message}, uuid: {self._data.dataset_job_stage_uuid}')
+            return False, message
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        # Use an exclusive (X) lock here; the query keeps waiting if it finds another
+        # lock, until that lock is released or the wait times out.
+        # We don't use a shared (S) lock, as it may raise a deadlock exception.
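+        # populate_existing() forces this query to refresh any instance already
+        # cached in the session, and with_for_update() emits SELECT ... FOR UPDATE,
+        # so the state checked below is read under the row lock.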
+ dataset_job_stage: DatasetJobStage = self._session.query(DatasetJobStage).populate_existing().with_for_update( + ).filter_by(uuid=self._data.dataset_job_stage_uuid).first() + # allow stop to stop state transfer + if dataset_job_stage.state == DatasetJobState.STOPPED: + return True, '' + try: + DatasetJobStageLocalController(session=self._session).stop(dataset_job_stage) + except RuntimeError as e: + logging.error(f'[dataset_job_stage stop 2pc] commit: {e}, uuid: {self._data.dataset_job_stage_uuid}') + raise + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[dataset_job_stage stop 2pc] abort') + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_stopper_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_stopper_test.py new file mode 100644 index 000000000..9b8cb8f92 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stage_stopper_test.py @@ -0,0 +1,156 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch, MagicMock + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobKind, DatasetJobState, DatasetJobStage +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.two_pc.dataset_job_stage_stopper import DatasetJobStageStopper +from fedlearner_webconsole.proto.two_pc_pb2 import StopDatasetJobStageData, \ + TransactionData + + +class DatasetJobStageStopperTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _DATASET_JOB_ID = 1 + _DATA_BATCH_ID = 1 + _DATASET_JOB_STAGE_ID = 1 + _WORKFLOW_ID = 1 + _DATASET_JOB_UUID = 'dataset_job uuid' + _DATASET_JOB_STAGE_UUID = 'dataset_job_stage uuid' + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test') + session.add(project) + workflow = Workflow(id=self._WORKFLOW_ID, uuid=self._DATASET_JOB_UUID) + session.add(workflow) + session.commit() + stop_dataset_job_stage_data = StopDatasetJobStageData(dataset_job_stage_uuid=self._DATASET_JOB_STAGE_UUID) + self.data = TransactionData(stop_dataset_job_stage_data=stop_dataset_job_stage_data) + + def test_prepare_no_dataset_job_stage(self): + with db.session_scope() as session: + creator = DatasetJobStageStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_state(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.FAILED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + dataset_job_stage = 
DatasetJobStage(id=self._DATASET_JOB_STAGE_ID, + project_id=self._PROJECT_ID, + dataset_job_id=self._DATASET_JOB_ID, + uuid=self._DATASET_JOB_STAGE_UUID, + workflow_id=self._WORKFLOW_ID, + data_batch_id=self._DATA_BATCH_ID, + state=DatasetJobState.FAILED) + session.add(dataset_job_stage) + session.commit() + # test prepare state failed + with db.session_scope() as session: + creator = DatasetJobStageStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + # test prepare state succeeded + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.PENDING + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + dataset_job_stage.state = DatasetJobState.PENDING + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + # test prepare state stop to stop + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.STOPPED + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + dataset_job_stage.state = DatasetJobState.STOPPED + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + @patch('fedlearner_webconsole.two_pc.dataset_job_stage_stopper.DatasetJobStageLocalController.stop') + def test_commit_state(self, mock_stop: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.STOPPED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + dataset_job_stage = DatasetJobStage(id=self._DATASET_JOB_STAGE_ID, + project_id=self._PROJECT_ID, + dataset_job_id=self._DATASET_JOB_ID, + uuid=self._DATASET_JOB_STAGE_UUID, + workflow_id=self._WORKFLOW_ID, + data_batch_id=self._DATA_BATCH_ID, + state=DatasetJobState.STOPPED) + session.add(dataset_job_stage) + session.commit() + + # test commit state stop to stop + with db.session_scope() as session: + creator = DatasetJobStageStopper(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + mock_stop.assert_not_called() + session.flush() + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.STOPPED) + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + self.assertEqual(dataset_job_stage.state, DatasetJobState.STOPPED) + self.assertIsNone(dataset_job_stage.finished_at) + + # test commit state succeeded + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.PENDING + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + dataset_job_stage.state = DatasetJobState.PENDING + session.commit() + with db.session_scope() as session: + creator = DatasetJobStageStopper(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + dataset_job_stage = session.query(DatasetJobStage).get(self._DATASET_JOB_STAGE_ID) + mock_stop.assert_called_once_with(dataset_job_stage) + 
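The assertions above depend on commit being idempotent: a stage already in STOPPED state commits again successfully without invoking the local controller. As a minimal sketch, this is how a caller would drive the stopper through both phases with one session per phase; in the real system run_two_pc_action performs this sequencing with idempotency checks, so the helper below is illustrative only:

```python
from fedlearner_webconsole.db import db
from fedlearner_webconsole.two_pc.dataset_job_stage_stopper import DatasetJobStageStopper

def stop_stage_two_phase(data, tid: str = 'tx-1'):
    # Phase 1: prepare only validates; nothing is persisted.
    with db.session_scope() as session:
        ok, message = DatasetJobStageStopper(session, tid=tid, data=data).prepare()
        if not ok:
            return False, message
    # Phase 2: commit locks the row, stops the stage, and persists the change.
    with db.session_scope() as session:
        ok, message = DatasetJobStageStopper(session, tid=tid, data=data).commit()
        session.commit()
        return ok, message
```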
+ +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stopper.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stopper.py new file mode 100644 index 000000000..3c014e14f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stopper.py @@ -0,0 +1,72 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Tuple + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.workflow.workflow_controller import stop_workflow_locally +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobState +from fedlearner_webconsole.dataset.services import DatasetJobService +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData +from fedlearner_webconsole.two_pc.resource_manager import ResourceManager + + +class DatasetJobStopper(ResourceManager): + + def __init__(self, session: Session, tid: str, data: TransactionData): + super().__init__(tid, data) + assert data.stop_dataset_job_data is not None + self._data = data.stop_dataset_job_data + self._session = session + + def prepare(self) -> Tuple[bool, str]: + dataset_job = self._session.query(DatasetJob).populate_existing().with_for_update(read=True).filter_by( + uuid=self._data.dataset_job_uuid).first() + if dataset_job is None: + message = 'dataset_job not found' + logging.warning(f'[dataset_job stop 2pc] prepare: {message}, uuid: {self._data.dataset_job_uuid}') + return False, message + if dataset_job.state in [DatasetJobState.SUCCEEDED, DatasetJobState.FAILED]: + message = f'dataset_job state check failed! 
current state {dataset_job.state.value} cannot stop, ' \ + f'expected: {DatasetJobState.PENDING.value}, {DatasetJobState.RUNNING.value} or ' \ + f'{DatasetJobState.STOPPED.value}, uuid is {self._data.dataset_job_uuid}' + logging.warning(f'[dataset_job stop 2pc] prepare: {message}, uuid: {self._data.dataset_job_uuid}') + return False, message + return True, '' + + def commit(self) -> Tuple[bool, str]: + dataset_job = self._session.query(DatasetJob).populate_existing().with_for_update().filter_by( + uuid=self._data.dataset_job_uuid).first() + # allow stop to stop state transfer + if dataset_job.state == DatasetJobState.STOPPED: + return True, '' + try: + if dataset_job.workflow is not None: + stop_workflow_locally(self._session, dataset_job.workflow) + else: + logging.info(f'[dataset_job stop 2pc] commit: workflow not found, just skip, ' \ + f'uuid: {self._data.dataset_job_uuid}') + except RuntimeError as e: + logging.error(f'[dataset_job stop 2pc] commit: {e}, uuid: {self._data.dataset_job_uuid}') + raise + DatasetJobService(self._session).finish_dataset_job(dataset_job=dataset_job, + finish_state=DatasetJobState.STOPPED) + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[dataset_job stop 2pc] abort') + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stopper_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stopper_test.py new file mode 100644 index 000000000..5c385552f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/dataset_job_stopper_test.py @@ -0,0 +1,152 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
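The tests below drive DatasetJobStopper with a TransactionData that carries only the job uuid in its stop_dataset_job_data field. The minimal payload a caller builds looks like this ('test_uuid' mirrors the fixture value used below):

```python
from fedlearner_webconsole.proto.two_pc_pb2 import StopDatasetJobData, TransactionData

# Minimal 2PC payload for stopping a dataset job.
data = TransactionData(stop_dataset_job_data=StopDatasetJobData(dataset_job_uuid='test_uuid'))
```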
+# + +import unittest +from unittest.mock import patch, MagicMock, ANY + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.dataset.models import DatasetJob, DatasetJobKind, DatasetJobState +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.two_pc.dataset_job_stopper import DatasetJobStopper +from fedlearner_webconsole.proto.two_pc_pb2 import StopDatasetJobData, \ + TransactionData + + +class DatasetJobStopperTest(NoWebServerTestCase): + _PROJECT_ID = 1 + _DATASET_JOB_ID = 1 + _WORKFLOW_ID = 1 + _DATASET_JOB_UUID = 'test_uuid' + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(id=self._PROJECT_ID, name='test') + session.add(project) + workflow = Workflow(id=self._WORKFLOW_ID, uuid=self._DATASET_JOB_UUID) + session.add(workflow) + session.commit() + stop_dataset_job_data = StopDatasetJobData(dataset_job_uuid=self._DATASET_JOB_UUID) + self.data = TransactionData(stop_dataset_job_data=stop_dataset_job_data) + + def test_prepare_no_dataset_job(self): + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + def test_prepare_state(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.FAILED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + # test prepare state failed + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertFalse(flag) + + # test prepare state succeeded + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.PENDING + session.commit() + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + # test prepare state stop to stop + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.STOPPED + session.commit() + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.prepare() + self.assertTrue(flag) + + def test_commit_no_workflow(self): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + state=DatasetJobState.RUNNING, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + session.flush() + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.STOPPED) + + @patch('fedlearner_webconsole.two_pc.dataset_job_stopper.stop_workflow_locally') + def test_commit_state(self, mock_stop_workflow_locally: MagicMock): + with db.session_scope() as session: + dataset_job = DatasetJob(id=self._DATASET_JOB_ID, + 
project_id=self._PROJECT_ID, + input_dataset_id=1, + output_dataset_id=2, + uuid=self._DATASET_JOB_UUID, + workflow_id=self._WORKFLOW_ID, + state=DatasetJobState.STOPPED, + kind=DatasetJobKind.DATA_ALIGNMENT) + session.add(dataset_job) + session.commit() + + # test commit state stop to stop + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + mock_stop_workflow_locally.assert_not_called() + session.flush() + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.STOPPED) + self.assertIsNone(dataset_job.finished_at) + + # test commit state succeeded + with db.session_scope() as session: + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + dataset_job.state = DatasetJobState.PENDING + session.commit() + with db.session_scope() as session: + creator = DatasetJobStopper(session, tid='1', data=self.data) + flag, _ = creator.commit() + self.assertTrue(flag) + workflow = session.query(Workflow).get(self._WORKFLOW_ID) + mock_stop_workflow_locally.assert_called_once_with(ANY, workflow) + session.flush() + dataset_job = session.query(DatasetJob).get(self._DATASET_JOB_ID) + self.assertEqual(dataset_job.state, DatasetJobState.STOPPED) + self.assertIsNotNone(dataset_job.finished_at) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/handlers.py b/web_console_v2/api/fedlearner_webconsole/two_pc/handlers.py new file mode 100644 index 000000000..b218341e7 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/handlers.py @@ -0,0 +1,85 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
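The handler below selects a ResourceManager for each TwoPcType with an if/elif chain. For readers skimming the mapping, here is the same dispatch written as a table-driven registry; this is an illustrative alternative, not the project's code, and only the dataset-related entries are shown:

```python
# Illustrative registry-based dispatch equivalent to the if/elif chain below.
from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType
from fedlearner_webconsole.two_pc.dataset_job_launcher import DatasetJobLauncher
from fedlearner_webconsole.two_pc.dataset_job_stopper import DatasetJobStopper
from fedlearner_webconsole.two_pc.dataset_job_stage_launcher import DatasetJobStageLauncher
from fedlearner_webconsole.two_pc.dataset_job_stage_stopper import DatasetJobStageStopper

_RESOURCE_MANAGERS = {
    TwoPcType.LAUNCH_DATASET_JOB: DatasetJobLauncher,
    TwoPcType.STOP_DATASET_JOB: DatasetJobStopper,
    TwoPcType.LAUNCH_DATASET_JOB_STAGE: DatasetJobStageLauncher,
    TwoPcType.STOP_DATASET_JOB_STAGE: DatasetJobStageStopper,
    # ... the remaining TwoPcType entries map the same way
}

def make_resource_manager(session, tid, data, two_pc_type):
    manager_class = _RESOURCE_MANAGERS.get(two_pc_type)
    if manager_class is None:
        raise NotImplementedError()
    # Every manager shares the (session, tid, data) constructor signature.
    return manager_class(session=session, tid=tid, data=data)
```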
+ +# coding: utf-8 +from typing import Tuple + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction, TransactionData +from fedlearner_webconsole.two_pc.dataset_job_stage_launcher import DatasetJobStageLauncher +from fedlearner_webconsole.two_pc.dataset_job_stage_stopper import DatasetJobStageStopper +from fedlearner_webconsole.two_pc.model_job_creator import ModelJobCreator +from fedlearner_webconsole.two_pc.trusted_export_job_launcher import TrustedExportJobLauncher +from fedlearner_webconsole.two_pc.workflow_state_controller import WorkflowStateController +from fedlearner_webconsole.two_pc.model_job_group_creator import ModelJobGroupCreator +from fedlearner_webconsole.two_pc.model_job_launcher import ModelJobLauncher +from fedlearner_webconsole.two_pc.dataset_job_launcher import DatasetJobLauncher +from fedlearner_webconsole.two_pc.dataset_job_stopper import DatasetJobStopper +from fedlearner_webconsole.two_pc.trusted_job_group_creator import TrustedJobGroupCreator +from fedlearner_webconsole.two_pc.trusted_job_launcher import TrustedJobLauncher +from fedlearner_webconsole.two_pc.trusted_job_stopper import TrustedJobStopper +from fedlearner_webconsole.two_pc.models import Transaction, TransactionState + + +def run_two_pc_action(session: Session, tid: str, two_pc_type: TwoPcType, action: TwoPcAction, + data: TransactionData) -> Tuple[bool, str]: + # Checks idempotent + trans = session.query(Transaction).filter_by(uuid=tid).first() + if trans is None: + trans = Transaction( + uuid=tid, + state=TransactionState.NEW, + ) + trans.set_type(two_pc_type) + session.add(trans) + executed, result, message = trans.check_idempotent(action) + if executed: + return result, message + + rm = None + if two_pc_type == TwoPcType.CREATE_MODEL_JOB: + rm = ModelJobCreator(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.CONTROL_WORKFLOW_STATE: + rm = WorkflowStateController(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.CREATE_MODEL_JOB_GROUP: + rm = ModelJobGroupCreator(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.LAUNCH_MODEL_JOB: + rm = ModelJobLauncher(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.LAUNCH_DATASET_JOB: + rm = DatasetJobLauncher(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.STOP_DATASET_JOB: + rm = DatasetJobStopper(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.CREATE_TRUSTED_JOB_GROUP: + rm = TrustedJobGroupCreator(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.LAUNCH_TRUSTED_JOB: + rm = TrustedJobLauncher(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.STOP_TRUSTED_JOB: + rm = TrustedJobStopper(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.LAUNCH_TRUSTED_EXPORT_JOB: + rm = TrustedExportJobLauncher(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.LAUNCH_DATASET_JOB_STAGE: + rm = DatasetJobStageLauncher(session=session, tid=tid, data=data) + elif two_pc_type == TwoPcType.STOP_DATASET_JOB_STAGE: + rm = DatasetJobStageStopper(session=session, tid=tid, data=data) + if rm is None: + raise NotImplementedError() + + succeeded = False + try: + if trans.is_valid_action(action): + succeeded, message = rm.run_two_pc(action) + except Exception as e: # pylint: disable=broad-except + message = str(e) + return trans.update(action, succeeded, message) diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/handlers_test.py 
b/web_console_v2/api/fedlearner_webconsole/two_pc/handlers_test.py new file mode 100644 index 000000000..3734dabd6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/handlers_test.py @@ -0,0 +1,99 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from unittest import mock +from unittest.mock import patch, MagicMock + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction, TransactionData +from fedlearner_webconsole.two_pc.handlers import run_two_pc_action +from fedlearner_webconsole.two_pc.models import Transaction, TransactionState + + +class HandlersTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.two_pc.handlers.ModelJobCreator') + def test_run_two_pc_action_new_transaction(self, mock_model_job_creator_class): + mock_model_job_creator = MagicMock() + mock_model_job_creator.run_two_pc = MagicMock(return_value=(True, 'aloha')) + mock_model_job_creator_class.return_value = mock_model_job_creator + + tid = '123' + tdata = TransactionData() + with db.session_scope() as session: + succeeded, message = run_two_pc_action(session=session, + tid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=tdata) + session.commit() + self.assertTrue(succeeded) + self.assertEqual(message, 'aloha') + mock_model_job_creator_class.assert_called_once_with(tid=tid, data=tdata, session=mock.ANY) + mock_model_job_creator.run_two_pc.assert_called_once_with(TwoPcAction.PREPARE) + with db.session_scope() as session: + trans: Transaction = session.query(Transaction).filter_by(uuid=tid).first() + self.assertEqual(trans.get_type(), TwoPcType.CREATE_MODEL_JOB) + self.assertEqual(trans.state, TransactionState.PREPARE_SUCCEEDED) + self.assertEqual(trans.message, 'aloha') + + @patch('fedlearner_webconsole.two_pc.handlers.ModelJobCreator') + def test_run_two_pc_action_redundant_action_idempotent(self, mock_model_job_creator_class): + tid = '234234' + with db.session_scope() as session: + trans = Transaction(uuid=tid, state=TransactionState.PREPARE_SUCCEEDED, message='prepared') + session.add(trans) + session.commit() + with db.session_scope() as session: + succeeded, message = run_two_pc_action(session=session, + tid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=TransactionData()) + self.assertTrue(succeeded) + self.assertEqual(message, 'prepared') + mock_model_job_creator_class.assert_not_called() + + @patch('fedlearner_webconsole.two_pc.handlers.ModelJobCreator') + def test_run_two_pc_action_exception(self, mock_model_job_creator_class): + mock_model_job_creator = MagicMock() + mock_model_job_creator.run_two_pc = MagicMock(side_effect=RuntimeError('Unknown error')) + mock_model_job_creator_class.return_value = mock_model_job_creator + + tid = '123234234' + tdata = TransactionData() + with db.session_scope() 
as session: + trans = Transaction(uuid=tid, state=TransactionState.PREPARE_SUCCEEDED, message='prepared') + session.add(trans) + session.commit() + succeeded, message = run_two_pc_action(session=session, + tid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.COMMIT, + data=tdata) + session.commit() + self.assertFalse(succeeded) + self.assertIn('Unknown error', message) + mock_model_job_creator.run_two_pc.assert_called_once_with(TwoPcAction.COMMIT) + with db.session_scope() as session: + trans: Transaction = session.query(Transaction).filter_by(uuid=tid).first() + self.assertEqual(trans.state, TransactionState.INVALID) + self.assertIn('Unknown error', trans.message) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_creator.py b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_creator.py new file mode 100644 index 000000000..ce2618945 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_creator.py @@ -0,0 +1,143 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import logging +from typing import Tuple, Optional + +from sqlalchemy.orm import Session +from fedlearner_webconsole.dataset.models import Dataset, ResourceState +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobType, ModelJobGroup +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData +from fedlearner_webconsole.two_pc.resource_manager import ResourceManager + + +class ModelJobCreator(ResourceManager): + """Create model job without configuration""" + + def __init__(self, session: Session, tid: str, data: TransactionData): + super().__init__(tid, data) + assert data.create_model_job_data is not None + self._data = data.create_model_job_data + self._session = session + + def _check_model_job(self) -> Tuple[bool, str]: + model_job_name = self._data.model_job_name + model_job = self._session.query(ModelJob).filter_by(name=model_job_name).first() + if model_job: + message = f'model job {model_job_name} already exist' + logging.info('[model-job-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def _check_model_job_group(self) -> Tuple[bool, str]: + model_job_group_name = self._data.group_name + # there is no model group for eval/predict model job + if model_job_group_name: + model_job_group = self._session.query(ModelJobGroup).filter_by(name=model_job_group_name).first() + if model_job_group is None: + message = f'model group {model_job_group_name} not exists' + logging.info('[model-job-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def _check_model(self) -> Tuple[bool, str]: + model_uuid = self._data.model_uuid + # there is no model for training model job + if model_uuid: + model = 
self._session.query(Model).filter_by(uuid=model_uuid).first() + if model is None: + message = f'model {self._data.model_uuid} not found' + logging.info('[model-job-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def _check_dataset(self) -> Tuple[bool, str]: + if self._data.dataset_uuid: + dataset: Dataset = self._session.query(Dataset).filter_by(uuid=self._data.dataset_uuid).first() + if not dataset: + message = f'dataset {self._data.dataset_uuid} not exists' + logging.info('[model-job-2pc] prepare failed: %s', message) + return False, message + if dataset.get_frontend_state() != ResourceState.SUCCEEDED: + message = f'dataset {self._data.dataset_uuid} is not succeeded' + logging.info('[model-group-2pc] prepare failed: %s', message) + return False, message + if not dataset.is_published: + message = f'dataset {self._data.dataset_uuid} is not published' + logging.info('[model-job-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def prepare(self) -> Tuple[bool, str]: + check_fn_list = [self._check_model_job, self._check_model_job_group, self._check_model, self._check_dataset] + for check_fn in check_fn_list: + succeeded, message = check_fn() + if not succeeded: + return False, message + logging.info('[model-job-2pc] prepare succeeded') + return True, '' + + def _get_model_job_group_id(self) -> Optional[int]: + if self._data.group_name: + model_job_group = self._session.query(ModelJobGroup).filter_by(name=self._data.group_name).first() + return model_job_group.id + return None + + def _get_model_id(self) -> Optional[int]: + if self._data.model_uuid: + model = self._session.query(Model).filter_by(uuid=self._data.model_uuid).first() + return model.id + return None + + def _get_project_id(self) -> int: + project = self._session.query(Project).filter_by(name=self._data.project_name).first() + return project.id + + def _get_dataset_id(self) -> Optional[int]: + if self._data.dataset_uuid: + dataset = self._session.query(Dataset).filter_by(uuid=self._data.dataset_uuid).first() + return dataset.id + return None + + def commit(self) -> Tuple[bool, str]: + model_job_group_id = self._get_model_job_group_id() + model_id = self._get_model_id() + project_id = self._get_project_id() + dataset_id = self._get_dataset_id() + coordinator = ParticipantService( + self._session).get_participant_by_pure_domain_name(pure_domain_name=self._data.coordinator_pure_domain_name) + coordinator_id = None + if coordinator is not None: + coordinator_id = coordinator.id + model_job = ModelJob(name=self._data.model_job_name, + model_job_type=ModelJobType[self._data.model_job_type], + project_id=project_id, + uuid=self._data.model_job_uuid, + group_id=model_job_group_id, + dataset_id=dataset_id, + model_id=model_id, + workflow_uuid=self._data.workflow_uuid, + algorithm_type=self._data.algorithm_type, + coordinator_id=coordinator_id) + self._session.add(model_job) + logging.info('[model-job-2pc] commit succeeded') + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[model-job-2pc] abort') + # As we did not preserve any resource, do nothing + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_creator_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_creator_test.py new file mode 100644 index 000000000..5d4b6ae77 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_creator_test.py @@ -0,0 +1,124 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import Mock, patch + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset, ResourceState +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.two_pc.model_job_creator import ModelJobCreator +from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobType, ModelJobGroup +from fedlearner_webconsole.proto.two_pc_pb2 import CreateModelJobData, \ + TransactionData + + +class ModelJobCreatorTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + project = Project(name='project') + model = Model(name='model', uuid='model-uuid') + model_job_group = ModelJobGroup(name='group') + dataset = Dataset(name='dataset', uuid='dataset-uuid', is_published=True) + session.add_all([project, model, model_job_group, dataset]) + session.commit() + create_model_job_data = CreateModelJobData(model_job_name='model-job', + model_job_type=ModelJobType.EVALUATION.name, + model_job_uuid='model-job-uuid', + workflow_uuid='workflow-uuid', + group_name=model_job_group.name, + algorithm_type=AlgorithmType.NN_VERTICAL.name, + model_uuid=model.uuid, + project_name=project.name, + dataset_uuid='dataset-uuid') + self.data = TransactionData(create_model_job_data=create_model_job_data) + + @patch('fedlearner_webconsole.dataset.models.Dataset.get_frontend_state') + def test_prepare(self, mock_get_frontend_state: Mock): + mock_get_frontend_state.return_value = ResourceState.SUCCEEDED + with db.session_scope() as session: + creator = ModelJobCreator(session, tid='12', data=self.data) + flag, message = creator.prepare() + self.assertTrue(flag) + # fail due to model not found + self.data.create_model_job_data.model_uuid = 'uuid' + flag, message = creator.prepare() + self.assertFalse(flag) + self.assertEqual(message, 'model uuid not found') + with db.session_scope() as session: + self.data.create_model_job_data.model_uuid = 'model-uuid' + model_job = ModelJob(name='model-job') + session.add(model_job) + session.commit() + with db.session_scope() as session: + # fail due to model job with the same name + flag, message = ModelJobCreator(session, tid='12', data=self.data).prepare() + self.assertFalse(flag) + self.assertEqual(message, 'model job model-job already exist') + with db.session_scope() as session: + self.data.create_model_job_data.group_name = 'group-1' + self.data.create_model_job_data.model_job_name = 'model-job-1' + flag, message = ModelJobCreator(session, tid='12', data=self.data).prepare() + self.assertFalse(flag) + self.assertEqual(message, 'model group group-1 not exists') + with db.session_scope() as session: + self.data.create_model_job_data.group_name = 'group' + # fail due to dataset is not found + self.data.create_model_job_data.dataset_uuid = 'dataset-uuid-1' + 
flag, message = ModelJobCreator(session, tid='12', data=self.data).prepare() + self.assertFalse(flag) + self.assertEqual(message, 'dataset dataset-uuid-1 not exists') + with db.session_scope() as session: + dataset = Dataset(name='dataset-test-failed', uuid='dataset-uuid-1', is_published=False) + session.add(dataset) + session.commit() + # fail due to dataset is not published + flag, message = ModelJobCreator(session, tid='12', data=self.data).prepare() + self.assertFalse(flag) + self.assertEqual(message, 'dataset dataset-uuid-1 is not published') + with db.session_scope() as session: + mock_get_frontend_state.return_value = ResourceState.FAILED + # fail due to dataset is not succeeded + flag, message = ModelJobCreator(session, tid='12', data=self.data).prepare() + self.assertFalse(flag) + self.assertEqual(message, 'dataset dataset-uuid-1 is not succeeded') + + def test_commit(self): + with db.session_scope() as session: + creator = ModelJobCreator(session, tid='12', data=self.data) + creator.commit() + session.commit() + with db.session_scope() as session: + model = session.query(Model).filter_by(uuid='model-uuid').first() + project = session.query(Project).filter_by(name='project').first() + model_job: ModelJob = session.query(ModelJob).filter_by(name='model-job').first() + model_job_group = session.query(ModelJobGroup).filter_by(name='group').first() + dataset = session.query(Dataset).filter_by(uuid='dataset-uuid').first() + self.assertEqual(model_job.uuid, 'model-job-uuid') + self.assertEqual(model_job.model_job_type, ModelJobType.EVALUATION) + self.assertEqual(model_job.workflow_uuid, 'workflow-uuid') + self.assertEqual(model_job.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(model_job.model_id, model.id) + self.assertEqual(model_job.project_id, project.id) + self.assertEqual(model_job.group_id, model_job_group.id) + self.assertEqual(model_job.dataset_id, dataset.id) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_group_creator.py b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_group_creator.py new file mode 100644 index 000000000..6f08afb44 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_group_creator.py @@ -0,0 +1,127 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
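model_job_group_creator.py below records per-participant authorization in a ParticipantsInfo message: every project participant starts as PENDING, the coordinator's pure domain name is flipped to AUTHORIZED, and the local platform stays PENDING until a user authorizes the group. A small worked example of the resulting map, using the made-up domain names from the tests further below ('demo' is both the only participant and the coordinator, 'test' is the local platform):

```python
from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo
from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus

# The coordinator 'demo' is pre-authorized; the local side 'test' stays PENDING.
participants_info = ParticipantsInfo(
    participants_map={
        'demo': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name),
        'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name),
    })
```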
+#
+
+import logging
+from typing import Tuple
+
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.dataset.models import Dataset, ResourceState
+from fedlearner_webconsole.mmgr.models import ModelJobGroup, ModelJobRole, GroupCreateStatus
+from fedlearner_webconsole.algorithm.models import AlgorithmType
+from fedlearner_webconsole.participant.services import ParticipantService
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData
+from fedlearner_webconsole.proto.project_pb2 import ParticipantsInfo, ParticipantInfo
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus
+from fedlearner_webconsole.two_pc.resource_manager import ResourceManager
+from fedlearner_webconsole.setting.service import SettingService
+
+
+class ModelJobGroupCreator(ResourceManager):
+
+    def __init__(self, session: Session, tid: str, data: TransactionData):
+        super().__init__(tid, data)
+        assert data.create_model_job_group_data is not None
+        self._data = data.create_model_job_group_data
+        self._session = session
+
+    def _check_project(self) -> Tuple[bool, str]:
+        project_name = self._data.project_name
+        project = self._session.query(Project).filter_by(name=project_name).first()
+        if not project:
+            message = f'project {project_name} not exists'
+            logging.info('[model-group-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def _check_group(self) -> Tuple[bool, str]:
+        model_job_group_name = self._data.model_job_group_name
+        model_job_group = self._session.query(ModelJobGroup).filter_by(name=model_job_group_name).first()
+        if model_job_group:
+            if model_job_group.uuid != self._data.model_job_group_uuid:
+                message = f'model group {model_job_group_name} with different uuid already exist'
+                logging.info('[model-group-2pc] prepare failed: %s', message)
+                return False, message
+        return True, ''
+
+    def _check_dataset(self) -> Tuple[bool, str]:
+        if self._data.dataset_uuid:
+            dataset = self._session.query(Dataset).filter_by(uuid=self._data.dataset_uuid).first()
+            if not dataset:
+                message = f'dataset {self._data.dataset_uuid} not exists'
+                logging.info('[model-group-2pc] prepare failed: %s', message)
+                return False, message
+            if dataset.get_frontend_state() != ResourceState.SUCCEEDED:
+                message = f'dataset {self._data.dataset_uuid} is not succeeded'
+                logging.info('[model-group-2pc] prepare failed: %s', message)
+                return False, message
+            if not dataset.is_published:
+                message = f'dataset {self._data.dataset_uuid} is not published'
+                logging.info('[model-group-2pc] prepare failed: %s', message)
+                return False, message
+        return True, ''
+
+    def prepare(self) -> Tuple[bool, str]:
+        check_fn_list = [self._check_project, self._check_group, self._check_dataset]
+        for check_fn in check_fn_list:
+            succeeded, message = check_fn()
+            if not succeeded:
+                return False, message
+        logging.info('[model-group-2pc] prepare succeeded')
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        model_job_group_name = self._data.model_job_group_name
+        project = self._session.query(Project).filter_by(name=self._data.project_name).first()
+        group = self._session.query(ModelJobGroup).filter_by(name=model_job_group_name).first()
+        dataset_id = None
+        if self._data.dataset_uuid:
+            dataset_id = self._session.query(Dataset).filter_by(uuid=self._data.dataset_uuid).first().id
+        coordinator = ParticipantService(
+            
self._session).get_participant_by_pure_domain_name(pure_domain_name=self._data.coordinator_pure_domain_name) + coordinator_id = None + if coordinator is not None: + coordinator_id = coordinator.id + if not group: + group = ModelJobGroup(name=model_job_group_name, + uuid=self._data.model_job_group_uuid, + project_id=project.id, + dataset_id=dataset_id, + authorized=False, + role=ModelJobRole.PARTICIPANT, + algorithm_type=AlgorithmType[self._data.algorithm_type], + coordinator_id=coordinator_id) + participants = ParticipantService(self._session).get_participants_by_project(project.id) + participants_info = ParticipantsInfo(participants_map={ + p.pure_domain_name(): ParticipantInfo(auth_status=AuthStatus.PENDING.name) for p in participants + }) + participants_info.participants_map[ + self._data.coordinator_pure_domain_name].auth_status = AuthStatus.AUTHORIZED.name + pure_domain_name = SettingService.get_system_info().pure_domain_name + participants_info.participants_map[pure_domain_name].auth_status = AuthStatus.PENDING.name + group.set_participants_info(participants_info) + self._session.add(group) + group.status = GroupCreateStatus.SUCCEEDED + logging.info('[model-group-2pc] commit succeeded') + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[model-group-2pc] abort') + group = self._session.query(ModelJobGroup).filter_by(name=self._data.model_job_group_name).first() + if group is not None: + group.status = GroupCreateStatus.FAILED + # As we did not preserve any resource, do nothing + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_group_creator_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_group_creator_test.py new file mode 100644 index 000000000..484305e1f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_group_creator_test.py @@ -0,0 +1,157 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import unittest
+from unittest.mock import Mock, patch
+
+from testing.no_web_server_test_case import NoWebServerTestCase
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.dataset.models import Dataset, ResourceState
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.participant.models import Participant, ProjectParticipant
+from fedlearner_webconsole.algorithm.models import AlgorithmType
+from fedlearner_webconsole.two_pc.model_job_group_creator import ModelJobGroupCreator
+from fedlearner_webconsole.mmgr.models import ModelJobGroup, GroupCreateStatus
+from fedlearner_webconsole.proto.two_pc_pb2 import CreateModelJobGroupData, \
+    TransactionData
+from fedlearner_webconsole.proto.project_pb2 import ParticipantInfo, ParticipantsInfo
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus
+
+
+class ModelJobGroupCreatorTest(NoWebServerTestCase):
+
+    def setUp(self):
+        super().setUp()
+        project = Project(id=1, name='project')
+        participant = Participant(id=123, name='party', domain_name='fl-demo.com')
+        relationship = ProjectParticipant(project_id=1, participant_id=123)
+        dataset = Dataset(name='dataset', uuid='dataset_uuid', is_published=True)
+        with db.session_scope() as session:
+            session.add_all([project, participant, dataset, relationship])
+            session.commit()
+
+    @staticmethod
+    def get_transaction_data(group_name: str, group_uuid: str, project_name: str, dataset_uuid: str):
+        return TransactionData(
+            create_model_job_group_data=CreateModelJobGroupData(model_job_group_name=group_name,
+                                                                model_job_group_uuid=group_uuid,
+                                                                project_name=project_name,
+                                                                algorithm_type=AlgorithmType.NN_VERTICAL.name,
+                                                                coordinator_pure_domain_name='demo',
+                                                                dataset_uuid=dataset_uuid))
+
+    @patch('fedlearner_webconsole.dataset.models.Dataset.get_frontend_state')
+    def test_prepare(self, mock_get_frontend_state: Mock):
+        mock_get_frontend_state.return_value = ResourceState.SUCCEEDED
+        data = self.get_transaction_data('group', 'uuid', 'project', 'dataset_uuid')
+        with db.session_scope() as session:
+            creator = ModelJobGroupCreator(session, '12', data)
+            flag, msg = creator.prepare()
+            self.assertTrue(flag)
+        with db.session_scope() as session:
+            model_job_group = ModelJobGroup(name='group', uuid='uuid')
+            session.add(model_job_group)
+            session.commit()
+        # test idempotence when creating a group with the same name and uuid
+        with db.session_scope() as session:
+            creator = ModelJobGroupCreator(session, '12', data)
+            flag, msg = creator.prepare()
+            self.assertTrue(flag)
+        # fail due to inconsistent uuid
+        data = self.get_transaction_data('group', 'uuid-1', 'project', 'dataset_uuid')
+        with db.session_scope() as session:
+            creator = ModelJobGroupCreator(session, '12', data)
+            flag, msg = creator.prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'model group group with different uuid already exist')
+        # fail due to project not found
+        data = self.get_transaction_data('group', 'uuid', 'project-1', 'dataset_uuid')
+        with db.session_scope() as session:
+            creator = ModelJobGroupCreator(session, '12', data)
+            flag, msg = creator.prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'project project-1 not exists')
+        # fail due to dataset not found
+        data = self.get_transaction_data('group', 'uuid', 'project', 'dataset_uuid-1')
+        with db.session_scope() as session:
+            creator = ModelJobGroupCreator(session, '12', data)
+            flag, msg = creator.prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'dataset dataset_uuid-1 not exists')
+        
# fail due to dataset is not published + with db.session_scope() as session: + dataset = session.query(Dataset).filter_by(uuid='dataset_uuid').first() + dataset.is_published = False + session.add(dataset) + session.commit() + data = self.get_transaction_data('group', 'uuid', 'project', 'dataset_uuid') + with db.session_scope() as session: + creator = ModelJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + self.assertEqual(msg, 'dataset dataset_uuid is not published') + # fail due to dataset is not succeeded + mock_get_frontend_state.return_value = ResourceState.PROCESSING + with db.session_scope() as session: + creator = ModelJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + self.assertEqual(msg, 'dataset dataset_uuid is not succeeded') + + @patch('fedlearner_webconsole.setting.service.get_pure_domain_name') + def test_commit(self, mock_pure_domain_name: Mock): + mock_pure_domain_name.return_value = 'test' + data = self.get_transaction_data('group', 'uuid', 'project', 'dataset_uuid') + with db.session_scope() as session: + creator = ModelJobGroupCreator(session, '12', data) + flag, msg = creator.commit() + session.commit() + self.assertTrue(flag) + with db.session_scope() as session: + model_job_group = session.query(ModelJobGroup).filter_by(name='group').first() + project = session.query(Project).filter_by(name='project').first() + dataset = session.query(Dataset).filter_by(uuid='dataset_uuid').first() + self.assertEqual(model_job_group.uuid, 'uuid') + self.assertEqual(model_job_group.project_id, project.id) + self.assertEqual(model_job_group.algorithm_type, AlgorithmType.NN_VERTICAL) + self.assertEqual(model_job_group.coordinator_id, 123) + self.assertEqual(model_job_group.dataset_id, dataset.id) + self.assertEqual( + model_job_group.get_participants_info(), + ParticipantsInfo( + participants_map={ + 'test': ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'demo': ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + })) + + def test_abort(self): + data = self.get_transaction_data('group', 'uuid', 'project', 'dataset_uuid') + with db.session_scope() as session: + creator = ModelJobGroupCreator(session, '12', data) + flag, msg = creator.abort() + session.commit() + self.assertTrue(flag) + with db.session_scope() as session: + creator = ModelJobGroupCreator(session, '12', data) + group = ModelJobGroup(name='group', uuid='uuid', status=GroupCreateStatus.PENDING) + session.add(group) + session.flush() + flag, msg = creator.abort() + self.assertTrue(flag) + self.assertEqual(group.status, GroupCreateStatus.FAILED) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_launcher.py b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_launcher.py new file mode 100644 index 000000000..8bfcdffa3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_launcher.py @@ -0,0 +1,77 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from typing import Tuple
+
+from sqlalchemy.orm import Session
+from fedlearner_webconsole.mmgr.service import ModelJobGroupService
+from fedlearner_webconsole.mmgr.models import ModelJobGroup, ModelJobRole
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData
+from fedlearner_webconsole.two_pc.resource_manager import ResourceManager
+
+
+class ModelJobLauncher(ResourceManager):
+    """Launch a configured model job based on the config of model job group"""
+
+    def __init__(self, session: Session, tid: str, data: TransactionData):
+        super().__init__(tid, data)
+        assert data.create_model_job_data is not None
+        self._data = data.create_model_job_data
+        self._session = session
+
+    def prepare(self) -> Tuple[bool, str]:
+        # proto3 string fields are never None, so check for emptiness instead
+        if not self._data.group_uuid:
+            message = 'group_uuid not found in create_model_job_data'
+            logging.info('[launch-model-job-2pc] prepare failed: %s', message)
+            return False, message
+        group: ModelJobGroup = self._session.query(ModelJobGroup).filter_by(uuid=self._data.group_uuid).first()
+        if group is None:
+            message = f'model group not found by uuid {self._data.group_uuid}'
+            logging.info('[launch-model-job-2pc] prepare failed: %s', message)
+            return False, message
+        if group.role == ModelJobRole.PARTICIPANT and not group.authorized:
+            message = f'model group {self._data.group_uuid} not authorized to coordinator'
+            logging.info('[launch-model-job-2pc] prepare failed: %s', message)
+            return False, message
+        if group.config is None:
+            message = f'the config of model group {group.name} not found'
+            logging.info('[launch-model-job-2pc] prepare failed: %s', message)
+            return False, message
+        # a participant rejects the request when its group's latest version already reaches the given version
+        if group.latest_version >= self._data.version and group.role == ModelJobRole.PARTICIPANT:
+            message = f'the latest version of model group {group.name} is larger than or equal to the given version'
+            logging.info('[launch-model-job-2pc] prepare failed: %s', message)
+            return False, message
+        if group.algorithm_id is not None and group.algorithm is None:
+            message = f'the algorithm {group.algorithm_id} of group {group.name} is not found'
+            logging.warning('[launch-model-job-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        group: ModelJobGroup = self._session.query(ModelJobGroup).filter_by(uuid=self._data.group_uuid).first()
+
+        ModelJobGroupService(self._session).launch_model_job(group=group,
+                                                             name=self._data.model_job_name,
+                                                             uuid=self._data.model_job_uuid,
+                                                             version=self._data.version)
+        logging.info('[launch-model-job-2pc] commit succeeded for group %s', group.name)
+        return True, ''
+
+    def abort(self) -> Tuple[bool, str]:
+        logging.info('[launch-model-job-2pc] abort')
+        # As we did not preserve any resource, do nothing
+        return True, ''
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_launcher_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_launcher_test.py
new file mode 100644
index 000000000..7b8e5c3db
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/model_job_launcher_test.py
@@ -0,0 +1,159 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+from google.protobuf.struct_pb2 import Value
+
+from testing.no_web_server_test_case import NoWebServerTestCase
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.initial_db import _insert_or_update_templates
+from fedlearner_webconsole.algorithm.models import AlgorithmType, Algorithm
+from fedlearner_webconsole.two_pc.model_job_launcher import ModelJobLauncher
+from fedlearner_webconsole.workflow.models import WorkflowState
+from fedlearner_webconsole.mmgr.models import ModelJob, ModelJobGroup, ModelJobType, ModelJobRole
+from fedlearner_webconsole.dataset.models import Dataset, DatasetJob, DatasetJobState, DatasetJobKind, DatasetType, \
+    DatasetJobStage
+from fedlearner_webconsole.proto.two_pc_pb2 import CreateModelJobData, \
+    TransactionData
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition
+from fedlearner_webconsole.proto.common_pb2 import Variable
+
+
+def _get_workflow_config():
+    return WorkflowDefinition(job_definitions=[
+        JobDefinition(name='train-job',
+                      job_type=JobDefinition.JobType.NN_MODEL_TRANINING,
+                      variables=[
+                          Variable(name='mode', value='train'),
+                          Variable(name='data_source',
+                                   value='dataset-job-stage-uuid-psi-data-join-job',
+                                   typed_value=Value(string_value='dataset-job-stage-uuid-psi-data-join-job')),
+                          Variable(name='data_path', typed_value=Value(string_value='')),
+                      ])
+    ])
+
+
+class ModelJobLauncherTest(NoWebServerTestCase):
+
+    def setUp(self):
+        super().setUp()
+        with db.session_scope() as session:
+            _insert_or_update_templates(session)
+            dataset_job = DatasetJob(id=1,
+                                     name='datasetjob',
+                                     uuid='dataset-job-uuid',
+                                     state=DatasetJobState.SUCCEEDED,
+                                     project_id=1,
+                                     input_dataset_id=1,
+                                     output_dataset_id=2,
+                                     kind=DatasetJobKind.RSA_PSI_DATA_JOIN)
+            dataset = Dataset(id=2,
+                              uuid='uuid',
+                              name='datasetjob',
+                              dataset_type=DatasetType.PSI,
+                              path='/data/dataset/haha')
+            dataset_job_stage = DatasetJobStage(id=1,
+                                                name='data-join',
+                                                uuid='dataset-job-stage-uuid',
+                                                project_id=1,
+                                                state=DatasetJobState.SUCCEEDED,
+                                                dataset_job_id=1,
+                                                data_batch_id=1)
+            algorithm = Algorithm(id=2, name='algo')
+            model_job_group = ModelJobGroup(name='group',
+                                            uuid='uuid',
+                                            project_id=1,
+                                            algorithm_type=AlgorithmType.NN_VERTICAL,
+                                            algorithm_id=2,
+                                            role=ModelJobRole.PARTICIPANT,
+                                            authorized=True,
+                                            dataset_id=2,
+                                            latest_version=2)
+            model_job_group.set_config(_get_workflow_config())
+            session.add_all([dataset_job, dataset_job_stage, dataset, model_job_group, algorithm])
+            session.commit()
+
+    def test_prepare(self):
+        create_model_job_data = CreateModelJobData(model_job_name='model-job',
+                                                   model_job_uuid='model-job-uuid',
+                                                   group_uuid='uuid',
+                                                   version=3)
+        data = TransactionData(create_model_job_data=create_model_job_data)
+        with db.session_scope() as session:
+            # succeeded
+            flag, _ = ModelJobLauncher(session, tid='12', data=data).prepare()
+            self.assertTrue(flag)
+        with db.session_scope() as session:
+            # fail due to group is not authorized
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(uuid='uuid').first()
+            group.authorized = False
+            flag, msg =
ModelJobLauncher(session, tid='12', data=data).prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'model group uuid not authorized to coordinator')
+        with db.session_scope() as session:
+            # fail due to algorithm not found
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(uuid='uuid').first()
+            group.algorithm_id = 3
+            data = TransactionData(create_model_job_data=create_model_job_data)
+            flag, msg = ModelJobLauncher(session, tid='12', data=data).prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'the algorithm 3 of group group is not found')
+        with db.session_scope() as session:
+            # fail due to group is not configured
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(uuid='uuid').first()
+            group.config = None
+            flag, msg = ModelJobLauncher(session, tid='12', data=data).prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'the config of model group group not found')
+        with db.session_scope() as session:
+            # fail due to group is not found
+            data.create_model_job_data.group_uuid = '1'
+            flag, msg = ModelJobLauncher(session, tid='12', data=data).prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg, 'model group not found by uuid 1')
+            data.create_model_job_data.group_uuid = 'uuid'
+            # fail due to version mismatch
+            data.create_model_job_data.version = 2
+            flag, msg = ModelJobLauncher(session, tid='12', data=data).prepare()
+            self.assertFalse(flag)
+            self.assertEqual(msg,
+                             'the latest version of model group group is larger than or equal to the given version')
+
+    def test_commit(self):
+        create_model_job_data = CreateModelJobData(model_job_name='model-job',
+                                                   model_job_uuid='model-job-uuid',
+                                                   group_uuid='uuid',
+                                                   version=2)
+        data = TransactionData(create_model_job_data=create_model_job_data)
+        with db.session_scope() as session:
+            flag, msg = ModelJobLauncher(session, tid='12', data=data).commit()
+            self.assertTrue(flag)
+            session.commit()
+        with db.session_scope() as session:
+            group: ModelJobGroup = session.query(ModelJobGroup).filter_by(uuid='uuid').first()
+            model_job: ModelJob = session.query(ModelJob).filter_by(name='model-job').first()
+            self.assertEqual(model_job.group_id, group.id)
+            self.assertEqual(model_job.project_id, group.project_id)
+            self.assertEqual(model_job.algorithm_type, group.algorithm_type)
+            self.assertEqual(model_job.model_job_type, ModelJobType.TRAINING)
+            self.assertEqual(model_job.dataset_id, group.dataset_id)
+            self.assertEqual(model_job.workflow.get_config(), group.get_config())
+            self.assertEqual(model_job.workflow.state, WorkflowState.READY)
+            self.assertEqual(model_job.version, 2)
+            self.assertEqual(group.latest_version, 2)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/models.py b/web_console_v2/api/fedlearner_webconsole/two_pc/models.py
new file mode 100644
index 000000000..213f7b0ab
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/models.py
@@ -0,0 +1,118 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import enum
+from typing import Tuple, Optional
+
+from sqlalchemy import func, Index
+
+from fedlearner_webconsole.db import db, default_table_args
+from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction
+from fedlearner_webconsole.utils.mixins import to_dict_mixin
+
+
+class TransactionState(enum.Enum):
+    NEW = 'NEW'
+    PREPARE_SUCCEEDED = 'PREPARE_SUCCEEDED'
+    PREPARE_FAILED = 'PREPARE_FAILED'
+    COMMITTED = 'COMMITTED'
+    ABORTED = 'ABORTED'
+    INVALID = 'INVALID'
+
+
+# Valid transition mappings:
+# Current state - action - result - new state
+_VALID_TRANSITIONS = {
+    TransactionState.NEW: {
+        TwoPcAction.PREPARE: {
+            True: TransactionState.PREPARE_SUCCEEDED,
+            False: TransactionState.PREPARE_FAILED,
+        }
+    },
+    TransactionState.PREPARE_SUCCEEDED: {
+        TwoPcAction.COMMIT: {
+            True: TransactionState.COMMITTED,
+        },
+        TwoPcAction.ABORT: {
+            True: TransactionState.ABORTED,
+        }
+    },
+    TransactionState.PREPARE_FAILED: {
+        TwoPcAction.ABORT: {
+            True: TransactionState.ABORTED,
+        }
+    }
+}
+
+
+@to_dict_mixin(ignores=['_type'], extras={'type': lambda t: t.get_type()})
+class Transaction(db.Model):
+    __tablename__ = 'transactions_v2'
+    __table_args__ = (Index('uniq_uuid', 'uuid', unique=True), default_table_args('2pc transactions'))
+    id = db.Column(db.Integer, primary_key=True, autoincrement=True, comment='id')
+    uuid = db.Column(db.String(64), comment='uuid')
+    # 2PC type, consistent with TwoPcType in proto
+    _two_pc_type = db.Column('type', db.String(32), comment='2pc type name')
+    state = db.Column(db.Enum(TransactionState, native_enum=False, create_constraint=False, length=32),
+                      default=TransactionState.NEW,
+                      comment='state')
+    message = db.Column(db.Text(), comment='message of the last action')
+    created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created_at')
+    updated_at = db.Column(db.DateTime(timezone=True),
+                           onupdate=func.now(),
+                           server_default=func.now(),
+                           comment='updated_at')
+
+    def get_type(self) -> TwoPcType:
+        return TwoPcType.Value(self._two_pc_type)
+
+    def set_type(self, t: TwoPcType):
+        self._two_pc_type = TwoPcType.Name(t)
+
+    def is_valid_action(self, action: TwoPcAction) -> bool:
+        """Checks if the action is valid for the current state."""
+        possible_results = _VALID_TRANSITIONS.get(self.state, {}).get(action, None)
+        return possible_results is not None
+
+    def check_idempotent(self, current_action: TwoPcAction) -> Tuple[bool, Optional[bool], Optional[str]]:
+        """Checks whether the action was already executed and, if so, its result.
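+
+        A reachable state uniquely identifies the (action, result) pair that
+        produced it, so if the current state matches an outcome of the given
+        action, the action was executed before and its recorded result is
+        returned, which keeps 2pc handlers idempotent.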
+ + Returns: + (executed or not, result, message) + """ + if self.state == TransactionState.INVALID: + return True, False, self.message + for current_state, actions in _VALID_TRANSITIONS.items(): + for action, results in actions.items(): + for result, new_state in results.items(): + if new_state == self.state and action == current_action: + # Hits the history + return True, result, self.message + return False, None, None + + def update(self, action: TwoPcAction, succeeded: bool, message: str) -> Tuple[bool, str]: + new_state = _VALID_TRANSITIONS.get(self.state, {}).get(action, {}).get(succeeded, None) + if new_state is None: + self.message = f'[2pc] Invalid transition: [{self.state} - {action} - {succeeded}], extra: {message}' + self.state = TransactionState.INVALID + return False, self.message + self.state = new_state + self.message = message + return succeeded, message diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/models_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/models_test.py new file mode 100644 index 000000000..13b3b47fe --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/models_test.py @@ -0,0 +1,79 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import unittest + +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction +from fedlearner_webconsole.two_pc.models import Transaction, TransactionState + + +class TransactionTest(unittest.TestCase): + + def test_two_pc_type(self): + trans = Transaction() + trans.set_type(TwoPcType.CREATE_MODEL_JOB) + self.assertEqual(trans._two_pc_type, 'CREATE_MODEL_JOB') # pylint: disable=protected-access + self.assertEqual(trans.get_type(), TwoPcType.CREATE_MODEL_JOB) + + def test_is_valid_action(self): + trans = Transaction(state=TransactionState.NEW) + self.assertTrue(trans.is_valid_action(TwoPcAction.PREPARE)) + self.assertFalse(trans.is_valid_action(TwoPcAction.COMMIT)) + trans.state = TransactionState.PREPARE_FAILED + self.assertTrue(trans.is_valid_action(TwoPcAction.ABORT)) + self.assertFalse(trans.is_valid_action(TwoPcAction.COMMIT)) + trans.state = TransactionState.INVALID + self.assertFalse(trans.is_valid_action(TwoPcAction.ABORT)) + + def test_check_idempotent_invalid(self): + trans = Transaction(state=TransactionState.INVALID, message='invalid') + executed, result, message = trans.check_idempotent(TwoPcAction.COMMIT) + self.assertTrue(executed) + self.assertFalse(result) + self.assertEqual(message, 'invalid') + + def test_check_idempotent_executed(self): + trans = Transaction(state=TransactionState.PREPARE_SUCCEEDED, message='prepared') + executed, result, message = trans.check_idempotent(TwoPcAction.PREPARE) + self.assertTrue(executed) + self.assertTrue(result) + self.assertEqual(message, 'prepared') + + def test_check_idempotent_has_not_executed(self): + trans = Transaction(state=TransactionState.PREPARE_SUCCEEDED, message='prepared') + self.assertEqual(trans.check_idempotent(TwoPcAction.COMMIT), (False, None, None)) + + def test_update_failed(self): + trans = Transaction(state=TransactionState.PREPARE_SUCCEEDED) + trans.update(TwoPcAction.COMMIT, False, 'failed to abort') + self.assertEqual(trans.state, TransactionState.INVALID) + self.assertEqual( + trans.message, + '[2pc] Invalid transition: [TransactionState.PREPARE_SUCCEEDED - 1 - False], extra: failed to abort') + + def test_update_successfully(self): + trans = Transaction(state=TransactionState.PREPARE_SUCCEEDED) + trans.update(TwoPcAction.COMMIT, True, 'yeah') + self.assertEqual(trans.state, TransactionState.COMMITTED) + self.assertEqual(trans.message, 'yeah') + + trans = Transaction(state=TransactionState.PREPARE_SUCCEEDED) + trans.update(action=TwoPcAction.ABORT, succeeded=True, message='yep') + self.assertEqual(trans.state, TransactionState.ABORTED) + self.assertEqual(trans.message, 'yep') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/resource_manager.py b/web_console_v2/api/fedlearner_webconsole/two_pc/resource_manager.py new file mode 100644 index 000000000..bb11f4a9a --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/resource_manager.py @@ -0,0 +1,50 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+from abc import abstractmethod
+from typing import Tuple
+
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData, TwoPcAction
+
+
+class ResourceManager(object):
+    """An abstract class to manage resources in 2pc.
+
+    The recommended practice is to keep these methods idempotent.
+    """
+
+    def __init__(self, tid: str, data: TransactionData):
+        self.tid = tid
+        self.data = data
+
+    @abstractmethod
+    def prepare(self) -> Tuple[bool, str]:
+        pass
+
+    @abstractmethod
+    def commit(self) -> Tuple[bool, str]:
+        pass
+
+    @abstractmethod
+    def abort(self) -> Tuple[bool, str]:
+        pass
+
+    def run_two_pc(self, action: TwoPcAction) -> Tuple[bool, str]:
+        if action == TwoPcAction.PREPARE:
+            return self.prepare()
+        if action == TwoPcAction.COMMIT:
+            return self.commit()
+        assert action == TwoPcAction.ABORT
+        return self.abort()
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/transaction_manager.py b/web_console_v2/api/fedlearner_webconsole/two_pc/transaction_manager.py
new file mode 100644
index 000000000..6cb81be88
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/transaction_manager.py
@@ -0,0 +1,97 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import logging
+from typing import List, Tuple
+from uuid import uuid4
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.proto import common_pb2
+from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcAction, TwoPcType, TransactionData
+from fedlearner_webconsole.rpc.client import RpcClient
+from fedlearner_webconsole.two_pc.handlers import run_two_pc_action
+from fedlearner_webconsole.utils.metrics import emit_store
+
+
+class TransactionManager(object):
+
+    def __init__(self, project_name: str, project_token: str, participants: List[str], two_pc_type: TwoPcType):
+        self.type = two_pc_type
+        self._project_name = project_name
+        self._project_token = project_token
+        self._clients = []
+        for domain_name in participants:
+            self._clients.append(
+                RpcClient.from_project_and_participant(project_name=self._project_name,
+                                                       project_token=self._project_token,
+                                                       domain_name=domain_name))
+
+    def run(self, data: TransactionData) -> Tuple[bool, str]:
+        tid = str(uuid4())
+        prepared, pre_message = self.do_two_pc_action(tid, TwoPcAction.PREPARE, data)
+        # TODO(hangweiqiang): catch exception and maybe retry sometime?
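+        # Standard two-phase-commit decision: commit only when the local side
+        # and every participant voted yes in the prepare phase, abort otherwise.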
+        if prepared:
+            succeeded, act_message = self.do_two_pc_action(tid, TwoPcAction.COMMIT, data)
+        else:
+            succeeded, act_message = self.do_two_pc_action(tid, TwoPcAction.ABORT, data)
+        if not succeeded:
+            emit_store('2pc.transaction_failure', 1)
+        return (prepared, pre_message) if not prepared else (succeeded, act_message)
+
+    def do_two_pc_action(self, tid: str, action: TwoPcAction, data: TransactionData) -> Tuple[bool, str]:
+        # TODO(hangweiqiang): use multi-threading
+        succeeded = True
+        message = ''
+        for client in self._clients:
+            result, res_message = self._remote_do_two_pc(client, tid, action, data)
+            if not result and succeeded:
+                succeeded = False
+                message = res_message
+        result, res_message = self._local_do_two_pc(tid, action, data)
+        if not result and succeeded:
+            succeeded = False
+            message = res_message
+        return succeeded, message
+
+    def _remote_do_two_pc(self, client: RpcClient, tid: str, action: TwoPcAction,
+                          data: TransactionData) -> Tuple[bool, str]:
+        response = client.run_two_pc(transaction_uuid=tid, two_pc_type=self.type, action=action, data=data)
+        if response.status.code != common_pb2.STATUS_SUCCESS:
+            # Something went wrong during the rpc call; surface the rpc-level error
+            logging.info('[%s] 2pc [%s] error [%s]: %s', self.type, action, tid, response.status.msg)
+            return False, response.status.msg
+        if not response.succeeded:
+            # The remote action itself failed
+            logging.info('[%s] 2pc [%s] failed [%s]: %s', self.type, action, tid, response.message)
+            return False, response.message
+        return True, response.message
+
+    def _local_do_two_pc(self, tid: str, action: TwoPcAction, data: TransactionData) -> Tuple[bool, str]:
+        try:
+            with db.session_scope() as session:
+                succeeded, message = run_two_pc_action(session=session,
+                                                       tid=tid,
+                                                       two_pc_type=self.type,
+                                                       action=action,
+                                                       data=data)
+                session.commit()
+        except Exception as e:  # pylint: disable=broad-except
+            succeeded = False
+            message = str(e)
+        if not succeeded:
+            logging.info('[%s] 2pc [%s] failed locally [%s]: %s', self.type, action, tid, message)
+        return succeeded, message
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/transaction_manager_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/transaction_manager_test.py
new file mode 100644
index 000000000..24df28298
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/transaction_manager_test.py
@@ -0,0 +1,173 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +# coding: utf-8 +# pylint: disable=protected-access +import unittest +from unittest import mock +from unittest.mock import patch, MagicMock, call + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.service_pb2 import TwoPcResponse +from fedlearner_webconsole.proto.two_pc_pb2 import TwoPcType, TwoPcAction, TransactionData, \ + CreateModelJobData +from fedlearner_webconsole.two_pc.transaction_manager import TransactionManager + + +class TransactionManagerTest(NoWebServerTestCase): + _PROJECT_NAME = 'test-project' + _PROJECT_TOKEN = 'testtoken' + + @patch('fedlearner_webconsole.two_pc.transaction_manager.RpcClient.from_project_and_participant') + def test_init(self, mock_rpc_client_factory): + mock_rpc_client_factory.return_value = MagicMock() + tm = TransactionManager(project_name=self._PROJECT_NAME, + project_token=self._PROJECT_TOKEN, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + participants=['fl1.com', 'fl2.com']) + self.assertEqual(tm.type, TwoPcType.CREATE_MODEL_JOB) + self.assertEqual(len(tm._clients), 2) + + calls = [ + call(project_name=self._PROJECT_NAME, project_token=self._PROJECT_TOKEN, domain_name='fl1.com'), + call(project_name=self._PROJECT_NAME, project_token=self._PROJECT_TOKEN, domain_name='fl2.com') + ] + mock_rpc_client_factory.assert_has_calls(calls) + + @patch('fedlearner_webconsole.two_pc.transaction_manager.run_two_pc_action') + @patch('fedlearner_webconsole.two_pc.transaction_manager.uuid4') + def test_run(self, mock_uuid4, mock_local_run_two_pc_action): + tid = 'testttttt' + transaction_data = TransactionData(create_model_job_data=CreateModelJobData(model_job_name='test model name')) + mock_uuid4.return_value = tid + # Two participants + p1 = MagicMock() + p1.run_two_pc = MagicMock() + p2 = MagicMock() + p2.run_two_pc = MagicMock() + # A hack to avoid mocking RpcClient.from_project_and_participant + tm = TransactionManager(project_name=self._PROJECT_NAME, + project_token=self._PROJECT_TOKEN, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + participants=[]) + tm._clients = [p1, p2] + + # Test successfully + p1.run_two_pc.return_value = TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + succeeded=True) + p2.run_two_pc.return_value = TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), + succeeded=True) + mock_local_run_two_pc_action.return_value = (True, '') + succeeded, _ = tm.run(transaction_data) + self.assertTrue(succeeded) + mock_uuid4.assert_called_once() + calls = [ + call(transaction_uuid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=transaction_data), + call(transaction_uuid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.COMMIT, + data=transaction_data), + ] + p1.run_two_pc.assert_has_calls(calls) + p2.run_two_pc.assert_has_calls(calls) + mock_local_run_two_pc_action.assert_has_calls([ + call(session=mock.ANY, + tid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.PREPARE, + data=transaction_data), + call(session=mock.ANY, + tid=tid, + two_pc_type=TwoPcType.CREATE_MODEL_JOB, + action=TwoPcAction.COMMIT, + data=transaction_data), + ]) + + # Test failed + def p2_run_two_pc(action: TwoPcAction, *args, **kwargs) -> TwoPcResponse: + if action == TwoPcAction.PREPARE: + return TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), succeeded=False) + return TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), 
succeeded=True)
+
+        p2.run_two_pc.side_effect = p2_run_two_pc
+        mock_uuid4.reset_mock()
+        succeeded, _ = tm.run(transaction_data)
+        self.assertFalse(succeeded)
+        mock_uuid4.assert_called_once()
+        calls = [
+            call(transaction_uuid=tid,
+                 two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                 action=TwoPcAction.PREPARE,
+                 data=transaction_data),
+            call(transaction_uuid=tid,
+                 two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                 action=TwoPcAction.ABORT,
+                 data=transaction_data),
+        ]
+        p1.run_two_pc.assert_has_calls(calls)
+        p2.run_two_pc.assert_has_calls(calls)
+        mock_local_run_two_pc_action.assert_has_calls([
+            call(session=mock.ANY,
+                 tid=tid,
+                 two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                 action=TwoPcAction.PREPARE,
+                 data=transaction_data),
+            call(session=mock.ANY,
+                 tid=tid,
+                 two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                 action=TwoPcAction.ABORT,
+                 data=transaction_data),
+        ])
+
+    @patch('fedlearner_webconsole.two_pc.transaction_manager.run_two_pc_action')
+    def test_do_two_pc_action(self, mock_local_run_two_pc_action):
+        tid = 'test-id'
+        transaction_data = TransactionData(create_model_job_data=CreateModelJobData(model_job_name='test model name'))
+        # Two participants
+        p1 = MagicMock()
+        p1.run_two_pc = MagicMock(
+            return_value=TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), succeeded=True))
+        p2 = MagicMock()
+        p2.run_two_pc = MagicMock(
+            return_value=TwoPcResponse(status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS), succeeded=True))
+        mock_local_run_two_pc_action.return_value = (True, '')
+        # A hack to avoid mocking RpcClient.from_project_and_participant
+        tm = TransactionManager(project_name=self._PROJECT_NAME,
+                                project_token=self._PROJECT_TOKEN,
+                                two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                                participants=[])
+        tm._clients = [p1, p2]
+        succeeded, _ = tm.do_two_pc_action(tid=tid, action=TwoPcAction.PREPARE, data=transaction_data)
+        self.assertTrue(succeeded)
+        p1.run_two_pc.assert_called_once_with(transaction_uuid=tid,
+                                              two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                                              action=TwoPcAction.PREPARE,
+                                              data=transaction_data)
+        p2.run_two_pc.assert_called_once_with(transaction_uuid=tid,
+                                              two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                                              action=TwoPcAction.PREPARE,
+                                              data=transaction_data)
+        mock_local_run_two_pc_action.assert_called_once_with(session=mock.ANY,
+                                                             tid=tid,
+                                                             two_pc_type=TwoPcType.CREATE_MODEL_JOB,
+                                                             action=TwoPcAction.PREPARE,
+                                                             data=transaction_data)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_export_job_launcher.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_export_job_launcher.py
new file mode 100644
index 000000000..7dd0ac5ea
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_export_job_launcher.py
@@ -0,0 +1,76 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import logging +from typing import Tuple + +from sqlalchemy.orm import Session +from fedlearner_webconsole.tee.services import TrustedJobService +from fedlearner_webconsole.tee.models import TrustedJob, TrustedJobStatus +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData +from fedlearner_webconsole.two_pc.resource_manager import ResourceManager +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus + + +class TrustedExportJobLauncher(ResourceManager): + """Launch a configured trusted export job""" + + def __init__(self, session: Session, tid: str, data: TransactionData): + super().__init__(tid, data) + assert data.launch_trusted_export_job_data is not None + self._data = data.launch_trusted_export_job_data + self._session = session + self._tee_export_job = None + + def _check_trusted_export_job(self) -> Tuple[bool, str]: + self._tee_export_job = self._session.query(TrustedJob).filter_by(uuid=self._data.uuid).first() + if self._tee_export_job is None: + message = f'trusted export job {self._data.uuid} not found' + logging.info('[launch-trusted-export-job-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def _check_auth(self) -> Tuple[bool, str]: + if self._tee_export_job.auth_status != AuthStatus.AUTHORIZED: + message = f'trusted export job {self._data.uuid} not authorized' + logging.info('[launch-trusted-export-job-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def prepare(self) -> Tuple[bool, str]: + # _check_trusted_export_job should be the first + check_fn_list = [ + self._check_trusted_export_job, + self._check_auth, + ] + for check_fn in check_fn_list: + succeeded, message = check_fn() + if not succeeded: + return False, message + logging.info('[launch-trusted-export-job-2pc] prepare succeeded') + return True, '' + + def commit(self) -> Tuple[bool, str]: + tee_export_job = self._session.query(TrustedJob).filter_by(uuid=self._data.uuid).first() + if tee_export_job.coordinator_id == 0 or tee_export_job.group.analyzer_id == 0: + TrustedJobService(self._session).launch_trusted_export_job(tee_export_job) + else: + tee_export_job.status = TrustedJobStatus.SUCCEEDED + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[launch-trusted-export-job-2pc] abort') + # As we did not preserve any resource, do nothing + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_export_job_launcher_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_export_job_launcher_test.py new file mode 100644 index 000000000..8298a394e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_export_job_launcher_test.py @@ -0,0 +1,117 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import patch, MagicMock +from testing.no_web_server_test_case import NoWebServerTestCase +from google.protobuf.text_format import MessageToString +from fedlearner_webconsole.db import db +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.two_pc.trusted_export_job_launcher import TrustedExportJobLauncher +from fedlearner_webconsole.tee.models import TrustedJobGroup, TrustedJob, TrustedJobType, TrustedJobStatus +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.tee_pb2 import Resource +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData, LaunchTrustedExportJobData +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.job.models import Job, JobType, JobState + + +class TrustedExportJobLauncherTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-name') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + group = TrustedJobGroup(id=1, analyzer_id=0) + tee_export_job = TrustedJob(id=1, + uuid='uuid1', + name='V1-domain1-1', + type=TrustedJobType.EXPORT, + version=1, + project_id=1, + trusted_job_group_id=1, + auth_status=AuthStatus.AUTHORIZED, + status=TrustedJobStatus.CREATED, + export_count=1, + coordinator_id=1, + resource=MessageToString(Resource(cpu=1000, memory=1, replicas=1))) + tee_analyze_job = TrustedJob(id=2, + uuid='uuid2', + type=TrustedJobType.ANALYZE, + version=1, + trusted_job_group_id=1, + job_id=1, + status=TrustedJobStatus.SUCCEEDED) + job = Job(id=1, + name='trusted-job-1-uuid2', + job_type=JobType.CUSTOMIZED, + state=JobState.COMPLETED, + workflow_id=0, + project_id=1) + sys_var = SettingService(session).get_system_variables_dict() + session.add_all([project, participant1, proj_part1, group, tee_export_job, tee_analyze_job, job]) + session.commit() + sys_var['sgx_image'] = 'artifact.bytedance.com/fedlearner/pp_bioinformatics:e13eb8a1d96ad046ca7354b8197d41fd' + self.sys_var = sys_var + + @staticmethod + def get_transaction_data(uuid: str): + return TransactionData(launch_trusted_export_job_data=LaunchTrustedExportJobData(uuid=uuid)) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_prepare(self, mock_get_system_info: MagicMock): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + with db.session_scope() as session: + tee_export_job = session.query(TrustedJob).get(1) + # successful + data = self.get_transaction_data('uuid1') + launcher = TrustedExportJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertTrue(flag) + # fail due to tee_export_job not exist + data = self.get_transaction_data('not-exist') + launcher = TrustedExportJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + # fail due to auth + tee_export_job.auth_status = AuthStatus.WITHDRAW + data = self.get_transaction_data('uuid1') + launcher = TrustedExportJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_variables_dict') + 
@patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_commit(self, mock_get_system_info: MagicMock, mock_sys_dict: MagicMock): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + mock_sys_dict.return_value = self.sys_var + with db.session_scope() as session: + data = self.get_transaction_data('uuid1') + launcher = TrustedExportJobLauncher(session, '13', data) + flag, msg = launcher.commit() + self.assertTrue(flag) + session.commit() + with db.session_scope() as session: + tee_export_job = session.query(TrustedJob).get(1) + self.assertIsNotNone(tee_export_job.job_id) + self.assertEqual(tee_export_job.get_status(), TrustedJobStatus.PENDING) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_group_creator.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_group_creator.py new file mode 100644 index 000000000..ce1b32349 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_group_creator.py @@ -0,0 +1,180 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Tuple +from sqlalchemy.orm import Session +from fedlearner_webconsole.review.ticket_helper import get_ticket_helper +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.tee.models import TrustedJobGroup, GroupCreateStatus +from fedlearner_webconsole.proto.tee_pb2 import ParticipantDatasetList, ParticipantDataset +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData +from fedlearner_webconsole.two_pc.resource_manager import ResourceManager +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher +from fedlearner_webconsole.algorithm.models import AlgorithmType +from fedlearner_webconsole.exceptions import NotFoundException + + +class TrustedJobGroupCreator(ResourceManager): + + def __init__(self, session: Session, tid: str, data: TransactionData): + super().__init__(tid, data) + assert data.create_trusted_job_group_data is not None + self._data = data.create_trusted_job_group_data + self._session = session + self._project_id = None + self.pure_domain_name = SettingService.get_system_info().pure_domain_name + + def _check_ticket(self) -> Tuple[bool, str]: + validate = get_ticket_helper(self._session).validate_ticket( + self._data.ticket_uuid, lambda ticket: ticket.details.uuid == self._data.uuid) + if not validate: + message = f'ticket {self._data.ticket_uuid} is not valid' + logging.info('[trusted-group-2pc] prepare failed: %s', message) + return False, message + return True, '' + + def _check_group(self) -> Tuple[bool, str]: + name = 
self._data.name
+        project_name = self._data.project_name
+        project = self._session.query(Project).filter_by(name=project_name).first()
+        self._project_id = project.id
+        group = self._session.query(TrustedJobGroup).filter_by(name=name, project_id=project.id).first()
+        if group is not None and group.uuid != self._data.uuid:
+            message = f'trusted job group {name} in project {project_name} with different uuid already exists'
+            logging.info('[trusted-group-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def _check_algorithm(self) -> Tuple[bool, str]:
+        try:
+            algorithm = AlgorithmFetcher(self._project_id).get_algorithm(self._data.algorithm_uuid)
+            if algorithm.type != AlgorithmType.TRUSTED_COMPUTING.name:
+                message = f'algorithm {self._data.algorithm_uuid} is not TRUSTED_COMPUTING type'
+                logging.info('[trusted-group-2pc] prepare failed: %s', message)
+                return False, message
+        except NotFoundException as e:
+            message = e.message
+            logging.info('[trusted-group-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def _check_participant_dataset(self) -> Tuple[bool, str]:
+        for dnd in self._data.domain_name_datasets:
+            if dnd.pure_domain_name == self.pure_domain_name:
+                dataset = self._session.query(Dataset).filter_by(uuid=dnd.dataset_uuid, name=dnd.dataset_name).first()
+                if dataset is None:
+                    message = f'dataset {dnd.dataset_uuid} not exists'
+                    logging.info('[trusted-group-2pc] prepare failed: %s', message)
+                    return False, message
+                if not dataset.is_published:
+                    message = f'dataset {dnd.dataset_uuid} is not published'
+                    logging.info('[trusted-group-2pc] prepare failed: %s', message)
+                    return False, message
+            else:
+                participant = ParticipantService(
+                    self._session).get_participant_by_pure_domain_name(pure_domain_name=dnd.pure_domain_name)
+                if participant is None:
+                    message = f'participant with pure domain name {dnd.pure_domain_name} not exists'
+                    logging.info('[trusted-group-2pc] prepare failed: %s', message)
+                    return False, message
+        return True, ''
+
+    def _check_participant(self) -> Tuple[bool, str]:
+        for pure_domain_name in [self._data.coordinator_pure_domain_name, self._data.analyzer_pure_domain_name]:
+            if pure_domain_name != self.pure_domain_name:
+                participant = ParticipantService(
+                    self._session).get_participant_by_pure_domain_name(pure_domain_name=pure_domain_name)
+                if participant is None:
+                    message = f'participant with pure domain name {pure_domain_name} not exists'
+                    logging.info('[trusted-group-2pc] prepare failed: %s', message)
+                    return False, message
+        return True, ''
+
+    def prepare(self) -> Tuple[bool, str]:
+        # _check_algorithm must run after _check_group, which resolves self._project_id
+        check_fn_list = [
+            self._check_ticket,
+            self._check_group,
+            self._check_algorithm,
+            self._check_participant_dataset,
+            self._check_participant,
+        ]
+        for check_fn in check_fn_list:
+            succeeded, message = check_fn()
+            if not succeeded:
+                return False, message
+        logging.info('[trusted-group-2pc] prepare succeeded')
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        coordinator_pure_domain_name = self._data.coordinator_pure_domain_name
+        # The coordinator has already created the group in the POST API, so do nothing here
+        if self.pure_domain_name == coordinator_pure_domain_name:
+            logging.info('[trusted-group-2pc] commit succeeded')
+            return True, ''
+        name = self._data.name
+        uuid = self._data.uuid
+        project = self._session.query(Project).filter_by(name=self._data.project_name).first()
+        coordinator_id = ParticipantService(
self._session).get_participant_by_pure_domain_name(pure_domain_name=coordinator_pure_domain_name).id + if self.pure_domain_name == self._data.analyzer_pure_domain_name: + analyzer_id = 0 + else: + analyzer_id = ParticipantService(self._session).get_participant_by_pure_domain_name( + pure_domain_name=self._data.analyzer_pure_domain_name).id + dataset_id = None + participant_datasets = ParticipantDatasetList() + for dnd in self._data.domain_name_datasets: + if dnd.pure_domain_name == self.pure_domain_name: + dataset = self._session.query(Dataset).filter_by(uuid=dnd.dataset_uuid).first() + dataset_id = dataset.id + else: + participant = ParticipantService( + self._session).get_participant_by_pure_domain_name(pure_domain_name=dnd.pure_domain_name) + participant_datasets.items.append( + ParticipantDataset(participant_id=participant.id, uuid=dnd.dataset_uuid, name=dnd.dataset_name)) + participants = ParticipantService(self._session).get_participants_by_project(project.id) + unauth_participant_ids = [p.id for p in participants if p.id != coordinator_id] + group = self._session.query(TrustedJobGroup).filter_by(uuid=uuid).first() + if not group: + group = TrustedJobGroup( + name=name, + uuid=uuid, + latest_version=0, + creator_username=self._data.creator_username, + project_id=project.id, + coordinator_id=coordinator_id, + analyzer_id=analyzer_id, + ticket_uuid=self._data.ticket_uuid, + ticket_status=TicketStatus.APPROVED, + status=GroupCreateStatus.SUCCEEDED, + algorithm_uuid=self._data.algorithm_uuid, + dataset_id=dataset_id, + ) + group.set_participant_datasets(participant_datasets) + group.set_unauth_participant_ids(unauth_participant_ids) + self._session.add(group) + logging.info('[trusted-group-2pc] commit succeeded') + return True, '' + + def abort(self) -> Tuple[bool, str]: + logging.info('[trusted-group-2pc] abort') + # As we did not preserve any resource, do nothing + return True, '' diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_group_creator_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_group_creator_test.py new file mode 100644 index 000000000..bfc587dd9 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_group_creator_test.py @@ -0,0 +1,245 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import patch +from typing import List +import grpc +from google.protobuf.text_format import MessageToString +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.rpc.client import FakeRpcError +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import Dataset +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.algorithm.models import AlgorithmProject, Algorithm, AlgorithmType +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData, CreateTrustedJobGroupData +from fedlearner_webconsole.proto.tee_pb2 import DomainNameDataset, ParticipantDataset, ParticipantDatasetList +from fedlearner_webconsole.tee.models import TrustedJobGroup, GroupCreateStatus +from fedlearner_webconsole.two_pc.trusted_job_group_creator import TrustedJobGroupCreator +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.review.common import NO_CENTRAL_SERVER_UUID + + +class TrustedJobGroupCreatorTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + project = Project(id=1, name='project') + participant1 = Participant(id=1, name='part2', domain_name='fl-domain2.com') + participant2 = Participant(id=2, name='part3', domain_name='fl-domain3.com') + proj_part1 = ProjectParticipant(project_id=1, participant_id=1) + proj_part2 = ProjectParticipant(project_id=1, participant_id=2) + dataset1 = Dataset(id=1, name='dataset-name1', uuid='dataset-uuid1', is_published=True) + dataset2 = Dataset(id=2, name='dataset-name3', uuid='dataset-uuid3', is_published=False) + algorithm_proj1 = AlgorithmProject(id=1, uuid='algorithm-proj-uuid1', type=AlgorithmType.TRUSTED_COMPUTING) + algorithm1 = Algorithm(id=1, + uuid='algorithm-uuid1', + type=AlgorithmType.TRUSTED_COMPUTING, + algorithm_project_id=1) + algorithm2 = Algorithm(id=2, uuid='algorithm-uuid2', algorithm_project_id=1) + with db.session_scope() as session: + session.add_all([ + project, + participant1, + participant2, + proj_part1, + proj_part2, + dataset1, + dataset2, + algorithm1, + algorithm2, + algorithm_proj1, + ]) + session.commit() + + @staticmethod + def get_transaction_data(name: str, uuid: str, ticket_uuid: str, project_name: str, algorithm_project_uuid: str, + algorithm_uuid: str, domain_name_datasets: List[DomainNameDataset], + coordinator_pure_domain_name: str, analyzer_pure_domain_name: str): + return TransactionData(create_trusted_job_group_data=CreateTrustedJobGroupData( + name=name, + uuid=uuid, + ticket_uuid=ticket_uuid, + project_name=project_name, + algorithm_project_uuid=algorithm_project_uuid, + algorithm_uuid=algorithm_uuid, + domain_name_datasets=domain_name_datasets, + coordinator_pure_domain_name=coordinator_pure_domain_name, + analyzer_pure_domain_name=analyzer_pure_domain_name, + )) + + @patch('fedlearner_webconsole.algorithm.fetcher.AlgorithmFetcher.get_algorithm_from_participant') + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_prepare(self, mock_get_system_info, mock_get_algorithm): + # successful case + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + mock_get_algorithm.side_effect = FakeRpcError(grpc.StatusCode.NOT_FOUND, 'not found') + data = 
self.get_transaction_data( + 'group-name', + 'group-uuid', + NO_CENTRAL_SERVER_UUID, + 'project', + 'algorithm-proj-uuid1', + 'algorithm-uuid1', + [ + DomainNameDataset( + pure_domain_name='domain1', dataset_uuid='dataset-uuid1', dataset_name='dataset-name1'), + DomainNameDataset( + pure_domain_name='domain2', dataset_uuid='dataset-uuid2', dataset_name='dataset-name2'), + ], + 'domain2', + 'domain2', + ) + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertTrue(flag) + # fail due to algorithm not found + data.create_trusted_job_group_data.algorithm_uuid = 'algorithm-not-exist' + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + # fail due to algorithm type invalid + data.create_trusted_job_group_data.algorithm_uuid = 'algorithm-uuid2' + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + # fail due to participant not found + data = self.get_transaction_data( + 'group-name', + 'group-uuid', + NO_CENTRAL_SERVER_UUID, + 'project', + 'algorithm-proj-uuid1', + 'algorithm-uuid1', + [ + DomainNameDataset( + pure_domain_name='domain1', dataset_uuid='dataset-uuid1', dataset_name='dataset-name1'), + DomainNameDataset( + pure_domain_name='domain-not-exist', dataset_uuid='dataset-uuid2', dataset_name='dataset-name2'), + ], + 'domain2', + 'domain2', + ) + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + # fail due to dataset not found + data = self.get_transaction_data( + 'group-name', + 'group-uuid', + NO_CENTRAL_SERVER_UUID, + 'project', + 'algorithm-proj-uuid1', + 'algorithm-uuid1', + [ + DomainNameDataset( + pure_domain_name='domain1', dataset_uuid='dataset-uuid-not-exist', dataset_name='dataset-name1'), + DomainNameDataset( + pure_domain_name='domain2', dataset_uuid='dataset-uuid2', dataset_name='dataset-name2'), + ], + 'domain2', + 'domain2', + ) + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + # fail due to dataset not published + data = self.get_transaction_data( + 'group-name', + 'group-uuid', + NO_CENTRAL_SERVER_UUID, + 'project', + 'algorithm-proj-uuid1', + 'algorithm-uuid1', + [ + DomainNameDataset( + pure_domain_name='domain1', dataset_uuid='dataset-uuid3', dataset_name='dataset-name3'), + DomainNameDataset( + pure_domain_name='domain2', dataset_uuid='dataset-uuid2', dataset_name='dataset-name2'), + ], + 'domain2', + 'domain2', + ) + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + # fail due to same trusted job group name with different uuid in project + with db.session_scope() as session: + group = TrustedJobGroup(name='group-name', uuid='other-group-uuid', project_id=1) + session.add(group) + session.commit() + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.prepare() + self.assertFalse(flag) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_commit(self, mock_get_system_info): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + data = self.get_transaction_data( + 
'group-name', + 'group-uuid', + NO_CENTRAL_SERVER_UUID, + 'project', + 'algorithm-proj-uuid1', + 'algorithm-uuid1', + [ + DomainNameDataset( + pure_domain_name='domain1', dataset_uuid='dataset-uuid1', dataset_name='dataset-name1'), + DomainNameDataset( + pure_domain_name='domain2', dataset_uuid='dataset-uuid2', dataset_name='dataset-name2'), + ], + 'domain2', + 'domain2', + ) + with db.session_scope() as session: + creator = TrustedJobGroupCreator(session, '12', data) + flag, msg = creator.commit() + session.commit() + self.assertTrue(flag) + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).filter_by(name='group-name').first() + self.assertEqual(group.uuid, 'group-uuid') + self.assertEqual(group.latest_version, 0) + self.assertEqual(group.project_id, 1) + self.assertEqual(group.coordinator_id, 1) + self.assertEqual(group.analyzer_id, 1) + self.assertEqual(group.ticket_uuid, NO_CENTRAL_SERVER_UUID) + self.assertEqual(group.ticket_status, TicketStatus.APPROVED) + self.assertEqual(group.status, GroupCreateStatus.SUCCEEDED) + self.assertEqual(group.auth_status, AuthStatus.PENDING) + self.assertEqual(group.unauth_participant_ids, '2') + self.assertEqual(group.algorithm_uuid, 'algorithm-uuid1') + self.assertEqual(group.dataset_id, 1) + participant_datasets = ParticipantDatasetList( + items=[ParticipantDataset( + participant_id=1, + uuid='dataset-uuid2', + name='dataset-name2', + )]) + self.assertEqual(group.participant_datasets, MessageToString(participant_datasets)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_launcher.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_launcher.py new file mode 100644 index 000000000..f56eec725 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_launcher.py @@ -0,0 +1,136 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
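+#
+# How this launcher is driven through the two-phase-commit (2PC) flow, as
+# exercised by the tests in this change (an illustrative sketch only; the
+# canonical driver is the two_pc transaction manager):
+#
+#     with db.session_scope() as session:
+#         launcher = TrustedJobLauncher(session, tid, transaction_data)
+#         succeeded, message = launcher.prepare()  # phase 1: validation only
+#         if succeeded:
+#             launcher.commit()                    # phase 2: launch the job
+#         else:
+#             launcher.abort()                     # nothing was reserved
+#         session.commit()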
+#
+
+import logging
+from typing import Tuple
+
+from sqlalchemy.orm import Session
+from fedlearner_webconsole.tee.services import TrustedJobGroupService
+from fedlearner_webconsole.tee.models import TrustedJobGroup
+from fedlearner_webconsole.dataset.models import Dataset
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData
+from fedlearner_webconsole.two_pc.resource_manager import ResourceManager
+from fedlearner_webconsole.setting.service import SettingService
+from fedlearner_webconsole.algorithm.fetcher import AlgorithmFetcher
+from fedlearner_webconsole.algorithm.models import AlgorithmType
+from fedlearner_webconsole.exceptions import NotFoundException
+from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus
+from fedlearner_webconsole.participant.services import ParticipantService
+
+
+class TrustedJobLauncher(ResourceManager):
+    """Launches a configured trusted job based on the config of its trusted job group."""
+
+    def __init__(self, session: Session, tid: str, data: TransactionData):
+        super().__init__(tid, data)
+        assert data.launch_trusted_job_data is not None
+        self._data = data.launch_trusted_job_data
+        self._session = session
+        self._group = None
+
+    def _check_group(self) -> Tuple[bool, str]:
+        if self._data.group_uuid:
+            group: TrustedJobGroup = self._session.query(TrustedJobGroup).filter_by(uuid=self._data.group_uuid).first()
+            if group is not None:
+                self._group = group
+                return True, ''
+        message = f'trusted job group {self._data.group_uuid} not found'
+        logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+        return False, message
+
+    def _check_version(self) -> Tuple[bool, str]:
+        self_pure_domain_name = SettingService.get_system_info().pure_domain_name
+        if (self._group.latest_version >= self._data.version and
+                self._data.initiator_pure_domain_name != self_pure_domain_name):
+            message = (f'the latest version of trusted job group {self._data.group_uuid} '
+                       f'is greater than or equal to the given version')
+            logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def _check_auth(self) -> Tuple[bool, str]:
+        if self._group.auth_status != AuthStatus.AUTHORIZED:
+            message = f'trusted job group {self._data.group_uuid} not authorized'
+            logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def _check_algorithm(self) -> Tuple[bool, str]:
+        try:
+            algorithm = AlgorithmFetcher(self._group.project_id).get_algorithm(self._group.algorithm_uuid)
+            if algorithm.type != AlgorithmType.TRUSTED_COMPUTING.name:
+                message = f'algorithm {self._group.algorithm_uuid} is not TRUSTED_COMPUTING type'
+                logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+                return False, message
+        except NotFoundException as e:
+            message = e.message
+            logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def _check_dataset(self) -> Tuple[bool, str]:
+        if self._group.dataset_id is not None:
+            dataset: Dataset = self._session.query(Dataset).get(self._group.dataset_id)
+            if dataset is None or not dataset.is_published:
+                message = f'dataset {self._group.dataset_id} is not found or not published'
+                logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+                return False, message
+        return True, ''
+
+    def _check_initiator(self) -> Tuple[bool, str]:
+        init_pure_dn = self._data.initiator_pure_domain_name
+        if SettingService.get_system_info().pure_domain_name == init_pure_dn:
+            return True, ''
+        participant = ParticipantService(self._session).get_participant_by_pure_domain_name(init_pure_dn)
+        if participant is None:
+            message = f'initiator {self._data.initiator_pure_domain_name} is not found'
+            logging.info('[launch-trusted-job-2pc] prepare failed: %s', message)
+            return False, message
+        return True, ''
+
+    def prepare(self) -> Tuple[bool, str]:
+        # _check_group must run first, since the later checks rely on self._group
+        check_fn_list = [
+            self._check_group,
+            self._check_version,
+            self._check_auth,
+            self._check_algorithm,
+            self._check_dataset,
+            self._check_initiator,
+        ]
+        for check_fn in check_fn_list:
+            succeeded, message = check_fn()
+            if not succeeded:
+                return False, message
+        logging.info('[launch-trusted-job-2pc] prepare succeeded')
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        group: TrustedJobGroup = self._session.query(TrustedJobGroup).filter_by(uuid=self._data.group_uuid).first()
+        pure_dn = self._data.initiator_pure_domain_name
+        if SettingService.get_system_info().pure_domain_name == pure_dn:
+            coordinator_id = 0
+        else:
+            participant = ParticipantService(self._session).get_participant_by_pure_domain_name(pure_dn)
+            coordinator_id = participant.id
+        TrustedJobGroupService(self._session).launch_trusted_job(group, self._data.uuid, self._data.version,
+                                                                 coordinator_id)
+        logging.info(f'[launch-trusted-job-2pc] commit succeeded for group {group.name}')
+        return True, ''
+
+    def abort(self) -> Tuple[bool, str]:
+        logging.info('[launch-trusted-job-2pc] abort')
+        # As we did not reserve any resources, do nothing
+        return True, ''
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_launcher_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_launcher_test.py
new file mode 100644
index 000000000..5e774c7dd
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_launcher_test.py
@@ -0,0 +1,130 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from unittest.mock import patch +from testing.common import NoWebServerTestCase +from google.protobuf.text_format import MessageToString +from fedlearner_webconsole.db import db +from fedlearner_webconsole.tee.models import TrustedJobGroup, GroupCreateStatus, TrustedJob +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.algorithm.models import Algorithm, AlgorithmType +from fedlearner_webconsole.dataset.models import Dataset, DataBatch +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData, LaunchTrustedJobData +from fedlearner_webconsole.two_pc.trusted_job_launcher import TrustedJobLauncher +from fedlearner_webconsole.proto.setting_pb2 import SystemInfo +from fedlearner_webconsole.proto.tee_pb2 import Resource +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.participant.models import Participant + + +class TrustedJobLauncherTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + project = Project(id=1, name='project-name') + participant = Participant(id=1, name='part2', domain_name='fl-domain2.com') + algorithm = Algorithm(id=1, + uuid='algorithm-uuid1', + type=AlgorithmType.TRUSTED_COMPUTING, + path='file:///data/algorithm/test/run.sh') + dataset1 = Dataset(id=1, name='dataset-name1', uuid='dataset-uuid1', is_published=True) + data_batch1 = DataBatch(id=1, dataset_id=1) + dataset2 = Dataset(id=2, name='dataset-name2', uuid='dataset-uuid2', is_published=False) + group = TrustedJobGroup(id=1, + uuid='group-uuid', + project_id=1, + latest_version=1, + coordinator_id=1, + status=GroupCreateStatus.SUCCEEDED, + auth_status=AuthStatus.AUTHORIZED, + algorithm_uuid='algorithm-uuid1', + dataset_id=1, + resource=MessageToString(Resource(cpu=2, memory=2, replicas=1))) + session.add_all([project, participant, algorithm, dataset1, data_batch1, dataset2, group]) + session.commit() + + @staticmethod + def get_transaction_data(uuid: str, version: int, group_uuid: str, initiator_pure_domain_name: str): + return TransactionData(launch_trusted_job_data=LaunchTrustedJobData( + uuid=uuid, version=version, group_uuid=group_uuid, initiator_pure_domain_name=initiator_pure_domain_name)) + + @patch('fedlearner_webconsole.setting.service.SettingService.get_system_info') + def test_prepare(self, mock_get_system_info): + mock_get_system_info.return_value = SystemInfo(pure_domain_name='domain1') + with db.session_scope() as session: + group: TrustedJobGroup = session.query(TrustedJobGroup).get(1) + # successful + data = self.get_transaction_data('trusted-job-uuid', 2, 'group-uuid', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertTrue(flag) + # fail due to initiator not found + data = self.get_transaction_data('trusted-job-uuid', 2, 'group-uuid', 'domain3') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + # fail due to group not found + data = self.get_transaction_data('trusted-job-uuid', 2, 'not-exist', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + # fail due to version conflict + data = self.get_transaction_data('trusted-job-uuid', 1, 'group-uuid', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + # fail due to auth + group.auth_status = AuthStatus.PENDING + data = 
self.get_transaction_data('trusted-job-uuid', 2, 'group-uuid', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + # fail due to dataset unpublished + group.auth_status = AuthStatus.AUTHORIZED + group.dataset_id = 2 + data = self.get_transaction_data('trusted-job-uuid', 2, 'group-uuid', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + # fail due to algorithm not found + group.dataset_id = 1 + group.algorithm_uuid = 'algorithm-not-exist' + data = self.get_transaction_data('trusted-job-uuid', 2, 'group-uuid', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.prepare() + self.assertFalse(flag) + + @patch('fedlearner_webconsole.tee.services.get_batch_data_path') + def test_commit(self, mock_get_batch_data_path): + mock_get_batch_data_path.return_value = 'file:///data/test' + with db.session_scope() as session: + data = self.get_transaction_data('trusted-job-uuid', 2, 'group-uuid', 'domain2') + launcher = TrustedJobLauncher(session, '13', data) + flag, msg = launcher.commit() + session.commit() + self.assertTrue(flag) + with db.session_scope() as session: + trusted_job = session.query(TrustedJob).filter_by(trusted_job_group_id=1, version=2).first() + self.assertIsNotNone(trusted_job) + self.assertEqual(trusted_job.name, 'V2') + self.assertEqual(trusted_job.coordinator_id, 1) + group = session.query(TrustedJobGroup).get(1) + self.assertEqual(group.latest_version, 2) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_stopper.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_stopper.py new file mode 100644 index 000000000..e2977fb6e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_stopper.py @@ -0,0 +1,60 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
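+#
+# Semantics in brief (derived from the methods below): prepare() only
+# validates -- it rejects unknown uuids and jobs still in PENDING status;
+# commit() performs the actual stop via TrustedJobService and deliberately
+# reports success even if the job has disappeared or ends in an unexpected
+# status, since a 2PC commit must not fail after a successful prepare.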
+#
+
+import logging
+from typing import Tuple
+
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData
+from fedlearner_webconsole.two_pc.resource_manager import ResourceManager
+from fedlearner_webconsole.tee.models import TrustedJob, TrustedJobStatus
+from fedlearner_webconsole.tee.services import TrustedJobService
+
+
+class TrustedJobStopper(ResourceManager):
+
+    def __init__(self, session: Session, tid: str, data: TransactionData):
+        super().__init__(tid, data)
+        assert data.stop_trusted_job_data is not None
+        self._data = data.stop_trusted_job_data
+        self._session = session
+
+    def prepare(self) -> Tuple[bool, str]:
+        trusted_job = self._session.query(TrustedJob).filter_by(uuid=self._data.uuid).first()
+        if trusted_job is None:
+            message = f'failed to find trusted job by uuid {self._data.uuid}'
+            logging.info(f'[stop-trusted-job-2pc] prepare: {message}')
+            return False, message
+        if trusted_job.get_status() == TrustedJobStatus.PENDING:
+            message = 'trusted job in status PENDING is not stoppable'
+            logging.info(f'[stop-trusted-job-2pc] prepare: {message}')
+            return False, message
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        trusted_job = self._session.query(TrustedJob).filter_by(uuid=self._data.uuid).first()
+        if trusted_job is None:
+            logging.error(f'[stop-trusted-job-2pc] commit: trusted job with uuid {self._data.uuid} not found')
+            return True, ''
+        TrustedJobService(self._session).stop_trusted_job(trusted_job)
+        if trusted_job.get_status() != TrustedJobStatus.STOPPED:
+            logging.warning(f'[stop-trusted-job-2pc] commit: stop trusted job with uuid {self._data.uuid} '
+                            f'ended with status {trusted_job.get_status()}')
+        return True, ''
+
+    def abort(self) -> Tuple[bool, str]:
+        logging.info('[stop-trusted-job-2pc] abort')
+        return True, ''
diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_stopper_test.py b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_stopper_test.py
new file mode 100644
index 000000000..f6cadfec0
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/two_pc/trusted_job_stopper_test.py
@@ -0,0 +1,88 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import unittest +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData, StopTrustedJobData +from fedlearner_webconsole.tee.models import TrustedJob, TrustedJobStatus +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.two_pc.trusted_job_stopper import TrustedJobStopper + + +class TrustedJobStopperTest(NoWebServerTestCase): + + def setUp(self) -> None: + super().setUp() + with db.session_scope() as session: + trusted_job1 = TrustedJob(id=1, uuid='uuid1', job_id=1) + job1 = Job(id=1, + name='job-name1', + job_type=JobType.CUSTOMIZED, + project_id=1, + workflow_id=0, + state=JobState.STARTED) + trusted_job2 = TrustedJob(id=2, uuid='uuid2', job_id=2) + job2 = Job(id=2, + name='job-name2', + job_type=JobType.CUSTOMIZED, + project_id=1, + workflow_id=0, + state=JobState.WAITING) + session.add_all([trusted_job1, trusted_job2, job1, job2]) + session.commit() + + @staticmethod + def get_transaction_data(uuid: str): + return TransactionData(stop_trusted_job_data=StopTrustedJobData(uuid=uuid)) + + def test_prepare(self): + with db.session_scope() as session: + # successful + data = self.get_transaction_data(uuid='uuid1') + stopper = TrustedJobStopper(session, '13', data) + flag, msg = stopper.prepare() + self.assertTrue(flag) + # fail due to trusted job not found + data = self.get_transaction_data(uuid='not-exist') + stopper = TrustedJobStopper(session, '13', data) + flag, msg = stopper.prepare() + self.assertFalse(flag) + # fail due to status not valid + data = self.get_transaction_data(uuid='uuid2') + stopper = TrustedJobStopper(session, '13', data) + flag, msg = stopper.prepare() + self.assertFalse(flag) + + def test_commit(self): + with db.session_scope() as session: + # successful + data = self.get_transaction_data(uuid='uuid1') + stopper = TrustedJobStopper(session, '13', data) + stopper.commit() + # status not valid + data = self.get_transaction_data(uuid='uuid2') + stopper = TrustedJobStopper(session, '13', data) + stopper.commit() + session.commit() + with db.session_scope() as session: + trusted_job1 = session.query(TrustedJob).get(1) + self.assertEqual(trusted_job1.status, TrustedJobStatus.STOPPED) + trusted_job2 = session.query(TrustedJob).get(2) + self.assertEqual(trusted_job2.status, TrustedJobStatus.PENDING) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/two_pc/workflow_state_controller.py b/web_console_v2/api/fedlearner_webconsole/two_pc/workflow_state_controller.py new file mode 100644 index 000000000..2892321b0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/two_pc/workflow_state_controller.py @@ -0,0 +1,86 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
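+#
+# Sketch of the transaction payload this controller consumes (field names
+# come from the code below; the concrete message type behind
+# transit_workflow_state_data is an assumption):
+#
+#     data = TransactionData()
+#     data.transit_workflow_state_data.workflow_uuid = 'workflow-uuid'
+#     data.transit_workflow_state_data.target_state = 'STOPPED'  # or 'RUNNING'
+#     controller = WorkflowStateController(session, tid, data)
+#     succeeded, message = controller.prepare()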
+
+# coding: utf-8
+import logging
+from typing import Tuple
+
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.workflow.service import WorkflowService
+from fedlearner_webconsole.workflow.models import Workflow, WorkflowState
+from fedlearner_webconsole.proto.two_pc_pb2 import TransactionData
+from fedlearner_webconsole.two_pc.resource_manager import ResourceManager
+from fedlearner_webconsole.workflow.workflow_controller import start_workflow_locally, stop_workflow_locally
+
+
+class WorkflowStateController(ResourceManager):
+
+    def __init__(self, session: Session, tid: str, data: TransactionData):
+        super().__init__(tid, data)
+        assert data.transit_workflow_state_data is not None
+        self._data = data.transit_workflow_state_data
+        self._session = session
+        self._state_convert_map = {
+            WorkflowState.RUNNING: lambda workflow: start_workflow_locally(self._session, workflow),
+            WorkflowState.STOPPED: lambda workflow: stop_workflow_locally(self._session, workflow),
+        }
+
+    def prepare(self) -> Tuple[bool, str]:
+        workflow = self._session.query(Workflow).filter_by(uuid=self._data.workflow_uuid).first()
+        if workflow is None:
+            message = f'failed to find workflow, uuid: {self._data.workflow_uuid}'
+            logging.warning(f'[workflow state 2pc] prepare: {message}')
+            return False, message
+
+        # Guard the enum lookup: WorkflowState[...] raises KeyError on unknown names.
+        target_state = WorkflowState.__members__.get(self._data.target_state)
+        if target_state is None or target_state not in self._state_convert_map:
+            message = f'illegal target state {self._data.target_state}, uuid: {self._data.workflow_uuid}'
+            logging.warning(f'[workflow state 2pc] prepare: {message}')
+            return False, message
+        if not workflow.can_transit_to(target_state):
+            message = (f'changing workflow state from {workflow.state.name} to {self._data.target_state} '
+                       f'is forbidden, uuid: {self._data.workflow_uuid}')
+            logging.warning(f'[workflow state 2pc] prepare: {message}')
+            return False, message
+
+        if target_state == WorkflowState.STOPPED:
+            return True, ''
+
+        is_valid, info = WorkflowService(self._session).validate_workflow(workflow)
+        if not is_valid:
+            job_name, validate_e = info
+            message = (f'invalid variable when trying to format the job {job_name}: {str(validate_e)}, '
+                       f'uuid: {self._data.workflow_uuid}')
+            logging.warning(f'[workflow state 2pc] prepare: {message}')
+            return False, message
+        return True, ''
+
+    def commit(self) -> Tuple[bool, str]:
+        workflow = self._session.query(Workflow).filter_by(uuid=self._data.workflow_uuid).first()
+        if workflow.is_invalid():
+            message = 'workflow is already invalidated by participant'
+            logging.error(f'[workflow state 2pc] commit: {message}, uuid: {self._data.workflow_uuid}')
+            raise ValueError(message)
+        try:
+            self._state_convert_map[WorkflowState[self._data.target_state]](workflow)
+        except RuntimeError as e:
+            logging.error(f'[workflow state 2pc] commit: {e}, uuid: {self._data.workflow_uuid}')
+            raise
+        return True, ''
+
+    def abort(self) -> Tuple[bool, str]:
+        logging.info('[workflow state 2pc] abort')
+        return True, ''
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/utils/BUILD.bazel
new file mode 100644
index 000000000..5db582165
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/BUILD.bazel
@@ -0,0 +1,610 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+package(default_visibility = [
+    "//web_console_v2/api:console_api_package",
+])
+
+py_library(
+    name = "app_version_lib",
+    srcs = ["app_version.py"],
+    imports = ["../.."],
+ deps = ["//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto"], +) + +py_test( + name = "app_version_test", + srcs = ["app_version_test.py"], + imports = ["../.."], + main = "app_version_test.py", + deps = [ + ":app_version_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_library( + name = "pp_base64_lib", + srcs = ["pp_base64.py"], + imports = ["../.."], +) + +py_test( + name = "pp_base64_test", + srcs = ["pp_base64_test.py"], + imports = ["../.."], + main = "pp_base64_test.py", + deps = [ + ":pp_base64_lib", + ], +) + +py_library( + name = "const_lib", + srcs = ["const.py"], + imports = ["../.."], +) + +py_library( + name = "pp_datetime_lib", + srcs = ["pp_datetime.py"], + imports = ["../.."], + deps = ["@common_python_dateutil//:pkg"], +) + +py_test( + name = "pp_datetime_test", + srcs = ["pp_datetime_test.py"], + imports = ["../.."], + main = "pp_datetime_test.py", + deps = [ + ":pp_datetime_lib", + ], +) + +py_library( + name = "domain_name_lib", + srcs = ["domain_name.py"], + imports = ["../.."], +) + +py_test( + name = "domain_name_test", + srcs = ["domain_name_test.py"], + imports = ["../.."], + main = "domain_name_test.py", + deps = [ + ":domain_name_lib", + ], +) + +py_library( + name = "es_lib", + srcs = [ + "es.py", + "es_misc.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "@common_elasticsearch//:pkg", + ], +) + +py_library( + name = "file_lib", + srcs = [ + "file_manager.py", + "file_operator.py", + "file_tree.py", + "stream_tars.py", + "stream_untars.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_fsspec//:pkg", + # this is needed when using fsspec hdfs feature. 
+ "@common_pyarrow//:pkg", # keep + "@common_tensorflow//:pkg", + ], +) + +py_test( + name = "file_lib_test", + size = "small", + srcs = ["file_manager_test.py"], + imports = ["../.."], + main = "file_manager_test.py", + deps = [ + ":file_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + "@common_tensorflow//:pkg", + ], +) + +py_test( + name = "file_operator_test", + size = "small", + srcs = ["file_operator_test.py"], + data = [ + "//web_console_v2/api/testing/test_data", + "//web_console_v2/api/testing/test_data/algorithm", + ], + imports = ["../.."], + main = "file_operator_test.py", + deps = [ + ":file_lib", + "//web_console_v2/api:envs_lib", + ], +) + +py_test( + name = "file_tree_test", + size = "small", + srcs = ["file_tree_test.py"], + imports = ["../.."], + main = "file_tree_test.py", + deps = [ + ":file_lib", + "//web_console_v2/api/testing:common_lib", + "//web_console_v2/api/testing:fake_lib", + "@common_tensorflow//:pkg", + ], +) + +py_test( + name = "tars_test", + srcs = ["tars_test.py"], + imports = ["../.."], + main = "tars_test.py", + deps = [ + ":file_lib", + ], +) + +py_library( + name = "filtering_lib", + srcs = ["filtering.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_pyparsing//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "filtering_test", + size = "small", + srcs = ["filtering_test.py"], + imports = ["../.."], + main = "filtering_test.py", + deps = [ + ":filtering_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "flask_utils_lib", + srcs = ["flask_utils.py"], + imports = ["../.."], + deps = [ + ":filtering_lib", + ":proto_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "flask_utils_test", + size = "medium", + srcs = ["flask_utils_test.py"], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "flask_utils_test.py", + deps = [ + ":flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "hooks_lib", + srcs = ["hooks.py"], + imports = ["../.."], + deps = [ + ":metrics_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:api_latency_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:middlewares_lib", + "//web_console_v2/api/fedlearner_webconsole/middleware:request_id_lib", + ], +) + +py_test( + name = "hooks_test", + srcs = ["hooks_test.py"], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "hooks_test.py", + deps = [ + ":hooks_lib", + ], +) + +py_library( + name = "images_lib", + srcs = ["images.py"], + imports = ["../.."], + deps = ["//web_console_v2/api/fedlearner_webconsole/setting:service_lib"], +) + +py_test( + name = "images_test", + srcs = 
["images_test.py"], + imports = ["../.."], + main = "images_test.py", + deps = [ + ":images_lib", + ], +) + +py_library( + name = "job_metrics_lib", + srcs = ["job_metrics.py"], + imports = ["../.."], + deps = [ + ":file_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_tensorflow//:pkg", + ], +) + +py_library( + name = "kibana_lib", + srcs = ["kibana.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "@common_prison//:pkg", + "@common_pytz//:pkg", + "@common_requests//:pkg", + ], +) + +py_test( + name = "kibana_test", + srcs = ["kibana_test.py"], + imports = ["../.."], + main = "kibana_test.py", + deps = [ + ":kibana_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + ], +) + +py_library( + name = "metrics_lib", + srcs = ["metrics.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api:envs_lib", + "@common_opentelemetry_exporter_otlp//:pkg", + "@common_opentelemetry_sdk//:pkg", + ], +) + +py_test( + name = "metrics_test", + size = "small", + srcs = ["metrics_test.py"], + imports = ["../.."], + main = "metrics_test.py", + deps = [ + ":metrics_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "mixins_lib", + srcs = ["mixins.py"], + imports = ["../.."], + deps = [ + ":pp_datetime_lib", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "mixins_test", + srcs = ["mixins_test.py"], + imports = ["../.."], + main = "mixins_test.py", + deps = [ + ":mixins_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "paginate_lib", + srcs = ["paginate.py"], + imports = ["../.."], + deps = ["@common_sqlalchemy//:pkg"], +) + +py_test( + name = "paginate_lib_test", + size = "small", + srcs = ["paginate_test.py"], + imports = ["../.."], + main = "paginate_test.py", + deps = [ + ":paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "pp_flatten_dict_lib", + srcs = ["pp_flatten_dict.py"], + imports = ["../.."], + deps = [ + "@common_flatten_dict//:pkg", + "@common_six//:pkg", + ], +) + +py_test( + name = "pp_flatten_dict_test", + srcs = ["pp_flatten_dict_test.py"], + imports = ["../.."], + main = "pp_flatten_dict_test.py", + deps = [ + ":pp_flatten_dict_lib", + ], +) + +py_library( + name = "pp_yaml_lib", + srcs = ["pp_yaml.py"], + imports = ["../.."], + deps = [ + ":const_lib", + ":pp_flatten_dict_lib", + ":system_envs_lib", + "//web_console_v2/api/fedlearner_webconsole/setting:service_lib", + "@common_simpleeval//:pkg", + ], +) + +py_test( + name = "pp_yaml_test", + srcs = ["pp_yaml_test.py"], + imports = ["../.."], + main = "pp_yaml_test.py", + deps = [ + ":pp_yaml_lib", + ], +) + +py_library( + name = "proto_lib", + srcs = ["proto.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "proto_test", + srcs = ["proto_test.py"], + 
imports = ["../.."], + main = "proto_test.py", + deps = [ + ":proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "resource_name_lib", + srcs = ["resource_name.py"], + imports = ["../.."], +) + +py_test( + name = "resource_name_test", + srcs = ["resource_name_test.py"], + imports = ["../.."], + main = "resource_name_test.py", + deps = [ + ":resource_name_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "schema_lib", + srcs = ["schema.py"], + imports = ["../.."], + deps = ["//web_console_v2/api/fedlearner_webconsole:exceptions_lib"], +) + +py_test( + name = "schema_test", + srcs = ["schema_test.py"], + imports = ["../.."], + main = "schema_test.py", + deps = [ + ":schema_lib", + ], +) + +py_library( + name = "sorting_lib", + srcs = ["sorting.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "sorting_test", + srcs = ["sorting_test.py"], + imports = ["../.."], + main = "sorting_test.py", + deps = [ + ":sorting_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "swagger_lib", + srcs = ["swagger.py"], + imports = ["../.."], +) + +py_test( + name = "swagger_test", + srcs = ["swagger_test.py"], + imports = ["../.."], + main = "swagger_test.py", + deps = [ + ":swagger_lib", + ], +) + +py_library( + name = "system_envs_lib", + srcs = ["system_envs.py"], + imports = ["../.."], + deps = ["//web_console_v2/api:envs_lib"], +) + +py_test( + name = "system_envs_test", + srcs = ["system_envs_test.py"], + imports = ["../.."], + main = "system_envs_test.py", + deps = [ + ":system_envs_lib", + ], +) + +py_library( + name = "tfrecords_reader_lib", + srcs = ["tfrecords_reader.py"], + imports = ["../.."], + deps = ["@common_tensorflow//:pkg"], +) + +py_test( + name = "tfrecord_test", + srcs = ["tfrecord_test.py"], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "tfrecord_test.py", + deps = [ + ":tfrecords_reader_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_library( + name = "pp_time_lib", + srcs = ["pp_time.py"], + imports = ["../.."], +) + +py_library( + name = "validator_lib", + srcs = ["validator.py"], + imports = ["../.."], +) + +py_test( + name = "validator_test", + srcs = ["validator_test.py"], + imports = ["../.."], + main = "validator_test.py", + deps = [ + ":validator_lib", + ], +) + +py_library( + name = "workflow_lib", + srcs = ["workflow.py"], + imports = ["../.."], + deps = ["//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto"], +) + +py_test( + name = "workflow_test", + srcs = ["workflow_test.py"], + imports = ["../.."], + main = "workflow_test.py", + deps = [ + ":workflow_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "process_utils_lib", + srcs = ["process_utils.py"], + imports = ["../.."], + deps = [ + ":hooks_lib", + ], +) + +py_test( + name = "process_utils_test", + srcs = ["process_utils_test.py"], + imports = ["../.."], + main = "process_utils_test.py", + deps = [ + ":process_utils_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/app_version.py 
b/web_console_v2/api/fedlearner_webconsole/utils/app_version.py
new file mode 100644
index 000000000..ee6b94f4d
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/app_version.py
@@ -0,0 +1,121 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import re
+from typing import Optional
+
+from fedlearner_webconsole.proto import common_pb2
+
+# Match a leading "major.minor.patch"; the dots are escaped so that only
+# literal dots are accepted.
+_VERSION_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)')
+
+
+class Version(object):
+
+    def __init__(self, version: Optional[str] = None):
+        self._version = version
+        self._major = None
+        self._minor = None
+        self._patch = None
+        if version is not None:
+            self._parse_version(version)
+
+    def _parse_version(self, version: str):
+        matches = _VERSION_RE.match(version)
+        if matches:
+            self._major = int(matches.group(1))
+            self._minor = int(matches.group(2))
+            self._patch = int(matches.group(3))
+
+    @property
+    def version(self):
+        return self._version
+
+    @property
+    def major(self):
+        return self._major
+
+    @property
+    def minor(self):
+        return self._minor
+
+    @property
+    def patch(self):
+        return self._patch
+
+    def is_standard(self):
+        return self.major is not None and self.minor is not None and self.patch is not None
+
+    def __eq__(self, other):
+        assert isinstance(other, Version)
+        if self.is_standard():
+            return self.major == other.major and self.minor == other.minor and self.patch == other.patch
+        # Non-standard versions are only comparable by their raw string.
+        return self.version == other.version
+
+    def __ne__(self, other):
+        assert isinstance(other, Version)
+        return not self.__eq__(other)
+
+    def __gt__(self, other):
+        assert isinstance(other, Version)
+        if not self.is_standard() or not other.is_standard():
+            # Not comparable: non-standard versions are never ordered.
+            return False
+        if self.major > other.major:
+            return True
+        if self.major < other.major:
+            return False
+        if self.minor > other.minor:
+            return True
+        if self.minor < other.minor:
+            return False
+        return self.patch > other.patch
+
+    def __lt__(self, other):
+        assert isinstance(other, Version)
+        if not self.is_standard() or not other.is_standard():
+            # Not comparable: non-standard versions are never ordered.
+            return False
+        return not self.__ge__(other)
+
+    def __ge__(self, other):
+        assert isinstance(other, Version)
+        return self.__gt__(other) or self.__eq__(other)
+
+    def __le__(self, other):
+        assert isinstance(other, Version)
+        return self.__lt__(other) or self.__eq__(other)
+
+
+class ApplicationVersion(object):
+    """Version of the application.
+ + Attributes: + revision: Commit id of the head + branch_name: Branch name of the image + pub_date: Date when image is built in ISO format + version: Semantic version + """ + + def __init__(self, revision: str, branch_name: str, pub_date: str, version: Optional[str] = None): + self.revision = revision + self.branch_name = branch_name + self.pub_date = pub_date + self.version = Version(version) + + def to_proto(self) -> common_pb2.ApplicationVersion: + return common_pb2.ApplicationVersion(pub_date=self.pub_date, + revision=self.revision, + branch_name=self.branch_name, + version=self.version.version) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/app_version_test.py b/web_console_v2/api/fedlearner_webconsole/utils/app_version_test.py new file mode 100644 index 000000000..e0ea0298f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/app_version_test.py @@ -0,0 +1,124 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest + +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.utils.app_version import Version, ApplicationVersion + + +class VersionTest(unittest.TestCase): + + def test_version_number(self): + v = Version() + self.assertIsNone(v.version) + v = Version('non') + self.assertEqual(v.version, 'non') + self.assertIsNone(v.major) + self.assertIsNone(v.minor) + self.assertIsNone(v.patch) + v = Version('2.1.33.3') + self.assertEqual(v.version, '2.1.33.3') + self.assertEqual(v.major, 2) + self.assertEqual(v.minor, 1) + self.assertEqual(v.patch, 33) + + def test_is_standard(self): + self.assertTrue(Version('2.1.33').is_standard()) + self.assertTrue(Version('2.1.33.1').is_standard()) + self.assertFalse(Version('non').is_standard()) + + # Tests == and != + def test_eq_and_ne(self): + v1 = Version('non') + v2 = Version('non') + v3 = Version('2.1.33.1') + v4 = Version('2.1.33') + v5 = Version('2.1.34') + self.assertTrue(v1 == v2) + self.assertFalse(v1 != v2) + self.assertTrue(v3 == v4) + self.assertFalse(v3 != v4) + self.assertFalse(v1 == v3) + self.assertTrue(v1 != v3) + self.assertFalse(v4 == v5) + self.assertTrue(v4 != v5) + + # Tests > + def test_gt(self): + v1 = Version('nffff') + v2 = Version('2.1.33') + v3 = Version('2.1.34') + v4 = Version('2.2.33') + self.assertFalse(v2 > v1) + self.assertTrue(v3 > v2) + self.assertFalse(v2 > v3) + self.assertTrue(v4 > v3) + self.assertFalse(v3 > v4) + self.assertTrue(v4 > v2) + self.assertFalse(v2 > v4) + + # Tests < + def test_lt(self): + v1 = Version() + v2 = Version('1.1.33') + v3 = Version('2.1.34') + v4 = Version('2.2.34') + self.assertFalse(v1 < v2) + self.assertTrue(v2 < v3) + self.assertFalse(v3 < v2) + self.assertTrue(v3 < v4) + self.assertFalse(v4 < v3) + self.assertTrue(v2 < v4) + self.assertFalse(v4 < v2) + + # Tests >= + def test_ge(self): + v1 = Version('nffff') + v2 = Version('2.1.33') + v3 = Version('2.1.34') + self.assertFalse(v1 >= v2) + self.assertFalse(v2 >= v1) + self.assertTrue(v3 >= v2) + 
self.assertFalse(v2 >= v3) + + # Tests <= + def test_le(self): + v1 = Version() + v2 = Version('2.1.33') + v3 = Version('2.1.34') + self.assertFalse(v1 <= v2) + self.assertFalse(v2 <= v1) + self.assertTrue(v2 <= v3) + self.assertFalse(v3 <= v2) + + +class ApplicationVersionTest(unittest.TestCase): + + def test_to_proto(self): + v = ApplicationVersion(revision='1234234234', + branch_name='dev', + version='non-standard', + pub_date='Fri Jul 16 12:23:19 CST 2021') + self.assertEqual( + v.to_proto(), + common_pb2.ApplicationVersion(revision='1234234234', + branch_name='dev', + version='non-standard', + pub_date='Fri Jul 16 12:23:19 CST 2021')) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base64.py b/web_console_v2/api/fedlearner_webconsole/utils/base64.py deleted file mode 100644 index 06272b638..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/base64.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -from base64 import b64encode, b64decode - - -def base64encode(s: str) -> str: - return b64encode(s.encode('UTF-8')).decode('UTF-8') - - -def base64decode(s: str) -> str: - return b64decode(s).decode('UTF-8') diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/utils/base_model/BUILD.bazel new file mode 100644 index 000000000..18883a3c6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/BUILD.bazel @@ -0,0 +1,64 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "base_model_lib", + srcs = [ + "auth_model.py", + "review_ticket_and_auth_model.py", + "review_ticket_model.py", + "softdelete_model.py", + ], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "auth_model_test", + size = "small", + srcs = [ + "auth_model_test.py", + ], + imports = ["../../.."], + main = "auth_model_test.py", + deps = [ + ":base_model_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/testing:common_lib", + ], +) + +py_test( + name = "review_ticket_and_auth_model_test", + size = "small", + srcs = [ + "review_ticket_and_auth_model_test.py", + ], + imports = ["../../.."], + main = "review_ticket_and_auth_model_test.py", + deps = [ + ":base_model_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_test( + name = "review_ticket_model_test", + srcs = [ + "review_ticket_model_test.py", + ], + imports = ["../../.."], + main = "review_ticket_model_test.py", + deps = [ + ":base_model_lib", + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/auth_model.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/auth_model.py new file mode 100644 index 000000000..1960fde35 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/auth_model.py @@ -0,0 +1,30 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum +from sqlalchemy import Column, Enum + + +class AuthStatus(enum.Enum): + PENDING = 'PENDING' + AUTHORIZED = 'AUTHORIZED' + WITHDRAW = 'WITHDRAW' + + +class AuthModel(object): + + auth_status = Column(Enum(AuthStatus, length=32, native_enum=False, create_constraint=False), + default=AuthStatus.PENDING, + comment='auth status') diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/auth_model_test.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/auth_model_test.py new file mode 100644 index 000000000..3c3048252 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/auth_model_test.py @@ -0,0 +1,42 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from testing.common import NoWebServerTestCase +from fedlearner_webconsole.utils.base_model.auth_model import AuthModel, AuthStatus +from fedlearner_webconsole.db import db, default_table_args + + +class TestModel(db.Model, AuthModel): + __tablename__ = 'test_model' + __table_args__ = (default_table_args('This is webconsole dataset table')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + + +class AuthModelTest(NoWebServerTestCase): + + def test_mixins(self): + with db.session_scope() as session: + model = TestModel(auth_status=AuthStatus.PENDING) + session.add(model) + session.commit() + with db.session_scope() as session: + models = session.query(TestModel).all() + self.assertEqual(len(models), 1) + self.assertEqual(models[0].auth_status, AuthStatus.PENDING) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_and_auth_model.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_and_auth_model.py new file mode 100644 index 000000000..b11fc5f99 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_and_auth_model.py @@ -0,0 +1,58 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
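+#
+# AuthFrontendState (defined below) is derived, in order, from the ticket
+# status and the participants' auth statuses:
+#
+#     ticket PENDING                  -> TICKET_PENDING
+#     ticket DECLINED                 -> TICKET_DECLINED
+#     ticket APPROVED, all authorized -> AUTH_APPROVED
+#     anything else                   -> AUTH_PENDING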
+# + +import enum +import sqlalchemy as sa +from google.protobuf import text_format + +from fedlearner_webconsole.utils.base_model.auth_model import AuthModel, AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import ReviewTicketModel, TicketStatus +from fedlearner_webconsole.proto import project_pb2 + + +class AuthFrontendState(enum.Enum): + TICKET_PENDING = 'TICKET_PENDING' + TICKET_DECLINED = 'TICKET_DECLINED' + AUTH_PENDING = 'AUTH_PENDING' + AUTH_APPROVED = 'AUTH_APPROVED' + + +class ReviewTicketAndAuthModel(AuthModel, ReviewTicketModel): + + participants_info = sa.Column(sa.Text(), comment='participants info') + + @property + def auth_frontend_state(self) -> AuthFrontendState: + if self.ticket_status == TicketStatus.PENDING: + return AuthFrontendState.TICKET_PENDING + if self.ticket_status == TicketStatus.DECLINED: + return AuthFrontendState.TICKET_DECLINED + if self.ticket_status == TicketStatus.APPROVED and self.is_all_participants_authorized(): + return AuthFrontendState.AUTH_APPROVED + return AuthFrontendState.AUTH_PENDING + + def set_participants_info(self, participants_info: project_pb2.ParticipantsInfo): + self.participants_info = text_format.MessageToString(participants_info) + + def get_participants_info(self) -> project_pb2.ParticipantsInfo: + participants_info = project_pb2.ParticipantsInfo() + if self.participants_info is not None: + participants_info = text_format.Parse(self.participants_info, project_pb2.ParticipantsInfo()) + return participants_info + + def is_all_participants_authorized(self) -> bool: + participants_info_list = self.get_participants_info().participants_map.values() + return all( + participant_info.auth_status == AuthStatus.AUTHORIZED.name for participant_info in participants_info_list) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_and_auth_model_test.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_and_auth_model_test.py new file mode 100644 index 000000000..0ed9c16b3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_and_auth_model_test.py @@ -0,0 +1,106 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +from unittest.mock import MagicMock, patch + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.utils.base_model.review_ticket_and_auth_model import ReviewTicketAndAuthModel, \ + AuthFrontendState +from fedlearner_webconsole.utils.base_model.auth_model import AuthStatus +from fedlearner_webconsole.utils.base_model.review_ticket_model import TicketStatus +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.proto import project_pb2 + + +class TestModel(db.Model, ReviewTicketAndAuthModel): + __tablename__ = 'test_model' + __table_args__ = (default_table_args('This is webconsole test_model table')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + + +class ReviewTicketAndAuthModelTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + with db.session_scope() as session: + model = TestModel(auth_status=AuthStatus.PENDING) + session.add(model) + session.commit() + + @patch( + 'fedlearner_webconsole.utils.base_model.review_ticket_and_auth_model.ReviewTicketAndAuthModel.' \ + 'is_all_participants_authorized' + ) + def test_auth_frontend_state(self, mock_authorized: MagicMock): + with db.session_scope() as session: + test_model: TestModel = session.query(TestModel).get(1) + mock_authorized.return_value = True + self.assertEqual(test_model.auth_frontend_state, AuthFrontendState.AUTH_APPROVED) + + mock_authorized.return_value = False + self.assertEqual(test_model.auth_frontend_state, AuthFrontendState.AUTH_PENDING) + + test_model.ticket_status = TicketStatus.PENDING + self.assertEqual(test_model.auth_frontend_state, AuthFrontendState.TICKET_PENDING) + + test_model.ticket_status = TicketStatus.DECLINED + self.assertEqual(test_model.auth_frontend_state, AuthFrontendState.TICKET_DECLINED) + + def test_set_and_get_participants_info(self): + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + with db.session_scope() as session: + test_model = session.query(TestModel).get(1) + test_model.set_participants_info(participants_info) + session.commit() + with db.session_scope() as session: + test_model = session.query(TestModel).get(1) + self.assertEqual(test_model.get_participants_info(), participants_info) + + def test_is_all_participants_authorized(self): + # test no participants_info + with db.session_scope() as session: + test_model = session.query(TestModel).get(1) + self.assertTrue(test_model.is_all_participants_authorized()) + + # test all authorized + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name), + 'test_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + with db.session_scope() as session: + test_model = session.query(TestModel).get(1) + test_model.set_participants_info(participants_info) + self.assertTrue(test_model.is_all_participants_authorized()) + + # test not all authorized + participants_info = project_pb2.ParticipantsInfo( + participants_map={ + 'test_1': project_pb2.ParticipantInfo(auth_status=AuthStatus.PENDING.name), + 'test_2': project_pb2.ParticipantInfo(auth_status=AuthStatus.AUTHORIZED.name) + }) + with db.session_scope() as session: + test_model = session.query(TestModel).get(1) + test_model.set_participants_info(participants_info) 
+        self.assertFalse(test_model.is_all_participants_authorized())
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_model.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_model.py
new file mode 100644
index 000000000..0b66d16f0
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_model.py
@@ -0,0 +1,34 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import enum
+import sqlalchemy as sa
+
+
+class TicketStatus(enum.Enum):
+    PENDING = 'PENDING'
+    APPROVED = 'APPROVED'
+    DECLINED = 'DECLINED'
+
+
+class ReviewTicketModel(object):
+
+    ticket_uuid = sa.Column(sa.String(255),
+                            nullable=True,
+                            comment='review ticket uuid, empty if review function is disabled')
+    ticket_status = sa.Column(sa.Enum(TicketStatus, length=32, native_enum=False, create_constraint=False),
+                              default=TicketStatus.APPROVED,
+                              server_default=TicketStatus.APPROVED.name,
+                              comment='review ticket status')
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_model_test.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_model_test.py
new file mode 100644
index 000000000..54cfcc232
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/review_ticket_model_test.py
@@ -0,0 +1,43 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
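Before the unit test below, a minimal sketch of how this mixin is meant to be consumed. `ReviewTicketModel` is a plain-object declarative mixin, so SQLAlchemy copies its `Column` attributes into every model that inherits it. The `Project` model, SQLite engine and session setup here are hypothetical, for illustration only:

```python
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from fedlearner_webconsole.utils.base_model.review_ticket_model import ReviewTicketModel, TicketStatus

Base = declarative_base()


class Project(Base, ReviewTicketModel):  # hypothetical model, not part of this change
    __tablename__ = 'projects'
    id = sa.Column(sa.Integer, primary_key=True)


engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Project(ticket_uuid='u1', ticket_status=TicketStatus.PENDING))
session.commit()
# The non-native Enum column compares directly against TicketStatus members.
assert session.query(Project).filter(Project.ticket_status == TicketStatus.PENDING).count() == 1
```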
+#
+
+import unittest
+from testing.no_web_server_test_case import NoWebServerTestCase
+from fedlearner_webconsole.utils.base_model.review_ticket_model import ReviewTicketModel, TicketStatus
+from fedlearner_webconsole.db import db, default_table_args
+
+
+class TestModel(db.Model, ReviewTicketModel):
+    __tablename__ = 'test_model'
+    __table_args__ = (default_table_args('This is webconsole test_model table'))
+    id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True)
+
+
+class ReviewTicketModelTest(NoWebServerTestCase):
+
+    def test_mixins(self):
+        with db.session_scope() as session:
+            model = TestModel(ticket_uuid='u1234', ticket_status=TicketStatus.APPROVED)
+            session.add(model)
+            session.commit()
+        with db.session_scope() as session:
+            models = session.query(TestModel).all()
+            self.assertEqual(len(models), 1)
+            self.assertEqual(models[0].ticket_status, TicketStatus.APPROVED)
+            self.assertEqual(models[0].ticket_uuid, 'u1234')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/base_model/softdelete_model.py b/web_console_v2/api/fedlearner_webconsole/utils/base_model/softdelete_model.py
new file mode 100644
index 000000000..c6563243e
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/base_model/softdelete_model.py
@@ -0,0 +1,21 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from sqlalchemy import DateTime, Column
+
+
+class SoftDeleteModel(object):
+
+    deleted_at = Column(DateTime(timezone=True), comment='deleted time')
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/certificate.py b/web_console_v2/api/fedlearner_webconsole/utils/certificate.py
deleted file mode 100644
index 9d400e94f..000000000
--- a/web_console_v2/api/fedlearner_webconsole/utils/certificate.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -# coding: utf-8 -import os -import json -from base64 import b64encode - -from fedlearner_webconsole.utils.k8s_client import k8s_client - - -def create_image_pull_secret(): - """Create certificate for image hub (Once for a system)""" - image_hub_url = os.environ.get('IMAGE_HUB_URL') - image_hub_username = os.environ.get('IMAGE_HUB_USERNAME') - image_hub_password = os.environ.get('IMAGE_HUB_PASSWORD') - if image_hub_url is None or image_hub_username is None or \ - image_hub_password is None: - return - - # using base64 to encode authorization information - encoded_username_password = str(b64encode( - '{}:{}'.format(image_hub_username, image_hub_password) - )) - encoded_image_cert = str(b64encode( - json.dumps({ - 'auths': { - image_hub_url: { - 'username': image_hub_username, - 'password': image_hub_password, - 'auth': encoded_username_password - } - }})), 'utf-8') - - k8s_client.create_or_update_secret( - data={ - '.dockerconfigjson': encoded_image_cert - }, - metadata={ - 'name': 'regcred', - 'namespace': 'default' - }, - secret_type='kubernetes.io/dockerconfigjson', - name='regcred' - ) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/const.py b/web_console_v2/api/fedlearner_webconsole/utils/const.py new file mode 100644 index 000000000..994aa0d72 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/const.py @@ -0,0 +1,63 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# coding: utf-8
+
+# API
+API_VERSION = '/api/v2'
+
+# Pagination
+DEFAULT_PAGE = 1
+DEFAULT_PAGE_SIZE = 50
+
+# name of preset data join workflow template
+SYS_DATA_JOIN_TEMPLATE = [
+    # data join
+    'sys-preset-data-join',
+    'sys-preset-fe-data-join',
+    # psi data join
+    'sys-preset-psi-data-join',
+    'sys-preset-fe-psi-data-join',
+    # light client
+    'sys-preset-light-psi-data-join',
+    # TODO(xiangyuxuan.prs): change psi job type from TRANSFORMER to PSI_DATA_JOIN when removing sys-preset-psi-data-join
+    # psi data join with analyzer
+    'sys-preset-psi-data-join-analyzer',
+    'sys-preset-converter-analyzer'
+]
+# name of preset model workflow template
+SYS_PRESET_VERTICAL_NN_TEMPLATE = 'sys-preset-nn-model'
+SYS_PRESET_HORIZONTAL_NN_TEMPLATE = 'sys-preset-nn-horizontal-model'
+SYS_PRESET_HORIZONTAL_NN_EVAL_TEMPLATE = 'sys-preset-nn-horizontal-eval-model'
+SYS_PRESET_TREE_TEMPLATE = 'sys-preset-tree-model'
+
+SYS_PRESET_TEMPLATE = [
+    *SYS_DATA_JOIN_TEMPLATE, SYS_PRESET_VERTICAL_NN_TEMPLATE, SYS_PRESET_HORIZONTAL_NN_TEMPLATE,
+    SYS_PRESET_HORIZONTAL_NN_EVAL_TEMPLATE, SYS_PRESET_TREE_TEMPLATE
+]
+
+# dataset
+DATASET_PREVIEW_NUM = 20
+
+DEFAULT_OWNER = 'no___user'
+
+DEFAULT_OWNER_FOR_JOB_WITHOUT_WORKFLOW = 'no___workflow'
+
+# auth related
+SIGN_IN_INTERVAL_SECONDS = 1800
+MAX_SIGN_IN_ATTEMPTS = 3
+
+SYSTEM_WORKFLOW_CREATOR_USERNAME = 's_y_s_t_e_m'
+
+SSO_HEADER = 'x-pc-auth'
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators.py
deleted file mode 100644
index 3f0a0aee2..000000000
--- a/web_console_v2/api/fedlearner_webconsole/utils/decorators.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License - -# coding=utf-8 - -import logging -from functools import wraps -from traceback import format_exc -import flask_jwt_extended -from flask_jwt_extended.utils import get_current_user -from fedlearner_webconsole.auth.models import Role -from fedlearner_webconsole.exceptions import UnauthorizedException -from envs import Envs - - -def admin_required(f): - @wraps(f) - def wrapper_inside(*args, **kwargs): - current_user = get_current_user() - if current_user.role != Role.ADMIN: - raise UnauthorizedException('only admin can operate this') - return f(*args, **kwargs) - return wrapper_inside - - -def jwt_required(*jwt_args, **jwt_kwargs): - def decorator(f): - if Envs.DEBUG: - @wraps(f) - def wrapper(*args, **kwargs): - return f(*args, **kwargs) - else: - wrapper = flask_jwt_extended.jwt_required( - *jwt_args, **jwt_kwargs)(f) - return wrapper - return decorator - - -def retry_fn(retry_times: int = 3, needed_exceptions=None): - def decorator_retry_fn(f): - # to resolve pylint warning - # Dangerous default value [] as argument (dangerous-default-value) - nonlocal needed_exceptions - if needed_exceptions is None: - needed_exceptions = [Exception] - - @wraps(f) - def wrapper(*args, **kwargs): - for i in range(retry_times): - try: - return f(*args, **kwargs) - except tuple(needed_exceptions): - logging.error('Call function failed, retrying %s times...', - i + 1) - logging.error('Exceptions:\n%s', format_exc()) - logging.error( - 'function name is %s, args are %s, kwargs are %s', - f.__name__, repr(args), repr(kwargs)) - if i == retry_times - 1: - raise - continue - - return wrapper - - return decorator_retry_fn diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/utils/decorators/BUILD.bazel new file mode 100644 index 000000000..57d66a3b4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/BUILD.bazel @@ -0,0 +1,62 @@ +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "decorators_lib", + srcs = [ + "lru_cache.py", + "pp_flask.py", + "retry.py", + ], + imports = ["../../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "@common_flask//:pkg", + "@common_marshmallow//:pkg", + "@common_webargs//:pkg", + ], +) + +py_test( + name = "lru_cache_test", + srcs = [ + "lru_cache_test.py", + ], + imports = ["../../.."], + main = "lru_cache_test.py", + deps = [ + ":decorators_lib", + ], +) + +py_test( + name = "pp_flask_test", + size = "medium", + srcs = [ + "pp_flask_test.py", + ], + imports = ["../../.."], + main = "pp_flask_test.py", + deps = [ + ":decorators_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/testing:common_lib", + "@common_flask//:pkg", + "@common_marshmallow//:pkg", + ], +) + +py_test( + name = "retry_test", + srcs = [ + "retry_test.py", + ], + imports = ["../../.."], + main = "retry_test.py", + deps = [ + ":decorators_lib", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/lru_cache.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators/lru_cache.py new file mode 100644 
index 000000000..bfd92ef03
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/lru_cache.py
@@ -0,0 +1,49 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import functools
+from datetime import datetime, timedelta
+
+
+# TODO(xiangyuxuan): use custom lru to implement cache_clear(key)
+def lru_cache(timeout: int = 600, maxsize: int = 10000):
+    """Extension of functools.lru_cache with a timeout.
+
+    Notice: do not use this decorator on methods of a class, or it will leak memory,
+    since the cache keeps references to the instances:
+    https://stackoverflow.com/questions/1227121/compare-object-instances-for-equality-by-their-attributes
+
+    Args:
+        timeout (int): Timeout in seconds after which the WHOLE cache is cleared; defaults to 10 minutes.
+        maxsize (int): Maximum size of the cache.
+    """
+
+    def wrapper_cache(func):
+        func = functools.lru_cache(maxsize=maxsize)(func)
+        func.delta = timedelta(seconds=timeout)
+        func.expiration = datetime.utcnow() + func.delta
+
+        @functools.wraps(func)
+        def wrapped_func(*args, **kwargs):
+            if datetime.utcnow() >= func.expiration:
+                func.cache_clear()
+                func.expiration = datetime.utcnow() + func.delta
+
+            return func(*args, **kwargs)
+
+        wrapped_func.cache_clear = func.cache_clear
+        return wrapped_func
+
+    return wrapper_cache
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/lru_cache_test.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators/lru_cache_test.py
new file mode 100644
index 000000000..0776b275c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/lru_cache_test.py
@@ -0,0 +1,57 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
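A quick usage sketch of the timeout semantics before the tests (the `load_config` function is made up): the decorator layers a deadline on top of `functools.lru_cache`, and once the deadline passes the WHOLE cache is dropped, so entries for different arguments expire together rather than per key:

```python
import time

from fedlearner_webconsole.utils.decorators.lru_cache import lru_cache


@lru_cache(timeout=2, maxsize=128)
def load_config(name: str) -> dict:  # hypothetical expensive loader
    print(f'loading {name}...')
    return {'name': name}


load_config('a')  # cache miss: prints
load_config('a')  # cache hit: silent
load_config('b')  # cache miss: prints
time.sleep(2)     # let the deadline pass
load_config('a')  # miss again: the whole cache was cleared, not just the entry for 'a'
```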
+#
+
+import time
+import unittest
+
+from fedlearner_webconsole.utils.decorators.lru_cache import lru_cache
+
+
+class LruCacheTest(unittest.TestCase):
+
+    def test_lru_cache(self):
+        count = 0
+        count2 = 0
+
+        @lru_cache(timeout=1)
+        def test(arg1):
+            nonlocal count
+            count += 1
+            return count
+
+        @lru_cache(timeout=10)
+        def test_another(arg2):
+            nonlocal count2
+            count2 += 1
+            return count2
+
+        self.assertEqual(test(1), 1)
+        self.assertEqual(test(1), 1)
+
+        self.assertEqual(test(-1), 2)
+        self.assertEqual(test(-1), 2)
+
+        self.assertEqual(test_another(1), 1)
+        self.assertEqual(test_another(1), 1)
+
+        # test cache expired
+        time.sleep(1)
+        self.assertEqual(test(1), 3)
+        self.assertEqual(test(-1), 4)
+        self.assertEqual(test_another(1), 1)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/pp_flask.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators/pp_flask.py
new file mode 100644
index 000000000..214d11bda
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/pp_flask.py
@@ -0,0 +1,91 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import re
+from functools import wraps
+from flask import request
+from marshmallow import EXCLUDE
+from webargs.flaskparser import FlaskParser
+
+from fedlearner_webconsole.utils.flask_utils import get_current_user
+from fedlearner_webconsole.auth.models import Role
+from fedlearner_webconsole.exceptions import InvalidArgumentException, \
+    UnauthorizedException
+
+# TODO(xiangyuxuan.prs): validate Kubernetes objects with their own regex
+# [DNS Subdomain Names](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/)
+# Regex to match the pattern:
+# starts/ends with an English/Chinese character or a number;
+# the middle may contain English/Chinese characters, numbers, '-', '_' or '.';
+# max length is 64.
+UNIVERSAL_NAME_PATTERN = r'^[a-zA-Z0-9\u4e00-\u9fa5]' \
+                         r'[a-zA-Z0-9\u4e00-\u9fa5\-_\.]' \
+                         r'{0,62}[a-zA-Z0-9\u4e00-\u9fa5]$'
+MAX_COMMENT_LENGTH = 200
+
+
+def admin_required(f):
+
+    @wraps(f)
+    def wrapper_inside(*args, **kwargs):
+        current_user = get_current_user()
+        if current_user.role != Role.ADMIN:
+            raise UnauthorizedException('only admin can operate this')
+        return f(*args, **kwargs)
+
+    return wrapper_inside
+
+
+def input_validator(f):
+
+    @wraps(f)
+    def wrapper_inside(*args, **kwargs):
+        if hasattr(request, 'content_type') and request.content_type.startswith('multipart/form-data'):
+            params = request.form
+        else:
+            params = request.get_json() or {}
+        name = params.get('name', None)
+        comment = params.get('comment', '')
+        if name is not None:
+            _validate_name(name)
+        if comment:
+            _validate_comment(comment)
+        return f(*args, **kwargs)
+
+    return wrapper_inside
+
+
+def _validate_name(name: str):
+    if re.match(UNIVERSAL_NAME_PATTERN, name) is None:
+        raise InvalidArgumentException(f'Invalid name {name}: must start/end'
+                                       f' with letters, numbers or Chinese'
+                                       f' characters, may contain -, _ or .'
+                                       f' in the middle, and max length is'
+                                       f' 64 characters.')
+
+
+def _validate_comment(comment: str):
+    if len(comment) > MAX_COMMENT_LENGTH:
+        raise InvalidArgumentException(f'Input comment too long, max length is {MAX_COMMENT_LENGTH}')
+
+
+# Ref: https://webargs.readthedocs.io/en/latest/advanced.html#default-unknown
+class _Parser(FlaskParser):
+    DEFAULT_UNKNOWN_BY_LOCATION = {'query': EXCLUDE, 'json': EXCLUDE, 'form': EXCLUDE}
+
+
+parser = _Parser()
+use_args = parser.use_args
+use_kwargs = parser.use_kwargs
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/pp_flask_test.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators/pp_flask_test.py
new file mode 100644
index 000000000..953548d40
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/pp_flask_test.py
@@ -0,0 +1,119 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from http import HTTPStatus
+
+import flask
+import unittest
+from unittest.mock import patch
+
+from marshmallow import fields
+
+from fedlearner_webconsole.auth.models import User, Role
+from fedlearner_webconsole.utils.decorators.pp_flask import admin_required, input_validator, use_args, use_kwargs
+from fedlearner_webconsole.exceptions import InvalidArgumentException, UnauthorizedException
+from fedlearner_webconsole.utils.flask_utils import make_flask_response
+from testing.common import BaseTestCase
+
+
+@admin_required
+def some_authorized_login():
+    return 1
+
+
+@input_validator
+def test_func():
+    return 1
+
+
+class FlaskTest(unittest.TestCase):
+
+    @staticmethod
+    def generator_helper(inject_res):
+        for r in inject_res:
+            yield r
+
+    @patch('fedlearner_webconsole.utils.decorators.pp_flask.get_current_user')
+    def test_admin_required(self, mock_get_current_user):
+        admin = User(id=0, username='admin', password='admin', role=Role.ADMIN)
+        user = User(id=1, username='ada', password='ada', role=Role.USER)
+        mock_get_current_user.return_value = admin
+        self.assertTrue(some_authorized_login() == 1)
+
+        mock_get_current_user.return_value = user
+        self.assertRaises(UnauthorizedException, some_authorized_login)
+
+    def test_input_validator(self):
+        app = flask.Flask(__name__)
+        with app.test_request_context('/', json={'name': 'valid_name', 'comment': 'valid comment'}):
+            self.assertTrue(test_func() == 1)
+        with app.test_request_context('/', json={'name': '', 'comment': 'valid comment'}):
+            self.assertRaises(InvalidArgumentException, test_func)
+        with app.test_request_context('/', json={'name': '???invalid_name', 'comment': 'valid comment'}):
+            self.assertRaises(InvalidArgumentException, test_func)
+        with app.test_request_context('/', json={'name': 'a' * 65, 'comment': 'valid comment'}):
+            self.assertRaises(InvalidArgumentException, test_func)
+        with app.test_request_context('/', json={'name': 'valid_name', 'comment': 'a' * 201}):
+            self.assertRaises(InvalidArgumentException, test_func)
+        with
app.test_request_context('/', json={'name': 'valid_name'}): + self.assertTrue(test_func() == 1) + with app.test_request_context('/', json={'unrelated': '??'}): + self.assertTrue(test_func() == 1) + with app.test_request_context('/', json={'name': 'valid_name.test'}): + self.assertTrue(test_func() == 1) + + +class ParserTest(BaseTestCase): + + def test_unknown_query(self): + + @self.app.route('/hello') + @use_args({'msg': fields.String(required=True)}, location='query') + def test_route(params): + return make_flask_response({'msg': params['msg']}) + + resp = self.get_helper('/hello?msg=123&unknown=fff', use_auth=False) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'msg': '123'}) + + def test_unknown_body(self): + + @self.app.route('/test_create', methods=['POST']) + @use_kwargs({'msg': fields.String(required=True)}, location='json') + def test_route(msg: str): + return make_flask_response({'msg': msg}) + + resp = self.post_helper('/test_create?ufj=4', use_auth=False, data={ + 'msg': 'hello', + 'unknown': 'fasdf', + }) + self.assertEqual(resp.status_code, HTTPStatus.OK) + self.assertResponseDataEqual(resp, {'msg': 'hello'}) + + def test_invalid_parameter(self): + + @self.app.route('/test', methods=['POST']) + @use_kwargs({'n': fields.Integer(required=True)}, location='json') + def test_route(n: int): + return make_flask_response({'n': n}) + + resp = self.post_helper('/test', use_auth=False, data={ + 'n': 'hello', + }) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/retry.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators/retry.py new file mode 100644 index 000000000..6252f013b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/retry.py @@ -0,0 +1,74 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Optional, Callable + +import time +from functools import wraps + + +def _default_need_retry(unused_exception: Exception) -> bool: + del unused_exception + # Retry for all exceptions + return True + + +def retry_fn(retry_times: int = 3, + delay: int = 0, + backoff: float = 1.0, + need_retry: Optional[Callable[[Exception], bool]] = None): + """A function to generate a decorator for retry. + + Args: + retry_times: Times to try. + delay: Intervals in milliseconds between attempts, default is 0 (no delay). + backoff: Multiplier on the delay between attempts, default is 1. For example, if delay is set to 1000, + and backoff is 2, then the first retry will be delayed 1000ms, and second one will be 2000ms, + third one will be 4000ms. + need_retry: A callable function to check if the raised exception will trigger retry or not. 
+    """
+
+    def decorator_retry_fn(f):
+        nonlocal need_retry
+        if need_retry is None:
+            # By default retry for all exceptions
+            need_retry = _default_need_retry
+
+        @wraps(f)
+        def wrapper(*args, **kwargs):  # pylint: disable=inconsistent-return-statements
+            # NOTE: a local copy of the delay is a must-have; if the `delay` parameter itself
+            # were reused, the backoff would accumulate across all executions of the function.
+            local_delay = delay
+            for i in range(retry_times):
+                try:
+                    return f(*args, **kwargs)
+                except Exception as e:  # pylint: disable=broad-except
+                    # Re-raise if there is no need for retry
+                    if not need_retry(e):
+                        raise
+                    logging.exception(
+                        f'Call function failed, retrying {i + 1} times...\n'
+                        f'function name is {f.__name__}, args are {repr(args)}, kwargs are {repr(kwargs)}')
+                    if i == retry_times - 1:
+                        raise
+                    if local_delay > 0:
+                        time.sleep(local_delay / 1000.0)
+                    local_delay = local_delay * backoff
+
+        return wrapper
+
+    return decorator_retry_fn
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/decorators/retry_test.py b/web_console_v2/api/fedlearner_webconsole/utils/decorators/retry_test.py
new file mode 100644
index 000000000..5aa751f19
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/decorators/retry_test.py
@@ -0,0 +1,105 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
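To make the delay/backoff arithmetic concrete, a small sketch ahead of the tests (the flaky RPC is invented): with `delay=1000` and `backoff=2.0`, the sleeps between failed attempts are 1s, 2s, 4s, ..., because the local copy of the delay is multiplied after every retry:

```python
from fedlearner_webconsole.utils.decorators.retry import retry_fn

attempts = 0


@retry_fn(retry_times=4, delay=1000, backoff=2.0, need_retry=lambda e: isinstance(e, TimeoutError))
def flaky_rpc():  # hypothetical unstable call
    global attempts
    attempts += 1
    if attempts < 3:
        raise TimeoutError('transient failure')
    return 'ok'


# Attempt 1 fails -> sleep 1.0s; attempt 2 fails -> sleep 2.0s; attempt 3 succeeds.
assert flaky_rpc() == 'ok'
assert attempts == 3
```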
+# + +import unittest +from unittest.mock import MagicMock, patch, Mock + +from fedlearner_webconsole.utils.decorators.retry import retry_fn + + +class RpcError(Exception): + + def __init__(self, status: int = 0): + super().__init__() + self.status = status + + +def some_unstable_connect(grpc_call): + res = grpc_call() + if res['status'] != 0: + raise RpcError(res['status']) + return res['data'] + + +class RetryTest(unittest.TestCase): + + def test_retry_fn(self): + + @retry_fn(retry_times=2, need_retry=lambda e: isinstance(e, RpcError)) + def retry_twice(grpc_call): + return some_unstable_connect(grpc_call) + + grpc_call = MagicMock() + grpc_call.side_effect = [{'status': -1, 'data': 'hhhhhh'}, {'status': -1, 'data': 'hhhh'}] + with self.assertRaises(RpcError): + retry_twice(grpc_call=grpc_call) + + grpc_call = MagicMock() + grpc_call.side_effect = [{'status': -1, 'data': 'hhhhhh'}, {'status': 0, 'data': 'hhhh'}] + self.assertEqual(retry_twice(grpc_call=grpc_call), 'hhhh') + + @patch('fedlearner_webconsole.utils.decorators.retry.time.sleep') + def test_retry_fn_with_delay(self, mock_sleep: Mock): + sleep_time = 0 + + def fake_sleep(s): + nonlocal sleep_time + sleep_time = sleep_time + s + + mock_sleep.side_effect = fake_sleep + + @retry_fn(retry_times=5, delay=1000, backoff=2) + def retry_with_delay(grpc_call): + return some_unstable_connect(grpc_call) + + grpc_call = MagicMock() + grpc_call.return_value = {'status': 0, 'data': '123'} + self.assertEqual(retry_with_delay(grpc_call), '123') + mock_sleep.assert_not_called() + + grpc_call = MagicMock() + grpc_call.side_effect = [{'status': 255}, {'status': -1}, {'status': 2}, {'status': 0, 'data': '123'}] + self.assertEqual(retry_with_delay(grpc_call), '123') + self.assertEqual(mock_sleep.call_count, 3) + # 1 + 2 + 4 + self.assertEqual(sleep_time, 7) + + # Failed case + sleep_time = 0 + mock_sleep.reset_mock() + grpc_call = MagicMock() + grpc_call.side_effect = RuntimeError() + with self.assertRaises(RuntimeError): + retry_with_delay(grpc_call=grpc_call) + self.assertEqual(mock_sleep.call_count, 4) + # 1 + 2 + 4 + 8 + self.assertEqual(sleep_time, 15) + + def test_retry_fn_with_need_retry(self): + + @retry_fn(retry_times=10, need_retry=lambda e: e.status == 3) + def custom_retry(grpc_call): + return some_unstable_connect(grpc_call) + + grpc_call = MagicMock() + grpc_call.side_effect = [{'status': 3}, {'status': 3}, {'status': 5}, {'status': 6}] + with self.assertRaises(RpcError): + custom_retry(grpc_call) + # When status is 5, it will not retry again. + self.assertEqual(grpc_call.call_count, 3) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/domain_name.py b/web_console_v2/api/fedlearner_webconsole/utils/domain_name.py new file mode 100644 index 000000000..c572c72b1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/domain_name.py @@ -0,0 +1,33 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import re
+from typing import Optional
+
+
+def get_pure_domain_name(common_name: str) -> Optional[str]:
+    """Gets the pure domain name from the common name field of an x.509 certificate.
+
+    Args:
+        common_name (str): common name parsed from x.509
+
+    Returns:
+        str: pure domain name, e.g. 'bytedance' or 'bytedance-test'
+    """
+    for regex in [r'.*fl-([^\.]+)(\.com)?', r'(.+)\.fedlearner\.net']:
+        matched = re.match(regex, common_name)
+        if matched:
+            return matched.group(1)
+    return None
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/domain_name_test.py b/web_console_v2/api/fedlearner_webconsole/utils/domain_name_test.py
new file mode 100644
index 000000000..6e652b54c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/domain_name_test.py
@@ -0,0 +1,32 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from fedlearner_webconsole.utils.domain_name import get_pure_domain_name
+
+
+class DomainNameTest(unittest.TestCase):
+
+    def test_get_pure_domain_name(self):
+        self.assertEqual(get_pure_domain_name('*.fl-bytedance.com'), 'bytedance')
+        self.assertEqual(get_pure_domain_name('fl-bytedance.com'), 'bytedance')
+        self.assertEqual(get_pure_domain_name('fl-bytedance-test'), 'bytedance-test')
+        self.assertEqual(get_pure_domain_name('bytedance.fedlearner.net'), 'bytedance')
+        self.assertIsNone(get_pure_domain_name('bytedancefedlearner.net'))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/es.py b/web_console_v2/api/fedlearner_webconsole/utils/es.py
index 058d7fbf6..3e90ea39c 100644
--- a/web_console_v2/api/fedlearner_webconsole/utils/es.py
+++ b/web_console_v2/api/fedlearner_webconsole/utils/es.py
@@ -1,4 +1,4 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.
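Before moving into the es.py changes, an aside on the two regexes in `get_pure_domain_name` above, shown with Python's `re` directly (the sample strings are the same ones the unit test uses): the first pattern captures whatever follows the `fl-` marker up to an optional `.com`, the second captures the subdomain in front of `.fedlearner.net`; in both cases `group(1)` is the pure domain name:

```python
import re

m = re.match(r'.*fl-([^\.]+)(\.com)?', '*.fl-bytedance.com')
assert m is not None and m.group(1) == 'bytedance'  # '[^\.]+' stops at the '.com' suffix

m = re.match(r'(.+)\.fedlearner\.net', 'bytedance.fedlearner.net')
assert m is not None and m.group(1) == 'bytedance'  # '(.+)' is the leading subdomain
```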
@@ -15,6 +15,8 @@
 # coding: utf-8
 # pylint: disable=invalid-string-quote
 import json
+import time
+from typing import Dict, List, Optional
 
 from elasticsearch import Elasticsearch
 
@@ -26,26 +28,45 @@ class ElasticSearchClient(object):
 
     def __init__(self):
         self._es_client = None
         self._es_client = Elasticsearch([{
-            'host': Envs.ES_READ_HOST or Envs.ES_HOST,
+            'host': Envs.ES_HOST,
             'port': Envs.ES_PORT
         }],
-                                        http_auth=(Envs.ES_USERNAME,
-                                                   Envs.ES_PASSWORD))
+                                        http_auth=(Envs.ES_USERNAME, Envs.ES_PASSWORD),
+                                        timeout=10000)
 
     def search(self, *args, **kwargs):
         return self._es_client.search(*args, **kwargs)
 
     def query_log(self,
-                  index,
-                  keyword,
-                  pod_name,
-                  start_time,
-                  end_time,
-                  match_phrase=None):
+                  index: str,
+                  keyword: str,
+                  pod_name: str,
+                  start_time: int = 0,
+                  end_time: Optional[int] = None,
+                  match_phrase: Optional[Dict[str, str]] = None) -> List[str]:
+        """Queries logs from ES.
+
+        Args:
+            index (str): the ES index that you want to search
+            keyword (str): an optional keyword to filter by
+            pod_name (str): the pod whose logs you want to query
+            start_time (int, optional): start of the search range, unix timestamp in milliseconds
+            end_time (int, optional): end of the search range, unix timestamp in milliseconds.
+                Defaults to now when None.
+            match_phrase (Dict[str, str], optional): match phrase. Defaults to None.
+
+        Returns:
+            List[str]: one entry per log line
+        """
+        end_time = end_time or int(time.time() * 1000)
         query_body = {
             'version': True,
             'size': 8000,
             'sort': [{
+                'log.nanostimestamp': {
+                    'order': 'desc',
+                    'unmapped_type': 'long'
+                }
+            }, {
                 '@timestamp': 'desc'
             }, {
                 'log.offset': {
@@ -66,7 +87,7 @@ def query_log(self,
                     'query': keyword,
                     'analyze_wildcard': True,
                     'default_operator': 'AND',
-                    'default_field': '*'
+                    'default_field': 'message'
                 }
             }] if keyword else []
         match_phrase_list = [
@@ -88,17 +109,16 @@ def query_log(self,
         response = self._es_client.search(index=index, body=query_body)
         return [item['_source']['message'] for item in response['hits']['hits']]
 
-    def query_events(self,
-                     index,
-                     keyword,
-                     pod_name,
-                     start_time,
-                     end_time,
-                     match_phrase=None):
+    def query_events(self, index, keyword, pod_name, start_time, end_time, match_phrase=None):
         query_body = {
             'version': True,
             'size': 8000,
             'sort': [{
+                'log.nanostimestamp': {
+                    'order': 'desc',
+                    'unmapped_type': 'long'
+                }
+            }, {
                 '@timestamp': 'desc'
             }, {
                 'log.offset': {
@@ -119,7 +139,7 @@ def query_events(self,
                     'query': f'{keyword} AND Event',
                     'analyze_wildcard': True,
                     'default_operator': 'AND',
-                    'default_field': '*'
+                    'default_field': 'message'
                 }
             }] if keyword else []
         match_phrase_list = [
@@ -141,11 +161,7 @@ def query_events(self,
         response = self._es_client.search(index=index, body=query_body)
         return [item['_source']['message'] for item in response['hits']['hits']]
 
-    def put_ilm(self,
-                ilm_name,
-                hot_size='50gb',
-                hot_age='10d',
-                delete_age='30d'):
+    def put_ilm(self, ilm_name, hot_size='50gb', hot_age='10d', delete_age='30d'):
         if self._es_client is None:
             raise RuntimeError('ES client not yet initialized.')
         ilm_body = {
@@ -264,43 +280,50 @@ def query_data_join_metrics(self, job_name, num_buckets):
                 }
             }
         }
-
         return es.search(index='data_join*', body=query)
 
-    def query_nn_metrics(self, job_name, num_buckets):
+    def query_nn_metrics(self, job_name: str, metric_list: List[str], num_buckets: int = 30):
         query = {
-            "size": 0,
-            "query": {
-                "bool": {
-                    "must": [{
-                        "term": {
-                            "tags.application_id": job_name
+            'size': 0,
+            'query': {
+                'bool': {
+                    'must': [{
+                        'term': {
+                            'tags.application_id': job_name
                         }
                     }, {
+
'terms': { + 'name': metric_list } }] } }, - "aggs": { - "PROCESS_TIME": { - "auto_date_histogram": { - "field": "tags.process_time", - "format": "strict_date_optional_time", - "buckets": num_buckets + 'aggs': { + metric: { + 'filter': { + 'term': { + 'name': metric + } }, - "aggs": { - "AUC": { - "avg": { - "field": "value" + 'aggs': { + 'PROCESS_TIME': { + 'auto_date_histogram': { + 'field': 'tags.process_time', + 'format': 'strict_date_optional_time', + 'buckets': num_buckets + }, + 'aggs': { + 'VALUE': { + 'avg': { + 'field': 'value' + } + } } } } - } + } for metric in metric_list } } - return es.search(index='metrics*', body=query) def query_tree_metrics(self, job_name, metric_list): @@ -340,7 +363,9 @@ def query_tree_metrics(self, job_name, metric_list): "TOP": { "top_hits": { "size": 100, - "sort": [{"tags.process_time": "asc"}], + "sort": [{ + "tags.process_time": "asc" + }], "_source": ["value", "tags.iteration"] } } @@ -350,8 +375,7 @@ def query_tree_metrics(self, job_name, metric_list): } for metric in metric_list } } - response = es.search(index='metrics*', body=query) - return response['aggregations'] + return es.search(index='metrics*', body=query) def query_time_metrics(self, job_name, num_buckets, index='raw_data*'): query = { diff --git a/web_console_v2/api/fedlearner_webconsole/utils/es_misc.py b/web_console_v2/api/fedlearner_webconsole/utils/es_misc.py index 87861097a..2915c308f 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/es_misc.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/es_misc.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. 
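Since `query_nn_metrics` and `query_tree_metrics` now hand back the raw `es.search` response, the caller unpacks the aggregations itself. A sketch of what that might look like for the NN metrics, assuming the standard Elasticsearch aggregation response layout (the helper name is invented):

```python
from typing import Dict, List, Tuple


def unpack_nn_metrics(response: Dict, metric_list: List[str]) -> Dict[str, List[Tuple[str, float]]]:
    """Hypothetical helper: flattens the per-metric filter aggregations built by
    query_nn_metrics into {metric: [(timestamp, avg_value), ...]}."""
    result = {}
    for metric in metric_list:
        # Each metric is a filter aggregation wrapping an auto_date_histogram named
        # PROCESS_TIME, whose buckets carry an avg sub-aggregation named VALUE.
        buckets = response['aggregations'][metric]['PROCESS_TIME']['buckets']
        result[metric] = [(b['key_as_string'], b['VALUE']['value']) for b in buckets]
    return result
```

Used as something like `unpack_nn_metrics(es_client.query_nn_metrics('job-1', ['auc', 'loss']), ['auc', 'loss'])`.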
@@ -19,16 +19,14 @@ _es_datetime_format = 'strict_date_optional_time' RAW_DATA_MAPPINGS = { 'dynamic': True, - 'dynamic_templates': [ - { - 'strings': { - 'match_mapping_type': 'string', - 'mapping': { - 'type': 'keyword' - } + 'dynamic_templates': [{ + 'strings': { + 'match_mapping_type': 'string', + 'mapping': { + 'type': 'keyword' } } - ], + }], 'properties': { 'tags': { 'properties': { @@ -54,16 +52,14 @@ DATA_JOIN_MAPPINGS = { 'dynamic': True, # for dynamically adding string fields, use keyword to reduce space - 'dynamic_templates': [ - { - 'strings': { - 'match_mapping_type': 'string', - 'mapping': { - 'type': 'keyword' - } + 'dynamic_templates': [{ + 'strings': { + 'match_mapping_type': 'string', + 'mapping': { + 'type': 'keyword' } } - ], + }], 'properties': { 'tags': { 'properties': { @@ -105,16 +101,14 @@ } METRICS_MAPPINGS = { 'dynamic': True, - 'dynamic_templates': [ - { - 'strings': { - 'match_mapping_type': 'string', - 'mapping': { - 'type': 'keyword' - } + 'dynamic_templates': [{ + 'strings': { + 'match_mapping_type': 'string', + 'mapping': { + 'type': 'keyword' } } - ], + }], 'properties': { 'name': { 'type': 'keyword' @@ -155,33 +149,33 @@ } } } -ALIAS_NAME = {'metrics': 'metrics_v2', - 'raw_data': 'raw_data', - 'data_join': 'data_join'} -INDEX_MAP = {'metrics': METRICS_MAPPINGS, - 'raw_data': RAW_DATA_MAPPINGS, - 'data_join': DATA_JOIN_MAPPINGS} +ALIAS_NAME = {'metrics': 'metrics_v2', 'raw_data': 'raw_data', 'data_join': 'data_join'} +INDEX_MAP = {'metrics': METRICS_MAPPINGS, 'raw_data': RAW_DATA_MAPPINGS, 'data_join': DATA_JOIN_MAPPINGS} def get_es_template(index_type, shards): assert index_type in ALIAS_NAME alias_name = ALIAS_NAME[index_type] - template = {'index_patterns': ['{}-*'.format(alias_name)], - 'settings': { - 'index': { - 'lifecycle': { - 'name': 'fedlearner_{}_ilm'.format(index_type), - 'rollover_alias': alias_name - }, - 'codec': 'best_compression', - 'routing': { - 'allocation': { - 'total_shards_per_node': '1' - } - }, - 'number_of_shards': str(shards), - 'number_of_replicas': '1', + # pylint: disable=consider-using-f-string + template = { + 'index_patterns': ['{}-*'.format(alias_name)], + 'settings': { + 'index': { + 'lifecycle': { + 'name': 'fedlearner_{}_ilm'.format(index_type), + 'rollover_alias': alias_name + }, + 'codec': 'best_compression', + 'routing': { + 'allocation': { + 'total_shards_per_node': '1' } }, - 'mappings': INDEX_MAP[index_type]} + 'number_of_shards': str(shards), + 'number_of_replicas': '1', + } + }, + 'mappings': INDEX_MAP[index_type] + } + # pylint: enable=consider-using-f-string return template diff --git a/web_console_v2/api/fedlearner_webconsole/utils/fake_k8s_client.py b/web_console_v2/api/fedlearner_webconsole/utils/fake_k8s_client.py deleted file mode 100644 index 1c24824be..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/fake_k8s_client.py +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
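Returning to es_misc for a moment: a sketch of how the template built by `get_es_template` might be installed, assuming the legacy index-template API of the elasticsearch-py client used elsewhere in this change (the host and template name are made up):

```python
from elasticsearch import Elasticsearch

from fedlearner_webconsole.utils.es_misc import get_es_template

es = Elasticsearch([{'host': 'localhost', 'port': 9200}])  # hypothetical connection

# Build the template for the 'metrics' alias with 2 primary shards; it wires new
# 'metrics_v2-*' indices to the fedlearner_metrics_ilm policy and METRICS_MAPPINGS.
template = get_es_template('metrics', shards=2)
es.indices.put_template(name='fedlearner_metrics_template', body=template)
```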
- -# coding: utf-8 -# pylint: disable=logging-format-interpolation -import logging -import datetime -from kubernetes import client - -_RAISE_EXCEPTION_KEY = 'raise_exception' - - -class FakeK8sClient(object): - """A fake k8s client for development. - - With this client we can decouple the dependency of k8s cluster. - """ - def close(self): - pass - - def create_or_update_secret(self, - data, - metadata, - secret_type, - name, - namespace='default'): - # User may pass two type of data: - # 1. dictionary - # 2. K8s Object - # They are both accepted by real K8s client, - # but K8s Object is not iterable. - if isinstance(data, dict) and _RAISE_EXCEPTION_KEY in data: - raise RuntimeError('[500] Fake exception for save_secret') - # Otherwise succeeds - logging.info('======================') - logging.info('Saved a secret with: data: {}, ' - 'metadata: {}, type: {}'.format(data, metadata, - secret_type)) - - def delete_secret(self, name, namespace='default'): - logging.info('======================') - logging.info('Deleted a secret with: name: {}'.format(name)) - - def get_secret(self, name, namespace='default'): - return client.V1Secret(api_version='v1', - data={'test': 'test'}, - kind='Secret', - metadata={ - 'name': name, - 'namespace': namespace - }, - type='Opaque') - - def create_or_update_service(self, - metadata, - spec, - name, - namespace='default'): - logging.info('======================') - logging.info('Saved a service with: spec: {}, metadata: {}'.format( - spec, metadata)) - - def delete_service(self, name, namespace='default'): - logging.info('======================') - logging.info('Deleted a service with: name: {}'.format(name)) - - def get_service(self, name, namespace='default'): - return client.V1Service( - api_version='v1', - kind='Service', - metadata=client.V1ObjectMeta(name=name, namespace=namespace), - spec=client.V1ServiceSpec(selector={'app': 'nginx'})) - - def create_or_update_ingress(self, - metadata, - spec, - name, - namespace='default'): - logging.info('======================') - logging.info('Saved a ingress with: spec: {}, metadata: {}'.format( - spec, metadata)) - - def delete_ingress(self, name, namespace='default'): - logging.info('======================') - logging.info('Deleted a ingress with: name: {}'.format(name)) - - def get_ingress(self, name, namespace='default'): - return client.NetworkingV1beta1Ingress( - api_version='networking.k8s.io/v1beta1', - kind='Ingress', - metadata=client.V1ObjectMeta(name=name, namespace=namespace), - spec=client.NetworkingV1beta1IngressSpec()) - - def create_or_update_deployment(self, - metadata, - spec, - name, - namespace='default'): - logging.info('======================') - logging.info('Saved a deployment with: spec: {}, metadata: {}'.format( - spec, metadata)) - - def delete_deployment(self, name, namespace='default'): - logging.info('======================') - logging.info('Deleted a deployment with: name: {}'.format(name)) - - def get_deployment(self, name, namespace='default'): - return client.V1Deployment( - api_version='apps/v1', - kind='Deployment', - metadata=client.V1ObjectMeta(name=name, namespace=namespace), - spec=client.V1DeploymentSpec( - selector={'matchLabels': { - 'app': 'fedlearner-operator' - }}, - template=client.V1PodTemplateSpec(spec=client.V1PodSpec( - containers=[ - client.V1Container(name='fedlearner-operator', - args=['test']) - ])))) - - def delete_flapp(self, flapp_name): - pass - - def create_flapp(self, flapp_yaml): - pass - - def get_flapp(self, flapp_name): - pods = { - 'pods': { - 
'metadata': { - 'selfLink': '/api/v1/namespaces/default/pods', - 'resourceVersion': '780480990' - } - }, - 'items': [{ - 'metadata': { - 'name': '{}-0'.format(flapp_name) - } - }, { - 'metadata': { - 'name': '{}-1'.format(flapp_name) - } - }] - } - flapp = { - 'kind': 'FLAPP', - 'metadata': { - 'name': flapp_name, - 'namesapce': 'default' - }, - 'status': { - 'appState': 'FLStateRunning', - 'flReplicaStatus': { - 'Master': { - 'active': { - 'laomiao-raw-data-1223-v1-follower' - '-master-0-717b53c4-' - 'fef7-4d65-a309-63cf62494286': {} - } - }, - 'Worker': { - 'active': { - 'laomiao-raw-data-1223-v1-follower' - '-worker-0-61e49961-' - 'e6dd-4015-a246-b6d25e69a61c': {}, - 'laomiao-raw-data-1223-v1-follower' - '-worker-1-accef16a-' - '317f-440f-8f3f-7dd5b3552d25': {} - } - } - } - } - } - return {'flapp': flapp, 'pods': pods} - - def get_webshell_session(self, - flapp_name, - container_name: str, - namespace='default'): - return {'id': 1} - - def get_sparkapplication(self, - name: str, - namespace: str = 'default') -> dict: - logging.info('======================') - logging.info( - f'get spark application, name: {name}, namespace: {namespace}') - return { - 'apiVersion': 'sparkoperator.k8s.io/v1beta2', - 'kind': 'SparkApplication', - 'metadata': { - 'creationTimestamp': '2021-04-15T10:43:15Z', - 'generation': 1, - 'name': name, - 'namespace': namespace, - }, - 'status': { - 'applicationState': { - 'state': 'COMPLETED' - }, - } - } - - def create_sparkapplication(self, - json_object: dict, - namespace: str = 'default') -> dict: - logging.info('======================') - logging.info(f'create spark application, namespace: {namespace}, ' - f'json: {json_object}') - return { - 'apiVersion': 'sparkoperator.k8s.io/v1beta2', - 'kind': 'SparkApplication', - 'metadata': { - 'creationTimestamp': '2021-04-15T10:43:15Z', - 'generation': 1, - 'name': 'fl-transformer-yaml', - 'namespace': 'fedlearner', - 'resourceVersion': '348817823', - }, - 'spec': { - 'arguments': [ - 'hdfs://user/feature/data.csv', - 'hdfs://user/feature/data_tfrecords/' - ], - } - } - - def delete_sparkapplication(self, - name: str, - namespace: str = 'default') -> dict: - logging.info('======================') - logging.info( - f'delete spark application, name: {name}, namespace: {namespace}') - return { - 'kind': 'Status', - 'apiVersion': 'v1', - 'metadata': {}, - 'status': 'Success', - 'details': { - 'name': name, - 'group': 'sparkoperator.k8s.io', - 'kind': 'sparkapplications', - 'uid': '790603b6-9dd6-11eb-9282-b8599fb51ea8' - } - } - - def get_pod_log(self, name: str, namespace: str, tail_lines: int): - return [str(datetime.datetime.now())] - - def get_pods(self, namespace, label_selector): - return ['fake_fedlearner_web_console_v2'] diff --git a/web_console_v2/api/fedlearner_webconsole/utils/file_manager.py b/web_console_v2/api/fedlearner_webconsole/utils/file_manager.py index be9e51110..18420df2d 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/file_manager.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/file_manager.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,29 +17,38 @@
 import logging
 import os
 import re
+import fsspec
 from collections import namedtuple
-from typing import List
+from typing import List, Dict, Union, Optional
 
-from tensorflow.io import gfile
+from tensorflow.io import gfile  # pylint: disable=import-error
+
+from envs import Envs
 
 # path: absolute path of the file
 # size: file size in bytes
 # mtime: time of last modification, unix timestamp in seconds.
-File = namedtuple('File', ['path', 'size', 'mtime'])
-# Currently the supported format '/' or 'hdfs://'
+File = namedtuple('File', ['path', 'size', 'mtime', 'is_directory'])
+# Currently the supported formats are '/', 'hdfs://' and 'file://'
 # TODO(chenyikan): Add oss format when verified.
-SUPPORTED_FILE_PREFIXES = r'\.+\/|^\/|^hdfs:\/\/'
+SUPPORTED_FILE_PREFIXES = r'\.+\/|^\/|^hdfs:\/\/|^file:\/\/'
+FILE_PREFIX = 'file://'
 
 
 class FileManagerBase(object):
     """A base interface for file manager, please implement this interface
     if you have specific logic to handle files, for example,
     HDFS with ACL."""
+
     def can_handle(self, path: str) -> bool:
         """If the manager can handle such file."""
         raise NotImplementedError()
 
-    def ls(self, path: str, recursive=False) -> List[str]:
+    def info(self, path: str) -> Dict:
+        """Gives details of the entry at path."""
+        raise NotImplementedError()
+
+    def ls(self, path: str, include_directory=False) -> List[File]:
        """Lists files under a path.
         Raises:
             ValueError: When the path does not exist.
@@ -53,7 +62,7 @@ def move(self, source: str, destination: str) -> bool:
         raise NotImplementedError()
 
     def remove(self, path: str) -> bool:
-        """Removes files under a path."""
+        """Removes files under a path. Raises an exception when the path does not exist."""
         raise NotImplementedError()
 
     def copy(self, source: str, destination: str) -> bool:
@@ -67,42 +76,101 @@ def mkdir(self, path: str) -> bool:
         raise NotImplementedError()
 
     def read(self, path: str) -> str:
+        """Reads from a file path."""
+        raise NotImplementedError()
+
+    def read_bytes(self, path: str) -> bytes:
+        """Reads bytes from a file path."""
+        raise NotImplementedError()
+
+    def write(self, path: str, payload: str, mode: str = 'w') -> bool:
+        """Writes payload to a file path.
Will override original content.""" + raise NotImplementedError() + + def exists(self, path: str) -> bool: + """Determine whether a path exists or not""" + raise NotImplementedError() + + def isdir(self, path: str) -> bool: + """Return whether the path is a directory or not""" + raise NotImplementedError() + + def listdir(self, path: str) -> List[str]: + """Return all file/directory names in this path, not recursive""" + raise NotImplementedError() + + def rename(self, source: str, dest: str): + """Rename or move a file / directory""" raise NotImplementedError() class GFileFileManager(FileManagerBase): """Gfile file manager for all FS supported by TF, currently it covers all file types we have.""" + + # TODO(gezhengqiang): change the class name + def __init__(self): + self._fs_dict = {} + + def get_customized_fs(self, path: str) -> fsspec.spec.AbstractFileSystem: + """ + Ref: https://filesystem-spec.readthedocs.io/en/latest/_modules/fsspec/core.html?highlight=split_protocol# + # >>> from fsspec.core import split_protocol + # >>> split_protocol('hdfs:///user/test') + # >>> ('hdfs', '/user/test') + """ + protocol = self._get_protocol_from_path(path) or 'file' + if protocol not in self._fs_dict: + self._fs_dict[protocol] = fsspec.get_mapper(path).fs + return self._fs_dict[protocol] + def can_handle(self, path): if path.startswith('fake://'): return False return re.match(SUPPORTED_FILE_PREFIXES, path) - def ls(self, path: str, recursive=False) -> List[File]: - def _get_file_stats(path: str): - stat = gfile.stat(path) - return File(path=path, - size=stat.length, - mtime=int(stat.mtime_nsec / 1e9)) - - if not gfile.exists(path): - raise ValueError( - f'cannot access {path}: No such file or directory') + @staticmethod + def _get_protocol_from_path(path: str) -> Optional[str]: + """If path is '/data', then return None. 
If path is 'file:///data', then return 'file'."""
+        return fsspec.core.split_protocol(path)[0]
+
+    @staticmethod
+    def _get_file_stats_from_dict(file: Dict) -> File:
+        return File(path=file['path'],
+                    size=file['size'],
+                    mtime=int(file['mtime'] if 'mtime' in file else file['last_modified_time']),
+                    is_directory=(file['type'] == 'directory'))
+
+    def info(self, path: str) -> Dict:
+        fs = self.get_customized_fs(path)
+        info = fs.info(path)
+        if 'last_modified' in info:
+            info['last_modified_time'] = info['last_modified']
+        return info
+
+    def ls(self, path: str, include_directory=False) -> List[File]:
+        fs = self.get_customized_fs(path)
+        if not fs.exists(path):
+            raise ValueError(f'cannot access {path}: No such file or directory')
         # If it is a file
-        if not gfile.isdir(path):
-            return [_get_file_stats(path)]
+        info = self.info(path)
+        if info['type'] != 'directory':
+            info['path'] = path
+            return [self._get_file_stats_from_dict(info)]
 
         files = []
-        if recursive:
-            for root, _, res in gfile.walk(path):
-                for file in res:
-                    if not gfile.isdir(os.path.join(root, file)):
-                        files.append(_get_file_stats(os.path.join(root, file)))
-        else:
-            for file in gfile.listdir(path):
-                if not gfile.isdir(os.path.join(path, file)):
-                    files.append(_get_file_stats(os.path.join(path, file)))
-        # Files only
+        for file in fs.ls(path, detail=True):
+            # file['name'] from 'fs.ls' has the protocol stripped from the path;
+            # 'join' is used below to rebuild file['path'] with the protocol kept.
+            base_path = self.info(path)['name']  # base_path does not have protocol
+            rel_path = os.path.relpath(file['name'], base_path)  # file['name'] does not have protocol
+            file['path'] = os.path.join(path, rel_path)  # file['path'] has protocol as well as path
+            if file['type'] == 'directory':
+                if include_directory:
+                    files.append(self._get_file_stats_from_dict(file))
+            else:
+                files.append(self._get_file_stats_from_dict(file))
+
         return files
 
     def move(self, source: str, destination: str) -> bool:
@@ -112,16 +180,13 @@ def move(self, source: str, destination: str) -> bool:
 
     def remove(self, path: str) -> bool:
         if not gfile.isdir(path):
-            return os.remove(path)
+            return gfile.remove(path)
         return gfile.rmtree(path)
 
     def copy(self, source: str, destination: str) -> bool:
         if gfile.isdir(destination):
             # gfile requires a file name for copy destination.
- return gfile.copy(source, - os.path.join(destination, - os.path.basename(source)), - overwrite=True) + return gfile.copy(source, os.path.join(destination, os.path.basename(source)), overwrite=True) return gfile.copy(source, destination, overwrite=True) def mkdir(self, path: str) -> bool: @@ -130,6 +195,33 @@ def mkdir(self, path: str) -> bool: def read(self, path: str) -> str: return gfile.GFile(path).read() + def read_bytes(self, path: str) -> bytes: + return gfile.GFile(path, 'rb').read() + + def write(self, path: str, payload: str, mode: str = 'w') -> bool: + if gfile.isdir(path): + raise ValueError(f'{path} is a directory: Must provide a filename') + if gfile.exists(path): + self.remove(path) + if not gfile.exists(os.path.dirname(path)): + self.mkdir(os.path.dirname(path)) + return gfile.GFile(path, mode).write(payload) + + def exists(self, path: str) -> bool: + return gfile.exists(path) + + def isdir(self, path: str) -> bool: + return gfile.isdir(path) + + def listdir(self, path: str) -> List[str]: + """Return all file/directory names in this path, not recursive""" + if not gfile.isdir(path): + raise ValueError(f'{path} must be a directory!') + return gfile.listdir(path) + + def rename(self, source: str, dest: str): + gfile.rename(source, dest) + class FileManager(FileManagerBase): """A centralized manager to handle files. @@ -138,9 +230,10 @@ class FileManager(FileManagerBase): `CUSTOMIZED_FILE_MANAGER`. For example, 'fedlearner_webconsole.utils.file_manager:HdfsFileManager' """ + def __init__(self): self._file_managers = [] - cfm_path = os.environ.get('CUSTOMIZED_FILE_MANAGER') + cfm_path = Envs.CUSTOMIZED_FILE_MANAGER if cfm_path: module_path, class_name = cfm_path.split(':') module = importlib.import_module(module_path) @@ -149,16 +242,22 @@ def __init__(self): self._file_managers.append(customized_file_manager()) self._file_managers.append(GFileFileManager()) - def can_handle(self, path): + def can_handle(self, path) -> bool: for fm in self._file_managers: if fm.can_handle(path): return True return False - def ls(self, path: str, recursive=False) -> List[File]: + def info(self, path: str) -> Dict: + for fm in self._file_managers: + if fm.can_handle(path): + return fm.info(path) + raise RuntimeError(f'info is not supported for {path}') + + def ls(self, path: str, include_directory=False) -> List[File]: for fm in self._file_managers: if fm.can_handle(path): - return fm.ls(path, recursive=recursive) + return fm.ls(path, include_directory=include_directory) raise RuntimeError(f'ls is not supported for {path}') def move(self, source: str, destination: str) -> bool: @@ -167,8 +266,7 @@ def move(self, source: str, destination: str) -> bool: if fm.can_handle(source) and fm.can_handle(destination): return fm.move(source, destination) # TODO(chenyikan): Support cross FileManager move by using buffers. - raise RuntimeError( - f'move is not supported for {source} and {destination}') + raise RuntimeError(f'move is not supported for {source} and {destination}') def remove(self, path: str) -> bool: logging.info('Removing file [%s]', path) @@ -183,8 +281,7 @@ def copy(self, source: str, destination: str) -> bool: if fm.can_handle(source) and fm.can_handle(destination): return fm.copy(source, destination) # TODO(chenyikan): Support cross FileManager move by using buffers. 
- raise RuntimeError( - f'copy is not supported for {source} and {destination}') + raise RuntimeError(f'copy is not supported for {source} and {destination}') def mkdir(self, path: str) -> bool: logging.info('Create directory [%s]', path) @@ -199,3 +296,49 @@ def read(self, path: str) -> str: if fm.can_handle(path): return fm.read(path) raise RuntimeError(f'read is not supported for {path}') + + def read_bytes(self, path: str) -> bytes: + logging.info(f'Read file from [{path}]') + for fm in self._file_managers: + if fm.can_handle(path): + return fm.read_bytes(path) + raise RuntimeError(f'read_bytes is not supported for {path}') + + def write(self, path: str, payload: Union[str, bytes], mode: str = 'w') -> bool: + logging.info(f'Write file to [{path}]') + for fm in self._file_managers: + if fm.can_handle(path): + return fm.write(path, payload, mode) + raise RuntimeError(f'write is not supported for {path}') + + def exists(self, path: str) -> bool: + logging.info(f'Check [{path}] existence') + for fm in self._file_managers: + if fm.can_handle(path): + return fm.exists(path) + raise RuntimeError(f'check existence is not supported for {path}') + + def isdir(self, path: str) -> bool: + logging.info(f'Determine whether [{path}] is a directory') + for fm in self._file_managers: + if fm.can_handle(path): + return fm.isdir(path) + raise RuntimeError(f'check isdir is not supported for {path}') + + def listdir(self, path: str) -> List[str]: + logging.info(f'Get file/directory names from [{path}]') + for fm in self._file_managers: + if fm.can_handle(path): + return fm.listdir(path) + raise RuntimeError(f'listdir is not supported for {path}') + + def rename(self, source: str, dest: str): + logging.info(f'Rename [{source}] to [{dest}]') + for fm in self._file_managers: + if fm.can_handle(source): + fm.rename(source, dest) + return + raise RuntimeError(f'rename is not supported for {source}') + + +file_manager = FileManager() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/file_manager_test.py b/web_console_v2/api/fedlearner_webconsole/utils/file_manager_test.py new file mode 100644 index 000000000..60d28d3ed --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/file_manager_test.py @@ -0,0 +1,232 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
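A minimal usage sketch of the module-level `file_manager` singleton defined above (paths and file names here are illustrative, not from the repository): each call walks the registered managers in order and dispatches to the first whose `can_handle` accepts the path, raising `RuntimeError` when none does.

```python
from fedlearner_webconsole.utils.file_manager import file_manager

# Illustrative local paths; GFileFileManager.can_handle accepts plain
# absolute paths, so every call below falls through to it.
file_manager.mkdir('/tmp/demo')
file_manager.write('/tmp/demo/hello.txt', 'hello')
assert file_manager.read('/tmp/demo/hello.txt') == 'hello'
print([f.path for f in file_manager.ls('/tmp/demo')])  # ['/tmp/demo/hello.txt']

# A protocol that no registered manager handles is rejected:
try:
    file_manager.read('unsupported://bucket/key')
except RuntimeError as e:
    print(e)  # read is not supported for unsupported://bucket/key
```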
+ +# coding: utf-8 +import os +import shutil +import tempfile +import unittest + +from pathlib import Path +from unittest.mock import patch +from tensorflow.python.framework.errors_impl import NotFoundError, InvalidArgumentError + +from fedlearner_webconsole.utils.file_manager import GFileFileManager, FileManager, File + + +class GFileFileManagerTest(unittest.TestCase): + _F1_SIZE = 3 + _F2_SIZE = 3 + _S1_SIZE = 3 + _SUB_SIZE = 4096 + + def setUp(self): + # Create a temporary directory + self._test_dir = tempfile.mkdtemp() + subdir = Path(self._test_dir).joinpath('subdir') + subdir.mkdir(exist_ok=True) + Path(self._test_dir).joinpath('f1.txt').write_text('xxx', encoding='utf-8') + Path(self._test_dir).joinpath('f2.txt').write_text('xxx', encoding='utf-8') + subdir.joinpath('s1.txt').write_text('xxx', encoding='utf-8') + + self._fm = GFileFileManager() + + def tearDown(self): + # Remove the directory after the test + shutil.rmtree(self._test_dir) + + def _assert_file(self, file: File, path: str, size: int, is_directory: bool): + self.assertEqual(file.path, path) + self.assertEqual(file.size, size) + self.assertEqual(file.is_directory, is_directory) + + def _get_temp_path(self, file_path: str = None) -> str: + return str(Path(self._test_dir, file_path or '').absolute()) + + def test_can_handle(self): + self.assertTrue(self._fm.can_handle('/data/abc')) + self.assertFalse(self._fm.can_handle('data')) + + def test_info(self): + info = self._fm.info(self._get_temp_path('f1.txt')) + self.assertEqual(info['name'], self._get_temp_path('f1.txt')) + self.assertEqual(info['type'], 'file') + with patch('fsspec.implementations.local.LocalFileSystem.info') as mock_info: + mock_info.return_value = {'last_modified': 1} + info = self._fm.info(self._get_temp_path('f1.txt')) + self.assertEqual(info, {'last_modified': 1, 'last_modified_time': 1}) + + def test_ls(self): + # List file + files = self._fm.ls(self._get_temp_path('f1.txt')) + self.assertEqual(len(files), 1) + self._assert_file(files[0], self._get_temp_path('f1.txt'), self._F1_SIZE, False) + with patch('fsspec.implementations.local.LocalFileSystem.info') as mock_info: + mock_info.return_value = { + 'name': self._get_temp_path('f1.txt'), + 'size': 3, + 'type': 'file', + 'last_modified': 1 + } + files = self._fm.ls(self._get_temp_path('f1.txt')) + self._assert_file(files[0], self._get_temp_path('f1.txt'), self._F1_SIZE, False) + self.assertEqual(files[0].mtime, 1) + # List folder + files = sorted(self._fm.ls(self._get_temp_path()), key=lambda file: file.path) + self.assertEqual(len(files), 2) + self._assert_file(files[0], self._get_temp_path('f1.txt'), self._F1_SIZE, False) + self._assert_file(files[1], self._get_temp_path('f2.txt'), self._F2_SIZE, False) + # List directories + files = sorted(self._fm.ls(self._get_temp_path(), include_directory=True), key=lambda file: file.path) + self.assertEqual(len(files), 3) + self._assert_file(files[0], self._get_temp_path('f1.txt'), self._F1_SIZE, False) + self._assert_file(files[1], self._get_temp_path('f2.txt'), self._F2_SIZE, False) + self._assert_file(files[2], self._get_temp_path('subdir'), self._SUB_SIZE, True) + + def test_ls_when_path_has_protocol(self): + path1 = 'file://' + self._get_temp_path('f1.txt') + files = self._fm.ls(path1) + self.assertEqual(len(files), 1) + self.assertEqual(files[0].path, path1) + path2 = 'file://' + self._get_temp_path() + files = sorted(self._fm.ls(path2), key=lambda file: file.path) + self.assertEqual(len(files), 2) + self.assertEqual(files[0].path, 'file://' + 
self._get_temp_path('f1.txt')) + self.assertEqual(files[1].path, 'file://' + self._get_temp_path('f2.txt')) + files = sorted(self._fm.ls(path2, include_directory=True), key=lambda file: file.path) + self.assertEqual(len(files), 3) + self.assertEqual(files[0].path, 'file://' + self._get_temp_path('f1.txt')) + self.assertEqual(files[1].path, 'file://' + self._get_temp_path('f2.txt')) + self.assertEqual(files[2].path, 'file://' + self._get_temp_path('subdir')) + + def test_move(self): + # Moves to another folder + self._fm.move(self._get_temp_path('f1.txt'), self._get_temp_path('subdir/')) + files = sorted(self._fm.ls(self._get_temp_path('subdir')), key=lambda file: file.path) + self.assertEqual(len(files), 2) + self._assert_file(files[0], self._get_temp_path('subdir/f1.txt'), self._F1_SIZE, False) + self._assert_file(files[1], self._get_temp_path('subdir/s1.txt'), self._S1_SIZE, False) + # Renames + self._fm.move(self._get_temp_path('f2.txt'), self._get_temp_path('f3.txt')) + with self.assertRaises(ValueError): + self._fm.ls(self._get_temp_path('f2.txt')) + files = self._fm.ls(self._get_temp_path('f3.txt')) + self.assertEqual(len(files), 1) + self._assert_file(files[0], self._get_temp_path('f3.txt'), self._F2_SIZE, False) + + def test_remove(self): + self._fm.remove(self._get_temp_path('f1.txt')) + self._fm.remove(self._get_temp_path('subdir')) + files = self._fm.ls(self._get_temp_path(), include_directory=True) + self.assertEqual(len(files), 1) + self._assert_file(files[0], self._get_temp_path('f2.txt'), self._F2_SIZE, False) + + def test_copy(self): + self._fm.copy(self._get_temp_path('f1.txt'), self._get_temp_path('subdir')) + files = self._fm.ls(self._get_temp_path('f1.txt')) + self.assertEqual(len(files), 1) + self._assert_file(files[0], self._get_temp_path('f1.txt'), self._F1_SIZE, False) + files = self._fm.ls(self._get_temp_path('subdir/f1.txt')) + self.assertEqual(len(files), 1) + self._assert_file(files[0], self._get_temp_path('subdir/f1.txt'), self._F1_SIZE, False) + + def test_mkdir(self): + self._fm.mkdir(os.path.join(self._get_temp_path(), 'subdir2')) + self.assertTrue(os.path.isdir(self._get_temp_path('subdir2'))) + + def test_read(self): + content = self._fm.read(self._get_temp_path('f1.txt')) + self.assertEqual('xxx', content) + + def test_write(self): + self.assertRaises(ValueError, lambda: self._fm.write(self._get_temp_path(), 'aaa')) + + first_write_content = 'aaaa' + second_write_content = 'bbbb' + self._fm.write(self._get_temp_path('abc/write.txt'), first_write_content) + self.assertEqual(first_write_content, self._fm.read(self._get_temp_path('abc/write.txt'))) + self._fm.write(self._get_temp_path('abc/write.txt'), second_write_content) + self.assertEqual(second_write_content, self._fm.read(self._get_temp_path('abc/write.txt'))) + + def test_listdir(self): + names = self._fm.listdir(self._get_temp_path()) + self.assertCountEqual(names, ['f1.txt', 'f2.txt', 'subdir']) + with self.assertRaises(ValueError): + self._fm.listdir(self._get_temp_path('not_exist_path')) + + def test_rename(self): + first_write_content = 'aaaa' + self._fm.write(self._get_temp_path('abc/write.txt'), first_write_content) + self.assertRaises( + NotFoundError, + lambda: self._fm.rename(self._get_temp_path('abc/write.txt'), self._get_temp_path('abcd/write.txt'))) + self._fm.rename(self._get_temp_path('abc/write.txt'), self._get_temp_path('read.txt')) + self.assertEqual(first_write_content, self._fm.read(self._get_temp_path('read.txt'))) + self.assertRaises(InvalidArgumentError, + lambda: 
self._fm.rename(self._get_temp_path('abc'), self._get_temp_path('abc/abc'))) + self.assertRaises(NotFoundError, + lambda: self._fm.rename(self._get_temp_path('abc'), self._get_temp_path('abcd/abc'))) + self._fm.mkdir(self._get_temp_path('abcd')) + self._fm.rename(self._get_temp_path('abc'), self._get_temp_path('abcd/abcd')) + self.assertTrue(os.path.isdir(self._get_temp_path('abcd/abcd'))) + + +class FileManagerTest(unittest.TestCase): + + def setUp(self): + super().setUp() + fake_fm = 'testing.fake_file_manager:FakeFileManager' + self._patcher = patch('fedlearner_webconsole.utils.file_manager.Envs.CUSTOMIZED_FILE_MANAGER', fake_fm) + self._patcher.start() + self._fm = FileManager() + + def tearDown(self): + self._patcher.stop() + + def test_can_handle(self): + self.assertTrue(self._fm.can_handle('fake://123')) + # Falls back to default manager + self.assertTrue(self._fm.can_handle('/data/123')) + self.assertFalse(self._fm.can_handle('unsupported:///123')) + + def test_ls(self): + self.assertEqual(self._fm.ls('fake://data'), [{'path': 'fake://data/f1.txt', 'size': 0}]) + + def test_move(self): + self.assertTrue(self._fm.move('fake://move/123', 'fake://move/234')) + self.assertFalse(self._fm.move('fake://do_not_move/123', 'fake://move/234')) + # No file manager can handle this + self.assertRaises(RuntimeError, lambda: self._fm.move('hdfs://123', 'fake://abc')) + + def test_remove(self): + self.assertTrue(self._fm.remove('fake://remove/123')) + self.assertFalse(self._fm.remove('fake://do_not_remove/123')) + # No file manager can handle this + self.assertRaises(RuntimeError, lambda: self._fm.remove('unsupported://123')) + + def test_copy(self): + self.assertTrue(self._fm.copy('fake://copy/123', 'fake://copy/234')) + self.assertFalse(self._fm.copy('fake://do_not_copy/123', 'fake://copy/234')) + # No file manager can handle this + self.assertRaises(RuntimeError, lambda: self._fm.copy('hdfs://123', 'fake://abc')) + + def test_mkdir(self): + self.assertTrue(self._fm.mkdir('fake://mkdir/123')) + self.assertFalse(self._fm.mkdir('fake://do_not_mkdir/123')) + # No file manager can handle this + self.assertRaises(RuntimeError, lambda: self._fm.mkdir('unsupported:///123')) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/file_operator.py b/web_console_v2/api/fedlearner_webconsole/utils/file_operator.py new file mode 100644 index 000000000..5af5f5b82 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/file_operator.py @@ -0,0 +1,230 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
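The test above patches `Envs.CUSTOMIZED_FILE_MANAGER` with `'testing.fake_file_manager:FakeFileManager'`, whose source is not part of this diff. A hypothetical plug-in consistent with the assertions in `FileManagerTest` could look roughly like the sketch below; only the `module.path:ClassName` registration format is fixed by `FileManager.__init__`, and the method bodies are guesses.

```python
from typing import List

from fedlearner_webconsole.utils.file_manager import FileManagerBase


class FakeFileManager(FileManagerBase):
    """Handles fake:// paths only; a hypothetical sketch for illustration."""

    def can_handle(self, path: str) -> bool:
        return path.startswith('fake://')

    def ls(self, path: str, include_directory=False) -> List[dict]:
        return [{'path': f'{path}/f1.txt', 'size': 0}]

    def move(self, source: str, destination: str) -> bool:
        # 'do_not_move' sources fail, matching test_move above.
        return not source.startswith('fake://do_not_move')

    def remove(self, path: str) -> bool:
        return not path.startswith('fake://do_not_remove')

    def copy(self, source: str, destination: str) -> bool:
        return not source.startswith('fake://do_not_copy')

    def mkdir(self, path: str) -> bool:
        return not path.startswith('fake://do_not_mkdir')
```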
+ +# coding: utf-8 +import logging +import os +import tempfile + +import fsspec +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.utils.stream_untars import StreamingUntar +from fedlearner_webconsole.utils.stream_tars import StreamingTar +from tensorflow.io import gfile # pylint: disable=import-error +from typing import Union + +HDFS_PREFIX = 'hdfs://' +TAR_SUFFIX = ('.tar',) +GZIP_SUFFIX = ('.gz', '.tgz') + + +class FileOperator(object): + + def __init__(self): + self._fm = FileManager() + self._streaming_untar = StreamingUntar(self._fm) + self._streaming_tar = StreamingTar(self._fm) + + def clear_and_make_an_empty_dir(self, dir_name: str): + try: + self._fm.remove(dir_name) + except Exception as err: # pylint: disable=broad-except + logging.debug('failed to remove %s with exception %s', dir_name, err) + finally: + self._fm.mkdir(dir_name) + + def getsize(self, path: str) -> int: + """Return the total size of all files under path; symbolic links are not skipped. + Args: + path (str): file/directory + + Returns: + total_size (int): total size in bytes + """ + fs: fsspec.AbstractFileSystem = fsspec.get_mapper(path).fs + + def get_dsize(dpath: str) -> int: + """Gets size for directory.""" + total = 0 + for sub_path in fs.ls(dpath, detail=True): + if sub_path.get('type') == 'directory': + total += get_dsize(sub_path.get('name')) + else: + total += sub_path.get('size', 0) + return total + + if not fs.exists(path): + return 0 + if fs.isdir(path): + return get_dsize(path) + # File + return fs.size(path) + + def archive_to(self, source: Union[str, list], destination: str, gzip_compress: bool = False, move: bool = False): + """Compress the file/directory to the destination tarfile/gzip file. + src and dst should be path-like objects or strings. + + Args: + source (str): source file/directory + destination (str): tarfile/gzip file + gzip_compress (bool): if gzip_compress is true, compresses to a gzip file + move (bool): if move is true, deletes source after archiving + Raises: + ValueError: if destination does not end with .tar/.tar.gz + Exception: if io operation failed + """ + logging.info(f'File Operator: will archive {source} to {destination}') + # check destination suffix + if not gzip_compress and not destination.endswith(TAR_SUFFIX): + logging.error(f'Error in archive_to: destination:{destination} does not end with TAR_SUFFIX') + raise ValueError(f'destination:{destination} does not end with TAR_SUFFIX') + if gzip_compress and not destination.endswith(GZIP_SUFFIX): + logging.error(f'Error in archive_to: destination:{destination} does not end with GZIP_SUFFIX') + raise ValueError(f'destination:{destination} does not end with GZIP_SUFFIX') + src_paths = source + if isinstance(source, str): + src_paths = [source] + # check the source list is on the same platform or not.
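+ # e.g. mixing 'hdfs://a/x' and '/local/y' in a single call is rejected below, + # while a batch that touches HDFS at all is staged through a local temp dir before tarring.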
+ is_from_hdfs = src_paths[0].startswith(HDFS_PREFIX) + for src_path in src_paths: + if src_path.startswith(HDFS_PREFIX) != is_from_hdfs: + logging.error(f'Error in archive_to: source list:{source} is not on the same platform.') + raise ValueError(f'source list:{source} is not on the same platform.') + is_to_hdfs = destination.startswith(HDFS_PREFIX) + is_hdfs = is_from_hdfs or is_to_hdfs + if is_hdfs: + # src_parent_dir/src_basename/xx -> tmp_dir/src_basename/xx -> tmp_dir/dest_basename -> dest + with tempfile.TemporaryDirectory() as tmp_dir: + tmp_archive_path = os.path.join(tmp_dir, os.path.basename(destination)) + tmp_src_paths = [] + for src_path in src_paths: + tmp_src_path = os.path.join(tmp_dir, os.path.basename(src_path)) + # if src_path is a dir, copytree only copies the sub-items in src_path + if self._fm.isdir(src_path): + self._fm.mkdir(tmp_src_path) + self._copytree(src_path, tmp_src_path) + tmp_src_paths.append(tmp_src_path) + self._streaming_tar.archive(tmp_src_paths, tmp_archive_path, gzip_compress=gzip_compress) + self._fm.copy(tmp_archive_path, destination) + else: + self._streaming_tar.archive(source, destination, gzip_compress=gzip_compress) + if move: + self._fm.remove(source) + + def extract_to(self, source: str, destination: str, create_dir: bool = False): + """Extract the file to the directory dst. src and dst should be path-like objects or strings. + + Args: + source (str): source file/directory/tarfile + destination (str): directory + create_dir (bool): if create_dir is true, will create the destination dir + Raises: + ValueError: if tarfile does not end with .tar/.tar.gz + Exception: if io operation failed + """ + self.copy_to(source, destination, extract=True, move=False, create_dir=create_dir) + + def copy_to(self, + source: str, + destination: str, + extract: bool = False, + move: bool = False, + create_dir: bool = False): + """Copies the file src to the directory dst. src and dst should be path-like objects or strings; + the file will be copied into dst using the base filename from src.
+ + Args: + source (str): source file/directory/tarfile + destination (str): directory + extract (bool): extract source file if it is tarfile + move (bool): if move is true, will delete source after copy + create_dir (bool): if create_dir is true, will create the destination dir + + Raises: + ValueError: if tarfile does not end with .tar/.tar.gz + Exception: if io operation failed + """ + # create the destination dir + if create_dir and not self._fm.exists(destination): + self._fm.mkdir(destination) + if not self._fm.isdir(destination): + logging.error(f'Error in copy_to: destination:{destination} is not an existing directory') + raise ValueError(f'destination:{destination} is not an existing directory') + if not extract: + self._copytree(source, destination) + if move: + self._fm.remove(source) + return + is_hdfs = source.startswith(HDFS_PREFIX) or destination.startswith(HDFS_PREFIX) + if is_hdfs: + self._unpack_hdfs_tarfile(source, destination, is_move=move) + else: + self._unpack_tarfile(source, destination, is_move=move) + + def _unpack_tarfile(self, filename: str, extract_dir: str, is_move: bool = False): + """Unpack tar/tar.gz `filename` into `extract_dir` + """ + self._streaming_untar.untar(filename, extract_dir) + if is_move: + self._fm.remove(filename) + + def _unpack_hdfs_tarfile(self, filename: str, extract_dir: str, is_move: bool = False): + """Unpack tar/tar.gz `filename` into `extract_dir`; + copies the tarfile to a local temporary directory, untars it there, and then uploads the contents + """ + with tempfile.TemporaryDirectory() as tmp_dir: + try: + self._fm.copy(filename, tmp_dir) + tmp_tarfile = os.path.join(tmp_dir, os.path.basename(filename)) + tmp_sub_dir = os.path.join(tmp_dir, 'tmp_sub_dir') + self._fm.mkdir(tmp_sub_dir) + self._streaming_untar.untar(tmp_tarfile, tmp_sub_dir) + except Exception as e: # pylint: disable=broad-except + logging.error(f'failed to untar file {filename}, exception: {e}') + return + self._copytree(tmp_sub_dir, extract_dir) + if is_move: + self._fm.remove(filename) + + def _copytree(self, source: str, dest: str): + """Recursively copy an entire directory tree rooted at src to a directory named dest + + Args: + source (str): source file/directory/tarfile + dest (str): directory + + Raises: + Exception: if io operation failed + """ + # file + if self._fm.exists(source) and not self._fm.isdir(source): + self._fm.copy(source, dest) + # directory + # TODO(wangzeju): use file manager instead of gfile + for root, dirs, files in gfile.walk(source): + relative_path = os.path.relpath(root, source) + for f in files: + file_path = os.path.join(root, f) + dest_file = os.path.join(dest, relative_path, f) + try: + self._fm.copy(file_path, dest_file) + except Exception as e: # pylint: disable=broad-except + logging.error(f'failed to copy file, from {file_path} to {dest_file}, ex: {e}') + for d in dirs: + dest_dir = os.path.join(dest, relative_path, d) + try: + self._fm.mkdir(dest_dir) + except Exception as e: # pylint: disable=broad-except + logging.error(f'failed to mkdir {dest_dir}, ex: {e}') diff --git a/web_console_v2/api/fedlearner_webconsole/utils/file_operator_test.py b/web_console_v2/api/fedlearner_webconsole/utils/file_operator_test.py new file mode 100644 index 000000000..12d8d52ae --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/file_operator_test.py @@ -0,0 +1,89 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import os +import tempfile +import unittest +from pathlib import Path + +from envs import Envs + +from fedlearner_webconsole.utils.file_operator import FileOperator +from fedlearner_webconsole.utils.file_manager import FILE_PREFIX + + +class FileOperatorTest(unittest.TestCase): + + def test_copy(self): + fo = FileOperator() + source = os.path.join(Envs.BASE_DIR, 'testing/test_data/sparkapp.tar') + with tempfile.TemporaryDirectory() as tmp_dir: + fo.copy_to(source, tmp_dir) + dest = os.path.join(tmp_dir, os.path.basename(source)) + self.assertTrue(os.path.exists(dest), 'sparkapp.tar not found') + + with tempfile.TemporaryDirectory() as tmp_dir: + fo.copy_to(source, tmp_dir, extract=True) + self.assertTrue(len(os.listdir(tmp_dir)) > 0, 'sparkapp/ not found') + + def test_getsize(self): + temp_dir = tempfile.mkdtemp() + # 1 byte + Path(temp_dir).joinpath('f1.txt').write_text('1', encoding='utf-8') + # 2 bytes + Path(temp_dir).joinpath('f2.txt').write_text('22', encoding='utf-8') + subdir = Path(temp_dir).joinpath('subdir') + subdir.mkdir(exist_ok=True) + # 3 bytes + Path(subdir).joinpath('f3.txt').write_text('333', encoding='utf-8') + fo = FileOperator() + # Folder + self.assertEqual(fo.getsize(str(Path(temp_dir).resolve())), 6) + # File + self.assertEqual(fo.getsize(str(Path(temp_dir).joinpath('f2.txt').resolve())), 2) + # Invalid path + self.assertEqual(fo.getsize('/invalidfolder/notexist'), 0) + + def test_archive_to(self): + fo = FileOperator() + source = os.path.join(Envs.BASE_DIR, 'testing/test_data/algorithm/e2e_test') + with tempfile.TemporaryDirectory() as tmp_dir: + dest = os.path.join(tmp_dir, os.path.basename(source)) + dest = dest + '.tar' + fo.archive_to(source, dest) + self.assertTrue(os.path.exists(dest), 'dest tar file not found') + + with tempfile.TemporaryDirectory() as tmp_dir: + dest = os.path.join(tmp_dir, os.path.basename(source)) + dest = dest + '.tar' + fo.archive_to(FILE_PREFIX + source, FILE_PREFIX + dest) + self.assertTrue(os.path.exists(dest), 'dest tar file not found') + + def test_extract_to(self): + fo = FileOperator() + source = os.path.join(Envs.BASE_DIR, 'testing/test_data/sparkapp.tar') + with tempfile.TemporaryDirectory() as tmp_dir: + fo.extract_to(source, tmp_dir) + dest = os.path.join(tmp_dir, 'class.csv') + self.assertTrue(os.path.exists(dest), 'extracted file not found') + + with tempfile.TemporaryDirectory() as tmp_dir: + fo.extract_to(FILE_PREFIX + source, FILE_PREFIX + tmp_dir) + dest = os.path.join(tmp_dir, 'class.csv') + self.assertTrue(os.path.exists(dest), 'extracted file not found') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/file_tree.py b/web_console_v2/api/fedlearner_webconsole/utils/file_tree.py new file mode 100644 index 000000000..46542de22 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/file_tree.py @@ -0,0 +1,71 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +from typing import List +from fedlearner_webconsole.utils.file_manager import FileManager +from fedlearner_webconsole.proto.algorithm_pb2 import FileTreeNode + + +# TODO(hangweiqiang): make it object oriented +class FileTreeBuilder: + + def __init__(self, path: str, relpath: bool = False): + self.file_manager = FileManager() + self.path = path + self.relpath = relpath + + def _recursive_build(self, path: str) -> List[FileTreeNode]: + files = self.file_manager.ls(path, include_directory=True) + file_nodes = [] + for file in files: + filename = os.path.split(file.path)[-1] + filepath = file.path # filepath has protocol + relpath = os.path.relpath(filepath, self.path) # relative path does not have protocol + tree_node = FileTreeNode(filename=filename, + path=relpath if self.relpath else filepath, + mtime=file.mtime, + size=file.size, + is_directory=file.is_directory) + if file.is_directory: + dir_path = os.path.join(self.path, relpath) # dir_path has protocol + files = self._recursive_build(path=dir_path) # path needs protocol + tree_node.files.extend(files) + file_nodes.append(tree_node) + return file_nodes + + def build(self) -> List[FileTreeNode]: + return self._recursive_build(self.path) + + def build_with_root(self) -> FileTreeNode: + info = self.file_manager.info(self.path) + filename = os.path.split(self.path)[-1] + root = FileTreeNode(filename=filename, + mtime=int(info['mtime'] if 'mtime' in info else info['last_modified_time']), + size=info['size'], + is_directory=(info['type'] == 'directory')) + root.files.extend(self._recursive_build(path=self.path)) + return root + + def _get_size(self, tree_node: FileTreeNode): + file_size = tree_node.size + if tree_node.is_directory: + for file in tree_node.files: + file_size += self._get_size(file) + return file_size + + def size(self): + tree_nodes = self.build() + return sum([self._get_size(node) for node in tree_nodes]) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/file_tree_test.py b/web_console_v2/api/fedlearner_webconsole/utils/file_tree_test.py new file mode 100644 index 000000000..726222b9d --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/file_tree_test.py @@ -0,0 +1,163 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
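A short sketch of how `FileTreeBuilder` might be driven (the `/data/algorithm` directory is illustrative; `FileTreeNode` is the proto imported from `algorithm_pb2`):

```python
from fedlearner_webconsole.utils.file_tree import FileTreeBuilder

# Illustrative root; with relpath=True every node.path is relative to it,
# otherwise nodes keep the full (protocol-prefixed) path.
builder = FileTreeBuilder('/data/algorithm', relpath=True)

for node in builder.build():
    # Each node is a FileTreeNode proto; node.files holds its subtree.
    print(node.filename, node.is_directory, len(node.files))

root = builder.build_with_root()  # a single root node wrapping the same children
print(builder.size())             # sum of `size` over all nodes in the tree
```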
+# + +import unittest +import tempfile +from pathlib import Path +from unittest.mock import patch +from testing.common import BaseTestCase +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.utils.file_tree import FileTreeBuilder +from fedlearner_webconsole.utils.file_manager import File + + +class FileTreeTest(BaseTestCase): + + def setUp(self): + super().setUp() + path = tempfile.mkdtemp() + path = Path(path, 'e2e_test').resolve() + self._base_path = str(path) + path.mkdir() + path.joinpath('follower').mkdir() + path.joinpath('leader').mkdir() + file_path = path.joinpath('leader').joinpath('main.py') + file_path.touch() + file_path.write_text('import tensorflow') # pylint: disable=unspecified-encoding + + def test_build(self): + file_trees = FileTreeBuilder(self._base_path, relpath=True).build() + data = [to_dict(file_tree) for file_tree in file_trees] + data = sorted(data, key=lambda d: d['filename']) + self.assertPartiallyEqual(data[0], { + 'filename': 'follower', + 'path': 'follower', + 'is_directory': True, + 'files': [] + }, + ignore_fields=['mtime', 'size']) + self.assertPartiallyEqual(data[1], { + 'filename': 'leader', + 'path': 'leader', + 'is_directory': True + }, + ignore_fields=['size', 'mtime', 'files']) + self.assertPartiallyEqual(data[1]['files'][0], { + 'filename': 'main.py', + 'path': 'leader/main.py', + 'is_directory': False + }, + ignore_fields=['size', 'mtime', 'files']) + + def test_build_with_root(self): + root = FileTreeBuilder(self._base_path, relpath=True).build_with_root() + data = to_dict(root) + self.assertPartiallyEqual(data, { + 'filename': 'e2e_test', + 'path': '', + 'is_directory': True + }, + ignore_fields=['size', 'mtime', 'files']) + files = data['files'] + files = sorted(files, key=lambda f: f['filename']) + self.assertPartiallyEqual(files[0], { + 'filename': 'follower', + 'path': 'follower', + 'is_directory': True, + 'files': [] + }, + ignore_fields=['mtime', 'size']) + self.assertPartiallyEqual(files[1], { + 'filename': 'leader', + 'path': 'leader', + 'is_directory': True + }, + ignore_fields=['size', 'mtime', 'files']) + self.assertPartiallyEqual(files[1]['files'][0], { + 'filename': 'main.py', + 'path': 'leader/main.py', + 'is_directory': False + }, + ignore_fields=['size', 'mtime', 'files']) + + @patch('fedlearner_webconsole.utils.file_manager.GFileFileManager.info') + @patch('fedlearner_webconsole.utils.file_manager.GFileFileManager.ls') + def test_build_when_ls_corner_case(self, mock_ls, mock_info): + mock_ls.side_effect = [ + [ + File(path='hdfs://browser-hdfs/business/content-cloud/fedlearner/20221113/leader', + size=1, + is_directory=True, + mtime=1), + File(path='hdfs://browser-hdfs/business/content-cloud/fedlearner/20221113/leader.py', + size=1, + is_directory=False, + mtime=1), + File(path='hdfs://browser-hdfs/business/content-cloud/fedlearner/20221113/follower.py', + size=1, + is_directory=False, + mtime=1) + ], + [ + File(path='hdfs://browser-hdfs/business/content-cloud/fedlearner/20221113/leader/main.py', + size=1, + is_directory=False, + mtime=1) + ] + ] + mock_info.side_effect = [{ + 'name': '/business/content-cloud/fedlearner/20221113' + }, { + 'name': '/business/content-cloud/fedlearner/20221113' + }] + path = 'hdfs://browser-hdfs/business/content-cloud/fedlearner/20221113' + file_trees = FileTreeBuilder(path=path, relpath=True).build() + data = [to_dict(file_tree) for file_tree in file_trees] + data = sorted(data, key=lambda d: d['filename']) + self.assertPartiallyEqual(data[0], { + 'filename': 
'follower.py', + 'path': 'follower.py', + 'is_directory': False, + 'files': [] + }, + ignore_fields=['mtime', 'size']) + self.assertPartiallyEqual(data[1], { + 'filename': + 'leader', + 'path': + 'leader', + 'is_directory': + True, + 'files': [{ + 'filename': 'main.py', + 'path': 'leader/main.py', + 'size': 1, + 'mtime': 1, + 'is_directory': False, + 'files': [] + }] + }, + ignore_fields=['mtime', 'size']) + self.assertPartiallyEqual(data[2], { + 'filename': 'leader.py', + 'path': 'leader.py', + 'is_directory': False, + 'files': [] + }, + ignore_fields=['mtime', 'size']) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/filtering.py b/web_console_v2/api/fedlearner_webconsole/utils/filtering.py new file mode 100644 index 000000000..efb2314e6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/filtering.py @@ -0,0 +1,230 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum +import logging +from typing import Type, NamedTuple, Dict, Optional, Callable, Any + +from pyparsing import Keyword, replace_with, common, dbl_quoted_string, remove_quotes, Suppress, Group, Word, \ + alphas, Literal, Forward, delimited_list, alphanums, Opt, ParseResults, ParseException +from sqlalchemy import Column, and_ +from sqlalchemy.orm import Query + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, SimpleExpression, FilterOp, FilterExpressionKind + +# Using pyparsing to construct a DSL for filtering syntax. +# Why not use regex to parse the expression directly? Handling string literals has a lot of corner +# cases; for example, we use brackets to split sub-expressions, but ')(' may appear inside a value, +# so we would have to try different parsing solutions by brute force, which is inefficient +# and buggy. A DSL is a more elegant way.
Ref: https://pypi.org/project/pyparsing/ +# --------------------Grammar--------------------------- +# Names +_SIMPLE_EXP_RNAME = 'simple_exp' +_EXP_COMBINER_RNAME = 'exp_combiner' +_SUB_EXPS_RNAME = 'sub_exps' +_EXP_RNAME = 'exp' +# Those values follow json standards, +# ref: https://github.com/pyparsing/pyparsing/blob/master/examples/jsonParser.py +_LEFT_SQUARE_BRACKET = Suppress('[') +_RIGHT_SQUARE_BRACKET = Suppress(']') +_TRUE = Literal('true').set_parse_action(replace_with(True)) +_FALSE = Literal('false').set_parse_action(replace_with(False)) +_BOOL_VALUE = _TRUE | _FALSE +_STRING_VALUE = dbl_quoted_string().set_parse_action(remove_quotes) +_NUMBER_VALUE = common.number() +_NUMBER_LIST = Group(_LEFT_SQUARE_BRACKET + Opt(delimited_list(_NUMBER_VALUE, delim=',')) + _RIGHT_SQUARE_BRACKET, + aslist=True) +_STRING_LIST = Group(_LEFT_SQUARE_BRACKET + Opt(delimited_list(_STRING_VALUE, delim=',')) + _RIGHT_SQUARE_BRACKET, + aslist=True) +PRIMITIVE_VALUE = _BOOL_VALUE | _STRING_VALUE | _NUMBER_VALUE +LIST_VALUE = _NUMBER_LIST | _STRING_LIST +VALUE = PRIMITIVE_VALUE | LIST_VALUE + +_LEFT_BRACKET = Suppress('(') +_RIGHT_BRACKET = Suppress(')') +FIELD = Word(init_chars=alphas, body_chars=alphanums + '_' + '.', min=1) +# IN op accepts list values only (numbers or strings) +_IN_EXP_MEMBER = FIELD + Literal(':') + LIST_VALUE +_EQUAL_EXP_MEMBER = FIELD + Literal('=') + PRIMITIVE_VALUE +_GREATER_THAN_EXP_MEMBER = FIELD + Literal('>') + _NUMBER_VALUE +_LESS_THAN_EXP_MEMBER = FIELD + Literal('<') + _NUMBER_VALUE +_CONTAIN_EXP_MEMBER = FIELD + Literal('~=') + _STRING_VALUE +_EXP_MEMBER = _IN_EXP_MEMBER | _EQUAL_EXP_MEMBER | _CONTAIN_EXP_MEMBER | \ + _GREATER_THAN_EXP_MEMBER | _LESS_THAN_EXP_MEMBER +SIMPLE_EXP = Group(_LEFT_BRACKET + _EXP_MEMBER + _RIGHT_BRACKET).set_results_name(_SIMPLE_EXP_RNAME) + +EXP_COMBINER = Keyword('and').set_results_name(_EXP_COMBINER_RNAME) +EXP = Forward() +EXP <<= Group(SIMPLE_EXP | (_LEFT_BRACKET + EXP_COMBINER + Group(EXP[2, ...]).set_results_name(_SUB_EXPS_RNAME) + + _RIGHT_BRACKET)).set_results_name(_EXP_RNAME) +# --------------------End of grammar-------------------- + + +def _build_simple_expression(parse_results: ParseResults) -> SimpleExpression: + """Builds simple expression by parsed result, ref to `SIMPLE_EXP`.""" + field, op_str, typed_value = parse_results.as_list() + + op = FilterOp.EQUAL + if op_str == ':': + op = FilterOp.IN + elif op_str == '~=': + op = FilterOp.CONTAIN + elif op_str == '>': + op = FilterOp.GREATER_THAN + elif op_str == '<': + op = FilterOp.LESS_THAN + exp = SimpleExpression( + field=field, + op=op, + ) + if isinstance(typed_value, bool): + exp.bool_value = typed_value + elif isinstance(typed_value, str): + exp.string_value = typed_value + elif isinstance(typed_value, (int, float)): + exp.number_value = typed_value + elif isinstance(typed_value, list): + if len(typed_value) > 0 and isinstance(typed_value[0], str): + exp.list_value.string_list.extend(typed_value) + else: + exp.list_value.number_list.extend(typed_value) + else: + logging.warning('[FilterExpression] Unsupported value: %s', typed_value) + raise ValueError(f'Unsupported value: {typed_value}') + return exp + + +def _build_expression(exp: ParseResults) -> FilterExpression: + """Builds expression by parsed results, ref to `EXP`.""" + if _SIMPLE_EXP_RNAME in exp: + return FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=_build_simple_expression(exp[_SIMPLE_EXP_RNAME])) + combiner = exp.get(_EXP_COMBINER_RNAME) + assert combiner == 'and', 'Combiner must be and as of now' + exp_pb =
FilterExpression(kind=FilterExpressionKind.AND) + for sub_exp in exp.get(_SUB_EXPS_RNAME, []): + exp_pb.exps.append(_build_expression(sub_exp)) + return exp_pb + + +def parse_expression(exp: str) -> FilterExpression: + try: + parse_results = EXP.parse_string(exp, parse_all=True) + return _build_expression(parse_results[0]) + except ParseException as e: + error_message = f'[FilterExpression] unsupported expression {exp}' + logging.exception(error_message) + raise ValueError(error_message) from e + + +class FieldType(enum.Enum): + STRING = 'STRING' + NUMBER = 'NUMBER' + BOOL = 'BOOL' + + +class SupportedField(NamedTuple): + # Field type + type: FieldType + # Supported ops, key is the op, value is the custom criterion builder. + ops: Dict['FilterOp', Optional[Callable[[SimpleExpression], Any]]] + + +def validate_expression(exp: FilterExpression, supported_fields: Dict[str, SupportedField]): + """Validates if the expression is supported. + + Raises: + ValueError: if the expression is unsupported. + """ + if exp.kind == FilterExpressionKind.SIMPLE: + simple_exp = exp.simple_exp + if simple_exp.field not in supported_fields: + raise ValueError(f'Unsupported field {simple_exp.field}') + supported_field = supported_fields[simple_exp.field] + if simple_exp.op not in supported_field.ops: + raise ValueError(f'Unsupported op {FilterOp.Name(simple_exp.op)} on {simple_exp.field}') + pb_value_field = simple_exp.WhichOneof('value') + value_type = FieldType.STRING + if pb_value_field == 'bool_value': + value_type = FieldType.BOOL + elif pb_value_field == 'number_value': + value_type = FieldType.NUMBER + elif pb_value_field == 'list_value': + if len(simple_exp.list_value.number_list) > 0: + value_type = FieldType.NUMBER + else: + value_type = FieldType.STRING + if value_type != supported_field.type: + raise ValueError( + f'Type of {simple_exp.field} is invalid, expected {supported_field.type}, actual {value_type}') + return + for sub_exp in exp.exps: + validate_expression(sub_exp, supported_fields) + + +class FilterBuilder(object): + + def __init__(self, model_class: Type[db.Model], supported_fields: Dict[str, SupportedField]): + self.model_class = model_class + self.supported_fields = supported_fields + + def _build_criterions(self, exp: FilterExpression): + """Builds sqlalchemy criterions for the filter expression.""" + if exp.kind == FilterExpressionKind.SIMPLE: + simple_exp = exp.simple_exp + supported_field = self.supported_fields.get(simple_exp.field) + custom_builder = None + if supported_field: + custom_builder = supported_field.ops.get(simple_exp.op) + # Calls custom builder if it is specified + if callable(custom_builder): + return custom_builder(simple_exp) + + column: Optional[Column] = getattr(self.model_class, simple_exp.field, None) + assert column is not None, f'{simple_exp.field} is not a column key' + if simple_exp.op == FilterOp.EQUAL: + pb_value_field = simple_exp.WhichOneof('value') + return column == getattr(simple_exp, pb_value_field) + if simple_exp.op == FilterOp.IN: + number_list = simple_exp.list_value.number_list + string_list = simple_exp.list_value.string_list + list_value = number_list + if len(string_list) > 0: + list_value = string_list + return column.in_(list_value) + if simple_exp.op == FilterOp.CONTAIN: + return column.ilike(f'%{simple_exp.string_value}%') + if simple_exp.op == FilterOp.GREATER_THAN: + return column > simple_exp.number_value + if simple_exp.op == FilterOp.LESS_THAN: + return column < simple_exp.number_value + raise ValueError(f'Unsupported filter op: 
{simple_exp.op}') + # AND-combined sub expressions + assert exp.kind == FilterExpressionKind.AND + criterions = [self._build_criterions(sub_exp) for sub_exp in exp.exps] + return and_(*criterions) + + def build_query(self, query: Query, exp: FilterExpression) -> Query: + """Build query by expression. + + Raises: + ValueError: if the expression is unsupported. + """ + # A special case that the expression is empty + if exp.ByteSize() == 0: + return query + validate_expression(exp, self.supported_fields) + return query.filter(self._build_criterions(exp)) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/filtering_test.py b/web_console_v2/api/fedlearner_webconsole/utils/filtering_test.py new file mode 100644 index 000000000..188c73d87 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/filtering_test.py @@ -0,0 +1,453 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from typing import Any + +from pyparsing import ParseException, ParserElement +from sqlalchemy import and_ + +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, SimpleExpression, FilterOp, FilterExpressionKind +from fedlearner_webconsole.utils.filtering import VALUE, SIMPLE_EXP, EXP, parse_expression, FilterBuilder, \ + SupportedField, FieldType, validate_expression +from testing.no_web_server_test_case import NoWebServerTestCase + + +class TestModel(db.Model): + __tablename__ = 'test_table' + __table_args__ = (default_table_args('Test table')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + name = db.Column(db.String(255)) + disabled = db.Column(db.Boolean, default=False) + amount = db.Column(db.Float, default=0) + + +class DslTest(unittest.TestCase): + + def _parse_single(self, e: ParserElement, s: str) -> Any: + results = e.parse_string(s, parse_all=True).as_list() + self.assertEqual(len(results), 1) + return results[0] + + def test_bool_value(self): + self.assertEqual(self._parse_single(VALUE, 'true'), True) + self.assertEqual(self._parse_single(VALUE, ' false '), False) + with self.assertRaises(ParseException): + self._parse_single(VALUE, 'True') + + def test_string_value(self): + self.assertEqual(self._parse_single(VALUE, '"u你好🤩 ok"'), 'u你好🤩 ok') + self.assertEqual(self._parse_single(VALUE, '"hey"'), 'hey') + with self.assertRaises(ParseException): + self._parse_single(VALUE, '\'single quote\'') + with self.assertRaises(ParseException): + self._parse_single(VALUE, '"no quote pair') + with self.assertRaises(ParseException): + self._parse_single(VALUE, 'no quote') + + def test_number_value(self): + self.assertEqual(self._parse_single(VALUE, '01234'), 1234) + self.assertEqual(self._parse_single(VALUE, '-56.877'), -56.877) + self.assertEqual(self._parse_single(VALUE, '1e4'), 10000) + with self.assertRaises(ParseException): + self._parse_single(VALUE, '2^20') + + def test_number_list(self): + 
self.assertEqual(self._parse_single(VALUE, '[]'), []) + self.assertEqual(self._parse_single(VALUE, '[-2e2]'), [-200]) + self.assertEqual(self._parse_single(VALUE, '[-1, +2.06, 3]'), [-1, 2.06, 3]) + with self.assertRaises(ParseException): + self._parse_single(VALUE, '-2 ]') + + def test_string_list(self): + self.assertEqual(self._parse_single(VALUE, '["hello world"]'), ['hello world']) + self.assertEqual(self._parse_single(VALUE, '["🐷", "行\\\"卫\'qiang", "🤩"]'), ['🐷', '行\\"卫\'qiang', '🤩']) + with self.assertRaises(ParseException): + self._parse_single(VALUE, '[\'hello\']') + with self.assertRaises(ParseException): + self._parse_single(VALUE, '["hello]') + + def test_simple_exp_with_equal(self): + self.assertEqual(self._parse_single(SIMPLE_EXP, '(abc=123)'), ['abc', '=', 123]) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(a_b_c=false)'), ['a_b_c', '=', False]) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(x123="test值")'), ['x123', '=', 'test值']) + with self.assertRaises(ParseException): + # Without brackets + self._parse_single(SIMPLE_EXP, 'abc=123') + with self.assertRaises(ParseException): + # Invalid value + self._parse_single(SIMPLE_EXP, 'abc=abc') + with self.assertRaises(ParseException): + # List value is not supported for equal + self._parse_single(SIMPLE_EXP, '(f=[1,-2])') + + def test_simple_exp_with_in(self): + self.assertEqual(self._parse_single(SIMPLE_EXP, '(abc:[1,-2])'), ['abc', ':', [1, -2]]) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(x12_3:[2.3333])'), ['x12_3', ':', [2.3333]]) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(s1:["h","w"])'), ['s1', ':', ['h', 'w']]) + with self.assertRaises(ParseException): + # Without brackets + self._parse_single(SIMPLE_EXP, 'abc:[-1]') + with self.assertRaises(ParseException): + # Primitive value is not supported + self._parse_single(SIMPLE_EXP, '(f:"hello")') + + def test_simple_exp_with_greater_than(self): + self.assertEqual(self._parse_single(SIMPLE_EXP, '(start_at>123)'), ['start_at', '>', 123]) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(amount>-1.2)'), ['amount', '>', -1.2]) + with self.assertRaises(ParseException): + # String value is not supported + self._parse_single(SIMPLE_EXP, '(s>"hello")') + + def test_simple_exp_with_less_than(self): + self.assertEqual(self._parse_single(SIMPLE_EXP, '(end_at<187777)'), ['end_at', '<', 187777]) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(amount < -1.23)'), ['amount', '<', -1.23]) + with self.assertRaises(ParseException): + # String value is not supported + self._parse_single(SIMPLE_EXP, '(amount<"hello")') + + def test_simple_exp_with_contain(self): + self.assertEqual(self._parse_single(SIMPLE_EXP, '(abc~="keyword")'), ['abc', '~=', 'keyword']) + self.assertEqual(self._parse_single(SIMPLE_EXP, '(a_b_c~="~=你好")'), ['a_b_c', '~=', '~=你好']) + with self.assertRaises(ParseException): + # Without brackets + self._parse_single(SIMPLE_EXP, 'abc~="keyword"') + with self.assertRaises(ParseException): + # Invalid value + self._parse_single(SIMPLE_EXP, 'abc~=abc') + with self.assertRaises(ParseException): + # List value is not supported + self._parse_single(SIMPLE_EXP, '(f~=["fff"])') + + def test_exp_simple(self): + self.assertEqual(self._parse_single(EXP, '(a.b:[1,2,3])'), [['a.b', ':', [1, 2, 3]]]) + self.assertEqual(self._parse_single(EXP, '(x123="h h")'), [['x123', '=', 'h h']]) + self.assertEqual(self._parse_single(EXP, '(s1~="ooo")'), [['s1', '~=', 'ooo']]) + + def test_exp_and_combined(self): + result = self._parse_single(EXP, 
'(and(a:[1])(b=true)(c=")(")(d~="like"))') + self.assertEqual(result, + ['and', [[['a', ':', [1]]], [['b', '=', True]], [['c', '=', ')(']], [['d', '~=', 'like']]]]) + with self.assertRaises(ParseException): + # No brackets + self._parse_single(EXP, 'and(f=false)(x=true)') + with self.assertRaises(ParseException): + # Only one sub-exp + self._parse_single(EXP, '(and(f=false))') + with self.assertRaises(ParseException): + # Invalid value + self._parse_single(EXP, '(and(f=false)(x=)())') + + def test_exp_nested(self): + result = self._parse_single(EXP, '(and(a:[1,2])(and(x1=true)(x2=false)(x3~="ss"))(and(y1="and()")(y2="中文")))') + self.assertEqual(result, [ + 'and', + [[['a', ':', [1, 2]]], ['and', [[['x1', '=', True]], [['x2', '=', False]], [['x3', '~=', 'ss']]]], + ['and', [[['y1', '=', 'and()']], [['y2', '=', '中文']]]]] + ]) + + +class ParseExpressionTest(unittest.TestCase): + + def test_simple_expression(self): + self.assertEqual( + parse_expression('(test_field="hey 🐷")'), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='test_field', op=FilterOp.EQUAL, string_value='hey 🐷'))) + self.assertEqual( + parse_expression('(test_field:[ -2, 3 ])'), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='test_field', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[-2, 3])))) + self.assertEqual( + parse_expression('(test_field:["你 好", "🐷"])'), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='test_field', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(string_list=['你 好', '🐷'])))) + self.assertEqual( + parse_expression('(test_field~="like")'), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='test_field', op=FilterOp.CONTAIN, string_value='like'))) + self.assertEqual( + parse_expression('(start_at > 123)'), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='start_at', op=FilterOp.GREATER_THAN, number_value=123))) + self.assertEqual( + parse_expression('(test_field<-12.3)'), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='test_field', op=FilterOp.LESS_THAN, + number_value=-12.3))) + + def test_and_expression(self): + self.assertEqual( + parse_expression('(and(x1="床前明月光")(x2:["o","y"])(x3=true))'), + FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='x1', + op=FilterOp.EQUAL, + string_value='床前明月光')), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='x2', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(string_list=['o', 'y']))), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='x3', + op=FilterOp.EQUAL, + bool_value=True)) + ])) + + def test_nested_expression(self): + self.assertEqual( + parse_expression('(and(and(x1="(and(x1=true)(x2=false))")(x2:[1]))(and(x3=false)(x4=1.1e3))(x5~="x"))'), + FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='x1', + op=FilterOp.EQUAL, + string_value='(and(x1=true)(x2=false))')), + FilterExpression( + kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='x2', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[1]))), + ]), 
+ FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='x3', + op=FilterOp.EQUAL, + bool_value=False)), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='x4', + op=FilterOp.EQUAL, + number_value=1100)), + ]), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='x5', + op=FilterOp.CONTAIN, + string_value='x')), + ])) + + def test_invalid_expression(self): + with self.assertRaises(ValueError): + # No brackets + parse_expression('x1=true') + with self.assertRaises(ValueError): + # No brackets + parse_expression('and(x1=true)(x2=false)') + with self.assertRaises(ValueError): + # Only one sub expression + parse_expression('(and(x1=true))') + + +class ValidateExpressionTest(unittest.TestCase): + SUPPORTED_FIELDS = { + 'f1': SupportedField(type=FieldType.NUMBER, ops={FilterOp.IN: None}), + 'f2': SupportedField(type=FieldType.STRING, ops={ + FilterOp.EQUAL: lambda exp: True, + FilterOp.IN: None + }), + 'f3': SupportedField(type=FieldType.BOOL, ops={FilterOp.EQUAL: None}), + 'f4': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}), + } + + def test_valid(self): + exp = FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='f1', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[1, 2]))), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='f2', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(string_list=['hello']))), + FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='f2', + op=FilterOp.EQUAL, + string_value='hello')), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='f3', + op=FilterOp.EQUAL, + bool_value=True)), + ]), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='f4', + op=FilterOp.CONTAIN, + string_value='lifjfasdf asdf')), + ]) + validate_expression(exp, self.SUPPORTED_FIELDS) + + def test_unsupported_field(self): + exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='f123123', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[1, 2]))) + with self.assertRaisesRegex(ValueError, 'Unsupported field f123123'): + validate_expression(exp, self.SUPPORTED_FIELDS) + + def test_unsupported_op(self): + exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='f3', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[1, 2]))) + with self.assertRaisesRegex(ValueError, 'Unsupported op IN on f3'): + validate_expression(exp, self.SUPPORTED_FIELDS) + + def test_invalid_type(self): + exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='f1', op=FilterOp.IN, string_value='invalid')) + with self.assertRaisesRegex(ValueError, + 'Type of f1 is invalid, expected FieldType.NUMBER, actual FieldType.STRING'): + validate_expression(exp, self.SUPPORTED_FIELDS) + exp = FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='f2', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[1]))) + with self.assertRaisesRegex(ValueError, + 'Type of f2 is invalid, expected FieldType.STRING, actual 
FieldType.NUMBER'): + validate_expression(exp, self.SUPPORTED_FIELDS) + + +class FilterBuilderTest(NoWebServerTestCase): + + def test_build_query(self): + exp = FilterExpression(kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='id', + op=FilterOp.IN, + list_value=SimpleExpression.ListValue(number_list=[1, 2]))), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='name', + op=FilterOp.EQUAL, + string_value='test name')), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='name', + op=FilterOp.CONTAIN, + string_value='test')), + FilterExpression( + kind=FilterExpressionKind.AND, + exps=[ + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='disabled', + op=FilterOp.EQUAL, + bool_value=True)), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='amount', + op=FilterOp.EQUAL, + number_value=666.6)), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='id', + op=FilterOp.GREATER_THAN, + number_value=1)), + FilterExpression(kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression(field='id', + op=FilterOp.LESS_THAN, + number_value=1999)), + ]), + ]) + with db.session_scope() as session: + models = [ + TestModel( + id=1, + name='test name', + disabled=False, + amount=666.6, + ), + TestModel( + id=2, + name='test name', + disabled=True, + amount=666.6, + ), + TestModel( + id=3, + name='test name', + disabled=True, + amount=666.6, + ) + ] + session.add_all(models) + session.commit() + + def amount_filter(exp: FilterExpression): + return and_(TestModel.amount > 600, TestModel.amount < 700) + + builder = FilterBuilder(TestModel, + supported_fields={ + 'id': + SupportedField(type=FieldType.NUMBER, + ops={ + FilterOp.EQUAL: None, + FilterOp.IN: None, + FilterOp.GREATER_THAN: None, + FilterOp.LESS_THAN: None, + }), + 'name': + SupportedField(type=FieldType.STRING, + ops={ + FilterOp.EQUAL: None, + FilterOp.CONTAIN: None + }), + 'disabled': + SupportedField(type=FieldType.BOOL, ops={FilterOp.EQUAL: None}), + 'amount': + SupportedField(type=FieldType.NUMBER, ops={FilterOp.EQUAL: amount_filter}), + }) + with db.session_scope() as session: + query = session.query(TestModel) + query = builder.build_query(query, exp) + self.assertEqual( + self.generate_mysql_statement(query), + 'SELECT test_table.id, test_table.name, test_table.disabled, test_table.amount \n' + 'FROM test_table \n' + # lower() is called since it is meant to be case-insensitive + # pylint: disable-next=line-too-long + 'WHERE test_table.id IN (1.0, 2.0) AND test_table.name = \'test name\' AND lower(test_table.name) LIKE lower(\'%%test%%\') AND test_table.disabled = true AND test_table.amount > 600 AND test_table.amount < 700 AND test_table.id > 1.0 AND test_table.id < 1999.0' + ) + model_ids = [m.id for m in query.all()] + self.assertCountEqual(model_ids, [2]) + + def test_build_query_for_empty_exp(self): + exp = FilterExpression() + builder = FilterBuilder(TestModel, supported_fields={}) + with db.session_scope() as session: + query = session.query(TestModel) + query = builder.build_query(query, exp) + self.assertEqual( + self.generate_mysql_statement(query), + 'SELECT test_table.id, test_table.name, test_table.disabled, test_table.amount \n' + 'FROM test_table') + + +if __name__ == '__main__': + unittest.main() diff --git 
a/web_console_v2/api/fedlearner_webconsole/utils/flask_utils.py b/web_console_v2/api/fedlearner_webconsole/utils/flask_utils.py new file mode 100644 index 000000000..9349e8a2e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/flask_utils.py @@ -0,0 +1,105 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import io +import json +import typing +import urllib.parse +from http import HTTPStatus +from typing import Optional, Tuple, Union +from flask import send_file, g, has_request_context, request +from google.protobuf.message import Message +from marshmallow import ValidationError +from webargs import fields + +from envs import Envs +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression +from fedlearner_webconsole.utils.const import SSO_HEADER +from fedlearner_webconsole.utils.filtering import parse_expression +from fedlearner_webconsole.utils.proto import to_dict + + +def download_json(content: dict, filename: str): + in_memory_file = io.BytesIO() + # `ensure_ascii=False` to make sure non-ascii show correctly + in_memory_file.write(json.dumps(content, ensure_ascii=False).encode('utf-8')) + in_memory_file.seek(0) + return send_file(in_memory_file, + as_attachment=True, + attachment_filename=f'{filename}.json', + mimetype='application/json; charset=UTF-8', + cache_timeout=0) + + +def get_current_sso() -> Optional[str]: + sso_headers = request.headers.get(SSO_HEADER, None) + if sso_headers: + return sso_headers.split()[0] + return None + + +def get_current_user() -> Optional[User]: + if has_request_context() and hasattr(g, 'current_user'): + return g.current_user + return None + + +def set_current_user(current_user: User): + g.current_user = current_user + + +def _normalize_data(data: Union[Message, dict, list]) -> Union[dict, list]: + if isinstance(data, Message): + return to_dict(data) + if isinstance(data, list): + return [_normalize_data(d) for d in data] + if isinstance(data, dict): + return {k: _normalize_data(v) for k, v in data.items()} + return data + + +def make_flask_response(data: Optional[Union[Message, dict, list]] = None, + page_meta: Optional[dict] = None, + status: int = HTTPStatus.OK) -> Tuple[dict, int]: + if data is None: + data = {} + data = _normalize_data(data) + + if page_meta is None: + page_meta = {} + return { + 'data': data, + 'page_meta': page_meta, + }, status + + +def get_link(path: str) -> str: + host_url = None + if has_request_context(): + host_url = request.host_url + if not host_url: + host_url = Envs.SERVER_HOST + return urllib.parse.urljoin(host_url, path) + + +class FilterExpField(fields.Field): + """A marshmallow field represents the filtering expression. 
See details in filtering.py.""" + + def _deserialize(self, value: str, attr: str, data: typing.Any, **kwargs) -> FilterExpression: + try: + return parse_expression(value) + except ValueError as e: + raise ValidationError(f'Failed to parse filter {value}') from e diff --git a/web_console_v2/api/fedlearner_webconsole/utils/flask_utils_test.py b/web_console_v2/api/fedlearner_webconsole/utils/flask_utils_test.py new file mode 100644 index 000000000..7c5793cec --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/flask_utils_test.py @@ -0,0 +1,157 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 +import json +import unittest + +from http import HTTPStatus +from unittest.mock import patch +from google.protobuf import struct_pb2 + +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression +from fedlearner_webconsole.proto.testing.testing_pb2 import Tdata +from fedlearner_webconsole.utils.decorators.pp_flask import use_kwargs +from fedlearner_webconsole.utils.flask_utils import download_json, get_current_user, set_current_user, \ + make_flask_response, _normalize_data, get_link, FilterExpField, get_current_sso +from testing.common import BaseTestCase + + +class FlaskUtilsTest(BaseTestCase): + + def test_download_json(self): + test_content = {'haha': {'hh': [0]}, 'abc': 123, 'unicode': ['boss行味镪', '🤑']} + + @self.app.route('/test', methods=['POST']) + def test_route(): + return download_json(test_content, 'test_file') + + response = self.client.post('/test') + self.assertEqual( + response.data, b'{"haha": {"hh": [0]}, "abc": 123,' + b' "unicode": ["boss\xe8\xa1\x8c\xe5\x91\xb3\xe9\x95\xaa", "\xf0\x9f\xa4\x91"]}') + self.assertEqual(response.data.decode('utf-8'), + '{"haha": {"hh": [0]}, "abc": 123, "unicode": ["boss行味镪", "🤑"]}') + self.assertEqual(json.loads(response.data.decode('utf-8')), test_content) + self.assertEqual(response.headers['Content-Disposition'], 'attachment; filename=test_file.json') + self.assertEqual(response.headers['Content-Type'], 'application/json; charset=UTF-8') + + def test_get_current_user(self): + test_user = User(id=1, username='test') + + @self.app.route('/test', methods=['POST']) + def test_route(): + set_current_user(test_user) + return {}, HTTPStatus.OK + + self.client.post('/test') + self.assertEqual(test_user, get_current_user()) + + def test_normalize_data(self): + # Dict + d = {'a': 123} + self.assertEqual(_normalize_data(d), d) + # Proto + self.assertEqual(_normalize_data(Tdata(id=134)), { + 'id': 134, + 'mappers': {}, + 'projects': [], + 'tt': 'UNSPECIFIED', + }) + # Array of proto + self.assertEqual(_normalize_data([Tdata(id=1), Tdata(id=2)]), [{ + 'id': 1, + 'mappers': {}, + 'projects': [], + 'tt': 'UNSPECIFIED', + }, { + 'id': 2, + 'mappers': {}, + 'projects': [], + 'tt': 'UNSPECIFIED', + }]) + # Array + l = [{'a': 44}, {'b': '123'}] + self.assertEqual(_normalize_data(l), l) + # Dict with nested 
Protobuf Message and map structure. + self.assertEqual(_normalize_data({'a': Tdata(id=1, mappers={0: struct_pb2.Value(string_value='test')})}), + {'a': { + 'id': 1, + 'mappers': { + '0': 'test', + }, + 'projects': [], + 'tt': 'UNSPECIFIED', + }}) + + def test_make_flask_response(self): + resp, status = make_flask_response() + self.assertDictEqual(resp, {'data': {}, 'page_meta': {}}) + self.assertEqual(HTTPStatus.OK, status) + + data = [{'name': 'kiyoshi'} for _ in range(5)] + page_meta = {'page': 1, 'page_size': 0, 'total_items': 5, 'total_pages': 1} + resp, status = make_flask_response(data, page_meta) + self.assertDictEqual(data[0], resp.get('data')[0]) + self.assertDictEqual(page_meta, resp.get('page_meta')) + + def test_get_link_in_flask(self): + + @self.app.route('/test') + def test_route(): + return get_link('/v2/workflow-center/workflows/123') + + resp = self.get_helper('/test', use_auth=False) + self.assertEqual(resp.data.decode('utf-8'), 'http://localhost/v2/workflow-center/workflows/123') + + @patch('fedlearner_webconsole.utils.flask_utils.request.headers.get') + def test_get_current_sso(self, mock_headers): + mock_headers.return_value = 'test oauth access_token' + sso_name = get_current_sso() + self.assertEqual(sso_name, 'test') + + +class NonFlaskTest(unittest.TestCase): + + def test_get_link_not_in_flask(self): + self.assertEqual(get_link('/v2/test'), 'http://localhost:666/v2/test') + + +class FilterExpFieldTest(BaseTestCase): + + def test_custom_field(self): + + @self.app.route('/test') + @use_kwargs({ + 'filter_exp': FilterExpField(required=False, load_default=None), + }, location='query') + def test_route(filter_exp: FilterExpression): + return make_flask_response(data=filter_exp) + + resp = self.get_helper('/test', use_auth=False) + self.assertEqual(resp.status_code, HTTPStatus.OK) + + resp = self.get_helper('/test?filter_exp=invalid', use_auth=False) + self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) + + resp = self.get_helper('/test?filter_exp=(x%3D123)', use_auth=False) + self.assertEqual(resp.status_code, HTTPStatus.OK) + data = self.get_response_data(resp) + self.assertEqual(data['simple_exp']['field'], 'x') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/hooks.py b/web_console_v2/api/fedlearner_webconsole/utils/hooks.py index 25d0b9a6b..b9327d3a7 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/hooks.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/hooks.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
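The hunk that follows extracts `pre_start_hook`'s inline import logic into a reusable `parse_and_get_fn` helper, which resolves a `module.path:function_name` string to a callable, and registers the request-id and API-latency middlewares once the hook has run. A minimal usage sketch, assuming a hypothetical `my_hooks` module on the PYTHONPATH (only `parse_and_get_fn` itself comes from this patch):

```python
# my_hooks.py -- a hypothetical pre-start hook module
def setup_database():
    print('pre-start hook executed')


# Somewhere at startup, e.g. with PRE_START_HOOK=my_hooks:setup_database
from fedlearner_webconsole.utils.hooks import parse_and_get_fn

fn = parse_and_get_fn('my_hooks:setup_database')  # resolves the callable
fn()  # prints 'pre-start hook executed'
```

A path without a colon, or one naming a missing module or attribute, raises `RuntimeError`, as exercised by `hooks_test.py` further down.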
@@ -14,18 +14,37 @@
 # coding: utf-8
 import importlib
+from typing import Any
 
 from envs import Envs
-from fedlearner_webconsole.db import db_handler as db, get_database_uri
+from fedlearner_webconsole.db import db, get_database_uri
+from fedlearner_webconsole.middleware.middlewares import flask_middlewares
+from fedlearner_webconsole.middleware.request_id import FlaskRequestId
+from fedlearner_webconsole.middleware.api_latency import api_latency_middleware
+
+
+def parse_and_get_fn(module_fn_path: str) -> Any:
+    if module_fn_path.find(':') == -1:
+        raise RuntimeError(f'Invalid module_fn_path: {module_fn_path}')
+
+    module_path, func_name = module_fn_path.split(':')
+    try:
+        module = importlib.import_module(module_path)
+        fn = getattr(module, func_name)
+    except (ModuleNotFoundError, AttributeError) as e:
+        raise RuntimeError(f'No function found for {module_fn_path}') from e
+    # Return the resolved function; the caller decides when to run it
+    return fn
 
 
 def pre_start_hook():
     before_hook_path = Envs.PRE_START_HOOK
     if before_hook_path:
-        module_path, func_name = before_hook_path.split(':')
-        module = importlib.import_module(module_path)
-        # Dynamically run the function
-        getattr(module, func_name)()
+        parse_and_get_fn(before_hook_path)()
     # explicit rebind db engine to make hook work
     db.rebind(get_database_uri())
+
+    # Applies middlewares
+    flask_middlewares.register(FlaskRequestId())
+    flask_middlewares.register(api_latency_middleware)
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/hooks_test.py b/web_console_v2/api/fedlearner_webconsole/utils/hooks_test.py
new file mode 100644
index 000000000..84f5e3743
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/hooks_test.py
@@ -0,0 +1,36 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+
+import unittest
+
+from fedlearner_webconsole.utils.hooks import parse_and_get_fn
+
+
+class HookTest(unittest.TestCase):
+
+    def test_parse_and_get_fn(self):
+        # a valid module:function path
+        right_hook = 'testing.test_data.hello:hello'
+        self.assertEqual(parse_and_get_fn(right_hook)(), 1)
+
+        # a nonexistent module
+        nonexistent_hook = 'hello:hello'
+        with self.assertRaises(RuntimeError):
+            parse_and_get_fn(nonexistent_hook)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/images.py b/web_console_v2/api/fedlearner_webconsole/utils/images.py
new file mode 100644
index 000000000..0464ca4ac
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/images.py
@@ -0,0 +1,23 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from fedlearner_webconsole.setting.service import SettingService + + +def generate_unified_version_image(image_prefix: str) -> str: + # remove tag if input image_prefix has tag + if image_prefix.find(':') != -1: + image_prefix = image_prefix.rsplit(':', 1)[0] + return f'{image_prefix}:{SettingService.get_application_version().version.version}' diff --git a/web_console_v2/api/fedlearner_webconsole/utils/images_test.py b/web_console_v2/api/fedlearner_webconsole/utils/images_test.py new file mode 100644 index 000000000..e8ebcc5fb --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/images_test.py @@ -0,0 +1,36 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import MagicMock, patch + +from fedlearner_webconsole.utils.images import generate_unified_version_image + + +class ImageUtilsTest(unittest.TestCase): + + @patch('fedlearner_webconsole.utils.images.SettingService.get_application_version') + def test_generate_unified_version_image(self, mock_get_application_version: MagicMock): + mock_version = MagicMock() + mock_version.version.version = '2.2.2.2' + mock_get_application_version.return_value = mock_version + self.assertEqual(generate_unified_version_image('artifact.bytedance.com/fedlearner/pp_data_inspection'), + 'artifact.bytedance.com/fedlearner/pp_data_inspection:2.2.2.2') + self.assertEqual(generate_unified_version_image('artifact.bytedance.com/fedlearner/pp_data_inspection:test'), + 'artifact.bytedance.com/fedlearner/pp_data_inspection:2.2.2.2') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/job_metrics.py b/web_console_v2/api/fedlearner_webconsole/utils/job_metrics.py new file mode 100644 index 000000000..7ad8f9976 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/job_metrics.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import os +import logging +import tensorflow.compat.v1 as tf +from typing import Dict +from google.protobuf import text_format +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.proto.tree_model_pb2 import BoostingTreeEnsambleProto +from fedlearner_webconsole.utils.file_manager import file_manager + + +def get_feature_importance(job: Job) -> Dict[str, float]: + storage_root_dir = job.project.get_storage_root_path(None) + if storage_root_dir is None: + return {} + job_name = job.name + path = os.path.join(storage_root_dir, 'job_output', job_name, 'exported_models') + if not file_manager.exists(path): + return {} + fin = tf.io.gfile.GFile(path, 'r') + model = BoostingTreeEnsambleProto() + try: + text_format.Parse(fin.read(), model, allow_unknown_field=True) + except Exception as e: # pylint: disable=broad-except + logging.warning('parsing tree proto with error %s', str(e)) + return {} + fscore = model.feature_importance + feature_names = list(model.feature_names) + cat_feature_names = list(model.cat_feature_names) + feature_names.extend(cat_feature_names) + if len(feature_names) == 0: + feature_names = [f'f{i}' for i in range(len(fscore))] + feature_importance = {} + for i, name in enumerate(feature_names): + feature_importance[name] = fscore[i] + for j in range(len(feature_names), len(fscore)): + feature_importance[f'peer_f{j}'] = fscore[j] + return feature_importance diff --git a/web_console_v2/api/fedlearner_webconsole/utils/k8s_cache.py b/web_console_v2/api/fedlearner_webconsole/utils/k8s_cache.py deleted file mode 100644 index 44783205b..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/k8s_cache.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import threading -from enum import Enum - - -class EventType(Enum): - ADDED = 'ADDED' - MODIFIED = 'MODIFIED' - DELETED = 'DELETED' - - -class ObjectType(Enum): - POD = 'POD' - FLAPP = 'FLAPP' - - -class Event(object): - def __init__(self, flapp_name, event_type, obj_type, obj_dict): - self.flapp_name = flapp_name - self.event_type = event_type - self.obj_type = obj_type - # {'status': {}, 'metadata': {}} - self.obj_dict = obj_dict - - @staticmethod - def from_json(event, obj_type): - # TODO(xiangyuxuan): move this to k8s/models.py - event_type = event['type'] - obj = event['object'] - if obj_type == ObjectType.POD: - obj = obj.to_dict() - metadata = obj.get('metadata') - status = obj.get('status') - flapp_name = metadata['labels']['app-name'] - return Event(flapp_name, - EventType(event_type), - obj_type, - obj_dict={'status': status, - 'metadata': metadata}) - metadata = obj.get('metadata') - status = obj.get('status') - # put event to queue - return Event(metadata['name'], - EventType(event_type), - obj_type, - obj_dict={'status': status}) - - -class K8sCache(object): - - def __init__(self): - self._lock = threading.Lock() - # key: flapp_name, value: a dict - # {'flapp': flapp cache, 'pods': pods cache, - # 'deleted': is flapp deleted} - self._cache = {} - - # TODO(xiangyuxuan): use class instead of json to manage cache and queue - def update_cache(self, event: Event): - with self._lock: - flapp_name = event.flapp_name - if flapp_name not in self._cache: - self._cache[flapp_name] = {'pods': {'items': []}, - 'deleted': False} - # if not flapp's then pod's event - if event.obj_type == ObjectType.FLAPP: - if event.event_type == EventType.DELETED: - self._cache[flapp_name] = {'pods': {'items': []}, - 'deleted': True} - else: - self._cache[flapp_name]['deleted'] = False - self._cache[flapp_name]['flapp'] = event.obj_dict - else: - if self._cache[flapp_name]['deleted']: - return - existed = False - for index, pod in enumerate( - self._cache[flapp_name]['pods']['items']): - if pod['metadata']['name'] == \ - event.obj_dict['metadata']['name']: - existed = True - self._cache[flapp_name]['pods']['items'][index] \ - = event.obj_dict - break - if not existed: - self._cache[flapp_name]['pods'][ - 'items'].append(event.obj_dict) - - def get_cache(self, flapp_name): - # use read-write lock to fast - with self._lock: - if flapp_name in self._cache: - return self._cache[flapp_name] - return {'flapp': None, 'pods': {'items': []}} - - -k8s_cache = K8sCache() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/k8s_client.py b/web_console_v2/api/fedlearner_webconsole/utils/k8s_client.py deleted file mode 100644 index 3106bdecd..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/k8s_client.py +++ /dev/null @@ -1,389 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import enum -import logging -import os -from http import HTTPStatus -from typing import Optional - -import kubernetes -import requests -from kubernetes import client -from kubernetes.client.exceptions import ApiException - -from envs import Envs -from fedlearner_webconsole.exceptions import (InvalidArgumentException, - NotFoundException, - ResourceConflictException, - InternalException) -from fedlearner_webconsole.utils.decorators import retry_fn -from fedlearner_webconsole.utils.fake_k8s_client import FakeK8sClient -from fedlearner_webconsole.utils.k8s_cache import k8s_cache - - -class CrdKind(enum.Enum): - FLAPP = 'flapps' - SPARK_APPLICATION = 'sparkapplications' - - -FEDLEARNER_CUSTOM_GROUP = 'fedlearner.k8s.io' -FEDLEARNER_CUSTOM_VERSION = 'v1alpha1' - -SPARKOPERATOR_CUSTOM_GROUP = 'sparkoperator.k8s.io' -SPARKOPERATOR_CUSTOM_VERSION = 'v1beta2' -SPARKOPERATOR_NAMESPACE = Envs.K8S_NAMESPACE - - -class K8sClient(object): - def __init__(self): - self.core = None - self.crds = None - self._networking = None - self._app = None - self._api_server_url = 'http://{}:{}'.format( - os.environ.get('FL_API_SERVER_HOST', 'fedlearner-apiserver'), - os.environ.get('FL_API_SERVER_PORT', 8101)) - - def init(self, config_path: Optional[str] = None): - # Sets config - if config_path is None: - kubernetes.config.load_incluster_config() - else: - kubernetes.config.load_kube_config(config_path) - # Inits API clients - self.core = client.CoreV1Api() - self.crds = client.CustomObjectsApi() - self._networking = client.NetworkingV1beta1Api() - self._app = client.AppsV1Api() - - def close(self): - self.core.api_client.close() - self._networking.api_client.close() - - def _raise_runtime_error(self, exception: ApiException): - raise RuntimeError('[{}] {}'.format(exception.status, - exception.reason)) - - def create_or_update_secret(self, - data, - metadata, - secret_type, - name, - namespace='default'): - """Create secret. If existed, then replace""" - request = client.V1Secret(api_version='v1', - data=data, - kind='Secret', - metadata=metadata, - type=secret_type) - try: - self.core.read_namespaced_secret(name, namespace) - # If the secret already exists, then we use patch to replace it. - # We don't use replace method because it requires `resourceVersion`. - self.core.patch_namespaced_secret(name, namespace, request) - return - except ApiException as e: - # 404 is expected if the secret does not exist - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - try: - self.core.create_namespaced_secret(namespace, request) - except ApiException as e: - self._raise_runtime_error(e) - - def delete_secret(self, name, namespace='default'): - try: - self.core.delete_namespaced_secret(name, namespace) - except ApiException as e: - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - - def get_secret(self, name, namespace='default'): - try: - return self.core.read_namespaced_secret(name, namespace) - except ApiException as e: - self._raise_runtime_error(e) - - def create_or_update_service(self, - metadata, - spec, - name, - namespace='default'): - """Create secret. If existed, then replace""" - request = client.V1Service(api_version='v1', - kind='Service', - metadata=metadata, - spec=spec) - try: - self.core.read_namespaced_service(name, namespace) - # If the service already exists, then we use patch to replace it. - # We don't use replace method because it requires `resourceVersion`. 
- self.core.patch_namespaced_service(name, namespace, request) - return - except ApiException as e: - # 404 is expected if the service does not exist - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - try: - self.core.create_namespaced_service(namespace, request) - except ApiException as e: - self._raise_runtime_error(e) - - def delete_service(self, name, namespace='default'): - try: - self.core.delete_namespaced_service(name, namespace) - except ApiException as e: - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - - def get_service(self, name, namespace='default'): - try: - return self.core.read_namespaced_service(name, namespace) - except ApiException as e: - self._raise_runtime_error(e) - - def create_or_update_ingress(self, - metadata, - spec, - name, - namespace='default'): - request = client.NetworkingV1beta1Ingress( - api_version='networking.k8s.io/v1beta1', - kind='Ingress', - metadata=metadata, - spec=spec) - try: - self._networking.read_namespaced_ingress(name, namespace) - # If the ingress already exists, then we use patch to replace it. - # We don't use replace method because it requires `resourceVersion`. - self._networking.patch_namespaced_ingress(name, namespace, request) - return - except ApiException as e: - # 404 is expected if the ingress does not exist - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - try: - self._networking.create_namespaced_ingress(namespace, request) - except ApiException as e: - self._raise_runtime_error(e) - - def delete_ingress(self, name, namespace='default'): - try: - self._networking.delete_namespaced_ingress(name, namespace) - except ApiException as e: - self._raise_runtime_error(e) - - def get_ingress(self, name, namespace='default'): - try: - return self._networking.read_namespaced_ingress(name, namespace) - except ApiException as e: - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - - def create_or_update_deployment(self, - metadata, - spec, - name, - namespace='default'): - request = client.V1Deployment(api_version='apps/v1', - kind='Deployment', - metadata=metadata, - spec=spec) - try: - self._app.read_namespaced_deployment(name, namespace) - # If the deployment already exists, then we use patch to replace it. - # We don't use replace method because it requires `resourceVersion`. 
- self._app.patch_namespaced_deployment(name, namespace, request) - return - except ApiException as e: - # 404 is expected if the deployment does not exist - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - try: - self._app.create_namespaced_deployment(namespace, request) - except ApiException as e: - self._raise_runtime_error(e) - - def delete_deployment(self, name, namespace='default'): - try: - self._app.delete_namespaced_deployment(name, namespace) - except ApiException as e: - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - - def get_deployment(self, name, namespace='default'): - try: - return self._app.read_namespaced_deployment(name, namespace) - except ApiException as e: - self._raise_runtime_error(e) - - @retry_fn(retry_times=3) - def delete_flapp(self, flapp_name): - try: - self.crds.delete_namespaced_custom_object( - group=FEDLEARNER_CUSTOM_GROUP, - version=FEDLEARNER_CUSTOM_VERSION, - namespace=Envs.K8S_NAMESPACE, - plural=CrdKind.FLAPP.value, - name=flapp_name) - except ApiException as e: - # If the flapp has been deleted then the exception gets ignored - if e.status != HTTPStatus.NOT_FOUND: - self._raise_runtime_error(e) - - @retry_fn(retry_times=3) - def create_flapp(self, flapp_yaml): - try: - self.crds.create_namespaced_custom_object( - group=FEDLEARNER_CUSTOM_GROUP, - version=FEDLEARNER_CUSTOM_VERSION, - namespace=Envs.K8S_NAMESPACE, - plural=CrdKind.FLAPP.value, - body=flapp_yaml) - except ApiException as e: - # If the flapp exists then we delete it - if e.status == HTTPStatus.CONFLICT: - self.delete_flapp(flapp_yaml['metadata']['name']) - # Raise to make it retry - raise - - def get_flapp(self, flapp_name): - return k8s_cache.get_cache(flapp_name) - - def get_webshell_session(self, - flapp_name: str, - container_name: str, - namespace='default'): - response = requests.get( - '{api_server_url}/namespaces/{namespace}/pods/{custom_object_name}/' - 'shell/${container_name}'.format( - api_server_url=self._api_server_url, - namespace=namespace, - custom_object_name=flapp_name, - container_name=container_name)) - if response.status_code != HTTPStatus.OK: - raise RuntimeError('{}:{}'.format(response.status_code, - response.content)) - return response.json() - - def get_sparkapplication(self, - name: str, - namespace: str = SPARKOPERATOR_NAMESPACE) -> dict: - """get sparkapp - - Args: - name (str): sparkapp name - namespace (str, optional): namespace to submit. - - Raises: - ApiException - - Returns: - dict: resp of k8s - """ - try: - return self.crds.get_namespaced_custom_object( - group=SPARKOPERATOR_CUSTOM_GROUP, - version=SPARKOPERATOR_CUSTOM_VERSION, - namespace=namespace, - plural=CrdKind.SPARK_APPLICATION.value, - name=name) - except ApiException as err: - if err.status == 404: - raise NotFoundException() - raise InternalException(details=err.body) - - def create_sparkapplication( - self, - json_object: dict, - namespace: str = SPARKOPERATOR_NAMESPACE) -> dict: - """ create sparkapp - - Args: - json_object (dict): json object of config - namespace (str, optional): namespace to submit. 
- - Raises: - ApiException - - Returns: - dict: resp of k8s - """ - try: - logging.debug('create sparkapp json is %s', json_object) - return self.crds.create_namespaced_custom_object( - group=SPARKOPERATOR_CUSTOM_GROUP, - version=SPARKOPERATOR_CUSTOM_VERSION, - namespace=namespace, - plural=CrdKind.SPARK_APPLICATION.value, - body=json_object) - except ApiException as err: - if err.status == 409: - raise ResourceConflictException(message=err.reason) - if err.status == 400: - raise InvalidArgumentException(details=err.reason) - raise InternalException(details=err.body) - - def delete_sparkapplication(self, - name: str, - namespace: str = SPARKOPERATOR_NAMESPACE - ) -> dict: - """ delete sparkapp - - Args: - name (str): sparkapp name - namespace (str, optional): namespace to delete. - - Raises: - ApiException - - Returns: - dict: resp of k8s - """ - try: - return self.crds.delete_namespaced_custom_object( - group=SPARKOPERATOR_CUSTOM_GROUP, - version=SPARKOPERATOR_CUSTOM_VERSION, - namespace=namespace, - plural=CrdKind.SPARK_APPLICATION.value, - name=name, - body=client.V1DeleteOptions()) - except ApiException as err: - if err.status == 404: - raise NotFoundException() - raise InternalException(details=err.body) - - def get_pod_log(self, name: str, namespace: str, tail_lines: int): - try: - return self.core.read_namespaced_pod_log(name=name, - namespace=namespace, - tail_lines=tail_lines) - except ApiException as e: - self._raise_runtime_error(e) - - def get_pods(self, namespace, label_selector): - try: - return self.core.list_namespaced_pod(namespace=namespace, - label_selector=label_selector) - except ApiException as e: - self._raise_runtime_error(e) - - -k8s_client = FakeK8sClient() -if Envs.FLASK_ENV == 'production' or \ - Envs.K8S_CONFIG_PATH is not None: - k8s_client = K8sClient() - k8s_client.init(Envs.K8S_CONFIG_PATH) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/k8s_watcher.py b/web_console_v2/api/fedlearner_webconsole/utils/k8s_watcher.py deleted file mode 100644 index 22372a1ab..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/k8s_watcher.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import logging -import threading -import queue -import traceback -from http import HTTPStatus -from kubernetes import client, watch -from envs import Envs, Features -from fedlearner_webconsole.utils.k8s_cache import k8s_cache, \ - Event, ObjectType -from fedlearner_webconsole.utils.k8s_client import ( - k8s_client, FEDLEARNER_CUSTOM_GROUP, - FEDLEARNER_CUSTOM_VERSION) -from fedlearner_webconsole.mmgr.service import ModelService -from fedlearner_webconsole.db import make_session_context -from fedlearner_webconsole.job.service import JobService - - -session_context = make_session_context() - -class K8sWatcher(object): - def __init__(self): - self._lock = threading.Lock() - self._running = False - self._flapp_watch_thread = None - self._pods_watch_thread = None - self._event_consumer_thread = None - - # https://stackoverflow.com/questions/62223424/ - # simplequeue-vs-queue-in-python-what-is-the- - # advantage-of-using-simplequeue - # if use simplequeue, put opt never block. - # TODO(xiangyuxuan): change to simplequeue - self._queue = queue.Queue() - self._cache = {} - self._cache_lock = threading.Lock() - - def start(self): - with self._lock: - if self._running: - logging.warning('K8s watcher has already started') - return - self._running = True - self._flapp_watch_thread = threading.Thread( - target=self._k8s_flapp_watcher, - name='flapp_watcher', - daemon=True) - self._pods_watch_thread = threading.Thread( - target=self._k8s_pods_watch, - name='pods_watcher', - daemon=True) - self._event_consumer_thread = threading.Thread( - target=self._event_consumer, - name='cache_consumer', - daemon=True) - self._pods_watch_thread.start() - self._flapp_watch_thread.start() - self._event_consumer_thread.start() - logging.info('K8s watcher started') - - def _event_consumer(self): - # TODO(xiangyuxuan): do more business level operations - while True: - try: - event = self._queue.get() - k8s_cache.update_cache(event) - # job state must be updated before model service - self._update_hook(event) - if Features.FEATURE_MODEL_K8S_HOOK: - with session_context() as session: - ModelService(session).k8s_watcher_hook(event) - session.commit() - except Exception as e: # pylint: disable=broad-except - logging.error(f'K8s event_consumer : {str(e)}. ' - f'traceback:{traceback.format_exc()}') - - def _update_hook(self, event: Event): - if event.obj_type == ObjectType.FLAPP: - logging.debug('[k8s_watcher][_update_hook]receive event %s', - event.flapp_name) - with session_context() as session: - JobService(session).update_running_state(event.flapp_name) - session.commit() - - def _k8s_flapp_watcher(self): - resource_version = '0' - watcher = watch.Watch() - while True: - logging.info(f'new stream of flapps watch rv:{resource_version}') - if not self._running: - watcher.stop() - break - # resource_version '0' means getting a recent resource without - # consistency guarantee, this is to reduce the load of etcd. 
- # Ref: https://kubernetes.io/docs/reference/using-api - # /api-concepts/ #the-resourceversion-parameter - stream = watcher.stream( - k8s_client.crds.list_namespaced_custom_object, - group=FEDLEARNER_CUSTOM_GROUP, - version=FEDLEARNER_CUSTOM_VERSION, - namespace=Envs.K8S_NAMESPACE, - plural='flapps', - resource_version=resource_version, - _request_timeout=900, # Sometimes watch gets stuck - ) - try: - for event in stream: - - self._produce_event(event, ObjectType.FLAPP) - - metadata = event['object'].get('metadata') - if metadata['resourceVersion'] is not None: - resource_version = max(metadata['resourceVersion'], - resource_version) - logging.debug( - f'resource_version now: {resource_version}') - except client.exceptions.ApiException as e: - logging.error(f'watcher:{str(e)}') - if e.status == HTTPStatus.GONE: - # It has been too old, resources should be relisted - resource_version = '0' - except Exception as e: # pylint: disable=broad-except - logging.error(f'K8s watcher gets event error: {str(e)}', - exc_info=True) - - def _produce_event(self, event, obj_type): - self._queue.put(Event.from_json(event, obj_type)) - - def _k8s_pods_watch(self): - resource_version = '0' - watcher = watch.Watch() - while True: - logging.info(f'new stream of pods watch rv: {resource_version}') - if not self._running: - watcher.stop() - break - # resource_version '0' means getting a recent resource without - # consistency guarantee, this is to reduce the load of etcd. - # Ref: https://kubernetes.io/docs/reference/using-api - # /api-concepts/ #the-resourceversion-parameter - stream = watcher.stream( - k8s_client.core.list_namespaced_pod, - namespace=Envs.K8S_NAMESPACE, - label_selector='app-name', - resource_version=resource_version, - _request_timeout=900, # Sometimes watch gets stuck - ) - - try: - for event in stream: - self._produce_event(event, ObjectType.POD) - metadata = event['object'].metadata - if metadata.resource_version is not None: - resource_version = max(metadata.resource_version, - resource_version) - logging.debug( - f'resource_version now: {resource_version}') - except client.exceptions.ApiException as e: - logging.error(f'watcher:{str(e)}') - if e.status == HTTPStatus.GONE: - # It has been too old, resources should be relisted - resource_version = '0' - except Exception as e: # pylint: disable=broad-except - logging.error(f'K8s watcher gets event error: {str(e)}', - exc_info=True) - - -k8s_watcher = K8sWatcher() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/kibana.py b/web_console_v2/api/fedlearner_webconsole/utils/kibana.py index d9271adcf..1a7824cd3 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/kibana.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/kibana.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. @@ -13,7 +13,7 @@ # limitations under the License. 
# coding: utf-8 -# pylint: disable=invalid-string-quote +# pylint: disable=invalid-string-quote,missing-type-doc,missing-return-type-doc,consider-using-f-string import hashlib import os import re @@ -37,34 +37,22 @@ class Kibana(object): """ TSVB = ('Rate', 'Ratio', 'Numeric') TIMELION = ('Time', 'Timer') - RISON_REPLACEMENT = {' ': '%20', - '"': '%22', - '#': '%23', - '%': '%25', - '&': '%26', - '+': '%2B', - '/': '%2F', - '=': '%3D'} - TIMELION_QUERY_REPLACEMENT = {' and ': ' AND ', - ' or ': ' OR '} + RISON_REPLACEMENT = {' ': '%20', '"': '%22', '#': '%23', '%': '%25', '&': '%26', '+': '%2B', '/': '%2F', '=': '%3D'} + TIMELION_QUERY_REPLACEMENT = {' and ': ' AND ', ' or ': ' OR '} LOGICAL_PATTERN = re.compile(' and | or ', re.IGNORECASE) - TSVB_AGG_TYPE = {'Average': 'avg', - 'Sum': 'sum', - 'Max': 'max', - 'Min': 'min', - 'Variance': 'variance', - 'Std. Deviation': 'std_deviation', - 'Sum of Squares': 'sum_of_squares'} - TIMELION_AGG_TYPE = {'Average': 'avg', - 'Sum': 'sum', - 'Max': 'max', - 'Min': 'min'} - COLORS = ['#DA6E6E', '#FA8080', '#789DFF', - '#66D4FF', '#6EB518', '#9AF02E'] + TSVB_AGG_TYPE = { + 'Average': 'avg', + 'Sum': 'sum', + 'Max': 'max', + 'Min': 'min', + 'Variance': 'variance', + 'Std. Deviation': 'std_deviation', + 'Sum of Squares': 'sum_of_squares' + } + TIMELION_AGG_TYPE = {'Average': 'avg', 'Sum': 'sum', 'Max': 'max', 'Min': 'min'} + COLORS = ['#DA6E6E', '#FA8080', '#789DFF', '#66D4FF', '#6EB518', '#9AF02E'] # metrics* for all other job types - JOB_INDEX = {JobType.RAW_DATA: 'raw_data', - JobType.DATA_JOIN: 'data_join', - JobType.PSI_DATA_JOIN: 'data_join'} + JOB_INDEX = {JobType.RAW_DATA: 'raw_data', JobType.DATA_JOIN: 'data_join', JobType.PSI_DATA_JOIN: 'data_join'} BASIC_QUERY = "app/kibana#/visualize/create" \ "?type={type}&_g=(refreshInterval:(pause:!t,value:0)," \ "time:(from:'{start_time}',to:'{end_time}'))&" \ @@ -83,17 +71,10 @@ def remote_query(job, args): if 'query' in args and args['query']: panel['filter']['query'] += ' and ({})'.format(args['query']) st, et = Kibana._parse_start_end_time(args, use_now=False) - req = { - 'timerange': { - 'timezone': Envs.TZ.zone, - 'min': st, - 'max': et - }, - 'panels': [panel] - } - res = requests.post( - os.path.join(Envs.KIBANA_SERVICE_ADDRESS, 'api/metrics/vis/data'), - json=req, headers={'kbn-xsrf': 'true'}) + req = {'timerange': {'timezone': Envs.TZ.zone, 'min': st, 'max': et}, 'panels': [panel]} + res = requests.post(os.path.join(Envs.KIBANA_SERVICE_ADDRESS, 'api/metrics/vis/data'), + json=req, + headers={'kbn-xsrf': 'true'}) try: res.raise_for_status() @@ -102,18 +83,15 @@ def remote_query(job, args): data = list(map(lambda x: [x[0], x[1] or 0], data)) return data except Exception as e: # pylint: disable=broad-except - raise InternalException(repr(e)) + raise InternalException(repr(e)) from e @staticmethod def _check_remote_args(args): - for arg in ('type', 'interval', 'x_axis_field', - 'start_time', 'end_time'): + for arg in ('type', 'interval', 'x_axis_field', 'start_time', 'end_time'): Kibana._check_present(args, arg) Kibana._check_authorization(args.get('query')) - Kibana._check_authorization(args['x_axis_field'], - extra_allowed={'tags.event_time', - 'tags.process_time'}) + Kibana._check_authorization(args['x_axis_field'], extra_allowed={'tags.event_time', 'tags.process_time'}) if args['type'] == 'Ratio': for arg in ('numerator', 'denominator'): @@ -128,8 +106,7 @@ def _check_remote_args(args): @staticmethod def _check_present(args, arg_name): if arg_name not in args or args[arg_name] is None: - raise 
InvalidArgumentException(
-                'Missing required argument [{}].'.format(arg_name))
+            raise InvalidArgumentException('Missing required argument [{}].'.format(arg_name))
@@ -140,8 +117,7 @@ def _check_authorization(arg, extra_allowed: set = None):
             if not query:
                 continue
             if query.split(':')[0] not in allowed_fields:
-                raise UnauthorizedException(
-                    'Query [{}] is not authorized.'.format(query))
+                raise UnauthorizedException('Query [{}] is not authorized.'.format(query))
@@ -163,17 +139,14 @@ def create_tsvb(job, args):
             vis_state['params']['filter']['query'] += \
                 ' and ({})'.format(args['query'])
         # rison-ify and replace
-        vis_state = Kibana._regex_process(
-            prison.dumps(vis_state), Kibana.RISON_REPLACEMENT
-        )
+        vis_state = Kibana._regex_process(prison.dumps(vis_state), Kibana.RISON_REPLACEMENT)
         start_time, end_time = Kibana._parse_start_end_time(args)
         # a single-item list
         return [
-            os.path.join(Envs.KIBANA_ADDRESS,
-                         Kibana.BASIC_QUERY.format(type='metrics',
-                                                   start_time=start_time,
-                                                   end_time=end_time,
-                                                   vis_state=vis_state))
+            os.path.join(
+                Envs.KIBANA_ADDRESS,
+                Kibana.BASIC_QUERY.format(type='metrics', start_time=start_time, end_time=end_time,
+                                          vis_state=vis_state))
         ]

     @staticmethod
@@ -190,16 +163,11 @@ def create_timelion(job, args):
         vis_states, times = Kibana._create_timer_visualization(job, args)
         # a generator, rison-ify and replace
-        vis_states = (
-            Kibana._regex_process(vs, Kibana.RISON_REPLACEMENT)
-            for vs in map(prison.dumps, vis_states)
-        )
+        vis_states = (Kibana._regex_process(vs, Kibana.RISON_REPLACEMENT) for vs in map(prison.dumps, vis_states))
         return [
-            os.path.join(Envs.KIBANA_ADDRESS,
-                         Kibana.BASIC_QUERY.format(type='timelion',
-                                                   start_time=start,
-                                                   end_time=end,
-                                                   vis_state=vis_state))
+            os.path.join(
+                Envs.KIBANA_ADDRESS,
+                Kibana.BASIC_QUERY.format(type='timelion', start_time=start, end_time=end, vis_state=vis_state))
             for (start, end), vis_state in zip(times, vis_states)
         ]
@@ -210,15 +178,13 @@ def _parse_start_end_time(args, use_now=True):
                 else Kibana._normalize_datetime(
                     datetime.now(tz=pytz.utc) - timedelta(days=365 * 5))
         else:
-            st = Kibana._normalize_datetime(
-                datetime.fromtimestamp(args['start_time'], tz=pytz.utc))
+            st = Kibana._normalize_datetime(datetime.fromtimestamp(args['start_time'], tz=pytz.utc))
         if args['end_time'] < 0:
             et = 'now' if use_now \
                 else Kibana._normalize_datetime(datetime.now(tz=pytz.utc))
         else:
-            et = Kibana._normalize_datetime(
-                datetime.fromtimestamp(args['end_time'], tz=pytz.utc))
+            et = Kibana._normalize_datetime(datetime.fromtimestamp(args['end_time'], tz=pytz.utc))
         return st, et

     @staticmethod
@@ -238,10 +204,7 @@ def _regex_process(string, replacement):
         re_mode = re.IGNORECASE
         escaped_keys = map(re.escape, replacement)
         pattern = re.compile("|".join(escaped_keys), re_mode)
-        return pattern.sub(
-            lambda match: replacement[match.group(0).lower()],
-            string
-        )
+        return pattern.sub(lambda match: replacement[match.group(0).lower()], string)

     @staticmethod
     def _create_rate_visualization(job, args):
@@ -259,51 +222,46 @@
         params = vis_state['params']
         # `w/`, `w/o` = `with`, `without`
         # Total w/ Fake series
-        twf = Kibana._tsvb_series(
-            label='Total w/ Fake',
-            metrics={'type': 'count'}
-        )
+        twf = Kibana._tsvb_series(label='Total w/ Fake', metrics={'type': 'count'})
         # Total w/o Fake series
         twof = Kibana._tsvb_series(
             label='Total w/o Fake',
             metrics={'type': 'count'},
             # unjoined 
and normal joined - series_filter={'query': 'tags.joined: "-1" or tags.joined: 1'} - ) + series_filter={'query': 'tags.joined: "-1" or tags.joined: 1'}) # Joined w/ Fake series jwf = Kibana._tsvb_series( label='Joined w/ Fake', metrics={'type': 'count'}, # faked joined and normal joined - series_filter={'query': 'tags.joined: 0 or tags.joined: 1'} - ) + series_filter={'query': 'tags.joined: 0 or tags.joined: 1'}) # Joined w/o Fake series jwof = Kibana._tsvb_series( label='Joined w/o Fake', metrics={'type': 'count'}, # normal joined - series_filter={'query': 'tags.joined: 1'} - ) + series_filter={'query': 'tags.joined: 1'}) # Join Rate w/ Fake series jrwf = Kibana._tsvb_series( series_type='ratio', label='Join Rate w/ Fake', - metrics={'numerator': 'tags.joined: 1 or tags.joined: 0', - 'denominator': '*', # joined == -1 or 0 or 1 - 'type': 'filter_ratio'}, + metrics={ + 'numerator': 'tags.joined: 1 or tags.joined: 0', + 'denominator': '*', # joined == -1 or 0 or 1 + 'type': 'filter_ratio' + }, line_width='2', - fill='0' - ) + fill='0') # Join Rate w/o Fake series - jrwof = Kibana._tsvb_series( - series_type='ratio', - label='Join Rate w/o Fake', - metrics={'numerator': 'tags.joined: 1', - 'denominator': 'tags.joined: 1 or tags.joined: "-1"', - 'type': 'filter_ratio'}, - line_width='2', - fill='0' - ) + jrwof = Kibana._tsvb_series(series_type='ratio', + label='Join Rate w/o Fake', + metrics={ + 'numerator': 'tags.joined: 1', + 'denominator': 'tags.joined: 1 or tags.joined: "-1"', + 'type': 'filter_ratio' + }, + line_width='2', + fill='0') series = [twf, twof, jwf, jwof, jrwf, jrwof] for series_, color in zip(series, Kibana.COLORS): series_['color'] = color @@ -323,37 +281,34 @@ def _create_ratio_visualization(job, args): Returns: dict. A Kibana vis state dict + Raises: + ValueError: if some args not exist + This method will create 3 time series and stack them in vis state. """ for k in ('numerator', 'denominator'): if k not in args or args[k] is None: - raise ValueError( - '[{}] should be provided in Ratio visualization'.format(k) - ) + raise ValueError('[{}] should be provided in Ratio visualization'.format(k)) vis_state = Kibana._basic_tsvb_vis_state(job, args) params = vis_state['params'] # Denominator series - denominator = Kibana._tsvb_series( - label=args['denominator'], - metrics={'type': 'count'}, - series_filter={'query': args['denominator']} - ) + denominator = Kibana._tsvb_series(label=args['denominator'], + metrics={'type': 'count'}, + series_filter={'query': args['denominator']}) # Numerator series - numerator = Kibana._tsvb_series( - label=args['numerator'], - metrics={'type': 'count'}, - series_filter={'query': args['numerator']} - ) + numerator = Kibana._tsvb_series(label=args['numerator'], + metrics={'type': 'count'}, + series_filter={'query': args['numerator']}) # Ratio series - ratio = Kibana._tsvb_series( - series_type='ratio', - label='Ratio', - metrics={'numerator': args['numerator'], - 'denominator': args['denominator'], - 'type': 'filter_ratio'}, - line_width='2', - fill='0' - ) + ratio = Kibana._tsvb_series(series_type='ratio', + label='Ratio', + metrics={ + 'numerator': args['numerator'], + 'denominator': args['denominator'], + 'type': 'filter_ratio' + }, + line_width='2', + fill='0') series = [denominator, numerator, ratio] for series_, color in zip(series, Kibana.COLORS[1::2]): series_['color'] = color @@ -371,6 +326,9 @@ def _create_numeric_visualization(job, args): Returns: dict. 
A Kibana vis state dict + Raises: + ValueError: if some args not exist + This method will create 1 time series. The series will filter data further by `name: args['metric_name']`. Aggregation will be applied on data's `args['value_field']` field. Aggregation types @@ -378,21 +336,17 @@ def _create_numeric_visualization(job, args): """ for k in ('aggregator', 'value_field'): if k not in args or args[k] is None: - raise ValueError( - '[{}] should be provided in Numeric visualization.' - .format(k) - ) + raise ValueError('[{}] should be provided in Numeric visualization.'.format(k)) assert args['aggregator'] in Kibana.TSVB_AGG_TYPE vis_state = Kibana._basic_tsvb_vis_state(job, args) params = vis_state['params'] - series = Kibana._tsvb_series( - label='{} of {}'.format(args['aggregator'], - args['value_field']), - metrics={'type': Kibana.TSVB_AGG_TYPE[args['aggregator']], - 'field': args['value_field']}, - line_width=2, - fill='0.5' - ) + series = Kibana._tsvb_series(label='{} of {}'.format(args['aggregator'], args['value_field']), + metrics={ + 'type': Kibana.TSVB_AGG_TYPE[args['aggregator']], + 'field': args['value_field'] + }, + line_width=2, + fill='0.5') series['color'] = Kibana.COLORS[-2] params['series'] = [series] return vis_state @@ -422,23 +376,14 @@ def _create_time_visualization(job, args): for t1, t2 in ((et, pt), (pt, et)): # t1 vs t2, max/min/median of t1 as Y axis, t2 as X axis # aggregate on t1 and histogram on t2 - max_series = Kibana._timelion_series( - query=query, index=index, - metric='max:' + t1, timefield=t2 - ) - min_series = Kibana._timelion_series( - query=query, index=index, - metric='min:' + t1, timefield=t2 - ) - median_series = Kibana._timelion_series( - query=query, index=index, - metric='percentiles:' + t1 + ':50', timefield=t2 - ) + max_series = Kibana._timelion_series(query=query, index=index, metric='max:' + t1, timefield=t2) + min_series = Kibana._timelion_series(query=query, index=index, metric='min:' + t1, timefield=t2) + median_series = Kibana._timelion_series(query=query, + index=index, + metric='percentiles:' + t1 + ':50', + timefield=t2) series = ','.join((max_series, min_series, median_series)) - vis_state = {"type": "timelion", - "params": {"expression": series, - "interval": interval}, - "aggs": []} + vis_state = {"type": "timelion", "params": {"expression": series, "interval": interval}, "aggs": []} vis_states.append(vis_state) by_pt_start = Kibana._get_start_from_job(job) by_pt_end = 'now' @@ -451,12 +396,10 @@ def _create_timer_visualization(job, args): if not names: return [], [] # split by comma, strip whitespaces of each name, filter out empty ones - args['timer_names'] = [name for name in - map(str.strip, names.split(',')) if name] + args['timer_names'] = [name for name in map(str.strip, names.split(',')) if name] if args['aggregator'] not in Kibana.TIMELION_AGG_TYPE: - raise TypeError('Aggregator [{}] is not supported in Timer ' - 'visualization.'.format(args['aggregator'])) + raise TypeError('Aggregator [{}] is not supported in Timer ' 'visualization.'.format(args['aggregator'])) metric = '{}:value'.format(Kibana.TIMELION_AGG_TYPE[args['aggregator']]) query = 'tags.application_id:{}'.format(job.name) @@ -465,25 +408,29 @@ def _create_timer_visualization(job, args): interval = args['interval'] if args['interval'] != '' else 'auto' series = [] for timer in args['timer_names']: - s = Kibana._timelion_series( - query=query + ' AND name:{}'.format(timer), index='metrics*', - metric=metric, timefield='tags.process_time' - ) + s = 
Kibana._timelion_series(query=query + ' AND name:{}'.format(timer), + index='metrics*', + metric=metric, + timefield='tags.process_time') series.append(s) if args['split']: # split series to different plots vis_states = [{ "type": "timelion", - "params": {"expression": s, - "interval": interval}, + "params": { + "expression": s, + "interval": interval + }, "aggs": [] } for s in series] else: # multiple series in one plot, a single-item list vis_states = [{ "type": "timelion", - "params": {"expression": ','.join(series), - "interval": interval}, + "params": { + "expression": ','.join(series), + "interval": interval + }, "aggs": [] }] start = Kibana._get_start_from_job(job) @@ -508,27 +455,29 @@ def _basic_tsvb_vis_state(job, args): """ assert 'x_axis_field' in args and args['x_axis_field'] - vis_state = {"aggs": [], - "params": {"axis_formatter": "number", - "axis_min": "", - "axis_position": "left", - "axis_scale": "normal", - "default_index_pattern": "metrics*", - "filter": {}, - "index_pattern": "", - "interval": "", - "isModelInvalid": False, - "show_grid": 1, - "show_legend": 1, - "time_field": "", - "type": "timeseries"}} + vis_state = { + "aggs": [], + "params": { + "axis_formatter": "number", + "axis_min": "", + "axis_position": "left", + "axis_scale": "normal", + "default_index_pattern": "metrics*", + "filter": {}, + "index_pattern": "", + "interval": "", + "isModelInvalid": False, + "show_grid": 1, + "show_legend": 1, + "time_field": "", + "type": "timeseries" + } + } params = vis_state['params'] params['interval'] = args.get('interval', '') params['index_pattern'] = Kibana.JOB_INDEX \ .get(job.job_type, 'metrics') + '*' - params['filter'] = Kibana._filter_query( - 'tags.application_id:"{}"'.format(job.name) - ) + params['filter'] = Kibana._filter_query('tags.application_id:"{}"'.format(job.name)) params['time_field'] = args['x_axis_field'] return vis_state @@ -547,7 +496,8 @@ def _tsvb_series(series_type='normal', **kwargs): 'series_filter': dict, additional filter on data, only applied on this series. 
- Returns: dict, a Kibana TSVB visualization time series definition + Returns: + dict, a Kibana TSVB visualization time series definition """ # series_id is meaningless and arbitrary to us but necessary @@ -576,9 +526,7 @@ def _tsvb_series(series_type='normal', **kwargs): } if 'series_filter' in kwargs and 'query' in kwargs['series_filter']: series['split_mode'] = 'filter' - series['filter'] = Kibana._filter_query( - kwargs['series_filter']['query'] - ) + series['filter'] = Kibana._filter_query(kwargs['series_filter']['query']) if series_type == 'ratio': # if this is a ratio series, split axis and set axis range series['separate_axis'] = 1 @@ -591,9 +539,7 @@ def _timelion_series(**kwargs): assert 'metric' in kwargs assert 'timefield' in kwargs # convert all logical `and` and `or` to `AND` and `OR` - query = Kibana._regex_process( - kwargs.get('query', '*'), Kibana.TIMELION_QUERY_REPLACEMENT - ) + query = Kibana._regex_process(kwargs.get('query', '*'), Kibana.TIMELION_QUERY_REPLACEMENT) return ".es(q=\"{query}\", index={index}, " \ "metric={metric}, timefield={timefield})" \ ".legend(showTime=true)" \ @@ -602,8 +548,10 @@ def _timelion_series(**kwargs): @staticmethod def _filter_query(query): - return {'language': 'kuery', # Kibana query - 'query': query} + return { + 'language': 'kuery', # Kibana query + 'query': query + } @staticmethod def _get_start_from_job(job): diff --git a/web_console_v2/api/fedlearner_webconsole/utils/kibana_test.py b/web_console_v2/api/fedlearner_webconsole/utils/kibana_test.py new file mode 100644 index 000000000..881c93331 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/kibana_test.py @@ -0,0 +1,52 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# pylint: disable=protected-access +import unittest + +from fedlearner_webconsole.exceptions import UnauthorizedException +from fedlearner_webconsole.utils.kibana import Kibana + + +class KibanaTest(unittest.TestCase): + + def test_auth(self): + self.assertRaises(UnauthorizedException, Kibana._check_authorization, 'tags.1') + self.assertRaises(UnauthorizedException, Kibana._check_authorization, 'tags.1:2') + self.assertRaises(UnauthorizedException, Kibana._check_authorization, 'x:3 and y:4', {'x'}) + self.assertRaises(UnauthorizedException, Kibana._check_authorization, 'x:3 OR y:4 AND z:5', {'x', 'z'}) + try: + Kibana._check_authorization('x:1', {'x'}) + Kibana._check_authorization('x:1 AND y:2 OR z:3', {'x', 'y', 'z'}) + Kibana._check_authorization('x:1 oR y:2 aNd z:3', {'x', 'y', 'z'}) + Kibana._check_authorization('*', {'x', '*'}) + Kibana._check_authorization(None, None) + except UnauthorizedException: + self.fail() + + def test_parse_time(self): + dt1 = 0 + dt2 = 60 * 60 * 24 + args = {'start_time': dt1, 'end_time': dt2} + st, et = Kibana._parse_start_end_time(args) + self.assertEqual(st, '1970-01-01T00:00:00Z') + self.assertEqual(et, '1970-01-02T00:00:00Z') + st, et = Kibana._parse_start_end_time({'start_time': -1, 'end_time': -1}) + self.assertEqual(st, 'now-5y') + self.assertEqual(et, 'now') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/metrics.py b/web_console_v2/api/fedlearner_webconsole/utils/metrics.py index c7c1971e2..d468e8703 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/metrics.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/metrics.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. 
@@ -15,11 +15,56 @@ # coding: utf-8 import logging from abc import ABCMeta, abstractmethod +import sys +from typing import Dict, Union +from threading import Lock + +from opentelemetry import trace, _metrics as metrics +from opentelemetry._metrics.instrument import UpDownCounter +from opentelemetry._metrics.measurement import Measurement +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk._metrics import MeterProvider +from opentelemetry.sdk._metrics.export import (PeriodicExportingMetricReader, ConsoleMetricExporter, MetricExporter, + MetricExportResult, Metric, Sequence) +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.exporter.otlp.proto.grpc._metric_exporter import OTLPMetricExporter +from opentelemetry.sdk.trace.export import (BatchSpanProcessor, ConsoleSpanExporter, SpanExportResult, SpanExporter, + ReadableSpan) + +from envs import Envs + + +def _validate_tags(tags: Dict[str, str]): + if tags is None: + return + for k, v in tags.items(): + if not isinstance(k, str) or not isinstance(v, str): + raise TypeError(f'Expected str, actually {type(k)}: {type(v)}') + + +class DevNullSpanExporter(SpanExporter): + + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: + return SpanExportResult.SUCCESS + + def shutdown(self): + pass + + +class DevNullMetricExporter(MetricExporter): + + def export(self, metrics: Sequence[Metric]) -> MetricExportResult: # pylint: disable=redefined-outer-name + return MetricExportResult.SUCCESS + + def shutdown(self): + pass class MetricsHandler(metaclass=ABCMeta): + @abstractmethod - def emit_counter(self, name, value: int, tags: dict = None): + def emit_counter(self, name: str, value: Union[int, float], tags: Dict[str, str] = None): """Emits counter metrics which will be accumulated. Args: @@ -29,7 +74,7 @@ def emit_counter(self, name, value: int, tags: dict = None): """ @abstractmethod - def emit_store(self, name, value: int, tags: dict = None): + def emit_store(self, name: str, value: Union[int, float], tags: Dict[str, str] = None): """Emits store metrics. 
Args: @@ -41,11 +86,99 @@ class _DefaultMetricsHandler(MetricsHandler): - def emit_counter(self, name, value: int, tags: dict = None): - logging.info(f'[Metric][Counter] {name}: {value}', extra=tags or {}) - - def emit_store(self, name, value: int, tags: dict = None): - logging.info(f'[Metric][Store] {name}: {value}', extra=tags or {}) + def emit_counter(self, name, value: Union[int, float], tags: Dict[str, str] = None): + tags = tags or {} + logging.info(f'[Metric][Counter] {name}: {value}, tags={tags}') + + def emit_store(self, name, value: Union[int, float], tags: Dict[str, str] = None): + tags = tags or {} + logging.info(f'[Metric][Store] {name}: {value}, tags={tags}') + + +class OpenTelemetryMetricsHandler(MetricsHandler): + + class Callback: + + def __init__(self) -> None: + self._measurement_list = [] + + def add(self, value: Union[int, float], tags: Dict[str, str]): + self._measurement_list.append(Measurement(value=value, attributes=tags)) + + def __iter__(self): + return self + + def __next__(self): + if len(self._measurement_list) == 0: + raise StopIteration + return self._measurement_list.pop(0) + + def __call__(self): + return iter(self) + + @classmethod + def new_handler(cls) -> 'OpenTelemetryMetricsHandler': + instrument_module_name = 'fedlearner_webconsole' + resource = Resource.create(attributes={ + 'service.name': instrument_module_name, + 'deployment.environment': Envs.CLUSTER + }) + # initialize trace stuff + if Envs.APM_SERVER_ENDPOINT == 'stdout': + span_exporter = ConsoleSpanExporter(out=sys.stdout) + elif Envs.APM_SERVER_ENDPOINT == '/dev/null': + span_exporter = DevNullSpanExporter() + else: + span_exporter = OTLPSpanExporter(endpoint=Envs.APM_SERVER_ENDPOINT) + tracer_provider = TracerProvider(resource=resource) + tracer_provider.add_span_processor(BatchSpanProcessor(span_exporter)) + trace.set_tracer_provider(tracer_provider) + + # initialize meter stuff + if Envs.APM_SERVER_ENDPOINT == 'stdout': + metric_exporter = ConsoleMetricExporter(out=sys.stdout) + elif Envs.APM_SERVER_ENDPOINT == '/dev/null': + metric_exporter = DevNullMetricExporter() + else: + metric_exporter = OTLPMetricExporter(endpoint=Envs.APM_SERVER_ENDPOINT) + reader = PeriodicExportingMetricReader(metric_exporter, export_interval_millis=60000) + meter_provider = MeterProvider(metric_readers=[reader], resource=resource) + metrics.set_meter_provider(meter_provider=meter_provider) + + return cls(tracer=tracer_provider.get_tracer(instrument_module_name), + meter=meter_provider.get_meter(instrument_module_name)) + + def __init__(self, tracer: trace.Tracer, meter: metrics.Meter): + self._tracer = tracer + self._meter = meter + + self._lock = Lock() + self._cache: Dict[str, Union[UpDownCounter, OpenTelemetryMetricsHandler.Callback]] = {} + + def emit_counter(self, name: str, value: Union[int, float], tags: Dict[str, str] = None): + # Note that the `values.` prefix is used for Elastic Index Dynamic Inference. + # Optimize by decreasing lock. + if name not in self._cache: + with self._lock: + # Double check `self._cache` content. + if name not in self._cache: + counter = self._meter.create_up_down_counter(name=f'values.{name}') + self._cache[name] = counter + assert isinstance(self._cache[name], UpDownCounter) + self._cache[name].add(value, attributes=tags) + + def emit_store(self, name: str, value: Union[int, float], tags: Dict[str, str] = None): + # Note that the `values.` prefix is used for Elastic Index Dynamic Inference.
+ # Optimize by decreasing lock. + if name not in self._cache: + with self._lock: + # Double check `self._cache` content. + if name not in self._cache: + cb = OpenTelemetryMetricsHandler.Callback() + self._meter.create_observable_gauge(name=f'values.{name}', callback=cb) + self._cache[name] = cb + assert isinstance(self._cache[name], OpenTelemetryMetricsHandler.Callback) + self._cache[name].add(value=value, tags=tags) class _Client(MetricsHandler): @@ -57,12 +190,16 @@ class _Client(MetricsHandler): def __init__(self): self._handlers.append(_DefaultMetricsHandler()) + # TODO(wangsen.0914): unify this behaviour to py_libs + self._handlers.append(OpenTelemetryMetricsHandler.new_handler()) - def emit_counter(self, name, value: int, tags: dict = None): + def emit_counter(self, name, value: Union[int, float], tags: Dict[str, str] = None): + _validate_tags(tags) for handler in self._handlers: handler.emit_counter(name, value, tags) - def emit_store(self, name, value: int, tags: dict = None): + def emit_store(self, name, value: Union[int, float], tags: Dict[str, str] = None): + _validate_tags(tags) for handler in self._handlers: handler.emit_store(name, value, tags) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/metrics_test.py b/web_console_v2/api/fedlearner_webconsole/utils/metrics_test.py new file mode 100644 index 000000000..dc7179586 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/metrics_test.py @@ -0,0 +1,204 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
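With the handler changes above, `metrics.emit_counter` and `metrics.emit_store` fan out to both the logging handler and the OpenTelemetry handler, and `_validate_tags` rejects non-string tags. A hedged caller-side sketch (the metric names and tags are made up for illustration; the TypeError behaviour matches the tests below):

```python
# Illustrative usage only; metric names and tags are hypothetical.
from fedlearner_webconsole.utils import metrics

# Counters are accumulated within an export interval (UpDownCounter).
metrics.emit_counter('dataset.batch_processed', 1, tags={'module': 'dataset'})
# Stores deliver the last value within an export interval (observable gauge).
metrics.emit_store('scheduler.pending_jobs', 42, tags={'module': 'scheduler'})
# Non-string tag values raise TypeError via _validate_tags:
# metrics.emit_counter('bad', 1, tags={'code': 500})  # -> TypeError
```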
+ +# coding: utf-8 +import json +import logging +import unittest +from io import StringIO +from unittest.mock import patch +from typing import Dict + +from opentelemetry import trace as otel_trace, _metrics as otel_metrics +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.sdk._metrics import MeterProvider +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace.export import ConsoleSpanExporter +from opentelemetry.sdk._metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader + +from fedlearner_webconsole.utils import metrics +from fedlearner_webconsole.utils.metrics import _DefaultMetricsHandler, MetricsHandler, OpenTelemetryMetricsHandler + + +class _FakeMetricsHandler(MetricsHandler): + + def emit_counter(self, name, value: int, tags: Dict[str, str] = None): + logging.info(f'[Test][Counter] {name} - {value}') + + def emit_store(self, name, value: int, tags: Dict[str, str] = None): + logging.info(f'[Test][Store] {name} - {value}') + + +class DefaultMetricsHandler(unittest.TestCase): + + def setUp(self): + self._handler = _DefaultMetricsHandler() + + def test_emit_counter(self): + with self.assertLogs() as cm: + self._handler.emit_counter('test', 1) + self._handler.emit_counter('test2', 2) + logs = [r.msg for r in cm.records] + self.assertEqual(logs, ['[Metric][Counter] test: 1, tags={}', '[Metric][Counter] test2: 2, tags={}']) + + def test_emit_store(self): + with self.assertLogs() as cm: + self._handler.emit_store('test', 199) + self._handler.emit_store('test2', 299) + logs = [r.msg for r in cm.records] + self.assertEqual(logs, ['[Metric][Store] test: 199, tags={}', '[Metric][Store] test2: 299, tags={}']) + + +class ClientTest(unittest.TestCase): + + def setUp(self): + metrics.add_handler(_FakeMetricsHandler()) + + def tearDown(self): + metrics.reset_handlers() + + def test_emit_counter(self): + with self.assertRaises(TypeError): + metrics.emit_counter('test', 1, tags={'name': 1}) + + with self.assertLogs() as cm: + metrics.emit_counter('test', 1) + logs = [r.msg for r in cm.records] + self.assertEqual(logs, ['[Metric][Counter] test: 1, tags={}', '[Test][Counter] test - 1']) + + def test_emit_store(self): + with self.assertRaises(TypeError): + metrics.emit_store('test', 1, tags={'name': 1}) + + with self.assertLogs() as cm: + metrics.emit_store('test', 199) + logs = [r.msg for r in cm.records] + self.assertEqual(logs, ['[Metric][Store] test: 199, tags={}', '[Test][Store] test - 199']) + + +class OpenTelemetryMetricsHandlerClassMethodTest(unittest.TestCase): + + def setUp(self): + self._span_out = StringIO() + self._span_exporter_patcher = patch('fedlearner_webconsole.utils.metrics.OTLPSpanExporter', + lambda **kwargs: ConsoleSpanExporter(out=self._span_out)) + self._metric_out = StringIO() + self._metric_exporter_patcher = patch('fedlearner_webconsole.utils.metrics.OTLPMetricExporter', + lambda **kwargs: ConsoleMetricExporter(out=self._metric_out)) + self._span_exporter_patcher.start() + self._metric_exporter_patcher.start() + + def tearDown(self): + self._metric_exporter_patcher.stop() + self._span_exporter_patcher.stop() + + def test_new_handler(self): + OpenTelemetryMetricsHandler.new_handler() + self.assertEqual( + otel_trace.get_tracer_provider().resource, + Resource( + attributes={ + 'telemetry.sdk.language': 'python', + 'telemetry.sdk.name': 'opentelemetry', + 'telemetry.sdk.version': '1.10.0', + 'service.name': 'fedlearner_webconsole', + 
'deployment.environment': 'default', + })) + self.assertEqual( + otel_metrics.get_meter_provider()._sdk_config.resource, # pylint: disable=protected-access + Resource( + attributes={ + 'telemetry.sdk.language': 'python', + 'telemetry.sdk.name': 'opentelemetry', + 'telemetry.sdk.version': '1.10.0', + 'service.name': 'fedlearner_webconsole', + 'deployment.environment': 'default', + })) + + +class OpenTelemetryMetricsHandlerTest(unittest.TestCase): + + def setUp(self): + self._span_out = StringIO() + self._metric_out = StringIO() + tracer_provider = TracerProvider() + tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter(out=self._span_out))) + reader = PeriodicExportingMetricReader(ConsoleMetricExporter(out=self._metric_out), + export_interval_millis=60000) + meter_provider = MeterProvider(metric_readers=[reader]) + self._tracer_provider = tracer_provider + self._meter_provider = meter_provider + self._handler = OpenTelemetryMetricsHandler(tracer=tracer_provider.get_tracer(__file__), + meter=meter_provider.get_meter(__file__)) + + def _force_flush(self): + self._meter_provider.force_flush() + self._metric_out.flush() + self._tracer_provider.force_flush() + self._span_out.flush() + + def test_emit_store(self): + # Note that same instrument with different tags won't be aggregated. + # Aggregation rule for `emit_store` is delivering the last value of this interval. + # If no value at this interval, no `Metric` will be sent. + self._handler.emit_store(name='test_store', value=1, tags={'module': 'dataset', 'uuid': 'tag1'}) + self._handler.emit_store(name='test_store', value=5, tags={'module': 'dataset', 'uuid': 'tag2'}) + self._handler.emit_store(name='test_store', value=2, tags={'module': 'dataset', 'uuid': 'tag1'}) + self._force_flush() + self._force_flush() + self._force_flush() + self._handler.emit_store(name='test_store', value=0, tags={'module': 'dataset', 'uuid': 'tag1'}) + self._force_flush() + self.assertEqual(self._span_out.getvalue(), '') + self._metric_out.seek(0) + lines = self._metric_out.readlines() + measurements = [] + for l in lines: + measurement = json.loads(l) + measurements.append(measurement) + self.assertEqual(len(measurements), 3) + self.assertEqual(measurements[0]['attributes'], {'uuid': 'tag1', 'module': 'dataset'}) + self.assertEqual(measurements[1]['attributes'], {'uuid': 'tag2', 'module': 'dataset'}) + self.assertEqual(measurements[0]['name'], 'values.test_store') + self.assertEqual([m['point']['value'] for m in measurements], [2, 5, 0]) + + def test_emit_counter(self): + # Note that same instrument with different tags won't be aggregated. + # Aggregation rule for `emit_counter` is delivering the accumulated \ + # value with the same tags during this interval. + # If no value at this interval, a `Metric` with value of last interval will be sent. 
+ self._handler.emit_counter(name='test_counter', value=1, tags={'module': 'dataset', 'uuid': 'tag1'}) + self._handler.emit_counter(name='test_counter', value=5, tags={'module': 'dataset', 'uuid': 'tag2'}) + self._handler.emit_counter(name='test_counter', value=2, tags={'module': 'dataset', 'uuid': 'tag1'}) + self._force_flush() + self._force_flush() + self._handler.emit_counter(name='test_counter', value=-1, tags={'module': 'dataset', 'uuid': 'tag1'}) + self._force_flush() + self.assertEqual(self._span_out.getvalue(), '') + self._metric_out.seek(0) + lines = self._metric_out.readlines() + measurements = [] + for l in lines: + measurement = json.loads(l) + measurements.append(measurement) + self.assertEqual(len(measurements), 6) + self.assertEqual(measurements[0]['attributes'], {'uuid': 'tag1', 'module': 'dataset'}) + self.assertEqual(measurements[1]['attributes'], {'uuid': 'tag2', 'module': 'dataset'}) + self.assertEqual(measurements[0]['name'], 'values.test_counter') + self.assertEqual([m['point']['value'] for m in measurements], [3, 5, 3, 5, 2, 5]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/middlewares.py b/web_console_v2/api/fedlearner_webconsole/utils/middlewares.py deleted file mode 100644 index cda20152d..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/middlewares.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import logging - - -class _MiddlewareRegistry(object): - def __init__(self): - self.middlewares = [] - - def register(self, middleware): - self.middlewares.append(middleware) - - -_middleware_registry = _MiddlewareRegistry() -register = _middleware_registry.register - - -def init_app(app): - logging.info('Initializing app with middlewares') - # Wraps app with middlewares - for middleware in _middleware_registry.middlewares: - app = middleware(app) - return app diff --git a/web_console_v2/api/fedlearner_webconsole/utils/mixins.py b/web_console_v2/api/fedlearner_webconsole/utils/mixins.py index 6ec201857..6206fbf87 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/mixins.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/mixins.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
@@ -14,13 +14,30 @@ # coding: utf-8 from typing import List, Dict, Callable -from datetime import datetime, timezone +from datetime import datetime from enum import Enum + from sqlalchemy.ext.declarative import DeclarativeMeta from google.protobuf.message import Message from google.protobuf.json_format import MessageToDict +from fedlearner_webconsole.utils.pp_datetime import to_timestamp + + +def _to_dict_value(value): + if isinstance(value, datetime): + return to_timestamp(value) + if isinstance(value, Message): + return MessageToDict(value, preserving_proto_field_name=True, including_default_value_fields=True) + if isinstance(value, Enum): + return value.name + if isinstance(value, list): + return [_to_dict_value(v) for v in value] + if hasattr(value, 'to_dict'): + return value.to_dict() + return value + def to_dict_mixin(ignores: List[str] = None, extras: Dict[str, Callable] = None, @@ -40,6 +57,7 @@ def _get_fields(self: object) -> List[str]: def decorator(cls): """A decorator to add a to_dict method to a class.""" + def to_dict(self: object): """A helper function to convert a class to dict.""" dic = {} @@ -54,27 +72,7 @@ def to_dict(self: object): dic[extra_key] = func(self) # Converts type for key in dic: - value = dic[key] - if isinstance(value, datetime): - # If there is no timezone, we should treat it as - # UTC datetime,otherwise it will be calculated - # as local time when converting to timestamp. - # Context: all datetime in db is UTC datetime, - # see details in config.py#turn_db_timezone_to_utc - if value.tzinfo is None: - dic[key] = int( - value.replace(tzinfo=timezone.utc).timestamp()) - else: - dic[key] = int(value.timestamp()) - elif isinstance(value, Message): - dic[key] = MessageToDict( - value, - preserving_proto_field_name=True, - including_default_value_fields=True) - elif isinstance(value, Enum): - dic[key] = value.name - elif hasattr(value, 'to_dict'): - dic[key] = value.to_dict() + dic[key] = _to_dict_value(dic[key]) # remove None and empty list and dict if ignore_none: @@ -86,33 +84,3 @@ def to_dict(self: object): return cls return decorator - - -def from_dict_mixin(from_dict_fields: List[str] = None, - required_fields: List[str] = None): - if from_dict_fields is None: - from_dict_fields = [] - if required_fields is None: - required_fields = [] - - def decorator(cls: object): - @classmethod - def from_dict(cls: object, content: dict): - obj = cls() # pylint: disable=no-value-for-parameter - for k in from_dict_fields: - if k in content: - current_type = type(getattr(obj, k)) - if hasattr(current_type, 'from_dict'): - setattr(obj, k, current_type.from_dict(content[k])) - else: - setattr(obj, k, content[k]) - for k in required_fields: - if getattr(obj, k) is None: - raise ValueError(f'{type(obj)} should have attribute {k}') - - return obj - - setattr(cls, 'from_dict', from_dict) - return cls - - return decorator diff --git a/web_console_v2/api/fedlearner_webconsole/utils/mixins_test.py b/web_console_v2/api/fedlearner_webconsole/utils/mixins_test.py new file mode 100644 index 000000000..6561231be --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/mixins_test.py @@ -0,0 +1,89 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from datetime import datetime, timezone + +from sqlalchemy.ext.declarative import declarative_base + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.utils.mixins import to_dict_mixin + +Base = declarative_base() + + +@to_dict_mixin(ignores=['token', 'grpc_spec'], + extras={ + 'grpc_spec': lambda model: model.get_grpc_spec(), + 'list': lambda _: ['hello', 'world'] + }) +class DeclarativeClass(Base): + __tablename__ = 'just_a_test' + + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(255)) + token = db.Column('token_string', db.String(64), index=True, key='token') + updated_at = db.Column(db.DateTime(timezone=True)) + grpc_spec = db.Column(db.Text()) + + def set_grpc_spec(self, proto): + self.grpc_spec = proto.SerializeToString() + + def get_grpc_spec(self): + proto = common_pb2.GrpcSpec() + proto.ParseFromString(self.grpc_spec) + return proto + + +@to_dict_mixin(to_dict_fields=['hhh']) +class SpecifyColumnsClass(object): + + def __init__(self) -> None: + self.hhh = None + self.not_include = None + + +class MixinsTest(unittest.TestCase): + + def test_to_dict_declarative_api(self): + # 2021/04/23 10:42:01 UTC + updated_at = datetime(2021, 4, 23, 10, 42, 1, tzinfo=timezone.utc) + updated_at_ts = int(updated_at.timestamp()) + test_model = DeclarativeClass(id=123, name='test-model', token='test-token', updated_at=updated_at) + test_grpc_spec = common_pb2.GrpcSpec(authority='test-authority') + test_model.set_grpc_spec(test_grpc_spec) + + self.assertDictEqual( + test_model.to_dict(), { + 'id': 123, + 'name': 'test-model', + 'updated_at': updated_at_ts, + 'grpc_spec': { + 'authority': 'test-authority', + }, + 'list': ['hello', 'world'] + }) + + def test_to_dict_specify_columns(self): + obj = SpecifyColumnsClass() + obj.hhh = 'hhh' + res = obj.to_dict() + self.assertEqual(len(res), 1) + self.assertTrue('hhh' in res) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/paginate.py b/web_console_v2/api/fedlearner_webconsole/utils/paginate.py new file mode 100644 index 000000000..2bb6b60c8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/paginate.py @@ -0,0 +1,121 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
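One behavioural addition in the `mixins.py` diff worth calling out before the pagination module: `_to_dict_value` now recurses into lists, which the old inline conversion did not. A small illustrative sketch (the values are hypothetical; the timestamp follows `to_timestamp`'s naive-is-UTC rule):

```python
# Illustrative only: list elements are now converted recursively.
from datetime import datetime, timezone
from fedlearner_webconsole.utils.mixins import _to_dict_value

dts = [datetime(2021, 4, 23, tzinfo=timezone.utc)] * 2
assert _to_dict_value(dts) == [1619136000, 1619136000]  # datetimes -> timestamps
```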
+ +# coding: utf-8 + +# Ref: https://github.com/pallets/flask-sqlalchemy/blob/main/src/flask_sqlalchemy/__init__.py +from typing import Optional + +from math import ceil + +from sqlalchemy import func +from sqlalchemy.orm import Query + + +class Pagination: + + def __init__(self, query: Query, page: int, page_size: int): + """Constructor for pagination + + Args: + query (Query): A SQLAlchemy query + page (int): The selected page + page_size (int): The number of items on each page + """ + self.query = query + self.page = page + self.page_size = page_size + self._total_of_items = None + + def get_number_of_items(self) -> int: + """Get the total number of items in the query. + + Returns: + The total number of items from the original query. + """ + if self._total_of_items is None: + # A raw query without any WHERE clause will result in a SQL statement without a FROM clause + # Therefore, if no FROM clause is detected, we use a subquery to count the items + # Ref: https://stackoverflow.com/questions/12941416/how-to-count-rows-with-select-count-with-sqlalchemy#comment118672248_57934541 # pylint:disable=line-too-long + # FYI: Even SQLAlchemy 1.4.35 did not resolve this issue + if ' FROM ' not in str(self.query).upper(): + self._total_of_items = self.query.count() + else: + self._total_of_items = self.query.with_entities(func.count()).scalar() + + return self._total_of_items + + def get_items(self) -> list: + """Get a "page" of items. + CAUTION: Returns all records if {self.page_size} is 0. + + Returns: + A list containing {self.page_size} items on page {self.page}. + """ + if self.page_size == 0: + return self.query.all() + return self.query.limit(self.page_size).offset((self.page - 1) * self.page_size).all() + + def get_number_of_pages(self) -> int: + """Get the number of pages of the query according to the specified + page_size value. + CAUTION: Returns 1 if {self.page_size} is 0 and the query has records. + + Returns: + The number of pages of all items from the original query. + """ + if self.get_number_of_items() == 0: + return 0 + if self.page_size == 0: + return 1 + return int(ceil(self.get_number_of_items() / float(self.page_size))) + + def get_metadata(self) -> dict: + """Get pagination metadata in a dictionary. + + Returns: + A dictionary containing the information needed for the current page. + """ + return { + 'current_page': self.page, + 'page_size': self.page_size, + 'total_pages': self.get_number_of_pages(), + 'total_items': self.get_number_of_items() + } + + +def paginate(query: Query, page: Optional[int] = None, page_size: Optional[int] = None) -> Pagination: + """Paginate a query. + + Check if page and page_size are valid and construct a new Pagination + object from a SQLAlchemy Query. + CAUTION: page starts at one + + Args: + query (Query): Query to be paginated + page (int): Page selected in pagination (page >= 1) + page_size (int): Number of items on each page (0 <= page_size <= 100) + + Returns: + A Pagination object containing the selected items and metadata. + + Raises: + ValueError: if not (page >= 1 and 0 <= page_size <= 100).
+ """ + page = page or 1 + page_size = page_size or 0 + if not (page >= 1 and 0 <= page_size <= 100): + raise ValueError('page should be positive and page_size ranges between 0 and 100') + + return Pagination(query, page, page_size) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/paginate_test.py b/web_console_v2/api/fedlearner_webconsole/utils/paginate_test.py new file mode 100644 index 000000000..89f91243b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/paginate_test.py @@ -0,0 +1,84 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# coding: utf-8 + +import unittest +from fedlearner_webconsole.db import db +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from testing.no_web_server_test_case import NoWebServerTestCase + + +def generate_workflow(state: WorkflowState = WorkflowState.RUNNING) -> Workflow: + return Workflow(state=state) + + +class PaginateTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + workflows = [generate_workflow() for _ in range(7)] + workflows += [generate_workflow(state=WorkflowState.INVALID) for _ in range(7)] + + with db.session_scope() as session: + session.bulk_save_objects(workflows) + session.commit() + + def test_paginate(self): + with db.session_scope() as session: + query = session.query(Workflow).filter(Workflow.state == WorkflowState.RUNNING) + pagination = paginate(query, page=1, page_size=3) + + self.assertEqual(3, pagination.get_number_of_pages()) + self.assertEqual(3, len(pagination.get_items())) + + pagination = paginate(query, page=3, page_size=3) + + self.assertEqual(1, len(pagination.get_items())) + + pagination = paginate(query, page=4, page_size=3) + + self.assertEqual(0, len(pagination.get_items())) + + def test_page_meta(self): + with db.session_scope() as session: + query = session.query(Workflow) + page_meta = paginate(query, page=1, page_size=3).get_metadata() + self.assertDictEqual(page_meta, {'current_page': 1, 'page_size': 3, 'total_pages': 5, 'total_items': 14}) + + query = session.query(Workflow).filter(Workflow.state == WorkflowState.RUNNING) + page_meta = paginate(query, page=1, page_size=3).get_metadata() + self.assertDictEqual(page_meta, {'current_page': 1, 'page_size': 3, 'total_pages': 3, 'total_items': 7}) + + page_meta = paginate(query, page=4, page_size=10).get_metadata() + self.assertDictEqual(page_meta, {'current_page': 4, 'page_size': 10, 'total_pages': 1, 'total_items': 7}) + + def test_fallback_page_size(self): + with db.session_scope() as session: + query = session.query(Workflow).filter(Workflow.state == WorkflowState.RUNNING) + pagination = paginate(query) + + self.assertEqual(7, len(pagination.get_items())) + self.assertDictEqual(pagination.get_metadata(), { + 'current_page': 1, + 'page_size': 0, + 'total_pages': 1, + 'total_items': 7 + }) + + +if __name__ == '__main__': + unittest.main() diff --git 
a/web_console_v2/api/fedlearner_webconsole/utils/pp_base64.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_base64.py new file mode 100644 index 000000000..1a588329c --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_base64.py @@ -0,0 +1,24 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +from base64 import b64encode, b64decode + + +def base64encode(s: str) -> str: + return b64encode(s.encode('UTF-8')).decode('UTF-8') + + +def base64decode(s: str) -> str: + return b64decode(s).decode('UTF-8') diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_base64_test.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_base64_test.py new file mode 100644 index 000000000..72db50be5 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_base64_test.py @@ -0,0 +1,37 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest + +from fedlearner_webconsole.utils.pp_base64 import base64encode, base64decode + + +class Base64Test(unittest.TestCase): + + def test_base64encode(self): + self.assertEqual(base64encode('hello 1@2'), 'aGVsbG8gMUAy') + self.assertEqual(base64encode('😈'), '8J+YiA==') + + def test_base64decode(self): + self.assertEqual(base64decode('aGVsbG8gMUAy'), 'hello 1@2') + self.assertEqual(base64decode('JjEzOVlUKiYm'), '&139YT*&&') + + def test_base64_encode_and_decode(self): + self.assertEqual(base64decode(base64encode('test')), 'test') + self.assertEqual(base64encode(base64decode('aGVsbG8gMUAy')), 'aGVsbG8gMUAy') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_datetime.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_datetime.py new file mode 100644 index 000000000..14c4a382f --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_datetime.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +from typing import Optional, Union +from datetime import datetime, timezone +from dateutil.parser import isoparse + + +def to_timestamp(dt: Union[datetime, str]) -> int: + """Converts a DB datetime to a timestamp in seconds.""" + # If there is no timezone, we should treat it as UTC datetime, + # otherwise it will be calculated as local time when converting + # to timestamp. + # Context: all datetime in db is UTC datetime, + # see details in config.py#turn_db_timezone_to_utc + if isinstance(dt, str): + dt = isoparse(dt) + if dt.tzinfo is None: + return int(dt.replace(tzinfo=timezone.utc).timestamp()) + return int(dt.timestamp()) + + +def from_timestamp(timestamp: int) -> datetime: + """Converts a timestamp to a datetime with UTC timezone.""" + return datetime.fromtimestamp(timestamp, timezone.utc) + + +def now(tz: Optional[timezone] = timezone.utc) -> datetime: + """A wrapper of datetime.now. + + This exists for easy testing: datetime.now is referenced by a lot + of components, so mocking it directly breaks tests easily. Using this + wrapper, developers can mock a single function to get a fake datetime.""" + return datetime.now(tz) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_datetime_test.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_datetime_test.py new file mode 100644 index 000000000..d7c746829 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_datetime_test.py @@ -0,0 +1,55 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
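To make the `to_timestamp` contract above concrete, a short sketch (assumed values, consistent with the tests that follow): naive datetimes are interpreted as UTC, and ISO-8601 strings go through `dateutil`'s `isoparse`:

```python
# Illustrative only: naive datetimes are treated as UTC by to_timestamp.
from datetime import datetime
from fedlearner_webconsole.utils.pp_datetime import from_timestamp, to_timestamp

assert to_timestamp(datetime(1970, 1, 1)) == 0        # naive -> UTC
assert to_timestamp('1970-01-01T00:00:00Z') == 0      # ISO-8601 string
assert from_timestamp(0).isoformat() == '1970-01-01T00:00:00+00:00'
```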
+ +# coding: utf-8 +import unittest +from datetime import datetime, timezone, timedelta + +from fedlearner_webconsole.utils.pp_datetime import from_timestamp, to_timestamp + + +class DatetimeTest(unittest.TestCase): + + def test_to_timestamp(self): + # 2020/12/17 13:58:59 UTC+8 + dt_utc8 = datetime(2020, 12, 17, 13, 58, 59, tzinfo=timezone(timedelta(hours=8))) + # datetime will be stored without timezone info + dt_utc8_ts = int(dt_utc8.timestamp()) + 8 * 60 * 60 + self.assertEqual(to_timestamp(dt_utc8.replace(tzinfo=None)), dt_utc8_ts) + # 2021/04/23 10:42:01 UTC + dt_utc = datetime(2021, 4, 23, 10, 42, 1, tzinfo=timezone.utc) + dt_utc_ts = int(dt_utc.timestamp()) + self.assertEqual(to_timestamp(dt_utc), dt_utc_ts) + + def test_from_timestamp(self): + # 2020/12/17 13:58:59 UTC+8 + dt_utc8 = datetime(2020, 12, 17, 13, 58, 59, tzinfo=timezone(timedelta(hours=8))) + self.assertEqual(from_timestamp(to_timestamp(dt_utc8)), datetime(2020, 12, 17, 5, 58, 59, tzinfo=timezone.utc)) + dt_utc = datetime(2021, 4, 23, 10, 42, 1, tzinfo=timezone.utc) + self.assertEqual(from_timestamp(to_timestamp(dt_utc)), datetime(2021, 4, 23, 10, 42, 1, tzinfo=timezone.utc)) + + def test_to_timestamp_with_str_input(self): + dt_str = '2021-04-15T10:43:15Z' + real_dt = datetime(2021, 4, 15, 10, 43, 15, tzinfo=timezone.utc) + ts = to_timestamp(dt_str) + self.assertEqual(real_dt.timestamp(), ts) + + dt_str = '2021-09-24T17:58:27+08:00' + real_dt = datetime(2021, 9, 24, 17, 58, 27, tzinfo=timezone(timedelta(hours=8))) + ts = to_timestamp(dt_str) + self.assertEqual(real_dt.timestamp(), ts) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_flatten_dict.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_flatten_dict.py new file mode 100644 index 000000000..78e6c7802 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_flatten_dict.py @@ -0,0 +1,49 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +# pylint: disable=deprecated-class +from typing import Mapping +import six +from flatten_dict.flatten_dict import dot_reducer + + +def flatten(d): + """ + Copied and modified from flatten_dict.flatten_dict, because the original method + converts {'a': {'b': 1}} to {'a.b': 1}, but we want {'a': {'b': 1}, 'a.b': 1} + + Flatten `Mapping` object.
+ """ + flattenable_types = (Mapping,) + if not isinstance(d, flattenable_types): + raise ValueError(f'argument type {type(d)} is not in the flattenalbe types {flattenable_types}') + + reducer = dot_reducer + flat_dict = {} + + def _flatten(d, parent=None): + key_value_iterable = six.viewitems(d) + for key, value in key_value_iterable: + flat_key = reducer(parent, key) + if isinstance(value, flattenable_types): + flat_dict[flat_key] = value + if value: + # recursively build the result + _flatten(value, flat_key) + continue + flat_dict[flat_key] = value + + _flatten(d) + return flat_dict diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_flatten_dict_test.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_flatten_dict_test.py new file mode 100644 index 000000000..82e82fb28 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_flatten_dict_test.py @@ -0,0 +1,53 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from fedlearner_webconsole.utils.pp_flatten_dict import flatten + + +class FlattenDictTestCase(unittest.TestCase): + + def test_flatten(self): + self.assertEqual(flatten({'a': [1], 'b': {'c': 2}, 'd': 3}), {'a': [1], 'b': {'c': 2}, 'b.c': 2, 'd': 3}) + self.assertEqual(flatten({'a': { + 'b': { + 'c': { + 'd': 1 + } + } + }}), { + 'a': { + 'b': { + 'c': { + 'd': 1 + } + } + }, + 'a.b': { + 'c': { + 'd': 1 + } + }, + 'a.b.c': { + 'd': 1 + }, + 'a.b.c.d': 1 + }) + + self.assertEqual(flatten({'a': {}}), {'a': {}}) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_time.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_time.py new file mode 100644 index 000000000..221eee830 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_time.py @@ -0,0 +1,24 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import time + + +def sleep(seconds: int): + """A wrapper of time.sleep. + This is for easy testing, as time.sleep is referred by a lot + of components, mock that will break tests easily. 
Using this + wrapper, developers can mock a single function to fake the time tick.""" + time.sleep(seconds) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_yaml.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_yaml.py new file mode 100644 index 000000000..05b805686 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_yaml.py @@ -0,0 +1,243 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import logging +import json +from ast import Attribute, Name, Subscript, Add, Call +from string import Template +from typing import Callable, List, Optional +from simpleeval import EvalWithCompoundTypes +from fedlearner_webconsole.utils.pp_flatten_dict import flatten +from fedlearner_webconsole.setting.service import SettingService +from fedlearner_webconsole.utils.const import DEFAULT_OWNER +from fedlearner_webconsole.utils.system_envs import get_system_envs + + +class _YamlTemplate(Template): + delimiter = '$' + # Which placeholders in the template should be interpreted + idpattern = r'[a-zA-Z_\-\[0-9\]]+(\.[a-zA-Z_\-\[0-9\]]+)*' + + +def _format_yaml(yaml, **kwargs): + """Formats a yaml template. + + Example usage: + format_yaml('{"abc": ${x.y}}', x={'y': 123}) + output should be '{"abc": 123}' + """ + template = _YamlTemplate(yaml) + try: + return template.substitute(flatten(kwargs or {})) + except KeyError as e: + raise RuntimeError(f'Unknown placeholder: {e.args[0]}') from e + + +def _to_str(x=None) -> str: + if x is None: + return '' + if isinstance(x, dict): + return json.dumps(x) + return str(x) + + +def _to_int(x=None) -> Optional[int]: + if x is None or x == '': + return None + try: + return int(float(x)) + except Exception as e: + raise ValueError(f'{str(e)}. The input is: {x}') from e + + +def _to_float(x=None) -> Optional[float]: + if x is None or x == '': + return None + try: + return float(x) + except Exception as e: + raise ValueError(f'{str(e)}. The input is: {x}') from e + + +def _to_bool(x=None) -> bool: + if x is None or x == '': + return False + if isinstance(x, bool): + return x + if not isinstance(x, str): + raise ValueError(f'{x} can not be converted to boolean') + if x.lower() in ('yes', 'true', 't', 'y', '1'): + return True + if x.lower() in ('no', 'false', 'f', 'n', '0'): + return False + raise ValueError(f'{x} can not be converted to boolean') + + +def _to_dict(x) -> dict: + if isinstance(x, dict): + return x + raise ValueError(f'{x} is not dict') + + +def _to_list(x) -> list: + if isinstance(x, list): + return x + raise ValueError(f'{x} is not list') + + +def _eval_attribute(self, node): + """ + Copied from simpleeval and modified so that the final exception carries more information about the attribute. + Before the change, the exception message would be "can't find 'c'" when the attribute c cannot be found in a.b, + e.g. eval('a.b.c', names={'a': {'b': {'d': 1}}}). + After the change, the exception message is "can't find 'a.b.c'".
+ """ + max_depth = 10 + for prefix in ['_', 'func_']: + if node.attr.startswith(prefix): + raise ValueError('Sorry, access to __attributes ' + ' or func_ attributes is not available. ' + f'({node.attr})') + if node.attr in ['format', 'format_map', 'mro']: + raise ValueError(f'Sorry, this method is not available. ({node.attr})') + # eval node + node_evaluated = self._eval(node.value) # pylint: disable=protected-access + + # Maybe the base object is an actual object, not just a dict + try: + return getattr(node_evaluated, node.attr) + except (AttributeError, TypeError): + pass + + if self.ATTR_INDEX_FALLBACK: + try: + return node_evaluated[node.attr] + except (KeyError, TypeError): + pass + + # If it is neither, raise an exception + # Modified(xiangyuxuan.prs) from simpleeval to make the error message has more information. + pre_node = node.value + attr_chains = [node.attr] + for i in range(max_depth): + if not isinstance(pre_node, Attribute): + break + attr_chains.append(pre_node.attr) + pre_node = pre_node.value + if isinstance(pre_node, Name): + attr_chains.append(pre_node.id) + raise ValueError('.'.join(attr_chains[::-1]), self.expr) + + +def compile_yaml_template(yaml_template: str, + post_processors: List[Callable], + ignore_variables: bool = False, + use_old_formater: bool = False, + **kwargs) -> dict: + """ + Args: + yaml_template (str): The original string to format. + post_processors (List): List of methods to process the dict which yaml_template generated. + ignore_variables (bool): If True then Compile the yaml_template without any variables. + All variables will be treated as None. Such as: "{var_a.attr_a: 1, 'b': 2}" -> {None:1, 'b':2} + **kwargs: variables to format the yaml_template. + use_old_formater (bool): If True then use old ${} placeholder formatter. + Raises: + ValueError: foramte failed + Returns: + a dict which can submit to k8s. + """ + # TODO(xiangyuxuan.prs): this is old version formatter, should be deleted after no flapp in used + if use_old_formater: + yaml_template = _format_yaml(yaml_template, **kwargs) + try: + # names={'true': True, 'false': False, 'null': None} support json symbol in python + eval_with_types = EvalWithCompoundTypes(names={'true': True, 'false': False, 'null': None, **kwargs}) + + # replace the built-in functions in eval stage, + # Ref: https://github.com/danthedeckie/simpleeval + if ignore_variables: + eval_with_types.nodes[Attribute] = lambda x: None + eval_with_types.nodes[Name] = lambda x: None + eval_with_types.nodes[Subscript] = lambda x: None + eval_with_types.nodes[Call] = lambda x: None + eval_with_types.operators[Add] = lambda x, y: None + return eval_with_types.eval(yaml_template) + eval_with_types.functions.update(str=_to_str, int=_to_int, bool=_to_bool, dict=_to_dict, list=_to_list) + + # Overwrite to let the exceptions message have more information. 
+ eval_with_types.nodes[Attribute] = lambda x: _eval_attribute(eval_with_types, x) + loaded_json = eval_with_types.eval(yaml_template) + except SyntaxError as e: + raise ValueError(f'Invalid python dict syntax error msg: {e.args}') from e + except Exception as e: # pylint: disable=broad-except + # use args[0] to simplify the error message + raise ValueError(f'Invalid python dict placeholder error msg: {e.args[0]}') from e + # post processor for flapp yaml + for post_processor in post_processors: + loaded_json = post_processor(loaded_json) + return loaded_json + + +def add_username_in_label(loaded_json: dict, username: Optional[str] = None) -> dict: + if 'labels' not in loaded_json['metadata']: + loaded_json['metadata']['labels'] = {} + loaded_json['metadata']['labels']['owner'] = username or DEFAULT_OWNER + return loaded_json + + +class GenerateDictService: + + def __init__(self, session): + self._session = session + + def generate_system_dict(self): + sys_vars_dict = SettingService(self._session).get_system_variables_dict() + # TODO(xiangyuxuan.prs): basic_envs is old method to inject the envs, delete in the future. + basic_envs_list = get_system_envs() + basic_envs = ','.join([json.dumps(env) for env in basic_envs_list]) + version = SettingService(self._session).get_application_version().version.version + return { + 'basic_envs': basic_envs, + 'variables': sys_vars_dict, + 'basic_envs_list': basic_envs_list, + 'version': version + } + + +def _envs_to_dict(flapp_envs: List[dict]) -> dict: + return {env['name']: env['value'] for env in flapp_envs} + + +def extract_flapp_envs(flapp_json: dict) -> Optional[dict]: + """Extract flapp envs + + Returns: + dict of environment variables under different type of pods is returned, e.g. + {'master': {'INPUT_BASE_DIR': '/data'} + 'worker': {'INPUT_DATA_FORMAT': 'TF_RECORD'}} + """ + try: + if flapp_json['kind'] != 'FLApp': + return None + flapp_specs = flapp_json['spec']['flReplicaSpecs'] + flapp_envs = {} + for role in flapp_specs: + assert len(flapp_specs[role]['template']['spec']['containers']) == 1 + flapp_envs[role] = _envs_to_dict(flapp_specs[role]['template']['spec']['containers'][0]['env']) + return flapp_envs + except Exception as e: # pylint: disable=broad-except + logging.error(f'extracting environment variables with error {str(e)}') + return None diff --git a/web_console_v2/api/fedlearner_webconsole/utils/pp_yaml_test.py b/web_console_v2/api/fedlearner_webconsole/utils/pp_yaml_test.py new file mode 100644 index 000000000..117d2b3ab --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/pp_yaml_test.py @@ -0,0 +1,166 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
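To make the two template modes above concrete, here is a hedged sketch of the new-style (simpleeval) path; the template and variables are hypothetical, but the dict-attribute access and the `int()` override follow the behaviour exercised by the tests below:

```python
# Illustrative only: a Python-dict template evaluated with caller variables.
from fedlearner_webconsole.utils.pp_yaml import compile_yaml_template

spec = compile_yaml_template(
    '{"name": self.name, "replicas": int(workflow.variables.num)}',
    post_processors=[],
    self={'name': 'job-1'},
    workflow={'variables': {'num': '2'}})
assert spec == {'name': 'job-1', 'replicas': 2}  # int() resolves to _to_int
```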
+ +# coding: utf-8 +import unittest +from fedlearner_webconsole.utils.pp_yaml import compile_yaml_template, extract_flapp_envs, _to_bool, \ + _to_dict, _to_int, \ + _to_float, _to_list + + +def test_postprocessor(loaded_json: dict): + loaded_json['test'] = 1 + return loaded_json + + +class YamlTestCase(unittest.TestCase): + + def test_compile_yaml_template(self): + result = compile_yaml_template('{"test${a.b}": 1}', [], use_old_formater=True, a={'b': 'placeholder'}) + self.assertEqual(result, {'testplaceholder': 1}) + + def test_compile_yaml_template_with_postprocessor(self): + result = compile_yaml_template('{"test${a.b}": 1}', [test_postprocessor], + use_old_formater=True, + a={'b': 'placeholder'}) + self.assertEqual(result, {'testplaceholder': 1, 'test': 1}) + + def test_compile_yaml_template_with_list_merge(self): + result = compile_yaml_template('[{"test": false}]+${a.b}', [], use_old_formater=True, a={'b': '[{True: true}]'}) + self.assertEqual(result, [{'test': False}, {True: True}]) + result = compile_yaml_template('[{"test": false}]+${a.b}', [], + use_old_formater=True, + a={'b': [{ + 'test': False + }]}) + self.assertEqual(result, [{'test': False}, {'test': False}]) + result = compile_yaml_template('${a.b}', [], use_old_formater=True, a={'b': {'v': 123}}) + self.assertEqual(result, {'v': 123}) + + def test_extract_flapp_evs(self): + flapp_json = { + 'kind': 'FLApp', + 'spec': { + 'flReplicaSpecs': { + 'master': { + 'template': { + 'spec': { + 'containers': [{ + 'env': [{ + 'name': 'CODE_KEY', + 'value': 'test-code-key' + }] + }] + } + } + }, + 'worker': { + 'template': { + 'spec': { + 'containers': [{ + 'env': [{ + 'name': 'CODE_TAR', + 'value': 'test-code-tar' + }, { + 'name': 'EPOCH_NUM', + 'value': '3' + }] + }] + } + } + } + } + } + } + flapp_envs = extract_flapp_envs(flapp_json) + expected_flapp_envs = { + 'master': { + 'CODE_KEY': 'test-code-key' + }, + 'worker': { + 'CODE_TAR': 'test-code-tar', + 'EPOCH_NUM': '3' + } + } + self.assertEqual(flapp_envs, expected_flapp_envs) + + def test_convert_built_in_functions(self): + self.assertEqual(_to_int(''), None) + self.assertEqual(_to_int('1.6'), 1) + self.assertEqual(_to_float(''), None) + self.assertEqual(_to_float('1.9'), 1.9) + self.assertEqual(_to_bool('0'), False) + self.assertEqual(_to_bool('false'), False) + with self.assertRaises(ValueError): + _to_dict('{}') + with self.assertRaises(ValueError): + _to_list('[]') + self.assertEqual(_to_list([]), []) + self.assertEqual(_to_dict({}), {}) + + def test_eval_attribute_exception(self): + with self.assertRaises(ValueError) as e: + compile_yaml_template(yaml_template='a.b.c', post_processors=[], a={'b': 'd'}) + self.assertEqual(str(e.exception), 'Invalid python dict placeholder error msg: a.b.c') + self.assertEqual(compile_yaml_template(yaml_template='a.b.c', post_processors=[], a={'b': {'c': 1}}), 1) + + def test_eval_syntax_exception(self): + with self.assertRaises(ValueError) as e: + compile_yaml_template(yaml_template='{,,}', post_processors=[], a={'b': 'd'}) + self.assertEqual( + str(e.exception), + """Invalid python dict syntax error msg: ('invalid syntax', ('', 1, 2, '{,,}\\n'))""") + + def test_compile_yaml_template_ignore_variables(self): + self.assertEqual(compile_yaml_template('jaweof', [], True), None) + self.assertEqual(compile_yaml_template('{asdf: 12312, aaa:333}', [], True), {None: 333}) + self.assertEqual(compile_yaml_template('{asdf.a[1].b: 12312, "a": 3}', [], True), {None: 12312, 'a': 3}) + test_yaml_tpl = """ + { + "apiVersion": "fedlearner.k8s.io/v1alpha1", 
+ "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "labels": dict(system.variables.labels) + }, + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host.lower() + }, + { + "name": "OUTPUT_PARTITION_NUM", + "value": str(int(workflow.variables.partition_num)) + }, + { + "name": "OUTPUT_BASE_DIR", + "value": project.variables.storage_root_path + "/raw_data/" + self.name + }, + { + "name": "RAW_DATA_METRICS_SAMPLE_RATE", + "value": str(asdfasdf) + } + ] + list(system.variables.volumes_list), + } + ] + } + """ + self.assertEqual(compile_yaml_template(test_yaml_tpl, [], True).get('kind'), 'FLApp') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/process_utils.py b/web_console_v2/api/fedlearner_webconsole/utils/process_utils.py new file mode 100644 index 000000000..446b85873 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/process_utils.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import queue +from multiprocessing import get_context +from typing import Optional, Callable, Any, Dict + +from fedlearner_webconsole.utils.hooks import pre_start_hook + + +def _sub_process_wrapper(target: Optional[Callable[..., Any]], kwargs: Dict[str, Any]): + pre_start_hook() + target(**kwargs) + + +def get_result_by_sub_process(name: str, target: Optional[Callable[..., Any]], kwargs: Dict[str, Any]): + context = get_context('spawn') + internal_queue = context.Queue() + kwargs['q'] = internal_queue + wrapper_args = {'target': target, 'kwargs': kwargs} + sub_process = context.Process(target=_sub_process_wrapper, kwargs=wrapper_args, daemon=True) + sub_process.start() + try: + result = internal_queue.get(timeout=60) + except queue.Empty as e: + sub_process.terminate() + raise RuntimeError(f'[subprocess] {name} task failed') from e + finally: + sub_process.join() + sub_process.close() + internal_queue.close() + logging.info(f'[subprocess]: {name} task finished') + return result diff --git a/web_console_v2/api/fedlearner_webconsole/utils/process_utils_test.py b/web_console_v2/api/fedlearner_webconsole/utils/process_utils_test.py new file mode 100644 index 000000000..071618054 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/process_utils_test.py @@ -0,0 +1,36 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
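A note for callers of `get_result_by_sub_process`: the `spawn` context pickles the target, so the target must be a module-level function, and it must accept the injected `q` kwarg and put exactly one result. A minimal sketch (the function and values here are illustrative, not part of the module):

```python
from multiprocessing import Queue

from fedlearner_webconsole.utils.process_utils import get_result_by_sub_process


def _scan_dataset(path: str, q: Queue):
    # Must live at module level so the 'spawn' context can pickle it,
    # and must put exactly one result; the parent waits up to 60 seconds.
    q.put({'path': path, 'num_files': 42})


if __name__ == '__main__':
    # The guard matters: 'spawn' re-imports this module in the child process.
    result = get_result_by_sub_process(name='scan dataset', target=_scan_dataset, kwargs={'path': '/tmp/data'})
```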
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from multiprocessing import Queue + +from fedlearner_webconsole.utils.process_utils import get_result_by_sub_process + + +def _fake_sub_process(num: int, q: Queue): + q.put([num, num + 1]) + + +class SubProcessTestCase(unittest.TestCase): + + def test_sub_process(self): + result = get_result_by_sub_process(name='fake sub process', target=_fake_sub_process, kwargs={ + 'num': 2, + }) + self.assertEqual(result, [2, 3]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/proto.py b/web_console_v2/api/fedlearner_webconsole/utils/proto.py new file mode 100644 index 000000000..faf814c88 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/proto.py @@ -0,0 +1,141 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import copy +from typing import Dict, Any + +from google.protobuf import json_format +from google.protobuf.descriptor import FieldDescriptor +from google.protobuf.message import Message +from google.protobuf.struct_pb2 import Struct, Value + +from fedlearner_webconsole.proto.common.extension_pb2 import secret + + +def _is_map(descriptor: FieldDescriptor) -> bool: + """Checks if a field is map or normal repeated field. 
+
+    Inspired by https://github.com/protocolbuffers/protobuf/blob/3.6.x/python/google/protobuf/json_format.py#L159
+    """
+    return (descriptor.type == FieldDescriptor.TYPE_MESSAGE and descriptor.message_type.has_options and
+            descriptor.message_type.GetOptions().map_entry)
+
+
+def remove_secrets(proto: Message) -> Message:
+    """Removes secret fields from the proto."""
+    proto = copy.copy(proto)
+    field: FieldDescriptor
+    for field, value in proto.ListFields():
+        if field.type != FieldDescriptor.TYPE_MESSAGE:
+            # Clears the field if it has the secret annotation and is not
+            # a message type (no matter repeated or not)
+            if field.GetOptions().Extensions[secret]:
+                proto.ClearField(field.name)
+            continue
+        if field.label != FieldDescriptor.LABEL_REPEATED:
+            # Nested message
+            value.CopyFrom(remove_secrets(value))
+            continue
+        if _is_map(field):
+            # Checks value type
+            map_value_field: FieldDescriptor = field.message_type.fields_by_name['value']
+            for k in list(value.keys()):
+                if map_value_field.type == FieldDescriptor.TYPE_MESSAGE:
+                    value[k].CopyFrom(remove_secrets(value[k]))
+                else:
+                    value[k] = map_value_field.default_value
+        else:
+            # Replaces the repeated field (list of messages)
+            new_protos = [remove_secrets(m) for m in value]
+            del value[:]
+            value.extend(new_protos)
+
+    return proto
+
+
+_INT_TYPES = frozenset(
+    [FieldDescriptor.TYPE_INT64, FieldDescriptor.TYPE_UINT64, FieldDescriptor.TYPE_INT32, FieldDescriptor.TYPE_UINT32])
+
+
+def _normalize_singular(value: Any, proto_type: int) -> Any:
+    if proto_type in _INT_TYPES:
+        return int(value)
+    return value
+
+
+def _normalize_dict(dct: Dict, message: Message):
+    """Normalizes the dict in place.
+
+    Converts int64 fields in the dict to python int instead of string. Currently the python
+    proto lib converts int64 to str, ref: https://github.com/protocolbuffers/protobuf/issues/2954
+
+    So this is a hack to make the dict conversion work as we expect. If you do not want this
+    behavior someday, you can use an extension on the field case by case."""
+    if isinstance(message, (Struct, Value)):
+        # For those well-known protobuf types, we do not normalize them as
+        # there are some magics.
+        return
+    descriptors = message.DESCRIPTOR.fields_by_name
+    for key in dct:
+        descriptor = descriptors.get(key)
+        # Defensive: skips keys which are not defined in the message
+        if descriptor is None:
+            continue
+        # Repeated field
+        if descriptor.label == FieldDescriptor.LABEL_REPEATED:
+            nested = getattr(message, key)
+            if _is_map(descriptor):
+                # 1. Map
+                map_key_type: int = descriptor.message_type.fields_by_name['key'].type
+                map_value_type: int = descriptor.message_type.fields_by_name['value'].type
+                for k, v in dct[key].items():
+                    if map_value_type == FieldDescriptor.TYPE_MESSAGE:
+                        # If the map key type is numeric, it must be converted from
+                        # string back to int so it can index into the message's map field
+                        k = _normalize_singular(k, map_key_type)
+                        _normalize_dict(v, nested[k])
+                    else:
+                        dct[key][k] = _normalize_singular(v, map_value_type)
+            else:
+                # 2. List
+                for i, v in enumerate(dct[key]):
+                    if descriptor.type == FieldDescriptor.TYPE_MESSAGE:
+                        _normalize_dict(v, nested[i])
+                    else:
+                        dct[key][i] = _normalize_singular(v, descriptor.type)
+            continue
+        # Nested message
+        if descriptor.type == FieldDescriptor.TYPE_MESSAGE:
+            _normalize_dict(dct[key], getattr(message, key))
+            continue
+        # Singular field
+        dct[key] = _normalize_singular(dct[key], descriptor.type)
+
+
+def to_dict(proto: Message, with_secret: bool = True):
+    if not with_secret:
+        proto = remove_secrets(proto)
+    dct = json_format.MessageToDict(proto, preserving_proto_field_name=True, including_default_value_fields=True)
+    _normalize_dict(dct, proto)
+    return dct
+
+
+def to_json(proto: Message) -> str:
+    """Converts proto to json string."""
+    return json_format.MessageToJson(proto, preserving_proto_field_name=True)
+
+
+def parse_from_json(json_str: str, proto: Message) -> Message:
+    """Parses json string to a proto."""
+    return json_format.Parse(json_str or '{}', proto, ignore_unknown_fields=True)
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/proto_test.py b/web_console_v2/api/fedlearner_webconsole/utils/proto_test.py
new file mode 100644
index 000000000..24e6709b0
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/proto_test.py
@@ -0,0 +1,211 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
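To make the int64 behavior concrete: `json_format.MessageToDict` renders 64-bit integer fields as JSON strings, and `_normalize_dict` walks the message descriptor to turn them back into Python ints. A minimal sketch, assuming the generated `Int64Message` testing proto exercised in the tests below:

```python
from google.protobuf import json_format

from fedlearner_webconsole.proto.testing.testing_pb2 import Int64Message
from fedlearner_webconsole.utils.proto import to_dict

msg = Int64Message(id=123456789)
# Plain MessageToDict stringifies int64: {'id': '123456789', ...}
raw = json_format.MessageToDict(msg, preserving_proto_field_name=True)
# to_dict() applies _normalize_dict, so the int64 comes back as a Python int.
assert to_dict(msg)['id'] == 123456789
```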
+# + +# pylint: disable=unsupported-assignment-operation +import json +import unittest + +from google.protobuf.struct_pb2 import Struct, Value, ListValue + +from fedlearner_webconsole.proto.testing.testing_pb2 import PrivateInfo, RichMessage, Tdata, Int64Message, StructWrapper +from fedlearner_webconsole.utils.proto import remove_secrets, to_dict, to_json, parse_from_json + + +class ProtoTest(unittest.TestCase): + + def test_remove_secrets(self): + proto = RichMessage( + field1='f1', + field2=123, + pinfo=PrivateInfo(pii='pii', non_pii='non pii'), + infos=[PrivateInfo(pii='only pii'), PrivateInfo(non_pii='only non pii')], + pinfo_map={ + 'k1': PrivateInfo(non_pii='hello non pii'), + 'k2': PrivateInfo(pii='hello pii') + }, + pstring_map={'s1': 'v1'}, + pstring_list=['p1'], + ) + proto_without_secret = RichMessage( + field1='f1', + pinfo=PrivateInfo(non_pii='non pii'), + infos=[PrivateInfo(), PrivateInfo(non_pii='only non pii')], + pinfo_map={ + 'k1': PrivateInfo(non_pii='hello non pii'), + 'k2': PrivateInfo() + }, + pstring_map={'s1': ''}, + ) + self.assertEqual(remove_secrets(proto), proto_without_secret) + + def test_to_dict_with_secret(self): + proto = RichMessage(field1='f1', + field2=123, + pinfo=PrivateInfo(pii='pii', non_pii='non pii'), + infos=[PrivateInfo(pii='only pii'), + PrivateInfo(non_pii='only non pii')], + pinfo_map={ + 'k1': PrivateInfo(non_pii='hello non pii'), + 'k2': PrivateInfo(pii='hello pii') + }, + pstring_map={'s1': 'v1'}, + pstring_list=['p1']) + self.assertEqual( + to_dict(proto), { + 'field1': 'f1', + 'field2': 123, + 'infos': [{ + 'pii': 'only pii', + 'non_pii': '' + }, { + 'pii': '', + 'non_pii': 'only non pii' + }], + 'pinfo': { + 'pii': 'pii', + 'non_pii': 'non pii' + }, + 'pinfo_map': { + 'k1': { + 'non_pii': 'hello non pii', + 'pii': '' + }, + 'k2': { + 'non_pii': '', + 'pii': 'hello pii' + } + }, + 'pstring_map': { + 's1': 'v1' + }, + 'pstring_list': ['p1'] + }) + self.assertEqual( + to_dict(proto, with_secret=False), { + 'field1': 'f1', + 'field2': 0, + 'infos': [{ + 'pii': '', + 'non_pii': '' + }, { + 'pii': '', + 'non_pii': 'only non pii' + }], + 'pinfo': { + 'pii': '', + 'non_pii': 'non pii' + }, + 'pinfo_map': { + 'k1': { + 'non_pii': 'hello non pii', + 'pii': '' + }, + 'k2': { + 'non_pii': '', + 'pii': '' + } + }, + 'pstring_map': { + 's1': '' + }, + 'pstring_list': [] + }) + + def test_to_dict_int64(self): + proto = Int64Message(id=123456789, + uuid='123123', + project_id=666, + data=[Tdata(id=987), Tdata(projects=[1, 2, 3])]) + self.assertEqual( + to_dict(proto), { + 'uuid': + '123123', + 'project_id': + 666, + 'id': + 123456789, + 'data': [ + { + 'id': 987, + 'mappers': {}, + 'projects': [], + 'tt': 'UNSPECIFIED', + }, + { + 'id': 0, + 'mappers': {}, + 'projects': [1, 2, 3], + 'tt': 'UNSPECIFIED', + }, + ] + }) + + def test_to_dict_struct(self): + list_value = ListValue(values=[Value(string_value='string in list')]) + nested_struct = Struct() + nested_struct['haha'] = 2.33 + struct = Struct() + struct['nested_list'] = list_value + struct['nested_struct'] = nested_struct + + struct_wrapper = StructWrapper(typed_value=Value(string_value='str'), struct=struct) + self.assertEqual(to_dict(struct_wrapper), { + 'typed_value': 'str', + 'struct': { + 'nested_list': ['string in list'], + 'nested_struct': { + 'haha': 2.33 + } + } + }) + + def test_to_json(self): + proto = RichMessage(field1='field1', + field2=123123, + pinfo=PrivateInfo(pii='pii', non_pii='non pii'), + pstring_map={'s1': 'v1'}, + pstring_list=['p1']) + self.assertEqual( + 
json.loads(to_json(proto)), { + 'field1': 'field1', + 'field2': 123123, + 'pinfo': { + 'pii': 'pii', + 'non_pii': 'non pii' + }, + 'pstring_map': { + 's1': 'v1' + }, + 'pstring_list': ['p1'] + }) + + def test_parse_from_json(self): + proto = RichMessage(field1='field1', field2=123123, pstring_map={'s1': 'v1'}, pstring_list=['p1']) + self.assertEqual( + to_dict( + parse_from_json( + json.dumps({ + 'field1': 'field1', + 'field2': 123123, + 'pstring_map': { + 's1': 'v1' + }, + 'pstring_list': ['p1'], + 'unknown_f': '123' + }), RichMessage())), to_dict(proto)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/resource_name.py b/web_console_v2/api/fedlearner_webconsole/utils/resource_name.py new file mode 100644 index 000000000..d2668332b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/resource_name.py @@ -0,0 +1,28 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +from uuid import uuid4 + + +def resource_uuid() -> str: + """Build resource uuid + Returns: + A DNS-1035 label. A DNS-1035 label must start with an + alphabetic character. Since k8s resource name is limited to 64 chars, + job_def name is limited to 24 chars and pod name suffix is limit to + 19 chars, 20 chars are left for uuid. + substring uuid[:19] has no collision in 10 million draws. + """ + return f'u{uuid4().hex[:19]}' diff --git a/web_console_v2/api/fedlearner_webconsole/utils/resource_name_test.py b/web_console_v2/api/fedlearner_webconsole/utils/resource_name_test.py new file mode 100644 index 000000000..c1818a5f2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/resource_name_test.py @@ -0,0 +1,29 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from fedlearner_webconsole.utils.resource_name import resource_uuid + + +class UtilsTest(unittest.TestCase): + + def test_resource_uuid(self): + uuid = resource_uuid() + self.assertEqual(len(uuid), 20) + self.assertEqual(uuid[0], 'u') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/schema.py b/web_console_v2/api/fedlearner_webconsole/utils/schema.py new file mode 100644 index 000000000..c8161c53b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/schema.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
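The collision claim in the `resource_uuid` docstring is easy to sanity-check with a birthday-bound estimate: 19 hex characters give 16^19 (about 7.6e22) distinct values, so 10 million draws collide with probability roughly n^2 / (2N). A quick back-of-the-envelope check:

```python
# Birthday-bound estimate for uuid4().hex[:19]: p ≈ n^2 / (2 * N)
n = 10_000_000     # number of draws
N = 16 ** 19       # distinct 19-hex-char strings (~7.6e22)
p = n * n / (2 * N)
print(f'{p:.1e}')  # ~6.6e-10, i.e. effectively collision-free
```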
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Optional + +from fedlearner_webconsole.exceptions import InternalException + +_SPARK_TO_JSON = { + 'integer': 'integer', + 'long': 'integer', + 'short': 'integer', + 'float': 'number', + 'double': 'number', + 'string': 'string', + 'binary': 'string', + 'boolean': 'boolean', + 'null': 'null', +} + + +def spark_schema_to_json_schema(spark_schema: Optional[dict]): + """ + all fields in spark schema are deemed required in json schema + any fields not in spark schema is deemed forbidden in json schema + type convert from spark schema to json schema by _SPARK_TO_JSON + Ref: https://spark.apache.org/docs/latest/api/python/reference/api/pyspark.sql.types.StructField.html + Ref: https://json-schema.org/learn/getting-started-step-by-step.html + + e.g. + [from] spark schema: + { + 'type': 'struct', + 'fields': [ + { + 'name': 'raw_id', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, + { + 'name': 'f01', + 'type': 'float', + 'nullable': True, + 'metadata': {} + }, + { + 'name': 'image', + 'type': 'binary', + 'nullable': True, + 'metadata': {} + } + ] + } + + [to] json schema: + { + 'type': 'object', + 'properties':{ + 'raw_id': { + 'type': 'integer' + }, + 'f01': { + 'type': 'number' + }, + 'image': { + 'type': 'string' + } + }, + 'additionalProperties': False, + 'required': [ + 'raw_id', + 'f01', + 'image' + ] + } + """ + if spark_schema is None: + return {} + properties = {} + required = [] + fields = spark_schema.get('fields') + for field in fields: + name = field.get('name') + field_type = field.get('type') + json_type = _SPARK_TO_JSON.get(field_type) + if json_type is None: + raise InternalException( + f'spark schema to json schema convert failed! reason: unrecognized type [{field_type}]') + properties[name] = {'type': json_type} + required.append(name) + json_schema = {'type': 'object', 'properties': properties, 'additionalProperties': False, 'required': required} + return json_schema diff --git a/web_console_v2/api/fedlearner_webconsole/utils/schema_test.py b/web_console_v2/api/fedlearner_webconsole/utils/schema_test.py new file mode 100644 index 000000000..a32f18fd2 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/schema_test.py @@ -0,0 +1,74 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
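A short usage sketch of the converter, validating a data row against the generated schema. The third-party `jsonschema` package is an assumption here; the module itself only builds the schema dict:

```python
from jsonschema import validate, ValidationError

from fedlearner_webconsole.utils.schema import spark_schema_to_json_schema

spark_schema = {
    'type': 'struct',
    'fields': [{'name': 'raw_id', 'type': 'long', 'nullable': True, 'metadata': {}}],
}
json_schema = spark_schema_to_json_schema(spark_schema)
validate({'raw_id': 1}, json_schema)  # passes: raw_id maps to 'integer'
try:
    validate({'raw_id': 1, 'extra': 2}, json_schema)  # 'additionalProperties' is False
except ValidationError as e:
    print(e.message)
```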
+#
+
+import unittest
+
+from fedlearner_webconsole.utils.schema import spark_schema_to_json_schema
+
+
+class SchemaConvertTest(unittest.TestCase):
+
+    def test_spark_schema_to_json_schema(self):
+        spark_schema = {
+            'type':
+                'struct',
+            'fields': [{
+                'name': 'raw_id',
+                'type': 'integer',
+                'nullable': True,
+                'metadata': {}
+            }, {
+                'name': 'f01',
+                'type': 'float',
+                'nullable': True,
+                'metadata': {}
+            }, {
+                'name': 'image',
+                'type': 'binary',
+                'nullable': True,
+                'metadata': {}
+            }, {
+                'name': 'height',
+                'type': 'long',
+                'nullable': True,
+                'metadata': {}
+            }]
+        }
+
+        json_schema = {
+            'type': 'object',
+            'properties': {
+                'raw_id': {
+                    'type': 'integer'
+                },
+                'f01': {
+                    'type': 'number'
+                },
+                'image': {
+                    'type': 'string'
+                },
+                'height': {
+                    'type': 'integer'
+                }
+            },
+            'additionalProperties': False,
+            'required': ['raw_id', 'f01', 'image', 'height']
+        }
+        res = spark_schema_to_json_schema(spark_schema)
+        self.assertEqual(res, json_schema)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/sorting.py b/web_console_v2/api/fedlearner_webconsole/utils/sorting.py
new file mode 100644
index 000000000..31867083b
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/sorting.py
@@ -0,0 +1,60 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import logging +import re +from typing import NamedTuple, Type, List + +from sqlalchemy import asc, desc +from sqlalchemy.orm import Query + +from fedlearner_webconsole.db import db + +_REGEX = re.compile(r'^([a-zA-Z0-9._\-]+)\s(asc|desc)$') + + +class SortExpression(NamedTuple): + is_asc: bool + field: str + + +def parse_expression(exp: str) -> SortExpression: + matches = _REGEX.match(exp) + if not matches: + error_message = f'[SortExpression] unsupported expression {exp}' + logging.error(error_message) + raise ValueError(error_message) + is_asc = True + if matches.group(2) == 'desc': + is_asc = False + return SortExpression(field=matches.group(1), is_asc=is_asc) + + +class SorterBuilder(object): + + def __init__(self, model_class: Type[db.Model], supported_fields: List[str]): + self.model_class = model_class + for field in supported_fields: + assert getattr(self.model_class, field, None) is not None, f'{field} is not a column key' + self.supported_fields = set(supported_fields) + + def build_query(self, query: Query, exp: SortExpression) -> Query: + if exp.field not in self.supported_fields: + raise ValueError(f'[SortExpression] unsupported field: {exp.field}') + column = getattr(self.model_class, exp.field) + order_fn = asc + if not exp.is_asc: + order_fn = desc + return query.order_by(order_fn(column)) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/sorting_test.py b/web_console_v2/api/fedlearner_webconsole/utils/sorting_test.py new file mode 100644 index 000000000..0b1984afe --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/sorting_test.py @@ -0,0 +1,76 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
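End to end, the two sorting pieces compose like this; `Job` and `session` below are placeholders for any mapped model and an active db session, not names from this patch:

```python
from fedlearner_webconsole.utils.sorting import parse_expression, SorterBuilder

# Typically the expression arrives as a query parameter, e.g. ?order_by=created_at desc
exp = parse_expression('created_at desc')
assert exp.field == 'created_at' and exp.is_asc is False

# The builder is constructed once per model with an allowlist of columns:
# builder = SorterBuilder(model_class=Job, supported_fields=['id', 'created_at'])
# query = builder.build_query(session.query(Job), exp)  # ORDER BY job.created_at DESC
```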
+# + +import unittest + +from fedlearner_webconsole.db import db, default_table_args +from fedlearner_webconsole.utils.sorting import parse_expression, SortExpression, SorterBuilder +from testing.no_web_server_test_case import NoWebServerTestCase + + +class ParseExpressionTest(unittest.TestCase): + + def test_invalid_exp(self): + # No space + with self.assertRaises(ValueError): + parse_expression('fieldasc') + # Invalid field name + with self.assertRaises(ValueError): + parse_expression('field你 asc') + # Invalid asc sign + with self.assertRaises(ValueError): + parse_expression('fiele dasc') + + def test_valid_exp(self): + self.assertEqual(parse_expression('ff_GG-1 asc'), SortExpression(field='ff_GG-1', is_asc=True)) + self.assertEqual(parse_expression('f.a.b desc'), SortExpression(field='f.a.b', is_asc=False)) + + +class TestModel(db.Model): + __tablename__ = 'test_table' + __table_args__ = (default_table_args('Test table')) + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + amount = db.Column(db.Float, default=0) + + +class SorterBuilderTest(NoWebServerTestCase): + + def test_supported_field(self): + with self.assertRaises(AssertionError): + SorterBuilder(model_class=TestModel, supported_fields=['id', 'amount', 'non-existing']) + + def test_build_query(self): + self.maxDiff = None + builder = SorterBuilder(model_class=TestModel, supported_fields=['id', 'amount']) + with db.session_scope() as session: + query = session.query(TestModel) + # Invalid one + sort_exp = SortExpression(field='f1', is_asc=True) + with self.assertRaisesRegex(ValueError, 'unsupported field: f1'): + builder.build_query(query, sort_exp) + # Valid ones + sort_exp = SortExpression(field='id', is_asc=True) + statement = self.generate_mysql_statement(builder.build_query(query, sort_exp)) + self.assertEqual(statement, 'SELECT test_table.id, test_table.amount \n' + 'FROM test_table ORDER BY test_table.id ASC') + sort_exp = SortExpression(field='amount', is_asc=False) + statement = self.generate_mysql_statement(builder.build_query(query, sort_exp)) + self.assertEqual( + statement, 'SELECT test_table.id, test_table.amount \n' + 'FROM test_table ORDER BY test_table.amount DESC') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/stream_tars.py b/web_console_v2/api/fedlearner_webconsole/utils/stream_tars.py new file mode 100644 index 000000000..7087c99d1 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/stream_tars.py @@ -0,0 +1,166 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# coding: utf-8 +# pylint: disable=redefined-builtin, no-else-continue, broad-except, consider-using-with +import os +from io import BytesIO +from tarfile import TarFile, NUL, BLOCKSIZE, TarInfo +import tempfile +import gzip +import logging +from typing import BinaryIO, AnyStr, Union + +from fedlearner_webconsole.utils.file_manager import FileManager, FILE_PREFIX + +CHUNK_SIZE = 1 << 22 + + +class FileStream: + + def __init__(self): + self.buffer = BytesIO() + self.offset = 0 + + def write(self, s: AnyStr): + self.buffer.write(s) + self.offset += len(s) + + def tell(self): + return self.offset + + def close(self): + self.buffer.close() + + def read_all(self): + try: + return self.buffer.getvalue() + finally: + self.buffer.close() + self.buffer = BytesIO() + + +class _TarFileWithoutCache(TarFile): + """ Building a tar file chunk-by-chunk. + """ + + def __init__(self, directories: Union[str, list], file_chunk_size: int = CHUNK_SIZE): # pylint: disable=super-init-not-called + self._contents = [directories] + self._file_chunk_size = file_chunk_size + self._is_multiple = False + if isinstance(directories, list): + self._is_multiple = True + self._contents = directories + + @staticmethod + def _stream_file_into_tar(tarinfo: TarInfo, tar: TarFile, fh: BinaryIO, buf_size: int): + out = tar.fileobj + + for b in iter(lambda: fh.read(buf_size), b''): + out.write(b) + yield + + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + out.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + tar.offset += blocks * BLOCKSIZE + yield + + def __iter__(self): + out = FileStream() + tar = TarFile(fileobj=out, mode='w') + for content in self._contents: + if os.path.isdir(content): + prefix, name = os.path.split(content) + prefix_len = len(prefix) + len(os.path.sep) + tar.add(name=content, arcname=name, recursive=False) + for path, dirs, files in os.walk(content): + arcpath = path[prefix_len:] + # Add files + # Use this script instead of tar.add() to avoid the non-fixed memory usage caused by the invoke of + # tar.addfile(), which will cache tarinfo in TarFile.members + for f in files: + filepath = os.path.join(path, f) + with open(filepath, 'rb') as fh: + tarinfo = tar.gettarinfo(name=filepath, arcname=os.path.join(arcpath, f), fileobj=fh) + tar.addfile(tarinfo) + for _ in self._stream_file_into_tar(tarinfo, tar, fh, self._file_chunk_size): + yield out.read_all() + + # Add directories + for d in dirs: + tar.add(name=os.path.join(path, d), arcname=os.path.join(arcpath, d), recursive=False) + yield out.read_all() + else: + filepath = content + filename = os.path.basename(filepath) + with open(filepath, 'rb') as fh: + tarinfo = tar.gettarinfo(name=filepath, arcname=filename, fileobj=fh) + tar.addfile(tarinfo) + for _ in self._stream_file_into_tar(tarinfo, tar, fh, self._file_chunk_size): + yield out.read_all() + + tar.close() + yield out.read_all() + out.close() + + +class StreamingTar(object): + """ Building a tar file chunk-by-chunk. 
+ """ + + def __init__(self, fm: FileManager, chunksize: int = CHUNK_SIZE) -> None: + super().__init__() + self._fm = fm + self.chunksize = chunksize + + def _archive(self, source_path: Union[str, list], target_path: str): + logging.info(f'will archive {source_path} to {target_path}') + tarfile = _TarFileWithoutCache(source_path, self.chunksize) + with open(target_path, 'wb') as target_f: + for chunk in tarfile: + target_f.write(chunk) + + def _compress(self, filename: str, target_path: str): + with open(filename, 'rb') as tar_f: + with gzip.GzipFile(target_path, 'wb') as gzip_f: + stream = tar_f.read(self.chunksize) + while stream: + gzip_f.write(stream) + stream = tar_f.read(self.chunksize) + + # TODO(lixiaoguang.01): remove this function after using FileManager + def _trim_prefix(self, path: str) -> str: + if path.startswith(FILE_PREFIX): + return path.split(FILE_PREFIX, 1)[1] + return path + + # TODO(zeju): provide tar file in-memory option + def archive(self, source_path: Union[str, list], target_path: str, gzip_compress: bool = False): + # TODO(lixiaoguang.01): use FileManager in archive and compress + if isinstance(source_path, str): + source_path = self._trim_prefix(source_path) + else: # list + trimmed_source_path = [] + for single_path in source_path: + trimmed_source_path.append(self._trim_prefix(single_path)) + source_path = trimmed_source_path + target_path = self._trim_prefix(target_path) + + with tempfile.NamedTemporaryFile('wb') as temp: + if gzip_compress: + self._archive(source_path, temp.name) + self._compress(temp.name, target_path) + else: + self._archive(source_path, target_path) diff --git a/web_console_v2/api/fedlearner_webconsole/utils/stream_untars.py b/web_console_v2/api/fedlearner_webconsole/utils/stream_untars.py new file mode 100644 index 000000000..28a48f33e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/stream_untars.py @@ -0,0 +1,145 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# coding: utf-8 +# pylint: disable=redefined-builtin, no-else-continue, broad-except, consider-using-f-string, consider-using-with +import tarfile +import gzip +from tarfile import (BLOCKSIZE, TarFile, ReadError, EOFHeaderError, InvalidHeaderError, EmptyHeaderError, + TruncatedHeaderError, SubsequentHeaderError) +import tempfile +import logging +from typing import BinaryIO + +from fedlearner_webconsole.utils.file_manager import FileManager, FILE_PREFIX + +CHUNK_SIZE = 1 << 22 + +TAR_SUFFIX = ('.tar',) +GZIP_SUFFIX = ('.gz', '.tgz') + + +class _TarFileWithoutCache(TarFile): + + def next(self): + self._check('ra') + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Advance the file pointer. + if self.offset != self.fileobj.tell(): + self.fileobj.seek(self.offset - 1) + if not self.fileobj.read(1): + raise tarfile.ReadError('unexpected end of data') + + # Read the next block. 
+        tarinfo = None
+        while True:
+            try:
+                tarinfo = self.tarinfo.fromtarfile(self)
+            except EOFHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, '0x%X: %s' % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+            except InvalidHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, '0x%X: %s' % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+                elif self.offset == 0:
+                    raise ReadError(str(e)) from e
+            except EmptyHeaderError as e:
+                if self.offset == 0:
+                    raise ReadError('empty file') from e
+            except TruncatedHeaderError as e:
+                if self.offset == 0:
+                    raise ReadError(str(e)) from e
+            except SubsequentHeaderError as e:
+                raise ReadError(str(e)) from e
+            break
+
+        if tarinfo is None:
+            self._loaded = True
+
+        return tarinfo
+
+
+class StreamingUntar(object):
+    """
+    A class that supports decompressing a .tar.gz file in a streaming fashion.
+    1. The first step is to decompress the gzip file, chunk by chunk, into a tarball.
+    2. Then use _TarFileWithoutCache to untar the tarball with a fixed memory usage.
+    3. _TarFileWithoutCache is a subclass of TarFile that removes the cache in its next() function.
+    e.g.:
+        convert xxx.tar.gz -> xxx
+    """
+
+    def __init__(self, fm: FileManager, chunksize: int = CHUNK_SIZE) -> None:
+        super().__init__()
+        self._fm = fm
+        self.chunksize = chunksize
+
+    def _uncompressed(self, source: str, temp_file: BinaryIO) -> str:
+        try:
+            with gzip.GzipFile(source, 'rb') as gf:
+                stream = gf.read(self.chunksize)
+                while stream:
+                    temp_file.write(stream)
+                    stream = gf.read(self.chunksize)
+        except Exception as e:
+            logging.error(f'failed to stream-decompress file from {source}, ex: {e}')
+        return temp_file.name
+
+    def _untar(self, source: str, dest: str) -> None:
+        tar = _TarFileWithoutCache.open(source)
+        try:
+            entry = tar.next()
+            while entry:
+                tar.extract(entry, path=dest)
+                entry = tar.next()
+        except Exception as e:
+            logging.error(f'failed to stream-untar file from {source} to {dest}, ex: {e}')
+        finally:
+            tar.close()
+
+    # TODO(lixiaoguang.01): remove this function after using FileManager
+    def _trim_prefix(self, path: str) -> str:
+        if path.startswith(FILE_PREFIX):
+            return path.split(FILE_PREFIX, 1)[1]
+        return path
+
+    def untar(self, source: str, dest: str) -> None:
+        """
+        Untars source.tar.gz into the dest directory, with a fixed memory usage.
+
+        Args:
+            source: source path, only supports the local file system
+            dest: destination path, only supports the local file system
+
+        Raises:
+            ValueError: if the tarfile does not end with .tar/.tar.gz
+            Exception: if an io operation failed
+        """
+        # TODO(lixiaoguang.01): use FileManager in untar and uncompressed
+        source = self._trim_prefix(source)
+        dest = self._trim_prefix(dest)
+
+        if not source.endswith(TAR_SUFFIX + GZIP_SUFFIX):
+            raise ValueError(f'{source} does not end with a tar or gzip extension')
+        with tempfile.NamedTemporaryFile('wb') as temp:
+            if source.endswith(GZIP_SUFFIX):
+                source = self._uncompressed(source, temp)
+            self._untar(source, dest)
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/swagger.py b/web_console_v2/api/fedlearner_webconsole/utils/swagger.py
new file mode 100644
index 000000000..540b4170c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/swagger.py
@@ -0,0 +1,53 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
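The fixed-memory idea in both streaming classes is the same: never hold more than one chunk in memory. Stripped of the tar handling, the pattern looks like this standalone stdlib-only sketch:

```python
import gzip

CHUNK_SIZE = 1 << 22  # 4 MiB, matching the modules above


def stream_decompress(src: str, dst: str, chunk_size: int = CHUNK_SIZE):
    # Reads and writes one chunk at a time, so peak memory stays around
    # chunk_size regardless of how large the archive is.
    with gzip.open(src, 'rb') as fin, open(dst, 'wb') as fout:
        for chunk in iter(lambda: fin.read(chunk_size), b''):
            fout.write(chunk)
```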
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from pathlib import Path + + +def replace_ref_name(schema: dict, ref_name: str, message_name: str) -> dict: + for k, v in schema.items(): + if isinstance(v, dict): + schema[k] = replace_ref_name(v, ref_name, message_name) + if '$ref' in schema and schema['$ref'] == f'#/definitions/{message_name}': + schema['$ref'] = f'#/definitions/{ref_name}' + return schema + + +def remove_title(schema: dict) -> dict: + for k, v in schema.items(): + if isinstance(v, dict): + schema[k] = remove_title(v) + if 'title' in schema: + del schema['title'] + return schema + + +def normalize_schema(definitions: dict, jsonschema_path: Path) -> dict: + # "prefix_schema_files_with_package" option in Makefile will generate a directory with + # the name of the corresponding package name, therefore the full name of a message is + # {directory_name}.{message_name} + package_name = jsonschema_path.parent.name + message_name = jsonschema_path.stem + ref_name = f'{package_name}.{message_name}' + + # Title gets generated in newer version of jsonschema plugin; just remove it manually + definitions = remove_title(definitions) + + # The name of the first message defined in .proto file will be the used as the generated + # json file's name, which does not have a package name. Therefore, we prepend the package + # name for it + definitions[ref_name] = replace_ref_name(definitions[message_name], ref_name, message_name) + del definitions[message_name] + return definitions diff --git a/web_console_v2/api/fedlearner_webconsole/utils/swagger_test.py b/web_console_v2/api/fedlearner_webconsole/utils/swagger_test.py new file mode 100644 index 000000000..6b4960712 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/swagger_test.py @@ -0,0 +1,163 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
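A sketch of how `normalize_schema` might be driven over a directory of generated schemas. The directory layout and the top-level `definitions` key are assumptions for illustration; they are not shown in this patch:

```python
import json
from pathlib import Path

from fedlearner_webconsole.utils.swagger import normalize_schema

definitions = {}
# Hypothetical layout: one generated schema file per proto message, e.g.
# schemas/fedlearner_webconsole.proto/Workflow.json holding a 'definitions' map.
for schema_file in Path('schemas').glob('**/*.json'):
    with open(schema_file) as f:
        file_definitions = json.load(f).get('definitions', {})
    definitions.update(normalize_schema(file_definitions, schema_file))
# 'definitions' can now be embedded into the aggregated swagger document.
```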
+# + +import unittest + +from pathlib import Path + +from fedlearner_webconsole.utils.swagger import remove_title, replace_ref_name, normalize_schema + + +class SwaggerTest(unittest.TestCase): + + def test_replace_ref_name(self): + candidate = { + '$ref': '#/definitions/no', + 'hello': { + '$ref': '#/definitions/no', + 'world': { + '$ref': '#/definitions/no' + } + } + } + candidate = replace_ref_name(candidate, ref_name='yes', message_name='no') + self.assertDictEqual( + { + '$ref': '#/definitions/yes', + 'hello': { + '$ref': '#/definitions/yes', + 'world': { + '$ref': '#/definitions/yes' + } + } + }, candidate) + + def test_remove_title(self): + candidate = {'title': 'hello', 'inner': {'title': 'world', 'inner': {'title': '!',}}} + candidate = remove_title(candidate) + self.assertDictEqual({'inner': {'inner': {}}}, candidate) + + def test_normalize_schema(self): + candidate = { + 'FileTreeNode': { + 'properties': { + 'files': { + 'items': { + '$ref': '#/definitions/FileTreeNode' + }, + 'additionalProperties': False, + 'type': 'array' + } + }, + 'additionalProperties': False, + 'type': 'object', + 'title': 'File Tree Node' + } + } + + candidate = normalize_schema(candidate, Path('aaa/FileTreeNode.json')) + self.assertEqual( + { + # here + 'aaa.FileTreeNode': { + 'properties': { + 'files': { + 'items': { + # here + '$ref': '#/definitions/aaa.FileTreeNode' + }, + 'additionalProperties': False, + 'type': 'array' + } + }, + 'additionalProperties': False, + 'type': 'object', + # no title + } + }, + candidate) + + candidate = { + 'AlgorithmData': { + 'properties': { + 'version': { + '$ref': '#/definitions/AlgorithmData', + }, + 'parameter': { + '$ref': '#/definitions/fedlearner_webconsole.proto.AlgorithmParameter', + 'additionalProperties': False + }, + }, + 'additionalProperties': False, + 'type': 'object', + 'title': 'Algorithm Data' + }, + 'fedlearner_webconsole.proto.AlgorithmParameter': { + 'properties': { + 'variables': { + 'items': { + '$ref': '#/definitions/fedlearner_webconsole.proto.AlgorithmVariable' + }, + 'additionalProperties': False, + 'type': 'array' + } + }, + 'additionalProperties': False, + 'type': 'object', + 'title': 'Algorithm Parameter' + }, + } + + candidate = normalize_schema(candidate, Path('aaa/AlgorithmData.json')) + self.assertDictEqual( + { + # here + 'aaa.AlgorithmData': { + 'properties': { + 'version': { + # here + '$ref': '#/definitions/aaa.AlgorithmData', + }, + 'parameter': { + # this does not change + '$ref': '#/definitions/fedlearner_webconsole.proto.AlgorithmParameter', + 'additionalProperties': False + }, + }, + 'additionalProperties': False, + 'type': 'object', + # no title + }, + 'fedlearner_webconsole.proto.AlgorithmParameter': { + 'properties': { + 'variables': { + 'items': { + # this does not change + '$ref': '#/definitions/fedlearner_webconsole.proto.AlgorithmVariable' + }, + 'additionalProperties': False, + 'type': 'array' + } + }, + 'additionalProperties': False, + 'type': 'object', + # no title + } + }, + candidate) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/system_envs.py b/web_console_v2/api/fedlearner_webconsole/utils/system_envs.py index b75f607a6..6431a70b8 100644 --- a/web_console_v2/api/fedlearner_webconsole/utils/system_envs.py +++ b/web_console_v2/api/fedlearner_webconsole/utils/system_envs.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. @@ -13,8 +13,8 @@ # limitations under the License. # coding: utf-8 -import json -import os + +from envs import Envs def _is_valid_env(env: dict) -> bool: @@ -22,105 +22,118 @@ def _is_valid_env(env: dict) -> bool: env.get('value', None) is not None +def _normalize_env(env: dict) -> dict: + if 'value' in env: + env['value'] = str(env['value']) + return env + + def get_system_envs(): """Gets a JSON string to represent system envs.""" # Most envs should be from pod's env - envs = [ - { - 'name': 'POD_IP', - 'valueFrom': { - 'fieldRef': { - 'fieldPath': 'status.podIP' - } + envs = [{ + 'name': 'POD_IP', + 'valueFrom': { + 'fieldRef': { + 'fieldPath': 'status.podIP' } - }, - { - 'name': 'POD_NAME', - 'valueFrom': { - 'fieldRef': { - 'fieldPath': 'metadata.name' - } + } + }, { + 'name': 'POD_NAME', + 'valueFrom': { + 'fieldRef': { + 'fieldPath': 'metadata.name' } - }, - { - 'name': 'CPU_REQUEST', - 'valueFrom': { - 'resourceFieldRef': { - 'resource': 'requests.cpu' - } + } + }, { + 'name': 'CPU_REQUEST', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'requests.cpu' } - }, - { - 'name': 'MEM_REQUEST', - 'valueFrom': { - 'resourceFieldRef': { - 'resource': 'requests.memory' - } + } + }, { + 'name': 'MEM_REQUEST', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'requests.memory' } - }, - { - 'name': 'CPU_LIMIT', - 'valueFrom': { - 'resourceFieldRef': { - 'resource': 'limits.cpu' - } + } + }, { + 'name': 'CPU_LIMIT', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'limits.cpu' } - }, - { - 'name': 'MEM_LIMIT', - 'valueFrom': { - 'resourceFieldRef': { - 'resource': 'limits.memory' - } + } + }, { + 'name': 'MEM_LIMIT', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'limits.memory' } - }, - { - 'name': 'ES_HOST', - 'value': os.getenv('ES_HOST') - }, - { - 'name': 'ES_PORT', - 'value': os.getenv('ES_PORT') - }, - { - 'name': 'DB_HOST', - 'value': os.getenv('DB_HOST') - }, - { - 'name': 'DB_PORT', - 'value': os.getenv('DB_PORT') - }, - { - 'name': 'DB_DATABASE', - 'value': os.getenv('DB_DATABASE') - }, - { - 'name': 'DB_USERNAME', - 'value': os.getenv('DB_USERNAME') - }, - { - 'name': 'DB_PASSWORD', - 'value': os.getenv('DB_PASSWORD') - }, - { - 'name': 'KVSTORE_TYPE', - 'value': os.getenv('KVSTORE_TYPE') - }, - { - 'name': 'ETCD_NAME', - 'value': os.getenv('ETCD_NAME') - }, - { - 'name': 'ETCD_ADDR', - 'value': os.getenv('ETCD_ADDR') - }, - { - 'name': 'ETCD_BASE_DIR', - 'value': os.getenv('ETCD_BASE_DIR') } - ] - return ','.join([json.dumps(env) - for env in envs if _is_valid_env(env)]) + }, { + 'name': 'ES_HOST', + 'value': Envs.ES_HOST + }, { + 'name': 'ES_PORT', + 'value': Envs.ES_PORT + }, { + 'name': 'DB_HOST', + 'value': Envs.DB_HOST + }, { + 'name': 'DB_PORT', + 'value': Envs.DB_PORT + }, { + 'name': 'DB_DATABASE', + 'value': Envs.DB_DATABASE + }, { + 'name': 'DB_USERNAME', + 'value': Envs.DB_USERNAME + }, { + 'name': 'DB_PASSWORD', + 'value': Envs.DB_PASSWORD + }, { + 'name': 'KVSTORE_TYPE', + 'value': Envs.KVSTORE_TYPE + }, { + 'name': 'ETCD_NAME', + 'value': Envs.ETCD_NAME + }, { + 'name': 'ETCD_ADDR', + 'value': Envs.ETCD_ADDR + }, { + 'name': 'ETCD_BASE_DIR', + 'value': Envs.ETCD_BASE_DIR + }, { + 'name': 'ROBOT_USERNAME', + 'value': Envs.ROBOT_USERNAME + }, { + 'name': 'ROBOT_PWD', + 'value': Envs.ROBOT_PWD + }, { + 'name': 'WEB_CONSOLE_V2_ENDPOINT', + 'value': Envs.WEB_CONSOLE_V2_ENDPOINT + }, { + 'name': 'HADOOP_HOME', + 
'value': Envs.HADOOP_HOME + }, { + 'name': 'JAVA_HOME', + 'value': Envs.JAVA_HOME + }, { + 'name': 'PRE_START_HOOK', + 'value': Envs.PRE_START_HOOK + }, { + 'name': 'METRIC_COLLECTOR_EXPORT_ENDPOINT', + 'value': Envs.APM_SERVER_ENDPOINT + }, { + 'name': 'CLUSTER', + 'value': Envs.CLUSTER + }] + valid_envs = [env for env in envs if _is_valid_env(env)] + envs = [_normalize_env(env) for env in valid_envs] + return envs if __name__ == '__main__': diff --git a/web_console_v2/api/fedlearner_webconsole/utils/system_envs_test.py b/web_console_v2/api/fedlearner_webconsole/utils/system_envs_test.py new file mode 100644 index 000000000..ca628cb47 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/system_envs_test.py @@ -0,0 +1,133 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from unittest.mock import patch + +from fedlearner_webconsole.utils.system_envs import get_system_envs + + +class _FakeEnvs(object): + ES_HOST = 'test es host' + ES_PORT = '9200' + DB_HOST = 'test db host' + DB_PORT = '3306' + DB_DATABASE = 'fedlearner' + DB_USERNAME = 'username' + DB_PASSWORD = 'password' + KVSTORE_TYPE = 'mysql' + ETCD_NAME = 'fedlearner' + ETCD_ADDR = 'fedlearner-stack-etcd.default.svc.cluster.local:2379' + ETCD_BASE_DIR = 'fedlearner' + APM_SERVER_ENDPOINT = 'http://apm-server-apm-server:8200' + CLUSTER = 'cloudnative-hl' + ROBOT_USERNAME = None + ROBOT_PWD = None + WEB_CONSOLE_V2_ENDPOINT = None + HADOOP_HOME = None + JAVA_HOME = None + PRE_START_HOOK = None + + +class SystemEnvsTest(unittest.TestCase): + + @patch('fedlearner_webconsole.utils.system_envs.Envs', _FakeEnvs) + def test_get_available_envs(self): + self.assertEqual(get_system_envs(), [{ + 'name': 'POD_IP', + 'valueFrom': { + 'fieldRef': { + 'fieldPath': 'status.podIP' + } + } + }, { + 'name': 'POD_NAME', + 'valueFrom': { + 'fieldRef': { + 'fieldPath': 'metadata.name' + } + } + }, { + 'name': 'CPU_REQUEST', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'requests.cpu' + } + } + }, { + 'name': 'MEM_REQUEST', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'requests.memory' + } + } + }, { + 'name': 'CPU_LIMIT', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'limits.cpu' + } + } + }, { + 'name': 'MEM_LIMIT', + 'valueFrom': { + 'resourceFieldRef': { + 'resource': 'limits.memory' + } + } + }, { + 'name': 'ES_HOST', + 'value': 'test es host' + }, { + 'name': 'ES_PORT', + 'value': '9200' + }, { + 'name': 'DB_HOST', + 'value': 'test db host' + }, { + 'name': 'DB_PORT', + 'value': '3306' + }, { + 'name': 'DB_DATABASE', + 'value': 'fedlearner' + }, { + 'name': 'DB_USERNAME', + 'value': 'username' + }, { + 'name': 'DB_PASSWORD', + 'value': 'password' + }, { + 'name': 'KVSTORE_TYPE', + 'value': 'mysql' + }, { + 'name': 'ETCD_NAME', + 'value': 'fedlearner' + }, { + 'name': 'ETCD_ADDR', + 'value': 'fedlearner-stack-etcd.default.svc.cluster.local:2379' + }, { + 'name': 'ETCD_BASE_DIR', + 'value': 'fedlearner' + }, { + 
'name': 'METRIC_COLLECTOR_EXPORT_ENDPOINT', + 'value': 'http://apm-server-apm-server:8200' + }, { + 'name': 'CLUSTER', + 'value': 'cloudnative-hl' + }]) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/tars.py b/web_console_v2/api/fedlearner_webconsole/utils/tars.py deleted file mode 100644 index 3e7a59ea1..000000000 --- a/web_console_v2/api/fedlearner_webconsole/utils/tars.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import tarfile - - -class TarCli: - @staticmethod - def untar_file(tar_name, extract_path_prefix): - with tarfile.open(tar_name, 'r:*') as tar_pack: - tar_pack.extractall(extract_path_prefix) - - return True diff --git a/web_console_v2/api/fedlearner_webconsole/utils/tars_test.py b/web_console_v2/api/fedlearner_webconsole/utils/tars_test.py new file mode 100644 index 000000000..6a646acea --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/tars_test.py @@ -0,0 +1,69 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
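Circling back to `get_system_envs` above: each returned entry is a k8s `env` item, so the result can be spliced straight into a container spec. A sketch (the pod dict and image name are illustrative):

```python
from fedlearner_webconsole.utils.system_envs import get_system_envs

container = {
    'name': 'worker',
    'image': 'fedlearner:latest',  # illustrative image name
    # Entries whose value is None are dropped by _is_valid_env, and
    # _normalize_env stringifies values, matching the k8s env schema.
    'env': get_system_envs(),
}
```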
+
+# coding: utf-8
+import unittest
+import os
+import tempfile
+import shutil
+from pathlib import Path
+from fedlearner_webconsole.utils.stream_untars import StreamingUntar
+from fedlearner_webconsole.utils.stream_tars import StreamingTar
+from fedlearner_webconsole.utils.file_manager import FileManager
+
+
+class StreamingTarTest(unittest.TestCase):
+
+    def setUp(self):
+        super().setUp()
+        self._file_manager = FileManager()
+        self._tempdir = os.path.join(tempfile.gettempdir(), 'tar_dir')
+        os.makedirs(self._tempdir, exist_ok=True)
+
+    def tearDown(self):
+        shutil.rmtree(self._tempdir)
+        super().tearDown()
+
+    def _get_temp_path(self, file_path: str = None) -> str:
+        return str(Path(self._tempdir, file_path or '').absolute())
+
+    def test_untar(self):
+
+        # init a dir with some files
+        tar_path = os.path.join(self._tempdir, 'tar')
+        self._file_manager.mkdir(tar_path)
+        file1_path = os.path.join(tar_path, 'test-tar1.py')
+        file2_path = os.path.join(tar_path, 'test-tar2.py')
+        file3_path = os.path.join(tar_path, 'new/test-tar3.py')
+
+        self._file_manager.write(file1_path, 'abc')
+        self._file_manager.write(file2_path, 'abc')
+        self._file_manager.write(file3_path, 'abc')
+
+        # Create a tar file
+        tar_file_path = os.path.join(tar_path, 'test-tar.tar.gz')
+        StreamingTar(self._file_manager).archive(source_path=[file1_path, file2_path, file3_path],
+                                                 target_path=tar_file_path,
+                                                 gzip_compress=True)
+
+        # test streaming untar file
+        untar_dir = os.path.join(tar_path, 'untar')
+        StreamingUntar(self._file_manager).untar(tar_file_path, untar_dir)
+
+        self.assertTrue(self._file_manager.exists(os.path.join(untar_dir, os.path.basename(file1_path))))
+        self.assertTrue(self._file_manager.exists(os.path.join(untar_dir, os.path.basename(file2_path))))
+        self.assertTrue(self._file_manager.exists(os.path.join(untar_dir, os.path.basename(file3_path))))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/tfrecord_test.py b/web_console_v2/api/fedlearner_webconsole/utils/tfrecord_test.py
new file mode 100644
index 000000000..3494b9ede
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/tfrecord_test.py
@@ -0,0 +1,55 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +# coding: utf-8 +import json +import unittest +from testing.common import BaseTestCase +from http import HTTPStatus + +from envs import Envs + + +class TfRecordReaderTest(BaseTestCase): + + def test_reader(self): + metrix = [['id', 'x_1', 'x_2', 'x_3', 'x_4'], + [['0'], [0.4660772681236267], [0.9965257048606873], [0.15621308982372284], [0.9282205700874329]], + [['1'], [0.04800121858716011], [0.1965402364730835], [0.6086887121200562], [0.9214732646942139]], + [['2'], [0.05255622789263725], [0.8994112610816956], [0.6675127744674683], [0.577964186668396]], + [['3'], [0.7057438492774963], [0.5592560172080994], [0.6767191886901855], [0.6311695575714111]], + [['4'], [0.9203364253044128], [0.9567945599555969], [0.19533273577690125], [0.17610156536102295]]] + data = { + 'path': f'{Envs.BASE_DIR}/testing/test_data/' + f'tfrecord_test.xx.aaa.data', + 'wrong_path': 'adsad.data', + 'lines': 5 + } + # test right path + resp = self.get_helper('/api/v2/debug/tfrecord?path={}&lines={}'.format(data['path'], data['lines'])) # pylint: disable=consider-using-f-string + my_data = json.loads(resp.data).get('data') + self.assertEqual(metrix, my_data) + self.assertEqual(HTTPStatus.OK, resp.status_code) + + # test None path + resp = self.get_helper('/api/v2/debug/tfrecord') + self.assertEqual(HTTPStatus.BAD_REQUEST, resp.status_code) + + # test wrong path + resp = self.get_helper('/api/v2/debug/tfrecord?path={}&lines={}'.format(data['wrong_path'], data['lines'])) # pylint: disable=consider-using-f-string + self.assertEqual(HTTPStatus.BAD_REQUEST, resp.status_code) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/utils/tfrecords_reader.py b/web_console_v2/api/fedlearner_webconsole/utils/tfrecords_reader.py new file mode 100644 index 000000000..77e659c39 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/utils/tfrecords_reader.py @@ -0,0 +1,89 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# coding: utf-8
+
+import itertools
+from typing import List
+
+import tensorflow.compat.v1 as tf
+
+
+def _parse_tfrecord(record) -> dict:
+    example = tf.train.Example()
+    example.ParseFromString(record)
+
+    parsed = {}
+    for key, value in example.features.feature.items():
+        kind = value.WhichOneof('kind')
+        if kind == 'float_list':
+            parsed[key] = [float(num) for num in value.float_list.value]
+        elif kind == 'int64_list':
+            parsed[key] = [int(num) for num in value.int64_list.value]
+        elif kind == 'bytes_list':
+            parsed[key] = [byte.decode() for byte in value.bytes_list.value]
+        else:
+            raise ValueError(f'Invalid tfrecord format: unsupported feature kind {kind}')
+
+    return parsed
+
+
+def _get_data(path: str, max_lines: int) -> List[dict]:
+    reader = tf.io.tf_record_iterator(path)
+    # islice stops after max_lines records without reading the whole file
+    return [_parse_tfrecord(record) for record in itertools.islice(reader, max_lines)]
+
+
+def _convert_to_matrix_view(records: List[dict]) -> List:
+    # The header row is the sorted union of all feature names
+    first_line = set()
+    for features in records:
+        first_line = first_line.union(features.keys())
+    sorted_first_line = sorted(first_line)
+    matrix = [sorted_first_line]
+    for features in records:
+        current_line = []
+        for column in sorted_first_line:
+            if column in features:
+                current_line.append(features[column])
+            else:
+                current_line.append('N/A')
+        matrix.append(current_line)
+    return matrix
+
+
+def tf_record_reader(path: str, max_lines: int = 10, matrix_view: bool = False) -> List:
+    """Reads tfrecords from the given path.
+
+    Args:
+        path: the path of the tfrecord file
+        max_lines: the maximum number of records read from the file
+        matrix_view: whether to convert the records to a csv-like matrix
+    Returns:
+        A list of feature dicts, or, when matrix_view is True, a csv-like
+        matrix whose first row is the sorted list of feature names.
+    """
+    records = _get_data(path, max_lines)
+    if not matrix_view:
+        return records
+    return _convert_to_matrix_view(records)
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/validator.py b/web_console_v2/api/fedlearner_webconsole/utils/validator.py
new file mode 100644
index 000000000..ac5cb6e18
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/validator.py
@@ -0,0 +1,49 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# coding: utf-8
+from typing import Callable, Dict, List, Optional, Tuple, TypeVar
+
+T = TypeVar('T')
+
+
+class Validator:
+
+    def __init__(self, name: str, is_valid: Callable[[T], bool]):
+        self.name = name
+        self._is_valid = is_valid
+
+    def is_valid(self, candidate: Optional[T]) -> Tuple[bool, Optional[str]]:
+        if candidate is None:
+            return False, f'"{self.name}" is required.'
+
+        if not self._is_valid(candidate):
+            return False, f'"{candidate}" is not a valid "{self.name}".'
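+
+        # Both checks passed: the candidate is present and satisfies the predicate.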
+        return True, None
+
+    @staticmethod
+    def validate(candidates: Dict[str, T],
+                 validators: List['Validator']) -> Tuple[bool, List[str]]:
+        """Runs every validator against its candidate; returns an overall flag
+        plus the error messages of all validators that failed."""
+        flag = True
+        error_messages = []
+
+        for validator in validators:
+            passed, error_message = validator.is_valid(candidates.get(validator.name))
+            flag = passed and flag
+            if not passed:
+                error_messages.append(error_message)
+
+        return flag, error_messages
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/validator_test.py b/web_console_v2/api/fedlearner_webconsole/utils/validator_test.py
new file mode 100644
index 000000000..3f118c850
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/validator_test.py
@@ -0,0 +1,50 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from fedlearner_webconsole.utils.validator import Validator
+
+
+class ValidatorTest(unittest.TestCase):
+
+    def test_validator(self):
+        validators = [
+            Validator('field_1', lambda x: x > 0),
+            Validator('field_2', lambda x: x > 0),
+            Validator('field_3', lambda x: x > 0)
+        ]
+
+        dct_1 = {'field_1': 1, 'field_2': 2, 'field_3': 3}
+
+        dct_2 = {'field_1': -1, 'field_2': 2, 'field_3': 3}
+
+        dct_3 = {'field_1': 1, 'field_2': 2}
+
+        res_1, err_1 = Validator.validate(dct_1, validators)
+        res_2, err_2 = Validator.validate(dct_2, validators)
+        res_3, err_3 = Validator.validate(dct_3, validators)
+
+        self.assertTrue(res_1)
+        self.assertFalse(res_2)
+        self.assertFalse(res_3)
+
+        self.assertEqual(0, len(err_1))
+        self.assertEqual(1, len(err_2))
+        self.assertEqual(1, len(err_3))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/workflow.py b/web_console_v2/api/fedlearner_webconsole/utils/workflow.py
new file mode 100644
index 000000000..19e426854
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/workflow.py
@@ -0,0 +1,50 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
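+
+# Illustrative usage (hypothetical values): build_job_name('u1234', 'raw-data')
+# evaluates to 'u1234-raw-data'; fill_variables(config, variables) copies each
+# matching variable into the config by name and raises TypeError when the
+# declared value_type differs.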
+
+# coding: utf-8
+from typing import Generator, List
+
+from fedlearner_webconsole.proto.common_pb2 import Variable
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition
+
+
+def build_job_name(workflow_uuid: str, job_def_name: str) -> str:
+    return f'{workflow_uuid}-{job_def_name}'
+
+
+def zip_workflow_variables(config: WorkflowDefinition) -> Generator[Variable, None, None]:
+    yield from config.variables
+    for job in config.job_definitions:
+        yield from job.variables
+
+
+def fill_variables(config: WorkflowDefinition,
+                   variables: List[Variable],
+                   *,
+                   dry_run: bool = False) -> WorkflowDefinition:
+    """Fills `variables` into `config` by name; with dry_run=True it only
+    validates that the value types match and leaves the config untouched."""
+    variables_mapper = {v.name: v for v in variables}
+    for slot_variable in zip_workflow_variables(config):
+        variable = variables_mapper.get(slot_variable.name)
+        if variable is None:
+            continue
+        if variable.value_type != slot_variable.value_type:
+            raise TypeError(f'unmatched variable type! {variable.value_type} != {slot_variable.value_type}')
+        if dry_run:
+            continue
+        slot_variable.typed_value.MergeFrom(variable.typed_value)
+        slot_variable.value = variable.value
+
+    return config
diff --git a/web_console_v2/api/fedlearner_webconsole/utils/workflow_test.py b/web_console_v2/api/fedlearner_webconsole/utils/workflow_test.py
new file mode 100644
index 000000000..9df21222a
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/utils/workflow_test.py
@@ -0,0 +1,111 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+
+import unittest
+
+from google.protobuf.struct_pb2 import Value
+
+from fedlearner_webconsole.proto.common_pb2 import Variable
+from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition, WorkflowDefinition
+from fedlearner_webconsole.utils.workflow import build_job_name, fill_variables, zip_workflow_variables
+
+
+def _make_config() -> WorkflowDefinition:
+    # Shared fixture: two workflow-level variables plus one job-level variable.
+    return WorkflowDefinition(
+        variables=[
+            Variable(name='test',
+                     value_type=Variable.ValueType.STRING,
+                     typed_value=Value(string_value='test_value')),
+            Variable(name='hello', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=1))
+        ],
+        job_definitions=[
+            JobDefinition(variables=[
+                Variable(
+                    name='hello_from_job', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=3))
+            ])
+        ])
+
+
+class UtilsTest(unittest.TestCase):
+
+    def test_build_job_name(self):
+        self.assertEqual(build_job_name('uuid', 'job_name'), 'uuid-job_name')
+
+    def test_zip_workflow_variables(self):
+        config = _make_config()
+        self.assertEqual(sum(1 for v in zip_workflow_variables(config)), 3)
+
+    def test_fill_variables(self):
+        config = _make_config()
+        variables = [
+            Variable(name='test',
+                     value_type=Variable.ValueType.STRING,
+                     typed_value=Value(string_value='new_test_value'))
+        ]
+        config = fill_variables(config, variables)
+        self.assertEqual(config.variables[0].typed_value.string_value, 'new_test_value')
+
+    def test_fill_variables_invalid(self):
+        config = _make_config()
+        variables = [Variable(name='test', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=2))]
+        with self.assertRaises(TypeError):
+            fill_variables(config, variables)
+
+    def test_fill_variables_dry_run(self):
+        config = _make_config()
+        variables = [
+            Variable(name='test',
+                     value_type=Variable.ValueType.STRING,
+                     typed_value=Value(string_value='new_test_value'))
+        ]
+        config = fill_variables(config, variables, dry_run=True)
+        self.assertEqual(config.variables[0].typed_value.string_value, 'test_value')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/workflow/BUILD.bazel
new file mode 100644
index 000000000..98dda7ae2
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow/BUILD.bazel
@@ -0,0 +1,342 @@
+load("@rules_python//python:defs.bzl", "py_test",
"py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "utils_test", + size = "small", + srcs = [ + "utils_test.py", + ], + imports = ["../.."], + main = "utils_test.py", + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + ":utils_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "resource_manager_lib", + srcs = ["resource_manager.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/job:service_lib", + "//web_console_v2/api/fedlearner_webconsole/job:yaml_formatter_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:metrics_lib", + 
"//web_console_v2/api/fedlearner_webconsole/utils:resource_name_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:workflow_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "service_test", + size = "small", + srcs = [ + "service_test.py", + ], + imports = ["../.."], + main = "service_test.py", + deps = [ + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "workflow_controller_lib", + srcs = ["workflow_controller.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/job:controller_lib", + "//web_console_v2/api/fedlearner_webconsole/notification:notification_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "workflow_controller_test", + size = "small", + srcs = [ + "workflow_controller_test.py", + ], + imports = ["../.."], + main = "workflow_controller_test.py", + deps = [ + ":workflow_controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/notification:notification_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "workflow_job_controller_lib", + srcs = [ + "workflow_job_controller.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":workflow_controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/two_pc:transaction_manager_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "workflow_job_controller_test", + size = "small", + srcs = [ + "workflow_job_controller_test.py", + ], + imports = ["../.."], + main = "workflow_job_controller_test.py", + deps = [ + ":workflow_job_controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", 
+ "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "cronjob_lib", + srcs = [ + "cronjob.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":workflow_job_controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + +py_test( + name = "cronjob_test", + size = "small", + srcs = [ + "cronjob_test.py", + ], + imports = ["../.."], + main = "cronjob_test.py", + deps = [ + ":cronjob_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:fake_lib", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "workflow_scheduler_lib", + srcs = [ + "workflow_scheduler.py", + ], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + ":workflow_job_controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:const_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "workflow_scheduler_test", + size = "small", + srcs = [ + "workflow_scheduler_test.py", + ], + imports = ["../.."], + main = "workflow_scheduler_test.py", + deps = [ + ":workflow_scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + ":workflow_job_controller_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:iam_required_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:permission_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:services_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc:client_lib", + "//web_console_v2/api/fedlearner_webconsole/scheduler:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + 
"//web_console_v2/api/fedlearner_webconsole/workflow_template:service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + data = [ + "//web_console_v2/api/testing/test_data", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/scheduler:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/apis.py b/web_console_v2/api/fedlearner_webconsole/workflow/apis.py index abc5c19a5..5e7012d59 100644 --- a/web_console_v2/api/fedlearner_webconsole/workflow/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/workflow/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -14,423 +14,698 @@ # pylint: disable=global-statement # coding: utf-8 -import logging import json -from uuid import uuid4 +import logging from http import HTTPStatus -from flask_restful import Resource, reqparse, request +from typing import Optional, List + +from flask_restful import Resource from google.protobuf.json_format import MessageToDict -from fedlearner_webconsole.composer.models import ItemStatus -from fedlearner_webconsole.utils.decorators import jwt_required -from fedlearner_webconsole.workflow.models import ( - Workflow, WorkflowState, TransactionState -) -from fedlearner_webconsole.job.yaml_formatter import generate_job_run_yaml -from fedlearner_webconsole.proto import common_pb2 -from fedlearner_webconsole.workflow_template.apis import \ - dict_to_workflow_definition +from sqlalchemy.orm import Session +from marshmallow import Schema, fields, validate, post_load + +from fedlearner_webconsole.audit.decorators import emits_event from fedlearner_webconsole.db import db -from fedlearner_webconsole.exceptions import ( - NotFoundException, ResourceConflictException, InvalidArgumentException, - InternalException, NoAccessException) -from fedlearner_webconsole.scheduler.scheduler import scheduler +from fedlearner_webconsole.exceptions import (NotFoundException, InvalidArgumentException, InternalException) +from fedlearner_webconsole.iam.permission import Permission +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.composer.composer import composer -from fedlearner_webconsole.workflow.cronjob import WorkflowCronJobItem -from fedlearner_webconsole.utils.metrics import emit_counter +from fedlearner_webconsole.scheduler.scheduler import scheduler +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator, use_kwargs +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.utils.flask_utils import download_json, get_current_user, make_flask_response, FilterExpField +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.workflow.service import WorkflowService, \ + ForkWorkflowParams, CreateNewWorkflowParams +from fedlearner_webconsole.workflow_template.service import \ + dict_to_workflow_definition + +from fedlearner_webconsole.iam.iam_required import iam_required +from fedlearner_webconsole.workflow.workflow_job_controller import start_workflow, stop_workflow, \ + invalidate_workflow_job +from fedlearner_webconsole.proto.audit_pb2 import Event -def _get_workflow(workflow_id) -> Workflow: - result = Workflow.query.filter_by(id=workflow_id).first() - if result is None: +def _get_workflow(workflow_id: int, project_id: int, session: Session) -> Workflow: + workflow_query = session.query(Workflow) + # project_id 0 means search in all projects + if project_id != 0: + workflow_query = workflow_query.filter_by(project_id=project_id) + workflow = workflow_query.filter_by(id=workflow_id).first() + if workflow is None: raise NotFoundException() - return result - -def start_or_stop_cronjob(batch_update_interval: 
int, workflow: Workflow): - """start a cronjob for workflow if batch_update_interval is valid - - Args: - batch_update_interval (int): restart workflow interval, unit is minutes - - Returns: - raise when workflow is_left is False - """ - item_name = f'workflow_cron_job_{workflow.id}' - batch_update_interval = batch_update_interval * 60 - if workflow.get_config().is_left and batch_update_interval > 0: - status = composer.get_item_status(name=item_name) - # create a cronjob - if not status: - composer.collect(name=item_name, - items=[WorkflowCronJobItem(workflow.id)], - metadata={}, - interval=batch_update_interval) - return - if status == ItemStatus.OFF: - raise InvalidArgumentException( - f'cannot set item [{item_name}], since item is off') - # patch a cronjob - try: - composer.patch_item_attr(name=item_name, - key='interval_time', - value=batch_update_interval) - except ValueError as err: - raise InvalidArgumentException(details=repr(err)) - - - elif batch_update_interval < 0: - composer.finish(name=item_name) - elif not workflow.get_config().is_left: - raise InvalidArgumentException('Only left can operate this') - else: - logging.info('skip cronjob since batch_update_interval is -1') - -def is_peer_job_inheritance_matched(workflow): - # TODO: Move it to workflow service - if workflow.forked_from is None: - return True - job_flags = workflow.get_create_job_flags() - peer_job_flags = workflow.get_peer_create_job_flags() - job_defs = workflow.get_config().job_definitions - project = workflow.project - if project is None: - return True - project_config = project.get_config() - # TODO: Fix for multi-peer - client = RpcClient(project_config, project_config.participants[0]) - parent_workflow = db.session.query(Workflow).get(workflow.forked_from) - resp = client.get_workflow(parent_workflow.name) - if resp.status.code != common_pb2.STATUS_SUCCESS: - emit_counter('get_workflow_failed', 1) - raise InternalException(resp.status.msg) - peer_job_defs = resp.config.job_definitions - for i, job_def in enumerate(job_defs): - if job_def.is_federated: - for j, peer_job_def in enumerate(peer_job_defs): - if job_def.name == peer_job_def.name: - if job_flags[i] != peer_job_flags[j]: - return False - return True + return workflow + + +class GetWorkflowsParameter(Schema): + keyword = fields.String(required=False, load_default=None) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) + states = fields.List(fields.String(required=False, + validate=validate.OneOf([ + 'completed', 'failed', 'stopped', 'running', 'warmup', 'pending', 'ready', + 'configuring', 'invalid' + ])), + required=False, + load_default=None) + favour = fields.Integer(required=False, load_default=None, validate=validate.OneOf([0, 1])) + uuid = fields.String(required=False, load_default=None) + name = fields.String(required=False, load_default=None) + template_revision_id = fields.Integer(required=False, load_default=None) + filter_exp = FilterExpField(data_key='filter', required=False, load_default=None) + + +class PostWorkflowsParameter(Schema): + name = fields.Str(required=True) + config = fields.Dict(required=True) + template_id = fields.Int(required=False, load_default=None) + template_revision_id = fields.Int(required=False, load_default=None) + forkable = fields.Bool(required=True) + forked_from = fields.Int(required=False, load_default=None) + create_job_flags = fields.List(required=False, load_default=None, cls_or_instance=fields.Int) + peer_create_job_flags = 
fields.List(required=False, load_default=None, cls_or_instance=fields.Int) + fork_proposal_config = fields.Dict(required=False, load_default=None) + comment = fields.Str(required=False, load_default=None) + cron_config = fields.Str(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['config'] = dict_to_workflow_definition(data['config']) + data['fork_proposal_config'] = dict_to_workflow_definition(data['fork_proposal_config']) + return data + + +class PutWorkflowParameter(Schema): + config = fields.Dict(required=True) + template_id = fields.Integer(required=False, load_default=None) + template_revision_id = fields.Integer(required=False, load_default=None) + forkable = fields.Boolean(required=True) + create_job_flags = fields.List(required=False, load_default=None, cls_or_instance=fields.Integer) + comment = fields.String(required=False, load_default=None) + cron_config = fields.String(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['config'] = dict_to_workflow_definition(data['config']) + return data + + +class PatchWorkflowParameter(Schema): + config = fields.Dict(required=False, load_default=None) + template_id = fields.Integer(required=False, load_default=None) + template_revision_id = fields.Integer(required=False, load_default=None) + forkable = fields.Boolean(required=False, load_default=None) + create_job_flags = fields.List(required=False, load_default=None, cls_or_instance=fields.Integer) + cron_config = fields.String(required=False, load_default=None) + favour = fields.Boolean(required=False, load_default=None) + metric_is_public = fields.Boolean(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['config'] = data['config'] and dict_to_workflow_definition(data['config']) + return data + + +class PatchPeerWorkflowParameter(Schema): + config = fields.Dict(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['config'] = data['config'] and dict_to_workflow_definition(data['config']) + return data -class WorkflowsApi(Resource): - @jwt_required() - def get(self): - result = Workflow.query - if 'project' in request.args and request.args['project'] is not None: - project_id = request.args['project'] - result = result.filter_by(project_id=project_id) - if 'keyword' in request.args and request.args['keyword'] is not None: - keyword = request.args['keyword'] - result = result.filter(Workflow.name.like( - '%{}%'.format(keyword))) - if 'uuid' in request.args and request.args['uuid'] is not None: - uuid = request.args['uuid'] - result = result.filter_by(uuid=uuid) - res = [] - for row in result.order_by(Workflow.created_at.desc()).all(): - try: - wf_dict = row.to_dict() - except Exception as e: # pylint: disable=broad-except - wf_dict = { - 'id': row.id, - 'name': row.name, - 'uuid': row.uuid, - 'error': f'Failed to get workflow state {repr(e)}' - } - res.append(wf_dict) - return {'data': res}, HTTPStatus.OK - - @jwt_required() - def post(self): - parser = reqparse.RequestParser() - parser.add_argument('name', required=True, help='name is empty') - parser.add_argument('project_id', type=int, required=True, - help='project_id is empty') - # TODO: should verify if the config is compatible with - # workflow template - parser.add_argument('config', type=dict, required=True, - help='config is empty') - parser.add_argument('forkable', type=bool, required=True, - help='forkable is empty') - parser.add_argument('forked_from', type=int, 
required=False, - help='fork from base workflow') - parser.add_argument('create_job_flags', type=list, required=False, - location='json', - help='flags in common.CreateJobFlag') - parser.add_argument('peer_create_job_flags', type=list, - required=False, location='json', - help='peer flags in common.CreateJobFlag') - parser.add_argument('fork_proposal_config', type=dict, required=False, - help='fork and edit peer config') - parser.add_argument('batch_update_interval', - type=int, - required=False, - help='interval for workflow cronjob in minute') - parser.add_argument('extra', - type=str, - required=False, - help='extra json string that needs send to peer') - - parser.add_argument('comment') - data = parser.parse_args() - name = data['name'] - if Workflow.query.filter_by(name=name).first() is not None: - raise ResourceConflictException( - 'Workflow {} already exists.'.format(name)) - - # form to proto buffer - template_proto = dict_to_workflow_definition(data['config']) - workflow = Workflow(name=name, - # 20 bytes - # a DNS-1035 label must start with an - # alphabetic character. substring uuid[:19] has - # no collision in 10 million draws - uuid=f'u{uuid4().hex[:19]}', - comment=data['comment'], - project_id=data['project_id'], - forkable=data['forkable'], - forked_from=data['forked_from'], - state=WorkflowState.NEW, - target_state=WorkflowState.READY, - transaction_state=TransactionState.READY, - extra=data['extra'] - ) - workflow.set_config(template_proto) - workflow.set_create_job_flags(data['create_job_flags']) - - if workflow.forked_from is not None: - fork_config = dict_to_workflow_definition( - data['fork_proposal_config']) - # TODO: more validations - if len(fork_config.job_definitions) != \ - len(template_proto.job_definitions): - raise InvalidArgumentException( - 'Forked workflow\'s template does not match base workflow') - workflow.set_fork_proposal_config(fork_config) - workflow.set_peer_create_job_flags(data['peer_create_job_flags']) - if not is_peer_job_inheritance_matched(workflow): - raise InvalidArgumentException('Forked workflow has federated \ - job with unmatched inheritance') - - db.session.add(workflow) - db.session.commit() - logging.info('Inserted a workflow to db') - scheduler.wakeup(workflow.id) - - # start cronjob every interval time - # should start after inserting to db - batch_update_interval = data['batch_update_interval'] - if batch_update_interval: - start_or_stop_cronjob(batch_update_interval, workflow) - - return {'data': workflow.to_dict()}, HTTPStatus.CREATED +class WorkflowsApi(Resource): -class WorkflowApi(Resource): - @jwt_required() - def get(self, workflow_id): - workflow = _get_workflow(workflow_id) - result = workflow.to_dict() - result['jobs'] = [job.to_dict() for job in workflow.get_jobs()] - result['owned_jobs'] = [job.to_dict() for job in workflow.owned_jobs] - result['config'] = None - if workflow.get_config() is not None: - result['config'] = MessageToDict( - workflow.get_config(), - preserving_proto_field_name=True, - including_default_value_fields=True) - return {'data': result}, HTTPStatus.OK - - @jwt_required() - def put(self, workflow_id): - parser = reqparse.RequestParser() - parser.add_argument('config', type=dict, required=True, - help='config is empty') - parser.add_argument('forkable', type=bool, required=True, - help='forkable is empty') - parser.add_argument('create_job_flags', type=list, required=False, - location='json', - help='flags in common.CreateJobFlag') - parser.add_argument( - 'batch_update_interval', - type=int, - 
required=False, - help='interval time for cronjob of workflow in minute') - parser.add_argument('comment') - data = parser.parse_args() - - workflow = _get_workflow(workflow_id) - if workflow.config: - raise ResourceConflictException( - 'Resetting workflow is not allowed') - - batch_update_interval = data['batch_update_interval'] - if batch_update_interval: - start_or_stop_cronjob(batch_update_interval, workflow) - - workflow.comment = data['comment'] - workflow.forkable = data['forkable'] - workflow.set_config(dict_to_workflow_definition(data['config'])) - workflow.set_create_job_flags(data['create_job_flags']) - workflow.update_target_state(WorkflowState.READY) - db.session.commit() - scheduler.wakeup(workflow_id) - logging.info('update workflow %d target_state to %s', - workflow.id, workflow.target_state) - return {'data': workflow.to_dict()}, HTTPStatus.OK - - @jwt_required() - def patch(self, workflow_id): - parser = reqparse.RequestParser() - parser.add_argument('target_state', type=str, required=False, - default=None, help='target_state is empty') - parser.add_argument('state', - type=str, - required=False, - help='state is empty') - parser.add_argument('forkable', type=bool) - parser.add_argument('metric_is_public', type=bool) - parser.add_argument('config', - type=dict, - required=False, - help='updated config') - parser.add_argument('create_job_flags', type=list, required=False, - location='json', - help='flags in common.CreateJobFlag') - parser.add_argument('batch_update_interval', - type=int, - required=False, - help='interval for restart workflow in minute') - data = parser.parse_args() - - workflow = _get_workflow(workflow_id) - - # start workflow every interval time - batch_update_interval = data['batch_update_interval'] - if batch_update_interval: - start_or_stop_cronjob(batch_update_interval, workflow) - - forkable = data['forkable'] - if forkable is not None: - workflow.forkable = forkable - db.session.flush() - - metric_is_public = data['metric_is_public'] - if metric_is_public is not None: - workflow.metric_is_public = metric_is_public - db.session.flush() - - target_state = data['target_state'] - if target_state: + @credentials_required + @use_kwargs(GetWorkflowsParameter(), location='query') + def get( + self, + page: Optional[int], + page_size: Optional[int], + name: Optional[str], + uuid: Optional[str], + keyword: Optional[str], + favour: Optional[bool], + template_revision_id: Optional[int], + states: Optional[List[str]], + filter_exp: Optional[FilterExpression], + project_id: int, + ): + """Get workflows. + --- + tags: + - workflow + description: Get workflows. + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + description: The ID of the project. 0 means get all workflows. + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + - in: query + name: name + schema: + type: string + - in: query + name: uuid + schema: + type: string + - in: query + name: keyword + schema: + type: string + - in: query + name: favour + schema: + type: boolean + - in: query + name: template_revision_id + schema: + type: integer + - in: query + name: states + schema: + type: array + collectionFormat: multi + items: + type: string + enum: [completed, failed, stopped, running, warmup, pending, ready, configuring, invalid] + - in: query + name: filter + schema: + type: string + responses: + 200: + description: list of workflows. 
+              content:
+                application/json:
+                  schema:
+                    type: array
+                    items:
+                      $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowRef'
+        """
+        with db.session_scope() as session:
+            result = session.query(Workflow)
+            if project_id != 0:
+                result = result.filter_by(project_id=project_id)
+            if name is not None:
+                result = result.filter_by(name=name)
+            if keyword is not None:
+                result = result.filter(Workflow.name.like(f'%{keyword}%'))
+            if uuid is not None:
+                result = result.filter_by(uuid=uuid)
+            if favour is not None:
+                result = result.filter_by(favour=favour)
+            if states is not None:
+                result = WorkflowService.filter_workflows(result, states)
+            if template_revision_id is not None:
+                result = result.filter_by(template_revision_id=template_revision_id)
+            if filter_exp is not None:
+                result = WorkflowService(session).build_filter_query(result, filter_exp)
+            result = result.order_by(Workflow.id.desc())
+            pagination = paginate(result, page, page_size)
+            res = []
+            for item in pagination.get_items():
+                try:
+                    wf_dict = to_dict(item.to_workflow_ref())
+                except Exception as e:  # pylint: disable=broad-except
+                    wf_dict = {
+                        'id': item.id,
+                        'name': item.name,
+                        'uuid': item.uuid,
+                        'error': f'Failed to get workflow state {repr(e)}'
+                    }
+                res.append(wf_dict)
+            # Works around MySQL 8's very slow SELECT COUNT(*):
+            # https://bugs.mysql.com/bug.php?id=97709
+            pagination.query = pagination.query.filter(Workflow.id > -1)
+            page_meta = pagination.get_metadata()
+            return make_flask_response(data=res, page_meta=page_meta)
+
+    @input_validator
+    @credentials_required
+    @iam_required(Permission.WORKFLOWS_POST)
+    @emits_event(resource_type=Event.ResourceType.WORKFLOW, audit_fields=['forkable'])
+    @use_kwargs(PostWorkflowsParameter(), location='json')
+    def post(
+            self,
+            name: str,
+            comment: Optional[str],
+            forkable: bool,
+            forked_from: Optional[int],
+            create_job_flags: Optional[List[int]],
+            peer_create_job_flags: Optional[List[int]],
+            # Peer config
+            fork_proposal_config: Optional[WorkflowDefinition],
+            template_id: Optional[int],
+            config: WorkflowDefinition,
+            cron_config: Optional[str],
+            template_revision_id: Optional[int],
+            project_id: int):
+        """Create workflows.
+        ---
+        tags:
+          - workflow
+        description: Creates a workflow.
+        parameters:
+        - in: path
+          description: The ID of the project.
+          name: project_id
+          required: true
+          schema:
+            type: integer
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/PostWorkflowsParameter'
+        responses:
+          201:
+            description: detail of the created workflow.
+ content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ + with db.session_scope() as session: + if forked_from: + params = ForkWorkflowParams(fork_from_id=forked_from, + fork_proposal_config=fork_proposal_config, + peer_create_job_flags=peer_create_job_flags) + else: + params = CreateNewWorkflowParams(project_id=project_id, + template_id=template_id, + template_revision_id=template_revision_id) try: - if WorkflowState[target_state] == WorkflowState.RUNNING: - for job in workflow.owned_jobs: - try: - generate_job_run_yaml(job) - # TODO: check if peer variables is valid - except Exception as e: # pylint: disable=broad-except - raise ValueError( - f'Invalid Variable when try ' - f'to format the job {job.name}:{str(e)}') - workflow.update_target_state(WorkflowState[target_state]) - db.session.flush() - logging.info('updated workflow %d target_state to %s', - workflow.id, workflow.target_state) + workflow = WorkflowService(session).create_workflow(name=name, + comment=comment, + forkable=forkable, + config=config, + create_job_flags=create_job_flags, + cron_config=cron_config, + params=params, + creator_username=get_current_user().username) except ValueError as e: raise InvalidArgumentException(details=str(e)) from e + session.commit() + logging.info('Inserted a workflow to db') + scheduler.wakeup(workflow.id) + return make_flask_response(data=workflow.to_proto(), status=HTTPStatus.CREATED) + - state = data['state'] - if state: +class WorkflowApi(Resource): + + @credentials_required + @use_kwargs({'download': fields.Bool(required=False, load_default=False)}, location='query') + def get(self, download: Optional[bool], project_id: int, workflow_id: int): + """Get workflow and with jobs. + --- + tags: + - workflow + description: Get workflow. + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + description: The ID of the project. 0 means get all workflows. + - in: path + name: workflow_id + schema: + type: integer + required: true + - in: query + name: download + schema: + type: boolean + responses: + 200: + description: detail of workflow. + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ + del project_id + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + if workflow is None: + raise NotFoundException(f'workflow {workflow_id} is not found') + result = workflow.to_proto() + result.jobs.extend([job.to_proto() for job in workflow.get_jobs(session)]) + if download: + return download_json(content=to_dict(result), filename=workflow.name) + return make_flask_response(data=result) + + @credentials_required + @iam_required(Permission.WORKFLOW_PUT) + @emits_event(resource_type=Event.ResourceType.WORKFLOW, audit_fields=['forkable']) + @use_kwargs(PutWorkflowParameter(), location='json') + def put(self, config: WorkflowDefinition, template_id: Optional[int], forkable: bool, + create_job_flags: Optional[List[int]], cron_config: Optional[str], comment: Optional[str], + template_revision_id: Optional[int], project_id: int, workflow_id: int): + """Config workflow. + --- + tags: + - workflow + description: Config workflow. + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + description: The ID of the project. + - in: path + name: workflow_id + required: true + schema: + type: integer + description: The ID of the workflow. 
+ requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/PutWorkflowParameter' + responses: + 200: + description: detail of workflow. + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ + with db.session_scope() as session: + workflow = _get_workflow(workflow_id, project_id, session) try: - assert state == 'INVALID', \ - 'Can only set state to INVALID for invalidation' - workflow.invalidate() - db.session.flush() - logging.info('invalidate workflow %d', workflow.id) + WorkflowService(session).config_workflow(workflow=workflow, + template_id=template_id, + config=config, + forkable=forkable, + comment=comment, + cron_config=cron_config, + create_job_flags=create_job_flags, + creator_username=get_current_user().username, + template_revision_id=template_revision_id) except ValueError as e: raise InvalidArgumentException(details=str(e)) from e - - config = data['config'] - if config: + session.commit() + scheduler.wakeup(workflow_id) + logging.info('update workflow %d target_state to %s', workflow.id, workflow.target_state) + return make_flask_response(data=workflow.to_proto()) + + @input_validator + @credentials_required + @iam_required(Permission.WORKFLOW_PATCH) + @emits_event(resource_type=Event.ResourceType.WORKFLOW, audit_fields=['forkable', 'metric_is_public']) + @use_kwargs(PatchWorkflowParameter(), location='json') + def patch(self, forkable: Optional[bool], metric_is_public: Optional[bool], config: Optional[WorkflowDefinition], + template_id: Optional[int], create_job_flags: Optional[List[int]], cron_config: Optional[str], + favour: Optional[bool], template_revision_id: Optional[int], project_id: int, workflow_id: int): + """Patch workflow. + --- + tags: + - workflow + description: Patch workflow. + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + description: The ID of the project. + - in: path + name: workflow_id + required: true + schema: + type: integer + description: The ID of the workflow. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/PatchWorkflowParameter' + responses: + 200: + description: detail of workflow. 
+ content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ + with db.session_scope() as session: + workflow = _get_workflow(workflow_id, project_id, session) try: - if workflow.target_state != WorkflowState.INVALID or \ - workflow.state not in \ - [WorkflowState.READY, WorkflowState.STOPPED]: - raise NoAccessException('Cannot edit running workflow') - config_proto = dict_to_workflow_definition(data['config']) - workflow.set_config(config_proto) - db.session.flush() + WorkflowService(session).patch_workflow(workflow=workflow, + forkable=forkable, + metric_is_public=metric_is_public, + config=config, + template_id=template_id, + create_job_flags=create_job_flags, + cron_config=cron_config, + favour=favour, + template_revision_id=template_revision_id) + session.commit() except ValueError as e: raise InvalidArgumentException(details=str(e)) from e - - create_job_flags = data['create_job_flags'] - if create_job_flags: - jobs = workflow.get_jobs() - if len(create_job_flags) != len(jobs): - raise InvalidArgumentException( - details='Number of job defs does not match number ' - f'of create_job_flags {len(jobs)} ' - f'vs {len(create_job_flags)}') - workflow.set_create_job_flags(create_job_flags) - flags = workflow.get_create_job_flags() - for i, job in enumerate(jobs): - if job.workflow_id == workflow.id: - job.is_disabled = flags[i] == \ - common_pb2.CreateJobFlag.DISABLED - - db.session.commit() - scheduler.wakeup(workflow.id) - return {'data': workflow.to_dict()}, HTTPStatus.OK + return make_flask_response(data=workflow.to_proto()) class PeerWorkflowsApi(Resource): - @jwt_required() - def get(self, workflow_id): - workflow = _get_workflow(workflow_id) - project_config = workflow.project.get_config() + + @credentials_required + def get(self, project_id: int, workflow_id: int): + """Get peer workflow and with jobs. + --- + tags: + - workflow + description: Get peer workflow. + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + - in: path + name: workflow_id + schema: + type: integer + required: true + responses: + 200: + description: detail of workflow. 
+ content: + application/json: + schema: + type: object + additionalProperties: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ peer_workflows = {} - for party in project_config.participants: - client = RpcClient(project_config, party) - # TODO(xiangyxuan): use uuid to identify the workflow - resp = client.get_workflow(workflow.name) - if resp.status.code != common_pb2.STATUS_SUCCESS: - raise InternalException(resp.status.msg) - peer_workflow = MessageToDict( - resp, - preserving_proto_field_name=True, - including_default_value_fields=True) - for job in peer_workflow['jobs']: - if 'pods' in job: - job['pods'] = json.loads(job['pods']) - peer_workflows[party.name] = peer_workflow - return {'data': peer_workflows}, HTTPStatus.OK - - @jwt_required() - def patch(self, workflow_id): - parser = reqparse.RequestParser() - parser.add_argument('config', type=dict, required=True, - help='new config for peer') - data = parser.parse_args() - config_proto = dict_to_workflow_definition(data['config']) - - workflow = _get_workflow(workflow_id) - project_config = workflow.project.get_config() + with db.session_scope() as session: + workflow = _get_workflow(workflow_id, project_id, session) + service = ParticipantService(session) + participants = service.get_platform_participants_by_project(workflow.project.id) + + for participant in participants: + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participant.domain_name) + # TODO(xiangyxuan): use uuid to identify the workflow + resp = client.get_workflow(workflow.uuid, workflow.name) + if resp.status.code != common_pb2.STATUS_SUCCESS: + raise InternalException(resp.status.msg) + peer_workflow = MessageToDict(resp, + preserving_proto_field_name=True, + including_default_value_fields=True) + for job in peer_workflow['jobs']: + if 'pods' in job: + job['pods'] = json.loads(job['pods']) + peer_workflows[participant.name] = peer_workflow + return make_flask_response(peer_workflows) + + @credentials_required + @iam_required(Permission.WORKFLOW_PATCH) + @use_kwargs(PatchPeerWorkflowParameter(), location='json') + def patch(self, config: WorkflowDefinition, project_id: int, workflow_id: int): + """Patch peer workflow. + --- + tags: + - workflow + description: patch peer workflow. + parameters: + - in: path + name: project_id + required: true + schema: + type: integer + - in: path + name: workflow_id + schema: + type: integer + required: true + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/definitions/PatchPeerWorkflowParameter' + responses: + 200: + description: detail of workflow. 
+ content: + application/json: + schema: + type: object + additionalProperties: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ peer_workflows = {} - for party in project_config.participants: - client = RpcClient(project_config, party) - resp = client.update_workflow( - workflow.name, config_proto) - if resp.status.code != common_pb2.STATUS_SUCCESS: - raise InternalException(resp.status.msg) - peer_workflows[party.name] = MessageToDict( - resp, - preserving_proto_field_name=True, - including_default_value_fields=True) - return {'data': peer_workflows}, HTTPStatus.OK + with db.session_scope() as session: + workflow = _get_workflow(workflow_id, project_id, session) + service = ParticipantService(session) + participants = service.get_platform_participants_by_project(workflow.project.id) + for participant in participants: + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participant.domain_name) + resp = client.update_workflow(workflow.uuid, workflow.name, config) + if resp.status.code != common_pb2.STATUS_SUCCESS: + raise InternalException(resp.status.msg) + peer_workflows[participant.name] = MessageToDict(resp, + preserving_proto_field_name=True, + including_default_value_fields=True) + return make_flask_response(peer_workflows) + + +class WorkflowInvalidateApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.WORKFLOW, op_type=Event.OperationType.INVALIDATE) + def post(self, project_id: int, workflow_id: int): + """Invalidates the workflow job. + --- + tags: + - workflow + description: Invalidates the workflow job. + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: workflow_id + schema: + type: integer + responses: + 200: + description: Invalidated workflow + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ + with db.session_scope() as session: + workflow = _get_workflow(workflow_id, project_id, session) + invalidate_workflow_job(session, workflow) + session.commit() + return make_flask_response(workflow.to_proto()) + + +class WorkflowStartApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.WORKFLOW, op_type=Event.OperationType.UPDATE) + def post(self, project_id: int, workflow_id: int): + """Starts the workflow job. + --- + tags: + - workflow + description: Starts the workflow job. + parameters: + - in: path + name: project_id + schema: + type: integer + - in: path + name: workflow_id + schema: + type: integer + responses: + 200: + description: Started workflow + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb' + """ + start_workflow(workflow_id) + with db.session_scope() as session: + workflow = _get_workflow(workflow_id, project_id, session) + return make_flask_response(workflow.to_proto()) + + +class WorkflowStopApi(Resource): + + @credentials_required + @emits_event(resource_type=Event.ResourceType.WORKFLOW, op_type=Event.OperationType.UPDATE) + def post(self, project_id: int, workflow_id: int): + """Stops the workflow job. + --- + tags: + - workflow + description: Stops the workflow job. 
+        parameters:
+        - in: path
+          name: project_id
+          schema:
+            type: integer
+        - in: path
+          name: workflow_id
+          schema:
+            type: integer
+        responses:
+          200:
+            description: Stopped workflow
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowPb'
+        """
+        stop_workflow(workflow_id)
+        with db.session_scope() as session:
+            workflow = _get_workflow(workflow_id, project_id, session)
+            return make_flask_response(workflow.to_proto())
 
 
 def initialize_workflow_apis(api):
-    api.add_resource(WorkflowsApi, '/workflows')
-    api.add_resource(WorkflowApi, '/workflows/<int:workflow_id>')
-    api.add_resource(PeerWorkflowsApi,
-                     '/workflows/<int:workflow_id>/peer_workflows')
+    api.add_resource(WorkflowsApi, '/projects/<int:project_id>/workflows')
+    api.add_resource(WorkflowApi, '/projects/<int:project_id>/workflows/<int:workflow_id>')
+    api.add_resource(PeerWorkflowsApi, '/projects/<int:project_id>/workflows/<int:workflow_id>/peer_workflows')
+    api.add_resource(WorkflowInvalidateApi, '/projects/<int:project_id>/workflows/<int:workflow_id>:invalidate')
+    api.add_resource(WorkflowStartApi, '/projects/<int:project_id>/workflows/<int:workflow_id>:start')
+    api.add_resource(WorkflowStopApi, '/projects/<int:project_id>/workflows/<int:workflow_id>:stop')
+
+    # if a schema is used, one has to append it to schema_manager so Swagger knows there is a schema available
+    schema_manager.append(PostWorkflowsParameter)
+    schema_manager.append(PutWorkflowParameter)
+    schema_manager.append(PatchWorkflowParameter)
+    schema_manager.append(PatchPeerWorkflowParameter)
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/apis_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/apis_test.py
new file mode 100644
index 000000000..27bbf363c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow/apis_test.py
@@ -0,0 +1,772 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
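
The route registrations above nest every workflow endpoint under a project and use a `:verb` suffix for the non-CRUD actions. As a rough orientation, a client call against these routes might look like the sketch below; the base URL is an assumption for illustration, and the authentication that `credentials_required` enforces is omitted:

```python
import requests

# Assumed local address of the web console API; not part of this change.
BASE = 'http://localhost:8080/api/v2'

# Start workflow 2 of project 1 via the custom-verb route registered above;
# a 200 response carries the updated WorkflowPb as JSON.
resp = requests.post(f'{BASE}/projects/1/workflows/2:start')
print(resp.status_code, resp.json())

# Peer workflows are nested under the same project-scoped path.
resp = requests.get(f'{BASE}/projects/1/workflows/2/peer_workflows')
```
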
+ +# coding: utf-8 +import random +import string +import time +import json +import unittest +import urllib.parse +from http import HTTPStatus +from pathlib import Path +from unittest.mock import (patch, call) +from google.protobuf.json_format import ParseDict +from fedlearner_webconsole.utils.const import SYSTEM_WORKFLOW_CREATOR_USERNAME +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.composer.models import ItemStatus +from fedlearner_webconsole.dataset.models import Dataset, DatasetType +from fedlearner_webconsole.participant.models import Participant, ProjectParticipant +from fedlearner_webconsole.proto.composer_pb2 import WorkflowCronJobInput, RunnerInput +from fedlearner_webconsole.proto.workflow_definition_pb2 import \ + WorkflowDefinition +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.scheduler.transaction import TransactionState +from fedlearner_webconsole.proto import (project_pb2, service_pb2, common_pb2) +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from testing.common import BaseTestCase + + +class WorkflowsApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + self.maxDiff = None + super().setUp() + # Inserts data + template1 = WorkflowTemplate(name='t1', comment='comment for t1', group_alias='g1') + template1.set_config(WorkflowDefinition(group_alias='g1',)) + workflow1 = Workflow(name='workflow_key_get1', + project_id=1, + state=WorkflowState.READY, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY, + creator=SYSTEM_WORKFLOW_CREATOR_USERNAME, + favour=True) + workflow2 = Workflow(name='workflow_key_get2', + project_id=2, + state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.COORDINATOR_COMMITTABLE) + workflow3 = Workflow(name='workflow_key_get3', project_id=2) + workflow4 = Workflow(name='workflow_key_get4', + project_id=4, + state=WorkflowState.INVALID, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY, + favour=True) + project = Project(id=123, name='project_123') + dataset1 = Dataset( + name='default dataset1', + dataset_type=DatasetType.STREAMING, + comment='test comment1', + path='/data/dataset/123', + project_id=1, + ) + with db.session_scope() as session: + session.add(project) + session.add(workflow1) + session.add(workflow2) + session.add(workflow3) + session.add(workflow4) + session.add(template1) + session.add(dataset1) + session.commit() + + def test_get_with_name(self): + response = self.get_helper('/api/v2/projects/0/workflows?name=workflow_key_get3') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'workflow_key_get3') + + def test_get_with_project(self): + response = self.get_helper('/api/v2/projects/1/workflows') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'workflow_key_get1') + + def test_get_with_keyword(self): + response = self.get_helper('/api/v2/projects/0/workflows?keyword=key') + 
self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(len(data), 4)
+        self.assertEqual(data[0]['name'], 'workflow_key_get4')
+
+    def test_get_with_states(self):
+        response = self.get_helper('/api/v2/projects/0/workflows?states=configuring&states=ready')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(len(data), 2)
+        self.assertEqual(data[0]['name'], 'workflow_key_get2')
+
+    def test_get_with_state_invalid(self):
+        response = self.get_helper('/api/v2/projects/0/workflows?states=invalid')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(len(data), 2)
+        self.assertEqual('workflow_key_get4', data[0]['name'])
+
+    def test_get_with_favour(self):
+        response = self.get_helper('/api/v2/projects/0/workflows?favour=1')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(len(data), 2)
+        self.assertEqual('workflow_key_get4', data[0]['name'])
+
+    def test_get_with_filter(self):
+        filter_exp = urllib.parse.quote('(system=true)')
+        response = self.get_helper(f'/api/v2/projects/0/workflows?filter={filter_exp}')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(len(data), 1)
+        self.assertEqual('workflow_key_get1', data[0]['name'])
+        filter_exp = urllib.parse.quote('(system=false)')
+        response = self.get_helper(f'/api/v2/projects/0/workflows?filter={filter_exp}')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(len(data), 3)
+
+    def test_get_workflows(self):
+        # Sleeps 1 second so that the new workflow's created_at is strictly later
+        time.sleep(1)
+        workflow = Workflow(name='last', project_id=1)
+        with db.session_scope() as session:
+            session.add(workflow)
+            session.commit()
+        response = self.get_helper('/api/v2/projects/0/workflows')
+        data = self.get_response_data(response)
+        self.assertEqual(data[0]['name'], 'last')
+
+    @patch('fedlearner_webconsole.workflow.apis.scheduler.wakeup')
+    @patch('fedlearner_webconsole.workflow.service.resource_uuid')
+    def test_create_new_workflow(self, mock_resource_uuid, mock_wakeup):
+        mock_resource_uuid.return_value = 'test-uuid'
+        with open(Path(__file__, '../../../testing/test_data/workflow_config.json').resolve(),
+                  encoding='utf-8') as workflow_config:
+            config = json.load(workflow_config)
+        # TODO(hangweiqiang): remove this in workflow test
+        extra = ''.join(random.choice(string.ascii_lowercase) for _ in range(10))
+        # extra should be a valid json string so we mock one
+        extra = f'{{"parent_job_name":"{extra}"}}'
+
+        local_extra = ''.join(random.choice(string.ascii_lowercase) for _ in range(10))
+        # local_extra should be a valid json string so we mock one
+        local_extra = f'{{"model_desc":"{local_extra}"}}'
+
+        workflow = {
+            'name': 'test-workflow',
+            'project_id': 1234567,
+            'forkable': True,
+            'comment': 'test-comment',
+            'config': config,
+            'extra': extra,
+            'local_extra': local_extra,
+            'template_id': 1,
+            'template_revision_id': 1
+        }
+        response = self.post_helper('/api/v2/projects/1234567/workflows', data=workflow)
+        self.assertEqual(response.status_code, HTTPStatus.CREATED)
+        created_workflow = self.get_response_data(response)
+        # Check scheduler
+        mock_wakeup.assert_called_once_with(created_workflow['id'])
+        self.assertIsNotNone(created_workflow['id'])
+
self.assertIsNotNone(created_workflow['created_at']) + self.assertIsNotNone(created_workflow['updated_at']) + self.assertResponseDataEqual(response, { + 'cron_config': '', + 'name': 'test-workflow', + 'project_id': 1234567, + 'forkable': True, + 'forked_from': 0, + 'is_local': False, + 'metric_is_public': False, + 'comment': 'test-comment', + 'state': 'PARTICIPANT_CONFIGURING', + 'create_job_flags': [1, 1, 1], + 'peer_create_job_flags': [], + 'job_ids': [1, 2, 3], + 'uuid': 'test-uuid', + 'template_revision_id': 1, + 'template_id': 1, + 'creator': 'ada', + 'favour': False, + 'jobs': [] + }, + ignore_fields=[ + 'id', 'created_at', 'updated_at', 'start_at', 'stop_at', 'config', + 'editor_info', 'template_info' + ]) + # Check DB + with db.session_scope() as session: + self.assertEqual(len(session.query(Workflow).all()), 5) + + # Post again + mock_wakeup.reset_mock() + response = self.post_helper('/api/v2/projects/1234567/workflows', data=workflow) + self.assertEqual(response.status_code, HTTPStatus.CONFLICT) + # Check mock + mock_wakeup.assert_not_called() + # Check DB + with db.session_scope() as session: + self.assertEqual(len(session.query(Workflow).all()), 5) + + @patch('fedlearner_webconsole.participant.services.ParticipantService.get_platform_participants_by_project') + @patch('fedlearner_webconsole.workflow.utils.is_peer_job_inheritance_matched') + def test_fork_local_workflow(self, mock_is_peer_job_inheritance_matched, mock_get_platform_participants_by_project): + config = { + 'groupAlias': 'test', + 'job_definitions': [{ + 'name': 'raw-data-job', + 'is_federated': False, + 'yaml_template': '{}', + }] + } + config_proto = ParseDict(config, WorkflowDefinition()) + with db.session_scope() as session: + project = Project(name='test project') + session.add(project) + template = WorkflowTemplate(group_alias='test') + template.set_config(config_proto) + session.add(template) + session.flush() + parent_workflow = Workflow(name='local-workflow', + state=WorkflowState.READY, + forkable=True, + project_id=project.id, + template_id=template.id, + template_revision_id=1) + parent_workflow.set_config(config_proto) + session.add(parent_workflow) + session.commit() + + fork_request = { + 'name': 'test-fork-local-workflow', + 'project_id': project.id, + 'forkable': True, + 'config': config, + 'comment': 'test-comment', + 'forked_from': parent_workflow.id, + 'fork_proposal_config': config, + } + response = self.post_helper(f'/api/v2/projects/{project.id}/workflows', data=fork_request) + mock_get_platform_participants_by_project.assert_not_called() + mock_is_peer_job_inheritance_matched.assert_not_called() + self.assertEqual(response.status_code, HTTPStatus.CREATED) + self.assertResponseDataEqual( + response, { + 'name': 'test-fork-local-workflow', + 'project_id': project.id, + 'template_id': template.id, + 'template_revision_id': 1, + 'comment': 'test-comment', + 'metric_is_public': False, + 'create_job_flags': [1], + 'job_ids': [1], + 'forkable': True, + 'forked_from': parent_workflow.id, + 'peer_create_job_flags': [], + 'state': 'PARTICIPANT_CONFIGURING', + 'start_at': 0, + 'stop_at': 0, + 'cron_config': '', + 'is_local': True, + 'creator': 'ada', + 'favour': False, + }, + ignore_fields=['id', 'uuid', 'created_at', 'updated_at', 'config', 'template_info', 'editor_info', 'jobs']) + + @patch('fedlearner_webconsole.participant.services.ParticipantService.get_platform_participants_by_project') + @patch('fedlearner_webconsole.workflow.service.is_peer_job_inheritance_matched') + def 
test_fork_workflow(self, mock_is_peer_job_inheritance_matched, mock_get_platform_participants_by_project):
+        # Prepares data
+        with open(Path(__file__, '../../../testing/test_data/workflow_config.json').resolve(),
+                  encoding='utf-8') as workflow_config:
+            config = json.load(workflow_config)
+        with db.session_scope() as session:
+            project = Project(id=1, name='test project')
+            session.add(project)
+            config_proto = ParseDict(config, WorkflowDefinition())
+            template = WorkflowTemplate(name='parent-template', group_alias=config['group_alias'])
+            template.set_config(config_proto)
+            session.add(template)
+            session.flush()
+            parent_workflow = Workflow(name='parent_workflow',
+                                       project_id=1,
+                                       template_id=template.id,
+                                       state=WorkflowState.READY)
+            parent_workflow.set_config(config_proto)
+            session.add(parent_workflow)
+            session.commit()
+        fork_request = {
+            'name': 'test-fork-workflow',
+            'project_id': project.id,
+            'forkable': True,
+            'config': config,
+            'comment': 'test-comment',
+            'forked_from': parent_workflow.id,
+            'fork_proposal_config': config,
+            'peer_create_job_flags': [1, 1, 1],
+        }
+        # By default it is not forkable
+        response = self.post_helper(f'/api/v2/projects/{project.id}/workflows', data=fork_request)
+        self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
+        self.assertEqual(json.loads(response.data)['details'], 'workflow not forkable')
+
+        # Forks after parent workflow is forkable
+        with db.session_scope() as session:
+            parent_workflow = session.query(Workflow).get(parent_workflow.id)
+            parent_workflow.forkable = True
+            session.commit()
+        mock_get_platform_participants_by_project.return_value = None
+        mock_is_peer_job_inheritance_matched.return_value = True
+        response = self.post_helper(f'/api/v2/projects/{project.id}/workflows', data=fork_request)
+        mock_is_peer_job_inheritance_matched.assert_called_once()
+        self.assertEqual(response.status_code, HTTPStatus.CREATED)
+        self.assertResponseDataEqual(response, {
+            'cron_config': '',
+            'name': 'test-fork-workflow',
+            'project_id': project.id,
+            'forkable': True,
+            'forked_from': parent_workflow.id,
+            'is_local': False,
+            'metric_is_public': False,
+            'comment': 'test-comment',
+            'state': 'PARTICIPANT_CONFIGURING',
+            'create_job_flags': [1, 1, 1],
+            'peer_create_job_flags': [1, 1, 1],
+            'job_ids': [1, 2, 3],
+            'template_id': template.id,
+            'template_revision_id': 0,
+            'creator': 'ada',
+            'favour': False,
+        },
+                                     ignore_fields=[
+                                         'id', 'created_at', 'updated_at', 'start_at', 'stop_at', 'uuid', 'config',
+                                         'editor_info', 'template_info', 'jobs'
+                                     ])
+
+    @patch('fedlearner_webconsole.composer.composer_service.ComposerService.get_item_status')
+    @patch('fedlearner_webconsole.composer.composer_service.ComposerService.collect_v2')
+    @patch('fedlearner_webconsole.workflow.apis.scheduler.wakeup')
+    def test_post_cron_job(self, mock_wakeup, mock_collect, mock_get_item_status):
+        mock_get_item_status.return_value = None
+        with open(Path(__file__, '../../../testing/test_data/workflow_config.json').resolve(),
+                  encoding='utf-8') as workflow_config:
+            config = json.load(workflow_config)
+        workflow = {
+            'name': 'test-workflow-left',
+            'project_id': 123,
+            'forkable': True,
+            'config': config,
+            'cron_config': '*/10 * * * *',
+            'template_id': 1
+        }
+        response = self.post_helper('/api/v2/projects/123/workflows', data=workflow)
+        self.assertEqual(response.status_code, HTTPStatus.CREATED)
+
+        with open(Path(__file__, '../../../testing/test_data/workflow_config_right.json').resolve(),
+                  encoding='utf-8') as workflow_config:
+            config = json.load(workflow_config)
+        workflow = {
+            'name': 'test-workflow-right',
+            'project_id': 1234567,
+            'forkable': True,
+            'config': config,
+            'cron_config': '*/10 * * * *',
+        }
+        response = self.post_helper('/api/v2/projects/1234567/workflows', data=workflow)
+        self.assertEqual(response.status_code, HTTPStatus.CREATED)
+
+        mock_collect.assert_called()
+        mock_wakeup.assert_called()
+
+
+class WorkflowApiTest(BaseTestCase):
+
+    def setUp(self):
+        super().setUp()
+        with db.session_scope() as session:
+            self._project = Project(id=123, name='project_123')
+            self._template1 = WorkflowTemplate(name='t1', comment='comment for t1', group_alias='g1')
+            self._template1.set_config(WorkflowDefinition(group_alias='g1',))
+            session.add(self._project)
+            session.add(self._template1)
+            session.commit()
+        self.signin_as_admin()
+
+    def test_get_workflow(self):
+        workflow = Workflow(name='test-workflow',
+                            project_id=self._project.id,
+                            config=WorkflowDefinition(group_alias='g1',).SerializeToString(),
+                            template_id=self._template1.id,
+                            forkable=False,
+                            state=WorkflowState.RUNNING,
+                            job_ids='1')
+        job1 = Job(name='job 1', workflow_id=3, project_id=self._project.id, job_type=JobType.RAW_DATA)
+        with db.session_scope() as session:
+            session.add(workflow)
+            session.add(job1)
+            session.commit()
+
+        response = self.get_helper(f'/api/v2/projects/{self._project.id}/workflows/{workflow.id}')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        workflow_data = self.get_response_data(response)
+        self.assertEqual(workflow_data['name'], 'test-workflow')
+        self.assertEqual(len(workflow_data['jobs']), 1)
+        self.assertEqual(workflow_data['jobs'][0]['name'], 'job 1')
+        response = self.get_helper(f'/api/v2/projects/{self._project.id}/workflows/6666')
+        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
+
+    @patch('fedlearner_webconsole.scheduler.scheduler.Scheduler.wakeup')
+    def test_put_successfully(self, mock_wake_up):
+        config = {
+            'variables': [{
+                'name': 'namespace',
+                'value': 'leader'
+            }, {
+                'name': 'basic_envs',
+                'value': '{}'
+            }, {
+                'name': 'storage_root_dir',
+                'value': '/'
+            }]
+        }
+        with db.session_scope() as session:
+            project = Project(id=1,
+                              name='test',
+                              config=ParseDict(config, project_pb2.ProjectConfig()).SerializeToString())
+            participant = Participant(name='party_leader', host='127.0.0.1', port=5000, domain_name='fl-leader.com')
+            relationship = ProjectParticipant(project_id=1, participant_id=1)
+            session.add(project)
+            session.add(participant)
+            session.add(relationship)
+            workflow = Workflow(name='test-workflow',
+                                project_id=project.id,
+                                state=WorkflowState.NEW,
+                                transaction_state=TransactionState.PARTICIPANT_PREPARE,
+                                target_state=WorkflowState.READY)
+            session.add(workflow)
+            session.commit()
+
+        response = self.put_helper(f'/api/v2/projects/{project.id}/workflows/{workflow.id}',
+                                   data={
+                                       'forkable': True,
+                                       'config': {
+                                           'group_alias': 'test-template'
+                                       },
+                                       'comment': 'test comment',
+                                       'template_id': 1,
+                                       'template_revision_id': 1
+                                   })
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        mock_wake_up.assert_called_with(workflow.id)
+        with db.session_scope() as session:
+            updated_workflow = session.query(Workflow).get(workflow.id)
+            self.assertIsNotNone(updated_workflow.config)
+            self.assertTrue(updated_workflow.forkable)
+            self.assertEqual(updated_workflow.comment, 'test comment')
+            self.assertEqual(updated_workflow.target_state, WorkflowState.READY)
+            self.assertEqual(updated_workflow.template_revision_id, 1)
+
+    def test_put_resetting(self):
+        with
db.session_scope() as session: + project_id = 123 + workflow = Workflow( + name='test-workflow', + project_id=project_id, + config=WorkflowDefinition(group_alias='test-template').SerializeToString(), + state=WorkflowState.NEW, + ) + session.add(workflow) + session.commit() + session.refresh(workflow) + + response = self.put_helper(f'/api/v2/projects/{project_id}/workflows/{workflow.id}', + data={ + 'forkable': True, + 'config': { + 'group_alias': 'test-template' + }, + 'template_id': 1 + }) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + @patch('fedlearner_webconsole.composer.composer_service.ComposerService.get_item_status') + @patch('fedlearner_webconsole.composer.composer_service.ComposerService.patch_item_attr') + @patch('fedlearner_webconsole.composer.composer_service.ComposerService.finish') + @patch('fedlearner_webconsole.composer.composer_service.ComposerService.collect_v2') + def test_patch_cron_config(self, mock_collect, mock_finish, mock_patch_item, mock_get_item_status): + mock_get_item_status.side_effect = [None, ItemStatus.ON] + project_id = 123 + workflow = Workflow( + name='test-workflow-left', + project_id=project_id, + config=WorkflowDefinition().SerializeToString(), + forkable=False, + state=WorkflowState.STOPPED, + ) + cron_config = '*/20 * * * *' + with db.session_scope() as session: + session.add(workflow) + session.commit() + session.refresh(workflow) + + # test create cronjob + response = self.patch_helper(f'/api/v2/projects/{project_id}/workflows/{workflow.id}', + data={'cron_config': cron_config}) + self.assertEqual(response.status_code, HTTPStatus.OK) + + mock_collect.assert_called_with( + name=f'workflow_cron_job_{workflow.id}', + items=[(ItemType.WORKFLOW_CRON_JOB, + RunnerInput(workflow_cron_job_input=WorkflowCronJobInput(workflow_id=workflow.id)))], + cron_config=cron_config) + + # patch new config for cronjob + cron_config = '*/30 * * * *' + response = self.patch_helper(f'/api/v2/projects/{project_id}/workflows/{workflow.id}', + data={'cron_config': cron_config}) + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_patch_item.assert_called_with(name=f'workflow_cron_job_{workflow.id}', + key='cron_config', + value=cron_config) + + # test stop cronjob + response = self.patch_helper(f'/api/v2/projects/{project_id}/workflows/{workflow.id}', data={'cron_config': ''}) + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_finish.assert_called_with(name=f'workflow_cron_job_{workflow.id}') + + def test_patch_not_found(self): + response = self.patch_helper('/api/v2/projects/123/workflows/1', data={'target_state': 'RUNNING'}) + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + def test_patch_create_job_flags(self): + with db.session_scope() as session: + workflow, job = add_fake_workflow(session) + job_id = job.id + response = self.patch_helper(f'/api/v2/projects/{workflow.project_id}/workflows/{workflow.id}', + data={'create_job_flags': [3]}) + self.assertEqual(response.status_code, HTTPStatus.OK) + with db.session_scope() as session: + patched_job = session.query(Job).get(job_id) + self.assertEqual(patched_job.is_disabled, True) + response = self.patch_helper(f'/api/v2/projects/{workflow.project_id}/workflows/{workflow.id}', + data={'create_job_flags': [1]}) + self.assertEqual(response.status_code, HTTPStatus.OK) + with db.session_scope() as session: + patched_job = session.query(Job).get(job_id) + self.assertEqual(patched_job.is_disabled, False) + + def test_patch_favour(self): + with db.session_scope() as 
session:
+            workflow, job = add_fake_workflow(session)
+        response = self.patch_helper(f'/api/v2/projects/{workflow.project_id}/workflows/{workflow.id}',
+                                     data={'favour': True})
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(data['favour'], True)
+        with db.session_scope() as session:
+            workflow = session.query(Workflow).get(workflow.id)
+            self.assertEqual(workflow.favour, True)
+
+    def test_patch_template(self):
+        with db.session_scope() as session:
+            workflow, job = add_fake_workflow(session)
+        response = self.patch_helper(f'/api/v2/projects/{workflow.project_id}/workflows/{workflow.id}',
+                                     data={
+                                         'config': to_dict(workflow.get_config()),
+                                         'template_revision_id': 1
+                                     })
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(data['template_revision_id'], 1)
+        with db.session_scope() as session:
+            workflow = session.query(Workflow).get(workflow.id)
+            self.assertEqual(workflow.template_revision_id, 1)
+
+    def test_is_local(self):
+        with db.session_scope() as session:
+            workflow, job = add_fake_workflow(session)
+            self.assertTrue(workflow.is_local())
+            config = workflow.get_config()
+            config.job_definitions[0].is_federated = True
+            workflow.set_config(config)
+            self.assertFalse(workflow.is_local())
+
+
+class WorkflowInvalidateApiTest(BaseTestCase):
+
+    class Config(BaseTestCase.Config):
+        START_SCHEDULER = False
+
+    def setUp(self):
+        super().setUp()
+        with db.session_scope() as session:
+            project = Project(id=1, name='project_1')
+            participant1 = Participant(name='party_1', id=1, host='127.0.0.1', port=1997, domain_name='fl-party1.com')
+
+            participant2 = Participant(name='party_2', id=2, host='127.0.0.1', port=1998, domain_name='fl-party2.com')
+            relationship1 = ProjectParticipant(project_id=1, participant_id=1)
+            relationship2 = ProjectParticipant(project_id=1, participant_id=2)
+            ready_workflow = Workflow(name='workflow_invalidate1',
+                                      project_id=1,
+                                      uuid='11111',
+                                      state=WorkflowState.NEW,
+                                      target_state=WorkflowState.READY,
+                                      transaction_state=TransactionState.READY)
+            session.add(project)
+            session.add(participant1)
+            session.add(participant2)
+            session.add(relationship1)
+            session.add(relationship2)
+            session.add(ready_workflow)
+            session.commit()
+        self.signin_as_admin()
+
+    @patch('fedlearner_webconsole.rpc.client.RpcClient.invalidate_workflow')
+    def test_invalidate_after_created(self, mock_invalidate_workflow):
+        mock_invalidate_workflow.return_value = service_pb2.InvalidateWorkflowResponse(
+            status=common_pb2.Status(code=common_pb2.STATUS_SUCCESS, msg=''),
+            succeeded=True,
+        )
+        response = self.post_helper('/api/v2/projects/1/workflows/1:invalidate')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        expected = [call('11111'), call('11111')]
+        self.assertEqual(mock_invalidate_workflow.call_args_list, expected)
+        response = self.get_helper('/api/v2/projects/0/workflows/1')
+        self.assertEqual(response.status_code, HTTPStatus.OK)
+        data = self.get_response_data(response)
+        self.assertEqual(data['state'], WorkflowState.INVALID.name)
+
+
+class WorkflowStartAndStopApiTest(BaseTestCase):
+
+    class Config(BaseTestCase.Config):
+        START_SCHEDULER = False
+
+    def setUp(self):
+        super().setUp()
+        with db.session_scope() as session:
+            project = Project(id=1, name='project_1')
+            participant1 = Participant(name='party_1', id=1, host='127.0.0.1', port=1997, domain_name='fl-party1.com')
+
+            participant2 = Participant(name='party_2', id=2,
host='127.0.0.1', port=1998, domain_name='fl-party2.com') + relationship1 = ProjectParticipant(project_id=1, participant_id=1) + relationship2 = ProjectParticipant(project_id=1, participant_id=2) + workflow_test_start_fed = Workflow(name='workflow_test_start_fed', + project_id=1, + uuid='11111', + state=WorkflowState.READY, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY) + workflow_test_stop_fed = Workflow(name='workflow_test_stop_fed', + project_id=1, + uuid='22222', + state=WorkflowState.RUNNING, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY) + workflow_test_start_local = Workflow(name='workflow_test_start_local', + project_id=1, + uuid='33333', + state=WorkflowState.STOPPED, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY) + workflow_test_stop_local = Workflow(name='workflow_test_stop_local', + project_id=1, + uuid='44444', + state=WorkflowState.RUNNING, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY) + session.add(project) + session.add(participant1) + session.add(participant2) + session.add(relationship1) + session.add(relationship2) + session.add(workflow_test_start_fed) + session.add(workflow_test_stop_fed) + session.add(workflow_test_start_local) + session.add(workflow_test_stop_local) + session.commit() + self.signin_as_admin() + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager.run') + def test_start_workflow_fed(self, mock_run): + mock_run.return_value = (True, '') + response = self.post_helper('/api/v2/projects/1/workflows/1:start') + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_run.assert_called_once() + + @patch('fedlearner_webconsole.two_pc.transaction_manager.TransactionManager.run') + def test_stop_workflow_fed(self, mock_run): + mock_run.return_value = (True, '') + response = self.post_helper('/api/v2/projects/1/workflows/2:stop') + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_run.assert_called_once() + + @patch('fedlearner_webconsole.workflow.models.Workflow.is_local') + @patch('fedlearner_webconsole.workflow.workflow_job_controller.start_workflow_locally') + def test_start_workflow_local(self, mock_start_workflow_locally, mock_is_local): + mock_is_local.return_value = True + response = self.post_helper('/api/v2/projects/1/workflows/3:start') + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_start_workflow_locally.assert_called_once() + + @patch('fedlearner_webconsole.workflow.models.Workflow.is_local') + @patch('fedlearner_webconsole.workflow.workflow_job_controller.stop_workflow_locally') + def test_stop_workflow_local(self, mock_stop_workflow_locally, mock_is_local): + mock_is_local.return_value = True + response = self.post_helper('/api/v2/projects/1/workflows/4:stop') + self.assertEqual(response.status_code, HTTPStatus.OK) + mock_stop_workflow_locally.assert_called_once() + response = self.post_helper('/api/v2/projects/1/workflows/4:stop') + self.assertEqual(response.status_code, HTTPStatus.OK) + + +def add_fake_workflow(session): + wd = WorkflowDefinition() + jd = wd.job_definitions.add() + jd.yaml_template = '{}' + workflow = Workflow( + name='test-workflow', + project_id=123, + config=wd.SerializeToString(), + forkable=False, + state=WorkflowState.READY, + ) + session.add(workflow) + session.flush() + job = Job(name='test_job', + job_type=JobType(1), + config=jd.SerializeToString(), + workflow_id=workflow.id, + project_id=123, + state=JobState.STOPPED, + 
is_disabled=False) + session.add(job) + session.flush() + workflow.job_ids = str(job.id) + session.commit() + return workflow, job + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/cronjob.py b/web_console_v2/api/fedlearner_webconsole/workflow/cronjob.py index 184393e32..58df1d82e 100644 --- a/web_console_v2/api/fedlearner_webconsole/workflow/cronjob.py +++ b/web_console_v2/api/fedlearner_webconsole/workflow/cronjob.py @@ -1,94 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# # coding: utf-8 - +import logging from typing import Tuple -from time import sleep - -from fedlearner_webconsole.composer.interface import IItem, IRunner, ItemType -from fedlearner_webconsole.composer.models import Context, RunnerStatus -from fedlearner_webconsole.db import get_session -from fedlearner_webconsole.workflow.models import Workflow, WorkflowState - - -class WorkflowCronJobItem(IItem): - def __init__(self, task_id: int): - self.id = task_id - def type(self) -> ItemType: - return ItemType.WORKFLOW_CRON_JOB +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, WorkflowCronJobOutput +from fedlearner_webconsole.workflow.models import Workflow, WorkflowExternalState +from fedlearner_webconsole.workflow.workflow_job_controller import start_workflow - def get_id(self) -> int: - return self.id - def __eq__(self, obj: IItem): - return self.id == obj.id and self.type() == obj.type() - - -class WorkflowCronJob(IRunner): - """ start workflow every intervals +class WorkflowCronJob(IRunnerV2): + """Starts workflow periodically. """ - def __init__(self, task_id: int): - self._workflow_id = task_id - self._msg = None - - def start(self, context: Context): - with get_session(context.db_engine) as session: - try: - workflow: Workflow = session.query(Workflow).filter_by( - id=self._workflow_id).one() - # TODO: This is a hack!!! 
Templatelly use this method - # cc @hangweiqiang: Transaction State Refactor - state = workflow.get_state_for_frontend() - if state in ('COMPLETED', 'FAILED', 'READY', 'STOPPED', 'NEW'): - if state in ('COMPLETED', 'FAILED'): - workflow.update_target_state( - target_state=WorkflowState.STOPPED) - session.commit() - # check workflow stopped - # TODO: use composer timeout cc @yurunyu - for _ in range(24): - # use session refresh to get the latest info - # otherwise it'll use the indentity map locally - session.refresh(workflow) - if workflow.state == WorkflowState.STOPPED: - break - sleep(5) - else: - self._msg = f'failed to stop \ - workflow[{self._workflow_id}]' - return - workflow.update_target_state( - target_state=WorkflowState.RUNNING) - session.commit() - self._msg = f'restarted workflow[{self._workflow_id}]' - elif state == 'RUNNING': - self._msg = f'skip restarting workflow[{self._workflow_id}]' - elif state == 'INVALID': - self._msg = f'current workflow[{self._workflow_id}] \ - is invalid' - else: - self._msg = f'workflow[{self._workflow_id}] \ - state is {state}, which is out of expection' - - except Exception as err: # pylint: disable=broad-except - self._msg = f'exception of workflow[{self._workflow_id}], \ - details is {err}' - - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - del context # unused by result - if self._msg is None: - return RunnerStatus.RUNNING, {} - output = {'msg': self._msg} - return RunnerStatus.DONE, output + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + output = WorkflowCronJobOutput() + with db.session_scope() as session: + workflow_id = context.input.workflow_cron_job_input.workflow_id + workflow: Workflow = session.query(Workflow).get(workflow_id) + state = workflow.get_state_for_frontend() + logging.info(f'[WorkflowCronJob] Try to start workflow {workflow_id}, state: {state.name}') + if state in (WorkflowExternalState.READY_TO_RUN, WorkflowExternalState.COMPLETED, + WorkflowExternalState.FAILED, WorkflowExternalState.STOPPED): + start_workflow(workflow_id) + output.message = 'Restarted workflow' + else: + output.message = f'Skip starting workflow, state is {state.name}' + return RunnerStatus.DONE, RunnerOutput(workflow_cron_job_output=output) diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/cronjob_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/cronjob_test.py new file mode 100644 index 000000000..113036f11 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/cronjob_test.py @@ -0,0 +1,107 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
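
The rewritten runner above is a stateless one-shot: the composer hands it the workflow id through a typed `RunnerInput` proto and schedules invocations by cron expression, instead of the old poll-and-sleep loop. A minimal sketch of registering such a cron job, mirroring the `collect_v2` call that the tests in this change assert on (the workflow id 42 is a placeholder; a matching `Workflow` row is assumed to exist):

```python
from fedlearner_webconsole.composer.composer_service import ComposerService
from fedlearner_webconsole.composer.interface import ItemType
from fedlearner_webconsole.db import db
from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, WorkflowCronJobInput

workflow_id = 42  # placeholder; assumes this workflow exists
with db.session_scope() as session:
    # Registers a composer item that fires WorkflowCronJob every 10 minutes.
    ComposerService(session).collect_v2(
        name=f'workflow_cron_job_{workflow_id}',
        items=[(ItemType.WORKFLOW_CRON_JOB,
                RunnerInput(workflow_cron_job_input=WorkflowCronJobInput(workflow_id=workflow_id)))],
        cron_config='*/10 * * * *')
    session.commit()
```
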
+ +# coding: utf-8 +import unittest +from datetime import datetime +from unittest.mock import patch, Mock + +from sqlalchemy import and_ + +from fedlearner_webconsole.composer.composer import ComposerConfig, Composer +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.composer.models import RunnerStatus, SchedulerItem, SchedulerRunner +from fedlearner_webconsole.composer.composer_service import ComposerService +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, WorkflowCronJobInput +from fedlearner_webconsole.workflow.cronjob import WorkflowCronJob +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from testing.no_web_server_test_case import NoWebServerTestCase +from testing.fake_time_patcher import FakeTimePatcher + + +class CronJobTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + self.time_patcher = FakeTimePatcher() + self.time_patcher.start(datetime(2012, 1, 14, 12, 0, 5)) + + self.test_id = 8848 + workflow = Workflow(id=self.test_id, state=WorkflowState.RUNNING) + with db.session_scope() as session: + session.add(workflow) + session.commit() + + def tearDown(self): + self.time_patcher.stop() + + super().tearDown() + + def test_run_skip_running_workflow(self): + workflow_id = 123 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, state=WorkflowState.RUNNING) + session.add(workflow) + session.commit() + + context = RunnerContext(0, RunnerInput(workflow_cron_job_input=WorkflowCronJobInput(workflow_id=workflow_id))) + runner = WorkflowCronJob() + status, output = runner.run(context) + self.assertEqual(status, RunnerStatus.DONE) + self.assertEqual(output.workflow_cron_job_output.message, 'Skip starting workflow, state is RUNNING') + + @patch('fedlearner_webconsole.workflow.cronjob.start_workflow') + def test_run_ready_workflow(self, mock_start_workflow: Mock): + workflow_id = 234 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, state=WorkflowState.READY) + session.add(workflow) + session.commit() + + context = RunnerContext(0, RunnerInput(workflow_cron_job_input=WorkflowCronJobInput(workflow_id=workflow_id))) + runner = WorkflowCronJob() + status, output = runner.run(context) + self.assertEqual(status, RunnerStatus.DONE) + self.assertEqual(output.workflow_cron_job_output.message, 'Restarted workflow') + mock_start_workflow.assert_called_once_with(workflow_id) + + def test_cronjob_with_composer(self): + item_name = f'workflow_cronjob_{self.test_id}' + config = ComposerConfig(runner_fn={ItemType.WORKFLOW_CRON_JOB.value: WorkflowCronJob}, name='test_cronjob') + composer = Composer(config=config) + with db.session_scope() as session: + service = ComposerService(session) + service.collect_v2(name=item_name, + items=[ + (ItemType.WORKFLOW_CRON_JOB, + RunnerInput(workflow_cron_job_input=WorkflowCronJobInput(workflow_id=self.test_id))) + ], + cron_config='* * * * * */10') + session.commit() + composer.run(db_engine=db.engine) + # Interrupts twice since we need two rounds of tick for + # composer to schedule items in fake world + self.time_patcher.interrupt(10) + self.time_patcher.interrupt(10) + with db.session_scope() as session: + runners = session.query(SchedulerRunner).filter( + and_(SchedulerRunner.item_id == SchedulerItem.id, SchedulerItem.name == item_name)).all() + self.assertEqual(len(runners), 2) + composer.stop() + + +if __name__ == 
'__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/models.py b/web_console_v2/api/fedlearner_webconsole/workflow/models.py
index f988f93db..33d226645 100644
--- a/web_console_v2/api/fedlearner_webconsole/workflow/models.py
+++ b/web_console_v2/api/fedlearner_webconsole/workflow/models.py
@@ -1,4 +1,4 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,23 +13,24 @@
 # limitations under the License.
 
 # coding: utf-8
-# pylint: disable=broad-except
-import json
-import logging
+# pylint: disable=use-a-generator
 import enum
-from datetime import datetime
+from typing import List, Optional
+
+from sqlalchemy.orm import deferred
 from sqlalchemy.sql import func
 from sqlalchemy import UniqueConstraint
-from envs import Features
-from fedlearner_webconsole.composer.models import SchedulerItem
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition
+from fedlearner_webconsole.proto.workflow_pb2 import WorkflowRef, WorkflowPb
 from fedlearner_webconsole.utils.mixins import to_dict_mixin
 from fedlearner_webconsole.db import db
+from fedlearner_webconsole.project.models import Project
 from fedlearner_webconsole.proto import (common_pb2, workflow_definition_pb2)
-from fedlearner_webconsole.job.models import (Job, JobState, JobType,
-                                              JobDependency)
-from fedlearner_webconsole.rpc.client import RpcClient
-from fedlearner_webconsole.mmgr.service import ModelService
+from fedlearner_webconsole.job.models import JobState, Job
+from fedlearner_webconsole.utils.pp_datetime import to_timestamp
+from fedlearner_webconsole.workflow.utils import is_local
+from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateRevision
 
 
 class WorkflowState(enum.Enum):
@@ -38,20 +39,60 @@ class WorkflowState(enum.Enum):
     READY = 2
     RUNNING = 3
     STOPPED = 4
-
-
-class RecurType(enum.Enum):
-    NONE = 0
-    ON_NEW_DATA = 1
-    HOURLY = 2
-    DAILY = 3
-    WEEKLY = 4
-
-
-VALID_TRANSITIONS = [(WorkflowState.NEW, WorkflowState.READY),
-                     (WorkflowState.READY, WorkflowState.RUNNING),
-                     (WorkflowState.RUNNING, WorkflowState.STOPPED),
-                     (WorkflowState.STOPPED, WorkflowState.RUNNING)]
+    COMPLETED = 5
+    FAILED = 6
+
+
+class WorkflowExternalState(enum.Enum):
+    # state of workflow is unknown
+    UNKNOWN = 0
+    # workflow is completed
+    COMPLETED = 1
+    # workflow is failed
+    FAILED = 2
+    # workflow is stopped
+    STOPPED = 3
+    # workflow is running
+    RUNNING = 4
+    # workflow is preparing to run
+    PREPARE_RUN = 5
+    # workflow is preparing to stop
+    PREPARE_STOP = 6
+    # workflow is warming up under the hood
+    WARMUP_UNDERHOOD = 7
+    # workflow is pending participant acceptance
+    PENDING_ACCEPT = 8
+    # workflow is ready to run
+    READY_TO_RUN = 9
+    # workflow is waiting for the participant to configure
+    PARTICIPANT_CONFIGURING = 10
+    # workflow is invalid
+    INVALID = 11
+
+
+# yapf: disable
+VALID_TRANSITIONS = [
+    (WorkflowState.NEW, WorkflowState.READY),
+    (WorkflowState.READY, WorkflowState.RUNNING),
+    (WorkflowState.READY, WorkflowState.STOPPED),
+
+    (WorkflowState.RUNNING, WorkflowState.STOPPED),
+    # Transitions below are not used, because the state controller treats COMPLETED and FAILED as STOPPED.
+    # (WorkflowState.RUNNING, WorkflowState.COMPLETED),
+    # (WorkflowState.RUNNING, WorkflowState.FAILED),
+
+
+    (WorkflowState.STOPPED, WorkflowState.RUNNING),
+    (WorkflowState.COMPLETED, WorkflowState.RUNNING),
+    (WorkflowState.FAILED, WorkflowState.RUNNING),
+    (WorkflowState.RUNNING, WorkflowState.RUNNING),
+
+    # This is a hack to make workflow_state_controller's committing stage idempotent.
+    (WorkflowState.STOPPED, WorkflowState.STOPPED),
+    (WorkflowState.COMPLETED, WorkflowState.STOPPED),
+    (WorkflowState.FAILED, WorkflowState.STOPPED)
+]
+# yapf: enable
 
 
 class TransactionState(enum.Enum):
@@ -75,86 +116,69 @@ class TransactionState(enum.Enum):
     (TransactionState.READY, TransactionState.COORDINATOR_PREPARE),
     # (TransactionState.COORDINATOR_PREPARE,
     #  TransactionState.COORDINATOR_COMMITTABLE),
-    (TransactionState.COORDINATOR_COMMITTABLE,
-     TransactionState.COORDINATOR_COMMITTING),
+    (TransactionState.COORDINATOR_COMMITTABLE, TransactionState.COORDINATOR_COMMITTING),
     # (TransactionState.COORDINATOR_PREPARE,
     #  TransactionState.COORDINATOR_ABORTING),
-    (TransactionState.COORDINATOR_COMMITTABLE,
-     TransactionState.COORDINATOR_ABORTING),
+    (TransactionState.COORDINATOR_COMMITTABLE, TransactionState.COORDINATOR_ABORTING),
     (TransactionState.COORDINATOR_ABORTING, TransactionState.ABORTED),
     (TransactionState.READY, TransactionState.PARTICIPANT_PREPARE),
     # (TransactionState.PARTICIPANT_PREPARE,
     #  TransactionState.PARTICIPANT_COMMITTABLE),
-    (TransactionState.PARTICIPANT_COMMITTABLE,
-     TransactionState.PARTICIPANT_COMMITTING),
+    (TransactionState.PARTICIPANT_COMMITTABLE, TransactionState.PARTICIPANT_COMMITTING),
     # (TransactionState.PARTICIPANT_PREPARE,
     #  TransactionState.PARTICIPANT_ABORTING),
-    (TransactionState.PARTICIPANT_COMMITTABLE,
-     TransactionState.PARTICIPANT_ABORTING),
+    (TransactionState.PARTICIPANT_COMMITTABLE, TransactionState.PARTICIPANT_ABORTING),
     # (TransactionState.PARTICIPANT_ABORTING,
     #  TransactionState.ABORTED),
 ]
 
 IGNORED_TRANSACTION_TRANSITIONS = [
-    (TransactionState.PARTICIPANT_COMMITTABLE,
-     TransactionState.PARTICIPANT_PREPARE),
+    (TransactionState.PARTICIPANT_COMMITTABLE, TransactionState.PARTICIPANT_PREPARE),
 ]
 
 
-def _merge_variables(base, new, access_mode):
-    new_dict = {i.name: i.value for i in new}
-    for var in base:
-        if var.access_mode in access_mode and var.name in new_dict:
-            # use json.dumps to escape " in peer's input, a"b ----> "a\"b"
-            # and use [1:-1] to remove ", "a\"b" ----> a\"b
-            var.value = json.dumps(new_dict[var.name])[1:-1]
-
+def compare_yaml_templates_in_wf(wf_a: workflow_definition_pb2.WorkflowDefinition,
+                                 wf_b: workflow_definition_pb2.WorkflowDefinition):
+    """Compares two WorkflowDefinitions job by job; returns True if any job's name or yaml template differs."""
+    if len(wf_a.job_definitions) != len(wf_b.job_definitions):
+        return False
+    job_defs_a = wf_a.job_definitions
+    job_defs_b = wf_b.job_definitions
+    return any([
+        job_defs_a[i].yaml_template != job_defs_b[i].yaml_template or job_defs_a[i].name != job_defs_b[i].name
+        for i in range(len(job_defs_a))
+    ])
 
-def _merge_workflow_config(base, new, access_mode):
-    _merge_variables(base.variables, new.variables, access_mode)
-    if not new.job_definitions:
-        return
-    assert len(base.job_definitions) == len(new.job_definitions)
-    for base_job, new_job in \
-            zip(base.job_definitions, new.job_definitions):
-        _merge_variables(base_job.variables, new_job.variables, access_mode)
-
 
-@to_dict_mixin(ignores=['fork_proposal_config', 'config'],
+@to_dict_mixin(ignores=['fork_proposal_config', 'config', 'editor_info'],
               extras={
'job_ids': (lambda wf: wf.get_job_ids()), 'create_job_flags': (lambda wf: wf.get_create_job_flags()), - 'peer_create_job_flags': - (lambda wf: wf.get_peer_create_job_flags()), + 'peer_create_job_flags': (lambda wf: wf.get_peer_create_job_flags()), 'state': (lambda wf: wf.get_state_for_frontend()), - 'transaction_state': - (lambda wf: wf.get_transaction_state_for_frontend()), - 'batch_update_interval': - (lambda wf: wf.get_batch_update_interval()), + 'is_local': (lambda wf: wf.is_local()) }) class Workflow(db.Model): __tablename__ = 'workflow_v2' __table_args__ = (UniqueConstraint('uuid', name='uniq_uuid'), - UniqueConstraint('name', name='uniq_name'), { + UniqueConstraint('project_id', 'name', name='uniq_name_in_project'), { 'comment': 'workflow_v2', 'mysql_engine': 'innodb', 'mysql_charset': 'utf8mb4', }) - id = db.Column(db.Integer, primary_key=True, comment='id') + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) uuid = db.Column(db.String(64), comment='uuid') name = db.Column(db.String(255), comment='name') project_id = db.Column(db.Integer, comment='project_id') + template_id = db.Column(db.Integer, comment='template_id', nullable=True) + template_revision_id = db.Column(db.Integer, comment='template_revision_id', nullable=True) + editor_info = deferred(db.Column(db.LargeBinary(16777215), comment='editor_info', default=b'', nullable=True)) # max store 16777215 bytes (16 MB) - config = db.Column(db.LargeBinary(16777215), comment='config') - comment = db.Column('cmt', - db.String(255), - key='comment', - comment='comment') - - metric_is_public = db.Column(db.Boolean(), - default=False, - nullable=False, - comment='metric_is_public') + config = deferred(db.Column(db.LargeBinary(16777215), comment='config')) + comment = db.Column('cmt', db.String(255), key='comment', comment='comment') + + metric_is_public = db.Column(db.Boolean(), default=False, nullable=False, comment='metric_is_public') create_job_flags = db.Column(db.TEXT(), comment='create_job_flags') job_ids = db.Column(db.TEXT(), comment='job_ids') @@ -162,31 +186,22 @@ class Workflow(db.Model): forkable = db.Column(db.Boolean, default=False, comment='forkable') forked_from = db.Column(db.Integer, default=None, comment='forked_from') # index in config.job_defs instead of job's id - peer_create_job_flags = db.Column(db.TEXT(), - comment='peer_create_job_flags') + peer_create_job_flags = db.Column(db.TEXT(), comment='peer_create_job_flags') # max store 16777215 bytes (16 MB) - fork_proposal_config = db.Column(db.LargeBinary(16777215), - comment='fork_proposal_config') - - recur_type = db.Column(db.Enum(RecurType, native_enum=False), - default=RecurType.NONE, - comment='recur_type') - recur_at = db.Column(db.Interval, comment='recur_at') + fork_proposal_config = db.Column(db.LargeBinary(16777215), comment='fork_proposal_config') trigger_dataset = db.Column(db.Integer, comment='trigger_dataset') - last_triggered_batch = db.Column(db.Integer, - comment='last_triggered_batch') + last_triggered_batch = db.Column(db.Integer, comment='last_triggered_batch') - state = db.Column(db.Enum(WorkflowState, - native_enum=False, - name='workflow_state'), + state = db.Column(db.Enum(WorkflowState, native_enum=False, create_constraint=False, name='workflow_state'), default=WorkflowState.INVALID, comment='state') target_state = db.Column(db.Enum(WorkflowState, native_enum=False, + create_constraint=False, name='workflow_target_state'), default=WorkflowState.INVALID, comment='target_state') - transaction_state = 
db.Column(db.Enum(TransactionState, native_enum=False, create_constraint=False),
                                  default=TransactionState.READY,
                                  comment='transaction_state')
     transaction_err = db.Column(db.Text(), comment='transaction_err')
@@ -194,56 +209,86 @@ class Workflow(db.Model):
     start_at = db.Column(db.Integer, comment='start_at')
     stop_at = db.Column(db.Integer, comment='stop_at')
-    created_at = db.Column(db.DateTime(timezone=True),
-                           server_default=func.now(),
-                           comment='created_at')
+    created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created_at')
     updated_at = db.Column(db.DateTime(timezone=True),
                            onupdate=func.now(),
                            server_default=func.now(),
                            comment='update_at')
-    extra = db.Column(db.Text(), comment='extra')  # json string
+    extra = db.Column(db.Text(), comment='json string that will be sent to peer')  # deprecated
+    local_extra = db.Column(db.Text(), comment='json string that will only be stored locally')  # deprecated
+    cron_config = db.Column('cronjob_config', db.Text(), key='cron_config', comment='cronjob json string')
+
+    creator = db.Column(db.String(255), comment='the username of the creator')
+    favour = db.Column(db.Boolean, default=False, comment='favour')
 
     owned_jobs = db.relationship(
-        'Job', primaryjoin='foreign(Job.workflow_id) == Workflow.id')
-    project = db.relationship(
-        'Project', primaryjoin='Project.id == foreign(Workflow.project_id)')
+        'Job',
+        primaryjoin='foreign(Job.workflow_id) == Workflow.id',
+        # To disable the warning of back_populates
+        overlaps='workflow')
+    project = db.relationship(Project.__name__, primaryjoin='Project.id == foreign(Workflow.project_id)')
+    template = db.relationship(WorkflowTemplate.__name__,
+                               primaryjoin='WorkflowTemplate.id == foreign(Workflow.template_id)')
+    template_revision = db.relationship(
+        WorkflowTemplateRevision.__name__,
+        primaryjoin='WorkflowTemplateRevision.id == foreign(Workflow.template_revision_id)',
+        # To disable the warning of back_populates
+        overlaps='workflows')
+
+    def is_finished(self) -> bool:
+        return all([job.is_disabled or job.state == JobState.COMPLETED for job in self.owned_jobs])
+
+    def is_failed(self) -> bool:
+        return any([job.state == JobState.FAILED for job in self.owned_jobs])
+
+    def get_state_for_frontend(self) -> WorkflowExternalState:
+        """Gets the workflow state that the frontend needs."""
+
+        # states in workflow creating stage.
+ if self.state == WorkflowState.NEW \ + and self.target_state == WorkflowState.READY: + if self.transaction_state in [ + TransactionState.PARTICIPANT_COMMITTABLE, TransactionState.PARTICIPANT_COMMITTING, + TransactionState.COORDINATOR_COMMITTING + ]: + return WorkflowExternalState.WARMUP_UNDERHOOD + if self.transaction_state == TransactionState.PARTICIPANT_PREPARE: + return WorkflowExternalState.PENDING_ACCEPT + if self.transaction_state in [ + TransactionState.READY, TransactionState.COORDINATOR_COMMITTABLE, + TransactionState.COORDINATOR_PREPARE + ]: + return WorkflowExternalState.PARTICIPANT_CONFIGURING + + # static state + if self.state == WorkflowState.READY: + return WorkflowExternalState.READY_TO_RUN - def get_state_for_frontend(self): if self.state == WorkflowState.RUNNING: - is_complete = all([job.is_disabled or - job.state == JobState.COMPLETED - for job in self.owned_jobs]) - if is_complete: - return 'COMPLETED' - is_failed = any([job.state == JobState.FAILED - for job in self.owned_jobs]) - if is_failed: - return 'FAILED' - return self.state.name - - def get_transaction_state_for_frontend(self): - # TODO(xiangyuxuan): remove this hack by redesign 2pc - if (self.transaction_state == TransactionState.PARTICIPANT_PREPARE - and self.config is not None): - return 'PARTICIPANT_COMMITTABLE' - return self.transaction_state.name - - def set_config(self, proto): + return WorkflowExternalState.RUNNING + + if self.state == WorkflowState.STOPPED: + return WorkflowExternalState.STOPPED + if self.state == WorkflowState.COMPLETED: + return WorkflowExternalState.COMPLETED + if self.state == WorkflowState.FAILED: + return WorkflowExternalState.FAILED + + if self.state == WorkflowState.INVALID: + return WorkflowExternalState.INVALID + + return WorkflowExternalState.UNKNOWN + + def set_config(self, proto: WorkflowDefinition): if proto is not None: self.config = proto.SerializeToString() - job_defs = {i.name: i for i in proto.job_definitions} - for job in self.owned_jobs: - name = job.get_config().name - assert name in job_defs, \ - f'Invalid workflow template: job {name} is missing' - job.set_config(job_defs[name]) else: self.config = None - def get_config(self): + def get_config(self) -> Optional[WorkflowDefinition]: if self.config is not None: - proto = workflow_definition_pb2.WorkflowDefinition() + proto = WorkflowDefinition() proto.ParseFromString(self.config) return proto return None @@ -269,8 +314,9 @@ def get_job_ids(self): return [] return [int(i) for i in self.job_ids.split(',')] - def get_jobs(self): - return [Job.query.get(i) for i in self.get_job_ids()] + def get_jobs(self, session) -> List[Job]: + job_ids = self.get_job_ids() + return session.query(Job).filter(Job.id.in_(job_ids)).all() def set_create_job_flags(self, create_job_flags): if not create_job_flags: @@ -306,256 +352,76 @@ def get_peer_create_job_flags(self): return None return [int(i) for i in self.peer_create_job_flags.split(',')] - def get_batch_update_interval(self): - item = SchedulerItem.query.filter_by( - name=f'workflow_cron_job_{self.id}').first() - if not item: - return -1 - return int(item.interval_time) / 60 + def to_workflow_ref(self) -> WorkflowRef: + return WorkflowRef(id=self.id, + name=self.name, + uuid=self.uuid, + project_id=self.project_id, + state=self.get_state_for_frontend().name, + created_at=to_timestamp(self.created_at), + forkable=self.forkable, + metric_is_public=self.metric_is_public, + favour=self.favour) + + def to_proto(self) -> WorkflowPb: + return WorkflowPb(id=self.id, + name=self.name, + 
uuid=self.uuid, + project_id=self.project_id, + state=self.get_state_for_frontend().name, + created_at=to_timestamp(self.created_at), + forkable=self.forkable, + metric_is_public=self.metric_is_public, + favour=self.favour, + template_revision_id=self.template_revision_id, + template_id=self.template_id, + config=self.get_config(), + editor_info=self.get_editor_info(), + comment=self.comment, + job_ids=self.get_job_ids(), + create_job_flags=self.get_create_job_flags(), + is_local=self.is_local(), + forked_from=self.forked_from, + peer_create_job_flags=self.get_peer_create_job_flags(), + start_at=self.start_at, + stop_at=self.stop_at, + updated_at=to_timestamp(self.updated_at), + cron_config=self.cron_config, + creator=self.creator, + template_info=self.get_template_info()) + + def is_local(self): + return is_local(self.get_config(), self.get_create_job_flags()) + + def get_template_info(self) -> WorkflowPb.TemplateInfo: + template_info = WorkflowPb.TemplateInfo(id=self.template_id, is_modified=True) + if self.template is not None: + template_info.name = self.template.name + template_info.is_modified = compare_yaml_templates_in_wf(self.get_config(), self.template.get_config()) + + if self.template_revision is not None: + template_info.is_modified = False + template_info.revision_index = self.template_revision.revision_index + return template_info + + def get_editor_info(self): + proto = workflow_definition_pb2.WorkflowTemplateEditorInfo() + if self.editor_info is not None: + proto.ParseFromString(self.editor_info) + return proto + + def is_invalid(self): + return self.state == WorkflowState.INVALID + + def can_transit_to(self, target_state: WorkflowState): + return (self.state, target_state) in VALID_TRANSITIONS def update_target_state(self, target_state): - if self.target_state != target_state \ - and self.target_state != WorkflowState.INVALID: - raise ValueError(f'Another transaction is in progress [{self.id}]') - if target_state not in [ - WorkflowState.READY, WorkflowState.RUNNING, - WorkflowState.STOPPED - ]: - raise ValueError(f'Invalid target_state {self.target_state}') + if self.target_state not in [target_state, WorkflowState.INVALID]: + raise ValueError(f'Another transaction is in progress ' f'[{self.id}]') + if target_state != WorkflowState.READY: + raise ValueError(f'Invalid target_state ' f'{self.target_state}') if (self.state, target_state) not in VALID_TRANSITIONS: - raise ValueError( - f'Invalid transition from {self.state} to {target_state}') + raise ValueError(f'Invalid transition from ' f'{self.state} to {target_state}') self.target_state = target_state - - def update_state(self, asserted_state, target_state, transaction_state): - assert asserted_state is None or self.state == asserted_state, \ - 'Cannot change current state directly' - - if transaction_state != self.transaction_state: - if (self.transaction_state, transaction_state) in \ - IGNORED_TRANSACTION_TRANSITIONS: - return self.transaction_state - assert (self.transaction_state, transaction_state) in \ - VALID_TRANSACTION_TRANSITIONS, \ - 'Invalid transaction transition from {} to {}'.format( - self.transaction_state, transaction_state) - self.transaction_state = transaction_state - - # coordinator prepare & rollback - if self.transaction_state == TransactionState.COORDINATOR_PREPARE: - self.prepare(target_state) - if self.transaction_state == TransactionState.COORDINATOR_ABORTING: - self.rollback() - - # participant prepare & rollback & commit - if self.transaction_state == TransactionState.PARTICIPANT_PREPARE: 
- self.prepare(target_state) - if self.transaction_state == TransactionState.PARTICIPANT_ABORTING: - self.rollback() - self.transaction_state = TransactionState.ABORTED - if self.transaction_state == TransactionState.PARTICIPANT_COMMITTING: - self.commit() - - return self.transaction_state - - def prepare(self, target_state): - assert self.transaction_state in [ - TransactionState.COORDINATOR_PREPARE, - TransactionState.PARTICIPANT_PREPARE], \ - 'Workflow not in prepare state' - - # TODO(tjulinfan): remove this - if target_state is None: - # No action - return - - # Validation - try: - self.update_target_state(target_state) - except ValueError as e: - logging.warning('Error during update target state in prepare: %s', - str(e)) - self.transaction_state = TransactionState.ABORTED - return - - success = True - if self.target_state == WorkflowState.READY: - success = self._prepare_for_ready() - - if success: - if self.transaction_state == TransactionState.COORDINATOR_PREPARE: - self.transaction_state = \ - TransactionState.COORDINATOR_COMMITTABLE - else: - self.transaction_state = \ - TransactionState.PARTICIPANT_COMMITTABLE - - def rollback(self): - self.target_state = WorkflowState.INVALID - - def start(self): - self.start_at = int(datetime.now().timestamp()) - for job in self.owned_jobs: - if not job.is_disabled: - job.schedule() - - def stop(self): - self.stop_at = int(datetime.now().timestamp()) - for job in self.owned_jobs: - job.stop() - - # TODO: separate this method to another module - def commit(self): - assert self.transaction_state in [ - TransactionState.COORDINATOR_COMMITTING, - TransactionState.PARTICIPANT_COMMITTING], \ - 'Workflow not in prepare state' - - if self.target_state == WorkflowState.STOPPED: - try: - self.stop() - except RuntimeError as e: - # errors from k8s - logging.error('Stop workflow %d has error msg: %s', - self.id, e.args) - return - elif self.target_state == WorkflowState.READY: - self._setup_jobs() - self.fork_proposal_config = None - elif self.target_state == WorkflowState.RUNNING: - self.start() - - self.state = self.target_state - self.target_state = WorkflowState.INVALID - self.transaction_state = TransactionState.READY - - def invalidate(self): - self.state = WorkflowState.INVALID - self.target_state = WorkflowState.INVALID - self.transaction_state = TransactionState.READY - for job in self.owned_jobs: - try: - job.stop() - except Exception as e: # pylint: disable=broad-except - logging.warning( - 'Error while stopping job %s during invalidation: %s', - job.name, repr(e)) - - def _setup_jobs(self): - if self.forked_from is not None: - trunk = Workflow.query.get(self.forked_from) - assert trunk is not None, \ - 'Source workflow %d not found' % self.forked_from - trunk_job_defs = trunk.get_config().job_definitions - trunk_name2index = { - job.name: i - for i, job in enumerate(trunk_job_defs) - } - - job_defs = self.get_config().job_definitions - flags = self.get_create_job_flags() - assert len(job_defs) == len(flags), \ - 'Number of job defs does not match number of create_job_flags ' \ - '%d vs %d'%(len(job_defs), len(flags)) - jobs = [] - for i, (job_def, flag) in enumerate(zip(job_defs, flags)): - if flag == common_pb2.CreateJobFlag.REUSE: - assert job_def.name in trunk_name2index, \ - f'Job {job_def.name} not found in base workflow' - j = trunk.get_job_ids()[trunk_name2index[job_def.name]] - job = Job.query.get(j) - assert job is not None, \ - 'Job %d not found' % j - # TODO: check forked jobs does not depend on non-forked jobs - else: - job = Job( 
- name=f'{self.uuid}-{job_def.name}', - job_type=JobType(job_def.job_type), - config=job_def.SerializeToString(), - workflow_id=self.id, - project_id=self.project_id, - state=JobState.NEW, - is_disabled=(flag == common_pb2.CreateJobFlag.DISABLED)) - db.session.add(job) - jobs.append(job) - db.session.flush() - name2index = {job.name: i for i, job in enumerate(job_defs)} - for i, (job, flag) in enumerate(zip(jobs, flags)): - if flag == common_pb2.CreateJobFlag.REUSE: - continue - for j, dep_def in enumerate(job.get_config().dependencies): - dep = JobDependency( - src_job_id=jobs[name2index[dep_def.source]].id, - dst_job_id=job.id, - dep_index=j) - db.session.add(dep) - - self.set_job_ids([job.id for job in jobs]) - if Features.FEATURE_MODEL_WORKFLOW_HOOK: - for job in jobs: - ModelService(db.session).workflow_hook(job) - - - def log_states(self): - logging.debug( - 'workflow %d updated to state=%s, target_state=%s, ' - 'transaction_state=%s', self.id, self.state.name, - self.target_state.name, self.transaction_state.name) - - def _get_peer_workflow(self): - project_config = self.project.get_config() - # TODO: find coordinator for multiparty - client = RpcClient(project_config, project_config.participants[0]) - return client.get_workflow(self.name) - - def _prepare_for_ready(self): - # This is a hack, if config is not set then - # no action needed - if self.transaction_state == TransactionState.COORDINATOR_PREPARE: - # TODO(tjulinfan): validate if the config is legal or not - return bool(self.config) - - if self.forked_from: - peer_workflow = self._get_peer_workflow() - base_workflow = Workflow.query.get(self.forked_from) - if base_workflow is None or not base_workflow.forkable: - return False - self.forked_from = base_workflow.id - self.forkable = base_workflow.forkable - self.set_create_job_flags(peer_workflow.peer_create_job_flags) - self.set_peer_create_job_flags(peer_workflow.create_job_flags) - config = base_workflow.get_config() - _merge_workflow_config(config, peer_workflow.fork_proposal_config, - [common_pb2.Variable.PEER_WRITABLE]) - self.set_config(config) - return True - - return bool(self.config) - - def is_local(self): - # since _setup_jobs has not been called, job_definitions is used - job_defs = self.get_config().job_definitions - flags = self.get_create_job_flags() - for i, (job_def, flag) in enumerate(zip(job_defs, flags)): - if flag != common_pb2.CreateJobFlag.REUSE and job_def.is_federated: - return False - return True - - def update_local_state(self): - if self.target_state == WorkflowState.INVALID: - return - if self.target_state == WorkflowState.READY: - self._setup_jobs() - elif self.target_state == WorkflowState.RUNNING: - self.start() - elif self.target_state == WorkflowState.STOPPED: - try: - self.stop() - except Exception as e: - # errors from k8s - logging.error('Stop workflow %d has error msg: %s', - self.id, e.args) - return - self.state = self.target_state - self.target_state = WorkflowState.INVALID diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/models_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/models_test.py new file mode 100644 index 000000000..11c89cca0 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/models_test.py @@ -0,0 +1,212 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from datetime import datetime, timezone + +from unittest.mock import patch, PropertyMock + +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowTemplateEditorInfo +from fedlearner_webconsole.proto.workflow_pb2 import WorkflowRef, WorkflowPb +from fedlearner_webconsole.workflow_template.models import WorkflowTemplateRevision, WorkflowTemplate +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow.models import (Workflow, WorkflowState, TransactionState, WorkflowExternalState) +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.project.models import Project +from testing.no_web_server_test_case import NoWebServerTestCase + + +class WorkflowTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + project = Project(id=0) + + with db.session_scope() as session: + session.add(project) + session.commit() + + def test_get_jobs(self): + workflow = Workflow(id=100, job_ids='1,2,3') + job1 = Job(id=1, name='job 1', workflow_id=3, project_id=0, job_type=JobType.RAW_DATA) + job2 = Job(id=2, name='job 2', workflow_id=3, project_id=0, job_type=JobType.RAW_DATA) + job3 = Job(id=3, name='job 3', workflow_id=100, project_id=0, job_type=JobType.RAW_DATA) + with db.session_scope() as session: + session.add_all([workflow, job1, job2, job3]) + session.commit() + jobs = workflow.get_jobs(session) + jobs.sort(key=lambda job: job.name) + self.assertEqual(jobs[0].name, 'job 1') + self.assertEqual(jobs[1].name, 'job 2') + self.assertEqual(jobs[2].name, 'job 3') + + def test_workflow_state(self): + with db.session_scope() as session: + completed_workflow = Workflow(state=WorkflowState.COMPLETED) + failed_workflow = Workflow(state=WorkflowState.FAILED) + + stopped_workflow_1 = Workflow(state=WorkflowState.STOPPED, target_state=WorkflowState.INVALID) + stopped_workflow_2 = Workflow(state=WorkflowState.STOPPED) + + running_workflow = Workflow(state=WorkflowState.RUNNING, target_state=WorkflowState.INVALID) + + warmup_underhood_workflow_1 = Workflow(state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.PARTICIPANT_COMMITTABLE) + warmup_underhood_workflow_2 = Workflow(state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.COORDINATOR_COMMITTING) + + pending_accept_workflow = Workflow(state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.PARTICIPANT_PREPARE) + + ready_to_run_workflow = Workflow(state=WorkflowState.READY, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY) + + participant_configuring_workflow_1 = Workflow(state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.READY) + participant_configuring_workflow_2 = Workflow(state=WorkflowState.NEW, + target_state=WorkflowState.READY, + transaction_state=TransactionState.COORDINATOR_COMMITTABLE) + participant_configuring_workflow_3 = Workflow(state=WorkflowState.NEW, + target_state=WorkflowState.READY, + 
transaction_state=TransactionState.COORDINATOR_PREPARE) + + invalid_workflow = Workflow(state=WorkflowState.INVALID) + + unknown_workflow = Workflow(state=WorkflowState.NEW, target_state=WorkflowState.INVALID) + session.add_all([ + completed_workflow, failed_workflow, stopped_workflow_1, stopped_workflow_2, running_workflow, + warmup_underhood_workflow_1, warmup_underhood_workflow_2, pending_accept_workflow, + ready_to_run_workflow, participant_configuring_workflow_1, participant_configuring_workflow_2, + participant_configuring_workflow_3, invalid_workflow, unknown_workflow + ]) + session.commit() + + completed_job_cw = Job(job_type=JobType.RAW_DATA, + workflow_id=completed_workflow.id, + project_id=0, + state=JobState.COMPLETED) + failed_job_fw = Job(job_type=JobType.RAW_DATA, + workflow_id=failed_workflow.id, + project_id=0, + state=JobState.FAILED) + running_job_rw = Job(job_type=JobType.RAW_DATA, + workflow_id=running_workflow.id, + project_id=0, + state=JobState.STARTED) + session.add_all([completed_job_cw, failed_job_fw, running_job_rw]) + session.commit() + + self.assertEqual(completed_workflow.get_state_for_frontend(), WorkflowExternalState.COMPLETED) + self.assertEqual(failed_workflow.get_state_for_frontend(), WorkflowExternalState.FAILED) + + self.assertEqual(stopped_workflow_1.get_state_for_frontend(), WorkflowExternalState.STOPPED) + self.assertEqual(stopped_workflow_2.get_state_for_frontend(), WorkflowExternalState.STOPPED) + + self.assertEqual(running_workflow.get_state_for_frontend(), WorkflowExternalState.RUNNING) + + self.assertEqual(warmup_underhood_workflow_1.get_state_for_frontend(), + WorkflowExternalState.WARMUP_UNDERHOOD) + self.assertEqual(warmup_underhood_workflow_2.get_state_for_frontend(), + WorkflowExternalState.WARMUP_UNDERHOOD) + + self.assertEqual(pending_accept_workflow.get_state_for_frontend(), WorkflowExternalState.PENDING_ACCEPT) + self.assertEqual(ready_to_run_workflow.get_state_for_frontend(), WorkflowExternalState.READY_TO_RUN) + + self.assertEqual(participant_configuring_workflow_1.get_state_for_frontend(), + WorkflowExternalState.PARTICIPANT_CONFIGURING) + self.assertEqual(participant_configuring_workflow_2.get_state_for_frontend(), + WorkflowExternalState.PARTICIPANT_CONFIGURING) + self.assertEqual(participant_configuring_workflow_3.get_state_for_frontend(), + WorkflowExternalState.PARTICIPANT_CONFIGURING) + + self.assertEqual(invalid_workflow.get_state_for_frontend(), WorkflowExternalState.INVALID) + self.assertEqual(unknown_workflow.get_state_for_frontend(), WorkflowExternalState.UNKNOWN) + + def test_to_workflow_ref(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + workflow = Workflow( + id=123, + name='test', + uuid='uuid', + project_id=1, + state=WorkflowState.STOPPED, + target_state=WorkflowState.INVALID, + created_at=created_at, + forkable=True, + metric_is_public=False, + extra='{}', + ) + workflow_ref = WorkflowRef( + id=123, + name='test', + uuid='uuid', + project_id=1, + state=WorkflowExternalState.STOPPED.name, + created_at=int(created_at.timestamp()), + forkable=True, + metric_is_public=False, + ) + self.assertEqual(workflow.to_workflow_ref(), workflow_ref) + + def test_to_proto(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + updated_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + workflow = Workflow(id=123, + name='test', + uuid='uuid', + project_id=1, + state=WorkflowState.STOPPED, + target_state=WorkflowState.INVALID, + created_at=created_at, + forkable=True, + 
metric_is_public=False, + extra='{}', + updated_at=updated_at) + workflow_pb = WorkflowPb(id=123, + name='test', + uuid='uuid', + project_id=1, + state=WorkflowExternalState.STOPPED.name, + created_at=int(created_at.timestamp()), + forkable=True, + metric_is_public=False, + updated_at=int(updated_at.timestamp()), + editor_info=WorkflowTemplateEditorInfo(), + template_info=WorkflowPb.TemplateInfo(is_modified=True)) + self.assertEqual(workflow.to_proto(), workflow_pb) + + @patch('fedlearner_webconsole.workflow.models.Workflow.template_revision', new_callable=PropertyMock) + @patch('fedlearner_webconsole.workflow.models.Workflow.template', new_callable=PropertyMock) + def test_get_template_info(self, mock_template, mock_template_revision): + workflow = Workflow(id=123, + name='test', + uuid='uuid', + project_id=1, + template_id=1, + template_revision_id=1, + config=b'') + mock_template.return_value = WorkflowTemplate(id=1, name='test', config=b'') + mock_template_revision.return_value = WorkflowTemplateRevision(id=1, revision_index=3, template_id=1) + self.assertEqual(workflow.get_template_info(), + WorkflowPb.TemplateInfo(name='test', id=1, is_modified=False, revision_index=3)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/resource_manager.py b/web_console_v2/api/fedlearner_webconsole/workflow/resource_manager.py new file mode 100644 index 000000000..3b9be6080 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/resource_manager.py @@ -0,0 +1,178 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
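
For orientation before the module body: the `ResourceManager` defined below drives the two-phase-commit style workflow transaction that was previously embedded in `Workflow.update_state`. The following is a minimal, self-contained sketch of just the dispatch in `update_state`; the enum names mirror `TransactionState` from `workflow/models.py` (values assumed), while the toy class, its handler wiring, and the simplified `prepare`/`commit` bodies are hypothetical and only illustrate the control flow, not the real validation or persistence logic.

```python
from enum import Enum


class TxnState(Enum):
    # Mirrors TransactionState in workflow/models.py (names only; values assumed).
    READY = 0
    COORDINATOR_PREPARE = 1
    COORDINATOR_COMMITTABLE = 2
    COORDINATOR_ABORTING = 3
    PARTICIPANT_PREPARE = 4
    PARTICIPANT_COMMITTABLE = 5
    PARTICIPANT_COMMITTING = 6
    PARTICIPANT_ABORTING = 7
    ABORTED = 8


class ToyResourceManager:
    """Hypothetical stand-in that keeps only the transaction dispatch."""

    def __init__(self):
        self.txn = TxnState.READY

    def update_state(self, new_txn: TxnState) -> TxnState:
        self.txn = new_txn
        # Coordinator side: prepare, or roll back on abort.
        if self.txn is TxnState.COORDINATOR_PREPARE:
            self.prepare()
        if self.txn is TxnState.COORDINATOR_ABORTING:
            self.rollback()
        # Participant side: prepare/abort, and additionally commit.
        if self.txn is TxnState.PARTICIPANT_PREPARE:
            self.prepare()
        if self.txn is TxnState.PARTICIPANT_ABORTING:
            self.rollback()
            self.txn = TxnState.ABORTED
        if self.txn is TxnState.PARTICIPANT_COMMITTING:
            self.commit()
        return self.txn

    def prepare(self):
        # On success the state advances to its *_COMMITTABLE twin.
        self.txn = (TxnState.COORDINATOR_COMMITTABLE if self.txn is TxnState.COORDINATOR_PREPARE
                    else TxnState.PARTICIPANT_COMMITTABLE)

    def rollback(self):
        pass  # The real manager resets workflow.target_state to INVALID.

    def commit(self):
        self.txn = TxnState.READY  # The real manager also applies target_state first.


assert ToyResourceManager().update_state(TxnState.COORDINATOR_PREPARE) is TxnState.COORDINATOR_COMMITTABLE
assert ToyResourceManager().update_state(TxnState.PARTICIPANT_COMMITTING) is TxnState.READY
```
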
+ +# coding: utf-8 +import logging +from sqlalchemy.orm import Session +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState, \ + VALID_TRANSACTION_TRANSITIONS, \ + IGNORED_TRANSACTION_TRANSITIONS, TransactionState +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.workflow.service import WorkflowService + + +def _merge_variables(base, new, access_mode): + new_dict = {i.name: i for i in new} + for var in base: + if var.access_mode in access_mode and var.name in new_dict: + var.typed_value.CopyFrom(new_dict[var.name].typed_value) + # TODO(xiangyuxuan.prs): remove when value is deprecated in Variable + var.value = new_dict[var.name].value + + +# TODO(hangweiqiang): move it to utils +def merge_workflow_config(base, new, access_mode): + _merge_variables(base.variables, new.variables, access_mode) + if not new.job_definitions: + return + assert len(base.job_definitions) == len(new.job_definitions) + for base_job, new_job in \ + zip(base.job_definitions, new.job_definitions): + _merge_variables(base_job.variables, new_job.variables, access_mode) + + +class ResourceManager: + + def __init__(self, session: Session, workflow: Workflow): + self._session = session + self._workflow = workflow + + def update_state(self, asserted_state, target_state, transaction_state): + if self._workflow.is_invalid(): + return self._workflow.transaction_state + + assert asserted_state is None or \ + self._workflow.state == asserted_state, \ + 'Cannot change current state directly' + + if transaction_state != self._workflow.transaction_state: + if (self._workflow.transaction_state, transaction_state) in \ + IGNORED_TRANSACTION_TRANSITIONS: + return self._workflow.transaction_state + assert (self._workflow.transaction_state, transaction_state) in \ + VALID_TRANSACTION_TRANSITIONS, \ + f'Invalid transaction transition from {self._workflow.transaction_state} to {transaction_state}' + self._workflow.transaction_state = transaction_state + + # coordinator prepare & rollback + if self._workflow.transaction_state == \ + TransactionState.COORDINATOR_PREPARE: + self.prepare(target_state) + if self._workflow.transaction_state == \ + TransactionState.COORDINATOR_ABORTING: + self.rollback() + + # participant prepare & rollback & commit + if self._workflow.transaction_state == \ + TransactionState.PARTICIPANT_PREPARE: + self.prepare(target_state) + if self._workflow.transaction_state == \ + TransactionState.PARTICIPANT_ABORTING: + self.rollback() + self._workflow.transaction_state = TransactionState.ABORTED + if self._workflow.transaction_state == \ + TransactionState.PARTICIPANT_COMMITTING: + self.commit() + + return self._workflow.transaction_state + + def prepare(self, target_state): + assert self._workflow.transaction_state in [ + TransactionState.COORDINATOR_PREPARE, + TransactionState.PARTICIPANT_PREPARE], \ + 'Workflow not in prepare state' + + # TODO(tjulinfan): remove this + if target_state is None: + # No action + return + + # Validation + try: + self._workflow.update_target_state(target_state) + except ValueError as e: + logging.warning('Error during update target state in prepare: %s', str(e)) + self._workflow.transaction_state = TransactionState.ABORTED + return + + success = True + if self._workflow.target_state == WorkflowState.READY: + success = self._prepare_for_ready() + + if success: + if self._workflow.transaction_state == \ + TransactionState.COORDINATOR_PREPARE: + self._workflow.transaction_state = \ + TransactionState.COORDINATOR_COMMITTABLE + else: + 
self._workflow.transaction_state = \ + TransactionState.PARTICIPANT_COMMITTABLE + + def rollback(self): + self._workflow.target_state = WorkflowState.INVALID + + # TODO: separate this method to another module + def commit(self): + assert self._workflow.transaction_state in [ + TransactionState.COORDINATOR_COMMITTING, + TransactionState.PARTICIPANT_COMMITTING], \ + 'Workflow not in committing state' + + if self._workflow.target_state == WorkflowState.READY: + self._workflow.fork_proposal_config = None + + self._workflow.state = self._workflow.target_state + self._workflow.target_state = WorkflowState.INVALID + self._workflow.transaction_state = TransactionState.READY + + def _prepare_for_ready(self): + # This is a hack: if config is not set, then + # no action is needed + if self._workflow.transaction_state == \ + TransactionState.COORDINATOR_PREPARE: + # TODO(tjulinfan): validate if the config is legal or not + return bool(self._workflow.config) + + if self._workflow.forked_from: + peer_workflow = WorkflowService(self._session).get_peer_workflow(self._workflow) + base_workflow = self._session.query(Workflow).get(self._workflow.forked_from) + if base_workflow is None or not base_workflow.forkable: + return False + self._workflow.forked_from = base_workflow.id + self._workflow.forkable = base_workflow.forkable + self._workflow.set_create_job_flags(peer_workflow.peer_create_job_flags) + self._workflow.set_peer_create_job_flags(peer_workflow.create_job_flags) + config = base_workflow.get_config() + merge_workflow_config(config, peer_workflow.fork_proposal_config, [common_pb2.Variable.PEER_WRITABLE]) + WorkflowService(self._session).update_config(self._workflow, config) + logging.debug('fork base workflow: %s', base_workflow.to_dict()) + self._workflow.template_id = base_workflow.template_id + self._workflow.editor_info = base_workflow.editor_info + # TODO: set forked workflow in grpc server + WorkflowService(self._session).setup_jobs(self._workflow) + return True + + return bool(self._workflow.config) + + def log_states(self): + workflow = self._workflow + logging.debug('workflow %d updated to state=%s, target_state=%s, ' + 'transaction_state=%s', workflow.id, workflow.state.name, workflow.target_state.name, + workflow.transaction_state.name) + + def update_local_state(self): + if self._workflow.target_state == WorkflowState.INVALID: + return + self._workflow.state = self._workflow.target_state + self._workflow.target_state = WorkflowState.INVALID diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/service.py b/web_console_v2/api/fedlearner_webconsole/workflow/service.py new file mode 100644 index 000000000..e75f8ab4e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/service.py @@ -0,0 +1,372 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
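
One step in `ResourceManager._prepare_for_ready` above deserves a callout: on the fork path, the peer's `fork_proposal_config` is folded into the base workflow's config via `merge_workflow_config`, and only variables whose access mode the peer may write (e.g. `PEER_WRITABLE`) are overwritten. A plain-Python analogue with dicts standing in for the `WorkflowDefinition` protos (field names here are illustrative, not the real proto schema):

```python
# Hypothetical flat-dict version of _merge_variables / merge_workflow_config.
PEER_WRITABLE = 'PEER_WRITABLE'
PRIVATE = 'PRIVATE'


def merge_variables(base, new, access_modes):
    # Overwrite a base variable only if the peer sent a value for it
    # AND its access mode allows the peer to write it.
    new_by_name = {v['name']: v for v in new}
    for var in base:
        if var['access_mode'] in access_modes and var['name'] in new_by_name:
            var['value'] = new_by_name[var['name']]['value']


base = [{'name': 'lr', 'access_mode': PEER_WRITABLE, 'value': '0.1'},
        {'name': 'secret', 'access_mode': PRIVATE, 'value': 'keep-me'}]
proposal = [{'name': 'lr', 'value': '0.01'},
            {'name': 'secret', 'value': 'overwrite-attempt'}]

merge_variables(base, proposal, [PEER_WRITABLE])
assert base[0]['value'] == '0.01'     # peer-writable: merged
assert base[1]['value'] == 'keep-me'  # private: untouched
```
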
+ +# coding: utf-8 +from typing import List, Tuple, Union, Optional + +from sqlalchemy import or_, and_ +from sqlalchemy.orm import Session, Query + +from fedlearner_webconsole.composer.interface import ItemType +from fedlearner_webconsole.job.service import JobService +from fedlearner_webconsole.proto.composer_pb2 import RunnerInput, WorkflowCronJobInput +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterOp, SimpleExpression +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.utils.const import SYSTEM_WORKFLOW_CREATOR_USERNAME +from fedlearner_webconsole.utils.filtering import SupportedField, FilterBuilder, FieldType +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.composer.composer_service import CronJobService +from fedlearner_webconsole.exceptions import InvalidArgumentException, ResourceConflictException +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.job.yaml_formatter import YamlFormatterService +from fedlearner_webconsole.utils.workflow import build_job_name +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState, TransactionState +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from fedlearner_webconsole.workflow.utils import is_local, is_peer_job_inheritance_matched +from fedlearner_webconsole.job.models import Job, JobType, JobState, JobDependency +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.participant.services import ParticipantService +from fedlearner_webconsole.utils.resource_name import resource_uuid + + +def update_cronjob_config(workflow_id: int, cron_config: str, session: Session): + """Starts a cronjob for the workflow if cron_config is valid. + + Args: + workflow_id (int): id of the workflow + cron_config (str): cron expression; + if cron_config is None or '', cancels the previous cron setting + session: db session + Raises: + InvalidArgumentException: if any validation check fails + """ + item_name = f'workflow_cron_job_{workflow_id}' + if cron_config: + rinput = RunnerInput(workflow_cron_job_input=WorkflowCronJobInput(workflow_id=workflow_id)) + items = [(ItemType.WORKFLOW_CRON_JOB, rinput)] + CronJobService(session).start_cronjob(item_name=item_name, items=items, cron_config=cron_config) + else: + CronJobService(session).stop_cronjob(item_name=item_name) + + +class ForkWorkflowParams(object): + + def __init__(self, fork_from_id: int, fork_proposal_config: WorkflowDefinition, peer_create_job_flags: List[int]): + self.fork_from_id = fork_from_id + self.fork_proposal_config = fork_proposal_config + self.peer_create_job_flags = peer_create_job_flags + + +class CreateNewWorkflowParams(object): + + def __init__(self, project_id: int, template_id: Optional[int], template_revision_id: Optional[int] = None): + self.project_id = project_id + self.template_id = template_id + self.template_revision_id = template_revision_id + + +def _filter_system_workflow(exp: SimpleExpression): + if exp.bool_value: + return Workflow.creator == SYSTEM_WORKFLOW_CREATOR_USERNAME + # "!= NULL" or "== NULL" always evaluates to NULL in MySQL, hence the explicit IS NULL check.
+ return or_(Workflow.creator != SYSTEM_WORKFLOW_CREATOR_USERNAME, Workflow.creator.is_(None)) + + +class WorkflowService: + FILTER_FIELDS = {'system': SupportedField(type=FieldType.BOOL, ops={FilterOp.EQUAL: _filter_system_workflow})} + + def __init__(self, session): + self._session = session + self._filter_builder = FilterBuilder(model_class=Workflow, supported_fields=self.FILTER_FIELDS) + + def build_filter_query(self, query: Query, exp: FilterExpression) -> Query: + return self._filter_builder.build_query(query, exp) + + def validate_workflow(self, workflow: Workflow) -> Tuple[bool, tuple]: + for job in workflow.owned_jobs: + try: + YamlFormatterService(self._session).generate_job_run_yaml(job) + except Exception as e: # pylint: disable=broad-except + return False, (job.name, e) + return True, () + + @staticmethod + def filter_workflows(query: Query, states: List[str]) -> Query: + query_states = [] + filters = [] + for state in states: + query_states.append(state.upper()) + # TODO(xiangyuxuan.prs): simplify Workflow create to remove the specific process for states below. + # The logic of process is same as get_state_for_frontend. + if state == 'warmup': + filters.append( + and_( + Workflow.state == WorkflowState.NEW, Workflow.target_state == WorkflowState.READY, + Workflow.transaction_state.in_([ + TransactionState.PARTICIPANT_COMMITTABLE, TransactionState.PARTICIPANT_COMMITTING, + TransactionState.COORDINATOR_COMMITTING + ]))) + if state == 'pending': + filters.append( + and_(Workflow.state == WorkflowState.NEW, Workflow.target_state == WorkflowState.READY, + Workflow.transaction_state == TransactionState.PARTICIPANT_PREPARE)) + if state == 'configuring': + filters.append( + and_( + Workflow.state == WorkflowState.NEW, Workflow.target_state == WorkflowState.READY, + Workflow.transaction_state.in_([ + TransactionState.READY, TransactionState.COORDINATOR_COMMITTABLE, + TransactionState.COORDINATOR_PREPARE + ]))) + filters.append(Workflow.state.in_(query_states)) + query = query.filter(or_(*filters)) + return query + + def _check_conflict(self, workflow_name: str, project_id: int): + if self._session.query(Workflow).filter_by(name=workflow_name).filter_by( + project_id=project_id).first() is not None: + raise ResourceConflictException(f'Workflow {workflow_name} already exists in project: {project_id}.') + + def create_workflow(self, + name: str, + config: WorkflowDefinition, + params: Union[CreateNewWorkflowParams, ForkWorkflowParams], + forkable: bool = False, + comment: Optional[str] = None, + create_job_flags: Optional[List[int]] = None, + cron_config: Optional[str] = None, + creator_username: str = None, + uuid: Optional[str] = None, + state: WorkflowState = WorkflowState.NEW, + target_state: WorkflowState = WorkflowState.READY): + # Parameter validations + parent_workflow = None + template = None + project_id = None + if isinstance(params, ForkWorkflowParams): + # Fork mode + parent_workflow = self._session.query(Workflow).get(params.fork_from_id) + if parent_workflow is None: + raise InvalidArgumentException('fork_from_id is not valid') + if not parent_workflow.forkable: + raise InvalidArgumentException('workflow not forkable') + project_id = parent_workflow.project_id + self._check_conflict(name, project_id) + # it is possible that parent_workflow.template is None + template = parent_workflow.template + if not is_local(config, create_job_flags): + participants = ParticipantService(self._session).get_platform_participants_by_project( + parent_workflow.project.id) + if not 
is_peer_job_inheritance_matched(project=parent_workflow.project, + workflow_definition=config, + job_flags=create_job_flags, + peer_job_flags=params.peer_create_job_flags, + parent_uuid=parent_workflow.uuid, + parent_name=parent_workflow.name, + participants=participants): + raise ValueError('Forked workflow has federated job with ' 'unmatched inheritance') + else: + # Create new mode + project_id = params.project_id + self._check_conflict(name, project_id) + if params.template_id: + template = self._session.query(WorkflowTemplate).get(params.template_id) + assert template is not None + assert project_id is not None + if uuid is None: + uuid = resource_uuid() + workflow = Workflow(name=name, + uuid=uuid, + comment=comment, + project_id=project_id, + forkable=forkable, + forked_from=None if parent_workflow is None else parent_workflow.id, + state=state, + target_state=target_state, + transaction_state=TransactionState.READY, + creator=creator_username, + template_revision_id=parent_workflow.template_revision_id + if parent_workflow else params.template_revision_id) + if template: + workflow.template_id = template.id + workflow.editor_info = template.editor_info + self.update_config(workflow, config) + workflow.set_create_job_flags(create_job_flags) + if isinstance(params, ForkWorkflowParams): + # Fork mode + # TODO(hangweiqiang): more validations + workflow.set_fork_proposal_config(params.fork_proposal_config) + workflow.set_peer_create_job_flags(params.peer_create_job_flags) + self._session.add(workflow) + # To get workflow id + self._session.flush() + if cron_config is not None: + workflow.cron_config = cron_config + update_cronjob_config(workflow.id, cron_config, self._session) + self.setup_jobs(workflow) + return workflow + + def config_workflow(self, + workflow: Workflow, + template_id: int, + config: Optional[WorkflowDefinition] = None, + forkable: bool = False, + comment: Optional[str] = None, + cron_config: Optional[str] = None, + create_job_flags: Optional[List[int]] = None, + creator_username: Optional[str] = None, + template_revision_id: Optional[int] = None) -> Workflow: + if workflow.config: + raise ValueError('Resetting workflow is not allowed') + workflow.comment = comment + workflow.forkable = forkable + workflow.creator = creator_username + workflow.set_config(config) + workflow.set_create_job_flags(create_job_flags) + workflow.update_target_state(WorkflowState.READY) + workflow.template_id = template_id + workflow.template_revision_id = template_revision_id + self._session.flush() + if workflow.template is None: + emit_store('template_not_found', 1) + raise ValueError('template not found') + workflow.editor_info = workflow.template.editor_info + if cron_config is not None: + workflow.cron_config = cron_config + update_cronjob_config(workflow.id, cron_config, self._session) + self.setup_jobs(workflow) + return workflow + + def patch_workflow(self, + workflow: Workflow, + forkable: Optional[bool] = None, + metric_is_public: Optional[bool] = None, + config: Optional[WorkflowDefinition] = None, + template_id: Optional[int] = None, + create_job_flags: List[int] = None, + cron_config: Optional[str] = None, + favour: Optional[bool] = None, + template_revision_id: Optional[int] = None): + if forkable is not None: + workflow.forkable = forkable + if metric_is_public is not None: + workflow.metric_is_public = metric_is_public + + if config: + if workflow.target_state != WorkflowState.INVALID or \ + workflow.state not in \ + [WorkflowState.READY, WorkflowState.STOPPED, 
WorkflowState.COMPLETED, + WorkflowState.FAILED]: + raise ValueError('Cannot edit running workflow') + self.update_config(workflow, config) + workflow.template_id = template_id + self._session.flush() + if workflow.template is not None: + workflow.editor_info = workflow.template.editor_info + self._session.flush() + + if create_job_flags: + jobs = [self._session.query(Job).get(i) for i in workflow.get_job_ids()] + if len(create_job_flags) != len(jobs): + raise ValueError(f'Number of job defs does not match number of ' + f'create_job_flags {len(jobs)} vs {len(create_job_flags)}') + workflow.set_create_job_flags(create_job_flags) + flags = workflow.get_create_job_flags() + for i, job in enumerate(jobs): + if job.workflow_id == workflow.id: + job.is_disabled = flags[i] == \ + common_pb2.CreateJobFlag.DISABLED + + # start workflow periodically. + # Session.commit inside, so this part must be the last of the api + # to guarantee atomicity. + if cron_config is not None: + workflow.cron_config = cron_config + update_cronjob_config(workflow.id, cron_config, self._session) + + if favour is not None: + workflow.favour = favour + + if template_revision_id is not None: + workflow.template_revision_id = template_revision_id + + def setup_jobs(self, workflow: Workflow): + if workflow.forked_from is not None: + trunk = self._session.query(Workflow).get(workflow.forked_from) + assert trunk is not None, \ + f'Source workflow {workflow.forked_from} not found' + trunk_job_defs = trunk.get_config().job_definitions + trunk_name2index = {job.name: i for i, job in enumerate(trunk_job_defs)} + + job_defs = workflow.get_config().job_definitions + flags = workflow.get_create_job_flags() + assert len(job_defs) == len(flags), \ + f'Number of job defs does not match number of create_job_flags {len(job_defs)} vs {len(flags)}' + jobs = [] + for i, (job_def, flag) in enumerate(zip(job_defs, flags)): + if flag == common_pb2.CreateJobFlag.REUSE: + assert job_def.name in trunk_name2index, \ + f'Job {job_def.name} not found in base workflow' + j = trunk.get_job_ids()[trunk_name2index[job_def.name]] + job = self._session.query(Job).get(j) + assert job is not None, f'Job {j} not found' + # TODO: check forked jobs does not depend on non-forked jobs + else: + job = Job(name=build_job_name(workflow.uuid, job_def.name), + job_type=JobType(job_def.job_type), + workflow_id=workflow.id, + project_id=workflow.project_id, + state=JobState.NEW, + is_disabled=(flag == common_pb2.CreateJobFlag.DISABLED)) + self._session.add(job) + self._session.flush() + JobService(self._session).set_config_and_crd_info(job, job_def) + jobs.append(job) + self._session.refresh(workflow) + name2index = {job.name: i for i, job in enumerate(job_defs)} + for i, (job, flag) in enumerate(zip(jobs, flags)): + if flag == common_pb2.CreateJobFlag.REUSE: + continue + for j, dep_def in enumerate(job.get_config().dependencies): + dep = JobDependency(src_job_id=jobs[name2index[dep_def.source]].id, dst_job_id=job.id, dep_index=j) + self._session.add(dep) + + workflow.set_job_ids([job.id for job in jobs]) + + def get_peer_workflow(self, workflow: Workflow): + service = ParticipantService(self._session) + participants = service.get_platform_participants_by_project(workflow.project.id) + # TODO: find coordinator for multiparty + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participants[0].domain_name) + return client.get_workflow(workflow.uuid, workflow.name) + + def is_federated_workflow_finished(self, workflow: 
Workflow): + if not workflow.is_finished(): + return False + return workflow.is_local() or self.get_peer_workflow(workflow).is_finished + + def should_auto_stop(self, workflow: Workflow): + return workflow.is_failed() or self.is_federated_workflow_finished(workflow) + + def update_config(self, workflow: Workflow, proto: WorkflowDefinition): + workflow.set_config(proto) + if proto is not None: + job_defs = {i.name: i for i in proto.job_definitions} + for job in workflow.owned_jobs: + name = job.get_config().name + assert name in job_defs, \ + f'Invalid workflow template: job {name} is missing' + JobService(self._session).set_config_and_crd_info(job, job_defs[name]) diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/service_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/service_test.py new file mode 100644 index 000000000..a2dce2033 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/service_test.py @@ -0,0 +1,123 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest +from unittest.mock import patch + +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.workflow.service import WorkflowService +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition +from fedlearner_webconsole.job.models import Job, JobType, JobState +from fedlearner_webconsole.workflow.models import (Workflow, WorkflowState, TransactionState) +from fedlearner_webconsole.workflow.service import update_cronjob_config +from fedlearner_webconsole.composer.models import SchedulerItem, ItemStatus + + +class WorkflowServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + project = Project(id=0) + + with db.session_scope() as session: + session.add(project) + session.commit() + + @patch('fedlearner_webconsole.workflow.service.YamlFormatterService.generate_job_run_yaml') + def test_valid_workflow(self, mock_generate_job_run_yaml): + with db.session_scope() as session: + workflow = Workflow(id=0, project_id=99) + session.add(workflow) + session.flush() + job = Job(id=0, + name='test-job-0', + job_type=JobType.RAW_DATA, + workflow_id=0, + project_id=99, + config=JobDefinition(name='test-job').SerializeToString()) + session.add(job) + session.flush() + sample_json = {'apiVersion': 'v1', 'kind': 'FLApp', 'metadata': {}} + mock_generate_job_run_yaml.return_value = sample_json + + workflow_valid = WorkflowService(session).validate_workflow(workflow) + self.assertTrue(workflow_valid[0]) + mock_generate_job_run_yaml.side_effect = ValueError + workflow_valid = WorkflowService(session).validate_workflow(workflow) + self.assertFalse(workflow_valid[0]) + + def test_filter_workflow_state(self): + with db.session_scope() as session: + configuring_workflow = Workflow(id=1, + state=WorkflowState.NEW, + target_state=WorkflowState.READY, + 
transaction_state=TransactionState.READY) + ready_workflow = Workflow(id=2, + state=WorkflowState.READY, + target_state=WorkflowState.INVALID, + transaction_state=TransactionState.READY) + completed_workflow = Workflow(id=3, state=WorkflowState.COMPLETED) + failed_workflow = Workflow(id=4, state=WorkflowState.FAILED) + running_workflow = Workflow(id=5, state=WorkflowState.RUNNING) + session.add_all( + [configuring_workflow, ready_workflow, running_workflow, completed_workflow, failed_workflow]) + session.flush() + completed_job = Job(id=1, job_type=JobType.RAW_DATA, workflow_id=3, project_id=99, state=JobState.COMPLETED) + failed_job = Job(id=2, job_type=JobType.RAW_DATA, workflow_id=4, project_id=99, state=JobState.FAILED) + running_job = Job(id=3, job_type=JobType.RAW_DATA, workflow_id=5, project_id=99, state=JobState.STARTED) + session.add_all([completed_job, failed_job, running_job]) + session.flush() + all_workflows = session.query(Workflow) + self.assertEqual( + WorkflowService.filter_workflows(all_workflows, ['configuring', 'ready']).all(), + [configuring_workflow, ready_workflow]) + self.assertEqual(WorkflowService.filter_workflows(all_workflows, ['failed']).all(), [failed_workflow]) + self.assertEqual(WorkflowService.filter_workflows(all_workflows, ['completed']).all(), [completed_workflow]) + self.assertEqual( + WorkflowService.filter_workflows(all_workflows, ['running', 'completed']).all(), + [completed_workflow, running_workflow]) + + def _get_scheduler_item(self, session) -> SchedulerItem: + item: SchedulerItem = session.query(SchedulerItem).filter_by(name='workflow_cron_job_1').first() + return item + + def test_update_cronjob_config(self): + with db.session_scope() as session: + # test for collect + update_cronjob_config(1, '1 2 3 4 5', session) + session.commit() + item = self._get_scheduler_item(session) + self.assertEqual(item.cron_config, '1 2 3 4 5') + self.assertEqual(item.status, ItemStatus.ON.value) + item.status = ItemStatus.OFF.value + session.commit() + with db.session_scope() as session: + update_cronjob_config(1, '1 2 3 4 6', session) + session.commit() + item = self._get_scheduler_item(session) + self.assertEqual(item.status, ItemStatus.ON.value) + self.assertEqual(item.cron_config, '1 2 3 4 6') + with db.session_scope() as session: + update_cronjob_config(1, None, session) + session.commit() + item = self._get_scheduler_item(session) + self.assertEqual(item.status, ItemStatus.OFF.value) + self.assertEqual(item.cron_config, '1 2 3 4 6') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/utils.py b/web_console_v2/api/fedlearner_webconsole/workflow/utils.py new file mode 100644 index 000000000..bad3153a6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/utils.py @@ -0,0 +1,65 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
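
The `is_local` helper defined just below reduces to a single rule: a workflow is local iff no job that will actually be created (any flag other than `REUSE`) is federated. A condensed restatement with plain `(name, is_federated)` tuples in place of the proto job definitions; the flag constants are assumptions, not the real `CreateJobFlag` values:

```python
NEW, REUSE = 1, 2  # assumed stand-ins for common_pb2.CreateJobFlag


def is_local(job_defs, job_flags=None):
    # Missing flags default to NEW for every job, as in the real helper.
    if job_flags is None:
        job_flags = [NEW] * len(job_defs)
    return all(flag == REUSE or not is_federated
               for (_, is_federated), flag in zip(job_defs, job_flags))


jobs = [('raw-data', False), ('train', True)]
assert not is_local(jobs)            # the federated train job would be created
assert is_local(jobs, [NEW, REUSE])  # the federated job is reused from the parent
```
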
+ +# coding: utf-8 +from typing import List, Optional + +from fedlearner_webconsole.rpc.client import RpcClient +from fedlearner_webconsole.exceptions import InternalException +from fedlearner_webconsole.utils.metrics import emit_store +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.common_pb2 import CreateJobFlag +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.workflow_definition_pb2 import \ + WorkflowDefinition + + +def is_local(config: WorkflowDefinition, job_flags: Optional[List[int]] = None) -> bool: + # if self.config is None, it must be created by the opposite side + if config is None: + return False + # since _setup_jobs has not been called, job_definitions is used + job_defs = config.job_definitions + if job_flags is None: + num_jobs = len(job_defs) + job_flags = [common_pb2.CreateJobFlag.NEW] * num_jobs + for i, (job_def, flag) in enumerate(zip(job_defs, job_flags)): + if flag != CreateJobFlag.REUSE and job_def.is_federated: + return False + return True + + +def is_peer_job_inheritance_matched(project: Project, workflow_definition: WorkflowDefinition, job_flags: List[int], + peer_job_flags: List[int], parent_uuid: str, parent_name: str, + participants: List) -> bool: + """Checks if the job inheritance is matched with peer workflow. + + We should make sure the federated jobs should have the same job flag + (inherit from parent or not).""" + # TODO(hangweiqiang): Fix for multi-peer + client = RpcClient.from_project_and_participant(project.name, project.token, participants[0].domain_name) + # Gets peer parent workflow + resp = client.get_workflow(parent_uuid, parent_name) + if resp.status.code != common_pb2.STATUS_SUCCESS: + emit_store('get_peer_workflow_failed', 1) + raise InternalException(resp.status.msg) + job_defs = workflow_definition.job_definitions + peer_job_defs = resp.config.job_definitions + for i, job_def in enumerate(job_defs): + if job_def.is_federated: + for j, peer_job_def in enumerate(peer_job_defs): + if job_def.name == peer_job_def.name: + if job_flags[i] != peer_job_flags[j]: + return False + return True diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/utils_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/utils_test.py new file mode 100644 index 000000000..7790cecb4 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/utils_test.py @@ -0,0 +1,111 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
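
`is_peer_job_inheritance_matched` above enforces a symmetry rule: every federated job must carry the same create-job flag on both sides, so a job reused from the parent workflow by one party is also reused by its peer. A toy version under the same assumptions as the previous sketch, mirroring the cases exercised by the tests below:

```python
NEW, REUSE = 1, 2  # assumed stand-ins for common_pb2.CreateJobFlag


def inheritance_matched(job_defs, job_flags, peer_job_defs, peer_job_flags):
    # Jobs are (name, is_federated) tuples; only federated jobs are checked,
    # and a federated job missing on the peer side passes, as in the original.
    peer_flag = {name: flag for (name, _), flag in zip(peer_job_defs, peer_job_flags)}
    return all(flag == peer_flag.get(name, flag)
               for (name, is_federated), flag in zip(job_defs, job_flags)
               if is_federated)


ours = [('train-job', True)]
peers = [('raw-data-job', False), ('train-job', True)]
assert inheritance_matched(ours, [REUSE], peers, [NEW, REUSE])
assert not inheritance_matched(ours, [NEW], peers, [NEW, REUSE])
```
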
+ +# coding: utf-8 +import unittest +from unittest.mock import patch, MagicMock +from google.protobuf.json_format import ParseDict +from testing.no_web_server_test_case import NoWebServerTestCase +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.proto.common_pb2 import CreateJobFlag +from fedlearner_webconsole.proto.service_pb2 import GetWorkflowResponse +from fedlearner_webconsole.db import db +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition, \ + WorkflowDefinition +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.workflow.utils import \ + is_peer_job_inheritance_matched, is_local + + +class UtilsTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + project = Project(id=0) + + with db.session_scope() as session: + session.add(project) + session.commit() + + def test_is_local(self): + config = { + 'job_definitions': [ + { + 'name': 'raw-data', + 'is_federated': False + }, + { + 'name': 'raw-data', + 'is_federated': True + }, + ] + } + config = ParseDict(config, WorkflowDefinition()) + self.assertFalse(is_local(config)) + job_flags = [CreateJobFlag.NEW, CreateJobFlag.NEW] + self.assertFalse(is_local(config, job_flags)) + job_flags = [CreateJobFlag.NEW, CreateJobFlag.REUSE] + self.assertTrue(is_local(config, job_flags)) + job_flags = [CreateJobFlag.REUSE, CreateJobFlag.REUSE] + self.assertTrue(is_local(config, job_flags)) + + @patch('fedlearner_webconsole.rpc.client.RpcClient' '.from_project_and_participant') + def test_is_peer_job_inheritance_matched(self, mock_rpc_client_factory): + # Mock RPC + peer_job_0 = JobDefinition(name='raw-data-job') + peer_job_1 = JobDefinition(name='train-job', is_federated=True) + peer_config = WorkflowDefinition(job_definitions=[peer_job_0, peer_job_1]) + resp = GetWorkflowResponse(config=peer_config) + mock_rpc_client = MagicMock() + mock_rpc_client.get_workflow = MagicMock(return_value=resp) + mock_rpc_client_factory.return_value = mock_rpc_client + + job_0 = JobDefinition(name='train-job', is_federated=True) + workflow_definition = WorkflowDefinition(job_definitions=[job_0]) + + participant = Participant(domain_name='fl-test.com') + + project = Project(name='test-project', token='test-token') + parent_workflow = Workflow(project=project, uuid='workflow-uuid-0000', name='workflow-0') + self.assertTrue( + is_peer_job_inheritance_matched(project=project, + workflow_definition=workflow_definition, + job_flags=[CreateJobFlag.REUSE], + peer_job_flags=[CreateJobFlag.NEW, CreateJobFlag.REUSE], + parent_uuid=parent_workflow.uuid, + parent_name=parent_workflow.name, + participants=[participant])) + mock_rpc_client.get_workflow.assert_called_once_with(parent_workflow.uuid, parent_workflow.name) + mock_rpc_client_factory.assert_called_once() + args, kwargs = mock_rpc_client_factory.call_args_list[0] + # Comparing call args one by one because message list + # can not compare directly + self.assertEqual(len(args), 3) + self.assertEqual(args[0], 'test-project') + self.assertEqual(args[1], 'test-token') + self.assertEqual(args[2], 'fl-test.com') + + self.assertFalse( + is_peer_job_inheritance_matched(project=project, + workflow_definition=workflow_definition, + job_flags=[CreateJobFlag.NEW], + peer_job_flags=[CreateJobFlag.NEW, CreateJobFlag.REUSE], + parent_uuid=parent_workflow.uuid, + parent_name=parent_workflow.name, + participants=[participant])) + + +if __name__ == '__main__': + 
unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/workflow_controller.py b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_controller.py new file mode 100644 index 000000000..371c614b6 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_controller.py @@ -0,0 +1,135 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Optional + +from sqlalchemy.orm import Session + +from fedlearner_webconsole.auth.services import UserService +from fedlearner_webconsole.job.controller import stop_job, schedule_job, \ + start_job_if_ready +from fedlearner_webconsole.notification.email import send_email +from fedlearner_webconsole.notification.template import NotificationTemplateName +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.utils import pp_datetime +from fedlearner_webconsole.utils.const import SYSTEM_WORKFLOW_CREATOR_USERNAME +from fedlearner_webconsole.utils.flask_utils import get_link +from fedlearner_webconsole.workflow.models import WorkflowState, Workflow, TransactionState +from fedlearner_webconsole.workflow.service import WorkflowService, CreateNewWorkflowParams, update_cronjob_config + + +# TODO(xiangyuxuan.prs): use system workflow template revision instead of template directly +def create_ready_workflow( + session: Session, + name: str, + config: WorkflowDefinition, + project_id: int, + uuid: str, + template_id: Optional[int] = None, + comment: Optional[str] = None, +) -> Workflow: + """Creates a workflow in the ready (configured) state; this is for internal usage, such as the dataset module. + + Args: + session: DB session of the transaction. + name: Workflow name. + config: Workflow configurations. + project_id: Which project this workflow belongs to. + uuid: Globally unique id of this workflow; in practice we use it for pairing. + template_id: Which template this workflow will use. + comment: Optional comment of the workflow.
+ """ + return WorkflowService(session).create_workflow( + name=name, + config=config, + params=CreateNewWorkflowParams(project_id=project_id, template_id=template_id), + uuid=uuid, + comment=comment, + creator_username=SYSTEM_WORKFLOW_CREATOR_USERNAME, + state=WorkflowState.READY, + target_state=WorkflowState.INVALID, + ) + + +def start_workflow_locally(session: Session, workflow: Workflow): + """Starts the workflow locally, it does not affect other participants.""" + if not workflow.can_transit_to(WorkflowState.RUNNING): + raise RuntimeError(f'invalid workflow state {workflow.state} when try to start') + is_valid, info = WorkflowService(session).validate_workflow(workflow) + if not is_valid: + job_name, validate_e = info + raise ValueError(f'Invalid Variable when try to format the job {job_name}: {str(validate_e)}') + + workflow.start_at = int(pp_datetime.now().timestamp()) + workflow.state = WorkflowState.RUNNING + # Schedules all jobs to make them executable + for job in workflow.owned_jobs: + schedule_job(session, job) + # A workaround to speed up the workflow execution: manually trigger the start of jobs + for job in workflow.owned_jobs: + start_job_if_ready(session, job) + + +def _notify_if_finished(session: Session, workflow: Workflow): + if workflow.state not in [WorkflowState.FAILED, WorkflowState.STOPPED, WorkflowState.COMPLETED]: + return + creator = UserService(session).get_user_by_username(workflow.creator) + email_address = None + if creator: + email_address = creator.email + send_email(email_address, + NotificationTemplateName.WORKFLOW_COMPLETE, + name=workflow.name, + state=workflow.state.name, + link=get_link(f'/v2/workflow-center/workflows/{workflow.id}')) + + +def stop_workflow_locally(session: Session, workflow: Workflow): + """Stops the workflow locally, it does not affect other participants.""" + if not workflow.can_transit_to(WorkflowState.STOPPED): + raise RuntimeError(f'invalid workflow state {workflow.state} when try to stop') + + workflow.stop_at = int(pp_datetime.now().timestamp()) + if workflow.is_failed(): + workflow.state = WorkflowState.FAILED + elif workflow.is_finished(): + workflow.state = WorkflowState.COMPLETED + else: + workflow.state = WorkflowState.STOPPED + try: + for job in workflow.owned_jobs: + stop_job(session, job) + _notify_if_finished(session, workflow) + except RuntimeError as e: + logging.error(f'Failed to stop workflow {workflow.id}: {str(e)}') + raise + + +def invalidate_workflow_locally(session: Session, workflow: Workflow): + """Invalidates workflow locally and stops related jobs.""" + logging.info(f'Invalidating workflow {workflow.id}') + # Stops the related cron jobs + update_cronjob_config(workflow.id, None, session) + # Marks the workflow's state + workflow.state = WorkflowState.INVALID + workflow.target_state = WorkflowState.INVALID + workflow.transaction_state = TransactionState.READY + # Stops owned jobs + for job in workflow.owned_jobs: + try: + stop_job(session, job) + except Exception as e: # pylint: disable=broad-except + logging.warning('Error while stopping job %s during invalidation: %s', job.name, repr(e)) diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/workflow_controller_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_controller_test.py new file mode 100644 index 000000000..a3e1c6807 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_controller_test.py @@ -0,0 +1,444 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from datetime import datetime, timezone +from unittest.mock import patch, Mock + +from fedlearner_webconsole.auth.models import User +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.models import JobType, Job, JobState, JobDependency +from fedlearner_webconsole.notification.template import NotificationTemplateName +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.utils.const import SYSTEM_WORKFLOW_CREATOR_USERNAME +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.workflow.models import WorkflowState, Workflow +from fedlearner_webconsole.workflow.workflow_controller import create_ready_workflow, start_workflow_locally, \ + stop_workflow_locally, \ + invalidate_workflow_locally, _notify_if_finished +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from testing.no_web_server_test_case import NoWebServerTestCase + + +class CreateReadyWorkflowTest(NoWebServerTestCase): + + def test_create_ready_workflow_with_template(self): + with db.session_scope() as session: + workflow_template = WorkflowTemplate( + id=123, + name='t123', + group_alias='test group', + ) + workflow_template.set_config( + WorkflowDefinition( + group_alias='test group', + variables=[ + Variable(name='var'), + ], + )) + session.add(workflow_template) + session.commit() + + # Changes one variable + config = WorkflowDefinition() + config.CopyFrom(workflow_template.get_config()) + config.variables[0].value = 'new_value' + workflow = create_ready_workflow( + session, + name='workflow1', + config=config, + project_id=2333, + uuid='uuid', + template_id=workflow_template.id, + ) + session.commit() + workflow_id = workflow.id + with db.session_scope() as session: + workflow: Workflow = session.query(Workflow).get(workflow_id) + self.assertPartiallyEqual( + to_dict(workflow.to_proto()), + { + 'id': workflow_id, + 'name': 'workflow1', + 'comment': '', + 'uuid': 'uuid', + 'project_id': 2333, + 'creator': SYSTEM_WORKFLOW_CREATOR_USERNAME, + 'state': 'READY_TO_RUN', + 'forkable': False, + 'forked_from': 0, + 'template_id': 123, + 'template_revision_id': 0, + 'template_info': { + 'id': 123, + 'is_modified': False, + 'name': 't123', + 'revision_index': 0, + }, + 'config': { + 'group_alias': + 'test group', + 'job_definitions': [], + 'variables': [{ + 'access_mode': 'UNSPECIFIED', + 'name': 'var', + 'tag': '', + 'value': 'new_value', + 'value_type': 'STRING', + 'widget_schema': '', + }], + }, + 'is_local': True, + 'favour': False, + 'job_ids': [], + 'jobs': [], + 'metric_is_public': False, + 'create_job_flags': [], + 'peer_create_job_flags': [], + 'cron_config': '', + 'editor_info': { + 'yaml_editor_infos': {}, + }, + }, + ignore_fields=['created_at', 'updated_at', 'start_at', 'stop_at'], + ) + + def test_create_ready_workflow_without_template(self): + with 
db.session_scope() as session:
+            # Changes one variable
+            config = WorkflowDefinition(
+                group_alias='test group',
+                variables=[
+                    Variable(name='var', value='cofff'),
+                ],
+            )
+            workflow = create_ready_workflow(
+                session,
+                name='workflow222',
+                config=config,
+                project_id=23334,
+                uuid='uuid222',
+            )
+            session.commit()
+            workflow_id = workflow.id
+        with db.session_scope() as session:
+            workflow: Workflow = session.query(Workflow).get(workflow_id)
+            self.maxDiff = None
+            self.assertPartiallyEqual(
+                to_dict(workflow.to_proto()),
+                {
+                    'id': workflow_id,
+                    'name': 'workflow222',
+                    'comment': '',
+                    'uuid': 'uuid222',
+                    'project_id': 23334,
+                    'creator': SYSTEM_WORKFLOW_CREATOR_USERNAME,
+                    'state': 'READY_TO_RUN',
+                    'forkable': False,
+                    'forked_from': 0,
+                    'template_id': 0,
+                    'template_revision_id': 0,
+                    'template_info': {
+                        'id': 0,
+                        'is_modified': True,
+                        'name': '',
+                        'revision_index': 0,
+                    },
+                    'config': {
+                        'group_alias':
+                            'test group',
+                        'job_definitions': [],
+                        'variables': [{
+                            'access_mode': 'UNSPECIFIED',
+                            'name': 'var',
+                            'tag': '',
+                            'value': 'cofff',
+                            'value_type': 'STRING',
+                            'widget_schema': '',
+                        }],
+                    },
+                    'is_local': True,
+                    'favour': False,
+                    'job_ids': [],
+                    'jobs': [],
+                    'metric_is_public': False,
+                    'create_job_flags': [],
+                    'peer_create_job_flags': [],
+                    'cron_config': '',
+                    'editor_info': {
+                        'yaml_editor_infos': {},
+                    },
+                },
+                ignore_fields=['created_at', 'updated_at', 'start_at', 'stop_at'],
+            )
+
+
+class StartWorkflowLocallyTest(NoWebServerTestCase):
+
+    def test_start_workflow_locally_invalid_state(self):
+        running_workflow = Workflow(id=1, state=WorkflowState.RUNNING)
+        with db.session_scope() as session:
+            # A RUNNING workflow cannot transit to RUNNING again, so starting it must fail.
+            self.assertRaisesRegex(RuntimeError, 'invalid workflow state WorkflowState.RUNNING when try to start',
+                                   lambda: start_workflow_locally(session, running_workflow))
+
+    @patch('fedlearner_webconsole.workflow.workflow_controller.WorkflowService.validate_workflow')
+    def test_start_workflow_locally_invalid_template(self, mock_validate_workflow: Mock):
+        mock_validate_workflow.return_value = False, ('test_job', 'fake error')
+        workflow = Workflow(id=1, state=WorkflowState.READY)
+        with db.session_scope() as session:
+            self.assertRaisesRegex(ValueError, 'Invalid Variable when try to format the job test_job: fake error',
+                                   lambda: start_workflow_locally(session, workflow))
+
+    @patch('fedlearner_webconsole.workflow.workflow_controller.pp_datetime.now')
+    @patch('fedlearner_webconsole.job.controller.YamlFormatterService.generate_job_run_yaml')
+    @patch('fedlearner_webconsole.workflow.workflow_controller.WorkflowService.validate_workflow')
+    def test_start_workflow_locally_successfully(self, mock_validate_workflow: Mock, mock_gen_yaml: Mock,
+                                                 mock_now: Mock):
+        mock_validate_workflow.return_value = True, None
+        now_dt = datetime(2021, 9, 1, 10, 20, tzinfo=timezone.utc)
+        mock_now.return_value = now_dt
+
+        workflow_id = 123
+        with db.session_scope() as session:
+            workflow = Workflow(id=workflow_id, state=WorkflowState.READY)
+            job1 = Job(id=1,
+                       name='test job 1',
+                       job_type=JobType.RAW_DATA,
+                       state=JobState.NEW,
+                       workflow_id=workflow_id,
+                       project_id=1)
+            job2 = Job(id=2,
+                       name='test job 2',
+                       job_type=JobType.RAW_DATA,
+                       state=JobState.NEW,
+                       workflow_id=workflow_id,
+                       project_id=1)
+            job_def = JobDependency(src_job_id=2, dst_job_id=1, dep_index=0)
+            config = JobDefinition(is_federated=False)
+            job1.set_config(config)
+            job2.set_config(config)
+            session.add_all([workflow, job1, job2, job_def])
+            session.commit()
+            mock_gen_yaml.return_value = {}
+            start_workflow_locally(session, workflow)
+            session.commit()
+        with db.session_scope() as session:
+            workflow = 
session.query(Workflow).get(workflow_id) + self.assertEqual(workflow.start_at, now_dt.timestamp()) + self.assertEqual(workflow.state, WorkflowState.RUNNING) + job1 = session.query(Job).get(1) + job2 = session.query(Job).get(2) + self.assertEqual(job1.state, JobState.WAITING) + self.assertEqual(job2.state, JobState.STARTED) + mock_gen_yaml.assert_called_once() + + +class StopWorkflowLocallyTest(NoWebServerTestCase): + + def test_stop_workflow_locally_invalid_state(self): + with db.session_scope() as session: + new_workflow = Workflow(id=1, state=WorkflowState.NEW) + self.assertRaisesRegex(RuntimeError, 'invalid workflow state WorkflowState.NEW when try to stop', + lambda: stop_workflow_locally(session, new_workflow)) + + @patch('fedlearner_webconsole.workflow.workflow_controller.pp_datetime.now') + @patch('fedlearner_webconsole.workflow.workflow_controller.stop_job') + def test_stop_workflow_locally_successfully(self, mock_stop_job: Mock, mock_now: Mock): + now_dt = datetime(2021, 9, 1, 10, 20, tzinfo=timezone.utc) + mock_now.return_value = now_dt + + workflow_id = 123 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, state=WorkflowState.RUNNING) + job1 = Job(id=1, + name='test job 1', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=workflow_id, + project_id=1) + job2 = Job(id=2, + name='test job 2', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=workflow_id, + project_id=1) + session.add_all([workflow, job1, job2]) + session.commit() + stop_workflow_locally(session, workflow) + session.commit() + # Stopped 2 jobs + self.assertEqual(mock_stop_job.call_count, 2) + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertEqual(workflow.stop_at, now_dt.timestamp()) + self.assertEqual(workflow.state, WorkflowState.STOPPED) + + def test_stop_ready_workflow(self): + with db.session_scope() as session: + ready_workflow = Workflow(id=1, state=WorkflowState.READY) + job1 = Job(id=1, + name='test job 1', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=1, + project_id=1) + job2 = Job(id=2, + name='test job 2', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=1, + project_id=1) + session.add_all([ready_workflow, job1, job2]) + session.commit() + stop_workflow_locally(session, ready_workflow) + self.assertEqual(ready_workflow.state, WorkflowState.STOPPED) + + @patch('fedlearner_webconsole.workflow.workflow_controller.pp_datetime.now') + @patch('fedlearner_webconsole.workflow.workflow_controller.stop_job') + def test_stop_workflow_to_completed(self, mock_stop_job: Mock, mock_now: Mock): + now_dt = datetime(2021, 9, 1, 10, 20, tzinfo=timezone.utc) + mock_now.return_value = now_dt + workflow_id = 123 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, state=WorkflowState.RUNNING) + job1 = Job(id=1, + name='test job 1', + job_type=JobType.RAW_DATA, + state=JobState.COMPLETED, + workflow_id=workflow_id, + project_id=1) + job2 = Job(id=2, + name='test job 2', + job_type=JobType.RAW_DATA, + state=JobState.COMPLETED, + workflow_id=workflow_id, + project_id=1) + session.add_all([workflow, job1, job2]) + session.commit() + stop_workflow_locally(session, workflow) + session.commit() + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertEqual(workflow.state, WorkflowState.COMPLETED) + + @patch('fedlearner_webconsole.workflow.workflow_controller.stop_job') + def test_stop_workflow_to_failed(self, 
mock_stop_job: Mock): + workflow_id = 123 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, state=WorkflowState.RUNNING) + job1 = Job(id=1, + name='test job 1', + job_type=JobType.RAW_DATA, + state=JobState.COMPLETED, + workflow_id=workflow_id, + project_id=1) + job2 = Job(id=2, + name='test job 2', + job_type=JobType.RAW_DATA, + state=JobState.FAILED, + workflow_id=workflow_id, + project_id=1) + session.add_all([workflow, job1, job2]) + session.commit() + stop_workflow_locally(session, workflow) + session.commit() + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertEqual(workflow.state, WorkflowState.FAILED) + + @patch('fedlearner_webconsole.workflow.workflow_controller.stop_job') + def test_stop_workflow_locally_failed(self, mock_stop_job: Mock): + mock_stop_job.side_effect = RuntimeError('fake error') + + workflow_id = 123 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, state=WorkflowState.RUNNING) + job1 = Job(id=1, + name='test job 1', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=workflow_id, + project_id=1) + session.add_all([workflow, job1]) + session.commit() + # Simulates the normal action by following a session commit + with self.assertRaises(RuntimeError): + stop_workflow_locally(session, workflow) + session.commit() + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertIsNone(workflow.stop_at) + self.assertEqual(workflow.state, WorkflowState.RUNNING) + + +class InvalidateWorkflowLocallyTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.workflow.workflow_controller.stop_job') + @patch('fedlearner_webconsole.workflow.workflow_controller.update_cronjob_config') + def test_invalidate_workflow_locally(self, mock_update_cronjob_config: Mock, mock_stop_job: Mock): + workflow_id = 6 + project_id = 99 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, project_id=project_id, state=WorkflowState.RUNNING) + job1 = Job(id=1, + name='test job 1', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=workflow_id, + project_id=project_id) + job2 = Job(id=2, + name='test job 2', + job_type=JobType.RAW_DATA, + state=JobState.NEW, + workflow_id=workflow_id, + project_id=project_id) + session.add_all([workflow, job1, job2]) + session.commit() + invalidate_workflow_locally(session, workflow) + session.commit() + mock_update_cronjob_config.assert_called_with(workflow_id, None, session) + self.assertEqual(mock_stop_job.call_count, 2) + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertEqual(workflow.state, WorkflowState.INVALID) + self.assertEqual(workflow.target_state, WorkflowState.INVALID) + + +@patch('fedlearner_webconsole.workflow.workflow_controller.send_email') +class NotifyIfFinishedTest(NoWebServerTestCase): + + def test_running_workflow(self, mock_send_email: Mock): + with db.session_scope() as session: + workflow = Workflow(state=WorkflowState.RUNNING) + _notify_if_finished(session, workflow) + mock_send_email.assert_not_called() + + def test_notify(self, mock_send_email: Mock): + with db.session_scope() as session: + user = User(username='test_user', email='a@b.com') + session.add(user) + session.commit() + with db.session_scope() as session: + workflow = Workflow(id=1234, name='test-workflow', state=WorkflowState.FAILED, creator='test_user') + _notify_if_finished(session, workflow) + 
mock_send_email.assert_called_with('a@b.com',
+                                           NotificationTemplateName.WORKFLOW_COMPLETE,
+                                           name='test-workflow',
+                                           state='FAILED',
+                                           link='http://localhost:666/v2/workflow-center/workflows/1234')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/workflow_job_controller.py b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_job_controller.py
new file mode 100644
index 000000000..177cbff65
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_job_controller.py
@@ -0,0 +1,115 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+from typing import List
+
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.rpc.client import RpcClient
+from fedlearner_webconsole.two_pc.transaction_manager import TransactionManager
+from fedlearner_webconsole.proto.two_pc_pb2 import (TwoPcType, TransactionData, TransitWorkflowStateData)
+from fedlearner_webconsole.exceptions import InternalException, InvalidArgumentException
+from fedlearner_webconsole.participant.services import ParticipantService
+from fedlearner_webconsole.workflow.models import Workflow, WorkflowState
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.workflow.workflow_controller import start_workflow_locally, stop_workflow_locally, \
+    invalidate_workflow_locally
+
+
+def _change_workflow_status(target_state: WorkflowState, project: Project, uuid: str, participants_domain_name: List):
+    assert target_state in (WorkflowState.RUNNING, WorkflowState.STOPPED)
+    tm = TransactionManager(project_name=project.name,
+                            project_token=project.token,
+                            participants=participants_domain_name,
+                            two_pc_type=TwoPcType.CONTROL_WORKFLOW_STATE)
+    data = TransitWorkflowStateData(target_state=target_state.name, workflow_uuid=uuid)
+    succeeded, message = tm.run(data=TransactionData(transit_workflow_state_data=data))
+    if not succeeded:
+        raise InternalException(f'error when converting workflow state by 2PC: {message}')
+
+
+def _start_2pc_workflow(project: Project, uuid: str, participants_domain_name: List):
+    return _change_workflow_status(WorkflowState.RUNNING, project, uuid, participants_domain_name)
+
+
+def _stop_2pc_workflow(project: Project, uuid: str, participants_domain_name: List):
+    return _change_workflow_status(WorkflowState.STOPPED, project, uuid, participants_domain_name)
+
+
+# start workflow main entry
+def start_workflow(workflow_id: int):
+    # TODO(liuhehan): add uuid as entrypoint
+    with db.session_scope() as session:
+        workflow = session.query(Workflow).filter_by(id=workflow_id).first()
+        if workflow.is_local():
+            # local entry
+            try:
+                # TODO(linfan.fine): gets rid of the session, the controller should be in a separate session
+                start_workflow_locally(session, workflow)
+                session.commit()
+                return
+            except RuntimeError as e:
+                raise InternalException(e) from e
+            except ValueError 
as e: + raise InvalidArgumentException(str(e)) from e + participants = ParticipantService(session).get_platform_participants_by_project(workflow.project.id) + project = session.query(Project).filter_by(id=workflow.project.id).first() + participants_domain_name = [participant.domain_name for participant in participants] + # new version fed entry + _start_2pc_workflow(project, workflow.uuid, participants_domain_name) + + +# stop workflow main entry +def stop_workflow(workflow_id: int): + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + if workflow.is_local(): + # local entry + try: + # TODO(linfan.fine): gets rid of the session, the controller should be in a separate session + stop_workflow_locally(session, workflow) + session.commit() + return + except RuntimeError as e: + raise InternalException(e) from e + participants = ParticipantService(session).get_platform_participants_by_project(workflow.project.id) + project = session.query(Project).filter_by(id=workflow.project.id).first() + participants_domain_name = [participant.domain_name for participant in participants] + # new version fed entry + _stop_2pc_workflow(project, workflow.uuid, participants_domain_name) + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + + +def invalidate_workflow_job(session: Session, workflow: Workflow): + """Invalidates workflow job across all participants.""" + invalidate_workflow_locally(session, workflow) + if workflow.is_local(): + # No actions needed + return + + service = ParticipantService(session) + participants = service.get_platform_participants_by_project(workflow.project.id) + # Invalidates peer's workflow + for participant in participants: + client = RpcClient.from_project_and_participant(workflow.project.name, workflow.project.token, + participant.domain_name) + resp = client.invalidate_workflow(workflow.uuid) + if not resp.succeeded: + # Ignores those errors as it will be handled by their workflow schedulers + logging.warning( + f'failed to invalidate peer workflow, workflow id: {workflow.id}, participant name: {participant.name}') diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/workflow_job_controller_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_job_controller_test.py new file mode 100644 index 000000000..affaf02bd --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_job_controller_test.py @@ -0,0 +1,73 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
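+#
+# invalidate_workflow_job is expected to skip the peer RPC entirely for local
+# workflows and to invalidate the workflow on every platform participant
+# otherwise; the two tests below cover exactly these two paths.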
+# + +import unittest +from unittest.mock import patch, Mock + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import ProjectParticipant, Participant +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition, WorkflowDefinition +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.workflow.workflow_job_controller import invalidate_workflow_job +from testing.no_web_server_test_case import NoWebServerTestCase + + +class InvalidateWorkflowJobTest(NoWebServerTestCase): + + @patch('fedlearner_webconsole.rpc.client.RpcClient.invalidate_workflow') + def test_invalidate_workflow_job_local(self, mock_invalidate_workflow: Mock): + workflow_id = 777 + with db.session_scope() as session: + workflow = Workflow(id=workflow_id, project_id=1, state=WorkflowState.RUNNING) + workflow.set_config( + WorkflowDefinition(job_definitions=[JobDefinition(name='raw-data', is_federated=False)])) + session.add(workflow) + session.commit() + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + invalidate_workflow_job(session, workflow) + session.commit() + mock_invalidate_workflow.assert_not_called() + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertTrue(workflow.is_invalid()) + + @patch('fedlearner_webconsole.rpc.client.RpcClient.invalidate_workflow') + def test_invalidate_workflow_job_across_participants(self, mock_invalidate_workflow: Mock): + workflow_id = 6789 + with db.session_scope() as session: + project = Project(id=1) + participant = Participant(id=123, name='testp', domain_name='fl-test.com') + project_participant = ProjectParticipant(project_id=project.id, participant_id=participant.id) + session.add_all([project, participant, project_participant]) + + workflow = Workflow(id=workflow_id, project_id=1, state=WorkflowState.RUNNING, uuid='test_uuid') + workflow.set_config( + WorkflowDefinition(job_definitions=[JobDefinition(name='data-join', is_federated=True)])) + session.add(workflow) + session.commit() + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + invalidate_workflow_job(session, workflow) + session.commit() + mock_invalidate_workflow.assert_called_once_with('test_uuid') + with db.session_scope() as session: + workflow = session.query(Workflow).get(workflow_id) + self.assertTrue(workflow.is_invalid()) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/workflow_scheduler.py b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_scheduler.py new file mode 100644 index 000000000..cdc0b94e3 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_scheduler.py @@ -0,0 +1,95 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
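+#
+# ScheduleWorkflowRunner below auto-starts READY workflows created from system
+# preset templates, and auto-stops RUNNING workflows for which
+# WorkflowService.should_auto_stop returns True.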
+# + +import logging +import traceback +from typing import Tuple + +from sqlalchemy.orm import load_only, joinedload + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.db import db +from fedlearner_webconsole.job.models import Job +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, WorkflowSchedulerOutput +from fedlearner_webconsole.utils import const +from fedlearner_webconsole.workflow.service import WorkflowService +from fedlearner_webconsole.workflow.workflow_job_controller import start_workflow, stop_workflow +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from fedlearner_webconsole.composer.interface import IRunnerV2 + + +class ScheduleWorkflowRunner(IRunnerV2): + + def auto_run_workflows(self) -> WorkflowSchedulerOutput: + with db.session_scope() as session: + # Workflows (with system preset template) whose state is ready will auto run. + query = session.query(Workflow.id).join(Workflow.template).filter( + Workflow.state == WorkflowState.READY, WorkflowTemplate.name.in_(const.SYS_PRESET_TEMPLATE)) + workflow_ids = [result[0] for result in query.all()] + output = WorkflowSchedulerOutput() + for workflow_id in workflow_ids: + execution = output.executions.add() + execution.id = workflow_id + try: + logging.info(f'[WorkflowScheduler] auto start workflow {workflow_id}') + start_workflow(workflow_id) + except Exception as e: # pylint: disable=broad-except + error = str(e) + logging.warning(f'[WorkflowScheduler] auto start workflow {workflow_id} with error {error}') + execution.error_message = error + return output + + def auto_stop_workflows(self) -> WorkflowSchedulerOutput: + with db.session_scope() as session: + # only query fields necessary for is_finished and is_failed. + q = session.query(Workflow).options( + load_only(Workflow.id, Workflow.name, Workflow.target_state, Workflow.state), + joinedload(Workflow.owned_jobs).load_only(Job.state, + Job.is_disabled)).filter_by(state=WorkflowState.RUNNING) + workflow_ids = [w.id for w in q.all() if WorkflowService(session).should_auto_stop(w)] + output = WorkflowSchedulerOutput() + for workflow_id in workflow_ids: + execution = output.executions.add() + execution.id = workflow_id + try: + stop_workflow(workflow_id) + except Exception as e: # pylint: disable=broad-except + error = f'Error while auto-stop workflow {workflow_id}:\n{traceback.format_exc()}' + logging.warning(error) + execution.error_message = error + return output + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + output = RunnerOutput() + try: + output.workflow_scheduler_output.MergeFrom(self.auto_stop_workflows()) + except Exception as e: # pylint: disable=broad-except + error_message = str(e) + output.error_message = error_message + logging.warning(f'[SchedulerWorkflowRunner] auto stop workflow with error {error_message}') + + try: + # TODO(xiangyuxuan.prs): remove in future when model module don't need config workflow. 
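+            # Both phases append executions to the same workflow_scheduler_output;
+            # their error messages are concatenated into output.error_message, and
+            # any non-empty error message below makes the runner report FAILED.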
+ output.workflow_scheduler_output.MergeFrom(self.auto_run_workflows()) + except Exception as e: # pylint: disable=broad-except + error_message = str(e) + output.error_message = f'{output.error_message} {error_message}' + logging.warning(f'[SchedulerWorkflowRunner] auto run workflow with error {error_message}') + + if output.error_message: + return RunnerStatus.FAILED, output + return RunnerStatus.DONE, output diff --git a/web_console_v2/api/fedlearner_webconsole/workflow/workflow_scheduler_test.py b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_scheduler_test.py new file mode 100644 index 000000000..80e56cf3b --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow/workflow_scheduler_test.py @@ -0,0 +1,149 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from unittest.mock import patch + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.job.models import Job, JobState, JobType +from fedlearner_webconsole.project.models import Project +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput, WorkflowSchedulerOutput, RunnerInput +from fedlearner_webconsole.workflow.workflow_scheduler import ScheduleWorkflowRunner +from fedlearner_webconsole.db import db +from fedlearner_webconsole.initial_db import _insert_schedule_workflow_item +from fedlearner_webconsole.composer.models import SchedulerItem, RunnerStatus, ItemStatus +from fedlearner_webconsole.workflow.models import Workflow, WorkflowState +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate +from testing.no_web_server_test_case import NoWebServerTestCase + + +class WorkflowSchedulerTest(NoWebServerTestCase): + + def test_get_workflows_need_auto_run(self): + with db.session_scope() as session: + template_1 = WorkflowTemplate(name='local-test', group_alias='local-test', config=b'') + template_2 = WorkflowTemplate(name='sys-preset-nn-model', group_alias='nn', config=b'') + session.add_all([template_1, template_2]) + session.flush() + workflow_1 = Workflow(name='w1') + workflow_2 = Workflow(name='w2', + template_id=template_1.id, + state=WorkflowState.READY, + target_state=WorkflowState.INVALID) + workflow_3 = Workflow(name='w3', template_id=template_2.id) + workflow_5 = Workflow(name='w5', + template_id=template_2.id, + state=WorkflowState.READY, + target_state=WorkflowState.INVALID) + workflow_6 = Workflow(name='w6', + template_id=template_2.id, + state=WorkflowState.READY, + target_state=WorkflowState.INVALID) + session.add_all([workflow_1, workflow_2, workflow_3, workflow_5, workflow_6]) + session.commit() + + def fake_start_workflow(workflow_id): + if workflow_id == workflow_6.id: + raise RuntimeError('error workflow_6') + + with patch('fedlearner_webconsole.workflow.workflow_scheduler.start_workflow') as mock_start_workflow: + mock_start_workflow.side_effect = fake_start_workflow + # workflow_5 and workflow_6 will be auto-run + runner = 
ScheduleWorkflowRunner() + status, output = runner.run(RunnerContext(0, RunnerInput())) + self.assertEqual(status, RunnerStatus.DONE) + self.assertEqual( + output, + RunnerOutput(workflow_scheduler_output=WorkflowSchedulerOutput(executions=[ + WorkflowSchedulerOutput.WorkflowExecution(id=workflow_5.id), + WorkflowSchedulerOutput.WorkflowExecution(id=workflow_6.id, error_message='error workflow_6'), + ]))) + + def test_insert_schedule_workflow_item(self): + with db.session_scope() as session: + item = SchedulerItem(name='workflow_scheduler', cron_config='* * * * * */30', status=ItemStatus.ON.value) + session.add(item) + session.commit() + _insert_schedule_workflow_item(session) + session.commit() + with db.session_scope() as session: + old_item = session.query(SchedulerItem).filter_by(name='workflow_scheduler').first() + self.assertEqual(old_item.status, ItemStatus.OFF.value) + new_item = session.query(SchedulerItem).filter_by(name='workflow_scheduler_v2').first() + self.assertEqual(new_item.status, ItemStatus.ON.value) + + @patch('fedlearner_webconsole.workflow.workflow_scheduler.Workflow.is_local') + def test_auto_stop(self, mock_is_local): + mock_is_local.return_value = True + with db.session_scope() as session: + session.add(Project(name='test')) + session.add( + Job(name='testtes', state=JobState.COMPLETED, job_type=JobType.DATA_JOIN, workflow_id=30, project_id=1)) + + session.add( + Job(name='testtest', state=JobState.COMPLETED, job_type=JobType.DATA_JOIN, workflow_id=30, + project_id=1)) + session.add(Workflow(name='test_complete', id=30, project_id=1, state=WorkflowState.RUNNING)) + session.commit() + output = ScheduleWorkflowRunner().auto_stop_workflows() + self.assertEqual(output, WorkflowSchedulerOutput(executions=[WorkflowSchedulerOutput.WorkflowExecution(id=30)])) + with db.session_scope() as session: + workflow = session.query(Workflow).get(30) + self.assertEqual(workflow.state, WorkflowState.COMPLETED) + with db.session_scope() as session: + session.add( + Job(name='testtes_failed', + state=JobState.FAILED, + job_type=JobType.DATA_JOIN, + workflow_id=31, + project_id=1)) + session.add(Workflow(name='test_failed', id=31, project_id=1, state=WorkflowState.RUNNING)) + session.commit() + output = ScheduleWorkflowRunner().auto_stop_workflows() + self.assertEqual(output, WorkflowSchedulerOutput(executions=[WorkflowSchedulerOutput.WorkflowExecution(id=31)])) + with db.session_scope() as session: + workflow = session.query(Workflow).get(31) + self.assertEqual(workflow.state, WorkflowState.FAILED) + + @patch('fedlearner_webconsole.workflow.workflow_scheduler.ScheduleWorkflowRunner.auto_run_workflows') + @patch('fedlearner_webconsole.workflow.workflow_scheduler.ScheduleWorkflowRunner.auto_stop_workflows') + def test_run_workflow_scheduler(self, mock_auto_stop, mock_auto_run): + # test all succeeded + mock_auto_run.return_value = WorkflowSchedulerOutput( + executions=[WorkflowSchedulerOutput.WorkflowExecution(id=1)]) + mock_auto_stop.return_value = WorkflowSchedulerOutput( + executions=[WorkflowSchedulerOutput.WorkflowExecution(id=2)]) + expected_result = (RunnerStatus.DONE, + RunnerOutput(workflow_scheduler_output=WorkflowSchedulerOutput(executions=[ + WorkflowSchedulerOutput.WorkflowExecution(id=2), + WorkflowSchedulerOutput.WorkflowExecution(id=1) + ]))) + self.assertEqual(ScheduleWorkflowRunner().run(RunnerContext(1, RunnerInput())), expected_result) + + # test auto run failed + mock_auto_stop.side_effect = Exception('Test') + expected_result = (RunnerStatus.FAILED, + 
RunnerOutput(error_message='Test', + workflow_scheduler_output=WorkflowSchedulerOutput( + executions=[WorkflowSchedulerOutput.WorkflowExecution(id=1)]))) + self.assertEqual(ScheduleWorkflowRunner().run(RunnerContext(1, RunnerInput())), expected_result) + # test all failed + mock_auto_run.side_effect = Exception('Test') + expected_result = (RunnerStatus.FAILED, RunnerOutput(error_message='Test Test')) + self.assertEqual(ScheduleWorkflowRunner().run(RunnerContext(1, RunnerInput())), expected_result) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/BUILD.bazel b/web_console_v2/api/fedlearner_webconsole/workflow_template/BUILD.bazel new file mode 100644 index 000000000..e7a98a55e --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/BUILD.bazel @@ -0,0 +1,196 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "models_lib", + srcs = ["models.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:mixins_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "models_lib_test", + size = "small", + srcs = [ + "models_test.py", + ], + imports = ["../.."], + main = "models_test.py", + deps = [ + ":models_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_datetime_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + ], +) + +py_library( + name = "service_lib", + srcs = ["service.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":slots_formatter_lib", + ":template_validator_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:filtering_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "service_lib_test", + size = "small", + srcs = [ + "service_test.py", + ], + imports = ["../.."], + main = "service_test.py", + deps = [ + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "slots_formatter_lib", + srcs = ["slots_formatter.py"], + imports = ["../.."], + deps = [ + ":template_validator_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_flatten_dict_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) + 
+py_test( + name = "slots_formater_lib_test", + size = "small", + srcs = [ + "slots_formater_test.py", + ], + imports = ["../.."], + main = "slots_formater_test.py", + deps = [ + ":slots_formatter_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "template_validator_lib", + srcs = ["template_validaor.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/job:yaml_formatter_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_flatten_dict_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_yaml_lib", + ], +) + +py_test( + name = "template_validator_lib_test", + size = "small", + srcs = [ + "template_validator_test.py", + ], + imports = ["../.."], + main = "template_validator_test.py", + deps = [ + ":template_validator_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "//web_console_v2/api/testing/workflow_template", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "utils_lib", + srcs = ["utils.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_test( + name = "utils_lib_test", + size = "small", + srcs = [ + "utils_test.py", + ], + imports = ["../.."], + main = "utils_test.py", + deps = [ + ":utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + ], +) + +py_library( + name = "apis_lib", + srcs = ["apis.py"], + imports = ["../.."], + deps = [ + ":models_lib", + ":service_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:exceptions_lib", + "//web_console_v2/api/fedlearner_webconsole/audit:decorators_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:third_party_sso_lib", + "//web_console_v2/api/fedlearner_webconsole/rpc/v2:project_service_client_lib", + "//web_console_v2/api/fedlearner_webconsole/swagger:models_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:flask_utils_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:paginate_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:proto_lib", + "//web_console_v2/api/fedlearner_webconsole/utils/decorators:decorators_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_flask_restful//:pkg", + "@common_marshmallow//:pkg", + "@common_sqlalchemy//:pkg", + ], +) + +py_test( + name = "apis_lib_test", + size = "medium", + srcs = [ + "apis_test.py", + ], + imports = ["../.."], + main = "apis_test.py", + deps = [ + ":apis_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/testing:common_lib", + "@com_google_protobuf//:protobuf_python", + ], +) diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/apis.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/apis.py index 791f2ba89..36f9cdc6e 100644 --- a/web_console_v2/api/fedlearner_webconsole/workflow_template/apis.py +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/apis.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner 
Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,281 +13,504 @@ # limitations under the License. # coding: utf-8 -import io -import json -import re from http import HTTPStatus -import logging -import tarfile -from flask import send_file -from flask_restful import Resource, reqparse, request -from google.protobuf.json_format import ParseDict, ParseError - -from fedlearner_webconsole.utils.decorators import jwt_required -from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, \ +import grpc +from flask_restful import Resource +from sqlalchemy.orm import undefer +from marshmallow import fields, Schema, post_load +from fedlearner_webconsole.audit.decorators import emits_event +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.proto.workflow_template_pb2 import WorkflowTemplateRevisionJson +from fedlearner_webconsole.rpc.v2.project_service_client import ProjectServiceClient +from fedlearner_webconsole.swagger.models import schema_manager +from fedlearner_webconsole.utils.decorators.pp_flask import input_validator, use_args, use_kwargs +from fedlearner_webconsole.auth.third_party_sso import credentials_required +from fedlearner_webconsole.utils.flask_utils import download_json, make_flask_response, get_current_user, FilterExpField +from fedlearner_webconsole.utils.paginate import paginate +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateRevision, \ WorkflowTemplateKind -from fedlearner_webconsole.proto import workflow_definition_pb2 +from fedlearner_webconsole.workflow_template.service import (WorkflowTemplateService, _format_template_with_yaml_editor, + _check_config_and_editor_info, + WorkflowTemplateRevisionService) from fedlearner_webconsole.db import db -from fedlearner_webconsole.exceptions import (NotFoundException, - InvalidArgumentException, - ResourceConflictException) -from fedlearner_webconsole.workflow_template.slots_formatter import \ - generate_yaml_template -from fedlearner_webconsole.workflow_template.template_validaor\ - import check_workflow_definition - - -def _classify_variable(variable): - if variable.value_type == 'CODE': - try: - json.loads(variable.value) - except json.JSONDecodeError as e: - raise InvalidArgumentException(str(e)) - return variable - - -def dict_to_workflow_definition(config): - try: - template_proto = ParseDict( - config, workflow_definition_pb2.WorkflowDefinition()) - for variable in template_proto.variables: - _classify_variable(variable) - for job in template_proto.job_definitions: - for variable in job.variables: - _classify_variable(variable) - except ParseError as e: - raise InvalidArgumentException(details={'config': str(e)}) - return template_proto - - -def dict_to_editor_info(editor_info): - try: - editor_info_proto = ParseDict( - editor_info, workflow_definition_pb2.WorkflowTemplateEditorInfo()) - except ParseError as e: - raise InvalidArgumentException(details={'editor_info': str(e)}) - return editor_info_proto - - -def _dic_without_key(d, keys): - result = dict(d) - for key in keys: - del result[key] - return result +from fedlearner_webconsole.exceptions import NotFoundException, InvalidArgumentException, ResourceConflictException, \ + NetworkException +from 
fedlearner_webconsole.proto.workflow_template_pb2 import WorkflowTemplateJson + + +class PostWorkflowTemplatesParams(Schema): + config = fields.Dict(required=True) + editor_info = fields.Dict(required=False, load_default={}) + name = fields.String(required=True) + comment = fields.String(required=False, load_default=None) + kind = fields.Integer(required=False, load_default=0) + + @post_load() + def make(self, data, **kwargs): + data['config'], data['editor_info'] = _check_config_and_editor_info(data['config'], data['editor_info']) + return data + + +class PutWorkflowTemplatesParams(Schema): + config = fields.Dict(required=True) + editor_info = fields.Dict(required=False, load_default={}) + name = fields.String(required=True) + comment = fields.String(required=False, load_default=None) + + @post_load() + def make(self, data, **kwargs): + data['config'], data['editor_info'] = _check_config_and_editor_info(data['config'], data['editor_info']) + return data + + +class GetWorkflowTemplatesParams(Schema): + filter = FilterExpField(required=False, load_default=None) + page = fields.Integer(required=False, load_default=None) + page_size = fields.Integer(required=False, load_default=None) class WorkflowTemplatesApi(Resource): - @jwt_required() - def get(self): - preset_datajoin = request.args.get('from', '') == 'preset_datajoin' - templates = WorkflowTemplate.query - if 'group_alias' in request.args: - templates = templates.filter_by( - group_alias=request.args['group_alias']) - if 'is_left' in request.args: - is_left = request.args.get(key='is_left', type=int) - if is_left is None: - raise InvalidArgumentException('is_left must be 0 or 1') - templates = templates.filter_by(is_left=is_left) - if preset_datajoin: - templates = templates.filter_by( - kind=WorkflowTemplateKind.PRESET_DATAJOIN.value) - # remove config from dicts to reduce the size of the list - return { - 'data': [ - _dic_without_key(t.to_dict(), ['config', 'editor_info']) - for t in templates.all() - ] - }, HTTPStatus.OK - - @jwt_required() - def post(self): - parser = reqparse.RequestParser() - parser.add_argument('name', required=True, help='name is empty') - parser.add_argument('comment') - parser.add_argument('config', - type=dict, - required=True, - help='config is empty') - parser.add_argument('editor_info', type=dict, default={}) - parser.add_argument('kind', type=int, default=0) - data = parser.parse_args() - name = data['name'] - comment = data['comment'] - config = data['config'] - editor_info = data['editor_info'] - kind = data['kind'] - if WorkflowTemplate.query.filter_by(name=name).first() is not None: - raise ResourceConflictException( - 'Workflow template {} already exists'.format(name)) - template_proto, editor_info_proto = _check_config_and_editor_info( - config, editor_info) - template_proto = _format_template_with_yaml_editor( - template_proto, editor_info_proto) - template = WorkflowTemplate(name=name, - comment=comment, - group_alias=template_proto.group_alias, - is_left=template_proto.is_left, - kind=kind) - template.set_config(template_proto) - template.set_editor_info(editor_info_proto) - db.session.add(template) - db.session.commit() - logging.info('Inserted a workflow_template to db') - result = template.to_dict() - return {'data': result}, HTTPStatus.CREATED + + @credentials_required + @use_args(GetWorkflowTemplatesParams(), location='query') + def get(self, params: dict): + """Get templates. + --- + tags: + - workflow_template + description: Get templates list. 
+        parameters:
+        - in: query
+          name: filter
+          schema:
+            type: string
+          required: false
+        - in: query
+          name: page
+          schema:
+            type: integer
+        - in: query
+          name: page_size
+          schema:
+            type: integer
+        responses:
+          200:
+            description: list of workflow templates.
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                    $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplateRef'
+        """
+        with db.session_scope() as session:
+            try:
+                pagination = WorkflowTemplateService(session).list_workflow_templates(
+                    filter_exp=params['filter'],
+                    page=params['page'],
+                    page_size=params['page_size'],
+                )
+            except ValueError as e:
+                raise InvalidArgumentException(details=f'Invalid filter: {str(e)}') from e
+            data = [t.to_ref() for t in pagination.get_items()]
+            return make_flask_response(data=data, page_meta=pagination.get_metadata())
+
+    @input_validator
+    @credentials_required
+    @emits_event(audit_fields=['name'])
+    @use_args(PostWorkflowTemplatesParams(), location='json')
+    def post(self, params: dict):
+        """Create a workflow_template.
+        ---
+        tags:
+        - workflow_template
+        description: Create a template.
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/PostWorkflowTemplatesParams'
+          required: true
+        responses:
+          201:
+            description: detail of workflow template.
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplatePb'
+        """
+        with db.session_scope() as session:
+            template = WorkflowTemplateService(session).post_workflow_template(
+                name=params['name'],
+                comment=params['comment'],
+                config=params['config'],
+                editor_info=params['editor_info'],
+                kind=params['kind'],
+                creator_username=get_current_user().username)
+            session.commit()
+        return make_flask_response(data=template.to_proto(), status=HTTPStatus.CREATED)
 
 
 class WorkflowTemplateApi(Resource):
-    @jwt_required()
-    def get(self, template_id):
-        download = request.args.get('download', 'false') == 'true'
-
-        template = WorkflowTemplate.query.filter_by(id=template_id).first()
-        if template is None:
-            raise NotFoundException(f'Failed to find template: {template_id}')
-
-        result = template.to_dict()
-        if download:
-            in_memory_file = io.BytesIO()
-            in_memory_file.write(json.dumps(result).encode('utf-8'))
-            in_memory_file.seek(0)
-            return send_file(in_memory_file,
-                             as_attachment=True,
-                             attachment_filename=f'{template.name}.json',
-                             mimetype='application/json; charset=UTF-8',
-                             cache_timeout=0)
-        return {'data': result}, HTTPStatus.OK
-
-    @jwt_required()
+
+    @credentials_required
+    @use_args({'download': fields.Bool(required=False, load_default=False)}, location='query')
+    def get(self, params: dict, template_id: int):
+        """Get template by id.
+        ---
+        tags:
+        - workflow_template
+        description: Get a template.
+        parameters:
+        - in: path
+          name: template_id
+          schema:
+            type: integer
+          required: true
+        - in: query
+          name: download
+          schema:
+            type: boolean
+        responses:
+          200:
+            description: detail of workflow template.
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplatePb'
+        """
+        with db.session_scope() as session:
+            template = session.query(WorkflowTemplate).filter_by(id=template_id).first()
+            if template is None:
+                raise NotFoundException(f'Failed to find template: {template_id}')
+            template_proto = template.to_proto()
+            if params['download']:
+                # Note this is a workaround to remove some fields from the proto.
+                # WorkflowTemplateJson and WorkflowTemplatePb are compatible.
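+                # The round trip below works because the two message types share
+                # field numbers: serializing the full WorkflowTemplatePb and
+                # parsing it into the narrower WorkflowTemplateJson effectively
+                # keeps only the fields that WorkflowTemplateJson declares.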
+ template_json_pb = WorkflowTemplateJson() + template_json_pb.ParseFromString(template_proto.SerializeToString()) + return download_json(content=to_dict(template_json_pb), filename=template.name) + return make_flask_response(template_proto) + + @credentials_required + @emits_event() def delete(self, template_id): - result = WorkflowTemplate.query.filter_by(id=template_id) - if result.first() is None: - raise NotFoundException(f'Failed to find template: {template_id}') - result.delete() - db.session.commit() - return {'data': {}}, HTTPStatus.OK - - @jwt_required() - def put(self, template_id): - parser = reqparse.RequestParser() - parser.add_argument('name', required=True, help='name is empty') - parser.add_argument('comment') - parser.add_argument('config', - type=dict, - required=True, - help='config is empty') - parser.add_argument('editor_info', type=dict, default={}) - parser.add_argument('kind', type=int, default=0) - data = parser.parse_args() - name = data['name'] - comment = data['comment'] - config = data['config'] - editor_info = data['editor_info'] - kind = data['kind'] - tmp = WorkflowTemplate.query.filter_by(name=name).first() - if tmp is not None and tmp.id != template_id: - raise ResourceConflictException( - 'Workflow template {} already exists'.format(name)) - template = WorkflowTemplate.query.filter_by(id=template_id).first() - if template is None: - raise NotFoundException(f'Failed to find template: {template_id}') - template_proto, editor_info_proto = _check_config_and_editor_info( - config, editor_info) - template_proto = _format_template_with_yaml_editor( - template_proto, editor_info_proto) - template.set_config(template_proto) - template.set_editor_info(editor_info_proto) - template.name = name - template.comment = comment - template.group_alias = template_proto.group_alias - template.is_left = template_proto.is_left - template.kind = kind - db.session.commit() - result = template.to_dict() - return {'data': result}, HTTPStatus.OK - - -def _format_template_with_yaml_editor(template_proto, editor_info_proto): - for job_def in template_proto.job_definitions: - # if job is in editor_info, than use meta_yaml format with - # slots instead of yaml_template - yaml_editor_infos = editor_info_proto.yaml_editor_infos - if not job_def.expert_mode and job_def.name in yaml_editor_infos: - yaml_editor_info = yaml_editor_infos[job_def.name] - job_def.yaml_template = generate_yaml_template( - yaml_editor_info.meta_yaml, - yaml_editor_info.slots) - try: - check_workflow_definition(template_proto) - except ValueError as e: - raise InvalidArgumentException( - details={'config.yaml_template': str(e)}) - return template_proto - - -def _check_config_and_editor_info(config, editor_info): - # TODO: needs tests - if 'group_alias' not in config: - raise InvalidArgumentException( - details={'config.group_alias': 'config.group_alias is required'}) - if 'is_left' not in config: - raise InvalidArgumentException( - details={'config.is_left': 'config.is_left is required'}) - - # form to proto buffer - editor_info_proto = dict_to_editor_info(editor_info) - template_proto = dict_to_workflow_definition(config) - for index, job_def in enumerate(template_proto.job_definitions): - # pod label name must be no more than 63 characters. 
-        # workflow.uuid is 20 characters, pod name suffix such as
-        # '-follower-master-0' is less than 19 characters, so the
-        # job name must be no more than 24
-        if len(job_def.name) > 24:
-            raise InvalidArgumentException(
-                details={
-                    f'config.job_definitions[{index}].job_name':
-                        'job_name must be no more than 24 characters'
-                })
-        # limit from k8s
-        if not re.match('[a-z0-9-]*', job_def.name):
-            raise InvalidArgumentException(
-                details={
-                    f'config.job_definitions[{index}].job_name':
-                        'Only letters(a-z), numbers(0-9) '
-                        'and dashes(-) are supported.'
-                })
-    return template_proto, editor_info_proto
-
-
-class CodeApi(Resource):
-    @jwt_required()
-    def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument('code_path',
-                            type=str,
-                            location='args',
-                            required=True,
-                            help='code_path is required')
-        data = parser.parse_args()
-        code_path = data['code_path']
-        try:
-            with tarfile.open(code_path) as tar:
-                code_dict = {}
-                for file in tar.getmembers():
-                    if tar.extractfile(file) is not None:
-                        if '._' not in file.name and file.isfile():
-                            code_dict[file.name] = str(
-                                tar.extractfile(file).read(), encoding='utf-8')
-                return {'data': code_dict}, HTTPStatus.OK
-        except Exception as e:
-            logging.error(f'Get code, code_path: {code_path}, exception: {e}')
-            raise InvalidArgumentException(details={'code_path': 'wrong path'})
+        """Delete template by id.
+        ---
+        tags:
+        - workflow_template
+        description: Delete a template.
+        parameters:
+        - in: path
+          name: template_id
+          schema:
+            type: integer
+          required: true
+        responses:
+          204:
+            description: Successfully deleted.
+        """
+        with db.session_scope() as session:
+            result = session.query(WorkflowTemplate).filter_by(id=template_id)
+            if result.first() is None:
+                raise NotFoundException(f'Failed to find template: {template_id}')
+            result.delete()
+            session.commit()
+        return make_flask_response(status=HTTPStatus.NO_CONTENT)
+
+    @input_validator
+    @credentials_required
+    @emits_event(audit_fields=['name'])
+    @use_args(PutWorkflowTemplatesParams(), location='json')
+    def put(self, params: dict, template_id: int):
+        """Put a workflow_template.
+        ---
+        tags:
+        - workflow_template
+        description: Edit a template.
+        parameters:
+        - in: path
+          name: template_id
+          schema:
+            type: integer
+          required: true
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/definitions/PutWorkflowTemplatesParams'
+          required: true
+        responses:
+          200:
+            description: detail of workflow template.
+ content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplatePb' + """ + with db.session_scope() as session: + tmp = session.query(WorkflowTemplate).filter_by(name=params['name']).first() + if tmp is not None and tmp.id != template_id: + raise ResourceConflictException(f'Workflow template {params["name"]} already exists') + template = session.query(WorkflowTemplate).filter_by(id=template_id).first() + if template is None: + raise NotFoundException(f'Failed to find template: {template_id}') + template_proto = _format_template_with_yaml_editor(params['config'], params['editor_info'], session) + template.set_config(template_proto) + template.set_editor_info(params['editor_info']) + template.name = params['name'] + template.comment = params['comment'] + template.group_alias = template_proto.group_alias + session.commit() + return make_flask_response(template.to_proto()) + + +class WorkflowTemplateRevisionsApi(Resource): + + @credentials_required + @use_args( + { + 'page': fields.Integer(required=False, load_default=None), + 'page_size': fields.Integer(required=False, load_default=None) + }, + location='query') + def get(self, params: dict, template_id: int): + """Get all template revisions for specific template. + --- + tags: + - workflow_template + description: Get all template revisions for specific template. + parameters: + - in: path + name: template_id + required: true + schema: + type: integer + description: The ID of the template + - in: query + name: page + schema: + type: integer + - in: query + name: page_size + schema: + type: integer + responses: + 200: + description: list of workflow template revisions. + content: + application/json: + schema: + type: array + items: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplateRevisionRef' + """ + with db.session_scope() as session: + query = session.query(WorkflowTemplateRevision).filter_by(template_id=template_id) + query = query.order_by(WorkflowTemplateRevision.revision_index.desc()) + pagination = paginate(query, params['page'], params['page_size']) + data = [t.to_ref() for t in pagination.get_items()] + return make_flask_response(data=data, page_meta=pagination.get_metadata()) + + +class WorkflowTemplateRevisionsCreateApi(Resource): + + @credentials_required + def post(self, template_id: int): + """Create a new template revision for specific template if config has been changed. + --- + tags: + - workflow_template + description: Create a new template revision for specific template if config has been changed. + parameters: + - in: path + name: template_id + required: true + schema: + type: integer + description: The ID of the template + responses: + 200: + description: detail of workflow template revision. + content: + application/json: + schema: + $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplateRevisionPb' + """ + with db.session_scope() as session: + revision = WorkflowTemplateRevisionService(session).create_new_revision_if_template_updated( + template_id=template_id) + session.commit() + return make_flask_response(data=revision.to_proto()) + + +class WorkflowTemplateRevisionApi(Resource): + + @credentials_required + @use_args({'download': fields.Boolean(required=False, load_default=None)}, location='query') + def get(self, params: dict, revision_id: int): + """Get template revision by id. + --- + tags: + - workflow_template + description: Get template revision. 
+        parameters:
+        - in: path
+          name: revision_id
+          required: true
+          schema:
+            type: integer
+          description: The ID of the template revision
+        - in: query
+          name: download
+          schema:
+            type: boolean
+        responses:
+          200:
+            description: detail of workflow template revision.
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplateRevisionPb'
+        """
+        with db.session_scope() as session:
+            template_revision = session.query(WorkflowTemplateRevision).options(
+                undefer(WorkflowTemplateRevision.config),
+                undefer(WorkflowTemplateRevision.editor_info)).get(revision_id)
+            if template_revision is None:
+                raise NotFoundException(f'Cannot find template revision {revision_id}')
+            if params['download']:
+                # Note: this is a workaround to remove some fields from the proto.
+                # WorkflowTemplateRevisionJson and WorkflowTemplateRevisionPb are compatible.
+                revision_proto = template_revision.to_proto()
+                revision_json_pb = WorkflowTemplateRevisionJson()
+                revision_json_pb.ParseFromString(revision_proto.SerializeToString())
+                return download_json(content=to_dict(revision_json_pb), filename=template_revision.id)
+            return make_flask_response(data=template_revision.to_proto())
+
+    @credentials_required
+    def delete(self, revision_id: int):
+        """Delete template revision by id.
+        ---
+        tags:
+          - workflow_template
+        description: Delete template revision.
+        parameters:
+        - in: path
+          name: revision_id
+          required: true
+          schema:
+            type: integer
+          description: The ID of the template revision
+        responses:
+          204:
+            description: No content.
+        """
+        with db.session_scope() as session:
+            WorkflowTemplateRevisionService(session).delete_revision(revision_id=revision_id)
+            session.commit()
+        return make_flask_response(status=HTTPStatus.NO_CONTENT)

+    @credentials_required
+    @use_args({'comment': fields.String(required=False, load_default=None)})
+    def patch(self, params: dict, revision_id: int):
+        """Patch template revision by id.
+        ---
+        tags:
+          - workflow_template
+        description: Patch template revision.
+        parameters:
+        - in: path
+          name: revision_id
+          required: true
+          schema:
+            type: integer
+          description: The ID of the template revision
+        - in: body
+          name: comment
+          schema:
+            type: string
+          required: false
+        responses:
+          200:
+            description: detail of workflow template revision.
+            content:
+              application/json:
+                schema:
+                  $ref: '#/definitions/fedlearner_webconsole.proto.WorkflowTemplateRevisionPb'
+        """
+        with db.session_scope() as session:
+            template_revision = session.query(WorkflowTemplateRevision).options(
+                undefer(WorkflowTemplateRevision.config),
+                undefer(WorkflowTemplateRevision.editor_info)).get(revision_id)
+            if template_revision is None:
+                raise NotFoundException(f'Cannot find template revision {revision_id}')
+            if params['comment']:
+                template_revision.comment = params['comment']
+            session.commit()
+            return make_flask_response(data=template_revision.to_proto())
+
+
+class WorkflowTemplateRevisionSendApi(Resource):
+
+    @use_kwargs({
+        'participant_id': fields.Integer(required=True),
+    }, location='query')
+    def post(self, revision_id: int, participant_id: int):
+        """Send a template revision to participant.
+        ---
+        tags:
+          - workflow_template
+        description: Send a template revision to participant.
+        parameters:
+        - in: path
+          name: revision_id
+          required: true
+          schema:
+            type: integer
+          description: The ID of the template revision
+        - in: query
+          name: participant_id
+          required: true
+          schema:
+            type: integer
+          description: The ID of the participant
+        responses:
+          204:
+            description: No content.
+        """
+        with db.session_scope() as session:
+            part: Participant = session.query(Participant).get(participant_id)
+            if part is None:
+                raise NotFoundException(f'Participant {participant_id} does not exist')
+            revision: WorkflowTemplateRevision = session.query(WorkflowTemplateRevision).get(revision_id)
+            if revision is None:
+                raise NotFoundException(f'Template revision {revision_id} does not exist')
+            try:
+                ProjectServiceClient.from_participant(part.domain_name).send_template_revision(
+                    config=revision.get_config(),
+                    name=revision.template.name,
+                    comment=revision.comment,
+                    kind=WorkflowTemplateKind.PEER,
+                    revision_index=revision.revision_index)
+            except grpc.RpcError as e:
+                raise NetworkException(str(e)) from e
+
+        return make_flask_response(status=HTTPStatus.NO_CONTENT)


 def initialize_workflow_template_apis(api):
     api.add_resource(WorkflowTemplatesApi, '/workflow_templates')
-    api.add_resource(WorkflowTemplateApi,
-                     '/workflow_templates/<int:template_id>')
-    api.add_resource(CodeApi, '/codes')
+    api.add_resource(WorkflowTemplateApi, '/workflow_templates/<int:template_id>')
+    api.add_resource(WorkflowTemplateRevisionsApi,
+                     '/workflow_templates/<int:template_id>/workflow_template_revisions')
+    api.add_resource(WorkflowTemplateRevisionsCreateApi, '/workflow_templates/<int:template_id>:create_revision')
+    api.add_resource(WorkflowTemplateRevisionApi, '/workflow_template_revisions/<int:revision_id>')
+    api.add_resource(WorkflowTemplateRevisionSendApi, '/workflow_template_revisions/<int:revision_id>:send')
+
+    # If a schema is used, it has to be appended to schema_manager so Swagger knows the schema is available.
+    schema_manager.append(PostWorkflowTemplatesParams)
+    schema_manager.append(PutWorkflowTemplatesParams)
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/apis_test.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/apis_test.py
new file mode 100644
index 000000000..bd457927e
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/apis_test.py
@@ -0,0 +1,346 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
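To make the routing above concrete, here is a minimal, hypothetical client-side sketch of the two custom-verb endpoints (`:create_revision` and `:send`). The host, port, and auth header are placeholders and not part of this change; the URL shapes follow the `add_resource` calls above and the `/api/v2` prefix used in the tests below.

```python
import requests  # any HTTP client works; requests is used only for illustration

BASE = 'http://localhost:1991/api/v2'  # hypothetical host/port
HEADERS = {'Authorization': 'Bearer <token>'}  # placeholder credentials

# Snapshot template 33's current config as a revision; per the service logic,
# the call returns the existing latest revision if the config is unchanged.
resp = requests.post(f'{BASE}/workflow_templates/33:create_revision', headers=HEADERS)
revision_id = resp.json()['data']['id']

# Push that revision to participant 1 (the API returns 204 on success).
requests.post(f'{BASE}/workflow_template_revisions/{revision_id}:send',
              params={'participant_id': 1},
              headers=HEADERS)
```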
+ +# coding: utf-8 +import json +import unittest +import urllib.parse +from http import HTTPStatus +from unittest.mock import patch, MagicMock + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.utils.proto import to_dict +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateKind, \ + WorkflowTemplateRevision +from fedlearner_webconsole.workflow_template.service import dict_to_workflow_definition +from testing.common import BaseTestCase + + +class WorkflowTemplatesApiTest(BaseTestCase): + + class Config(BaseTestCase.Config): + START_SCHEDULER = False + + def setUp(self): + super().setUp() + # Inserts data + template1 = WorkflowTemplate(name='t1', + comment='comment for t1', + group_alias='g1', + kind=WorkflowTemplateKind.DEFAULT.value) + template1.set_config(WorkflowDefinition(group_alias='g1',)) + template2 = WorkflowTemplate(name='t2', group_alias='g2', kind=WorkflowTemplateKind.DEFAULT.value) + template2.set_config(WorkflowDefinition(group_alias='g2',)) + with db.session_scope() as session: + session.add(template1) + session.add(template2) + session.commit() + + def test_get_without_filter_and_pagination(self): + response = self.get_helper('/api/v2/workflow_templates') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 20) + + def test_get_with_pagination(self): + response = self.get_helper('/api/v2/workflow_templates?page=1&page_size=3') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 3) + + def test_get_with_invalid_filter(self): + response = self.get_helper('/api/v2/workflow_templates?filter=invalid') + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + + def test_get_with_filter(self): + filter_exp = urllib.parse.quote('(and(group_alias="g1")(name~="t")(kind=0))') + response = self.get_helper(f'/api/v2/workflow_templates?filter={filter_exp}') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = json.loads(response.data).get('data') + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 't1') + + def test_get_with_kind(self): + response = self.get_helper('/api/v2/workflow_templates?filter=%28kind%3D1%29') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + names = [t['name'] for t in data] + self.assertTrue({ + 'e2e-local', + 'sys-preset-nn-model', + 'sys-preset-tree-model', + 'e2e-fed-right', + 'e2e-fed-left', + 'e2e-sparse-estimator-test-right', + 'sys-preset-psi-data-join', + 'sys-preset-psi-data-join-analyzer', + }.issubset(set(names))) + + def test_post_without_required_arguments(self): + response = self.post_helper('/api/v2/workflow_templates', data={}) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual( + json.loads(response.data).get('details'), + {'json': { + 'config': ['Missing data for required field.'], + 'name': ['Missing data for required field.'] + }}) + + response = self.post_helper('/api/v2/workflow_templates', + data={ + 'name': 'test', + 'comment': 'test-comment', + 'config': {} + }) + self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) + self.assertEqual( + json.loads(response.data).get('details'), {'config.group_alias': 'config.group_alias is required'}) + + def 
test_post_successfully(self): + with db.session_scope() as session: + template_name = 'test-nb-template' + expected_template = session.query(WorkflowTemplate).filter_by(name=template_name).first() + self.assertIsNone(expected_template) + + response = self.post_helper('/api/v2/workflow_templates', + data={ + 'name': template_name, + 'comment': 'test-comment', + 'config': { + 'group_alias': 'g222', + }, + 'kind': 1, + }) + self.assertEqual(response.status_code, HTTPStatus.CREATED) + data = json.loads(response.data).get('data') + with db.session_scope() as session: + # Checks DB + expected_template = session.query(WorkflowTemplate).filter_by(name=template_name).first() + self.assertEqual(expected_template.name, template_name) + self.assertEqual(expected_template.comment, 'test-comment') + self.assertEqual(expected_template.config, WorkflowDefinition(group_alias='g222').SerializeToString()) + expected_template_dict = { + 'comment': 'test-comment', + 'config': { + 'group_alias': 'g222', + 'job_definitions': [], + 'variables': [] + }, + 'editor_info': { + 'yaml_editor_infos': {} + }, + 'group_alias': 'g222', + 'is_local': True, + 'name': 'test-nb-template', + 'kind': 1, + 'creator_username': 'ada', + 'coordinator_pure_domain_name': '' + } + self.assertPartiallyEqual(data, expected_template_dict, ignore_fields=['id', 'created_at', 'updated_at']) + + def test_delete_workflow_template(self): + response = self.delete_helper('/api/v2/workflow_templates/1') + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + response = self.delete_helper('/api/v2/workflow_templates/1') + self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) + + def test_put_workflow_template(self): + with db.session_scope() as session: + data = {'name': 'test_put', 'comment': 'test-comment', 'config': {'group_alias': 'g222'}} + response = self.put_helper('/api/v2/workflow_templates/1', data=data) + self.assertEqual(response.status_code, HTTPStatus.OK) + expected_template = session.query(WorkflowTemplate).filter_by(id=1).first() + self.assertEqual(expected_template.name, data['name']) + self.assertEqual(expected_template.comment, data['comment']) + self.assertEqual(expected_template.group_alias, data['config']['group_alias']) + + def test_dict_to_workflow_definition(self): + config = {'variables': [{'name': 'code', 'value': '{"asdf.py": "asdf"}', 'value_type': 'CODE'}]} + proto = dict_to_workflow_definition(config) + self.assertTrue(isinstance(proto.variables[0].value, str)) + + +class WorkflowTemplateApiTest(BaseTestCase): + TEMPLATE1_ID = 10001 + + def setUp(self): + super().setUp() + # Inserts data + template1 = WorkflowTemplate( + id=self.TEMPLATE1_ID, + name='t1', + comment='comment for t1 fff', + group_alias='g1', + kind=WorkflowTemplateKind.DEFAULT.value, + creator_username='falin', + ) + template1.set_config(WorkflowDefinition(group_alias='g1',)) + with db.session_scope() as session: + session.add(template1) + session.commit() + + def test_get(self): + response = self.get_helper(f'/api/v2/workflow_templates/{self.TEMPLATE1_ID}') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertResponseDataEqual( + response, + { + 'id': self.TEMPLATE1_ID, + 'comment': 'comment for t1 fff', + 'name': 't1', + 'kind': 0, + 'creator_username': 'falin', + 'group_alias': 'g1', + 'is_local': True, + 'config': { + 'group_alias': 'g1', + 'job_definitions': [], + 'variables': [] + }, + 'editor_info': { + 'yaml_editor_infos': {} + }, + 'coordinator_pure_domain_name': '' + }, + ignore_fields=['created_at', 
'updated_at'], + ) + + def test_download(self): + response = self.get_helper(f'/api/v2/workflow_templates/{self.TEMPLATE1_ID}?download=true') + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertEqual( + json.loads(response.data.decode('utf-8')), { + 'comment': 'comment for t1 fff', + 'name': 't1', + 'group_alias': 'g1', + 'config': { + 'group_alias': 'g1', + 'job_definitions': [], + 'variables': [] + }, + 'editor_info': { + 'yaml_editor_infos': {} + }, + }) + + +class WorkflowTemplateRevisionsApiTest(BaseTestCase): + + def setUp(self): + super().setUp() + # Inserts data + with db.session_scope() as session: + template1 = WorkflowTemplate(id=33, + name='t1', + comment='comment for t1', + group_alias='g1', + kind=WorkflowTemplateKind.DEFAULT.value) + template1.set_config(WorkflowDefinition(group_alias='g1',)) + session.flush() + self.template_rev1 = WorkflowTemplateRevision(template_id=template1.id, + config=template1.config, + revision_index=1) + self.template_rev2 = WorkflowTemplateRevision(template_id=1, revision_index=1, config=template1.config) + self.template_rev3 = WorkflowTemplateRevision(template_id=1, revision_index=2, config=template1.config) + session.add(template1) + session.add(self.template_rev1) + session.add(self.template_rev2) + session.add(self.template_rev3) + session.commit() + + def test_get_revisions(self): + response = self.get_helper('/api/v2/workflow_templates/1/workflow_template_revisions') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(len(data), 2) + self.assertEqual(data[0]['revision_index'], 2) + + def test_get_revision(self): + response = self.get_helper('/api/v2/workflow_template_revisions/1') + self.assertEqual(response.status_code, HTTPStatus.OK) + data = self.get_response_data(response) + self.assertEqual(data['config'], {'group_alias': 'g1', 'variables': [], 'job_definitions': []}) + response = self.get_helper('/api/v2/workflow_template_revisions/1?download=true') + result = json.loads(response.data) + self.assertEqual( + result, { + 'revision_index': 1, + 'config': { + 'group_alias': 'g1', + 'variables': [], + 'job_definitions': [] + }, + 'editor_info': { + 'yaml_editor_infos': {} + }, + 'name': 't1', + 'comment': '' + }) + + def test_create_revision(self): + tpl_id = 33 + response = self.post_helper(f'/api/v2/workflow_templates/{tpl_id}:create_revision') + self.assertEqual(response.status_code, HTTPStatus.OK) + data1 = self.get_response_data(response) + with db.session_scope() as session: + tpl = session.query(WorkflowTemplate).get(tpl_id) + tpl_config = to_dict(tpl.get_config()) + self.assertEqual(data1['revision_index'], 1) + self.assertEqual(data1['template_id'], tpl_id) + self.assertEqual(data1['config'], tpl_config) + response = self.post_helper('/api/v2/workflow_templates/33:create_revision') + self.assertEqual(response.status_code, HTTPStatus.OK) + data2 = self.get_response_data(response) + self.assertEqual(data1, data2) + + def test_delete_revision(self): + response = self.delete_helper(f'/api/v2/workflow_template_revisions/{self.template_rev2.id}') + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + with db.session_scope() as session: + self.assertEqual(session.query(WorkflowTemplateRevision).get(self.template_rev2.id), None) + + def test_delete_latest_revision(self): + response = self.delete_helper(f'/api/v2/workflow_template_revisions/{self.template_rev3.id}') + self.assertEqual(response.status_code, HTTPStatus.CONFLICT) + + def 
test_patch_revision(self): + response = self.patch_helper('/api/v2/workflow_template_revisions/1', data={'comment': 'test'}) + self.assertEqual(response.status_code, HTTPStatus.OK) + self.assertEqual(self.get_response_data(response)['comment'], 'test') + with db.session_scope() as session: + revision = session.query(WorkflowTemplateRevision).get(1) + self.assertEqual(revision.comment, 'test') + + @patch('fedlearner_webconsole.workflow_template.apis.ProjectServiceClient.from_participant') + def test_send_workflow_template_revision(self, mock_from_participant): + with db.session_scope() as session: + session.add(Participant(id=1, name='test', domain_name='test')) + session.commit() + mock_from_participant.return_value.send_template_revision = MagicMock() + response = self.post_helper( + f'/api/v2/workflow_template_revisions/{self.template_rev1.id}:send?participant_id=1') + self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) + mock_from_participant.return_value.send_template_revision.assert_called_once_with( + config=self.template_rev1.get_config(), + name='t1', + comment=self.template_rev1.comment, + kind=WorkflowTemplateKind.PEER, + revision_index=self.template_rev1.revision_index) + mock_from_participant.assert_called_once_with('test') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/models.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/models.py index 70f2b211c..44b926466 100644 --- a/web_console_v2/api/fedlearner_webconsole/workflow_template/models.py +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/models.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,48 +15,55 @@ # coding: utf-8 import enum +from sqlalchemy import func +from sqlalchemy.orm import deferred from sqlalchemy.sql.schema import Index, UniqueConstraint + +from fedlearner_webconsole.proto.workflow_template_pb2 import WorkflowTemplateRevisionRef, \ + WorkflowTemplateRevisionPb, WorkflowTemplateRef, WorkflowTemplatePb +from fedlearner_webconsole.utils.pp_datetime import to_timestamp from fedlearner_webconsole.utils.mixins import to_dict_mixin from fedlearner_webconsole.db import db, default_table_args from fedlearner_webconsole.proto import workflow_definition_pb2 +from fedlearner_webconsole.workflow.utils import is_local class WorkflowTemplateKind(enum.Enum): DEFAULT = 0 - PRESET_DATAJOIN = 1 + PRESET = 1 + PEER = 2 @to_dict_mixin( extras={ + 'is_local': (lambda wt: wt.is_local()), 'config': (lambda wt: wt.get_config()), 'editor_info': (lambda wt: wt.get_editor_info()) }) class WorkflowTemplate(db.Model): __tablename__ = 'template_v2' - __table_args__ = (UniqueConstraint('name', name='uniq_name'), - Index('idx_group_alias', 'group_alias'), - default_table_args('workflow template')) - id = db.Column(db.Integer, primary_key=True, comment='id') - name = db.Column(db.String(255), comment='name') - comment = db.Column('cmt', - db.String(255), - key='comment', - comment='comment') - group_alias = db.Column(db.String(255), - nullable=False, - comment='group_alias') + __table_args__ = (UniqueConstraint('name', + name='uniq_name'), Index('idx_group_alias', + 'group_alias'), default_table_args('workflow template')) + id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True) + name = db.Column(db.String(255), comment='name', default='') + comment = db.Column('cmt', db.String(255), key='comment', comment='comment') + group_alias = db.Column(db.String(255), nullable=False, comment='group_alias') # max store 16777215 bytes (16 MB) - config = db.Column(db.LargeBinary(16777215), - nullable=False, - comment='config') - is_left = db.Column(db.Boolean, comment='is_left') - editor_info = db.Column(db.LargeBinary(16777215), - comment='editor_info', - default=b'') - kind = db.Column(db.Integer, - comment='template kind') # WorkflowTemplateKind enum - - def set_config(self, proto): + config = deferred(db.Column(db.LargeBinary(16777215), nullable=False, comment='config')) + editor_info = deferred(db.Column(db.LargeBinary(16777215), comment='editor_info', default=b'')) + kind = db.Column(db.Integer, comment='template kind', default=0) # WorkflowTemplateKind enum + creator_username = db.Column(db.String(255), comment='the username of the creator') + coordinator_pure_domain_name = db.Column(db.String(255), comment='name of the coordinator') + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created_at') + updated_at = db.Column(db.DateTime(timezone=True), + onupdate=func.now(), + server_default=func.now(), + comment='update_at') + template_revisions = db.relationship( + 'WorkflowTemplateRevision', primaryjoin='WorkflowTemplate.id == foreign(WorkflowTemplateRevision.template_id)') + + def set_config(self, proto: workflow_definition_pb2.WorkflowDefinition): self.config = proto.SerializeToString() def set_editor_info(self, proto): @@ -72,3 +79,84 @@ def get_editor_info(self): if self.editor_info is not None: proto.ParseFromString(self.editor_info) return proto + + def is_local(self): + job_defs = self.get_config().job_definitions + for job_def in job_defs: + if job_def.is_federated: + return False + return True + + def to_ref(self) -> 
WorkflowTemplateRef:
+        return WorkflowTemplateRef(id=self.id,
+                                   name=self.name,
+                                   comment=self.comment,
+                                   group_alias=self.group_alias,
+                                   kind=self.kind,
+                                   coordinator_pure_domain_name=self.coordinator_pure_domain_name)
+
+    def to_proto(self) -> WorkflowTemplatePb:
+        return WorkflowTemplatePb(id=self.id,
+                                  comment=self.comment,
+                                  created_at=to_timestamp(self.created_at),
+                                  config=self.get_config(),
+                                  editor_info=self.get_editor_info(),
+                                  is_local=is_local(self.get_config()),
+                                  name=self.name,
+                                  group_alias=self.group_alias,
+                                  kind=self.kind,
+                                  creator_username=self.creator_username,
+                                  updated_at=to_timestamp(self.updated_at),
+                                  coordinator_pure_domain_name=self.coordinator_pure_domain_name)
+
+
+class WorkflowTemplateRevision(db.Model):
+    __tablename__ = 'template_revisions_v2'
+    __table_args__ = (Index('idx_template_id', 'template_id'),
+                      UniqueConstraint('template_id', 'revision_index', name='uniq_revision_index_in_template'),
+                      default_table_args('workflow template revision'))
+    id = db.Column(db.Integer, primary_key=True, comment='id', autoincrement=True)
+    revision_index = db.Column(db.Integer, comment='index for the same template')
+    comment = db.Column('cmt', db.String(255), key='comment', comment='comment')
+    # max store 16777215 bytes (16 MB)
+    config = deferred(db.Column(db.LargeBinary(16777215), nullable=False, comment='config'))
+    editor_info = deferred(db.Column(db.LargeBinary(16777215), comment='editor_info', default=b''))
+    template_id = db.Column(db.Integer, comment='template_id')
+    created_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), comment='created_at')
+    template = db.relationship(
+        'WorkflowTemplate',
+        primaryjoin='WorkflowTemplate.id == foreign(WorkflowTemplateRevision.template_id)',
+        # To disable the warning of back_populates
+        overlaps='template_revisions')
+
+    def set_config(self, proto: workflow_definition_pb2.WorkflowDefinition):
+        self.config = proto.SerializeToString()
+
+    def get_config(self) -> workflow_definition_pb2.WorkflowDefinition:
+        proto = workflow_definition_pb2.WorkflowDefinition()
+        proto.ParseFromString(self.config)
+        return proto
+
+    def get_editor_info(self) -> workflow_definition_pb2.WorkflowTemplateEditorInfo:
+        proto = workflow_definition_pb2.WorkflowTemplateEditorInfo()
+        if self.editor_info is not None:
+            proto.ParseFromString(self.editor_info)
+        return proto
+
+    def to_ref(self) -> WorkflowTemplateRevisionRef:
+        return WorkflowTemplateRevisionRef(id=self.id,
+                                           revision_index=self.revision_index,
+                                           comment=self.comment,
+                                           template_id=self.template_id,
+                                           created_at=to_timestamp(self.created_at))
+
+    def to_proto(self) -> WorkflowTemplateRevisionPb:
+        return WorkflowTemplateRevisionPb(id=self.id,
+                                          revision_index=self.revision_index,
+                                          comment=self.comment,
+                                          template_id=self.template_id,
+                                          created_at=to_timestamp(self.created_at),
+                                          config=self.get_config(),
+                                          editor_info=self.get_editor_info(),
+                                          is_local=is_local(self.get_config()),
+                                          name=self.template and self.template.name)
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/models_test.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/models_test.py
new file mode 100644
index 000000000..3d3af126c
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/models_test.py
@@ -0,0 +1,116 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
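A note on the `deferred(...)` columns introduced above: SQLAlchemy skips loading a deferred column until it is first accessed, so list queries over templates and revisions do not pull the up-to-16 MB `config`/`editor_info` blobs. Detail endpoints opt back in with `undefer`, as the API handlers earlier in this diff do. A small illustrative sketch, reusing the models and `db.session_scope()` from this codebase (template id 1 is arbitrary):

```python
from sqlalchemy.orm import undefer

with db.session_scope() as session:
    # List view: deferred blobs stay unloaded, so building refs is cheap.
    refs = [t.to_ref() for t in session.query(WorkflowTemplate).all()]

    # Detail view: undefer the blobs up front to avoid an extra lazy-load
    # SELECT when to_proto() calls get_config()/get_editor_info().
    template = session.query(WorkflowTemplate).options(
        undefer(WorkflowTemplate.config),
        undefer(WorkflowTemplate.editor_info)).get(1)
    proto = template.to_proto()
```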
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +from datetime import datetime, timezone + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, \ + WorkflowTemplateEditorInfo +from fedlearner_webconsole.proto.workflow_template_pb2 import WorkflowTemplateRevisionRef, WorkflowTemplateRevisionPb, \ + WorkflowTemplatePb, WorkflowTemplateRef +from fedlearner_webconsole.utils.pp_datetime import to_timestamp +from fedlearner_webconsole.workflow.models import Workflow # pylint: disable=unused-import +from fedlearner_webconsole.workflow_template.models import WorkflowTemplateRevision, WorkflowTemplate, \ + WorkflowTemplateKind +from testing.no_web_server_test_case import NoWebServerTestCase + + +class WorkflowTemplateRevisionTest(NoWebServerTestCase): + + def test_to_revision_ref(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + config = WorkflowDefinition().SerializeToString() + template_revision = WorkflowTemplateRevision(id=123, + revision_index=1, + comment='a', + created_at=created_at, + config=config, + template_id=1) + revision_ref = WorkflowTemplateRevisionRef(id=123, + revision_index=1, + comment='a', + template_id=1, + created_at=to_timestamp(created_at)) + self.assertEqual(template_revision.to_ref(), revision_ref) + + def test_to_workflow_proto(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + config = WorkflowDefinition() + template = WorkflowTemplate(name='test', id=1, group_alias='aa', config=b'') + template_revision = WorkflowTemplateRevision(id=123, + revision_index=1, + comment='a', + created_at=created_at, + config=config.SerializeToString(), + template_id=1) + revision_pb = WorkflowTemplateRevisionPb(id=123, + revision_index=1, + comment='a', + template_id=1, + created_at=to_timestamp(created_at), + config=config, + editor_info=WorkflowTemplateEditorInfo(), + is_local=True, + name='test') + with db.session_scope() as session: + session.add(template) + session.add(template_revision) + session.commit() + self.assertEqual(template_revision.to_proto(), revision_pb) + + +class WorkflowTemplateTest(NoWebServerTestCase): + + def test_to_proto(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + config = WorkflowDefinition() + template = WorkflowTemplate(id=123, + comment='a', + created_at=created_at, + config=config.SerializeToString(), + updated_at=created_at, + coordinator_pure_domain_name='test') + tpl_pb = WorkflowTemplatePb(id=123, + comment='a', + created_at=to_timestamp(created_at), + config=config, + editor_info=WorkflowTemplateEditorInfo(), + is_local=True, + updated_at=to_timestamp(created_at), + coordinator_pure_domain_name='test') + self.assertEqual(template.to_proto(), tpl_pb) + + def test_to_ref(self): + created_at = datetime(2021, 10, 1, 8, 8, 8, tzinfo=timezone.utc) + config = WorkflowDefinition() + template = WorkflowTemplate(id=123, + comment='a', + created_at=created_at, + config=config.SerializeToString(), + updated_at=created_at, + coordinator_pure_domain_name='test', + group_alias='test', + kind=WorkflowTemplateKind.PEER.value) + tpl_pb 
= WorkflowTemplateRef(id=123,
+                                      comment='a',
+                                      coordinator_pure_domain_name='test',
+                                      group_alias='test',
+                                      kind=WorkflowTemplateKind.PEER.value)
+        self.assertEqual(template.to_ref(), tpl_pb)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/service.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/service.py
new file mode 100644
index 000000000..f22a2e965
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/service.py
@@ -0,0 +1,220 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# coding: utf-8
+import re
+import json
+from typing import Union, Optional
+
+from sqlalchemy import desc
+from sqlalchemy.orm.session import Session
+from google.protobuf.json_format import ParseDict, ParseError
+
+from fedlearner_webconsole.proto.filtering_pb2 import FilterOp, FilterExpression
+from fedlearner_webconsole.utils.filtering import SupportedField, FieldType, FilterBuilder
+from fedlearner_webconsole.utils.paginate import Pagination, paginate
+from fedlearner_webconsole.workflow.models import Workflow
+from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateRevision, \
+    WorkflowTemplateKind
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, WorkflowTemplateEditorInfo
+from fedlearner_webconsole.exceptions import InvalidArgumentException, NotFoundException
+from fedlearner_webconsole.workflow_template.slots_formatter import \
+    generate_yaml_template
+from fedlearner_webconsole.workflow_template.template_validaor\
+    import check_workflow_definition
+from fedlearner_webconsole.exceptions import ResourceConflictException
+
+
+def _validate_variable(variable):
+    if variable.value_type == 'CODE':
+        try:
+            json.loads(variable.value)
+        except json.JSONDecodeError as e:
+            raise InvalidArgumentException(str(e)) from e
+    return variable
+
+
+def _format_template_with_yaml_editor(template_proto, editor_info_proto, session):
+    for job_def in template_proto.job_definitions:
+        # If the job is in editor_info, then use the meta_yaml format with
+        # slots instead of yaml_template.
+        yaml_editor_infos = editor_info_proto.yaml_editor_infos
+        if job_def.easy_mode and job_def.name in yaml_editor_infos:
+            yaml_editor_info = yaml_editor_infos[job_def.name]
+            job_def.yaml_template = generate_yaml_template(yaml_editor_info.meta_yaml, yaml_editor_info.slots)
+    try:
+        check_workflow_definition(template_proto, session)
+    except ValueError as e:
+        raise InvalidArgumentException(details={'config.yaml_template': str(e)}) from e
+    return template_proto
+
+
+def _check_config_and_editor_info(config, editor_info):
+    # TODO: needs tests
+    if 'group_alias' not in config:
+        raise InvalidArgumentException(details={'config.group_alias': 'config.group_alias is required'})
+
+    # form to proto buffer
+    editor_info_proto = dict_to_editor_info(editor_info)
+    template_proto = dict_to_workflow_definition(config)
+    for index, job_def in enumerate(template_proto.job_definitions):
+        # pod label name must be no more than 63 characters.
+        # workflow.uuid is 20 characters, pod name suffix such as
+        # '-follower-master-0' is less than 19 characters, so the
+        # job name must be no more than 24 characters.
+        if len(job_def.name) > 24:
+            raise InvalidArgumentException(
+                details={f'config.job_definitions[{index}].job_name': 'job_name must be no more than 24 characters'})
+        # limit from k8s: the whole name must match, not just a prefix
+        if not re.fullmatch('[a-z0-9-]*', job_def.name):
+            raise InvalidArgumentException(
+                details={
+                    f'config.job_definitions[{index}].job_name': 'Only letters(a-z), numbers(0-9) '
+                                                                 'and dashes(-) are supported.'
+                })
+    return template_proto, editor_info_proto
+
+
+def dict_to_workflow_definition(config):
+    try:
+        if config is None:
+            config = {}
+        template_proto = ParseDict(config, WorkflowDefinition(), ignore_unknown_fields=True)
+        for variable in template_proto.variables:
+            _validate_variable(variable)
+        for job in template_proto.job_definitions:
+            for variable in job.variables:
+                _validate_variable(variable)
+    except ParseError as e:
+        raise InvalidArgumentException(details={'config': str(e)}) from e
+    return template_proto
+
+
+def dict_to_editor_info(editor_info):
+    try:
+        editor_info_proto = ParseDict(editor_info, WorkflowTemplateEditorInfo())
+    except ParseError as e:
+        raise InvalidArgumentException(details={'editor_info': str(e)}) from e
+    return editor_info_proto
+
+
+class WorkflowTemplateService:
+    FILTER_FIELDS = {
+        'name': SupportedField(type=FieldType.STRING, ops={FilterOp.CONTAIN: None}),
+        'group_alias': SupportedField(type=FieldType.STRING, ops={FilterOp.EQUAL: None}),
+        'kind': SupportedField(type=FieldType.NUMBER, ops={FilterOp.EQUAL: None}),
+    }
+
+    def __init__(self, session: Session):
+        self._session = session
+        self._filter_builder = FilterBuilder(model_class=WorkflowTemplate, supported_fields=self.FILTER_FIELDS)
+
+    def post_workflow_template(self, name: str, comment: str, config: WorkflowDefinition,
+                               editor_info: WorkflowTemplateEditorInfo, kind: int,
+                               creator_username: str) -> Union[WorkflowTemplate, None]:
+        if self._session.query(WorkflowTemplate).filter_by(name=name).first() is not None:
+            raise ResourceConflictException(f'Workflow template {name} already exists')
+        template_proto = _format_template_with_yaml_editor(config, editor_info, self._session)
+        template = WorkflowTemplate(name=name,
+                                    comment=comment,
+                                    group_alias=template_proto.group_alias,
+                                    kind=kind,
+                                    creator_username=creator_username)
+        template.set_config(template_proto)
+        template.set_editor_info(editor_info)
+        self._session.add(template)
+        return template
+
+    def get_workflow_template(self, name: str) -> WorkflowTemplate:
+        template = self._session.query(WorkflowTemplate).filter(WorkflowTemplate.name == name).first()
+        if template is None:
+            raise NotFoundException(message=f'failed to find workflow template {name}')
+        return template
+
+    def list_workflow_templates(self,
+                                page: Optional[int] = None,
+                                page_size: Optional[int] = None,
+                                filter_exp: Optional[FilterExpression] = None) -> Pagination:
+        """Lists workflow templates by filter expression and pagination."""
+        query = self._session.query(WorkflowTemplate)
+        if filter_exp:
+            query = self._filter_builder.build_query(query, filter_exp)
+        query = query.order_by(WorkflowTemplate.id.desc())
+        return paginate(query, page, page_size)
+
+
+class WorkflowTemplateRevisionService:
+
+    def __init__(self, session: Session):
+        self._session = session
+
+    def get_latest_revision(self, template_id: int) -> Optional[WorkflowTemplateRevision]:
+        return self._session.query(WorkflowTemplateRevision).filter_by(template_id=template_id).order_by(
+            desc(WorkflowTemplateRevision.revision_index)).first()
+
+    def create_new_revision_if_template_updated(self, template_id: int):
+        # Locks the template row so that revisions cannot be generated concurrently.
+        template = self._session.query(WorkflowTemplate).with_for_update().get(template_id)
+        if template is None:
+            raise NotFoundException(message=f'failed to find workflow template {template_id}')
+        latest_revision = self.get_latest_revision(template_id)
+        if latest_revision and latest_revision.config == template.config:
+            return latest_revision
+        new_revision = WorkflowTemplateRevision(revision_index=latest_revision.revision_index +
+                                                1 if latest_revision else 1,
+                                                config=template.config,
+                                                editor_info=template.editor_info,
+                                                template_id=template_id)
+        self._session.add(new_revision)
+        return new_revision
+
+    def delete_revision(self, revision_id: int):
+        revision = self._session.query(WorkflowTemplateRevision).get(revision_id)
+        if revision is None:
+            raise NotFoundException(message=f'failed to find template revision {revision_id}')
+        if revision.revision_index == self.get_latest_revision(revision.template_id).revision_index:
+            raise ResourceConflictException('can not delete the latest_revision')
+        # Checks if there is any related workflow
+        workflow = self._session.query(Workflow).filter_by(template_revision_id=revision_id).first()
+        if workflow is not None:
+            raise ResourceConflictException('revision has been used by workflows')
+        self._session.query(WorkflowTemplateRevision).filter_by(id=revision_id).delete()
+
+    def create_revision(self,
+                        name: str,
+                        kind: str,
+                        config: WorkflowDefinition,
+                        revision_index: int,
+                        comment: Optional[str] = None,
+                        peer_pure_domain: Optional[str] = None):
+        tpl: WorkflowTemplate = self._session.query(WorkflowTemplate).filter_by(
+            name=name, coordinator_pure_domain_name=peer_pure_domain).first()
+        if tpl is None:
+            tpl = WorkflowTemplate(name=name,
+                                   comment=comment,
+                                   coordinator_pure_domain_name=peer_pure_domain,
+                                   kind=WorkflowTemplateKind[kind].value,
+                                   group_alias=config.group_alias)
+            tpl.set_config(config)
+            self._session.add(tpl)
+            self._session.flush()
+        revision: WorkflowTemplateRevision = self._session.query(WorkflowTemplateRevision).filter_by(
+            template_id=tpl.id, revision_index=revision_index).first()
+        if revision is not None:
+            return
+        revision = WorkflowTemplateRevision(revision_index=revision_index, template_id=tpl.id, comment=comment)
+        revision.set_config(config)
+        self._session.add(revision)
+        self._session.flush()
+        tpl.config = self.get_latest_revision(tpl.id).config
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/service_test.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/service_test.py
new file mode 100644
index 000000000..ade21c6e5
--- /dev/null
+++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/service_test.py
@@ -0,0 +1,286 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
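The `with_for_update()` call above is what makes revision numbering safe under concurrency: it issues `SELECT ... FOR UPDATE` on the template row, so two requests cannot both read the same latest `revision_index` and insert duplicates, with the `uniq_revision_index_in_template` constraint as the backstop. An illustrative sketch of the intended serialization (template id 33 is arbitrary):

```python
# Rough timeline for two concurrent create-revision requests on template 33:
#
#   txn A: SELECT ... FOR UPDATE on template 33   -- acquires the row lock
#   txn B: SELECT ... FOR UPDATE on template 33   -- blocks until A finishes
#   txn A: latest revision_index = 3 -> inserts revision_index 4, COMMIT
#   txn B: resumes, sees revision 4 with identical config -> returns it
#
with db.session_scope() as session:
    revision = WorkflowTemplateRevisionService(session).create_new_revision_if_template_updated(template_id=33)
    session.commit()  # committing releases the row lock taken inside the service call
```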
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest + +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.filtering_pb2 import FilterExpression, FilterExpressionKind, SimpleExpression, FilterOp +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition +from fedlearner_webconsole.workflow.models import Workflow +from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateRevision, \ + WorkflowTemplateKind +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow_template.service import WorkflowTemplateService, WorkflowTemplateRevisionService, \ + _check_config_and_editor_info +from fedlearner_webconsole.exceptions import NotFoundException, ResourceConflictException +from testing.no_web_server_test_case import NoWebServerTestCase + + +class WorkflowTemplateServiceTest(NoWebServerTestCase): + + def test_post_workflow_template(self): + with db.session_scope() as session: + data = { + 'name': 'test_template', + 'comment': 'test-comment-0', + 'config': { + 'group_alias': 'g222', + }, + 'editor_info': { + 'yaml_editor_infos': {} + }, + 'kind': 1, + 'creator_username': 'ada' + } + config, editor_info = _check_config_and_editor_info(config=data['config'], editor_info=data['editor_info']) + template = WorkflowTemplateService(session).post_workflow_template( + name=data['name'], + comment=data['comment'], + config=config, + editor_info=editor_info, + kind=data['kind'], + creator_username=data['creator_username']) + self.assertEqual(template.to_dict()['name'], data['name']) + self.assertEqual(template.to_dict()['creator_username'], data['creator_username']) + session.add(template) + session.commit() + conflict_data = { + 'name': 'test_template', + 'comment': 'test-comment-1', + 'config': { + 'group_alias': 'g222', + }, + 'editor_info': { + 'yaml_editor_infos': {} + }, + 'kind': 0, + } + config, editor_info = _check_config_and_editor_info(config=conflict_data['config'], + editor_info=conflict_data['editor_info']) + with self.assertRaises(ResourceConflictException): + WorkflowTemplateService(session).post_workflow_template(name=conflict_data['name'], + comment=conflict_data['comment'], + config=config, + editor_info=editor_info, + kind=conflict_data['kind'], + creator_username='ada') + + def test_get_workflow_template(self): + with db.session_scope() as session: + config = WorkflowDefinition(group_alias='test_group_alias') + template = WorkflowTemplate(name='test_template', group_alias=config.group_alias, kind=1) + template.set_config(config) + session.add(template) + session.commit() + + with db.session_scope() as session: + template = WorkflowTemplateService(session).get_workflow_template(name='test_template') + self.assertEqual(template.get_config(), config) + self.assertEqual(template.kind, 1) + + with db.session_scope() as session: + with self.assertRaises(NotFoundException): + WorkflowTemplateService(session).get_workflow_template(name='test_unexists_template') + + def test_list_workflow_templates(self): + with db.session_scope() as session: + t1 = WorkflowTemplate(id=145, + 
name='test_template1', + group_alias='group1', + kind=WorkflowTemplateKind.PRESET.value, + config=b'') + t2 = WorkflowTemplate(id=146, + name='test_template2', + group_alias='group1', + kind=WorkflowTemplateKind.PRESET.value, + config=b'') + t3 = WorkflowTemplate(id=147, + name='hello', + group_alias='group2', + kind=WorkflowTemplateKind.DEFAULT.value, + config=b'') + session.add_all([t1, t2, t3]) + session.commit() + with db.session_scope() as session: + service = WorkflowTemplateService(session) + # No filter and pagination + pagination = service.list_workflow_templates() + self.assertEqual(pagination.get_number_of_items(), 3) + # Filter by name + pagination = service.list_workflow_templates( + filter_exp=FilterExpression( + kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='name', + op=FilterOp.CONTAIN, + string_value='template', + ), + ), + page=1, + page_size=1, + ) + self.assertEqual(pagination.get_number_of_items(), 2) + templates = pagination.get_items() + self.assertEqual(len(templates), 1) + self.assertEqual(templates[0].id, 146) + # Filter by group alias and kind + pagination = service.list_workflow_templates( + filter_exp=FilterExpression( + kind=FilterExpressionKind.AND, + exps=[ + FilterExpression( + kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='group_alias', + op=FilterOp.EQUAL, + string_value='group1', + ), + ), + FilterExpression( + kind=FilterExpressionKind.SIMPLE, + simple_exp=SimpleExpression( + field='kind', + op=FilterOp.EQUAL, + number_value=WorkflowTemplateKind.PRESET.value, + ), + ), + ], + ), + page=1, + page_size=10, + ) + self.assertEqual(pagination.get_number_of_items(), 2) + templates = pagination.get_items() + self.assertEqual(len(templates), 2) + self.assertEqual(templates[0].id, 146) + self.assertEqual(templates[1].id, 145) + + +class WorkflowTemplateRevisionServiceTest(NoWebServerTestCase): + + def setUp(self): + super().setUp() + + with db.session_scope() as session: + template = WorkflowTemplate(name='t1', comment='comment for t1', group_alias='g1') + template.set_config(WorkflowDefinition(group_alias='g2',)) + session.add(template) + session.commit() + self.template_id = template.id + + def test_get_latest_revision(self): + with db.session_scope() as session: + for i in range(5): + revision = WorkflowTemplateRevision(template_id=self.template_id, revision_index=i + 1, config=b'1') + session.add(revision) + session.commit() + with db.session_scope() as session: + self.assertEqual( + WorkflowTemplateRevisionService(session).get_latest_revision(self.template_id).revision_index, 5) + + def test_create_new_revision_if_template_updated(self): + with db.session_scope() as session: + WorkflowTemplateRevisionService(session).create_new_revision_if_template_updated(self.template_id) + WorkflowTemplateRevisionService(session).create_new_revision_if_template_updated(self.template_id) + session.commit() + with db.session_scope() as session: + self.assertEqual( + WorkflowTemplateRevisionService(session).get_latest_revision(self.template_id).revision_index, 1) + with db.session_scope() as session: + template = session.query(WorkflowTemplate).get(self.template_id) + template.set_config(WorkflowDefinition(group_alias='g1',)) + session.commit() + with db.session_scope() as session: + WorkflowTemplateRevisionService(session).create_new_revision_if_template_updated(self.template_id) + self.assertEqual( + WorkflowTemplateRevisionService(session).get_latest_revision(self.template_id).revision_index, 2) + + def 
test_delete_revision(self): + with db.session_scope() as session: + revision_without_wf = WorkflowTemplateRevision(id=3, + template_id=self.template_id, + revision_index=1, + config=b'1') + revision_with_wf = WorkflowTemplateRevision(id=4, + template_id=self.template_id, + revision_index=2, + config=b'1') + workflow = Workflow(template_revision_id=revision_with_wf.id) + revision_latest = WorkflowTemplateRevision(id=5, + template_id=self.template_id, + revision_index=3, + config=b'1') + session.add_all([ + revision_without_wf, + revision_with_wf, + workflow, + revision_latest, + ]) + session.commit() + with db.session_scope() as session: + # OK to delete + WorkflowTemplateRevisionService(session).delete_revision(3) + with self.assertRaises(ResourceConflictException) as cm: + WorkflowTemplateRevisionService(session).delete_revision(5) + self.assertEqual(cm.exception.message, 'can not delete the latest_revision') + with self.assertRaises(ResourceConflictException) as cm: + WorkflowTemplateRevisionService(session).delete_revision(4) + self.assertEqual(cm.exception.message, 'revision has been used by workflows') + session.commit() + with db.session_scope() as session: + revisions = session.query(WorkflowTemplateRevision).filter_by(template_id=self.template_id).all() + self.assertEqual(len(revisions), 2) + self.assertEqual(revisions[0].id, revision_with_wf.id) + self.assertEqual(revisions[1].id, revision_latest.id) + + def test_create_revision(self): + with db.session_scope() as session: + + WorkflowTemplateRevisionService(session).create_revision(config=WorkflowDefinition(group_alias='test'), + name='test', + revision_index=2, + comment='test comment', + kind=WorkflowTemplateKind.PEER.name, + peer_pure_domain='a') + session.commit() + with db.session_scope() as session: + tpl = session.query(WorkflowTemplate).filter_by(name='test').first() + self.assertEqual(tpl.coordinator_pure_domain_name, 'a') + self.assertEqual(tpl.kind, 2) + self.assertEqual(tpl.get_config(), WorkflowDefinition(group_alias='test')) + with db.session_scope() as session: + WorkflowTemplateRevisionService(session).create_revision(config=WorkflowDefinition(group_alias='test', + variables=[Variable()]), + name='test', + revision_index=3, + comment='test comment', + kind=WorkflowTemplateKind.PEER.name, + peer_pure_domain='a') + WorkflowTemplateRevisionService(session).create_revision(config=WorkflowDefinition(group_alias='test'), + name='test', + revision_index=1, + comment='test comment', + kind=WorkflowTemplateKind.PEER.name, + peer_pure_domain='a') + session.commit() + with db.session_scope() as session: + revisions = session.query(WorkflowTemplateRevision).filter_by(template_id=tpl.id).all() + self.assertEqual(sorted([r.revision_index for r in revisions]), [1, 2, 3]) + tpl = session.query(WorkflowTemplate).filter_by(name='test').first() + self.assertEqual(tpl.get_config(), WorkflowDefinition(group_alias='test', variables=[Variable()])) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formater_test.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formater_test.py new file mode 100644 index 000000000..cc896c203 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formater_test.py @@ -0,0 +1,86 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import unittest + +from google.protobuf.json_format import ParseDict + +from fedlearner_webconsole.proto.workflow_definition_pb2 import Slot +from fedlearner_webconsole.workflow_template.slots_formatter import format_yaml, generate_yaml_template, \ + _generate_slots_map + + +class SlotFormatterTest(unittest.TestCase): + + def test_format_yaml(self): + slots = {'Slot_prs': 'prs', 'Slot_prs1': 'prs1', 'dada': 'paopaotang'} + yaml = '${Slot_prs} a${asdf} ${Slot_prs1}' + self.assertEqual(format_yaml(yaml, **slots), 'prs a${asdf} prs1') + + def test_generate_yaml_template(self): + slots = { + 'Slot_prs': ParseDict({ + 'reference_type': 'DEFAULT', + 'default_value': 'prs' + }, Slot()), + 'Slot_prs1': Slot(reference_type=Slot.ReferenceType.PROJECT, reference='project.variables.namespace') + } + yaml = '${Slot_prs} a${asdf} ${Slot_prs1}' + self.assertEqual(generate_yaml_template(yaml, slots), '"prs" a${asdf} str(project.variables.namespace)') + + def test_generate_slots_map(self): + slots = { + 'Slot_prs': + ParseDict({ + 'reference_type': 'DEFAULT', + 'default_value': 'prs' + }, Slot()), + 'Slot_prs1': + Slot(reference_type=Slot.ReferenceType.PROJECT, reference='project.variables.namespace'), + 'Slot_pr2': + Slot(reference_type=Slot.ReferenceType.PROJECT, + reference='project.variables.namespace', + value_type='NUMBER'), + 'Slot_pr3': + Slot(reference_type=Slot.ReferenceType.PROJECT, + reference='project.variables.namespace', + value_type='INT'), + 'Slot_pr4': + Slot(reference_type=Slot.ReferenceType.PROJECT, + reference='project.variables.namespace', + value_type='BOOL'), + 'Slot_pr5': + Slot(reference_type=Slot.ReferenceType.PROJECT, + reference='project.variables.namespace', + value_type='OBJECT'), + 'Slot_pr6': + Slot(reference_type=Slot.ReferenceType.PROJECT, + reference='project.variables.namespace', + value_type='LIST') + } + self.assertEqual( + _generate_slots_map(slots), { + 'Slot_prs': '"prs"', + 'Slot_prs1': 'str(project.variables.namespace)', + 'Slot_pr2': 'float(project.variables.namespace)', + 'Slot_pr3': 'int(project.variables.namespace)', + 'Slot_pr4': 'bool(project.variables.namespace)', + 'Slot_pr5': 'dict(project.variables.namespace)', + 'Slot_pr6': 'list(project.variables.namespace)' + }) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formatter.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formatter.py index 2923c8928..0aa8f5ce0 100644 --- a/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formatter.py +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/slots_formatter.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,21 +12,49 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- # coding: utf-8 -from flatten_dict import flatten +from string import Template +from typing import Dict + +from fedlearner_webconsole.utils.pp_flatten_dict import flatten from fedlearner_webconsole.proto.workflow_definition_pb2 import Slot -from fedlearner_webconsole.workflow_template.template_validaor \ - import YamlTemplate +from fedlearner_webconsole.utils.proto import to_dict + + +class YamlTemplate(Template): + """This formatter is used to format placeholders + only can be observed in workflow_template module. + """ + delimiter = '$' + + # overwrite this func to escape the invalid placeholder such as ${UNKNOWN} + def _substitute(self, mapping, fixed_placeholder=None, ignore_invalid=False): + # Helper function for .sub() + def convert(mo): + # Check the most common path first. + named = mo.group('named') or mo.group('braced') + if named is not None: + if fixed_placeholder is not None: + return fixed_placeholder + return str(mapping[named]) + if mo.group('escaped') is not None: + return self.delimiter + if mo.group('invalid') is not None: + # overwrite to escape invalid placeholder + if ignore_invalid: + return mo.group() + self._invalid(mo) + raise ValueError('Unrecognized named group in pattern', self.pattern) + + return self.pattern.sub(convert, self.template) + class _YamlTemplate(YamlTemplate): # Which placeholders in the template should be interpreted idpattern = r'Slot_[a-z0-9_]*' def substitute(self, mapping): - return super()._substitute(mapping, - fixed_placeholder=None, - ignore_invalid=True) + return super()._substitute(mapping, fixed_placeholder=None, ignore_invalid=True) def format_yaml(yaml, **kwargs): @@ -38,26 +66,63 @@ def format_yaml(yaml, **kwargs): """ template = _YamlTemplate(yaml) try: - return template.substitute(flatten(kwargs or {}, - reducer='dot')) + return template.substitute(flatten(kwargs or {})) except KeyError as e: - raise RuntimeError( - 'Unknown placeholder: {}'.format(e.args[0])) from e + raise RuntimeError(f'Unknown placeholder: {e.args[0]}') from e -def generate_yaml_template(base_yaml, slots_proto): +def generate_yaml_template(base_yaml: str, slots_proto: Dict[str, Slot]): """ Args: base_yaml: A string representation of one type job's base yaml. slots_proto: A proto map object representation of modification - template's operable smallest units. + template's operable smallest units. Key is the slot name, and + the value is Slot proto object. 
Returns:
+        string: A yaml_template
+    """
+    slots = _generate_slots_map(slots_proto)
+    return format_yaml(base_yaml, **slots)
+
+
+def _generate_slots_map(slots_proto: dict) -> dict:
+    slots = {}
+    for key in slots_proto:
+        slot = slots_proto[key]
+        if slot.reference_type == Slot.DEFAULT:
+            slots[key] = _generate_slot_default(slot)
+        else:
+            slots[key] = _generate_slot_reference(slot)
+    return slots
+
+
+def _generate_slot_default(slot: Slot):
+    default_value = to_dict(slot.default_value)
+    # Add quotation marks around a string value so it is treated as a string, not a variable.
+    if slot.value_type == Slot.STRING:
+        return f'"{default_value}"'
+    if slot.value_type == Slot.INT:
+        if default_value is None:
+            return default_value
+        try:
+            return int(default_value)
+        except Exception as e:
+            raise ValueError(f'default_value of Slot: {slot.label} must be an int.') from e
+    return default_value
+
+
+def _generate_slot_reference(slot: Slot) -> str:
+    if slot.value_type == Slot.INT:
+        return f'int({slot.reference})'
+    if slot.value_type == Slot.NUMBER:
+        return f'float({slot.reference})'
+    if slot.value_type == Slot.BOOL:
+        return f'bool({slot.reference})'
+    if slot.value_type == Slot.OBJECT:
+        return f'dict({slot.reference})'
+    if slot.value_type == Slot.LIST:
+        return f'list({slot.reference})'
+    # Force transform to string, to avoid format errors.
+    if slot.value_type == Slot.STRING:
+        return f'str({slot.reference})'
+    return slot.reference
diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validaor.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validaor.py
index 2b6e668c7..1714022b5 100644
--- a/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validaor.py
+++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validaor.py
@@ -1,4 +1,4 @@
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,116 +13,44 @@
 # limitations under the License.

 # coding: utf-8
-import json
-from string import Template
-from flatten_dict import flatten
-from fedlearner_webconsole.job.yaml_formatter import make_variables_dict
-
-class YamlTemplate(Template):
-    """This formatter is used to format placeholders
-    only can be observed in workflow_template module.
-    """
-    delimiter = '$'
-
-    # overwrite this func to escape the invalid placeholder such as
-    def _substitute(self, mapping, fixed_placeholder=None,
-                    ignore_invalid=False):
-        # Helper function for .sub()
-        def convert(mo):
-            # Check the most common path first.
- named = mo.group('named') or mo.group('braced') - if named is not None: - if fixed_placeholder is not None: - return fixed_placeholder - return str(mapping[named]) - if mo.group('escaped') is not None: - return self.delimiter - if mo.group('invalid') is not None: - # overwrite to escape invalid placeholder - if ignore_invalid: - return mo.group() - self._invalid(mo) - raise ValueError('Unrecognized named group in pattern', - self.pattern) - - return self.pattern.sub(convert, self.template) - - -class _YamlTemplateOnlyFillWorkflow(YamlTemplate): - """This formatter is used to format placeholders - only can be observed in workflow_template module. - """ - # Which placeholders in the template should be interpreted - idpattern = r'(?:workflow\.jobs\.[a-zA-Z_\-0-9\[\]]+|workflow|self)' \ - r'\.variables\.[a-zA-Z_\-0-9\[\]]+' - - def substitute(self, mapping): - return super()._substitute(mapping, - fixed_placeholder=None, - ignore_invalid=True) - - -class _YamlTemplateFillAll(YamlTemplate): - """ - This formatter is used to format all valid placeholders with {} - """ - # Which placeholders in the template should be interpreted - idpattern = r'[a-zA-Z_\-\[0-9\]]+(\.[a-zA-Z_\-\[0-9\]]+)*' - def substitute(self, mapping): - return super()._substitute(mapping, - fixed_placeholder='{}', - ignore_invalid=False) - - -def format_yaml(yaml, **kwargs): - """Formats a yaml template. - - Example usage: - format_yaml('{"abc": ${x.y}}', x={'y': 123}) - output should be '{"abc": 123}' - """ - template = _YamlTemplateOnlyFillWorkflow(yaml) - try: - # checkout whether variables which can be observed at workflow_template - # module is consistent with placeholders in string - format_workflow = template.substitute(flatten(kwargs or {}, - reducer='dot')) - except KeyError as e: - raise ValueError( - f'Unknown placeholder: {e.args[0]}') from e - template = _YamlTemplateFillAll(format_workflow) - try: - # checkout whether other placeholders are valid and - # format them with {} in order to go ahead to next step, - # json format check - return template.substitute(flatten(kwargs or {}, - reducer='dot')) - except ValueError as e: - raise ValueError(f'Wrong placeholder: {str(e)} . 
' - f'Origin yaml: {format_workflow}') - - -def check_workflow_definition(workflow_definition): - workflow = {'variables': make_variables_dict(workflow_definition.variables), - 'jobs': {}} +from fedlearner_webconsole.job.yaml_formatter import\ + make_variables_dict +from fedlearner_webconsole.utils.pp_yaml import compile_yaml_template, GenerateDictService + + +def check_workflow_definition(workflow_definition, session): + workflow = { + 'variables': make_variables_dict(workflow_definition.variables), + 'jobs': {}, + 'uuid': 'test', + 'name': 'test', + 'id': 1, + 'creator': 'test' + } + project_stub = { + 'variables': { + 'storage_root_path': '/data' + }, + 'participants': [{ + 'egress_domain': 'domain_name', + 'egress_host': 'client_auth' + }], + 'id': 1, + 'name': 'test' + } for job_def in workflow_definition.job_definitions: - j_dic = {'variables': make_variables_dict(job_def.variables)} + j_dic = {'variables': make_variables_dict(job_def.variables), 'name': 'other_job_name_stub'} workflow['jobs'][job_def.name] = j_dic - for job_def in workflow_definition.job_definitions: - self_dict = {'variables': make_variables_dict(job_def.variables)} - try: - # check placeholders - yaml = format_yaml(job_def.yaml_template, - workflow=workflow, - self=self_dict) - except ValueError as e: - raise ValueError(f'job_name: {job_def.name} ' - f'Invalid placeholder: {str(e)}') + # fake job name to pass compile_yaml_template + self_dict = {'name': ' job_name_stub', 'variables': make_variables_dict(job_def.variables)} try: - # check json format - loaded = json.loads(yaml) + # check the result format + compile_yaml_template(job_def.yaml_template, [], + workflow=workflow, + self=self_dict, + system=GenerateDictService(session).generate_system_dict(), + project=project_stub) except Exception as e: # pylint: disable=broad-except - raise ValueError(f'job_name: {job_def.name} Invalid ' - f'json {repr(e)}: {yaml}') + raise ValueError(f'job_name: {job_def.name} Invalid python: {str(e)}') from e diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validator_test.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validator_test.py new file mode 100644 index 000000000..651689956 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/template_validator_test.py @@ -0,0 +1,61 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
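A note on the `_YamlTemplate` escape behavior introduced at the top of this change, before the validator tests below: because `idpattern` only matches `Slot_*` names and `_substitute()` is called with `ignore_invalid=True`, any other `${...}` group passes through untouched for the later compile step. A minimal standalone sketch of the same semantics using only the standard library (`safe_substitute` plays the role of the overridden `_substitute` with `ignore_invalid=True`; values are illustrative):

```python
from string import Template


class SlotTemplate(Template):
    # Mirrors _YamlTemplate: only Slot_* placeholders are interpreted.
    idpattern = r'Slot_[a-z0-9_]*'


yaml = '{"image": ${Slot_image}, "project": ${project.id}}'
# Slot_* groups are filled; ${project.id} does not match idpattern,
# so it survives verbatim for a later formatting stage.
print(SlotTemplate(yaml).safe_substitute(Slot_image='"fedlearner:latest"'))
# -> {"image": "fedlearner:latest", "project": ${project.id}}
```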
+ +# coding: utf-8 +import unittest + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow_template.template_validaor\ + import check_workflow_definition +from testing.workflow_template.test_template_left import make_workflow_template +from testing.no_web_server_test_case import NoWebServerTestCase + + +class TemplateValidatorTest(NoWebServerTestCase): + + def test_check_workflow_definition(self): + workflow_definition = make_workflow_template() + with db.session_scope() as session: + check_workflow_definition(workflow_definition, session) + + def test_check_more_json_wrong(self): + yaml_template_more_comma = '{"a": "aa", "b": self.xxx}' + workflow_definition = make_workflow_template() + workflow_definition.job_definitions[0].yaml_template = \ + yaml_template_more_comma + with db.session_scope() as session: + with self.assertRaises(ValueError): + check_workflow_definition(workflow_definition, session) + + def test_check_old_placeholder(self): + workflow_definition = make_workflow_template() + yaml_template_more_placeholder = '{"a": ${self.variables.batch_size}}' + workflow_definition.job_definitions[0].yaml_template = \ + yaml_template_more_placeholder + with db.session_scope() as session: + with self.assertRaises(ValueError): + check_workflow_definition(workflow_definition, session) + + def test_check_wrong_placeholder(self): + workflow_definition = make_workflow_template() + yaml_template_wrong_placeholder = '{"a": self.haha}' + workflow_definition.job_definitions[0].yaml_template =\ + yaml_template_wrong_placeholder + with db.session_scope() as session: + with self.assertRaises(ValueError): + check_workflow_definition(workflow_definition, session) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/utils.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/utils.py new file mode 100644 index 000000000..192cabbc8 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/utils.py @@ -0,0 +1,53 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
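The `utils` module added next turns plain Python values into `Variable` protos. One ordering detail matters: `bool` must be tested before `int`/`float`, since `bool` is a subclass of `int` in Python. A quick standalone illustration of why:

```python
# bool is a subclass of int, so an int-first isinstance check
# would mis-classify True/False as numbers.
print(isinstance(True, int))  # True


def classify(value):
    # Same check order as set_value below: bool before int/float.
    if isinstance(value, bool):
        return 'BOOL'
    if isinstance(value, (int, float)):
        return 'NUMBER'
    return 'OTHER'


print(classify(True))  # BOOL (not NUMBER)
print(classify(1.1))   # NUMBER
```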
+# + +import json +from typing import Dict, Union +from google.protobuf.struct_pb2 import Value, Struct + +from fedlearner_webconsole.proto.common_pb2 import Variable + + +def set_value(variable: Variable, typed_value: Union[None, str, int, float, bool, Dict]): + if typed_value is None: + return + # since `isinstance(True, int)` is true, bool type should be placed before int type + if isinstance(typed_value, bool): + variable.value = str(typed_value) + variable.value_type = Variable.ValueType.BOOL + variable.typed_value.bool_value = typed_value + elif isinstance(typed_value, (int, float)): + variable.value = str(typed_value) + variable.value_type = Variable.ValueType.NUMBER + variable.typed_value.number_value = typed_value + elif isinstance(typed_value, str): + variable.value = typed_value + variable.value_type = Variable.ValueType.STRING + variable.typed_value.string_value = typed_value + elif isinstance(typed_value, dict): + variable.value = json.dumps(typed_value) + variable.value_type = Variable.ValueType.OBJECT + struct_var = Struct() + struct_var.update(typed_value) + variable.typed_value.MergeFrom(Value(struct_value=struct_var)) + else: + raise NotImplementedError() + + +# TODO(xiangyuxuan): implement making variable from list typed_value +def make_variable(name: str, typed_value: Union[None, str, int, float, bool, Dict]) -> Variable: + variable = Variable(name=name) + set_value(variable, typed_value) + return variable diff --git a/web_console_v2/api/fedlearner_webconsole/workflow_template/utils_test.py b/web_console_v2/api/fedlearner_webconsole/workflow_template/utils_test.py new file mode 100644 index 000000000..ff3e79761 --- /dev/null +++ b/web_console_v2/api/fedlearner_webconsole/workflow_template/utils_test.py @@ -0,0 +1,58 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
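The tests that follow pin down `make_variable`'s output for each value type. On the slot side of this change, `_generate_slot_reference` (in the yaml_formatter hunk above) renders each typed reference as a cast expression; here is a standalone re-statement of that mapping, with plain strings standing in for the `Slot.ValueType` enum and a hypothetical reference string:

```python
# Cast prefix per value type, as in _generate_slot_reference.
CASTS = {'INT': 'int', 'NUMBER': 'float', 'BOOL': 'bool',
         'OBJECT': 'dict', 'LIST': 'list', 'STRING': 'str'}


def render_reference(value_type: str, reference: str) -> str:
    cast = CASTS.get(value_type)
    # Unknown types fall through to the raw, uncast reference.
    return f'{cast}({reference})' if cast else reference


print(render_reference('INT', 'project.variables.batch_size'))
# -> int(project.variables.batch_size)
```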
+ +# + +import unittest +from google.protobuf.struct_pb2 import Value, Struct + +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.workflow_template.utils import make_variable + + +class UtilsTest(unittest.TestCase): + + def test_make_variable(self): + var = make_variable(name='haha', typed_value=None) + self.assertEqual(var, Variable(name='haha')) + var = make_variable(name='haha', typed_value=1.1) + expected_var = Variable(name='haha', + value='1.1', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=1.1)) + self.assertEqual(var, expected_var) + var = make_variable(name='haha', typed_value='1') + expected_var = Variable(name='haha', + value='1', + value_type=Variable.ValueType.STRING, + typed_value=Value(string_value='1')) + self.assertEqual(var, expected_var) + var = make_variable(name='haha', typed_value=True) + expected_var = Variable(name='haha', + value='True', + value_type=Variable.ValueType.BOOL, + typed_value=Value(bool_value=True)) + self.assertEqual(var, expected_var) + var = make_variable(name='haha', typed_value={'value': 1}) + typed_value = Struct() + typed_value.update({'value': 1}) + expected_var = Variable(name='haha', + value='{"value": 1}', + value_type=Variable.ValueType.OBJECT, + typed_value=Value(struct_value=typed_value)) + self.assertEqual(var, expected_var) + self.assertRaises(NotImplementedError, lambda: make_variable('haha', [])) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/gunicorn_config.py b/web_console_v2/api/gunicorn_config.py index 66fd81b48..590550fc1 100644 --- a/web_console_v2/api/gunicorn_config.py +++ b/web_console_v2/api/gunicorn_config.py @@ -1,29 +1,30 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# +""" + This file is used by the gunicorn command line at runtime, so it is best kept free of other dependencies. -# coding: utf-8 -import os -from envs import Envs + Thus, `environ` is used instead of `envs.Envs`. +""" +from os import environ -bind = ':1991' +bind = f':{environ.get("RESTFUL_LISTEN_PORT", 1991)}' +# Some hooks install dependencies at runtime, so the worker timeout should be longer. +timeout = 600 workers = 1 threads = 10 worker_class = 'gthread' -secure_scheme_headers = { - 'X-FORWARDED-PROTOCOL': 'https', - 'X-FORWARDED-PROTO': 'https', - 'X-FORWARDED-SSL': 'on' -} +secure_scheme_headers = {'X-FORWARDED-PROTOCOL': 'https', 'X-FORWARDED-PROTO': 'https', 'X-FORWARDED-SSL': 'on'} -errorlog = f'{Envs.FEDLEARNER_WEBCONSOLE_LOG_DIR}/error.log' +errorlog = f'{environ.get("FEDLEARNER_WEBCONSOLE_LOG_DIR", ".")}/error.log' diff --git a/web_console_v2/api/logging_config.py b/web_console_v2/api/logging_config.py index 208487c4a..271491da3 100644 --- a/web_console_v2/api/logging_config.py +++ b/web_console_v2/api/logging_config.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors.
All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,34 +15,61 @@ # coding: utf-8 import os -import logging + from envs import Envs -LOGGING_CONFIG = { - 'version': 1, - 'disable_existing_loggers': False, - 'root': { - 'handlers': ['console', 'root_file'], - 'level': 'INFO' - }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', +root_file_path = os.path.join(Envs.FEDLEARNER_WEBCONSOLE_LOG_DIR, 'root.log') + +_extra_handlers = {} + + +def set_extra_handlers(handlers: dict): + """Sets extra handlers for logger. + + Incremental configurations are hard, so we keep LOGGING_CONFIG + as the source of truth and inject extra handlers.""" + global _extra_handlers # pylint:disable=global-statement + _extra_handlers = handlers + + +def get_logging_config(): + return { + 'version': + 1, + 'disable_existing_loggers': + False, + 'root': { + 'handlers': ['console', 'root_file'] + list(_extra_handlers.keys()), + 'level': Envs.LOG_LEVEL }, - 'root_file': { - 'class': 'logging.handlers.TimedRotatingFileHandler', - 'formatter': 'generic', - 'filename': os.path.join(Envs.FEDLEARNER_WEBCONSOLE_LOG_DIR, 'root.log'), - 'when': 'D', - 'interval': 1, - 'backupCount': 7 - } - }, - 'formatters': { - 'generic': { - 'format': '%(asctime)s [%(process)d] [%(levelname)s] %(message)s', - 'datefmt': '%Y-%m-%d %H:%M:%S', - 'class': 'logging.Formatter' + 'filters': { + 'requestIdFilter': { + '()': 'fedlearner_webconsole.middleware.log_filter.RequestIdLogFilter' + } + }, + 'handlers': + dict( + { + 'console': { + 'class': 'logging.StreamHandler', + 'formatter': 'generic', + 'filters': ['requestIdFilter'] + }, + 'root_file': { + 'class': 'logging.handlers.TimedRotatingFileHandler', + 'formatter': 'generic', + 'filename': root_file_path, + 'when': 'D', + 'interval': 1, + 'backupCount': 7, + 'filters': ['requestIdFilter'] + } + }, **_extra_handlers), + 'formatters': { + 'generic': { + 'format': '%(asctime)s [%(process)d] [%(request_id)s] [%(levelname)s] %(message)s', + 'datefmt': '%Y-%m-%d %H:%M:%S', + 'class': 'logging.Formatter' + } } } -} diff --git a/web_console_v2/api/migrations/BUILD.bazel b/web_console_v2/api/migrations/BUILD.bazel new file mode 100644 index 000000000..7e0f14be4 --- /dev/null +++ b/web_console_v2/api/migrations/BUILD.bazel @@ -0,0 +1,6 @@ +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +filegroup( + name = "migrations", + srcs = glob(["**/*"]), +) diff --git a/web_console_v2/api/migrations/README b/web_console_v2/api/migrations/README index 98e4f9c44..2500aa1bc 100644 --- a/web_console_v2/api/migrations/README +++ b/web_console_v2/api/migrations/README @@ -1 +1 @@ -Generic single-database configuration. \ No newline at end of file +Generic single-database configuration. diff --git a/web_console_v2/api/migrations/env.py b/web_console_v2/api/migrations/env.py index 1445b1e85..449d2baf2 100644 --- a/web_console_v2/api/migrations/env.py +++ b/web_console_v2/api/migrations/env.py @@ -1,3 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + from __future__ import with_statement import logging @@ -20,9 +35,7 @@ # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata -config.set_main_option( - 'sqlalchemy.url', - str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%')) +config.set_main_option('sqlalchemy.url', str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%')) target_metadata = current_app.extensions['migrate'].db.metadata # other values from the config, defined by the needs of env.py, @@ -30,7 +43,8 @@ # my_important_option = config.get_main_option("my_important_option") # ... etc. -BLOCK_AUTOGENERATE_LIST = ['models_v2'] +BLOCK_AUTOGENERATE_LIST = [] + def include_object(object, name, type_, reflected, compare_to): if type_ == 'table' and name in BLOCK_AUTOGENERATE_LIST: @@ -52,10 +66,7 @@ def run_migrations_offline(): """ url = config.get_main_option("sqlalchemy.url") - context.configure(url=url, - target_metadata=target_metadata, - literal_binds=True, - include_object=include_object) + context.configure(url=url, target_metadata=target_metadata, literal_binds=True, include_object=include_object) with context.begin_transaction(): context.run_migrations() @@ -82,12 +93,11 @@ def process_revision_directives(context, revision, directives): connectable = current_app.extensions['migrate'].db.engine with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata, - include_object=include_object, - process_revision_directives=process_revision_directives, - **current_app.extensions['migrate'].configure_args) + context.configure(connection=connection, + target_metadata=target_metadata, + include_object=include_object, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args) with context.begin_transaction(): context.run_migrations() diff --git a/web_console_v2/api/migrations/versions/0166249ad82d_alter_serving_deploy_platform_type.py b/web_console_v2/api/migrations/versions/0166249ad82d_alter_serving_deploy_platform_type.py new file mode 100644 index 000000000..a600b5afd --- /dev/null +++ b/web_console_v2/api/migrations/versions/0166249ad82d_alter_serving_deploy_platform_type.py @@ -0,0 +1,36 @@ +"""alter serving deploy platform type + +Revision ID: 0166249ad82d +Revises: 1c4c6f630642 +Create Date: 2022-09-21 10:34:16.507923 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '0166249ad82d' +down_revision = '1c4c6f630642' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('serving_deployments_v2', 'deploy_platform', + existing_type=mysql.VARCHAR(length=255), + type_=sa.Text(), + existing_comment='deploy platform. None means inside this platform', + existing_nullable=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('serving_deployments_v2', 'deploy_platform', + existing_type=sa.Text(), + type_=mysql.VARCHAR(length=255), + existing_comment='deploy platform. None means inside this platform', + existing_nullable=True) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/0d6eb48014d8_add_group_id_to_serving_models.py b/web_console_v2/api/migrations/versions/0d6eb48014d8_add_group_id_to_serving_models.py new file mode 100644 index 000000000..4274379bc --- /dev/null +++ b/web_console_v2/api/migrations/versions/0d6eb48014d8_add_group_id_to_serving_models.py @@ -0,0 +1,52 @@ +"""add group id to serving models + +Revision ID: 0d6eb48014d8 +Revises: 5f322c9d67ea +Create Date: 2022-08-03 20:12:41.173325 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0d6eb48014d8' +down_revision = '5f322c9d67ea' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable')) + op.add_column('model_groups_v2', sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='review ticket status')) + op.add_column('model_groups_v2', sa.Column('cron_job_global_config', sa.Text(length=16777215), nullable=True, comment='global config for cron job')) + op.add_column('model_groups_v2', sa.Column('algorithm_uuid_list', sa.Text(length=16777215), nullable=True, comment='algorithm project uuid for all participants')) + op.add_column('model_groups_v2', sa.Column('status', sa.Enum('PENDING', 'FAILED', 'SUCCEEDED', name='groupcreatestatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='create status')) + op.add_column('model_jobs_v2', sa.Column('global_config', sa.Text(length=16777215), nullable=True, comment='global_config')) + op.add_column('model_jobs_v2', sa.Column('status', sa.Enum('PENDING', 'READY', 'ERROR', 'RUNNING', 'STOPPED', 'SUCCEEDED', 'FAILED', name='modeljobstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='model job status')) + op.add_column('model_jobs_v2', sa.Column('auth_status', sa.Enum('PENDING', 'AUTHORIZED', name='authstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='authorization status')) + op.add_column('model_jobs_v2', sa.Column('error_message', sa.Text(), nullable=True, comment='error message')) + op.add_column('serving_deployments_v2', sa.Column('deploy_platform', sa.String(length=255), nullable=True, comment='deploy platform. None means inside this platform')) + op.add_column('serving_models_v2', sa.Column('model_group_id', sa.Integer(), nullable=True, comment='model group id for auto update scenario')) + op.add_column('serving_models_v2', sa.Column('pending_model_id', sa.Integer(), nullable=True, comment="model id when waiting for participants' config")) + op.add_column('serving_models_v2', sa.Column('pending_model_group_id', sa.Integer(), nullable=True, comment="model group id when waiting for participants' config")) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('serving_models_v2', 'pending_model_group_id') + op.drop_column('serving_models_v2', 'pending_model_id') + op.drop_column('serving_models_v2', 'model_group_id') + op.drop_column('serving_deployments_v2', 'deploy_platform') + op.drop_column('model_jobs_v2', 'error_message') + op.drop_column('model_jobs_v2', 'auth_status') + op.drop_column('model_jobs_v2', 'status') + op.drop_column('model_jobs_v2', 'global_config') + op.drop_column('model_groups_v2', 'status') + op.drop_column('model_groups_v2', 'algorithm_uuid_list') + op.drop_column('model_groups_v2', 'cron_job_global_config') + op.drop_column('model_groups_v2', 'ticket_status') + op.drop_column('model_groups_v2', 'ticket_uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/0f24b50d4077_add_metric_is_public_to_model_job.py b/web_console_v2/api/migrations/versions/0f24b50d4077_add_metric_is_public_to_model_job.py new file mode 100644 index 000000000..5d36b5464 --- /dev/null +++ b/web_console_v2/api/migrations/versions/0f24b50d4077_add_metric_is_public_to_model_job.py @@ -0,0 +1,29 @@ +"""add_metric_is_public_to_model_job + +Revision ID: 0f24b50d4077 +Revises: 88f6dd8bcb23 +Create Date: 2022-05-27 18:24:06.742262 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0f24b50d4077' +down_revision = '88f6dd8bcb23' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_jobs_v2', sa.Column('metric_is_public', sa.Boolean(), nullable=True, comment='is metric public')) + op.execute('UPDATE model_jobs_v2 SET metric_is_public = true') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('model_jobs_v2', 'metric_is_public') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/1122cc13ef9d_change_databatch_details_from_blob_to_.py b/web_console_v2/api/migrations/versions/1122cc13ef9d_change_databatch_details_from_blob_to_.py new file mode 100644 index 000000000..406910f52 --- /dev/null +++ b/web_console_v2/api/migrations/versions/1122cc13ef9d_change_databatch_details_from_blob_to_.py @@ -0,0 +1,28 @@ +"""Change DataBatch details from blob to mediumblob + +Revision ID: 1122cc13ef9d +Revises: 3433f0ca2193 +Create Date: 2021-11-22 14:59:00.406242 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '1122cc13ef9d' +down_revision = '3433f0ca2193' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('data_batches_v2', 'details', existing_type=sa.LargeBinary(), type_=sa.LargeBinary(16777215), comment='details') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('data_batches_v2', 'details', existing_type=sa.LargeBinary(16777215), type_=mysql.BLOB) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/14e747dbcc2f_remove_intersection_dataset.py b/web_console_v2/api/migrations/versions/14e747dbcc2f_remove_intersection_dataset.py new file mode 100644 index 000000000..8785c5bed --- /dev/null +++ b/web_console_v2/api/migrations/versions/14e747dbcc2f_remove_intersection_dataset.py @@ -0,0 +1,50 @@ +"""remove intersection dataset + +Revision ID: 14e747dbcc2f +Revises: 48bdf6b0ec1c +Create Date: 2022-04-25 16:42:58.693533 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '14e747dbcc2f' +down_revision = '48bdf6b0ec1c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('intersection_datasets_v2') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('intersection_datasets_v2', + sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False, comment='id'), + sa.Column('project_id', mysql.INTEGER(), autoincrement=False, nullable=False, comment='project id'), + sa.Column('dataset_id', mysql.INTEGER(), autoincrement=False, nullable=False, comment='dataset id'), + sa.Column('workflow_id', mysql.INTEGER(), autoincrement=False, nullable=False, comment='workflow id'), + sa.Column('name', mysql.VARCHAR(length=255), nullable=False, comment='dataset name'), + sa.Column('cmt', mysql.TEXT(), nullable=True, comment='comment of dataset'), + sa.Column('kind', mysql.INTEGER(), autoincrement=False, nullable=True, comment='dataset kind for different purposes'), + sa.Column('meta_info', mysql.LONGTEXT(), nullable=True, comment='dataset meta info'), + sa.Column('created_at', mysql.DATETIME(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True, comment='created time'), + sa.Column('updated_at', mysql.DATETIME(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True, comment='updated time'), + sa.Column('deleted_at', mysql.DATETIME(), nullable=True, comment='deleted time'), + sa.Column('fspath', mysql.VARCHAR(length=512), nullable=True, comment='dataset path'), + sa.Column('file_size', mysql.BIGINT(), autoincrement=False, nullable=True, comment='file_size in bytes'), + sa.Column('num_example', mysql.BIGINT(), autoincrement=False, nullable=True, comment='num_example'), + sa.Column('num_feature', mysql.BIGINT(), autoincrement=False, nullable=True, comment='num_feature'), + sa.PrimaryKeyConstraint('id'), + comment='intersection_datasets_v2', + mysql_collate='utf8mb4_0900_ai_ci', + mysql_comment='intersection_datasets_v2', + mysql_default_charset='utf8mb4', + mysql_engine='InnoDB' + ) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/1c4c6f630642_add_coordinator_id_for_dataset_job_stage.py b/web_console_v2/api/migrations/versions/1c4c6f630642_add_coordinator_id_for_dataset_job_stage.py new file mode 100644 index 000000000..ce0b00047 --- /dev/null +++ b/web_console_v2/api/migrations/versions/1c4c6f630642_add_coordinator_id_for_dataset_job_stage.py @@ -0,0 +1,28 @@ +"""add coordinator_id for dataset_job_stage + +Revision ID: 1c4c6f630642 +Revises: 9c321d108a16 +Create Date: 2022-09-19 18:50:39.799415 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision 
identifiers, used by Alembic. +revision = '1c4c6f630642' +down_revision = '9c321d108a16' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('dataset_job_stages_v2', sa.Column('coordinator_id', sa.Integer(), server_default=sa.text('0'), nullable=False, comment='participant id of this dataset_job_stage, 0 if it is coordinator')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('dataset_job_stages_v2', 'coordinator_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/1d646afb8846_remove_sql_enum_constraint.py b/web_console_v2/api/migrations/versions/1d646afb8846_remove_sql_enum_constraint.py new file mode 100644 index 000000000..8d1563b84 --- /dev/null +++ b/web_console_v2/api/migrations/versions/1d646afb8846_remove_sql_enum_constraint.py @@ -0,0 +1,41 @@ +"""Remove sql enum constraint + +Revision ID: 1d646afb8846 +Revises: 5810b21435ae +Create Date: 2021-06-22 14:56:58.517358 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1d646afb8846' +down_revision = '5810b21435ae' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + bind = op.get_bind() + version = bind.execute('select version()') + # 'drop check' is invalid if mysql version is less than 8 + if version is not None and version.fetchall()[0][0] > '8.0.0': + op.execute('ALTER TABLE job_v2 drop check jobtype') + op.execute('ALTER TABLE users_v2 drop check role') + op.execute('ALTER TABLE users_v2 drop check state') + op.execute('ALTER TABLE datasets_v2 drop check datasettype') + op.execute('ALTER TABLE data_batches_v2 drop check batchstate') + op.execute('ALTER TABLE workflow_v2 drop check recurtype') + op.execute('ALTER TABLE workflow_v2 drop check workflow_state') + op.execute('ALTER TABLE workflow_v2 drop check workflow_target_state') + op.execute('ALTER TABLE workflow_v2 drop check transactionstate') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + bind = op.get_bind() + # DO NOT DOWNGRADE THIS VERSION!!!! + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/1e882d34c760_support_light_client_in_participant.py b/web_console_v2/api/migrations/versions/1e882d34c760_support_light_client_in_participant.py new file mode 100644 index 000000000..244d8a244 --- /dev/null +++ b/web_console_v2/api/migrations/versions/1e882d34c760_support_light_client_in_participant.py @@ -0,0 +1,30 @@ +"""add_type_and_last_connected_at_to_participant + +Revision ID: 1e882d34c760 +Revises: 93d756004237 +Create Date: 2022-01-04 16:33:03.565990 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1e882d34c760' +down_revision = '93d756004237' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('participants_v2', sa.Column('last_connected_at', sa.DateTime(timezone=True), nullable=True, comment='last connected at')) + op.add_column('participants_v2', sa.Column('participant_type', sa.Enum('PLATFORM', 'LIGHT_CLIENT', name='participanttype', native_enum=False, create_constraint=False, length=32), default='PLATFORM', nullable=True, comment='participant type')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('participants_v2', 'participant_type') + op.drop_column('participants_v2', 'last_connected_at') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/263717c82a6a_adds_cascade_columns_for_settings.py b/web_console_v2/api/migrations/versions/263717c82a6a_adds_cascade_columns_for_settings.py new file mode 100644 index 000000000..8f84d1322 --- /dev/null +++ b/web_console_v2/api/migrations/versions/263717c82a6a_adds_cascade_columns_for_settings.py @@ -0,0 +1,30 @@ +"""adds cascade columns for settings + +Revision ID: 263717c82a6a +Revises: 9a9b20f3804e +Create Date: 2021-08-02 21:54:54.421093 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '263717c82a6a' +down_revision = '9a9b20f3804e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('settings_v2', sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created at')) + op.add_column('settings_v2', sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated at')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('settings_v2', 'updated_at') + op.drop_column('settings_v2', 'created_at') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/2d9c3afaa7fe_add_model_serving_two_db_tables.py b/web_console_v2/api/migrations/versions/2d9c3afaa7fe_add_model_serving_two_db_tables.py new file mode 100644 index 000000000..de0453dfd --- /dev/null +++ b/web_console_v2/api/migrations/versions/2d9c3afaa7fe_add_model_serving_two_db_tables.py @@ -0,0 +1,63 @@ +"""add model serving two db tables + +Revision ID: 2d9c3afaa7fe +Revises: 7cf3168e68fb +Create Date: 2021-09-25 20:44:13.208086 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2d9c3afaa7fe' +down_revision = '7cf3168e68fb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('serving_deployments_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('deployment_name', sa.String(length=255), nullable=True, comment='deployment name'), + sa.Column('rsc', sa.String(length=255), nullable=True, comment='resource'), + sa.Column('endpoint', sa.String(length=255), nullable=True, comment='endpoint'), + sa.Column('status', sa.Enum('UNAVAILABLE', 'AVAILABLE', name='servingdeploymentstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='status'), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True, comment='created_at'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra'), + sa.PrimaryKeyConstraint('id'), + comment='serving deployments in webconsole', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_table('serving_models_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('serving_deployment_id', sa.Integer(), nullable=True, comment='serving deployment db id'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment'), + sa.Column('model_id', sa.Integer(), nullable=True, comment='model id'), + sa.Column('model_type', sa.Enum('UNSPECIFIED', 'NN_MODEL', 'TREE_MODEL', name='modeltype', native_enum=False, length=64, create_constraint=False), nullable=True, comment='model type'), + sa.Column('model_path', sa.String(length=255), nullable=True, comment="model's path"), + sa.Column('signature', sa.Text(), nullable=True, comment='model signature'), + sa.Column('status', sa.Enum('UNKNOWN', 'LOADING', 'AVAILABLE', 'UNLOADING', name='servingmodelstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='status'), + sa.Column('endpoint', sa.String(length=255), nullable=True, comment='endpoint'), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True, comment='created_at'), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True, comment='updated_at'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name', name='uniq_name'), + comment='serving models', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('serving_models_v2') + op.drop_table('serving_deployments_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/2d9ee2c8ac6e_change_workflow_name_unique.py b/web_console_v2/api/migrations/versions/2d9ee2c8ac6e_change_workflow_name_unique.py new file mode 100644 index 000000000..f165f6895 --- /dev/null +++ b/web_console_v2/api/migrations/versions/2d9ee2c8ac6e_change_workflow_name_unique.py @@ -0,0 +1,30 @@ +"""change_workflow_name_unique + +Revision ID: 2d9ee2c8ac6e +Revises: d6b3fa7f23a6 +Create Date: 2022-03-24 15:16:31.251607 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2d9ee2c8ac6e' +down_revision = 'd6b3fa7f23a6' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_unique_constraint('uniq_name_in_project', 'workflow_v2', ['project_id', 'name']) + op.drop_index('uniq_name', table_name='workflow_v2') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_index('uniq_name', 'workflow_v2', ['name'], unique=True) + op.drop_constraint('uniq_name_in_project', 'workflow_v2', type_='unique') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/2ffa86e5e692_optimize_sign_in_related_tables.py b/web_console_v2/api/migrations/versions/2ffa86e5e692_optimize_sign_in_related_tables.py new file mode 100644 index 000000000..89970b884 --- /dev/null +++ b/web_console_v2/api/migrations/versions/2ffa86e5e692_optimize_sign_in_related_tables.py @@ -0,0 +1,32 @@ +"""optimize sign-in related tables + +Revision ID: 2ffa86e5e692 +Revises: ec68faa511cc +Create Date: 2021-10-22 12:15:10.710910 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2ffa86e5e692' +down_revision = 'ec68faa511cc' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('session_v2', sa.Column('user_id', sa.Integer(), nullable=False, comment='for whom the session is created')) + op.add_column('users_v2', sa.Column('last_sign_in_at', sa.DateTime(timezone=True), nullable=True, comment='the last time when user tries to sign in')) + op.add_column('users_v2', sa.Column('failed_sign_in_attempts', sa.Integer(), nullable=False, comment='failed sign in attempts since last successful sign in')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('users_v2', 'failed_sign_in_attempts') + op.drop_column('users_v2', 'last_sign_in_at') + op.drop_column('session_v2', 'user_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/3433f0ca2193_add_model_serving_negotiator_table.py b/web_console_v2/api/migrations/versions/3433f0ca2193_add_model_serving_negotiator_table.py new file mode 100644 index 000000000..168664022 --- /dev/null +++ b/web_console_v2/api/migrations/versions/3433f0ca2193_add_model_serving_negotiator_table.py @@ -0,0 +1,50 @@ +"""add model serving negotiator table + +Revision ID: 3433f0ca2193 +Revises: e9ce77d87969 +Create Date: 2021-11-12 19:10:01.277875 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '3433f0ca2193' +down_revision = 'e9ce77d87969' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('serving_negotiators_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('serving_model_id', sa.Integer(), nullable=False, comment='serving model id'), + sa.Column('is_local', sa.Boolean(), nullable=True, comment='can serving locally'), + sa.Column('with_label', sa.Boolean(), nullable=True, comment='federal side with label or not'), + sa.Column('serving_model_uuid', sa.String(length=255), nullable=True, comment='uuid for federal model'), + sa.Column('feature_dataset_id', sa.Integer(), nullable=True, comment='feature dataset id'), + sa.Column('data_source_map', sa.Text(), nullable=True, comment='where to get model inference arguments'), + sa.Column('raw_signature', sa.Text(), nullable=True, comment='save raw signature from tf serving'), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True, comment='created_at'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra'), + sa.PrimaryKeyConstraint('id'), + comment='serving negotiators in webconsole', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_serving_model_uuid', 'serving_negotiators_v2', ['serving_model_uuid'], unique=False) + op.add_column('algorithms_v2', sa.Column('algorithm_project_id', sa.Integer(), nullable=True, comment='algorithm project id')) + op.create_unique_constraint('uniq_source_name_version', 'algorithms_v2', ['source', 'name', 'version']) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('uniq_source_name_version', 'algorithms_v2', type_='unique') + op.drop_column('algorithms_v2', 'algorithm_project_id') + op.drop_index('idx_serving_model_uuid', table_name='serving_negotiators_v2') + op.drop_table('serving_negotiators_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/351844c6fd92_batches_v2_make_event_time_optional.py b/web_console_v2/api/migrations/versions/351844c6fd92_batches_v2_make_event_time_optional.py new file mode 100644 index 000000000..c0b3b62ce --- /dev/null +++ b/web_console_v2/api/migrations/versions/351844c6fd92_batches_v2_make_event_time_optional.py @@ -0,0 +1,36 @@ +"""[batches_v2]: make event time optional + +Revision ID: 351844c6fd92 +Revises: 8bde5b704062 +Create Date: 2022-06-27 05:26:34.313560 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '351844c6fd92' +down_revision = '7549e6d94cbb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('data_batches_v2', + 'event_time', + existing_type=mysql.TIMESTAMP(), + nullable=True, + existing_comment='event_time') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('data_batches_v2', + 'event_time', + existing_type=mysql.TIMESTAMP(), + nullable=False, + existing_comment='event_time') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/368b8bd6d559_add_tpl_pure_domain_name.py b/web_console_v2/api/migrations/versions/368b8bd6d559_add_tpl_pure_domain_name.py new file mode 100644 index 000000000..9d6824d6f --- /dev/null +++ b/web_console_v2/api/migrations/versions/368b8bd6d559_add_tpl_pure_domain_name.py @@ -0,0 +1,28 @@ +"""add tpl pure domain name + +Revision ID: 368b8bd6d559 +Revises: c01cdd2253e4 +Create Date: 2022-09-28 17:13:13.883713 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '368b8bd6d559' +down_revision = 'c01cdd2253e4' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('template_v2', sa.Column('coordinator_pure_domain_name', sa.String(length=255), nullable=True, comment='name of the coordinator')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('template_v2', 'coordinator_pure_domain_name') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/3b4198785f3d_refactor_models_and_model_groups_table.py b/web_console_v2/api/migrations/versions/3b4198785f3d_refactor_models_and_model_groups_table.py new file mode 100644 index 000000000..724b7bc18 --- /dev/null +++ b/web_console_v2/api/migrations/versions/3b4198785f3d_refactor_models_and_model_groups_table.py @@ -0,0 +1,100 @@ +"""refactor_models_and_model_groups_table + +Revision ID: 3b4198785f3d +Revises: 9d3a1d2393e7 +Create Date: 2021-09-24 15:16:05.800630 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '3b4198785f3d' +down_revision = '9d3a1d2393e7' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('model_groups_v2', sa.Column('cmt', sa.Text(), nullable=True, comment='comment')) + op.add_column('model_groups_v2', sa.Column('project_id', sa.Integer(), nullable=True, comment='project_id')) + op.create_unique_constraint('uniq_name', 'model_groups_v2', ['name']) + op.add_column('model_jobs_v2', sa.Column('favorite', sa.Boolean(), nullable=True, comment='favorite')) + op.add_column('model_jobs_v2', sa.Column('model_job_type', sa.Enum('UNSPECIFIED', 'NN_TRAINING', 'NN_EVALUATION', 'NN_PREDICTION', 'TREE_TRAINING', 'TREE_EVALUATION', 'TREE_PREDICTION', name='modeljobtype', native_enum=False, create_constraint=False, length=32), default='UNSPECIFIED', nullable=True, comment='type')) + op.create_table_comment( + 'model_jobs_v2', + 'model_jobs_v2', + existing_comment='model', + schema=None + ) + op.drop_column('model_jobs_v2', 'type') + op.add_column('models_v2', sa.Column('cmt', sa.Text(), nullable=True, comment='comment')) + op.add_column('models_v2', sa.Column('favorite', sa.Boolean(), nullable=True, comment='favorite model')) + op.add_column('models_v2', sa.Column('job_id', sa.Integer(), nullable=True, comment='job id')) + op.add_column('models_v2', sa.Column('model_job_id', sa.Integer(), nullable=True, comment='model job id')) + op.add_column('models_v2', sa.Column('model_type', sa.Enum('UNSPECIFIED', 'NN_MODEL', 'TREE_MODEL', name='modeltype', native_enum=False, create_constraint=False, length=32), default='UNSPECIFIED', nullable=True, comment='type')) + op.add_column('models_v2', sa.Column('model_path', sa.String(length=512), nullable=True, comment='model path')) + op.add_column('models_v2', sa.Column('project_id', sa.Integer(), nullable=True, comment='project_id')) + op.add_column('models_v2', sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid')) + op.drop_index('idx_job_name', table_name='models_v2') + op.drop_index('uniq_job_name', table_name='models_v2') + op.create_unique_constraint('uniq_name', 'models_v2', ['name']) + op.create_unique_constraint('uniq_uuid', 'models_v2', ['uuid']) + op.create_table_comment( + 'models_v2', + 'models_v2', + existing_comment='model', + schema=None + ) + op.drop_column('models_v2', 'params') + op.drop_column('models_v2', 'extra') + op.drop_column('models_v2', 'metrics') + op.drop_column('models_v2', 'type') + op.drop_column('models_v2', 'state') + op.drop_column('models_v2', 'parent_id') + op.drop_column('models_v2', 'job_name') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('models_v2', sa.Column('job_name', mysql.VARCHAR(length=255), nullable=True, comment='job_name')) + op.add_column('models_v2', sa.Column('parent_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True, comment='parent_id')) + op.add_column('models_v2', sa.Column('state', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True, comment='state')) + op.add_column('models_v2', sa.Column('type', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True, comment='type')) + op.add_column('models_v2', sa.Column('metrics', mysql.TEXT(), nullable=True, comment='metrics')) + op.add_column('models_v2', sa.Column('extra', mysql.TEXT(), nullable=True, comment='extra')) + op.add_column('models_v2', sa.Column('params', mysql.TEXT(), nullable=True, comment='params')) + op.create_table_comment( + 'models_v2', + 'model', + existing_comment='models_v2', + schema=None + ) + op.drop_constraint('uniq_uuid', 'models_v2', type_='unique') + op.drop_constraint('uniq_name', 'models_v2', type_='unique') + op.create_index('uniq_job_name', 'models_v2', ['job_name'], unique=True) + op.create_index('idx_job_name', 'models_v2', ['job_name'], unique=False) + op.drop_column('models_v2', 'uuid') + op.drop_column('models_v2', 'project_id') + op.drop_column('models_v2', 'model_path') + op.drop_column('models_v2', 'model_type') + op.drop_column('models_v2', 'model_job_id') + op.drop_column('models_v2', 'job_id') + op.drop_column('models_v2', 'favorite') + op.drop_column('models_v2', 'cmt') + op.add_column('model_jobs_v2', sa.Column('type', mysql.VARCHAR(length=32), nullable=True, comment='type')) + op.create_table_comment( + 'model_jobs_v2', + 'model', + existing_comment='model_jobs_v2', + schema=None + ) + op.drop_column('model_jobs_v2', 'model_job_type') + op.drop_column('model_jobs_v2', 'favorite') + op.drop_constraint('uniq_name', 'model_groups_v2', type_='unique') + op.drop_column('model_groups_v2', 'project_id') + op.drop_column('model_groups_v2', 'cmt') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/3cec138fc005_add_projector_creator.py b/web_console_v2/api/migrations/versions/3cec138fc005_add_projector_creator.py new file mode 100644 index 000000000..75538b7fe --- /dev/null +++ b/web_console_v2/api/migrations/versions/3cec138fc005_add_projector_creator.py @@ -0,0 +1,28 @@ +"""add projector creator + +Revision ID: 3cec138fc005 +Revises: 1122cc13ef9d +Create Date: 2021-12-01 17:26:22.038103 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '3cec138fc005' +down_revision = '1122cc13ef9d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('projects_v2', sa.Column('creator', sa.String(length=255), nullable=True, comment='creator')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('projects_v2', 'creator') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/3e5b9367ced3_feat_dataset_add_dataset_job_tables.py b/web_console_v2/api/migrations/versions/3e5b9367ced3_feat_dataset_add_dataset_job_tables.py new file mode 100644 index 000000000..ad8fa47ba --- /dev/null +++ b/web_console_v2/api/migrations/versions/3e5b9367ced3_feat_dataset_add_dataset_job_tables.py @@ -0,0 +1,48 @@ +"""feat(dataset): add dataset job tables + +Revision ID: 3e5b9367ced3 +Revises: 539df90fe13e +Create Date: 2022-02-23 13:26:17.935040 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '3e5b9367ced3' +down_revision = '539df90fe13e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('dataset_jobs_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id of dataset job'), + sa.Column('uuid', sa.String(length=255), nullable=False, comment='dataset job uuid'), + sa.Column('state', sa.Enum('RUNNING', 'SUCCEEDED', 'FAILED', name='datasetjobstate', length=64, native_enum=False, create_constraint=False), nullable=False, default='RUNNING', comment='dataset job state'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('input_dataset_id', sa.Integer(), nullable=False, comment='input dataset id'), + sa.Column('output_dataset_id', sa.Integer(), nullable=False, comment='output dataset id'), + sa.Column('kind', sa.Enum('RSA_PSI_DATA_JOIN', 'LIGHT_CLIENT_RSA_PSI_DATA_JOIN', 'OT_PSI_DATA_JOIN', 'DATA_JOIN', 'DATA_ALIGNMENT', 'IMPORT_SOURCE', name='datasetjobkind', length=128, native_enum=False, create_constraint=False), nullable=False, comment='dataset job kind'), + sa.Column('workflow_id', sa.Integer(), nullable=True, comment='relating workflow id'), + sa.Column('context', sa.Text(), nullable=True, comment='context info of dataset job'), + sa.Column('global_configs', sa.Text(), nullable=True, comment='global configs of this job including related participants only appear in coordinator'), + sa.Column('coordinator_id', sa.Integer(), nullable=False, comment='participant id of this job coordinator'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created time'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated time'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid', name='uniq_dataset_job_uuid'), + comment='dataset_jobs_v2', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('dataset_jobs_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/40569c37cb87_add_algorithm_uuid.py b/web_console_v2/api/migrations/versions/40569c37cb87_add_algorithm_uuid.py new file mode 100644 index 000000000..d4cb1b793 --- /dev/null +++ b/web_console_v2/api/migrations/versions/40569c37cb87_add_algorithm_uuid.py @@ -0,0 +1,38 @@ +"""add algorithm uuid + +Revision ID: 40569c37cb87 +Revises: 2d9ee2c8ac6e +Create Date: 2022-03-30 20:56:11.940565 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '40569c37cb87' +down_revision = '2d9ee2c8ac6e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('algorithm_projects_v2', sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid')) + op.create_unique_constraint('uniq_uuid', 'algorithm_projects_v2', ['uuid']) + op.add_column('algorithms_v2', sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid')) + op.create_unique_constraint('uniq_uuid', 'algorithms_v2', ['uuid']) + op.add_column('pending_algorithms_v2', sa.Column('algorithm_uuid', sa.String(length=64), nullable=True, comment='algorithm uuid')) + op.add_column('pending_algorithms_v2', sa.Column('algorithm_project_uuid', sa.String(length=64), nullable=True, comment='algorithm project uuid')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('pending_algorithms_v2', 'algorithm_project_uuid') + op.drop_column('pending_algorithms_v2', 'algorithm_uuid') + op.drop_constraint('uniq_uuid', 'algorithms_v2', type_='unique') + op.drop_column('algorithms_v2', 'uuid') + op.drop_constraint('uniq_uuid', 'algorithm_projects_v2', type_='unique') + op.drop_column('algorithm_projects_v2', 'uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/42fc79d5a917_add_spark_info_in_jobs_table.py b/web_console_v2/api/migrations/versions/42fc79d5a917_add_spark_info_in_jobs_table.py new file mode 100644 index 000000000..d30533ce6 --- /dev/null +++ b/web_console_v2/api/migrations/versions/42fc79d5a917_add_spark_info_in_jobs_table.py @@ -0,0 +1,41 @@ +"""add spark info in jobs table + +Revision ID: 42fc79d5a917 +Revises: b3290c1bf67a +Create Date: 2021-06-15 12:42:08.060464 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '42fc79d5a917' +down_revision = 'b3290c1bf67a' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('model_groups_v2', + sa.Column('id', sa.Integer(), nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated_at'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted_at'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra'), + sa.PrimaryKeyConstraint('id'), + comment='model_groups_v2', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.add_column('job_v2', sa.Column('sparkapp_snapshot', sa.Text(length=16777215), nullable=True, comment='sparkapp snapshot')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('job_v2', 'sparkapp_snapshot') + op.drop_table('model_groups_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/45362e49da14_export_dataset.py b/web_console_v2/api/migrations/versions/45362e49da14_export_dataset.py new file mode 100644 index 000000000..2a8aab4a5 --- /dev/null +++ b/web_console_v2/api/migrations/versions/45362e49da14_export_dataset.py @@ -0,0 +1,30 @@ +"""export dataset + +Revision ID: 45362e49da14 +Revises: c3e83aed516c +Create Date: 2022-11-09 16:55:43.572595 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '45362e49da14' +down_revision = 'c3e83aed516c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('trusted_jobs_v2', sa.Column('export_dataset_id', sa.Integer(), nullable=True, comment='export dataset id')) + op.drop_column('trusted_jobs_v2', 'dataset_job_id') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('trusted_jobs_v2', sa.Column('dataset_job_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='dataset job id')) + op.drop_column('trusted_jobs_v2', 'export_dataset_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/4889e8718b51_add_dataset_job_stage_context.py b/web_console_v2/api/migrations/versions/4889e8718b51_add_dataset_job_stage_context.py new file mode 100644 index 000000000..f037b61c6 --- /dev/null +++ b/web_console_v2/api/migrations/versions/4889e8718b51_add_dataset_job_stage_context.py @@ -0,0 +1,28 @@ +"""add dataset job stage context + +Revision ID: 4889e8718b51 +Revises: cd52ddd1d2ac +Create Date: 2022-08-26 16:40:44.451199 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4889e8718b51' +down_revision = 'cd52ddd1d2ac' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('dataset_job_stages_v2', sa.Column('context', sa.Text(), nullable=True, comment='context info of dataset job stage')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('dataset_job_stages_v2', 'context') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/48bdf6b0ec1c_add_start_at_and_stop_at_to_dataset_job.py b/web_console_v2/api/migrations/versions/48bdf6b0ec1c_add_start_at_and_stop_at_to_dataset_job.py new file mode 100644 index 000000000..73702c123 --- /dev/null +++ b/web_console_v2/api/migrations/versions/48bdf6b0ec1c_add_start_at_and_stop_at_to_dataset_job.py @@ -0,0 +1,29 @@ +"""add start_at and stop_at to dataset_job + +Revision ID: 48bdf6b0ec1c +Revises: a36a936f2a1f +Create Date: 2022-04-21 12:05:56.406849 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = '48bdf6b0ec1c' +down_revision = 'a36a936f2a1f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('dataset_jobs_v2', sa.Column('started_at', sa.DateTime(timezone=True), nullable=True, comment='started_at')) + op.add_column('dataset_jobs_v2', sa.Column('finished_at', sa.DateTime(timezone=True), nullable=True, comment='finished_at')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('dataset_jobs_v2', 'finished_at') + op.drop_column('dataset_jobs_v2', 'started_at') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/4d281f0968fa_participant_add_migrations.py b/web_console_v2/api/migrations/versions/4d281f0968fa_participant_add_migrations.py new file mode 100644 index 000000000..65bf2d91e --- /dev/null +++ b/web_console_v2/api/migrations/versions/4d281f0968fa_participant_add_migrations.py @@ -0,0 +1,30 @@ +"""participant add migrations + +Revision ID: 4d281f0968fa +Revises: 45362e49da14 +Create Date: 2022-11-23 08:49:38.983606 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4d281f0968fa' +down_revision = '45362e49da14' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('participants_v2', sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable')) + op.add_column('participants_v2', sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='review ticket status')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('participants_v2', 'ticket_status') + op.drop_column('participants_v2', 'ticket_uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/4d7e27d1b38f_add_two_latest_dataset_job_stage_column.py b/web_console_v2/api/migrations/versions/4d7e27d1b38f_add_two_latest_dataset_job_stage_column.py new file mode 100644 index 000000000..4dcc54f78 --- /dev/null +++ b/web_console_v2/api/migrations/versions/4d7e27d1b38f_add_two_latest_dataset_job_stage_column.py @@ -0,0 +1,30 @@ +"""add two latest dataset_job_stage column + +Revision ID: 4d7e27d1b38f +Revises: 0d6eb48014d8 +Create Date: 2022-08-15 18:28:01.649154 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4d7e27d1b38f' +down_revision = '0d6eb48014d8' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('data_batches_v2', sa.Column('latest_parent_dataset_job_stage_id', sa.Integer(), server_default=sa.text('0'), nullable=False, comment='latest parent dataset_job_stage id')) + op.add_column('data_batches_v2', sa.Column('latest_analyzer_dataset_job_stage_id', sa.Integer(), server_default=sa.text('0'), nullable=False, comment='latest analyzer dataset_job_stage id')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('data_batches_v2', 'latest_analyzer_dataset_job_stage_id') + op.drop_column('data_batches_v2', 'latest_parent_dataset_job_stage_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/5082991f34c7_migration_dataset.py b/web_console_v2/api/migrations/versions/5082991f34c7_migration_dataset.py new file mode 100644 index 000000000..e5646bb4e --- /dev/null +++ b/web_console_v2/api/migrations/versions/5082991f34c7_migration_dataset.py @@ -0,0 +1,32 @@ +"""migration dataset + +Revision ID: 5082991f34c7 +Revises: c2a9703b8472 +Create Date: 2022-10-12 18:55:33.760153 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '5082991f34c7' +down_revision = 'c2a9703b8472' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('auth_status', sa.Enum('PENDING', 'AUTHORIZED', 'WITHDRAW', name='authstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='auth status')) + op.add_column('datasets_v2', sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info')) + op.drop_column('datasets_v2', 'state') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('state', mysql.VARCHAR(length=64), nullable=True, comment='state')) + op.drop_column('datasets_v2', 'participants_info') + op.drop_column('datasets_v2', 'auth_status') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/539df90fe13e_add_version_in_model_job_and_group.py b/web_console_v2/api/migrations/versions/539df90fe13e_add_version_in_model_job_and_group.py new file mode 100644 index 000000000..2ba5bf861 --- /dev/null +++ b/web_console_v2/api/migrations/versions/539df90fe13e_add_version_in_model_job_and_group.py @@ -0,0 +1,30 @@ +"""add_version_in_model_job_and_group + +Revision ID: 539df90fe13e +Revises: 61f65a987341 +Create Date: 2022-02-14 19:54:39.272811 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '539df90fe13e' +down_revision = '61f65a987341' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('latest_version', sa.Integer(), nullable=True, comment='latest version')) + op.add_column('model_jobs_v2', sa.Column('version', sa.Integer(), nullable=True, comment='version')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
###
+    op.drop_column('model_jobs_v2', 'version')
+    op.drop_column('model_groups_v2', 'latest_version')
+    # ### end Alembic commands ###
diff --git a/web_console_v2/api/migrations/versions/554fd3c48986_update_event_user_id_to_ullable.py b/web_console_v2/api/migrations/versions/554fd3c48986_update_event_user_id_to_ullable.py
new file mode 100644
index 000000000..a160fcec2
--- /dev/null
+++ b/web_console_v2/api/migrations/versions/554fd3c48986_update_event_user_id_to_ullable.py
@@ -0,0 +1,34 @@
+"""update event user_id to nullable
+
+Revision ID: 554fd3c48986
+Revises: c038f88210a7
+Create Date: 2022-08-19 16:41:53.125112
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = '554fd3c48986'
+down_revision = 'c038f88210a7'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('events_v2', 'user_id',
+                    existing_type=mysql.INTEGER(),
+                    nullable=True,
+                    existing_comment='the ID of the user who triggered the event')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('events_v2', 'user_id',
+                    existing_type=mysql.INTEGER(),
+                    nullable=False,
+                    existing_comment='the ID of the user who triggered the event')
+    # ### end Alembic commands ###
diff --git a/web_console_v2/api/migrations/versions/56c35c0544e7_change_dataset_meta_info.py b/web_console_v2/api/migrations/versions/56c35c0544e7_change_dataset_meta_info.py
new file mode 100644
index 000000000..b438ca3b2
--- /dev/null
+++ b/web_console_v2/api/migrations/versions/56c35c0544e7_change_dataset_meta_info.py
@@ -0,0 +1,44 @@
+"""change dataset meta info
+
+Revision ID: 56c35c0544e7
+Revises: e4c8d7a2cf34
+Create Date: 2022-01-14 13:47:58.216698
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = '56c35c0544e7'
+down_revision = 'e4c8d7a2cf34'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('data_batches_v2', sa.Column('num_example', sa.BigInteger(), nullable=True, comment='num_example'))
+    op.add_column('data_batches_v2', sa.Column('num_feature', sa.BigInteger(), nullable=True, comment='num_feature'))
+    op.add_column('data_batches_v2', sa.Column('meta_info', sa.Text(length=16777215), nullable=True, comment='dataset meta info'))
+    op.drop_column('data_batches_v2', 'num_imported_file')
+    op.drop_column('data_batches_v2', 'num_file')
+    op.drop_column('data_batches_v2', 'details')
+    op.add_column('intersection_datasets_v2', sa.Column('file_size', sa.BigInteger(), nullable=True, comment='file_size in bytes'))
+    op.add_column('intersection_datasets_v2', sa.Column('num_example', sa.BigInteger(), nullable=True, comment='num_example'))
+    op.add_column('intersection_datasets_v2', sa.Column('num_feature', sa.BigInteger(), nullable=True, comment='num_feature'))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust!
### + op.drop_column('intersection_datasets_v2', 'num_feature') + op.drop_column('intersection_datasets_v2', 'num_example') + op.drop_column('intersection_datasets_v2', 'file_size') + op.add_column('data_batches_v2', sa.Column('details', mysql.MEDIUMBLOB(), nullable=True, comment='details')) + op.add_column('data_batches_v2', sa.Column('num_file', mysql.INTEGER(), autoincrement=False, nullable=True, comment='num_file')) + op.add_column('data_batches_v2', sa.Column('num_imported_file', mysql.INTEGER(), autoincrement=False, nullable=True, comment='num_imported_file')) + op.drop_column('data_batches_v2', 'meta_info') + op.drop_column('data_batches_v2', 'num_feature') + op.drop_column('data_batches_v2', 'num_example') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/5810b21435ae_add_intersection_datasets.py b/web_console_v2/api/migrations/versions/5810b21435ae_add_intersection_datasets.py new file mode 100644 index 000000000..268aa3abb --- /dev/null +++ b/web_console_v2/api/migrations/versions/5810b21435ae_add_intersection_datasets.py @@ -0,0 +1,51 @@ +"""add intersection datasets + +Revision ID: 5810b21435ae +Revises: 42fc79d5a917 +Create Date: 2021-06-15 14:44:32.165883 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '5810b21435ae' +down_revision = '42fc79d5a917' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('intersection_datasets_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('dataset_id', sa.Integer(), nullable=False, comment='dataset id'), + sa.Column('workflow_id', sa.Integer(), nullable=False, comment='workflow id'), + sa.Column('name', sa.String(length=255), nullable=False, comment='dataset name'), + sa.Column('path', sa.String(length=512), nullable=True, comment='dataset path'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment of dataset'), + sa.Column('kind', sa.Integer(), nullable=True, comment='dataset kind for different purposes'), + sa.Column('meta_info', sa.Text(length=16777215), nullable=True, comment='dataset meta info'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created time'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated time'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.PrimaryKeyConstraint('id'), + comment='intersection_datasets_v2', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.add_column('datasets_v2', sa.Column('dataset_format', sa.Integer(), nullable=True, comment='dataset format')) + op.add_column('datasets_v2', sa.Column('kind', sa.Integer(), nullable=True, comment='dataset kind for different purposes')) + op.add_column('datasets_v2', sa.Column('meta_info', sa.Text(length=16777215), nullable=True, comment='dataset meta info')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
###
+    op.drop_column('datasets_v2', 'meta_info')
+    op.drop_column('datasets_v2', 'kind')
+    op.drop_column('datasets_v2', 'dataset_format')
+    op.drop_table('intersection_datasets_v2')
+    # ### end Alembic commands ###
diff --git a/web_console_v2/api/migrations/versions/5a3533682d9b_add_completed_failed_state_workflow.py b/web_console_v2/api/migrations/versions/5a3533682d9b_add_completed_failed_state_workflow.py
new file mode 100644
index 000000000..8da060856
--- /dev/null
+++ b/web_console_v2/api/migrations/versions/5a3533682d9b_add_completed_failed_state_workflow.py
@@ -0,0 +1,28 @@
+"""add_completed_failed_state_workflow
+
+Revision ID: 5a3533682d9b
+Revises: 815ae1dcd9db
+Create Date: 2022-04-13 14:26:02.608377
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '5a3533682d9b'
+down_revision = '815ae1dcd9db'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('workflow_v2', 'state', nullable=True, comment='state', type_=sa.Enum('INVALID', 'NEW', 'READY', 'RUNNING', 'STOPPED', 'COMPLETED', 'FAILED', name='workflow_state', native_enum=False, create_constraint=False, length=32))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('workflow_v2', 'state', nullable=True, comment='state', type_=sa.Enum('INVALID', 'NEW', 'READY', 'RUNNING', 'STOPPED', name='workflow_state', native_enum=False, create_constraint=False, length=7))
+    # ### end Alembic commands ###
diff --git a/web_console_v2/api/migrations/versions/5a580877595d_user_add_role_and_state.py b/web_console_v2/api/migrations/versions/5a580877595d_user_add_role_and_state.py
index 05b674135..22bd0eaae 100644
--- a/web_console_v2/api/migrations/versions/5a580877595d_user_add_role_and_state.py
+++ b/web_console_v2/api/migrations/versions/5a580877595d_user_add_role_and_state.py
@@ -20,23 +20,15 @@ def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
     op.add_column('users_v2', sa.Column('email', sa.String(length=255), nullable=True, comment='email of user'))
     op.add_column('users_v2', sa.Column('name', sa.String(length=255), nullable=True, comment='name of user'))
-    op.add_column('users_v2', sa.Column('role', sa.Enum('USER', 'ADMIN', name='role', native_enum=False), nullable=True, comment='role of user'))
-    op.add_column('users_v2', sa.Column('state', sa.Enum('ACTIVE', 'DELETED', name='state', native_enum=False), nullable=True, comment='state of user'))
-    op.alter_column('users_v2', 'username',
-                    existing_type=mysql.VARCHAR(length=255),
-                    comment='unique name of user',
-                    existing_comment='user name of user',
-                    existing_nullable=True)
+    op.add_column('users_v2', sa.Column('role', sa.Enum('USER', 'ADMIN', name='role', native_enum=False, create_constraint=True, length=21), nullable=True, comment='role of user'))
+    op.add_column('users_v2', sa.Column('state', sa.Enum('ACTIVE', 'DELETED', name='state', native_enum=False, create_constraint=True, length=21), nullable=True, comment='state of user'))
+    op.alter_column('users_v2', 'username', existing_type=mysql.VARCHAR(length=255), comment='unique name of user', existing_comment='user name of user', existing_nullable=True)
     # ### end Alembic commands ###
 
 
 def downgrade():
     # ### commands auto generated by Alembic - please adjust!
### - op.alter_column('users_v2', 'username', - existing_type=mysql.VARCHAR(length=255), - comment='user name of user', - existing_comment='unique name of user', - existing_nullable=True) + op.alter_column('users_v2', 'username', existing_type=mysql.VARCHAR(length=255), comment='user name of user', existing_comment='unique name of user', existing_nullable=True) op.drop_column('users_v2', 'state') op.drop_column('users_v2', 'role') op.drop_column('users_v2', 'name') diff --git a/web_console_v2/api/migrations/versions/5f322c9d67ea_add_algorithm_publish_status_and_change_.py b/web_console_v2/api/migrations/versions/5f322c9d67ea_add_algorithm_publish_status_and_change_.py new file mode 100644 index 000000000..f88265a54 --- /dev/null +++ b/web_console_v2/api/migrations/versions/5f322c9d67ea_add_algorithm_publish_status_and_change_.py @@ -0,0 +1,42 @@ +"""add_algorithm_publish_status_and_change_project_to_release + +Revision ID: 5f322c9d67ea +Revises: a5d59be86a15 +Create Date: 2022-07-25 11:32:52.479087 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '5f322c9d67ea' +down_revision = 'a5d59be86a15' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('algorithm_projects_v2', 'publish_status', + type_=sa.Enum('UNPUBLISHED', 'PUBLISHED', 'UNRELEASED', 'RELEASED', name='publish_status', native_enum=False, create_constraint=False, length=32), + comment='release status', + existing_comment='publish status', + existing_nullable=True) + op.add_column('algorithms_v2', sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable')) + op.add_column('algorithms_v2', sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='review ticket status')) + op.add_column('algorithms_v2', sa.Column('publish_status', sa.Enum('UNPUBLISHED', 'PUBLISHED', name='publishstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='publish status')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('algorithms_v2', 'publish_status') + op.drop_column('algorithms_v2', 'ticket_status') + op.drop_column('algorithms_v2', 'ticket_uuid') + op.alter_column('algorithm_projects_v2', 'publish_status', + type_=sa.Enum('UNPUBLISHED', 'PUBLISHED', name='publish_status', native_enum=False, create_constraint=False, length=32), + comment='publish status', + existing_comment='release status', + existing_nullable=True) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/61f65a987341_add_uuid_stuff_on_dataset.py b/web_console_v2/api/migrations/versions/61f65a987341_add_uuid_stuff_on_dataset.py new file mode 100644 index 000000000..13766eafd --- /dev/null +++ b/web_console_v2/api/migrations/versions/61f65a987341_add_uuid_stuff_on_dataset.py @@ -0,0 +1,37 @@ +"""add_uuid_stuff_on_dataset + +Revision ID: 61f65a987341 +Revises: e3166ab65528 +Create Date: 2022-02-10 20:17:11.998292 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = '61f65a987341' +down_revision = 'e3166ab65528' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('uuid', sa.String(length=255), nullable=True, comment='dataset uuid')) + op.add_column('datasets_v2', + sa.Column('is_published', sa.Boolean(), nullable=True, comment='dataset is published or not')) + op.add_column( + 'datasets_v2', + sa.Column('dataset_kind', + sa.Enum('RAW', 'PROCESSED', 'SOURCE', 'EXPORTED', name='datasetkindv2', native_enum=False, length=32, create_constraint=False), + nullable=True, + comment='new version of dataset kind, choices [raw, processed, ...]')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('datasets_v2', 'dataset_kind') + op.drop_column('datasets_v2', 'is_published') + op.drop_column('datasets_v2', 'uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/6ffeddd94543_change_composer_runner_pipeline_size.py b/web_console_v2/api/migrations/versions/6ffeddd94543_change_composer_runner_pipeline_size.py new file mode 100644 index 000000000..64664415c --- /dev/null +++ b/web_console_v2/api/migrations/versions/6ffeddd94543_change_composer_runner_pipeline_size.py @@ -0,0 +1,51 @@ +"""change composer runner pipeline size + +Revision ID: 6ffeddd94543 +Revises: 959e368487bd +Create Date: 2021-12-14 10:36:29.518893 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = '6ffeddd94543' +down_revision = '959e368487bd' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('scheduler_runner_v2', 'pipeline', + existing_type=sa.Text(), + type_=sa.Text(16777215), + nullable=False, + comment='pipeline from scheduler item') + op.alter_column('scheduler_runner_v2', 'context', + existing_type=sa.Text(), + type_=sa.Text(16777215), + nullable=False, + comment='context') + op.alter_column('scheduler_item_v2', 'pipeline', + existing_type=sa.Text(16777215), + nullable=False, + existing_comment='pipeline') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('scheduler_runner_v2', 'pipeline', + existing_type=sa.Text(16777215), + type_=sa.Text(), + nullable=False) + op.alter_column('scheduler_runner_v2', 'context', + existing_type=sa.Text(16777215), + type_=sa.Text(), + nullable=False) + op.alter_column('scheduler_item_v2', 'pipeline', + existing_type=sa.Text(16777215), + nullable=True, + existing_comment='pipeline') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/721609ac068b_create_tee_tables.py b/web_console_v2/api/migrations/versions/721609ac068b_create_tee_tables.py new file mode 100644 index 000000000..ffadeb922 --- /dev/null +++ b/web_console_v2/api/migrations/versions/721609ac068b_create_tee_tables.py @@ -0,0 +1,87 @@ +"""create tee tables + +Revision ID: 721609ac068b +Revises: 4889e8718b51 +Create Date: 2022-08-26 20:17:49.442756 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '721609ac068b' +down_revision = '4889e8718b51' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('trusted_job_groups_v2', + sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable'), + sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='review ticket status'), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid'), + sa.Column('latest_version', sa.Integer(), nullable=True, comment='latest version'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment of trusted job group'), + sa.Column('project_id', sa.Integer(), nullable=True, comment='project id'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated at'), + sa.Column('creator_username', sa.String(length=255), nullable=True, comment='creator username'), + sa.Column('coordinator_id', sa.Integer(), nullable=True, comment='coordinator participant id'), + sa.Column('analyzer_id', sa.Integer(), nullable=True, comment='analyzer participant id'), + sa.Column('status', sa.Enum('PENDING', 'FAILED', 'SUCCEEDED', name='groupcreatestatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='create state'), + sa.Column('auth_status', sa.Enum('PENDING', 'AUTHORIZED', name='authstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='auth status'), + sa.Column('unauth_participant_ids', sa.Text(), nullable=True, comment='unauth participant ids'), + sa.Column('algorithm_id', sa.Integer(), nullable=True, comment='algorithm id'), + sa.Column('rsc', sa.String(length=255), nullable=True, comment='resource'), + sa.Column('dataset_id', sa.Integer(), nullable=True, comment='dataset id'), + sa.Column('participant_datasets', sa.Text(), nullable=True, comment='list of participant-to-dataset mapping'), + sa.PrimaryKeyConstraint('id'), + comment='trusted_job_groups_v2', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_trusted_group_name', 'trusted_job_groups_v2', ['name'], unique=False) + op.create_index('idx_trusted_group_project_id', 'trusted_job_groups_v2', ['project_id'], unique=False) + op.create_table('trusted_jobs_v2', + sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable'), + sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='review ticket status'), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('trusted_job_type', sa.Enum('ANALYZE', 'EXPORT', name='trustedjobtype', native_enum=False, create_constraint=False, length=32), nullable=True, comment='trusted job type'), + sa.Column('job_id', sa.Integer(), nullable=True, comment='job id'), + sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid'), + sa.Column('version', sa.Integer(), nullable=True, comment='version'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment of trusted job'), + 
sa.Column('project_id', sa.Integer(), nullable=True, comment='project id'), + sa.Column('trusted_job_group_id', sa.Integer(), nullable=True, comment='trusted job group id'), + sa.Column('started_at', sa.DateTime(timezone=True), nullable=True, comment='started_at'), + sa.Column('finished_at', sa.DateTime(timezone=True), nullable=True, comment='finished_at'), + sa.Column('status', sa.Enum('NEW', 'CREATED', 'PENDING', 'RUNNING', 'SUCCEEDED', 'FAILED', 'STOPPED', name='trustedjobstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='trusted job status'), + sa.Column('auth_status', sa.Enum('PENDING', 'AUTHORIZED', name='authstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='auth status'), + sa.Column('algorithm_id', sa.Integer(), nullable=True, comment='algorithm id'), + sa.Column('rsc', sa.String(length=255), nullable=True, comment='resource'), + sa.Column('dataset_job_id', sa.Integer(), nullable=True, comment='dataset job id'), + sa.Column('result_key', sa.Text(), nullable=True, comment='result key'), + sa.PrimaryKeyConstraint('id'), + comment='trusted_jobs_v2', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_trusted_name', 'trusted_jobs_v2', ['name'], unique=False) + op.create_index('idx_trusted_project_id', 'trusted_jobs_v2', ['project_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_trusted_project_id', table_name='trusted_jobs_v2') + op.drop_index('idx_trusted_name', table_name='trusted_jobs_v2') + op.drop_table('trusted_jobs_v2') + op.drop_index('idx_trusted_group_project_id', table_name='trusted_job_groups_v2') + op.drop_index('idx_trusted_group_name', table_name='trusted_job_groups_v2') + op.drop_table('trusted_job_groups_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/72ce71d23931_add_favour.py b/web_console_v2/api/migrations/versions/72ce71d23931_add_favour.py new file mode 100644 index 000000000..3c60251f6 --- /dev/null +++ b/web_console_v2/api/migrations/versions/72ce71d23931_add_favour.py @@ -0,0 +1,28 @@ +"""add_favour + +Revision ID: 72ce71d23931 +Revises: 3e5b9367ced3 +Create Date: 2022-03-04 17:34:18.310004 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '72ce71d23931' +down_revision = '3e5b9367ced3' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('workflow_v2', sa.Column('favour', sa.Boolean(), nullable=True, comment='favour')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workflow_v2', 'favour') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/7549e6d94cbb_drop_old_dataset_kind_column.py b/web_console_v2/api/migrations/versions/7549e6d94cbb_drop_old_dataset_kind_column.py new file mode 100644 index 000000000..61e2c6b19 --- /dev/null +++ b/web_console_v2/api/migrations/versions/7549e6d94cbb_drop_old_dataset_kind_column.py @@ -0,0 +1,28 @@ +"""drop_old_dataset_kind_column + +Revision ID: 7549e6d94cbb +Revises: 8bde5b704062 +Create Date: 2022-06-27 13:04:30.412819 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. 
+revision = '7549e6d94cbb' +down_revision = '8bde5b704062' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('datasets_v2', 'kind') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('kind', mysql.INTEGER(), autoincrement=False, nullable=True, comment='dataset kind for different purposes')) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/7cf3168e68fb_create_table_events_v2.py b/web_console_v2/api/migrations/versions/7cf3168e68fb_create_table_events_v2.py new file mode 100644 index 000000000..bb68650f6 --- /dev/null +++ b/web_console_v2/api/migrations/versions/7cf3168e68fb_create_table_events_v2.py @@ -0,0 +1,47 @@ +"""create table events_v2 + +Revision ID: 7cf3168e68fb +Revises: 3b4198785f3d +Create Date: 2021-09-25 13:37:11.752303 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '7cf3168e68fb' +down_revision = '3b4198785f3d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('events_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='auto-incremented id'), + sa.Column('uuid', sa.String(length=255), nullable=False, comment='UUID of the event'), + sa.Column('name', sa.String(length=255), nullable=False, comment='the name of the event'), + sa.Column('user_id', sa.Integer(), nullable=False, comment='the ID of the user who triggered the event'), + sa.Column('resource_type', sa.Enum('IAM', 'WORKSPACE', 'TEMPLATE', 'WORKFLOW', 'DATASET', 'MODEL', 'USER', 'SYSTEM', 'PARTICIPANT', name='resource_type', native_enum=False, length=32, create_constraint=False), nullable=False, comment='the type of the resource'), + sa.Column('resource_name', sa.String(length=512), nullable=False, comment='the name of the resource'), + sa.Column('op_type', sa.Enum('CREATE', 'DELETE', 'UPDATE', name='op_type', native_enum=False, length=32, create_constraint=False), nullable=False, comment='the type of the operation of the event'), + sa.Column('result', sa.Enum('SUCCESS', 'FAILURE', name='result', native_enum=False, length=32, create_constraint=False), nullable=False, comment='the result of the operation'), + sa.Column('source', sa.Enum('UI', 'API', name='source', native_enum=False, length=32, create_constraint=False), nullable=False, comment='the source that triggered the event'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra info in JSON'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated at'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted at'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid', name='uniq_uuid'), + comment='webconsole audit events', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('events_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/8037467dca2c_add_creator_and_coordinator_id_in_model.py b/web_console_v2/api/migrations/versions/8037467dca2c_add_creator_and_coordinator_id_in_model.py new file mode 100644 index 000000000..66c58395c --- /dev/null +++ b/web_console_v2/api/migrations/versions/8037467dca2c_add_creator_and_coordinator_id_in_model.py @@ -0,0 +1,34 @@ +"""add_creator_and_coordinator_id_in_model + +Revision ID: 8037467dca2c +Revises: 72ce71d23931 +Create Date: 2022-03-08 15:15:51.601676 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8037467dca2c' +down_revision = '72ce71d23931' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('coordinator_id', sa.Integer(), nullable=True, comment='coordinator participant id')) + op.add_column('model_groups_v2', sa.Column('creator_username', sa.String(length=255), nullable=True, comment='creator username')) + op.add_column('model_jobs_v2', sa.Column('coordinator_id', sa.Integer(), nullable=True, comment='coordinator participant id')) + op.add_column('model_jobs_v2', sa.Column('creator_username', sa.String(length=255), nullable=True, comment='creator username')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('model_jobs_v2', 'creator_username') + op.drop_column('model_jobs_v2', 'coordinator_id') + op.drop_column('model_groups_v2', 'creator_username') + op.drop_column('model_groups_v2', 'coordinator_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/805f74540f34_add_algorithm_project_and_pending.py b/web_console_v2/api/migrations/versions/805f74540f34_add_algorithm_project_and_pending.py new file mode 100644 index 000000000..6eca71f29 --- /dev/null +++ b/web_console_v2/api/migrations/versions/805f74540f34_add_algorithm_project_and_pending.py @@ -0,0 +1,70 @@ +"""add_algorithm_project_and_pending_algorithm_table + +Revision ID: 805f74540f34 +Revises: dd16ff2b58ef +Create Date: 2021-12-06 10:30:06.946566 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '805f74540f34' +down_revision = 'dd16ff2b58ef' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('algorithm_projects_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('project_id', sa.Integer(), nullable=True, comment='project id'), + sa.Column('latest_version', sa.Integer(), nullable=True, comment='latest version'), + sa.Column('algorithm_type', sa.Enum('UNSPECIFIED', 'NN_LOCAL', 'NN_HORIZONTAL', 'NN_VERTICAL', 'TREE_VERTICAL', name='algorithmtype', native_enum=False, length=64, create_constraint=False), nullable=True, comment='algorithm type'), + sa.Column('source', sa.Enum('UNSPECIFIED', 'PRESET', 'USER', 'THIRD_PARTY', name='source', native_enum=False, length=64, create_constraint=False), nullable=True, comment='algorithm source'), + sa.Column('publish_status', sa.Enum('UNPUBLISHED', 'PUBLISHED', name='publishstatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='publish status'), + sa.Column('creator_id', sa.Integer(), nullable=True, comment='creator user id'), + sa.Column('participant_id', sa.Integer(), nullable=True, comment='participant id'), + sa.Column('fspath', sa.String(length=512), nullable=True, comment='algorithm project path'), + sa.Column('parameter', sa.Text(), nullable=True, comment='parameter'), + sa.Column('cmt', sa.String(length=255), nullable=True, comment='comment'), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True, comment='created time'), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True, comment='updated time'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name', 'source', name='uniq_name_source'), + comment='algorithm_projects', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_table('pending_algorithms_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('project_id', sa.Integer(), nullable=True, comment='project id'), + sa.Column('version', sa.Integer(), nullable=True, comment='version'), + sa.Column('algorithm_type', sa.Enum('UNSPECIFIED', 'NN_LOCAL', 'NN_HORIZONTAL', 'NN_VERTICAL', 'TREE_VERTICAL', name='algorithmtype', native_enum=False, length=64, create_constraint=False), nullable=True, comment='algorithm type'), + sa.Column('participant_id', sa.Integer(), nullable=True, comment='participant id'), + sa.Column('fspath', sa.String(length=512), nullable=True, comment='algorithm path'), + sa.Column('parameter', sa.Text(), nullable=True, comment='parameter'), + sa.Column('cmt', sa.String(length=255), nullable=True, comment='comment'), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True, comment='created time'), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True, comment='updated time'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.PrimaryKeyConstraint('id'), + comment='pending_algorithms', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.add_column('algorithms_v2', sa.Column('participant_id', sa.Integer(), nullable=True, comment='participant id')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('algorithms_v2', 'participant_id') + op.drop_table('pending_algorithms_v2') + op.drop_table('algorithm_projects_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/815ae1dcd9db_add_cleanups_v2_schema.py b/web_console_v2/api/migrations/versions/815ae1dcd9db_add_cleanups_v2_schema.py new file mode 100644 index 000000000..883fc362c --- /dev/null +++ b/web_console_v2/api/migrations/versions/815ae1dcd9db_add_cleanups_v2_schema.py @@ -0,0 +1,39 @@ +"""add cleanups_v2 schema + +Revision ID: 815ae1dcd9db +Revises: 40569c37cb87 +Create Date: 2022-04-08 18:48:44.456038 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '815ae1dcd9db' +down_revision = '40569c37cb87' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('cleanups_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('state', sa.Enum('WAITING', 'RUNNING', 'SUCCEEDED', 'FAILED', 'CANCELED', name='cleanupstate', native_enum=False, length=64, create_constraint=False), nullable=True, comment='state'), + sa.Column('target_start_at', sa.DateTime(timezone=True), nullable=True, comment='target_start_at'), + sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True, comment='completed_at'), + sa.Column('resource_id', sa.Integer(), nullable=True, comment='resource_id'), + sa.Column('resource_type', sa.Enum('DATASET', 'MODEL', 'ALGORITHM', 'NO_RESOURCE', name='resourcetype', native_enum=False, length=64, create_constraint=False), nullable=True, comment='resource_type'), + sa.Column('payload', sa.Text(), nullable=True, comment='the underlying resources that need to be cleaned up'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated_at'), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('cleanups_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/8259e68074a0_add_algorithm_type_to_model_job.py b/web_console_v2/api/migrations/versions/8259e68074a0_add_algorithm_type_to_model_job.py new file mode 100644 index 000000000..4c80d3760 --- /dev/null +++ b/web_console_v2/api/migrations/versions/8259e68074a0_add_algorithm_type_to_model_job.py @@ -0,0 +1,32 @@ +"""add_algorithm_type_to_model_job + +Revision ID: 8259e68074a0 +Revises: 1e882d34c760 +Create Date: 2022-01-07 17:01:19.349467 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '8259e68074a0' +down_revision = '1e882d34c760' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('model_jobs_v2', sa.Column('algorithm_id', sa.Integer(), nullable=True, comment='algorithm id')) + op.add_column('model_jobs_v2', sa.Column('algorithm_type', sa.Enum('UNSPECIFIED', 'NN_LOCAL', 'NN_HORIZONTAL', 'NN_VERTICAL', 'TREE_VERTICAL', name='algorithmtype', native_enum=False, create_constraint=False, length=32), default='UNSPECIFIED', nullable=True, comment='algorithm type')) + op.drop_column('model_jobs_v2', 'code_id') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_jobs_v2', sa.Column('code_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='code_id')) + op.drop_column('model_jobs_v2', 'algorithm_type') + op.drop_column('model_jobs_v2', 'algorithm_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/87c0ca7b825a_add_pending_project.py b/web_console_v2/api/migrations/versions/87c0ca7b825a_add_pending_project.py new file mode 100644 index 000000000..aaf9dc0b1 --- /dev/null +++ b/web_console_v2/api/migrations/versions/87c0ca7b825a_add_pending_project.py @@ -0,0 +1,51 @@ +"""add_pending_project + +Revision ID: 87c0ca7b825a +Revises: 721609ac068b +Create Date: 2022-09-14 21:27:06.438325 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '87c0ca7b825a' +down_revision = '721609ac068b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('pending_projects_v2', + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable'), + sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, length=32, create_constraint=False), server_default='APPROVED', nullable=True, comment='review ticket status'), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid'), + sa.Column('config', sa.Text(), nullable=True, comment='config'), + sa.Column('state', sa.Enum('PENDING', 'ACCEPTED', 'FAILED', 'CLOSED', name='pendingprojectstate', native_enum=False, length=32, create_constraint=False), nullable=False, comment='pending project stage state'), + sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info'), + sa.Column('role', sa.Enum('COORDINATOR', 'PARTICIPANT', name='projectrole', native_enum=False, length=32, create_constraint=False), nullable=False, comment='pending project role'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment'), + sa.Column('creator_username', sa.String(length=255), nullable=True, comment='creator'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated at'), + sa.PrimaryKeyConstraint('id'), + comment='This is webconsole pending_project table', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.add_column('projects_v2', sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info')) + 
op.add_column('projects_v2', sa.Column('role', sa.Enum('COORDINATOR', 'PARTICIPANT', name='projectrole', native_enum=False, length=32, create_constraint=False), nullable=True, comment='pending project role')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('projects_v2', 'role') + op.drop_column('projects_v2', 'participants_info') + op.drop_table('pending_projects_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/88f6dd8bcb23_add_dataset_job_name.py b/web_console_v2/api/migrations/versions/88f6dd8bcb23_add_dataset_job_name.py new file mode 100644 index 000000000..7a93e6f6a --- /dev/null +++ b/web_console_v2/api/migrations/versions/88f6dd8bcb23_add_dataset_job_name.py @@ -0,0 +1,28 @@ +"""add dataset job name + +Revision ID: 88f6dd8bcb23 +Revises: f252b315e730 +Create Date: 2022-05-23 21:21:18.910443 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '88f6dd8bcb23' +down_revision = 'f252b315e730' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('dataset_jobs_v2', sa.Column('name', sa.String(length=255), nullable=True, comment='dataset job name')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('dataset_jobs_v2', 'name') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/8b179c0111a8_remove_certificate_row.py b/web_console_v2/api/migrations/versions/8b179c0111a8_remove_certificate_row.py new file mode 100644 index 000000000..cfa62ec07 --- /dev/null +++ b/web_console_v2/api/migrations/versions/8b179c0111a8_remove_certificate_row.py @@ -0,0 +1,30 @@ +"""Remove certificate row + +Revision ID: 8b179c0111a8 +Revises: c06f66dbebc1 +Create Date: 2021-10-21 11:50:53.805840 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8b179c0111a8' +down_revision = 'c06f66dbebc1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('participants_v2', 'certificates') + op.drop_column('projects_v2', 'certificate') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('projects_v2', sa.Column('certificate', sa.BLOB(), nullable=True, comment='certificate')) + op.add_column('participants_v2', sa.Column('certificates', sa.BLOB(), nullable=True, comment='certificates')) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/8bde5b704062_add_status_index_for_composer.py b/web_console_v2/api/migrations/versions/8bde5b704062_add_status_index_for_composer.py new file mode 100644 index 000000000..2e872ba4f --- /dev/null +++ b/web_console_v2/api/migrations/versions/8bde5b704062_add_status_index_for_composer.py @@ -0,0 +1,32 @@ +"""add_status_index_for_composer + +Revision ID: 8bde5b704062 +Revises: 0f24b50d4077 +Create Date: 2022-06-15 11:30:13.769478 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8bde5b704062' +down_revision = '0f24b50d4077' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_index('idx_item_status', 'scheduler_item_v2', ['status'], unique=False) + op.create_index('idx_runner_item_id', 'scheduler_runner_v2', ['item_id'], unique=False) + op.create_index('idx_runner_status', 'scheduler_runner_v2', ['status'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_runner_status', table_name='scheduler_runner_v2') + op.drop_index('idx_runner_item_id', table_name='scheduler_runner_v2') + op.drop_index('idx_item_status', table_name='scheduler_item_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/8c2d01a90f9a_model_job_add_auth_frontend_status.py b/web_console_v2/api/migrations/versions/8c2d01a90f9a_model_job_add_auth_frontend_status.py new file mode 100644 index 000000000..4e42b8457 --- /dev/null +++ b/web_console_v2/api/migrations/versions/8c2d01a90f9a_model_job_add_auth_frontend_status.py @@ -0,0 +1,34 @@ +"""model_job_add_auth_frontend_status + +Revision ID: 8c2d01a90f9a +Revises: be2c1a6523cd +Create Date: 2022-12-30 13:27:00.549545 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8c2d01a90f9a' +down_revision = 'be2c1a6523cd' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_jobs_v2', sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disable')) + op.add_column('model_jobs_v2', sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, create_constraint=False, length=32), server_default='APPROVED', nullable=True, comment='review ticket status')) + op.add_column('model_jobs_v2', sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info')) + op.add_column('model_jobs_v2', sa.Column('create_status', sa.Enum('PENDING', 'FAILED', 'SUCCEEDED', name='modeljobcreatestatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='create status')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('model_jobs_v2', 'create_status') + op.drop_column('model_jobs_v2', 'participants_info') + op.drop_column('model_jobs_v2', 'ticket_status') + op.drop_column('model_jobs_v2', 'ticket_uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/8f284fd1ddf5_add_settings.py b/web_console_v2/api/migrations/versions/8f284fd1ddf5_add_settings.py new file mode 100644 index 000000000..cc1aa2371 --- /dev/null +++ b/web_console_v2/api/migrations/versions/8f284fd1ddf5_add_settings.py @@ -0,0 +1,37 @@ +"""add_settings + +Revision ID: 8f284fd1ddf5 +Revises: 1d646afb8846 +Create Date: 2021-06-28 17:05:08.826588 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8f284fd1ddf5' +down_revision = '1d646afb8846' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('settings_v2', + sa.Column('id', sa.Integer(), nullable=False, comment='id'), + sa.Column('uniq_key', sa.String(length=255), nullable=False, comment='uniq_key'), + sa.Column('value', sa.Text(), nullable=True, comment='value'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uniq_key', name='uniq_key'), + comment='this is webconsole settings table', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('settings_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/93c152b66879_add_template_revision.py b/web_console_v2/api/migrations/versions/93c152b66879_add_template_revision.py new file mode 100644 index 000000000..3234c2fc6 --- /dev/null +++ b/web_console_v2/api/migrations/versions/93c152b66879_add_template_revision.py @@ -0,0 +1,44 @@ +"""add_template_revision + +Revision ID: 93c152b66879 +Revises: a099147f5c69 +Create Date: 2022-04-13 18:59:45.222007 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '93c152b66879' +down_revision = 'a099147f5c69' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('template_revisions_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('revision_index', sa.Integer(), nullable=True, comment='revision_index'), + sa.Column('cmt', sa.String(length=255), nullable=True, comment='comment'), + sa.Column('config', sa.LargeBinary(length=16777215), nullable=False, comment='config'), + sa.Column('editor_info', sa.LargeBinary(length=16777215), nullable=True, comment='editor_info'), + sa.Column('template_id', sa.Integer(), nullable=True, comment='template_id'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at'), + sa.PrimaryKeyConstraint('id'), + comment='workflow template revision', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_template_id', 'template_revisions_v2', ['template_id'], unique=False) + op.add_column('workflow_v2', sa.Column('template_revision_id', sa.Integer(), nullable=True, comment='template_revision_id')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workflow_v2', 'template_revision_id') + op.drop_index('idx_template_id', table_name='template_revisions_v2') + op.drop_table('template_revisions_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/93d756004237_add_username_and_remove_creator_name.py b/web_console_v2/api/migrations/versions/93d756004237_add_username_and_remove_creator_name.py new file mode 100644 index 000000000..9e7213a02 --- /dev/null +++ b/web_console_v2/api/migrations/versions/93d756004237_add_username_and_remove_creator_name.py @@ -0,0 +1,34 @@ +"""add_username_and_remove_creator_name + +Revision ID: 93d756004237 +Revises: 6ffeddd94543 +Create Date: 2021-12-17 10:50:50.478584 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '93d756004237' +down_revision = '6ffeddd94543' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('algorithm_projects_v2', sa.Column('username', sa.String(length=255), nullable=True, comment='creator name')) + op.drop_column('algorithm_projects_v2', 'creator_id') + op.add_column('algorithms_v2', sa.Column('username', sa.String(length=255), nullable=True, comment='creator name')) + op.drop_column('algorithms_v2', 'creator_id') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('algorithms_v2', sa.Column('creator_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='creator user id')) + op.drop_column('algorithms_v2', 'username') + op.add_column('algorithm_projects_v2', sa.Column('creator_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='creator user id')) + op.drop_column('algorithm_projects_v2', 'username') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/959e368487bd_change_composer_item_pipeline_size.py b/web_console_v2/api/migrations/versions/959e368487bd_change_composer_item_pipeline_size.py new file mode 100644 index 000000000..0abbd2bb8 --- /dev/null +++ b/web_console_v2/api/migrations/versions/959e368487bd_change_composer_item_pipeline_size.py @@ -0,0 +1,28 @@ +"""change composer item pipeline size + +Revision ID: 959e368487bd +Revises: 805f74540f34 +Create Date: 2021-12-13 16:18:21.742170 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '959e368487bd' +down_revision = '805f74540f34' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('scheduler_item_v2', 'pipeline', existing_type=sa.Text(), type_=sa.Text(16777215), comment='pipeline') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('scheduler_item_v2', 'pipeline', existing_type=sa.Text(16777215), type_=sa.Text()) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/993096ed6fb3_tee_export_feat.py b/web_console_v2/api/migrations/versions/993096ed6fb3_tee_export_feat.py new file mode 100644 index 000000000..705e9a158 --- /dev/null +++ b/web_console_v2/api/migrations/versions/993096ed6fb3_tee_export_feat.py @@ -0,0 +1,38 @@ +"""tee export feat + +Revision ID: 993096ed6fb3 +Revises: 5082991f34c7 +Create Date: 2022-10-20 19:48:28.048403 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '993096ed6fb3' +down_revision = '5082991f34c7' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('trusted_job_groups_v2', sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info')) + op.add_column('trusted_jobs_v2', sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info')) + op.add_column('trusted_jobs_v2', sa.Column('export_count', sa.Integer(), nullable=True, comment='export count')) + op.add_column('trusted_jobs_v2', sa.Column('coordinator_id', sa.Integer(), nullable=True, comment='coordinator participant id')) + op.add_column('trusted_jobs_v2', sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created at')) + op.add_column('trusted_jobs_v2', sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated at')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('trusted_jobs_v2', 'updated_at') + op.drop_column('trusted_jobs_v2', 'created_at') + op.drop_column('trusted_jobs_v2', 'coordinator_id') + op.drop_column('trusted_jobs_v2', 'export_count') + op.drop_column('trusted_jobs_v2', 'participants_info') + op.drop_column('trusted_job_groups_v2', 'participants_info') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/9a9b20f3804e_create_models_table.py b/web_console_v2/api/migrations/versions/9a9b20f3804e_create_models_table.py new file mode 100644 index 000000000..9694d9680 --- /dev/null +++ b/web_console_v2/api/migrations/versions/9a9b20f3804e_create_models_table.py @@ -0,0 +1,70 @@ +"""create_models_table + +Revision ID: 9a9b20f3804e +Revises: e5d91f0f59a7 +Create Date: 2021-07-20 22:41:51.294181 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '9a9b20f3804e' +down_revision = 'e5d91f0f59a7' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
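The models_v2 table created just below follows the create/drop conventions used throughout these migrations: a surrogate id primary key, database-assigned timestamps via server_default, and MySQL-specific table options. A trimmed sketch of the pattern, with a hypothetical table name (example_v2 is illustrative, not part of this changeset):

```python
from alembic import op
import sqlalchemy as sa


def upgrade():
    op.create_table(
        'example_v2',  # hypothetical table name
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'),
        # server_default is a DDL-level default: the database fills in the
        # timestamp, so application writers do not have to.
        sa.Column('created_at', sa.DateTime(timezone=True),
                  server_default=sa.text('now()'), nullable=True, comment='created_at'),
        sa.PrimaryKeyConstraint('id'),
        comment='example table',
        # mysql_-prefixed kwargs are dialect options; non-MySQL backends ignore them.
        mysql_charset='utf8mb4',
        mysql_engine='innodb')


def downgrade():
    op.drop_table('example_v2')
```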
### + op.create_table('models_v2', + sa.Column('id', sa.Integer(), nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('version', sa.Integer(), nullable=True, default=0, comment='version'), + sa.Column('type', sa.Integer(), nullable=True, comment='type'), + sa.Column('state', sa.Integer(), nullable=True, comment='state'), + sa.Column('job_name', sa.String(length=255), nullable=True, comment='job_name'), + sa.Column('parent_id', sa.Integer(), nullable=True, comment='parent_id'), + sa.Column('params', sa.Text(), nullable=True, comment='params'), + sa.Column('metrics', sa.Text(), nullable=True, comment='metrics'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated_at'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted_at'), + sa.Column('group_id', sa.Integer(), nullable=True, comment='group_id'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('job_name', name='uniq_job_name'), + comment='model', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_job_name', 'models_v2', ['job_name'], unique=False) + op.alter_column('workflow_v2', 'extra', + existing_type=mysql.TEXT(), + comment='json string that will be sent to peer', + existing_comment='extra', + existing_nullable=True) + op.alter_column('workflow_v2', 'local_extra', + existing_type=mysql.TEXT(), + comment='json string that will only be stored locally', + existing_comment='local_extra', + existing_nullable=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('workflow_v2', 'local_extra', + existing_type=mysql.TEXT(), + comment='local_extra', + existing_comment='json string that will only be stored locally', + existing_nullable=True) + op.alter_column('workflow_v2', 'extra', + existing_type=mysql.TEXT(), + comment='extra', + existing_comment='json string that will be sent to peer', + existing_nullable=True) + op.drop_index('idx_job_name', table_name='models_v2') + op.drop_table('models_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/9c321d108a16_refactor_tee_table.py b/web_console_v2/api/migrations/versions/9c321d108a16_refactor_tee_table.py new file mode 100644 index 000000000..ea02e4814 --- /dev/null +++ b/web_console_v2/api/migrations/versions/9c321d108a16_refactor_tee_table.py @@ -0,0 +1,34 @@ +"""refactor tee table + +Revision ID: 9c321d108a16 +Revises: a2311ac22777 +Create Date: 2022-09-19 11:48:21.461194 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '9c321d108a16' +down_revision = 'a2311ac22777' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust!
### + op.add_column('trusted_job_groups_v2', sa.Column('algorithm_uuid', sa.String(length=64), nullable=True, comment='algorithm uuid')) + op.drop_column('trusted_job_groups_v2', 'algorithm_id') + op.add_column('trusted_jobs_v2', sa.Column('algorithm_uuid', sa.String(length=64), nullable=True, comment='algorithm uuid')) + op.drop_column('trusted_jobs_v2', 'algorithm_id') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('trusted_jobs_v2', sa.Column('algorithm_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='algorithm id')) + op.drop_column('trusted_jobs_v2', 'algorithm_uuid') + op.add_column('trusted_job_groups_v2', sa.Column('algorithm_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='algorithm id')) + op.drop_column('trusted_job_groups_v2', 'algorithm_uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/9d3a1d2393e7_add_model_jobs_table.py b/web_console_v2/api/migrations/versions/9d3a1d2393e7_add_model_jobs_table.py new file mode 100644 index 000000000..d4472efd7 --- /dev/null +++ b/web_console_v2/api/migrations/versions/9d3a1d2393e7_add_model_jobs_table.py @@ -0,0 +1,57 @@ +"""add_model_jobs_table + +Revision ID: 9d3a1d2393e7 +Revises: a71c7db41200 +Create Date: 2021-09-17 16:38:49.178642 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9d3a1d2393e7' +down_revision = 'a71c7db41200' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('model_jobs_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid'), + sa.Column('type', sa.Enum('UNSPECIFIED', 'NN_TRAINING', 'NN_EVALUATION', 'NN_PREDICTION', 'TREE_TRAINING', 'TREE_EVALUATION', 'TREE_PREDICTION', name='modeljobtype', native_enum=False, create_constraint=False, length=32), default='UNSPECIFIED', nullable=True, comment='type'), + sa.Column('job_name', sa.String(length=255), nullable=True, comment='job_name'), + sa.Column('job_id', sa.Integer(), nullable=True, comment='job id'), + sa.Column('model_id', sa.Integer(), nullable=True, comment='model_id'), + sa.Column('group_id', sa.Integer(), nullable=True, comment='group_id'), + sa.Column('project_id', sa.Integer(), nullable=True, comment='project id'), + sa.Column('workflow_id', sa.Integer(), nullable=True, comment='workflow id'), + sa.Column('workflow_uuid', sa.String(length=64), nullable=True, comment='workflow uuid'), + sa.Column('code_id', sa.Integer(), nullable=True, comment='code_id'), + sa.Column('dataset_id', sa.Integer(), nullable=True, comment='dataset id'), + sa.Column('intersection_dataset_id', sa.Integer(), nullable=True, comment='intersection dataset id'), + sa.Column('params', sa.Text(), nullable=True, comment='params'), + sa.Column('metrics', sa.Text(), nullable=True, comment='metrics'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated_at'), + sa.Column('deleted_at', 
sa.DateTime(timezone=True), nullable=True, comment='deleted_at'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('job_name', name='uniq_job_name'), + comment='model', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_uuid', 'model_jobs_v2', ['uuid'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_uuid', table_name='model_jobs_v2') + op.drop_table('model_jobs_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a099147f5c69_add_cron_config_path_to_model.py b/web_console_v2/api/migrations/versions/a099147f5c69_add_cron_config_path_to_model.py new file mode 100644 index 000000000..133896f21 --- /dev/null +++ b/web_console_v2/api/migrations/versions/a099147f5c69_add_cron_config_path_to_model.py @@ -0,0 +1,34 @@ +"""add_cron_config_path_to_model + +Revision ID: a099147f5c69 +Revises: 5a3533682d9b +Create Date: 2022-04-15 18:42:10.001142 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'a099147f5c69' +down_revision = '5a3533682d9b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('cron_config', sa.String(length=255), nullable=True, comment='cron expression in UTC timezone')) + op.add_column('model_groups_v2', sa.Column('fspath', sa.String(length=512), nullable=True, comment='model job group path')) + op.add_column('model_jobs_v2', sa.Column('fspath', sa.String(length=512), nullable=True, comment='model job path')) + op.add_column('models_v2', sa.Column('algorithm_type', sa.Enum('UNSPECIFIED', 'NN_LOCAL', 'NN_HORIZONTAL', 'NN_VERTICAL', 'TREE_VERTICAL', name='algorithmtype', native_enum=False, create_constraint=False, length=32), default='UNSPECIFIED', nullable=True, comment='algorithm type')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('models_v2', 'algorithm_type') + op.drop_column('model_jobs_v2', 'fspath') + op.drop_column('model_groups_v2', 'fspath') + op.drop_column('model_groups_v2', 'cron_config') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a0c688644b84_combine_template_to_workflow.py b/web_console_v2/api/migrations/versions/a0c688644b84_combine_template_to_workflow.py new file mode 100644 index 000000000..965cea32c --- /dev/null +++ b/web_console_v2/api/migrations/versions/a0c688644b84_combine_template_to_workflow.py @@ -0,0 +1,30 @@ +"""combine_template_to_workflow + +Revision ID: a0c688644b84 +Revises: 8f284fd1ddf5 +Create Date: 2021-07-07 18:33:14.574123 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'a0c688644b84' +down_revision = '8f284fd1ddf5' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('workflow_v2', sa.Column('editor_info', sa.LargeBinary(length=16777215), nullable=True, comment='editor_info')) + op.add_column('workflow_v2', sa.Column('template_id', sa.Integer(), nullable=True, comment='template_id')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('workflow_v2', 'template_id') + op.drop_column('workflow_v2', 'editor_info') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a1f5564f7d00_removes_workflow_cron_related_fields.py b/web_console_v2/api/migrations/versions/a1f5564f7d00_removes_workflow_cron_related_fields.py new file mode 100644 index 000000000..ed487b43e --- /dev/null +++ b/web_console_v2/api/migrations/versions/a1f5564f7d00_removes_workflow_cron_related_fields.py @@ -0,0 +1,30 @@ +"""removes workflow cron related fields + +Revision ID: a1f5564f7d00 +Revises: b809788132d0 +Create Date: 2021-08-31 19:12:09.225018 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'a1f5564f7d00' +down_revision = 'b809788132d0' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workflow_v2', 'recur_at') + op.drop_column('workflow_v2', 'recur_type') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('workflow_v2', sa.Column('recur_type', mysql.VARCHAR(length=11), nullable=True, comment='recur_type')) + op.add_column('workflow_v2', sa.Column('recur_at', mysql.DATETIME(), nullable=True, comment='recur_at')) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a2311ac22777_add_ticket_uuid_and_status.py b/web_console_v2/api/migrations/versions/a2311ac22777_add_ticket_uuid_and_status.py new file mode 100644 index 000000000..51ee18e95 --- /dev/null +++ b/web_console_v2/api/migrations/versions/a2311ac22777_add_ticket_uuid_and_status.py @@ -0,0 +1,29 @@ +"""add ticket uuid and status + +Revision ID: a2311ac22777 +Revises: 87c0ca7b825a +Create Date: 2022-09-15 21:46:39.309346 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'a2311ac22777' +down_revision = '87c0ca7b825a' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('ticket_uuid', sa.String(length=255), nullable=True, comment='review ticket uuid, empty if review function is disabled')) + op.add_column('datasets_v2', sa.Column('ticket_status', sa.Enum('PENDING', 'APPROVED', 'DECLINED', name='ticketstatus', native_enum=False, length=32, create_constraint=False), server_default='APPROVED', nullable=True, comment='review ticket status')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('datasets_v2', 'ticket_status') + op.drop_column('datasets_v2', 'ticket_uuid') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a36a936f2a1f_add_constraint_for_tpl_revision.py b/web_console_v2/api/migrations/versions/a36a936f2a1f_add_constraint_for_tpl_revision.py new file mode 100644 index 000000000..4e88c79f7 --- /dev/null +++ b/web_console_v2/api/migrations/versions/a36a936f2a1f_add_constraint_for_tpl_revision.py @@ -0,0 +1,38 @@ +"""Add constraint for tpl revision. + +Revision ID: a36a936f2a1f +Revises: 93c152b66879 +Create Date: 2022-04-20 19:14:07.916463 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic.
+revision = 'a36a936f2a1f' +down_revision = '93c152b66879' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('template_revisions_v2', 'revision_index', + existing_type=mysql.INTEGER(), + comment='index for the same template', + existing_comment='revision_index', + existing_nullable=True) + op.create_unique_constraint('uniq_revision_index_in_template', 'template_revisions_v2', ['template_id', 'revision_index']) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('uniq_revision_index_in_template', 'template_revisions_v2', type_='unique') + op.alter_column('template_revisions_v2', 'revision_index', + existing_type=mysql.INTEGER(), + comment='revision_index', + existing_comment='index for the same template', + existing_nullable=True) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a5d59be86a15_refine_dataset_dataset_job_dataset_job_.py b/web_console_v2/api/migrations/versions/a5d59be86a15_refine_dataset_dataset_job_dataset_job_.py new file mode 100644 index 000000000..2e4b5e8ce --- /dev/null +++ b/web_console_v2/api/migrations/versions/a5d59be86a15_refine_dataset_dataset_job_dataset_job_.py @@ -0,0 +1,61 @@ +"""refine dataset/dataset_job/dataset_job_stage model + +Revision ID: a5d59be86a15 +Revises: b18d8fa7232d +Create Date: 2022-07-19 17:46:10.362827 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'a5d59be86a15' +down_revision = 'b18d8fa7232d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('dataset_job_stages_v2', + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id of dataset job stage'), + sa.Column('uuid', sa.String(length=255), nullable=False, comment='dataset job stage uuid'), + sa.Column('name', sa.String(length=255), nullable=True, comment='dataset job stage name'), + sa.Column('state', sa.Enum('PENDING', 'RUNNING', 'SUCCEEDED', 'FAILED', 'STOPPED', name='datasetjobstate', length=64, native_enum=False, create_constraint=False), nullable=False, comment='dataset job stage state'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('workflow_id', sa.Integer(), nullable=True, comment='related workflow id'), + sa.Column('dataset_job_id', sa.Integer(), nullable=False, comment='dataset_job id'), + sa.Column('data_batch_id', sa.Integer(), nullable=False, comment='data_batch id'), + sa.Column('event_time', sa.DateTime(timezone=True), nullable=True, comment='event_time of data upload'), + sa.Column('global_configs', sa.Text(), nullable=True, comment='global configs of this stage, including related participants; only present in coordinator'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created time'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated time'), + sa.Column('started_at', sa.DateTime(timezone=True), nullable=True, comment='started_at'), + sa.Column('finished_at', sa.DateTime(timezone=True), nullable=True, comment='finished_at'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid',
name='uniq_dataset_job_stage_uuid'), + comment='dataset_job_stages_v2', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.add_column('data_batches_v2', sa.Column('name', sa.String(length=255), nullable=True, comment='data_batch name')) + op.add_column('dataset_jobs_v2', sa.Column('time_range', sa.Interval(), nullable=True, comment='time_range to create new job_stage')) + op.add_column('dataset_jobs_v2', sa.Column('event_time', sa.DateTime(timezone=True), nullable=True, comment='event_time for current data_batch')) + op.add_column('dataset_jobs_v2', sa.Column('scheduler_state', sa.Enum('PENDING', 'RUNNABLE', 'STOPPED', name='datasetjobschedulerstate', length=64, native_enum=False, create_constraint=False), nullable=True, comment='dataset job scheduler state')) + op.add_column('datasets_v2', sa.Column('store_format', sa.Enum('UNKNOWN', 'CSV', 'TFRECORDS', name='storeformat', length=32, native_enum=False, create_constraint=False), nullable=True, comment='dataset store format, like CSV, TFRECORDS, ...')) + op.add_column('datasets_v2', sa.Column('import_type', sa.Enum('COPY', 'NO_COPY', name='importtype', length=64, native_enum=False, create_constraint=False), server_default='COPY', nullable=True, comment='import type')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('datasets_v2', 'import_type') + op.drop_column('datasets_v2', 'store_format') + op.drop_column('dataset_jobs_v2', 'scheduler_state') + op.drop_column('dataset_jobs_v2', 'event_time') + op.drop_column('dataset_jobs_v2', 'time_range') + op.drop_column('data_batches_v2', 'name') + op.drop_table('dataset_job_stages_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/a71c7db41200_adds_cron_fields_for_composer.py b/web_console_v2/api/migrations/versions/a71c7db41200_adds_cron_fields_for_composer.py new file mode 100644 index 000000000..3da627faf --- /dev/null +++ b/web_console_v2/api/migrations/versions/a71c7db41200_adds_cron_fields_for_composer.py @@ -0,0 +1,30 @@ +"""adds cron fields for composer + +Revision ID: a71c7db41200 +Revises: ed5502db91ec +Create Date: 2021-09-04 00:42:07.794368 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'a71c7db41200' +down_revision = 'ed5502db91ec' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('scheduler_item_v2', sa.Column('cron_config', sa.String(length=255), nullable=True, comment='cron expression in UTC timezone')) + op.drop_column('scheduler_item_v2', 'interval_time') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('scheduler_item_v2', sa.Column('interval_time', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False, comment='item run interval in second')) + op.drop_column('scheduler_item_v2', 'cron_config') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/b18d8fa7232d_add_dataset_creator_username.py b/web_console_v2/api/migrations/versions/b18d8fa7232d_add_dataset_creator_username.py new file mode 100644 index 000000000..272d2059c --- /dev/null +++ b/web_console_v2/api/migrations/versions/b18d8fa7232d_add_dataset_creator_username.py @@ -0,0 +1,28 @@ +"""add dataset creator_username + +Revision ID: b18d8fa7232d +Revises: 351844c6fd92 +Create Date: 2022-07-11 12:05:46.582108 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'b18d8fa7232d' +down_revision = '351844c6fd92' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('creator_username', sa.String(length=255), nullable=True, comment='creator username')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('datasets_v2', 'creator_username') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/b3290c1bf67a_add_completed_failed_jobstate.py b/web_console_v2/api/migrations/versions/b3290c1bf67a_add_completed_failed_jobstate.py index 90f4614e0..5081e318f 100644 --- a/web_console_v2/api/migrations/versions/b3290c1bf67a_add_completed_failed_jobstate.py +++ b/web_console_v2/api/migrations/versions/b3290c1bf67a_add_completed_failed_jobstate.py @@ -23,16 +23,12 @@ def upgrade(): # 'drop check' is invalid if mysql version is less than 8 if version is not None and version.fetchall()[0][0] > '8.0.0': op.execute('ALTER TABLE job_v2 drop check jobstate') - op.alter_column('job_v2', 'state', nullable=False, comment='state', type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', 'COMPLETED', 'FAILED', name='jobstate', native_enum=False)) + op.alter_column('job_v2', 'state', nullable=False, comment='state', type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', 'NEW', 'COMPLETED', 'FAILED', name='jobstate', native_enum=False, create_constraint=False)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
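The upgrade hunk above has to drop the enum's CHECK constraint before widening the jobstate enum: with create_constraint=True, SQLAlchemy emits a named CHECK for the VARCHAR-backed enum, and MySQL 8 enforces it, while MySQL < 8 parses CHECK clauses but discards them (and rejects ALTER TABLE ... DROP CHECK as invalid syntax). The resulting guard from this revision, annotated:

```python
from alembic import op
import sqlalchemy as sa


def upgrade():
    bind = op.get_bind()
    version = bind.execute('select version()')
    # Only MySQL >= 8 has a jobstate CHECK to drop; on older servers the
    # DROP CHECK statement itself would fail.
    if version is not None and version.fetchall()[0][0] > '8.0.0':
        op.execute('ALTER TABLE job_v2 drop check jobstate')
    # Re-declare the column with the widened value set and no constraint,
    # so this dance is not needed for future additions.
    op.alter_column('job_v2', 'state', nullable=False, comment='state',
                    type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', 'NEW',
                                  'COMPLETED', 'FAILED', name='jobstate',
                                  native_enum=False, create_constraint=False))
```

Note that the comparison is lexicographic on the version string; that is adequate for telling 5.x from 8.x but would misclassify a hypothetical 10.x server.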
### bind = op.get_bind() - version = bind.execute('select version()') - # 'drop check' is invalid if mysql version is less than 8 - if version is not None and version.fetchall()[0][0] > '8.0.0': - op.execute('ALTER TABLE job_v2 drop check jobstate') - op.alter_column('job_v2', 'state', nullable=False, comment='state', type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', name='jobstate', native_enum=False)) + op.alter_column('job_v2', 'state', nullable=False, comment='state', type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', name='jobstate', native_enum=False, create_constraint=True)) # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/b3512a6ce912_initial_comment.py b/web_console_v2/api/migrations/versions/b3512a6ce912_initial_comment.py index 0ed47c8e9..dbdfa0943 100644 --- a/web_console_v2/api/migrations/versions/b3512a6ce912_initial_comment.py +++ b/web_console_v2/api/migrations/versions/b3512a6ce912_initial_comment.py @@ -23,7 +23,7 @@ def upgrade(): sa.Column('event_time', sa.TIMESTAMP(timezone=True), nullable=False, comment='event_time'), sa.Column('dataset_id', sa.Integer(), nullable=False, comment='dataset_id'), sa.Column('path', sa.String(length=512), nullable=True, comment='path'), - sa.Column('state', sa.Enum('NEW', 'SUCCESS', 'FAILED', 'IMPORTING', name='batchstate', native_enum=False), nullable=True, comment='state'), + sa.Column('state', sa.Enum('NEW', 'SUCCESS', 'FAILED', 'IMPORTING', name='batchstate', native_enum=False, create_constraint=True), nullable=True, comment='state'), sa.Column('move', sa.Boolean(), nullable=True, comment='move'), sa.Column('details', sa.LargeBinary(), nullable=True, comment='details'), sa.Column('file_size', sa.Integer(), nullable=True, comment='file_size'), @@ -42,7 +42,7 @@ def upgrade(): op.create_table('datasets_v2', sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), sa.Column('name', sa.String(length=255), nullable=False, comment='dataset name'), - sa.Column('dataset_type', sa.Enum('PSI', 'STREAMING', name='datasettype', native_enum=False), nullable=False, comment='data type'), + sa.Column('dataset_type', sa.Enum('PSI', 'STREAMING', name='datasettype', native_enum=False, create_constraint=True), nullable=False, comment='data type'), sa.Column('path', sa.String(length=512), nullable=True, comment='dataset path'), sa.Column('cmt', sa.Text(), nullable=True, comment='comment of dataset'), sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created time'), @@ -68,8 +68,8 @@ def upgrade(): op.create_table('job_v2', sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), sa.Column('name', sa.String(length=255), nullable=True, comment='name'), - sa.Column('job_type', sa.Enum('UNSPECIFIED', 'RAW_DATA', 'DATA_JOIN', 'PSI_DATA_JOIN', 'NN_MODEL_TRANINING', 'TREE_MODEL_TRAINING', 'NN_MODEL_EVALUATION', 'TREE_MODEL_EVALUATION', name='jobtype', native_enum=False), nullable=False, comment='job type'), - sa.Column('state', sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', name='jobstate', native_enum=False), nullable=False, comment='state'), + sa.Column('job_type', sa.Enum('UNSPECIFIED', 'RAW_DATA', 'DATA_JOIN', 'PSI_DATA_JOIN', 'NN_MODEL_TRANINING', 'TREE_MODEL_TRAINING', 'NN_MODEL_EVALUATION', 'TREE_MODEL_EVALUATION', name='jobtype', native_enum=False, create_constraint=True), nullable=False, comment='job type'), + sa.Column('state', sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', name='jobstate', 
native_enum=False, create_constraint=True), nullable=False, comment='state'), sa.Column('yaml_template', sa.Text(), nullable=True, comment='yaml_template'), sa.Column('config', sa.LargeBinary(), nullable=True, comment='config'), sa.Column('is_disabled', sa.Boolean(), nullable=True, comment='is_disabled'), @@ -142,13 +142,13 @@ def upgrade(): sa.Column('forked_from', sa.Integer(), nullable=True, comment='forked_from'), sa.Column('peer_create_job_flags', sa.TEXT(), nullable=True, comment='peer_create_job_flags'), sa.Column('fork_proposal_config', sa.LargeBinary(), nullable=True, comment='fork_proposal_config'), - sa.Column('recur_type', sa.Enum('NONE', 'ON_NEW_DATA', 'HOURLY', 'DAILY', 'WEEKLY', name='recurtype', native_enum=False), nullable=True, comment='recur_type'), + sa.Column('recur_type', sa.Enum('NONE', 'ON_NEW_DATA', 'HOURLY', 'DAILY', 'WEEKLY', name='recurtype', native_enum=False, create_constraint=True), nullable=True, comment='recur_type'), sa.Column('recur_at', sa.Interval(), nullable=True, comment='recur_at'), sa.Column('trigger_dataset', sa.Integer(), nullable=True, comment='trigger_dataset'), sa.Column('last_triggered_batch', sa.Integer(), nullable=True, comment='last_triggered_batch'), - sa.Column('state', sa.Enum('INVALID', 'NEW', 'READY', 'RUNNING', 'STOPPED', name='workflow_state', native_enum=False), nullable=True, comment='state'), - sa.Column('target_state', sa.Enum('INVALID', 'NEW', 'READY', 'RUNNING', 'STOPPED', name='workflow_target_state', native_enum=False), nullable=True, comment='target_state'), - sa.Column('transaction_state', sa.Enum('READY', 'ABORTED', 'COORDINATOR_PREPARE', 'COORDINATOR_COMMITTABLE', 'COORDINATOR_COMMITTING', 'COORDINATOR_ABORTING', 'PARTICIPANT_PREPARE', 'PARTICIPANT_COMMITTABLE', 'PARTICIPANT_COMMITTING', 'PARTICIPANT_ABORTING', name='transactionstate', native_enum=False), nullable=True, comment='transaction_state'), + sa.Column('state', sa.Enum('INVALID', 'NEW', 'READY', 'RUNNING', 'STOPPED', name='workflow_state', native_enum=False, create_constraint=True), nullable=True, comment='state'), + sa.Column('target_state', sa.Enum('INVALID', 'NEW', 'READY', 'RUNNING', 'STOPPED', name='workflow_target_state', native_enum=False, create_constraint=True), nullable=True, comment='target_state'), + sa.Column('transaction_state', sa.Enum('READY', 'ABORTED', 'COORDINATOR_PREPARE', 'COORDINATOR_COMMITTABLE', 'COORDINATOR_COMMITTING', 'COORDINATOR_ABORTING', 'PARTICIPANT_PREPARE', 'PARTICIPANT_COMMITTABLE', 'PARTICIPANT_COMMITTING', 'PARTICIPANT_ABORTING', name='transactionstate', native_enum=False, create_constraint=True), nullable=True, comment='transaction_state'), sa.Column('transaction_err', sa.Text(), nullable=True, comment='transaction_err'), sa.Column('start_at', sa.Integer(), nullable=True, comment='start_at'), sa.Column('stop_at', sa.Integer(), nullable=True, comment='stop_at'), diff --git a/web_console_v2/api/migrations/versions/b659f5763a27_rename_path_to_fspath.py b/web_console_v2/api/migrations/versions/b659f5763a27_rename_path_to_fspath.py new file mode 100644 index 000000000..29b21ef48 --- /dev/null +++ b/web_console_v2/api/migrations/versions/b659f5763a27_rename_path_to_fspath.py @@ -0,0 +1,30 @@ +"""rename path to fspath + +Revision ID: b659f5763a27 +Revises: a0c688644b84 +Create Date: 2021-07-16 11:06:40.851662 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. 
+revision = 'b659f5763a27' +down_revision = 'a0c688644b84' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('intersection_datasets_v2', sa.Column('fspath', sa.String(length=512), nullable=True, comment='dataset path')) + op.drop_column('intersection_datasets_v2', 'path') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('intersection_datasets_v2', sa.Column('path', mysql.VARCHAR(length=512), nullable=True, comment='dataset path')) + op.drop_column('intersection_datasets_v2', 'fspath') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/b809788132d0_add_cronjob_config.py b/web_console_v2/api/migrations/versions/b809788132d0_add_cronjob_config.py new file mode 100644 index 000000000..e597144f5 --- /dev/null +++ b/web_console_v2/api/migrations/versions/b809788132d0_add_cronjob_config.py @@ -0,0 +1,28 @@ +"""add cronjob config + +Revision ID: b809788132d0 +Revises: 263717c82a6a +Create Date: 2021-08-05 19:06:12.212366 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'b809788132d0' +down_revision = '263717c82a6a' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('workflow_v2', sa.Column('cronjob_config', sa.Text(), nullable=True, comment='cronjob json string')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workflow_v2', 'cronjob_config') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/be2c1a6523cd_update_algorithm_project_unique_.py b/web_console_v2/api/migrations/versions/be2c1a6523cd_update_algorithm_project_unique_.py new file mode 100644 index 000000000..33e35eeb2 --- /dev/null +++ b/web_console_v2/api/migrations/versions/be2c1a6523cd_update_algorithm_project_unique_.py @@ -0,0 +1,30 @@ +"""update_algorithm_project_unique_constraint + +Revision ID: be2c1a6523cd +Revises: 4d281f0968fa +Create Date: 2022-11-28 16:30:13.084346 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'be2c1a6523cd' +down_revision = '4d281f0968fa' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('uniq_name_source', table_name='algorithm_projects_v2') + op.create_unique_constraint('uniq_name_source_project_id', 'algorithm_projects_v2', ['name', 'source', 'project_id']) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint('uniq_name_source_project_id', 'algorithm_projects_v2', type_='unique') + op.create_index('uniq_name_source', 'algorithm_projects_v2', ['name', 'source'], unique=True) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/bf5e0cdc3e49_add_state_to_dataset.py b/web_console_v2/api/migrations/versions/bf5e0cdc3e49_add_state_to_dataset.py new file mode 100644 index 000000000..e00741655 --- /dev/null +++ b/web_console_v2/api/migrations/versions/bf5e0cdc3e49_add_state_to_dataset.py @@ -0,0 +1,28 @@ +"""add state to dataset + +Revision ID: bf5e0cdc3e49 +Revises: 8259e68074a0 +Create Date: 2022-01-06 02:29:18.387080 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'bf5e0cdc3e49' +down_revision = '8259e68074a0' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('datasets_v2', sa.Column('state', sa.Enum('NEW', 'CHECKING', 'CHECKED', 'CHECK_ERR', 'IMPORTING', 'IMPORTED', 'IMPORT_ERR', 'CONVERTING', 'CONVERTED', 'CONVERT_ERR', 'BATCH_STATISTICIZING', 'BATCH_STATISTICIZED', 'BATCH_STATISTICIZE_ERR', 'ANALYZING', 'ANALYZED', 'ANALYZE_ERR', 'UNKNOWN', 'DELETING', 'DELETED', 'DELETE_ERR', name='datasetstate', native_enum=False, length=64, create_constraint=False), nullable=True, comment='state')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('datasets_v2', 'state') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/c01cdd2253e4_removes_workflow_template_is_left.py b/web_console_v2/api/migrations/versions/c01cdd2253e4_removes_workflow_template_is_left.py new file mode 100644 index 000000000..315170942 --- /dev/null +++ b/web_console_v2/api/migrations/versions/c01cdd2253e4_removes_workflow_template_is_left.py @@ -0,0 +1,28 @@ +"""Removes workflow template is_left + +Revision ID: c01cdd2253e4 +Revises: 0166249ad82d +Create Date: 2022-09-22 18:15:23.957189 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'c01cdd2253e4' +down_revision = '0166249ad82d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('template_v2', 'is_left') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('template_v2', sa.Column('is_left', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True, comment='is_left')) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/c038f88210a7_add_event_result_code_and_project_id.py b/web_console_v2/api/migrations/versions/c038f88210a7_add_event_result_code_and_project_id.py new file mode 100644 index 000000000..ebc81bdac --- /dev/null +++ b/web_console_v2/api/migrations/versions/c038f88210a7_add_event_result_code_and_project_id.py @@ -0,0 +1,32 @@ +"""add event result_code and project_id + +Revision ID: c038f88210a7 +Revises: 4d7e27d1b38f +Create Date: 2022-08-15 21:58:19.172529 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c038f88210a7' +down_revision = '4d7e27d1b38f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('events_v2', sa.Column('result_code', sa.String(length=255), nullable=True, comment='the result code of the operation')) + op.add_column('events_v2', sa.Column('coordinator_pure_domain_name', sa.String(length=255), nullable=True, comment='name of the coordinator')) + op.add_column('events_v2', sa.Column('project_id', sa.Integer(), nullable=True, comment='project_id corresponds to participants name')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('events_v2', 'project_id') + op.drop_column('events_v2', 'coordinator_pure_domain_name') + op.drop_column('events_v2', 'result_code') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/c06f66dbebc1_add_username_to_workflow.py b/web_console_v2/api/migrations/versions/c06f66dbebc1_add_username_to_workflow.py new file mode 100644 index 000000000..2123f8491 --- /dev/null +++ b/web_console_v2/api/migrations/versions/c06f66dbebc1_add_username_to_workflow.py @@ -0,0 +1,28 @@ +"""add_username_to_workflow + +Revision ID: c06f66dbebc1 +Revises: cf4d3ba429e0 +Create Date: 2021-10-09 19:03:18.301277 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c06f66dbebc1' +down_revision = 'cf4d3ba429e0' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('workflow_v2', sa.Column('creator', sa.String(length=255), nullable=True, comment='the username of the creator')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workflow_v2', 'creator') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/c2a9703b8472_add_mmgr_group_participant_info_and_.py b/web_console_v2/api/migrations/versions/c2a9703b8472_add_mmgr_group_participant_info_and_.py new file mode 100644 index 000000000..864426a3a --- /dev/null +++ b/web_console_v2/api/migrations/versions/c2a9703b8472_add_mmgr_group_participant_info_and_.py @@ -0,0 +1,30 @@ +"""add_mmgr_group_participant_info_and_auth_status + +Revision ID: c2a9703b8472 +Revises: df7a0b85b0c8 +Create Date: 2022-10-10 17:05:59.573061 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'c2a9703b8472' +down_revision = 'df7a0b85b0c8' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('auth_status', sa.Enum('PENDING', 'AUTHORIZED', 'WITHDRAW', name='authstatus', native_enum=False, create_constraint=False, length=32), nullable=True, comment='auth status')) + op.add_column('model_groups_v2', sa.Column('participants_info', sa.Text(), nullable=True, comment='participants info')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('model_groups_v2', 'participants_info') + op.drop_column('model_groups_v2', 'auth_status') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/c3e83aed516c_mmgr_support_auto_update.py b/web_console_v2/api/migrations/versions/c3e83aed516c_mmgr_support_auto_update.py new file mode 100644 index 000000000..70975232c --- /dev/null +++ b/web_console_v2/api/migrations/versions/c3e83aed516c_mmgr_support_auto_update.py @@ -0,0 +1,34 @@ +"""mmgr support auto update + +Revision ID: c3e83aed516c +Revises: 993096ed6fb3 +Create Date: 2022-11-01 17:11:34.679389 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c3e83aed516c' +down_revision = '993096ed6fb3' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('auto_update_status', sa.Enum('INITIAL', 'ACTIVE', 'STOPPED', name='groupautoupdatestatus', native_enum=False, length=64, create_constraint=False), nullable=True, comment='auto update status')) + op.add_column('model_groups_v2', sa.Column('start_data_batch_id', sa.Integer(), nullable=True, comment='start data_batches id for auto update job')) + op.add_column('model_jobs_v2', sa.Column('auto_update', sa.Boolean(), server_default=sa.text('0'), nullable=True, comment='is auto update')) + op.add_column('model_jobs_v2', sa.Column('data_batch_id', sa.Integer(), nullable=True, comment='data_batches id for auto update job')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('model_jobs_v2', 'data_batch_id') + op.drop_column('model_jobs_v2', 'auto_update') + op.drop_column('model_groups_v2', 'start_data_batch_id') + op.drop_column('model_groups_v2', 'auto_update_status') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/cd52ddd1d2ac_add_algorithm_project_publish_status.py b/web_console_v2/api/migrations/versions/cd52ddd1d2ac_add_algorithm_project_publish_status.py new file mode 100644 index 000000000..f6d9e626c --- /dev/null +++ b/web_console_v2/api/migrations/versions/cd52ddd1d2ac_add_algorithm_project_publish_status.py @@ -0,0 +1,28 @@ +"""add_algorithm_project_publish_status + +Revision ID: cd52ddd1d2ac +Revises: 554fd3c48986 +Create Date: 2022-08-25 13:45:34.551929 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'cd52ddd1d2ac' +down_revision = '554fd3c48986' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('algorithm_projects_v2', sa.Column('publish_status_v2', sa.Enum('UNPUBLISHED', 'PUBLISHED', name='publishstatus', native_enum=False, create_constraint=False, length=32), server_default='UNPUBLISHED', nullable=True, comment='publish status')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
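The publish_status_v2 column above follows the string-backed enum convention used across this changeset: native_enum=False renders the column as VARCHAR(length) rather than a native MySQL ENUM, create_constraint=False skips the CHECK constraint so new members can be added without any ALTER TABLE, and server_default backfills existing rows at DDL time; value validation is left to the SQLAlchemy layer rather than the database. A minimal sketch of the pattern, with a hypothetical column name (review_status is illustrative only):

```python
from alembic import op
import sqlalchemy as sa


def upgrade():
    op.add_column(
        'algorithm_projects_v2',
        sa.Column('review_status',  # hypothetical column, for illustration
                  # Stored as VARCHAR(32); no native ENUM, no CHECK constraint.
                  sa.Enum('UNPUBLISHED', 'PUBLISHED', name='reviewstatus',
                          native_enum=False, create_constraint=False, length=32),
                  server_default='UNPUBLISHED',  # existing rows pick this up
                  nullable=True,
                  comment='review status'))


def downgrade():
    op.drop_column('algorithm_projects_v2', 'review_status')
```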
### + op.drop_column('algorithm_projects_v2', 'publish_status_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/cf4d3ba429e0_remove_podssnapshots.py b/web_console_v2/api/migrations/versions/cf4d3ba429e0_remove_podssnapshots.py new file mode 100644 index 000000000..cdfc07df4 --- /dev/null +++ b/web_console_v2/api/migrations/versions/cf4d3ba429e0_remove_podssnapshots.py @@ -0,0 +1,28 @@ +"""remove_podssnapshots + +Revision ID: cf4d3ba429e0 +Revises: 2d9c3afaa7fe +Create Date: 2021-09-27 14:33:05.699137 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'cf4d3ba429e0' +down_revision = '2d9c3afaa7fe' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('job_v2', 'pods_snapshot') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('job_v2', sa.Column('pods_snapshot', mysql.LONGTEXT(), nullable=True, comment='pods snapshot')) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/d6b3fa7f23a6_add_dataset_id_in_model.py b/web_console_v2/api/migrations/versions/d6b3fa7f23a6_add_dataset_id_in_model.py new file mode 100644 index 000000000..940da1946 --- /dev/null +++ b/web_console_v2/api/migrations/versions/d6b3fa7f23a6_add_dataset_id_in_model.py @@ -0,0 +1,28 @@ +"""add_dataset_id_in_model + +Revision ID: d6b3fa7f23a6 +Revises: 8037467dca2c +Create Date: 2022-03-10 13:37:42.477721 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'd6b3fa7f23a6' +down_revision = '8037467dca2c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('model_groups_v2', sa.Column('dataset_id', sa.Integer(), nullable=True, comment='dataset id')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('model_groups_v2', 'dataset_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/dd16ff2b58ef_change_dataset_size_to_int64.py b/web_console_v2/api/migrations/versions/dd16ff2b58ef_change_dataset_size_to_int64.py new file mode 100644 index 000000000..98d835b61 --- /dev/null +++ b/web_console_v2/api/migrations/versions/dd16ff2b58ef_change_dataset_size_to_int64.py @@ -0,0 +1,38 @@ +"""change dataset size to int64 + +Revision ID: dd16ff2b58ef +Revises: 3cec138fc005 +Create Date: 2021-12-02 13:48:20.757407 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'dd16ff2b58ef' +down_revision = '3cec138fc005' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('data_batches_v2', 'file_size', + existing_type=sa.Integer(), + type_=sa.BigInteger(), + comment='file_size in bytes', + existing_comment='file_size', + existing_nullable=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('data_batches_v2', 'file_size', + existing_type=sa.BigInteger(), + type_=sa.Integer(), + comment='file_size', + existing_comment='file_size in bytes', + existing_nullable=True) + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/df7a0b85b0c8_add_dataset_job_creator_name.py b/web_console_v2/api/migrations/versions/df7a0b85b0c8_add_dataset_job_creator_name.py new file mode 100644 index 000000000..bf132cf19 --- /dev/null +++ b/web_console_v2/api/migrations/versions/df7a0b85b0c8_add_dataset_job_creator_name.py @@ -0,0 +1,29 @@ +"""add dataset_job creator_name + +Revision ID: df7a0b85b0c8 +Revises: 368b8bd6d559 +Create Date: 2022-10-10 15:15:06.964309 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'df7a0b85b0c8' +down_revision = '368b8bd6d559' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('dataset_jobs_v2', + sa.Column('creator_username', sa.String(length=255), nullable=True, comment='creator username')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('dataset_jobs_v2', 'creator_username') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/e3166ab65528_add_config_to_model_group.py b/web_console_v2/api/migrations/versions/e3166ab65528_add_config_to_model_group.py new file mode 100644 index 000000000..bc46370ff --- /dev/null +++ b/web_console_v2/api/migrations/versions/e3166ab65528_add_config_to_model_group.py @@ -0,0 +1,44 @@ +"""add_config_to_model_group + +Revision ID: e3166ab65528 +Revises: 56c35c0544e7 +Create Date: 2022-01-17 11:27:24.951416 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'e3166ab65528' +down_revision = '56c35c0544e7' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
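A note on alter_column calls like the file_size widening just above: on MySQL, Alembic emits a MODIFY COLUMN statement, which must restate the entire column definition, so the existing_type, existing_comment, and existing_nullable arguments tell Alembic what to re-emit for the attributes that are not changing. The dd16ff2b58ef upgrade, annotated:

```python
from alembic import op
import sqlalchemy as sa


def upgrade():
    # MODIFY COLUMN rewrites the whole definition; existing_* arguments
    # preserve everything this migration does not intend to change.
    op.alter_column('data_batches_v2', 'file_size',
                    existing_type=sa.Integer(),    # current type, restated
                    type_=sa.BigInteger(),         # the actual change
                    comment='file_size in bytes',  # new, clearer comment
                    existing_comment='file_size',
                    existing_nullable=True)        # stays nullable
```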
### + op.add_column('model_groups_v2', sa.Column('algorithm_id', sa.Integer(), nullable=True, comment='algorithm id')) + op.add_column('model_groups_v2', sa.Column('algorithm_project_id', sa.Integer(), nullable=True, comment='algorithm project id')) + op.add_column('model_groups_v2', sa.Column('algorithm_type', sa.Enum('UNSPECIFIED', 'NN_LOCAL', 'NN_HORIZONTAL', 'NN_VERTICAL', 'TREE_VERTICAL', name='algorithmtype', native_enum=False, create_constraint=False, length=64), default='UNSPECIFIED', nullable=True, comment='algorithm type')) + op.add_column('model_groups_v2', sa.Column('authorized', sa.Boolean(), nullable=True, comment='authorized to participants in project')) + op.add_column('model_groups_v2', sa.Column('config', sa.Text(length=16777215), nullable=True, comment='config')) + op.add_column('model_groups_v2', sa.Column('intersection_dataset_id', sa.Integer(), nullable=True, comment='intersection dataset id')) + op.add_column('model_groups_v2', sa.Column('role', sa.Enum('PARTICIPANT', 'COORDINATOR', name='modeljobrole', native_enum=False, create_constraint=False, length=64), default='PARTICIPANT', nullable=True, comment='role')) + op.add_column('model_groups_v2', sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid')) + op.add_column('model_jobs_v2', sa.Column('role', sa.Enum('PARTICIPANT', 'COORDINATOR', name='modeljobrole', native_enum=False, create_constraint=False, length=64), default='PARTICIPANT', nullable=True, comment='role')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('model_jobs_v2', 'role') + op.drop_column('model_groups_v2', 'uuid') + op.drop_column('model_groups_v2', 'role') + op.drop_column('model_groups_v2', 'intersection_dataset_id') + op.drop_column('model_groups_v2', 'config') + op.drop_column('model_groups_v2', 'authorized') + op.drop_column('model_groups_v2', 'algorithm_type') + op.drop_column('model_groups_v2', 'algorithm_project_id') + op.drop_column('model_groups_v2', 'algorithm_id') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/e4c8d7a2cf34_add_crd_info2job.py b/web_console_v2/api/migrations/versions/e4c8d7a2cf34_add_crd_info2job.py new file mode 100644 index 000000000..d994e342f --- /dev/null +++ b/web_console_v2/api/migrations/versions/e4c8d7a2cf34_add_crd_info2job.py @@ -0,0 +1,32 @@ +"""add_crd_info2job + +Revision ID: e4c8d7a2cf34 +Revises: bf5e0cdc3e49 +Create Date: 2022-01-13 14:34:54.149554 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e4c8d7a2cf34' +down_revision = 'bf5e0cdc3e49' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('job_v2', sa.Column('crd_kind', sa.String(length=255), nullable=True, comment='kind')) + op.add_column('job_v2', sa.Column('crd_meta', sa.Text(), nullable=True, comment='metadata')) + op.add_column('job_v2', sa.Column('snapshot', sa.Text(length=16777215), nullable=True, comment='snapshot')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('job_v2', 'snapshot') + op.drop_column('job_v2', 'crd_meta') + op.drop_column('job_v2', 'crd_kind') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/e5d91f0f59a7_add_local_extra_field_to_workflow.py b/web_console_v2/api/migrations/versions/e5d91f0f59a7_add_local_extra_field_to_workflow.py new file mode 100644 index 000000000..5b9e97b73 --- /dev/null +++ b/web_console_v2/api/migrations/versions/e5d91f0f59a7_add_local_extra_field_to_workflow.py @@ -0,0 +1,32 @@ +"""add local_extra field to workflow + +Revision ID: e5d91f0f59a7 +Revises: b659f5763a27 +Create Date: 2021-06-24 16:43:39.271702 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'e5d91f0f59a7' +down_revision = 'b659f5763a27' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + 'workflow_v2', + sa.Column('local_extra', + sa.Text(), + nullable=True, + comment='local_extra')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workflow_v2', 'local_extra') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/e9ce77d87969_add_algorithms_table.py b/web_console_v2/api/migrations/versions/e9ce77d87969_add_algorithms_table.py new file mode 100644 index 000000000..899bd9f78 --- /dev/null +++ b/web_console_v2/api/migrations/versions/e9ce77d87969_add_algorithms_table.py @@ -0,0 +1,49 @@ +"""add_algorithms_table + +Revision ID: e9ce77d87969 +Revises: 2ffa86e5e692 +Create Date: 2021-11-01 11:35:27.910796 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e9ce77d87969' +down_revision = '2ffa86e5e692' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('algorithms_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('name', sa.String(length=255), nullable=True, comment='name'), + sa.Column('project_id', sa.Integer(), nullable=True, comment='project id'), + sa.Column('version', sa.Integer(), nullable=True, comment='version'), + sa.Column('algorithm_type', sa.Enum('UNSPECIFIED', 'NN_LOCAL', 'NN_HORIZONTAL', 'NN_VERTICAL', 'TREE_VERTICAL', name='algorithmtype', native_enum=False, create_constraint=False, length=64), default='UNSPECIFIED', nullable=True, comment='algorithm type'), + sa.Column('source', sa.Enum('UNSPECIFIED', 'PRESET', 'USER', 'THIRD_PARTY', name='source', native_enum=False, create_constraint=False, length=32), default='UNSPECIFIED', nullable=True, comment='source'), + sa.Column('creator_id', sa.Integer(), nullable=True, comment='creator user id'), + sa.Column('fspath', sa.String(length=512), nullable=True, comment='algorithm path'), + sa.Column('parameter', sa.Text(), nullable=True, comment='parameter'), + sa.Column('favorite', sa.Boolean(), nullable=True, comment='favorite'), + sa.Column('cmt', sa.String(length=255), nullable=True, comment='comment'), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True, comment='created time'), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True, comment='updated time'), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True, comment='deleted time'), + sa.PrimaryKeyConstraint('id'), + comment='algorithms', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_name', 'algorithms_v2', ['name'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_name', table_name='algorithms_v2') + op.drop_table('algorithms_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/ec68faa511cc_add_2pc_transaction_table.py b/web_console_v2/api/migrations/versions/ec68faa511cc_add_2pc_transaction_table.py new file mode 100644 index 000000000..647f29a8d --- /dev/null +++ b/web_console_v2/api/migrations/versions/ec68faa511cc_add_2pc_transaction_table.py @@ -0,0 +1,42 @@ +"""add_2pc_transaction_table + +Revision ID: ec68faa511cc +Revises: 8b179c0111a8 +Create Date: 2021-10-21 14:22:43.292766 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ec68faa511cc' +down_revision = '8b179c0111a8' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('transactions_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='id'), + sa.Column('uuid', sa.String(length=64), nullable=True, comment='uuid'), + sa.Column('type', sa.String(length=32), nullable=True, comment='2pc type name'), + sa.Column('state', sa.Enum('NEW', 'PREPARE_SUCCEEDED', 'PREPARE_FAILED', 'COMMITTED', 'ABORTED', 'INVALID', name='transactionstate', native_enum=False, create_constraint=False, length=32), nullable=True, comment='state'), + sa.Column('message', sa.Text(), nullable=True, comment='message of the last action'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='update_at'), + sa.PrimaryKeyConstraint('id'), + comment='2pc transactions', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('uniq_uuid', 'transactions_v2', ['uuid'], unique=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('uniq_uuid', table_name='transactions_v2') + op.drop_table('transactions_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/ed5502db91ec_add_user_sso_name.py b/web_console_v2/api/migrations/versions/ed5502db91ec_add_user_sso_name.py new file mode 100644 index 000000000..a17129b62 --- /dev/null +++ b/web_console_v2/api/migrations/versions/ed5502db91ec_add_user_sso_name.py @@ -0,0 +1,28 @@ +"""add user sso name + +Revision ID: ed5502db91ec +Revises: efc25abe02fa +Create Date: 2021-08-31 13:23:19.444151 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ed5502db91ec' +down_revision = 'efc25abe02fa' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('users_v2', sa.Column('sso_name', sa.String(length=255), nullable=True, comment='sso_name')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('users_v2', 'sso_name') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/efc25abe02fa_add_participant_table_and_relationship_.py b/web_console_v2/api/migrations/versions/efc25abe02fa_add_participant_table_and_relationship_.py new file mode 100644 index 000000000..a3fb9aaf1 --- /dev/null +++ b/web_console_v2/api/migrations/versions/efc25abe02fa_add_participant_table_and_relationship_.py @@ -0,0 +1,59 @@ +"""add participant table and relationship table + +Revision ID: efc25abe02fa +Revises: a1f5564f7d00 +Create Date: 2021-09-01 11:53:49.220421 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'efc25abe02fa' +down_revision = 'a1f5564f7d00' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('participants_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='participant id'), + sa.Column('name', sa.String(length=255), nullable=False, comment='participant name'), + sa.Column('domain_name', sa.String(length=255), nullable=False, comment='participant domain_name'), + sa.Column('host', sa.String(length=255), nullable=True, comment='participant host'), + sa.Column('port', sa.Integer(), nullable=True, comment='host port'), + sa.Column('cmt', sa.Text(), nullable=True, comment='comment'), + sa.Column('certificates', sa.LargeBinary(), nullable=True, comment='certificates'), + sa.Column('extra', sa.Text(), nullable=True, comment='extra_info'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created at'), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='updated at'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('domain_name', name='uniq_domain_name'), + comment='This is webconsole participant table.', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_table('projects_participants_v2', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False, comment='relationship id'), + sa.Column('project_id', sa.Integer(), nullable=False, comment='project id'), + sa.Column('participant_id', sa.Integer(), nullable=False, comment='participant id'), + sa.PrimaryKeyConstraint('id'), + comment='This is webconsole projects and participants relationship table.', + mysql_charset='utf8mb4', + mysql_engine='innodb' + ) + op.create_index('idx_participant_id', 'projects_participants_v2', ['participant_id'], unique=False) + op.create_index('idx_project_id', 'projects_participants_v2', ['project_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_project_id', table_name='projects_participants_v2') + op.drop_index('idx_participant_id', table_name='projects_participants_v2') + op.drop_table('projects_participants_v2') + op.drop_table('participants_v2') + # ### end Alembic commands ### diff --git a/web_console_v2/api/migrations/versions/f252b315e730_add_creator_and_create_time.py b/web_console_v2/api/migrations/versions/f252b315e730_add_creator_and_create_time.py new file mode 100644 index 000000000..39ee67140 --- /dev/null +++ b/web_console_v2/api/migrations/versions/f252b315e730_add_creator_and_create_time.py @@ -0,0 +1,36 @@ +"""add_creator_and_create_time + +Revision ID: f252b315e730 +Revises: 14e747dbcc2f +Create Date: 2022-04-27 17:17:01.269616 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'f252b315e730' +down_revision = '14e747dbcc2f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust!
### + op.drop_column('model_groups_v2', 'intersection_dataset_id') + op.drop_column('model_jobs_v2', 'intersection_dataset_id') + op.add_column('template_v2', sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='created_at')) + op.add_column('template_v2', sa.Column('creator_username', sa.String(length=255), nullable=True, comment='the username of the creator')) + op.add_column('template_v2', sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True, comment='update_at')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('template_v2', 'updated_at') + op.drop_column('template_v2', 'creator_username') + op.drop_column('template_v2', 'created_at') + op.add_column('model_jobs_v2', sa.Column('intersection_dataset_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='intersection dataset id')) + op.add_column('model_groups_v2', sa.Column('intersection_dataset_id', mysql.INTEGER(), autoincrement=False, nullable=True, comment='intersection dataset id')) + # ### end Alembic commands ### diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/BUILD.bazel b/web_console_v2/api/protocols/fedlearner_webconsole/proto/BUILD.bazel new file mode 100644 index 000000000..e8f361673 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/BUILD.bazel @@ -0,0 +1,89 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") +load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_grpc_library", "py_proto_library") + +proto_library( + name = "proto", + srcs = [ + "algorithm.proto", + "audit.proto", + "auth.proto", + "bcs_transaction.proto", + "cleanup.proto", + "common.proto", + "composer.proto", + "dataset.proto", + "e2e.proto", + "filtering.proto", + "job.proto", + "k8s.proto", + "metrics.proto", + "mmgr.proto", + "notification.proto", + "participant.proto", + "project.proto", + "review.proto", + "service.proto", + "serving.proto", + "setting.proto", + "sparkapp.proto", + "tee.proto", + "tree_model.proto", + "two_pc.proto", + "workflow.proto", + "workflow_definition.proto", + "workflow_template.proto", + ], + strip_import_prefix = "/web_console_v2/api/protocols", + visibility = ["//visibility:public"], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:proto", + "@com_google_protobuf//:descriptor_proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:wrappers_proto", + ], +) + +py_proto_library( + name = "py_proto_internal", + visibility = ["//visibility:private"], + deps = [ + ":proto", + ], +) + +py_grpc_library( + name = "py_grpc_internal", + srcs = [ + ":proto", + ], + visibility = ["//visibility:private"], + deps = [ + ":py_proto_internal", + ], +) + +py_library( + name = "py_proto", + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":py_grpc_internal", + ":py_proto_internal", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:py_proto", + ], +) + +go_proto_library( + name = "go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "fedlearner.net/proto/console", + protos = [ + ":proto", + ], + visibility = ["//visibility:public"], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:go_proto", + ], +) diff --git 
a/web_console_v2/api/protocols/fedlearner_webconsole/proto/algorithm.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/algorithm.proto new file mode 100644 index 000000000..44b645ff2 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/algorithm.proto @@ -0,0 +1,127 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +import "fedlearner_webconsole/proto/common/extension.proto"; +package fedlearner_webconsole.proto; + + +message AlgorithmVariable { + + enum ValueType { + STRING = 0; + INTEGER = 1; + FLOAT = 2; + } + + string name = 1; + string value = 2; + bool required = 3; + string display_name = 4; + string comment = 5; + ValueType value_type = 6; +} + +message AlgorithmParameter { + repeated AlgorithmVariable variables = 1; +} + +message FileTreeNode { + string filename = 1; + string path = 2; + // size in bytes + int64 size = 3; + int64 mtime = 4; + bool is_directory = 5; + repeated FileTreeNode files = 6; +} + +message AlgorithmData { + string name = 1; + string algorithm_type = 2; + int64 version = 3; + AlgorithmParameter parameter = 4; + // chunk of tar from algorithm file + bytes chunk = 5; + string comment = 6; + string algorithm_uuid = 7; + string algorithm_project_uuid = 8; +} + +message PendingAlgorithmPb { + int64 id = 1; + string algorithm_uuid = 2; + string algorithm_project_uuid = 3; + string name = 4; + int64 project_id = 5; + int64 version = 6; + string type = 7; + int64 participant_id = 8; + string participant_name = 9; + string path = 10; + AlgorithmParameter parameter = 11; + string comment = 12; + int64 created_at = 13; + int64 updated_at = 14; + int64 deleted_at = 15; + int64 algorithm_project_id = 16; +} + +message AlgorithmPb { + int64 id = 1 [(fedlearner_webconsole.proto.secret) = true]; + string uuid = 2; + string name = 3; + int64 project_id = 4; + int64 version = 5; + string type = 6; + string source = 7; + int64 algorithm_project_id = 8 [(fedlearner_webconsole.proto.secret) = true]; + string username = 9 [(fedlearner_webconsole.proto.secret) = true]; + int64 participant_id = 10 [(fedlearner_webconsole.proto.secret) = true]; + string participant_name = 11 [(fedlearner_webconsole.proto.secret) = true]; + string path = 12 [(fedlearner_webconsole.proto.secret) = true]; + AlgorithmParameter parameter = 13; + bool favorite = 14; + string comment = 15; + int64 created_at = 16; + int64 updated_at = 17; + int64 deleted_at = 18; + // ref: AlgorithmStatus + string status = 19; + string algorithm_project_uuid = 20; +} + +message AlgorithmProjectPb { + int64 id = 1 [(fedlearner_webconsole.proto.secret) = true]; + string uuid = 2; + string name = 3; + int64 project_id = 4; + int64 latest_version = 5; + string type = 6; + string source = 7; + string publish_status = 8 [deprecated=true]; + string username = 9 [(fedlearner_webconsole.proto.secret) = true]; + int64 participant_id = 10 [(fedlearner_webconsole.proto.secret) = true]; + string 
participant_name = 11 [(fedlearner_webconsole.proto.secret) = true]; + string path = 12 [(fedlearner_webconsole.proto.secret) = true]; + AlgorithmParameter parameter = 13; + string comment = 14; + int64 created_at = 15; + int64 updated_at = 16; + int64 deleted_at = 17; + repeated AlgorithmPb algorithms = 18; + string release_status = 19; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/audit.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/audit.proto new file mode 100644 index 000000000..e8e904b3f --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/audit.proto @@ -0,0 +1,93 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +import "fedlearner_webconsole/proto/auth.proto"; + +message Event { + enum ResourceType { + UNKNOWN_RESOURCE_TYPE = 0; + IAM = 1; + WORKSPACE = 2; + TEMPLATE = 3; + WORKFLOW = 4; + DATASET = 5; + MODEL = 6; + MODEL_JOB_GROUP = 7; + MODEL_JOB = 8; + USER = 9; + SYSTEM = 10; + PARTICIPANT = 11; + SERVING_SERVICE = 12; + ALGORITHM = 13; + ALGORITHM_PROJECT = 14; + PRESET_ALGORITHM = 15; + TRUSTED_JOB_GROUP = 16; + TRUSTED_JOB = 17; + DATASET_JOB = 18; + DATASET_JOB_STAGE = 19; + PENDING_PROJECT = 20; + PROJECT = 21; + TRUSTED_EXPORT_JOB = 22; + } + + enum OperationType { + UNKNOWN_OPERATION_TYPE = 0; + CREATE = 1; + DELETE = 2; + UPDATE = 3; + INVALIDATE = 4; + UPDATE_STATE = 5; + STOP = 6; + OPERATE = 7; + INFERENCE = 8; + INFORM = 9; + CONTROL_STATE = 10; + LAUNCH = 11; + } + + enum Result { + UNKNOWN_RESULT = 0; + SUCCESS = 1; + FAILURE = 2; + } + + enum Source { + UNKNOWN_SOURCE = 0; + UI = 1; + API = 2; + RPC = 3; + } + + int64 user_id = 1; + ResourceType resource_type = 2; + string resource_name = 3; + OperationType op_type = 4; + Result result = 5; + Source source = 6; + string name = 7; + string extra = 8; + User user = 9; + int64 event_id = 10; + string uuid = 11; + int64 created_at = 12; + // ref: grpc StatusCode + string result_code= 13; + string coordinator_pure_domain_name = 14; + int64 project_id = 15; +} \ No newline at end of file diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/auth.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/auth.proto new file mode 100644 index 000000000..b5425fb05 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/auth.proto @@ -0,0 +1,84 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +import "fedlearner_webconsole/proto/common/extension.proto"; +package fedlearner_webconsole.proto; + +// More details about CAS at +// https://apereo.github.io/cas/5.1.x/protocol/CAS-Protocol.html +message CasProtocol { + string cas_server_url = 1; + string service_url = 2; + string login_route = 3; + string validate_route = 4; +} + +message OAuthProtocol { + // Client id of our app + string client_id = 1; + // URLs of OAuth server + string authorize_url = 2; + string access_token_url = 3; + string user_info_url = 4; + string logout_url = 5; + string redirect_uri = 6; + // Query key used by the frontend to get the OAuth code + string code_key = 7; + // Not visible for frontend + string secret = 8 [(fedlearner_webconsole.proto.secret) = true]; + string username_key = 9 [(fedlearner_webconsole.proto.secret) = true]; + string email_key = 10 [(fedlearner_webconsole.proto.secret) = true]; +} + +message CustomProtocol { + // Not visible for frontend + string access_key = 1; + string authorize_url = 2; +} + +message Sso { + string name = 1; + string icon_url = 2; + string protocol_type = 3; + oneof protocol { + CasProtocol cas = 4; + OAuthProtocol oauth = 5; + CustomProtocol custom = 6; + } + string display_name = 7; +} + +message User { + int64 id = 1; + string username = 2; + string password = 3 [(fedlearner_webconsole.proto.secret) = true]; + // Name of the role enum + string role = 4; + string name = 5; + string email = 6; + // Name of the state enum + string state = 7; + string sso_name = 8; + int64 last_sign_in_at = 9; + int64 failed_sign_in_attempts = 10; +} + +message SigninParameter { + string username = 1; + string password = 2; + string code = 3; + string ticket = 4; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/bcs_transaction.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/bcs_transaction.proto new file mode 100644 index 000000000..36127ffa6 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/bcs_transaction.proto @@ -0,0 +1,37 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +message Transaction { + string trans_hash = 1; + int64 block_number = 2; + int64 trans_index = 3; + string sender_name = 4; + string receiver_name = 5; + int64 value = 6; + // timestamp in seconds + int64 timestamp = 7; + TransactionExtraData extra_data = 8; + string status = 9; +} + +message TransactionExtraData { + // describe transaction info + string transaction_info = 1; + string dataset_uuid = 2; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/cleanup.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/cleanup.proto new file mode 100644 index 000000000..c9a76fda0 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/cleanup.proto @@ -0,0 +1,49 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +message CleanupPayload { + repeated string paths = 1; +} + +message CleanupParameter { + int64 resource_id = 1; + // Ref to ResourceType enum, here is the name value. + string resource_type = 2; + // Timestamp in seconds + int64 target_start_at = 3; + CleanupPayload payload = 4; +} + +message CleanupPb { + int64 id = 1; + // Ref to CleanupStatus enum, here is the enum name. + string state = 2; + // Timestamp in seconds + int64 target_start_at = 3; + // Timestamp in seconds + int64 completed_at = 4; + int64 resource_id = 5; + // Ref to ResourceType enum, here is the enum name. + string resource_type = 6; + CleanupPayload payload = 7; + // Timestamp in seconds + int64 created_at = 8; + // Timestamp in seconds + int64 updated_at = 9; +} \ No newline at end of file diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/common.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/common.proto index 3182d796e..4527a4463 100644 --- a/web_console_v2/api/protocols/fedlearner_webconsole/proto/common.proto +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/common.proto @@ -1,4 +1,4 @@ -/* Copyright 2021 The FedLearner Authors. All Rights Reserved. +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -14,7 +14,8 @@ */ syntax = "proto3"; - +import "google/protobuf/struct.proto"; +import "fedlearner_webconsole/proto/algorithm.proto"; package fedlearner_webconsole.proto; enum StatusCode { @@ -40,18 +41,28 @@ message Variable { enum ValueType { STRING = 0; CODE = 1; + NUMBER = 2; + LIST = 3; + OBJECT = 4; + BOOL = 5; } string name = 1; - string value = 2; + string value = 2 [deprecated = true]; AccessMode access_mode = 3; string widget_schema = 4; ValueType value_type = 5; + + google.protobuf.Value typed_value = 6; + + // used for frontend to group variables + string tag = 7; + } message GrpcSpec { + reserved 2; string authority = 1; - map<string, string> extra_headers = 2; } enum MethodType{ @@ -76,3 +87,48 @@ enum CreateJobFlag { REUSE = 2; DISABLED = 3; } + +// Message representing the set of files uploaded to fedlearner. +message UploadedFiles { + repeated UploadedFile uploaded_files = 1; +} + +// Message representing the file uploaded to fedlearner. +message UploadedFile { + // File display name with folder displayed in code editor. Folders are + // included as part of file name. Examples: + // "test/test.py". + // "syslib.bin" + string display_file_name = 1; + + // Internal store location for uploaded file. + string internal_path = 2; + + // File content that will be visible and editable for users. + // Applicable only to human-readable text files. + string content = 3; + + // Internal store parent directory for upload file. + string internal_directory = 4; +} + +message ApplicationVersion { + // Release time, e.g. Fri Jul 16 12:23:19 CST 2021 + string pub_date = 1; + // Hash of the image, e.g. f09d681b4eda01f053cc1a645fa6fc0775852a48 + string revision = 2; + // Corresponding branch name on gitlab, e.g. release-2.0.1 + string branch_name = 3; + // Version number, e.g. 2.0.1.5 + string version = 4; +} + +enum PayloadType { + ALGORITHM = 0; +} + +message Payload { + oneof data { + AlgorithmData algorithm_data = 1; + } +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/common/BUILD.bazel b/web_console_v2/api/protocols/fedlearner_webconsole/proto/common/BUILD.bazel new file mode 100644 index 000000000..21895b0a3 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/common/BUILD.bazel @@ -0,0 +1,37 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_proto_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +proto_library( + name = "proto", + srcs = [ + "extension.proto", + ], + strip_import_prefix = "/web_console_v2/api/protocols", + visibility = ["//visibility:public"], + deps = [ + "@com_google_protobuf//:descriptor_proto", + ], +) + +py_proto_library( + name = "py_proto_internal", + visibility = ["//visibility:private"], + deps = [":proto"], +) + +py_library( + name = "py_proto", + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":py_proto_internal", + ], +) + +go_proto_library( + name = "go_proto", + importpath = "fedlearner.net/proto/console/common", + protos = [":proto"], + visibility = ["//visibility:public"], +)
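The `typed_value` field added to `Variable` above replaces the now-deprecated string `value` with a `google.protobuf.Value`, so NUMBER/LIST/OBJECT/BOOL variables no longer need ad-hoc string encoding. A minimal sketch of that conversion, using only the protobuf well-known types; the `to_typed_value` helper name is ours, not from the codebase:

```python
# Sketch: wrapping plain Python values into the google.protobuf.Value
# carried by Variable.typed_value.
from google.protobuf import struct_pb2

def to_typed_value(py_value) -> struct_pb2.Value:
    value = struct_pb2.Value()
    if py_value is None:
        value.null_value = struct_pb2.NULL_VALUE
    elif isinstance(py_value, bool):  # bool must be checked before int
        value.bool_value = py_value
    elif isinstance(py_value, (int, float)):
        value.number_value = py_value
    elif isinstance(py_value, str):
        value.string_value = py_value
    elif isinstance(py_value, (list, tuple)):
        value.list_value.extend(py_value)   # ListValue accepts plain items
    elif isinstance(py_value, dict):
        value.struct_value.update(py_value)  # Struct accepts a plain dict
    else:
        raise TypeError(f'unsupported variable value: {type(py_value)}')
    return value

assert to_typed_value(3.14).number_value == 3.14
assert to_typed_value({'lr': 0.01}).struct_value['lr'] == 0.01
```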
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/common/extension.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/common/extension.proto new file mode 100644 index 000000000..ce96fe012 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/common/extension.proto @@ -0,0 +1,26 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +import "google/protobuf/descriptor.proto"; + +package fedlearner_webconsole.proto; + +extend google.protobuf.FieldOptions { + // If the field is secret, which should not be + // visible for frontend or peer backend. + bool secret = 50000; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/composer.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/composer.proto new file mode 100644 index 000000000..a87f37596 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/composer.proto @@ -0,0 +1,204 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +// NextID: 9 +message RunnerInput { + reserved 6; + // {item_type}_{item_id} + string runner_name = 1 [deprecated = true]; + // Ref to ItemType enum, here is the enum value. + string runner_type = 2; + oneof runner_input { + DatasetDeleterInput dataset_deleter_input = 3; + BatchStatsInput batch_stats_input = 4; + WorkflowCronJobInput workflow_cron_job_input = 5; + ModelSignatureParserInput model_signature_parser_input = 7; + ModelTrainingCronJobInput model_training_cron_job_input = 8; + } +} + +// NextID: 2 +message TicketList { + repeated int32 ids = 1; +} + +// NextID: 4 +message TicketHelperOutput { + map<string, TicketList> updated_ticket = 1; + map<string, TicketList> unupdated_ticket = 2; + map<string, TicketList> failed_ticket = 3; +} + +// NextID: 13 +// TODO(liuhehan): deprecate dataset_job_scheduler_output and dataset_cron_job_scheduler_output +message RunnerOutput { + string error_message = 1; + oneof runner_output { + WorkflowSchedulerOutput workflow_scheduler_output = 2; + DatasetJobSchedulerOutput dataset_job_scheduler_output = 3; + WorkflowCronJobOutput workflow_cron_job_output = 4; + CleanupCronJobOutput cleanup_cron_job_output = 5; + JobSchedulerOutput job_scheduler_output = 6; + ModelTrainingCronJobOutput model_training_cron_job_output = 7; + TeeRunnerOutput tee_runner_output = 8; + DatasetCronJobSchedulerOutput dataset_cron_job_scheduler_output = 9; + PendingProjectSchedulerOutput pending_project_scheduler_output = 10; + DatasetSchedulerOutput dataset_scheduler_output = 11; + TicketHelperOutput ticket_helper_output = 12; + } +}
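RunnerOutput carries exactly one payload in its `runner_output` oneof plus a shared `error_message`, so a consumer can dispatch generically with `WhichOneof` instead of probing each field. A hedged sketch, assuming the module generated from this file is importable as `composer_pb2` (the import path depends on the build, not on this diff):

```python
# Sketch: generic dispatch over RunnerOutput's oneof.
from fedlearner_webconsole.proto import composer_pb2

def describe(output: composer_pb2.RunnerOutput) -> str:
    kind = output.WhichOneof('runner_output')  # e.g. 'job_scheduler_output'
    if kind is None:
        return f'no payload; error_message={output.error_message!r}'
    return f'{kind}: {getattr(output, kind)}'
```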
+ +message Pipeline { + // For compatibility reasons, we need this flag to indicate the logic. + int32 version = 1; + string name = 2; + // FIFO queue, defines the execution order. + repeated RunnerInput queue = 3; +} + +message PipelineContextData { + // Index of current runner. + int32 current_runner = 1; + // Outputs of runners, key is the runner index. + map<int32, RunnerOutput> outputs = 2; +} + +message BatchStatsInput { + reserved 2; + int64 batch_id = 1; +} + +message DatasetDeleterInput { + int64 dataset_id = 1; +} + +message WorkflowSchedulerOutput { + message WorkflowExecution { + int64 id = 1; + string error_message = 2; + } + repeated WorkflowExecution executions = 1; +} + +message DatasetJobSchedulerOutput { + // Scheduled dataset jobs. + repeated int64 dataset_job_ids = 1; + // Scheduled dataset job stages. + repeated int64 dataset_job_stage_ids = 2; +} + +message CleanupCronJobOutput { + repeated int64 failed_cleanup_ids = 1; + repeated int64 succeeded_cleanup_ids = 2; +} + +message WorkflowCronJobInput { + int64 workflow_id = 1; +} + +message WorkflowCronJobOutput { + string message = 1; +} + +message ModelSignatureParserInput { + int64 serving_model_id = 1; +} + +message JobSchedulerOutput { + repeated int64 started_jobs = 1; + repeated int64 failed_to_start_jobs = 2; + // Key: job id, message is for debug purpose. + map<int64, string> messages = 3; +} + +message ModelTrainingCronJobInput { + int64 group_id = 1; +} + +message ModelTrainingCronJobOutput { + string message = 1; +} + +message SchedulerItemPb { + int64 id = 1; + string name = 2; + Pipeline pipeline = 3; + string status = 4; + string cron_config = 5; + // Timestamp in seconds + int64 last_run_at = 6; + int64 retry_cnt = 7; + // Timestamp in seconds + int64 created_at = 8; + // Timestamp in seconds + int64 updated_at = 9; + int64 deleted_at = 10; +} + +message SchedulerRunnerPb { + int64 id = 1; + int64 item_id = 2; + string status = 3; + int64 start_at = 4; + int64 end_at = 5; + Pipeline pipeline = 6; + RunnerOutput output = 7; + PipelineContextData context = 8; + int64 created_at = 9; + int64 updated_at = 10; + int64 deleted_at = 11; +} + +message TeeRunnerOutput { + repeated int64 created_group_ids = 1; + repeated int64 launched_group_ids = 2; + repeated int64 checked_group_ids = 3; + repeated int64 checked_trusted_job_ids = 4; + repeated int64 created_trusted_export_job_ids = 5; + repeated int64 launched_trusted_export_job_ids = 6; + repeated int64 created_dataset_trusted_export_job_ids = 7; +} + +message DatasetCronJobSchedulerOutput { + // Scheduled dataset jobs + repeated int64 dataset_job_ids = 1; + // Created dataset_job_stages + repeated int64 created_dataset_job_stage_ids = 2; + // update dataset auth_status cache + repeated int64 auth_status_updated_dataset_ids = 3; +} + +message PendingProjectSchedulerOutput { + // Scheduled pending projects. + repeated int64 pending_project_created_ids = 1; + repeated int64 pending_project_updated_ids = 2; + repeated string projects_created_uuids = 3; + repeated int64 pending_project_failed_ids = 4; +} + +message DatasetSchedulerOutput { + // key: executor name + map<string, ExecutorResults> executor_outputs = 1; +} + +message ExecutorResults { + repeated int64 succeeded_item_ids = 1; + repeated int64 failed_item_ids = 2; + repeated int64 skip_item_ids = 3; +}
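The Pipeline/PipelineContextData pair above encodes the composer's execution model: the queue is consumed front to back while `current_runner` indexes into it and `outputs` accumulates one RunnerOutput per finished index. A sketch of that stepping logic, under the same `composer_pb2` import assumption as above; the function names are illustrative:

```python
# Sketch: one scheduling step over Pipeline + PipelineContextData.
from fedlearner_webconsole.proto import composer_pb2

def next_input(pipeline: composer_pb2.Pipeline,
               context: composer_pb2.PipelineContextData):
    """Returns the RunnerInput to execute next, or None when the queue is done."""
    if context.current_runner >= len(pipeline.queue):
        return None
    return pipeline.queue[context.current_runner]

def complete_current(context: composer_pb2.PipelineContextData,
                     output: composer_pb2.RunnerOutput) -> None:
    """Records the runner's output under its index and advances the cursor."""
    context.outputs[context.current_runner].CopyFrom(output)
    context.current_runner += 1
```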
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/dataset.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/dataset.proto index 9ce8ec02f..a6c852d47 100644 --- a/web_console_v2/api/protocols/fedlearner_webconsole/proto/dataset.proto +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/dataset.proto @@ -1,4 +1,4 @@ -/* Copyright 2021 The FedLearner Authors. All Rights Reserved. +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,25 +15,359 @@ syntax = "proto3"; +import "fedlearner_webconsole/proto/workflow_definition.proto"; +import "fedlearner_webconsole/proto/common.proto"; +import "fedlearner_webconsole/proto/bcs_transaction.proto"; +import "fedlearner_webconsole/proto/project.proto"; + package fedlearner_webconsole.proto; -message File { - enum State { - UNSPECIFIED = 0; - COMPLETED = 1; - FAILED = 2; - } - // Absolute path - string source_path = 1; - string destination_path = 2; - // If the file is moved/copied - State state = 3; - // File size in byte - int64 size = 4; - // This will be populated if the file is failed to move/copy - string error_message = 5; +enum CronType { + DAILY = 0; + HOURLY = 1; +} + +message DataSource { + string name = 1; + // ref: DataSourceType + string type = 2; + // url like: hdfs:///home/ + string url = 3; + int64 created_at = 4; + int64 project_id = 5; + int64 id = 6; + bool is_user_upload = 7; + bool is_user_export = 8; + string creator_username = 9; + // ref: DatasetFormat + string dataset_format = 10; + // ref: StoreFormat + string store_format = 11; + // ref: DatasetType + string dataset_type = 12; + string uuid = 13; + string comment = 14; +} + +message DatasetParameter { + string name = 1; + string type = 2; + string comment = 3; + int64 project_id = 4; + string path = 5; + string kind = 6; + string format = 7; + string uuid = 8; + bool is_published = 9; + // need publish after create raw_dataset + bool need_publish = 10; + // dataset value per use, unit point + int64 value = 11; + // ref: DatasetSchemaChecker + repeated string schema_checkers = 12; + // ref: StoreFormat + string store_format = 13; + // ref: ImportType + string import_type = 14; + // ref: AuthStatus + string auth_status = 15; + string creator_username = 16; +} + +message BatchParameter { + reserved 4, 5, 6; + int64 dataset_id = 1; + string comment = 2; + string path = 3; + string file_format = 7; + int64 data_source_id = 8; + int64 event_time = 9; + // decides whether the batch folder name format is YYYYMMDD or YYYYMMDD-HH + CronType cron_type = 10; } message DataBatch { - repeated File files = 1; + reserved 13; + int64 id = 1; + int64 dataset_id = 2; + string path = 3; + int64 file_size = 4; + int64 num_example = 5; + int64 num_feature = 6; + string comment = 7; + // Timestamp in seconds + int64 created_at = 8; + // Timestamp in seconds + int64 updated_at = 9; + string name = 10; + // ref: ResourceState + string state = 11; + int64 event_time = 12; + int64 latest_parent_dataset_job_stage_id = 14; + int64 latest_analyzer_dataset_job_stage_id = 15; +} + +message DatasetRef { + reserved 5, 14, 17; + int64 id = 1; + int64 project_id = 2; + string name = 3; + // Timestamp in seconds + int64 created_at = 4; + int64 file_size = 6; + string path = 7; + string dataset_format = 8; + string comment = 9; + bool is_published = 10; + string state_frontend = 11; + int64 num_example = 12; + string uuid = 13; + string dataset_kind = 15; + string data_source = 16 [deprecated=true]; + // dataset total value, unit point + int64 total_value = 18; + string creator_username = 19; + // ref: StoreFormat + string store_format = 20; + // ref: DatasetType + string dataset_type = 21; + // ref: ImportType + string import_type = 22; + // ref: PublishFrontendState + string publish_frontend_state = 23; + // frontend auth status for all participants + // ref: AuthFrontendState + string auth_frontend_state = 24; + // auth status for local dataset + // ref: AuthStatus + string local_auth_status = 25; + // frontend auth status details + ParticipantsInfo participants_info = 26; +}
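The `cron_type` comment on BatchParameter pins down the batch-folder naming rule: DAILY batches fold `event_time` down to YYYYMMDD, HOURLY ones to YYYYMMDD-HH. A self-contained sketch of that rule; the helper name is illustrative, only the enum numbers mirror CronType above:

```python
# Sketch: batch folder names derived from event_time per CronType.
from datetime import datetime, timezone

DAILY, HOURLY = 0, 1  # CronType values from dataset.proto

def batch_folder_name(event_time_sec: int, cron_type: int) -> str:
    dt = datetime.fromtimestamp(event_time_sec, tz=timezone.utc)
    return dt.strftime('%Y%m%d-%H' if cron_type == HOURLY else '%Y%m%d')

assert batch_folder_name(0, DAILY) == '19700101'
assert batch_folder_name(3600, HOURLY) == '19700101-01'
```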
+ +// this is for compatibility reasons +// TODO(wangsen.0914): refactor in the near future +message Dataset { + reserved 5, 7, 9, 10, 11, 19, 20; + int64 id = 1; + int64 project_id = 2; + string name = 3; + int64 workflow_id = 4; + string path = 6; + // Timestamp in seconds + int64 created_at = 8; + // data_source is used for adapting fedlearner dataset_path + string data_source = 12 [deprecated=true]; + int64 file_size = 13; + int64 num_example = 14; + string comment = 15; + int64 num_feature = 16; + // Timestamp in seconds + int64 updated_at = 17; + // Timestamp in seconds + int64 deleted_at = 18; + // the dataset job that produced this dataset + int64 parent_dataset_job_id = 21; + string dataset_format = 22; + // ref: ResourceState + string state_frontend = 23; + string uuid = 24; + bool is_published = 25; + string dataset_kind = 26; + // dataset value per use, unit point + int64 value = 27; + // ref: DatasetSchemaChecker + repeated string schema_checkers = 28; + string creator_username = 29; + // ref: ImportType + string import_type = 30; + // ref: DatasetType + string dataset_type = 31; + // ref: StoreFormat + string store_format = 32; + int64 analyzer_dataset_job_id = 33; + // ref: PublishFrontendState + string publish_frontend_state = 34; + // frontend auth status for all participants + // ref: AuthFrontendState + string auth_frontend_state = 35; + // auth status for local dataset + // ref: AuthStatus + string local_auth_status = 36; + // frontend auth status details + ParticipantsInfo participants_info = 37; +} + +message DatasetLedger { + // dataset total value, unit point + int64 total_value = 1; + repeated Transaction transactions = 2; +} + +message DatasetMetaInfo { + reserved 1; + // for datasource + string datasource_type = 2; + // is_user_upload: True: datasource is created by system when user local upload + // False: not user local upload datasource + bool is_user_upload = 3; + // is_user_export: True: datasource is created by system when user export dataset + // False: not user export datasource + bool is_user_export = 4; + // dataset value per use, unit point + int64 value = 5; + // need publish after create raw_dataset + bool need_publish = 6; + // ref: DatasetSchemaChecker + repeated string schema_checkers = 7; +} + +message ParticipantDatasetRef { + // same between participants + string uuid = 1; + int64 project_id = 2; + string name = 3; + int64 participant_id = 4; + // choices: tabular and image + string format = 5; + int64 file_size = 6; + // Timestamp in seconds + int64 updated_at = 7; + // dataset value per use, unit point + int64 value = 8; + // ref: DatasetKindV2 + string dataset_kind = 9; + // ref: DatasetType + string dataset_type = 10; + // ref: AuthStatus + string auth_status = 11; +} + +message DatasetJobConfig { + // Every dataset has uuid, but is_published is decided by other info.
+ string dataset_uuid = 1; + repeated Variable variables = 2; +} + +message DatasetJobGlobalConfigs { + // key: domain_name value: DatasetJobConfig + // If this job runs locally, there's only one pair inside the map + map<string, DatasetJobConfig> global_configs = 1; +} + +// for datasetjob rpc response and datasetjob api response +message DatasetJob { + string uuid = 1; + int64 project_id = 2; + // ref: DatasetJobKind + string kind = 3; + DatasetJobGlobalConfigs global_configs = 4; + WorkflowDefinition workflow_definition = 5; + string result_dataset_uuid = 6; + string result_dataset_name = 7; + // whether participant dataset_job is ready to start + bool is_ready = 8; + int64 input_data_batch_num_example = 9; + int64 output_data_batch_num_example = 10; + int64 id = 11; + // ref: DatasetJobState + string state = 12; + int64 coordinator_id = 13; + int64 workflow_id = 14; + int64 created_at = 15; + int64 finished_at = 16; + int64 updated_at = 17; + int64 started_at = 18; + string name = 19; + // if a dataset_job supports dataset_job_stage, has_stages = True + bool has_stages = 20; + string creator_username = 21; + // ref: DatasetJobSchedulerState + string scheduler_state = 22; + TimeRange time_range = 23; + string scheduler_message = 24; +} + +// for datasetjobs api response +message DatasetJobRef { + string uuid = 1; + int64 project_id = 2; + // ref: DatasetJobKind + string kind = 3; + int64 result_dataset_id = 4; + // ref: DatasetJobState + string state = 5; + int64 created_at = 6; + string result_dataset_name = 7; + int64 id = 8; + int64 coordinator_id = 9; + string name = 10; + // if a dataset_job supports dataset_job_stage, has_stages = True + bool has_stages = 11; + string creator_username = 12; +} + +message DatasetJobContext { + // Name for batch stats scheduler item. + string batch_stats_item_name = 1 [deprecated=true]; + int64 input_data_batch_num_example = 2 [deprecated=true]; + int64 output_data_batch_num_example = 3 [deprecated=true]; + // if a dataset_job supports dataset_job_stage, we set has_stages = True + bool has_stages = 4; + bool need_create_stage = 5; + string scheduler_message = 6; +} + +message DatasetJobStageContext { + // Name for batch stats scheduler item.
+ string batch_stats_item_name = 1; + int64 input_data_batch_num_example = 2; + int64 output_data_batch_num_example = 3; + string scheduler_message = 4; +} + +// for datasetjobstage rpc response and datasetjobstage api response +message DatasetJobStage { + int64 id = 1; + string name = 2; + string uuid = 3; + string dataset_job_uuid = 4; + int64 dataset_job_id = 5; + int64 output_data_batch_id = 6; + int64 workflow_id = 7; + int64 project_id = 8; + // ref: DatasetJobState + string state = 9; + int64 event_time = 10; + DatasetJobGlobalConfigs global_configs = 11; + int64 created_at = 12; + int64 updated_at = 13; + int64 started_at = 14; + int64 finished_at = 15; + bool is_ready = 16; + WorkflowDefinition workflow_definition = 17; + // ref: DatasetJobKind + string kind = 18; + int64 input_data_batch_num_example = 19; + int64 output_data_batch_num_example = 20; + string scheduler_message = 21; +} + +// for datasetjobstages api response +message DatasetJobStageRef { + int64 id = 1; + string name = 2; + int64 dataset_job_id = 3; + int64 output_data_batch_id = 4; + int64 project_id = 5; + // ref: DatasetJobState + string state = 6; + int64 created_at = 7; + // ref: DatasetJobKind + string kind = 8; +} + +message TimeRange { + int32 days = 1; + int32 hours = 2; } diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/e2e.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/e2e.proto new file mode 100644 index 000000000..e78de0104 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/e2e.proto @@ -0,0 +1,50 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +message E2eJob { + string project_name = 1; + // the path of the Python script to be run relative to e2e_test folder; + // e.g. workflow_test/test_local_workflow.py + string script_path = 2; + // the image of FLApp/FedApp to be used for federated learning; + // e.g. artifact.bytedance.com/fedlearner/fedlearner:90d9257 + string fedlearner_image_uri = 3; + // the e2e image to be used; + // e.g. artifact.bytedance.com/fedlearner/fedlearner_e2e:2.2.3.2 + string e2e_image_uri = 4; + string job_name = 5; + string platform_endpoint = 6; + // a string used to generate names for federated workflows + string name_prefix = 7; +} + +message InitiateE2eJobsParameter { + // coordinator vs. participant + string role = 1; + // a string used to generate names for federated workflows + string name_prefix = 2; + string project_name = 3; + // e2e image URI + // e.g. artifact.bytedance.com/fedlearner/fedlearner_e2e:2.2.3.2 + string e2e_image_uri = 4; + // FedLearner image URI + // e.g. 
artifact.bytedance.com/fedlearner/fedlearner:90d9257 + string fedlearner_image_uri = 5; + string platform_endpoint = 6; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/filtering.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/filtering.proto new file mode 100644 index 000000000..b3e05a148 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/filtering.proto @@ -0,0 +1,59 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +package fedlearner_webconsole.proto; + +enum FilterOp { + // EQUAL as default + EQUAL = 0; + // If it is in a list. + IN = 1; + // If it contains a substring. + CONTAIN = 2; + GREATER_THAN = 3; + LESS_THAN = 4; +} + +message SimpleExpression { + message ListValue { + repeated double number_list = 1; + repeated string string_list = 2; + } + + string field = 1; + FilterOp op = 2; + oneof value { + bool bool_value = 3; + string string_value = 4; + double number_value = 5; + ListValue list_value = 6; + } +} + +enum FilterExpressionKind { + // SimpleExpression + SIMPLE = 0; + // Combines sub expressions by AND + AND = 1; +} + +message FilterExpression { + FilterExpressionKind kind = 1; + // basic + SimpleExpression simple_exp = 2; + // and/or + repeated FilterExpression exps = 3; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/job.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/job.proto new file mode 100644 index 000000000..cb000b9be --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/job.proto @@ -0,0 +1,58 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +syntax = "proto3"; + +import "fedlearner_webconsole/proto/workflow_definition.proto"; + +package fedlearner_webconsole.proto; + +message CrdMetaData { + // {group}/{version}, such as k8s.io.fedlearner/v1alpha1 + string api_version = 1; +} + +message JobErrorMessage { + string app = 1; + map<string, string> pods = 2; +} + +message JobPb { + int64 id = 1; + string name = 2; + JobDefinition.JobType job_type = 3; + string state = 4; + bool is_disabled = 5; + int64 workflow_id = 6; + int64 project_id = 7; + string snapshot = 8; + JobErrorMessage error_message = 9; + CrdMetaData crd_meta = 10; + string crd_kind = 11; + int64 created_at = 12; + int64 updated_at = 13; + int64 complete_at = 14; + repeated PodPb pods = 15; + // start_at is the create time of the real custom resource app in k8s. + int64 start_at = 16; +} + +message PodPb { + string name = 1; + string pod_type = 3; + string state = 4; + string pod_ip = 5; + string message = 6; + int64 creation_timestamp = 7; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/k8s.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/k8s.proto new file mode 100644 index 000000000..f6f3ecf83 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/k8s.proto @@ -0,0 +1,42 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +message Condition { + enum ConditionType { + // Use alias to make enum name consistent with others + option allow_alias = true; + succeeded = 0; + SUCCEEDED = 0; + } + enum ConditionStatus { + // Use alias to make enum name consistent with others + option allow_alias = true; + Unknown = 0; + UNKNOWN = 0; + True = 1; + TRUE = 1; + False = 2; + FALSE = 2; + } + ConditionType type = 1; + ConditionStatus status = 2; + string last_transition_time = 3; + string reason = 4; + string message = 5; +}
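The aliasing in Condition exists because raw k8s conditions report `status: "True"`/`"False"`/`"Unknown"` while the rest of these protos use upper-case enum names; `allow_alias` lets both spellings resolve to the same value. A sketch of parsing a raw condition dict, assuming the generated module is importable as `k8s_pb2` (an assumption about how these protos are compiled); the `reason` string is made up for illustration:

```python
# Sketch: parsing a raw k8s condition into the Condition message.
from google.protobuf import json_format
from fedlearner_webconsole.proto import k8s_pb2

raw = {'type': 'succeeded', 'status': 'True', 'reason': 'AppFinished'}
cond = json_format.ParseDict(raw, k8s_pb2.Condition())
# 'True' and 'TRUE' are aliases of the same number, so both compare equal.
assert cond.status == k8s_pb2.Condition.TRUE
```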
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/metrics.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/metrics.proto new file mode 100644 index 000000000..90ec4a5c1 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/metrics.proto @@ -0,0 +1,40 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + + +message Metric { + // training or validation steps + repeated double steps = 1; + // metric values + repeated double values = 2; +} + +message ConfusionMatrix { + int64 tp = 1; + int64 tn = 2; + int64 fp = 3; + int64 fn = 4; +} + +message ModelJobMetrics { + map<string, Metric> train = 1; + map<string, Metric> eval = 2; + ConfusionMatrix confusion_matrix = 3; + map<string, double> feature_importance = 4; +}
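ConfusionMatrix stores only the four raw counts; any ratios are left to consumers. For reference, the usual derived metrics in plain Python, reusing the tp/tn/fp/fn names from the message:

```python
# Sketch: standard metrics derived from ConfusionMatrix counts.
def derived_metrics(tp: int, tn: int, fp: int, fn: int) -> dict:
    total = tp + tn + fp + fn
    precision = tp / (tp + fp) if tp + fp else 0.0
    recall = tp / (tp + fn) if tp + fn else 0.0
    f1 = (2 * precision * recall / (precision + recall)
          if precision + recall else 0.0)
    return {
        'accuracy': (tp + tn) / total if total else 0.0,
        'precision': precision,
        'recall': recall,
        'f1': f1,
    }

assert derived_metrics(tp=8, tn=80, fp=2, fn=10)['precision'] == 0.8
```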
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/mmgr.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/mmgr.proto new file mode 100644 index 000000000..841256f6c --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/mmgr.proto @@ -0,0 +1,218 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +import "fedlearner_webconsole/proto/common/extension.proto"; +import "fedlearner_webconsole/proto/common.proto"; +import "fedlearner_webconsole/proto/algorithm.proto"; +import "fedlearner_webconsole/proto/workflow_definition.proto"; +import "fedlearner_webconsole/proto/project.proto"; + + +// Put in web API response, get list +message ModelJobRef { + int64 id = 1; + string name = 2; + string uuid = 3; + string role = 4; + // ref: ModelJobType + string model_job_type = 5; + string algorithm_type = 6; + string state = 7 [deprecated=true]; + bool configured = 8; + string creator_username = 9; + int64 coordinator_id = 10; + int64 created_at = 11; + int64 updated_at = 12; + int64 version = 13; + int64 project_id = 14; + int64 group_id = 15; + int64 started_at = 16; + int64 stopped_at = 17; + bool metric_is_public = 18; + int64 algorithm_id = 19; + // ref: AuthStatus + string auth_status = 20; + bool auto_update = 21; + // ref: ModelJobStatus + string status = 22; + // ref: ModelJobAuthFrontendStatus + string auth_frontend_status = 23; + ParticipantsInfo participants_info = 24; +} + + +// Put in web API response, get one +message ModelJobPb { + int64 id = 1; + string name = 2; + string uuid = 3; + string role = 4; + // ref: ModelJobType + string model_job_type = 5; + // ref: AlgorithmType + string algorithm_type = 6; + int64 algorithm_id = 7; + int64 group_id = 8; + int64 project_id = 9; + string state = 10; + bool configured = 11; + // the model used for evaluation, prediction or retraining. + int64 model_id = 12; + string model_name = 13; + int64 job_id = 14; + int64 workflow_id = 15; + int64 dataset_id = 16; + string dataset_name = 17; + string creator_username = 18 [(fedlearner_webconsole.proto.secret) = true]; + int64 coordinator_id = 19; + // the generated model + string output_model_name = 20; + WorkflowDefinition config = 21; + // timestamp in seconds + int64 created_at = 22; + int64 updated_at = 23; + int64 started_at = 24; + int64 stopped_at = 25; + string comment = 26; + int64 version = 27; + repeated ModelPb output_models = 28 [deprecated=true]; + // the name of associated job in workflow + string job_name = 29; + bool metric_is_public = 30; + // ref: AuthStatus + string auth_status = 31; + // ref: ModelJobStatus + string status = 32; + // error message during creating, configuring and starting model job + string error_message = 33; + bool auto_update = 34; + int64 data_batch_id = 35; + ModelJobGlobalConfig global_config = 36; + // ref: ModelJobAuthFrontendStatus + string auth_frontend_status = 37; + ParticipantsInfo participants_info = 38; +} + + +// Put in web API response, get list +message ModelJobGroupRef { + int64 id = 1; + string name = 2; + string uuid = 3; + string role = 4; + int64 project_id = 5; + bool authorized = 6; + string algorithm_type = 7; + bool configured = 8; + string creator_username = 9; + int64 coordinator_id = 10; + string latest_job_state = 11; + int64 latest_version = 12; + int64 created_at = 13; + int64 updated_at = 14; + string auth_frontend_status = 15; + string auth_status = 16; + ParticipantsInfo participants_info = 17; +} + + +// Put in web API response, get one
message ModelJobGroupPb { + int64 id = 1; + string name = 2; + string uuid = 3; + string role = 4; + int64 project_id = 5; + bool authorized = 6; + int64 dataset_id = 7; + string algorithm_type = 8; + int64 algorithm_project_id = 9; + int64 algorithm_id = 10; + WorkflowDefinition config = 11; + bool configured = 12; + string creator_username = 13 [(fedlearner_webconsole.proto.secret) = true]; + int64 coordinator_id = 14; + string cron_config = 15; + string latest_job_state = 16; + int64 latest_version = 17; + int64 created_at = 18; + int64 updated_at = 19; + string comment = 20; + repeated ModelJobRef model_jobs = 21 [deprecated=true]; + string auth_frontend_status = 22; + string auth_status = 23; + ParticipantsInfo participants_info = 24; + // ref: GroupAutoUpdateStatus + string auto_update_status = 25; + int64 start_data_batch_id = 26; + AlgorithmProjectList algorithm_project_uuid_list = 27; +} + +message ModelPb { + int64 id = 1; + string name = 2; + string uuid = 3; + string algorithm_type = 4; + int64 group_id = 5; + int64 project_id = 6; + int64 model_job_id = 7; + string model_job_name = 8; + int64 job_id = 9; + string job_name = 10; + int64 workflow_id = 11; + string workflow_name = 12; + int64 version = 13; + int64 created_at = 14; + int64 updated_at = 15; + string comment = 16; + string model_path = 17; +} + +message PeerModelJobPb{ + string name = 1; + string uuid = 2; + string algorithm_type = 3; + string model_job_type = 4; + string state = 5; + string group_uuid = 6; + WorkflowDefinition config = 7; + bool metric_is_public = 8; +} + +message ModelJobConfig { + string algorithm_uuid = 1; + AlgorithmParameter algorithm_parameter = 2; + repeated Variable variables = 3; +} + +message AlgorithmProjectList { + // key is the pure domain name, value is the uuid of algorithm project + map<string, string> algorithm_projects = 1; +}
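`AlgorithmProjectList` is keyed by pure domain name, which is how a coordinator tells every participant which algorithm project to resolve locally. A hedged sketch of populating it (module path, domain names, and uuids are made up for illustration):

```python
from fedlearner_webconsole.proto import mmgr_pb2  # assumed generated path

algo_list = mmgr_pb2.AlgorithmProjectList()
# One entry per participant, keyed by pure domain name.
algo_list.algorithm_projects['demo1'] = 'algo-project-uuid-1'
algo_list.algorithm_projects['demo2'] = 'algo-project-uuid-2'
```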
+ +// provide enough information when configuring template +message ModelJobGlobalConfig { + // key is the domain name + map<string, ModelJobConfig> global_config = 1; + string dataset_uuid = 2; + string model_uuid = 3; + bool auto_update = 4; + // for auto update jobs, use latest job stage uuid to get target data_batch + string dataset_job_stage_uuid = 5; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/notification.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/notification.proto new file mode 100644 index 000000000..f06349338 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/notification.proto @@ -0,0 +1,25 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +package fedlearner_webconsole.proto; + +message Notification { + // Identifiers of the receivers according to the notification system. + // E.g. for email notification, it will be email addresses. + repeated string receivers = 1; + string subject = 2; + string content = 3; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/participant.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/participant.proto new file mode 100644 index 000000000..7b72fe38d --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/participant.proto @@ -0,0 +1,44 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +message ParticipantExtra { + reserved 2; + bool is_manual_configured = 1; +} + +// NextID: 14 +message Participant { + int64 id = 1; + string name = 2; + string domain_name = 3; + string host = 4; + int32 port = 5; + // Ref to enum ParticipantType, this is the name. + string type = 6; + string comment = 7; + int64 num_project = 8; + // Timestamp in seconds + int64 last_connected_at = 9; + // Timestamp in seconds + int64 created_at = 10; + ParticipantExtra extra = 11; + // Timestamp in seconds + int64 updated_at = 12; + string pure_domain_name = 13; +}
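`Participant` is the record that web console instances exchange about each other, so a serialization round-trip is a quick sanity check that a generated build of these protos is intact. A sketch under the usual `participant_pb2` naming assumption (all field values are illustrative):

```python
from fedlearner_webconsole.proto import participant_pb2  # assumed generated path

p = participant_pb2.Participant(
    id=1,
    name='demo-participant',
    domain_name='fl-demo.com',
    host='127.0.0.1',
    port=32443,
    pure_domain_name='demo',
)
# Round-trip through the wire format.
data = p.SerializeToString()
assert participant_pb2.Participant.FromString(data) == p
```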
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/project.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/project.proto index 62574bf0f..3bf881027 100644 --- a/web_console_v2/api/protocols/fedlearner_webconsole/proto/project.proto +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/project.proto @@ -1,4 +1,4 @@ -/* Copyright 2021 The FedLearner Authors. All Rights Reserved. +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,31 +16,105 @@ syntax = "proto3"; import "fedlearner_webconsole/proto/common.proto"; +import "fedlearner_webconsole/proto/participant.proto"; package fedlearner_webconsole.proto; -message CertificateStorage { - message Certificate { - // key is file name, e.g. client/client.pem - // value is the content of certificate - map<string, string> certs = 1; - } - map<string, Certificate> domain_name_to_cert = 1; +message ProjectRef { + int64 id = 1; + string name = 2; + string participant_type = 3; + // Username of creator + string creator = 4; + // Timestamp in seconds + int64 created_at = 5; + // Related workflow count in the project + int64 num_workflow = 6; + // NOTE: this kind of proto should not be added into Ref proto, + // this is not a good practice, but we need this for compatible reason. + repeated Participant participants = 7; + ParticipantsInfo participants_info = 8; + // ref: ProjectRole + string role = 9; } -message Participant { - string name = 1; - string domain_name = 2; - // participant's address - // e.g. 127.0.0.1:32443, localhost:32443 - string url = 3; +message Project { + int64 id = 1; + string name = 2; + string token = 3; + string comment = 4; + string participant_type = 5; + // Username of creator + string creator = 6; + // Timestamp in seconds + int64 created_at = 7; + // Timestamp in seconds + int64 updated_at = 8; + repeated Variable variables = 9; + repeated Participant participants = 10; + ParticipantsInfo participants_info = 11; + ProjectConfig config = 12; + // ref: ProjectRole + string role = 13; +} + +message ProjectConfig { + enum ProjectAbilityType { + ID_ALIGNMENT = 0; + HORIZONTAL_FL = 1; + VERTICAL_FL = 3; + TEE = 4; + } + enum AuthorizationRule { + ALWAYS_ALLOW = 0; + ONCE = 1; + MANUAL = 2; + ALWAYS_REFUSE = 3; + } + reserved 1, 2, 3; repeated Variable variables = 4; - GrpcSpec grpc_spec = 5; + repeated ProjectAbilityType abilities = 5; + // key ref: Action + map<string, AuthorizationRule> action_rules = 6; + bool support_blockchain = 7; } -message Project { +message ParticipantsInfo { + // key is the pure domain name of participant + map<string, ParticipantInfo> participants_map = 1; +} + +message ParticipantInfo { string name = 1; - string token = 2; - repeated Participant participants = 3; - repeated Variable variables = 4; + // ref: PendingProjectState + string state = 2; + // ref: ProjectRole + string role = 3; + // Ref to enum ParticipantType + string type = 4; + // ref: AuthStatus + string auth_status = 5; +} + +message PendingProjectPb { + int64 id = 1; + string name = 2; + string uuid = 3; + ProjectConfig config = 4; + // ref: PendingProjectState + string state = 5; + ParticipantsInfo participants_info = 6; + // ref: ProjectRole + string role = 7; + string comment = 8; + // Username of creator + string creator_username = 9; + // Timestamp in seconds + int64 created_at = 10; + // Timestamp in seconds + int64 updated_at = 11; + // ref: TicketStatus + string ticket_status = 12; + string ticket_uuid = 13; + string participant_type = 14; } diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/review.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/review.proto new file mode 100644 index 000000000..0ddf8b2a2 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/review.proto @@ -0,0 +1,65 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +enum TicketType { + UNKOWN_TYPE = 0; + CREATE_NODE = 1; + CREATE_PARTICIPANT = 2; + CREATE_PROJECT = 3; + PUBLISH_DATASET = 4; + CREATE_PROCESSED_DATASET = 5; + CREATE_MODELJOB_GROUP = 6; + TK_CREATE_TRUSTED_JOB_GROUP = 7; + TK_CREATE_TRUSTED_EXPORT_JOB = 8; +} + +enum ReviewStrategy { + MANUAL = 0; + AUTO = 1; +} + +enum ReviewStatus { + PENDING = 0; + APPROVED = 1; + DECLINED = 2; +} + +message TicketDetails { + // uuid for review ticket resource. + string uuid = 1; +} + +message Ticket { + int64 id = 1; + string uuid = 2; + // Username of creator + string creator_username = 3; + string coordinator_pure_domain_name = 4; + TicketType type = 5; + string reviewer_username = 6; + ReviewStrategy review_strategy = 7; + ReviewStatus status = 8; + TicketDetails details = 9; + // Timestamp in seconds + int64 created_at = 10; + // Timestamp in seconds + int64 updated_at = 11; + // Timestamp in seconds + int64 deleted_at = 12; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/BUILD.bazel b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/BUILD.bazel new file mode 100644 index 000000000..5864bdefb --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/BUILD.bazel @@ -0,0 +1,67 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") +load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_grpc_library", "py_proto_library") + +proto_library( + name = "proto", + srcs = [ + "job_service.proto", + "project_service.proto", + "resource_service.proto", + "review_service.proto", + "system_service.proto", + ], + strip_import_prefix = "/web_console_v2/api/protocols", + visibility = ["//visibility:public"], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:struct_proto", + ], +) + +py_proto_library( + name = "py_proto_internal", + visibility = ["//visibility:private"], + deps = [ + ":proto", + ], +) + +py_grpc_library( + name = "py_grpc_internal", + srcs = [ + ":proto", + ], + visibility = ["//visibility:private"], + deps = [ + ":py_proto_internal", + ], +) + +py_library( + name = "py_proto", + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":py_grpc_internal", + ":py_proto_internal", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:py_proto", + ], +) + +go_proto_library( + name = "go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "fedlearner.net/proto/console/rpc/v2", + protos = [ + ":proto", + ], + visibility = ["//visibility:public"], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:go_proto", + 
"//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:go_proto", + ], +) diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/job_service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/job_service.proto new file mode 100644 index 000000000..ec14919e2 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/job_service.proto @@ -0,0 +1,160 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto.rpc.v2; + +import "google/protobuf/empty.proto"; +import "fedlearner_webconsole/proto/tee.proto"; +import "fedlearner_webconsole/proto/mmgr.proto"; +import "fedlearner_webconsole/proto/dataset.proto"; + +message InformTrustedJobGroupRequest { + string uuid = 1; + // ref: AuthStatus + string auth_status = 2; +} + +message UpdateTrustedJobGroupRequest { + string uuid = 1; + string algorithm_uuid = 2; +} + +message DeleteTrustedJobGroupRequest { + string uuid = 1; +} + +message GetTrustedJobGroupRequest { + string uuid = 1; +} + +message GetTrustedJobGroupResponse { + // ref: AuthStatus + string auth_status = 1; +} + +message InformTrustedJobRequest { + string uuid = 1; + // ref: AuthStatus + string auth_status = 2; +} + +message GetTrustedJobRequest { + string uuid = 1; +} + +message GetTrustedJobResponse { + // ref: AuthStatus + string auth_status = 1; +} + +message CreateTrustedExportJobRequest { + string uuid = 1; + string name = 2; + int64 export_count = 3; + string parent_uuid = 4; + string ticket_uuid = 5; +} + +message CreateModelJobRequest { + string name = 1; + string uuid = 2; + string group_uuid = 3; + string model_job_type = 4; + string algorithm_type = 5; + ModelJobGlobalConfig global_config = 6; + // version from model job group + int64 version = 7; +} + +message InformModelJobRequest { + string uuid = 1; + // ref: AuthStatus + string auth_status = 2; +} + +message CreateDatasetJobStageRequest { + string dataset_job_uuid = 1; + string dataset_job_stage_uuid = 2; + string name = 3; + // timestamp in seconds + // event_time == 0 when dataset_type is PSI + int64 event_time = 4; +} + +message GetDatasetJobStageRequest { + string dataset_job_stage_uuid = 1; +} + +message GetDatasetJobStageResponse { + DatasetJobStage dataset_job_stage = 1; +} + +message CreateModelJobGroupRequest { + string name = 1; + string uuid = 2; + // ref: AlgorithmType + string algorithm_type = 3; + string dataset_uuid = 4; + AlgorithmProjectList algorithm_project_list = 5; +} + +message GetModelJobRequest { + string uuid = 1; +} + +message GetModelJobGroupRequest { + string uuid = 1; +} + +message InformModelJobGroupRequest { + string uuid = 1; + // ref: AuthStatus + string auth_status = 2; +} + +message UpdateModelJobGroupRequest { + string uuid = 1; + // ref: GroupAutoUpdateStatus + string auto_update_status = 2; + string start_dataset_job_stage_uuid = 3; +} + +message 
UpdateDatasetJobSchedulerStateRequest { + string uuid = 1; + // ref: DatasetJobSchedulerState + string scheduler_state = 2; +} + +service JobService { + rpc InformTrustedJobGroup (InformTrustedJobGroupRequest) returns (google.protobuf.Empty) {} + rpc UpdateTrustedJobGroup (UpdateTrustedJobGroupRequest) returns (google.protobuf.Empty) {} + rpc DeleteTrustedJobGroup (DeleteTrustedJobGroupRequest) returns (google.protobuf.Empty) {} + rpc GetTrustedJobGroup (GetTrustedJobGroupRequest) returns (GetTrustedJobGroupResponse) {} + rpc InformTrustedJob (InformTrustedJobRequest) returns (google.protobuf.Empty) {} + rpc CreateTrustedExportJob (CreateTrustedExportJobRequest) returns (google.protobuf.Empty) {} + rpc GetTrustedJob (GetTrustedJobRequest) returns (GetTrustedJobResponse) {} + rpc CreateModelJob (CreateModelJobRequest) returns (google.protobuf.Empty) {} + rpc CreateDatasetJobStage (CreateDatasetJobStageRequest) returns (google.protobuf.Empty) {} + rpc GetDatasetJobStage (GetDatasetJobStageRequest) returns (GetDatasetJobStageResponse) {} + rpc UpdateDatasetJobSchedulerState (UpdateDatasetJobSchedulerStateRequest) returns (google.protobuf.Empty) {} + rpc CreateModelJobGroup (CreateModelJobGroupRequest) returns (google.protobuf.Empty) {} + rpc GetModelJob (GetModelJobRequest) returns (ModelJobPb) {} + rpc InformModelJob (InformModelJobRequest) returns (google.protobuf.Empty) {} + rpc GetModelJobGroup (GetModelJobGroupRequest) returns (ModelJobGroupPb) {} + rpc InformModelJobGroup (InformModelJobGroupRequest) returns (google.protobuf.Empty) {} + rpc UpdateModelJobGroup (UpdateModelJobGroupRequest) returns (google.protobuf.Empty) {} +}
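Most `JobService` methods return `google.protobuf.Empty` and identify resources purely by uuid, so a client is little more than a stub plus a request message. A hedged client sketch, assuming the conventional `*_pb2`/`*_pb2_grpc` generated modules and an illustrative address:

```python
import grpc

# Assumed generated module paths.
from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2
from fedlearner_webconsole.proto.rpc.v2 import job_service_pb2_grpc

channel = grpc.insecure_channel('localhost:1990')  # illustrative address
stub = job_service_pb2_grpc.JobServiceStub(channel)

# Fetch a peer's model job by uuid; the response is a ModelJobPb.
model_job = stub.GetModelJob(job_service_pb2.GetModelJobRequest(uuid='mj-uuid'))
print(model_job.name, model_job.status)
```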
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/project_service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/project_service.proto new file mode 100644 index 000000000..2f321334f --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/project_service.proto @@ -0,0 +1,76 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto.rpc.v2; + +import "google/protobuf/empty.proto"; +import "fedlearner_webconsole/proto/project.proto"; +import "fedlearner_webconsole/proto/workflow_definition.proto"; + +message CreatePendingProjectRequest { + string name = 1; + string uuid = 2; + ParticipantsInfo participants_info = 3; + string comment = 4; + // Username of creator in Coordinator + string creator_username = 5; + ProjectConfig config = 6; + string ticket_uuid = 8; +} + +message UpdatePendingProjectRequest { + string uuid = 1; + map<string, ParticipantInfo> participants_map = 2; +} + +message SyncPendingProjectStateRequest { + string uuid = 1; + // ACCEPTED or CLOSED + string state = 2; +} + +message CreateProjectRequest { + string uuid = 1; +} + +message DeletePendingProjectRequest { + string uuid = 1; +} + +message SendTemplateRevisionRequest { + WorkflowDefinition config = 1; + string name = 2; + string comment = 3; + // WorkflowTemplateKind enum + string kind = 4; + int64 revision_index = 5; +} + +service ProjectService { + // Only Coordinator used method + rpc CreatePendingProject (CreatePendingProjectRequest) returns (google.protobuf.Empty) {} + // Only Coordinator used method + rpc UpdatePendingProject (UpdatePendingProjectRequest) returns (google.protobuf.Empty) {} + // Only Coordinator used method + rpc DeletePendingProject (DeletePendingProjectRequest) returns (google.protobuf.Empty) {} + // Only Coordinator used method + rpc CreateProject (CreateProjectRequest) returns (google.protobuf.Empty) {} + // Only Participant used method + rpc SyncPendingProjectState (SyncPendingProjectStateRequest) returns (google.protobuf.Empty) {} + + rpc SendTemplateRevision (SendTemplateRevisionRequest) returns (google.protobuf.Empty) {} +}
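`UpdatePendingProjectRequest.participants_map` carries the whole per-participant state picture keyed by pure domain name, so the coordinator can push membership changes in one call. A sketch of building the request (module path and values are assumptions; the string enums follow the `ref:` comments above):

```python
from fedlearner_webconsole.proto.rpc.v2 import project_service_pb2  # assumed path

req = project_service_pb2.UpdatePendingProjectRequest(uuid='pending-project-uuid')
# Message-valued map entries are created on first access.
info = req.participants_map['demo1']
info.name = 'demo participant'
info.state = 'PENDING'      # ref: PendingProjectState
info.role = 'PARTICIPANT'   # ref: ProjectRole
```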
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/resource_service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/resource_service.proto new file mode 100644 index 000000000..c571c93db --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/resource_service.proto @@ -0,0 +1,84 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto.rpc.v2; + +import "google/protobuf/empty.proto"; +import "fedlearner_webconsole/proto/algorithm.proto"; +import "fedlearner_webconsole/proto/filtering.proto"; +import "fedlearner_webconsole/proto/dataset.proto"; + +message ListAlgorithmProjectsRequest { + FilterExpression filter_exp = 1; +} + +message ListAlgorithmProjectsResponse { + repeated AlgorithmProjectPb algorithm_projects = 1; +} + +message ListAlgorithmsRequest { + string algorithm_project_uuid = 1; +} + +message ListAlgorithmsResponse { + repeated AlgorithmPb algorithms = 1; +} + +message GetAlgorithmProjectRequest { + string algorithm_project_uuid = 1; +} + +message GetAlgorithmRequest { + string algorithm_uuid = 1; +} + +message GetAlgorithmFilesRequest { + string algorithm_uuid = 1; +} + +message GetAlgorithmFilesResponse { + string hash = 1; + bytes chunk = 2; +} + +message InformDatasetRequest { + string uuid = 1; + // ref: AuthStatus + string auth_status = 2; +} + +message ListDatasetsRequest { + string uuid = 1; + string kind = 2; + // ref: ResourceState + string state = 3; + TimeRange time_range = 4; +} + +message ListDatasetsResponse { + repeated ParticipantDatasetRef participant_datasets = 1; +} + +service ResourceService { + rpc ListAlgorithmProjects(ListAlgorithmProjectsRequest) returns (ListAlgorithmProjectsResponse) {} + rpc ListAlgorithms(ListAlgorithmsRequest) returns (ListAlgorithmsResponse) {} + rpc GetAlgorithmProject(GetAlgorithmProjectRequest) returns (AlgorithmProjectPb) {} + rpc GetAlgorithm(GetAlgorithmRequest) returns (AlgorithmPb) {} + rpc GetAlgorithmFiles(GetAlgorithmFilesRequest) returns (stream GetAlgorithmFilesResponse) {} + rpc InformDataset(InformDatasetRequest) returns (google.protobuf.Empty) {} + rpc ListDatasets(ListDatasetsRequest) returns (ListDatasetsResponse) {} +} \ No newline at end of file diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/review_service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/review_service.proto new file mode 100644 index 000000000..83225837c --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/review_service.proto @@ -0,0 +1,36 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto.rpc.v2; + +import "fedlearner_webconsole/proto/review.proto"; + +message CreateTicketRequest { + TicketType ttype = 1; + string creator_username = 2; + TicketDetails details = 3; +} + +message GetTicketRequest { + string uuid = 1; +} + +service ReviewService { + rpc CreateTicket (CreateTicketRequest) returns (Ticket) {} + rpc GetTicket (GetTicketRequest) returns (Ticket) {} + +} \ No newline at end of file diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/system_service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/system_service.proto new file mode 100644 index 000000000..bf56dd902 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/rpc/v2/system_service.proto @@ -0,0 +1,48 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto.rpc.v2; + +import "fedlearner_webconsole/proto/common.proto"; +import "google/protobuf/struct.proto"; + + +message CheckHealthRequest {} + +message CheckHealthResponse { + ApplicationVersion application_version = 1; + bool healthy = 2; + string message = 3; +} + +message ListFlagsRequest {} + +message ListFlagsResponse { + google.protobuf.Struct flags = 1; +} + +message CheckTeeEnabledRequest {} + +message CheckTeeEnabledResponse { + bool tee_enabled = 1; +} + +service SystemService { + rpc CheckHealth (CheckHealthRequest) returns (CheckHealthResponse) {} + rpc ListFlags (ListFlagsRequest) returns (ListFlagsResponse) {} + rpc CheckTeeEnabled (CheckTeeEnabledRequest) returns (CheckTeeEnabledResponse) {} +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/service.proto index 0a3f89350..ebb9251ff 100644 --- a/web_console_v2/api/protocols/fedlearner_webconsole/proto/service.proto +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/service.proto @@ -1,4 +1,4 @@ -/* Copyright 2021 The FedLearner Authors. All Rights Reserved. +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,8 +15,14 @@ syntax = "proto3"; +import "google/protobuf/struct.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/wrappers.proto"; import "fedlearner_webconsole/proto/common.proto"; +import "fedlearner_webconsole/proto/two_pc.proto"; import "fedlearner_webconsole/proto/workflow_definition.proto"; +import "fedlearner_webconsole/proto/serving.proto"; +import "fedlearner_webconsole/proto/dataset.proto"; package fedlearner_webconsole.proto; @@ -25,10 +31,13 @@ message ProjAuthInfo { string target_domain = 2; string auth_token = 3; } + message JobDetail{ string name = 1; string state = 2; string pods = 3; + // Timestamp in second + int64 created_at = 4; } message CheckConnectionRequest { @@ -39,11 +48,11 @@ message CheckConnectionResponse { Status status = 1; } -message PingRequest {} +message CheckPeerConnectionRequest {} -message PingResponse { +message CheckPeerConnectionResponse { Status status = 1; - string msg = 2; + ApplicationVersion application_version = 2; } message UpdateWorkflowStateRequest { @@ -65,9 +74,20 @@ message UpdateWorkflowStateResponse { int64 transaction_state = 4; } +message InvalidateWorkflowRequest { + ProjAuthInfo auth_info = 1; + string workflow_uuid = 2; +} + +message InvalidateWorkflowResponse { + Status status = 1; + bool succeeded = 2; +} + message GetWorkflowRequest { ProjAuthInfo auth_info = 1; - string workflow_name = 2; + string workflow_name = 2 [deprecated=true]; + string workflow_uuid = 3; } message GetWorkflowResponse{ @@ -87,18 +107,22 @@ message GetWorkflowResponse{ WorkflowDefinition fork_proposal_config = 12; string uuid = 13; bool metric_is_public = 14; + // True, when all jobs of workflow are completed. + bool is_finished = 15; } message UpdateWorkflowRequest { ProjAuthInfo auth_info = 1; - string workflow_name = 2; + string workflow_name = 2 [deprecated=true]; WorkflowDefinition config = 3; + string workflow_uuid = 4; } message UpdateWorkflowResponse { Status status = 1; - string workflow_name = 2; + string workflow_name = 2 [deprecated=true]; WorkflowDefinition config = 3; + string workflow_uuid =4; } message GetJobMetricsRequest { @@ -147,14 +171,164 @@ message GetJobKibanaResponse { string metrics = 2; } +message TwoPcRequest { + ProjAuthInfo auth_info = 1; + string transaction_uuid = 2; + TwoPcType type = 3; + TwoPcAction action = 4; + TransactionData data = 5; +} + +message TwoPcResponse { + Status status = 1; + string transaction_uuid = 2; + TwoPcType type = 3; + TwoPcAction action = 4; + bool succeeded = 5; + string message = 6; +} + +message ServingServiceRequest { + ProjAuthInfo auth_info = 1; + ServingServiceType operation_type = 2; + string serving_model_uuid = 3; + // same uuid among participants + string model_uuid = 4; + string serving_model_name = 5; + bool is_auto_update = 6; + bool is_manual_triggered = 7; +} + +message ServingServiceResponse { + Status status = 1; + ServingServiceResultCode code = 2; + string msg = 3; +} + +message ServingServiceInferenceRequest { + ProjAuthInfo auth_info = 1; + string serving_model_uuid = 2; + string example_id = 3; + // e.g. 
"act1_f" + repeated string expected_output = 4; +} + +message ServingServiceInferenceResponse { + Status status = 1; + ServingServiceResultCode code = 2; + string msg = 3; + // data["act1_f"]: tensorflow_serving.apis.predict_pb2.PredictResponse + google.protobuf.Struct data = 4; +} + +message SendDataRequest { + ProjAuthInfo auth_info = 1; + PayloadType type = 2; + Payload data = 3; +} + +message SendDataResponse { + bool succeeded = 1; + string message = 2; +} + +message ClientHeartBeatRequest { + string domain_name = 1; + string message = 2; +} + +message ClientHeartBeatResponse { + bool succeeded = 1; +} + +message ListParticipantDatasetsRequest { + ProjAuthInfo auth_info = 1; + string kind = 2; + string uuid = 3; +} + +message ListParticipantDatasetsResponse { + repeated ParticipantDatasetRef participant_datasets = 1; +} + +message GetModelJobRequest { + ProjAuthInfo auth_info = 1; + string uuid = 2; + bool need_metrics = 3; +} + +message GetModelJobResponse { + string name = 1; + string uuid = 2; + string algorithm_type = 3; + string model_job_type = 4; + string state = 5; + string group_uuid = 6; + WorkflowDefinition config = 7; + string metrics = 8; + google.protobuf.BoolValue metric_is_public = 9; +} + +message GetModelJobGroupRequest { + ProjAuthInfo auth_info = 1; + string uuid = 2; +} + +message GetModelJobGroupResponse { + string name = 1; + string uuid = 2; + string role = 3; + bool authorized = 4; + string algorithm_type = 5; + WorkflowDefinition config = 6; +} + +message UpdateModelJobGroupRequest { + ProjAuthInfo auth_info = 1; + string uuid = 2; + WorkflowDefinition config = 3; +} + +message UpdateModelJobGroupResponse { + string uuid = 1; + WorkflowDefinition config = 2; +} + +message GetDatasetJobRequest { + ProjAuthInfo auth_info = 1; + string uuid = 2; +} + +message GetDatasetJobResponse { + DatasetJob dataset_job = 1; +} + +message CreateDatasetJobRequest { + ProjAuthInfo auth_info = 1; + DatasetJob dataset_job = 2; + string ticket_uuid = 3; + Dataset dataset = 4; +} + service WebConsoleV2Service { rpc CheckConnection (CheckConnectionRequest) returns (CheckConnectionResponse) {} - rpc Ping (PingRequest) returns (PingResponse) {} + rpc CheckPeerConnection (CheckPeerConnectionRequest) returns (CheckPeerConnectionResponse) {} rpc UpdateWorkflowState (UpdateWorkflowStateRequest) returns (UpdateWorkflowStateResponse) {} + rpc InvalidateWorkflow (InvalidateWorkflowRequest) returns (InvalidateWorkflowResponse) {} rpc GetWorkflow (GetWorkflowRequest) returns (GetWorkflowResponse) {} rpc UpdateWorkflow(UpdateWorkflowRequest) returns (UpdateWorkflowResponse) {} rpc GetJobMetrics (GetJobMetricsRequest) returns (GetJobMetricsResponse) {} rpc GetJobEvents (GetJobEventsRequest) returns (GetJobEventsResponse) {} rpc CheckJobReady (CheckJobReadyRequest) returns (CheckJobReadyResponse) {} rpc GetJobKibana (GetJobKibanaRequest) returns (GetJobKibanaResponse) {} + rpc Run2Pc (TwoPcRequest) returns (TwoPcResponse) {} + rpc ServingServiceManagement (ServingServiceRequest) returns (ServingServiceResponse) {} + rpc ServingServiceInference (ServingServiceInferenceRequest) returns (ServingServiceInferenceResponse) {} + rpc ClientHeartBeat (ClientHeartBeatRequest) returns (ClientHeartBeatResponse) {} + rpc ListParticipantDatasets (ListParticipantDatasetsRequest) returns (ListParticipantDatasetsResponse) {} + rpc GetModelJob(GetModelJobRequest) returns (GetModelJobResponse) {} + rpc GetModelJobGroup (GetModelJobGroupRequest) returns (GetModelJobGroupResponse) {} + rpc UpdateModelJobGroup 
(UpdateModelJobGroupRequest) returns (UpdateModelJobGroupResponse) {} + rpc GetDatasetJob(GetDatasetJobRequest) returns (GetDatasetJobResponse) {} + rpc CreateDatasetJob(CreateDatasetJobRequest) returns (google.protobuf.Empty) {} } diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/serving.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/serving.proto new file mode 100644 index 000000000..d511a95c7 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/serving.proto @@ -0,0 +1,118 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +// Part1. Put in web restful API response +message ServingService { + int64 id = 1; + int64 project_id = 2; + string name = 3; + string status = 4; + int64 created_at = 5; + int64 updated_at = 6; + string instance_num_status = 7; + // can serving locally. False means need participants involved + bool is_local = 8; + ServingServiceResource resource = 9; + bool support_inference = 10; + string comment = 11; +} + +message ServingServiceInstance { + string name = 1; + string status = 2; + string cpu = 3; + string memory = 4; + int64 created_at = 5; +} + +message ServingServiceResource { + string cpu = 1; + string memory = 2; + int32 replicas = 3; +} + +message ServingServiceRemotePlatform { + string platform = 1; + string payload = 2; +} + +message ServingServiceDetail { + int64 id = 1; + int64 project_id = 2; + string name = 3; + string status = 4; + int64 created_at = 5; + int64 updated_at = 6; + string instance_num_status = 7; + bool is_local = 8; + ServingServiceResource resource = 9; + bool support_inference = 10; + string comment = 11; + int64 model_id = 12; + string model_type = 13; + reserved 14; + string endpoint = 15; + string signature = 16; + repeated ServingServiceInstance instances = 17; + int64 model_group_id = 18; + ServingServiceRemotePlatform remote_platform = 19; +} + +message ServingServiceSignatureInput { + string name = 1; + string type = 2; + // Reference: https://github.com/tensorflow/tensorflow/blob/5dcfc51118817f27fad5246812d83e5dccdc5f72/tensorflow/core/framework/tensor_shape.proto#L13 + repeated int64 dim = 3; +} + +message ServingServiceSignature { + repeated ServingServiceSignatureInput inputs = 1; +} + +// Part2. Communicate with participants +enum ServingServiceType { + SERVING_SERVICE_CREATE = 0; + SERVING_SERVICE_QUERY = 1; + SERVING_SERVICE_DESTROY = 2; + SERVING_SERVICE_UPDATE = 3; +} + +enum ServingServiceResultCode { + SERVING_SERVICE_UNKNOWN = 0; + SERVING_SERVICE_SUCCESS = 1; + SERVING_SERVICE_MODEL_NOT_FOUND = 2; + SERVING_SERVICE_NAME_DUPLICATED = 3; + SERVING_SERVICE_PENDING_ACCEPT = 4; + SERVING_SERVICE_NOT_READY = 5; + SERVING_SERVICE_NEGOTIATOR_NOT_FOUND = 6; +} + +// Part3. 
used internal +enum RemoteDeployState { + REMOTE_DEPLOY_UNKNOWN = 0; + REMOTE_DEPLOY_READY = 1; + REMOTE_DEPLOY_NOT_READY = 2; +} + +message RemoteDeployConfig { + string platform = 1; + string payload = 2; + int64 deploy_id = 3; + string deploy_name = 4; + string model_src_path = 5; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/setting.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/setting.proto new file mode 100644 index 000000000..cb61dbf69 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/setting.proto @@ -0,0 +1,57 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +import "google/protobuf/struct.proto"; +package fedlearner_webconsole.proto; + +message SystemVariables { + repeated SystemVariable variables = 1; +} + +message SystemVariable { + string name = 1; + google.protobuf.Value value = 2; + + enum ValueType{ + STRING = 0; + INT = 1; + LIST = 2; + OBJECT = 3; + } + + ValueType value_type = 4; + // When the SystemVariable's fixed is True, user can't delete the Variable or edit its name. + bool fixed = 5; +} + +message SystemInfo { + // common name parsed from x.509 certificate, e.g. fl-test.com + string domain_name = 1; + string name = 2; + // pure domain name parsed from common name, e.g. test + string pure_domain_name = 3; +} + +message SettingPb { + string uniq_key = 1; + string value = 2; +} + +message DashboardInformation { + string name = 1; + string uuid = 2; + string url = 3; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/sparkapp.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/sparkapp.proto new file mode 100644 index 000000000..377093923 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/sparkapp.proto @@ -0,0 +1,88 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; +package fedlearner_webconsole.proto; + +//ref: https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/master/manifest/crds/sparkoperator.k8s.io_sparkapplications.yaml#L1043 +message VolumeMount { + string mount_path = 1; + string name = 2; + bool read_only = 3; + string sub_path = 4; +} + +//ref: https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/master/manifest/crds/sparkoperator.k8s.io_sparkapplications.yaml#L3744 +message Volume { + message HostPath { + string path = 1; + string type = 2; + } + message PersistentVolumeClaim { + string claim_name = 1; + bool ready_only = 2; + } + string name = 1; + HostPath host_path = 2; + PersistentVolumeClaim persistent_volume_claim = 3; +} + +message Env { + string name = 1; + string value = 2; +} + +message SparkPodConfig { + uint32 cores = 1; + string memory = 2; + uint32 instances = 3; + string core_limit = 4; + repeated VolumeMount volume_mounts = 5; + repeated Env env = 6; +} + +message DynamicAllocation { + bool enabled = 1; + uint32 initial_executors = 2; + uint32 min_executors = 3; + uint32 max_executors = 4; + +} + +message SparkAppConfig { + string name = 1; + string files_path = 2; + string image_url = 3; + repeated Volume volumes = 4; + SparkPodConfig driver_config = 5; + SparkPodConfig executor_config = 6; + repeated string py_files = 7; + repeated string command = 8; + string main_application = 9; + DynamicAllocation dynamic_allocation = 10; +} + +message SparkAppInfo { + string name = 1; + string state = 2; + string namespace = 3; + repeated string command = 4; + SparkPodConfig driver = 5; + SparkPodConfig executor = 6; + string image_url = 7; + string main_application = 8; + string spark_version = 9; + string type = 10; +} \ No newline at end of file diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/tee.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/tee.proto new file mode 100644 index 000000000..2469060b2 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/tee.proto @@ -0,0 +1,158 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto; + +import "fedlearner_webconsole/proto/project.proto"; + +message DomainNameDataset { + string pure_domain_name = 1; + string dataset_uuid = 2; + string dataset_name = 3; +} + +message Resource { + int32 cpu = 1; // Unit is the number of core + int32 memory = 2; // Unit is Gi + int32 replicas = 3; +} + +message ParticipantDataset { + int64 participant_id = 1; + string uuid = 2; + string name = 3; +} + +message ParticipantDatasetList { + repeated ParticipantDataset items = 1; +} + +message TrustedJobGroupPb { + int64 id = 1; + string name = 2; + string uuid = 3; + int64 latest_version = 4; + string comment = 5; + int64 project_id = 6; + int64 created_at = 7; + int64 updated_at = 8; + int64 coordinator_id = 9; + string ticket_uuid = 10; + // ref: TicketStatus + string ticket_status = 11; + // ref: AuthStatus + string auth_status = 12; + // ref: TrustedJobStatus + string latest_job_status = 13; + // ref: TicketAuthStatus + string ticket_auth_status = 14; + repeated int64 unauth_participant_ids = 15; + int64 algorithm_id = 16; + Resource resource = 17; + int64 dataset_id = 18; + ParticipantDatasetList participant_datasets = 19; + // ref: GroupCreateStatus + string status = 20; + int64 analyzer_id = 21; + string creator_username = 22; + // TODO(liuledian): remove algorithm_id after frontend completed + string algorithm_uuid = 23; + string algorithm_project_uuid = 24; + int64 algorithm_participant_id = 25; +} + +message TrustedJobGroupRef { + int64 id = 1; + string name = 2; + int64 created_at = 3; + bool is_creator = 4; + int64 creator_id = 5; + // ref: TicketStatus + string ticket_status = 6; + // ref: AuthStatus + string auth_status = 7; + // ref: TrustedJobStatus + string latest_job_status = 8; + // ref: TicketAuthStatus + string ticket_auth_status = 9; + repeated int64 unauth_participant_ids = 10; + // ref: GroupCreateStatus + string status = 11; + bool is_configured = 12; +} + +message TrustedJobPb { + int64 id = 1; + string name = 2; + int64 job_id = 3; + string uuid = 4; + int64 version = 5; + string comment = 6; + int64 project_id = 7; + int64 trusted_job_group_id = 8; + int64 started_at = 9; + int64 finished_at = 10; + // ref: TrustedJobStatus + string status = 11; + int64 algorithm_id = 12; + Resource resource = 13; + // ref: AuthStatus + string auth_status = 14; + int64 export_dataset_id = 15; + // ref: TrustedJobType + string type = 16; + // ref: TicketStatus + string ticket_status = 17; + string ticket_uuid = 18; + // TODO(liuledian): remove algorithm_id after frontend completed + string algorithm_uuid = 19; + // ref: TicketAuthStatus + string ticket_auth_status = 20; + ParticipantsInfo participants_info = 21; + int64 created_at = 22; + int64 updated_at = 23; + int64 coordinator_id = 24; +} + +message TrustedJobRef { + int64 id = 1; + string name = 2; + int64 job_id = 3; + string comment = 4; + int64 started_at = 5; + int64 finished_at = 6; + // ref: TrustedJobStatus + string status = 7; + // ref: TrustedJobType + string type = 8; + // ref: TicketAuthStatus + string ticket_auth_status = 9; + ParticipantsInfo participants_info = 10; + int64 coordinator_id = 11; +} + +message TrustedNotification { + enum Type { + TRUSTED_JOB_GROUP_CREATE = 0; + TRUSTED_JOB_EXPORT = 1; + } + Type type = 1; + int64 id = 2; + string name = 3; + int64 created_at = 4; + int64 coordinator_id = 5; +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/BUILD.bazel 
b/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/BUILD.bazel new file mode 100644 index 000000000..cadafc750 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/BUILD.bazel @@ -0,0 +1,60 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") +load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_grpc_library", "py_proto_library") + +proto_library( + name = "proto", + srcs = [ + "service.proto", + "testing.proto", + ], + strip_import_prefix = "/web_console_v2/api/protocols", + visibility = ["//visibility:public"], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:proto", + "@com_google_protobuf//:struct_proto", + ], +) + +py_proto_library( + name = "py_proto_internal", + visibility = ["//visibility:private"], + deps = [ + ":proto", + ], +) + +py_grpc_library( + name = "py_grpc_internal", + srcs = [ + ":proto", + ], + visibility = ["//visibility:private"], + deps = [ + ":py_proto_internal", + ], +) + +py_library( + name = "py_proto", + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":py_grpc_internal", + ":py_proto_internal", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:py_proto", + ], +) + +go_proto_library( + name = "go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "fedlearner.net/proto/console/testing", + protos = [ + ":proto", + ], + visibility = ["//visibility:public"], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/common:go_proto", + ], +) diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/service.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/service.proto new file mode 100644 index 000000000..b1cd6d858 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/service.proto @@ -0,0 +1,31 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package fedlearner_webconsole.proto.testing; + +message FakeUnaryUnaryRequest {} + +message FakeUnaryUnaryResponse {} + +message FakeStreamStreamRequest {} + +message FakeStreamStreamResponse {} + +service TestService { + rpc FakeUnaryUnary (FakeUnaryUnaryRequest) returns (FakeUnaryUnaryResponse) {} + rpc FakeStreamStream (stream FakeStreamStreamRequest) returns (stream FakeStreamStreamResponse) {} +} diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/testing.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/testing.proto new file mode 100644 index 000000000..868b0c4b1 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/testing/testing.proto @@ -0,0 +1,61 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +import "google/protobuf/struct.proto"; +import "fedlearner_webconsole/proto/common/extension.proto"; + +package fedlearner_webconsole.proto.testing; + +enum Tenum { + UNSPECIFIED = 0; + E1 = 1; + E2 = 2; +} + +message PrivateInfo { + string pii = 1 [(fedlearner_webconsole.proto.secret) = true]; + string non_pii = 2; +} + +message RichMessage { + string field1 = 1; + int32 field2 = 2 [(fedlearner_webconsole.proto.secret) = true]; + repeated PrivateInfo infos = 3; + PrivateInfo pinfo = 4; + map<string, PrivateInfo> pinfo_map = 5; + map<string, string> pstring_map = 6 [(fedlearner_webconsole.proto.secret) = true]; + repeated string pstring_list = 7 [(fedlearner_webconsole.proto.secret) = true]; +} + +message Tdata { + int64 id = 1; + repeated uint64 projects = 2; + map mappers = 3; + Tenum tt = 4; +} + +message Int64Message { + int64 id = 1; + repeated Tdata data = 2; + string uuid = 3; + int32 project_id = 4; +} + +message StructWrapper { + google.protobuf.Value typed_value = 1; + google.protobuf.Struct struct = 2; +}
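`RichMessage` and `PrivateInfo` exist to exercise the `(fedlearner_webconsole.proto.secret)` field option, which marks fields that should be masked before a message is logged or echoed back. A sketch of reading that option at runtime, assuming the extension is exposed by a generated `extension_pb2` module (a shallow scrub, for illustration only):

```python
# Assumed generated module paths.
from fedlearner_webconsole.proto.common import extension_pb2
from fedlearner_webconsole.proto.testing import testing_pb2

def scrub_secret_fields(message):
    """Clears every top-level field marked with the secret option."""
    for field in message.DESCRIPTOR.fields:
        if field.GetOptions().Extensions[extension_pb2.secret]:
            message.ClearField(field.name)

info = testing_pb2.PrivateInfo(pii='card number', non_pii='nickname')
scrub_secret_fields(info)
assert info.pii == '' and info.non_pii == 'nickname'
```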
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/tree_model.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/tree_model.proto new file mode 100644 index 000000000..5498906a5 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/tree_model.proto @@ -0,0 +1,47 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//ref: https://github.com/bytedance/fedlearner/blob/master/protocols/fedlearner/common/tree_model.proto + +syntax = "proto3"; + +package fedlearner.common; + +message RegressionTreeNodeProto { + int32 node_id = 1; + int32 left_child = 2; + int32 right_child = 3; + int32 parent = 4; + bool is_owner = 5; + int32 owner_id = 6; + int32 feature_id = 7; + float threshold = 8; + float weight = 9; + bool default_left = 10; + bool is_cat_feature = 11; + repeated int32 cat_threshold = 12; +} + +message RegressionTreeProto { + repeated RegressionTreeNodeProto nodes = 1; + repeated float feature_importance = 3; +}; + +message BoostingTreeEnsambleProto { + repeated RegressionTreeProto trees = 1; + repeated float feature_importance = 2; + repeated string feature_names = 3; + repeated string cat_feature_names = 4; +}; diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/two_pc.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/two_pc.proto new file mode 100644 index 000000000..7ef4eb749 --- /dev/null +++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/two_pc.proto @@ -0,0 +1,132 @@ +/* Copyright 2023 The FedLearner Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +import "fedlearner_webconsole/proto/tee.proto"; +import "fedlearner_webconsole/proto/mmgr.proto"; + +package fedlearner_webconsole.proto; + +enum TwoPcType { + CREATE_MODEL_JOB = 0; + CONTROL_WORKFLOW_STATE = 1; + CREATE_MODEL_JOB_GROUP = 2; + LAUNCH_DATASET_JOB = 3; + LAUNCH_MODEL_JOB = 4; + STOP_DATASET_JOB = 5; + CREATE_TRUSTED_JOB_GROUP = 6; + LAUNCH_TRUSTED_JOB = 7; + STOP_TRUSTED_JOB = 8; + LAUNCH_DATASET_JOB_STAGE = 9; + STOP_DATASET_JOB_STAGE = 10; + LAUNCH_TRUSTED_EXPORT_JOB = 11; +} + +enum TwoPcAction { + PREPARE = 0; + COMMIT = 1; + ABORT = 2; +} + +message CreateModelJobGroupData { + string model_job_group_name = 1; + string model_job_group_uuid = 2; + string project_name = 3; + string algorithm_type = 4; + string coordinator_pure_domain_name = 5; + string dataset_uuid = 6; +} + +message CreateModelJobData { + string model_job_name = 1; + string model_job_type = 2; + string model_job_uuid = 3; + string group_name = 4[deprecated=true]; + string workflow_uuid = 5; + string algorithm_type = 6; + string model_uuid = 7; + string project_name = 8; + string group_uuid = 9; + int64 version = 10; + string coordinator_pure_domain_name = 11; + string dataset_uuid = 12; + ModelJobGlobalConfig global_config = 13; +} + +message TransitWorkflowStateData { + string target_state = 1; + string workflow_uuid = 2; +} + +message LaunchDatasetJobData { + string dataset_job_uuid = 1; +} + +message StopDatasetJobData { + string dataset_job_uuid = 1; +} + +message CreateTrustedJobGroupData { + string name = 1; + string project_name = 2; + string algorithm_project_uuid = 3; + string algorithm_uuid = 4; + repeated DomainNameDataset domain_name_datasets = 5; + string coordinator_pure_domain_name = 6; + string uuid = 7; + string ticket_uuid = 8;
+  string analyzer_pure_domain_name = 9;
+  string creator_username = 10;
+}
+
+message LaunchTrustedJobData {
+  string uuid = 2;
+  int64 version = 3;
+  string group_uuid = 4;
+  string initiator_pure_domain_name = 5;
+}
+
+message StopTrustedJobData {
+  string uuid = 1;
+}
+
+message LaunchTrustedExportJobData {
+  string uuid = 1;
+}
+
+message LaunchDatasetJobStageData {
+  string dataset_job_stage_uuid = 1;
+}
+
+message StopDatasetJobStageData {
+  string dataset_job_stage_uuid = 1;
+}
+
+message TransactionData {
+  oneof data {
+    CreateModelJobData create_model_job_data = 1;
+    TransitWorkflowStateData transit_workflow_state_data = 2;
+    CreateModelJobGroupData create_model_job_group_data = 3;
+    LaunchDatasetJobData launch_dataset_job_data = 4;
+    StopDatasetJobData stop_dataset_job_data = 5;
+    CreateTrustedJobGroupData create_trusted_job_group_data = 6;
+    LaunchTrustedJobData launch_trusted_job_data = 7;
+    StopTrustedJobData stop_trusted_job_data = 8;
+    LaunchDatasetJobStageData launch_dataset_job_stage_data = 9;
+    StopDatasetJobStageData stop_dataset_job_stage_data = 10;
+    LaunchTrustedExportJobData launch_trusted_export_job_data = 11;
+  }
+}
\ No newline at end of file
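Taken together, `TwoPcType`, `TwoPcAction` and the `TransactionData` oneof above model a single two-phase-commit transaction: the coordinator first sends `PREPARE` to every participant, then `COMMIT` only if all of them voted yes, and `ABORT` otherwise. A minimal sketch of building one payload with the protoc-generated Python stubs (the module name `two_pc_pb2` is assumed from the standard protoc naming convention; the surrounding gRPC call is not shown):

```python
from fedlearner_webconsole.proto import two_pc_pb2

# Exactly one payload can be set in the `data` oneof; assigning a second
# payload field would clear the first.
data = two_pc_pb2.TransactionData(
    launch_dataset_job_data=two_pc_pb2.LaunchDatasetJobData(
        dataset_job_uuid='dataset-job-uuid'))
assert data.WhichOneof('data') == 'launch_dataset_job_data'

# Phase one of the protocol; COMMIT or ABORT follows as phase two.
action = two_pc_pb2.TwoPcAction.PREPARE
transaction_type = two_pc_pb2.TwoPcType.LAUNCH_DATASET_JOB
```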
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow.proto
new file mode 100644
index 000000000..39c055e79
--- /dev/null
+++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow.proto
@@ -0,0 +1,79 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+import "google/protobuf/struct.proto";
+
+import "fedlearner_webconsole/proto/workflow_definition.proto";
+
+import "fedlearner_webconsole/proto/job.proto";
+
+package fedlearner_webconsole.proto;
+
+// Lite version of workflow.
+message WorkflowRef {
+  int64 id = 1;
+  string name = 2;
+  string uuid = 3;
+  int64 project_id = 4;
+  string state = 5;
+  // Timestamp as an integer.
+  int64 created_at = 6;
+  bool forkable = 7;
+  // Whether the metric is public for peers or not.
+  bool metric_is_public = 8;
+  // Extra info in JSON format.
+  string extra = 9 [deprecated = true];
+  bool favour = 10;
+}
+
+
+message WorkflowPb {
+  message TemplateInfo {
+    int64 id = 1;
+    bool is_modified = 2;
+    string name = 3;
+    int64 revision_index = 4;
+  }
+  int64 id = 1;
+  string name = 2;
+  string uuid = 3;
+  int64 project_id = 4;
+  string state = 5;
+  // Timestamp as an integer.
+  int64 created_at = 6;
+  bool forkable = 7;
+  // Whether the metric is public for peers or not.
+  bool metric_is_public = 8;
+  bool favour = 10;
+  int64 template_revision_id = 11;
+  WorkflowDefinition config = 12;
+  WorkflowTemplateEditorInfo editor_info = 13;
+  int64 template_id = 14;
+  string comment = 15;
+  repeated int64 job_ids = 16;
+  repeated int64 create_job_flags = 17;
+  bool is_local = 18;
+  int64 forked_from = 19;
+  repeated int64 peer_create_job_flags = 20;
+  int64 start_at = 21;
+  int64 stop_at = 22;
+  int64 updated_at = 23;
+  string cron_config = 24;
+  string creator = 25;
+  TemplateInfo template_info = 26;
+  repeated JobPb jobs = 27;
+}
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_definition.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_definition.proto
index 27f58fab6..5895d314a 100644
--- a/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_definition.proto
+++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_definition.proto
@@ -1,4 +1,4 @@
-/* Copyright 2021 The FedLearner Authors. All Rights Reserved.
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,6 +17,8 @@ syntax = "proto3";
 
 import "fedlearner_webconsole/proto/common.proto";
 
+import "google/protobuf/struct.proto";
+
 package fedlearner_webconsole.proto;
 
 
@@ -36,6 +38,9 @@ message JobDefinition {
     TREE_MODEL_TRAINING = 5;
     NN_MODEL_EVALUATION = 6;
     TREE_MODEL_EVALUATION = 7;
+    TRANSFORMER = 8;
+    ANALYZER = 9;
+    CUSTOMIZED = 10;
   }
 
   string name = 1;
@@ -44,7 +49,8 @@ message JobDefinition {
   repeated Variable variables = 4;
   repeated JobDependency dependencies = 5;
   string yaml_template = 6;
-  bool expert_mode = 7;
+  // If true, the job's latest edit was made in easy editing mode.
+  bool easy_mode = 8;
 }
 
 message Slot {
@@ -60,11 +66,24 @@ message Slot {
     JOB_PROPERTY = 6;
   }
   string reference = 1;
-  string default = 2;
+
+  // Will be deleted in the future.
+  string default = 2 [deprecated = true];
+
   string help = 3;
   ReferenceType reference_type = 4;
   string label = 5;
+  google.protobuf.Value default_value = 6;
+  enum ValueType {
+    STRING = 0;
+    NUMBER = 1;
+    LIST = 2;
+    OBJECT = 3;
+    BOOL = 4;
+    INT = 5;
+  }
+  ValueType value_type = 7;
 }
 
 message YamlEditorInfo {
@@ -79,8 +98,8 @@ message WorkflowTemplateEditorInfo {
 }
 
 message WorkflowDefinition {
+  reserved 2;
   string group_alias = 1;
-  bool is_left = 2;
   repeated Variable variables = 3;
   repeated JobDefinition job_definitions = 4;
 }
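The `default_value`/`value_type` pair added to `Slot` above replaces the deprecated string `default` with a typed default backed by `google.protobuf.Value`. A minimal sketch of filling it from Python (generated module name assumed per the standard protoc convention; the reference name is illustrative):

```python
from fedlearner_webconsole.proto import workflow_definition_pb2

# A slot with a typed default instead of the deprecated string `default`.
slot = workflow_definition_pb2.Slot(
    reference='Slot_image_version',  # illustrative reference name
    label='image version')
slot.value_type = workflow_definition_pb2.Slot.ValueType.STRING
# Assigning string_value selects the matching kind in the Value oneof.
slot.default_value.string_value = 'v2.0.0'
```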
diff --git a/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_template.proto b/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_template.proto
new file mode 100644
index 000000000..2e76fec69
--- /dev/null
+++ b/web_console_v2/api/protocols/fedlearner_webconsole/proto/workflow_template.proto
@@ -0,0 +1,91 @@
+/* Copyright 2023 The FedLearner Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+import "fedlearner_webconsole/proto/workflow_definition.proto";
+
+package fedlearner_webconsole.proto;
+
+// Lite version of workflow template.
+message WorkflowTemplateRef {
+  reserved 5;
+  int64 id = 1;
+  string name = 2;
+  string comment = 3;
+  string group_alias = 4;
+  int32 kind = 6;
+  string coordinator_pure_domain_name = 7;
+}
+
+message WorkflowTemplateRevisionRef {
+  int64 id = 1;
+  int32 revision_index = 2;
+  string comment = 3;
+  int64 template_id = 4;
+  int64 created_at = 5;
+}
+
+message WorkflowTemplateRevisionPb {
+  int64 id = 1;
+  int32 revision_index = 2;
+  string comment = 3;
+  int64 template_id = 4;
+  int64 created_at = 5;
+  WorkflowDefinition config = 6;
+  WorkflowTemplateEditorInfo editor_info = 7;
+  bool is_local = 8;
+  string name = 9;
+}
+
+// A message used for downloading to JSON.
+// It removes a lot of useless fields from `WorkflowTemplateRevisionPb`, so please
+// make sure this proto stays compatible with `WorkflowTemplateRevisionPb`.
+message WorkflowTemplateRevisionJson {
+  reserved 1, 4, 5, 8;
+  int32 revision_index = 2;
+  string comment = 3;
+  WorkflowDefinition config = 6;
+  WorkflowTemplateEditorInfo editor_info = 7;
+  string name = 9;
+}
+
+message WorkflowTemplatePb {
+  reserved 5;
+  int64 id = 1;
+  string name = 2;
+  string comment = 3;
+  string group_alias = 4;
+  int32 kind = 6;
+  WorkflowDefinition config = 7;
+  WorkflowTemplateEditorInfo editor_info = 8;
+  string creator_username = 9;
+  int64 created_at = 10;
+  int64 updated_at = 11;
+  bool is_local = 12;
+  string coordinator_pure_domain_name = 13;
+}
+
+// A message used for downloading to JSON.
+// It removes a lot of useless fields from `WorkflowTemplatePb`, so please
+// make sure this proto stays compatible with `WorkflowTemplatePb`.
+message WorkflowTemplateJson {
+  reserved 1, 5, 6, 9, 10, 11, 12;
+  string name = 2;
+  string comment = 3;
+  string group_alias = 4;
+  WorkflowDefinition config = 7;
+  WorkflowTemplateEditorInfo editor_info = 8;
+}
diff --git a/web_console_v2/api/requirements.txt b/web_console_v2/api/requirements.txt
index 28ee70aaf..2825712a9 100644
--- a/web_console_v2/api/requirements.txt
+++ b/web_console_v2/api/requirements.txt
@@ -1,9 +1,9 @@
 Flask==1.1.2
-Flask-Migrate==2.7.0
+Flask-Migrate==3.1.0
 Flask-HTTPAuth==4.2.0
 flask-restful==0.3.8
 passlib==1.7.4
-flask-jwt-extended>=4.0.0
+PyJWT~=2.0.1
 Flask-Testing==0.8.1
 gunicorn==20.0.4
 # grpc-related stuff has to be 1.32.0 to be compatible with tensorflow
@@ -19,15 +19,36 @@ flatten-dict==0.3.0
 pymysql==1.0.2
 setuptools==41.0.0
 tensorflow==1.15.2
-pyopenssl==20.0.1
+pyopenssl==22.0.0
+# The latest matplotlib (3.5.0) has a dependency issue, and mpld3 will pull in the latest matplotlib.
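+# The explicit pin below keeps mpld3 from upgrading matplotlib to 3.5.x.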
+matplotlib==3.3.4
 mpld3==0.5.2
 python-slugify==4.0.1
-SQLAlchemy==1.3.20
+SQLAlchemy==1.4.23
 prison==0.1.3
 tensorflow-io==0.8.1
-pyspark==3.1.1
-# Lint
-pylint==2.4.4
-pylint-quotes==0.2.1
 setproctitle==1.2.2
-mypy-protobuf==2.4
\ No newline at end of file
+mypy-protobuf==2.4
+werkzeug==0.16.0
+croniter==1.0.15
+freezegun~=1.1.0
+# serving stuff
+tensorflow-serving-api==1.15.0
+xmltodict==0.12.0
+simpleeval==0.9.10
+webargs==8.0.1
+marshmallow==3.13.0
+flasgger==0.9.5
+apispec_webframeworks==0.5.2
+pyparsing==3.0.7
+opentelemetry-api==1.10.0
+opentelemetry-sdk==1.10.0
+opentelemetry-instrumentation==0.29b0
+opentelemetry.instrumentation.flask==0.29b1
+opentelemetry-exporter-otlp==1.10.0
+fsspec==2022.1.0
+# pyarrow is required by fsspec to access HDFS files
+pyarrow==6.0.0
+# supervisor is used to monitor multiple processes
+supervisor==4.2.4
diff --git a/web_console_v2/api/requirements_dev.txt b/web_console_v2/api/requirements_dev.txt
new file mode 100644
index 000000000..d2fc05188
--- /dev/null
+++ b/web_console_v2/api/requirements_dev.txt
@@ -0,0 +1,5 @@
+yapf==0.31.0
+paramunittest
+# Lint
+pylint==2.12.2
+pylint-quotes==0.2.3
diff --git a/web_console_v2/api/rpc_server.py b/web_console_v2/api/rpc_server.py
new file mode 100644
index 000000000..e6d415ac4
--- /dev/null
+++ b/web_console_v2/api/rpc_server.py
@@ -0,0 +1,25 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from envs import Envs
+from fedlearner_webconsole.utils.hooks import pre_start_hook
+from fedlearner_webconsole.rpc.server import rpc_server
+
+if __name__ == '__main__':
+    pre_start_hook()
+    # Stop any previous instance so the start below begins from a clean state.
+    rpc_server.stop()
+    rpc_server.start(Envs.GRPC_LISTEN_PORT)
+    rpc_server.wait_for_termination()
diff --git a/web_console_v2/api/run_coverage.sh b/web_console_v2/api/run_coverage.sh
index ba66b6df6..909a504a6 100755
--- a/web_console_v2/api/run_coverage.sh
+++ b/web_console_v2/api/run_coverage.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/web_console_v2/api/run_dev.sh b/web_console_v2/api/run_dev.sh
deleted file mode 100755
index 6dd179682..000000000
--- a/web_console_v2/api/run_dev.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2021 The FedLearner Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -e -export FLASK_ENV=development - -# Migrates DB schemas -FLASK_APP=command:app flask create-db -# Loads initial data -FLASK_APP=command:app flask create-initial-data -# Runs flask -FLASK_APP=server:app flask run --eager-loading --port=1991 --host=0.0.0.0 diff --git a/web_console_v2/api/run_prod.sh b/web_console_v2/api/run_prod.sh deleted file mode 100755 index d6a9e2157..000000000 --- a/web_console_v2/api/run_prod.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash -# -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -e - -# Adds root directory to python path to make the modules findable. -ROOT_DIRECTORY=$(dirname "$0") -export PYTHONPATH=$PYTHONPATH:"$ROOT_DIRECTORY" -python3 es_configuration.py -# Iterates arguments -while test $# -gt 0 -do - case "$1" in - --migrate) - echo "Migrating DB" - # Migrates DB schemas - FLASK_APP=command:app flask db upgrade - ;; - esac - shift -done - -# Loads initial data -FLASK_APP=command:app flask create-initial-data - -export FEDLEARNER_WEBCONSOLE_LOG_DIR=/var/log/fedlearner_webconsole/ -mkdir -p $FEDLEARNER_WEBCONSOLE_LOG_DIR -gunicorn server:app \ - --config="$ROOT_DIRECTORY/gunicorn_config.py" diff --git a/web_console_v2/api/server.py b/web_console_v2/api/server.py index bdb8e9e81..19c7b80cf 100644 --- a/web_console_v2/api/server.py +++ b/web_console_v2/api/server.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,12 +13,18 @@ # limitations under the License. # coding: utf-8 +import logging + +from checks import validity_check from config import Config + from fedlearner_webconsole.app import create_app -from fedlearner_webconsole.utils import middlewares -from fedlearner_webconsole.utils.hooks import pre_start_hook +from fedlearner_webconsole.middleware.middlewares import wsgi_middlewares -pre_start_hook() +logging.info('Initializing WebConsole Api...') app = create_app(Config()) + # Middlewares -app = middlewares.init_app(app) +app = wsgi_middlewares.init_app(app) +validity_check() +logging.info('Initializing WebConsole Api... [DONE]') diff --git a/web_console_v2/api/test/__init__.py b/web_console_v2/api/test/__init__.py deleted file mode 100644 index 3e28547fe..000000000 --- a/web_console_v2/api/test/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 diff --git a/web_console_v2/api/test/auth_test.py b/web_console_v2/api/test/auth_test.py deleted file mode 100644 index 995a90458..000000000 --- a/web_console_v2/api/test/auth_test.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import unittest -from http import HTTPStatus - -from fedlearner_webconsole.utils.base64 import base64encode -from testing.common import BaseTestCase -from fedlearner_webconsole.auth.models import State, User -from fedlearner_webconsole.db import db_handler as db - - -class AuthApiTest(BaseTestCase): - def test_get_all_users(self): - deleted_user = User(username='deleted_one', - email='who.knows@hhh.com', - state=State.DELETED) - with db.session_scope() as session: - session.add(deleted_user) - session.commit() - - resp = self.get_helper('/api/v2/auth/users') - self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) - - self.signin_as_admin() - - resp = self.get_helper('/api/v2/auth/users') - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(len(self.get_response_data(resp)), 2) - - def test_partial_update_user_info(self): - self.signin_as_admin() - resp = self.get_helper('/api/v2/auth/users') - resp_data = self.get_response_data(resp) - user_id = resp_data[0]['id'] - admin_id = resp_data[1]['id'] - - self.signin_helper() - resp = self.patch_helper('/api/v2/auth/users/10', data={}) - self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) - - resp = self.patch_helper(f'/api/v2/auth/users/{user_id}', - data={ - 'email': 'a_new_email@bytedance.com', - }) - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual( - self.get_response_data(resp).get('email'), - 'a_new_email@bytedance.com') - - resp = self.patch_helper(f'/api/v2/auth/users/{admin_id}', - data={ - 'name': 'cannot_modify', - }) - self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) - - # now we are signing in as admin - self.signin_as_admin() - resp = self.patch_helper(f'/api/v2/auth/users/{user_id}', - data={ - 'role': 'ADMIN', - }) - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(self.get_response_data(resp).get('role'), 'ADMIN') - - resp = self.patch_helper(f'/api/v2/auth/users/{user_id}', - data={ - 'password': base64encode('fl@1234.'), - }) - self.assertEqual(resp.status_code, HTTPStatus.OK) - - def test_create_new_user(self): - new_user = { - 'username': 'fedlearner', - 'password': 'fedlearner', - 'email': 'hello@bytedance.com', - 'role': 'USER', - 'name': 'codemonkey', - } - resp = 
self.post_helper('/api/v2/auth/users', data=new_user) - self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) - - self.signin_as_admin() - illegal_cases = ['aaaaaaaa', '11111111', '!@#$%^[]', - 'aaaA1111', 'AAAa!@#$', '1111!@#-', - 'aa11!@', 'fl@123.', - 'fl@1234567890abcdefg.'] - legal_case = 'fl@1234.' - - for case in illegal_cases: - new_user['password'] = base64encode(case) - resp = self.post_helper(f'/api/v2/auth/users', data=new_user) - self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) - - new_user['password'] = base64encode(legal_case) - resp = self.post_helper(f'/api/v2/auth/users', data=new_user) - self.assertEqual(resp.status_code, HTTPStatus.CREATED) - self.assertEqual( - self.get_response_data(resp).get('username'), 'fedlearner') - - # test_repeat_create - resp = self.post_helper(f'/api/v2/auth/users', data=new_user) - self.assertEqual(resp.status_code, HTTPStatus.CONFLICT) - - def test_delete_user(self): - self.signin_as_admin() - resp = self.get_helper('/api/v2/auth/users') - resp_data = self.get_response_data(resp) - user_id = resp_data[0]['id'] - admin_id = resp_data[1]['id'] - - self.signin_helper() - resp = self.delete_helper(url=f'/api/v2/auth/users/{user_id}') - self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) - - self.signin_as_admin() - - resp = self.delete_helper(url=f'/api/v2/auth/users/{admin_id}') - self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST) - - resp = self.delete_helper(url=f'/api/v2/auth/users/{user_id}') - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(self.get_response_data(resp).get('username'), 'ada') - - def test_get_specific_user(self): - resp = self.get_helper(url='/api/v2/auth/users/10086') - self.assertEqual(resp.status_code, HTTPStatus.FORBIDDEN) - - resp = self.get_helper(url='/api/v2/auth/users/1') - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(self.get_response_data(resp).get('username'), 'ada') - - self.signin_as_admin() - - resp = self.get_helper(url='/api/v2/auth/users/1') - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(self.get_response_data(resp).get('username'), 'ada') - - resp = self.get_helper(url='/api/v2/auth/users/10086') - self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND) - - def test_signout(self): - self.signin_helper() - - resp = self.delete_helper(url='/api/v2/auth/signin') - self.assertEqual(resp.status_code, HTTPStatus.OK, resp.json) - - resp = self.get_helper(url='/api/v2/auth/users/1') - self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/app_test.py b/web_console_v2/api/test/fedlearner_webconsole/app_test.py deleted file mode 100644 index 2a04ec419..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/app_test.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import unittest -from http import HTTPStatus - -from testing.common import BaseTestCase - - -class ExceptionHandlersTest(BaseTestCase): - def test_not_found(self): - response = self.get_helper('/api/v2/not_found', - use_auth=False) - self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/composer/common.py b/web_console_v2/api/test/fedlearner_webconsole/composer/common.py deleted file mode 100644 index f189f23ce..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/composer/common.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import logging -import time -from typing import Tuple - -from fedlearner_webconsole.composer.interface import IItem, ItemType, IRunner -from fedlearner_webconsole.composer.models import RunnerStatus, Context - - -class Task(IItem): - def __init__(self, task_id: int): - self.id = task_id - - def type(self) -> ItemType: - return ItemType.TASK - - def get_id(self) -> int: - return self.id - - -# used in lambda -def _raise(ex): - raise ex - - -def sleep_and_log(id: int, sec: int): - time.sleep(sec) - logging.info(f'id-{id}, sleep {sec}') - - -RunnerCases = [ - # normal: 1, 2, 3 - { - 'id': 1, - 'start': (lambda _: True), - 'result': (lambda _: sleep_and_log(1, 1) or (RunnerStatus.DONE, {})), - }, - { - 'id': 2, - 'start': (lambda _: True), - 'result': (lambda _: sleep_and_log(2, 1) or (RunnerStatus.DONE, {})), - }, - { - 'id': 3, - 'start': (lambda _: True), - 'result': (lambda _: sleep_and_log(3, 1) or (RunnerStatus.DONE, {})), - }, - # failed: 4, 5, 6 - { - 'id': 4, - 'start': (lambda _: sleep_and_log(4, 5) and False), - 'result': (lambda _: (RunnerStatus.FAILED, {})), - }, - { - 'id': 5, - 'start': (lambda _: _raise(TimeoutError)), - 'result': (lambda _: (RunnerStatus.FAILED, {})), - }, - { - 'id': 6, - 'start': (lambda _: sleep_and_log(6, 10) and False), - 'result': (lambda _: (RunnerStatus.FAILED, {})), - }, - # busy: 7, 8, 9 - { - 'id': 7, - 'start': (lambda _: True), - 'result': (lambda _: sleep_and_log(7, 15) or (RunnerStatus.DONE, {})), - }, - { - 'id': 8, - 'start': (lambda _: True), - 'result': (lambda _: sleep_and_log(8, 15) or (RunnerStatus.DONE, {})), - }, - { - 'id': 9, - 'start': (lambda _: True), - 'result': (lambda _: sleep_and_log(9, 15) or (RunnerStatus.DONE, {})), - }, -] - - -class TaskRunner(IRunner): - def __init__(self, task_id: int): - self.task_id = task_id - - def start(self, context: Context): - logging.info( - f"[mock_task_runner] {self.task_id} started, ctx: {context}") - RunnerCases[self.task_id - 1]['start'](context) - - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - result = RunnerCases[self.task_id - 1]['result'](context) - logging.info(f"[mock_task_runner] {self.task_id} done result {result}") - return result - - -class InputDirTaskRunner(IRunner): - def 
__init__(self, task_id: int): - self.task_id = task_id - self.input_dir = '' - - def start(self, context: Context): - self.input_dir = context.data.get(str(self.task_id), - {}).get('input_dir', '') - logging.info( - f'[mock_inputdir_task_runner] start, input_dir: {self.input_dir}') - - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - s = { - 1: RunnerStatus.RUNNING, - 2: RunnerStatus.DONE, - 3: RunnerStatus.FAILED, - } - return s.get(self.task_id, RunnerStatus.RUNNING), {} diff --git a/web_console_v2/api/test/fedlearner_webconsole/composer/composer_test.py b/web_console_v2/api/test/fedlearner_webconsole/composer/composer_test.py deleted file mode 100644 index b377d9b6d..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/composer/composer_test.py +++ /dev/null @@ -1,225 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import logging -import pprint -import sys -import threading -import time -import unittest - -from testing.common import BaseTestCase -from test.fedlearner_webconsole.composer.common import TaskRunner, Task, InputDirTaskRunner - -from fedlearner_webconsole.db import db -from fedlearner_webconsole.composer.composer import Composer, ComposerConfig -from fedlearner_webconsole.composer.models import ItemStatus, RunnerStatus, SchedulerItem, \ - SchedulerRunner -from fedlearner_webconsole.composer.interface import ItemType - - -class ComposerTest(BaseTestCase): - runner_fn = { - ItemType.TASK.value: TaskRunner, - } - - class Config(BaseTestCase.Config): - STORAGE_ROOT = '/tmp' - START_SCHEDULER = False - START_GRPC_SERVER = False - - def test_normal_items(self): - logging.info('+++++++++++++++++++++++++++ test normal items') - cfg = ComposerConfig(runner_fn=self.runner_fn, - name='scheduler for normal items') - composer = Composer(config=cfg) - composer.run(db_engine=db.engine) - normal_items = [Task(1), Task(2), Task(3)] - name = 'normal items' - composer.collect(name, normal_items, {}) - self.assertEqual(1, len(db.session.query(SchedulerItem).all()), - 'incorrect items') - # test unique item name - composer.collect(name, normal_items, {}) - self.assertEqual(1, len(db.session.query(SchedulerItem).all()), - 'incorrect items') - time.sleep(20) - self.assertEqual(1, len(db.session.query(SchedulerRunner).all()), - 'incorrect runners') - self.assertEqual(RunnerStatus.DONE.value, - composer.get_recent_runners(name)[-1].status, - 'should finish runner') - # finish item - composer.finish(name) - self.assertEqual(ItemStatus.OFF, composer.get_item_status(name), - 'should finish item') - composer.stop() - - def test_failed_items(self): - logging.info('+++++++++++++++++++++++++++ test failed items') - cfg = ComposerConfig(runner_fn=self.runner_fn, - name='scheduler for failed items') - composer = Composer(config=cfg) - composer.run(db_engine=db.engine) - failed_items = [Task(4), Task(5), Task(6)] - name = 'failed items' - composer.collect(name, failed_items, {}) - 
self.assertEqual(1, len(db.session.query(SchedulerItem).all()), - 'incorrect failed items') - time.sleep(30) - self.assertEqual(1, len(db.session.query(SchedulerRunner).all()), - 'incorrect runners') - self.assertEqual(RunnerStatus.FAILED.value, - composer.get_recent_runners(name)[-1].status, - 'should finish it') - composer.stop() - - def test_busy_items(self): - logging.info('+++++++++++++++++++++++++++ test busy items') - cfg = ComposerConfig(runner_fn=self.runner_fn, - name='scheduler for busy items', - worker_num=1) - composer = Composer(config=cfg) - composer.run(db_engine=db.engine) - busy_items = [Task(7), Task(8), Task(9)] - name = 'busy items' - composer.collect(name, busy_items, {}) - self.assertEqual(1, len(db.session.query(SchedulerItem).all()), - 'incorrect busy items') - time.sleep(20) - self.assertEqual(1, len(db.session.query(SchedulerRunner).all()), - 'incorrect runners') - self.assertEqual(RunnerStatus.RUNNING.value, - composer.get_recent_runners(name)[-1].status, - 'should finish it') - composer.stop() - time.sleep(5) - - def test_interval_items(self): - logging.info( - '+++++++++++++++++++++++++++ test finishing interval items') - cfg = ComposerConfig(runner_fn=self.runner_fn, - name='finish normal items') - composer = Composer(config=cfg) - composer.run(db_engine=db.engine) - name = 'cronjob' - # test invalid interval - self.assertRaises(ValueError, - composer.collect, - name, [Task(1)], {}, - interval=9) - - composer.collect(name, [Task(1)], {}, interval=10) - self.assertEqual(1, len(db.session.query(SchedulerItem).all()), - 'incorrect items') - time.sleep(20) - self.assertEqual(2, len(db.session.query(SchedulerRunner).all()), - 'incorrect runners') - self.assertEqual(RunnerStatus.DONE.value, - composer.get_recent_runners(name)[-1].status, - 'should finish runner') - composer.finish(name) - self.assertEqual(ItemStatus.OFF, composer.get_item_status(name), - 'should finish item') - composer.stop() - - def test_multiple_composers(self): - logging.info('+++++++++++++++++++++++++++ test multiple composers') - cfg = ComposerConfig(runner_fn=self.runner_fn, - name='scheduler for normal items') - composer1 = Composer(cfg) - composer2 = Composer(cfg) - c1 = threading.Thread(target=composer1.run, args=[db.engine]) - c1.start() - c2 = threading.Thread(target=composer2.run, args=[db.engine]) - c2.start() - time.sleep(15) - composer1.stop() - composer2.stop() - - def test_runner_cache(self): - logging.info('+++++++++++++++++++++++++++ test runner cache') - composer = Composer( - config=ComposerConfig(runner_fn={ - ItemType.TASK.value: InputDirTaskRunner, - }, - name='runner cache')) - composer.run(db_engine=db.engine) - composer.collect('item1', [Task(1)], { - 1: { - 'input_dir': 'item1_input_dir', - }, - }) - composer.collect('item2', [Task(1)], { - 1: { - 'input_dir': 'item2_input_dir', - }, - }) - time.sleep(15) - self.assertEqual(2, len(db.session.query(SchedulerItem).all()), - 'incorrect items') - self.assertEqual(2, len(composer.runner_cache.data), - 'should be equal runner number') - pprint.pprint(composer.runner_cache) - self.assertEqual( - 'item1_input_dir', - composer.runner_cache.find_runner(1, 'task_1').input_dir, - 'should be item1_input_dir') - self.assertEqual( - 'item2_input_dir', - composer.runner_cache.find_runner(2, 'task_1').input_dir, - 'should be item2_input_dir') - # test delete cache item - composer.collect( - 'item3', [Task(2), Task(3)], { - 2: { - 'input_dir': 'item3_input_dir_2', - }, - 3: { - 'input_dir': 'item3_input_dir_3', - } - }) - time.sleep(15) 
- self.assertEqual(2, len(composer.runner_cache.data), - 'should be equal runner number') - composer.stop() - - def test_patch_item_attr(self): - test_name = 'test' - - config = ComposerConfig( - runner_fn={ItemType.TASK.value: InputDirTaskRunner}, - name='test_cronjob') - with self.composer_scope(config=config) as composer: - composer.collect(test_name, [Task(1)], { - 1: { - 'input_dir': 'item1_input_dir', - }, - }, interval=60) - composer.patch_item_attr(name=test_name, key='interval_time', value=30) - item = db.session.query(SchedulerItem).filter( - SchedulerItem.name == test_name).one() - self.assertEqual(item.interval_time, 30) - - with self.assertRaises(ValueError): - composer.patch_item_attr(name=test_name, - key='create_at', - value='2021-04-01 00:00:00') - - -if __name__ == '__main__': - logging.basicConfig(stream=sys.stderr, level=logging.INFO) - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/composer/op_locker_test.py b/web_console_v2/api/test/fedlearner_webconsole/composer/op_locker_test.py deleted file mode 100644 index ff5dbf624..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/composer/op_locker_test.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import logging -import sys -import unittest - -from testing.common import BaseTestCase, OptimisticLock -from fedlearner_webconsole.db import db -from fedlearner_webconsole.composer.op_locker import OpLocker - - -class OpLockTest(BaseTestCase): - class Config(BaseTestCase.Config): - STORAGE_ROOT = '/tmp' - START_SCHEDULER = False - START_GRPC_SERVER = False - START_COMPOSER = False - - def setUp(self): - super().setUp() - - def test_lock(self): - lock = OpLocker('test', db.engine).try_lock() - self.assertEqual(True, lock.is_latest_version(), - 'should be latest version') - - # update database version - new_lock = db.session.query(OptimisticLock).filter_by( - name=lock.name).first() - new_lock.version = new_lock.version + 1 - db.session.commit() - self.assertEqual(False, lock.is_latest_version(), - 'should not be latest version') - - -if __name__ == '__main__': - logging.basicConfig(stream=sys.stderr, level=logging.INFO) - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/composer/runner_cache_test.py b/web_console_v2/api/test/fedlearner_webconsole/composer/runner_cache_test.py deleted file mode 100644 index a648a4710..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/composer/runner_cache_test.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import unittest - -from test.fedlearner_webconsole.composer.common import TaskRunner -from testing.common import BaseTestCase - -from fedlearner_webconsole.composer.interface import ItemType -from fedlearner_webconsole.composer.runner_cache import RunnerCache - - -class RunnerCacheTest(BaseTestCase): - class Config(BaseTestCase.Config): - STORAGE_ROOT = '/tmp' - START_SCHEDULER = False - START_GRPC_SERVER = False - - def test_runner(self): - c = RunnerCache(runner_fn={ - ItemType.TASK.value: TaskRunner, - }) - runners = [ - (1, 'task_1'), - (2, 'task_2'), - (3, 'task_3'), - ] - for runner in runners: - rid, name = runner - c.find_runner(rid, name) - self.assertEqual(len(runners), len(c.data), - 'should be equal runners number') - - for runner in runners: - rid, name = runner - c.del_runner(rid, name) - self.assertEqual(0, len(c.data), 'should be equal 0') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/composer/thread_reaper_test.py b/web_console_v2/api/test/fedlearner_webconsole/composer/thread_reaper_test.py deleted file mode 100644 index 53ead076d..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/composer/thread_reaper_test.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 - -import logging -import sys -import time -import unittest -from typing import Tuple - -from testing.common import BaseTestCase -from fedlearner_webconsole.composer.models import Context, RunnerStatus -from fedlearner_webconsole.db import db -from fedlearner_webconsole.composer.interface import IRunner -from fedlearner_webconsole.composer.thread_reaper import ThreadReaper - - -class TaskRunner(IRunner): - def __init__(self, task_id: int): - self.task_id = task_id - - def start(self, context: Context): - logging.info( - f"[mock_task_runner] {self.task_id} started, ctx: {context}") - time.sleep(5) - - def result(self, context: Context) -> Tuple[RunnerStatus, dict]: - time.sleep(3) - return RunnerStatus.DONE, {} - - -class ThreadReaperTest(BaseTestCase): - class Config(BaseTestCase.Config): - STORAGE_ROOT = '/tmp' - START_SCHEDULER = False - START_GRPC_SERVER = False - - def setUp(self): - super().setUp() - - def test_thread_reaper(self): - tr = ThreadReaper(worker_num=1) - - runner = TaskRunner(1) - tr.enqueue('1', runner, - Context(data={}, internal={}, db_engine=db.engine)) - self.assertEqual(True, tr.is_full(), 'should be full') - ok = tr.enqueue('2', runner, - Context(data={}, internal={}, db_engine=db.engine)) - self.assertEqual(False, ok, 'should not be enqueued') - time.sleep(10) - self.assertEqual(False, tr.is_full(), 'should not be full') - ok = tr.enqueue('3', runner, - Context(data={}, internal={}, db_engine=db.engine)) - self.assertEqual(True, ok, 'should be enqueued') - tr.stop(wait=True) - - -if __name__ == '__main__': - logging.basicConfig(stream=sys.stderr, level=logging.INFO) - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/dataset/apis_test.py b/web_console_v2/api/test/fedlearner_webconsole/dataset/apis_test.py deleted file mode 100644 index 23e2b4bed..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/dataset/apis_test.py +++ /dev/null @@ -1,349 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import json -import time -import os -import shutil -import tempfile -import unittest -from datetime import datetime, timezone -from http import HTTPStatus -from pathlib import Path -from unittest import mock -from unittest.mock import patch, MagicMock - -from collections import namedtuple -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db_handler as db -from fedlearner_webconsole.dataset.models import (Dataset, DatasetType) -from tensorflow.io import gfile - -FakeFileStatistics = namedtuple('FakeFileStatistics', ['length', 'mtime_nsec']) - - -class DatasetApiTest(BaseTestCase): - class Config(BaseTestCase.Config): - STORAGE_ROOT = tempfile.gettempdir() - - def setUp(self): - super().setUp() - with db.session_scope() as session: - self.default_dataset1 = Dataset( - name='default dataset1', - dataset_type=DatasetType.STREAMING, - comment='test comment1', - path='/data/dataset/123', - project_id=1, - ) - session.add(self.default_dataset1) - session.commit() - time.sleep(1) - with db.session_scope() as session: - self.default_dataset2 = Dataset( - name='default dataset2', - dataset_type=DatasetType.STREAMING, - comment='test comment2', - path=os.path.join(tempfile.gettempdir(), 'dataset/123'), - project_id=2, - ) - session.add(self.default_dataset2) - session.commit() - - def test_get_dataset(self): - get_response = self.get_helper( - f'/api/v2/datasets/{self.default_dataset1.id}') - self.assertEqual(get_response.status_code, HTTPStatus.OK) - dataset = self.get_response_data(get_response) - self.assertEqual( - { - 'id': 1, - 'name': 'default dataset1', - 'dataset_type': 'STREAMING', - 'comment': 'test comment1', - 'path': '/data/dataset/123', - 'created_at': mock.ANY, - 'updated_at': mock.ANY, - 'deleted_at': None, - 'data_batches': [], - 'project_id': 1, - }, dataset) - - def test_get_dataset_not_found(self): - get_response = self.get_helper('/api/v2/datasets/10086') - self.assertEqual(get_response.status_code, HTTPStatus.NOT_FOUND) - - def test_get_datasets(self): - get_response = self.get_helper('/api/v2/datasets') - self.assertEqual(get_response.status_code, HTTPStatus.OK) - datasets = self.get_response_data(get_response) - self.assertEqual(len(datasets), 2) - self.assertEqual(datasets[0]['name'], 'default dataset2') - self.assertEqual(datasets[1]['name'], 'default dataset1') - - def test_get_datasets_with_project_id(self): - get_response = self.get_helper('/api/v2/datasets?project=1') - self.assertEqual(get_response.status_code, HTTPStatus.OK) - datasets = self.get_response_data(get_response) - self.assertEqual(len(datasets), 1) - self.assertEqual(datasets[0]['name'], 'default dataset1') - - def test_preview_dataset_and_feature_metrics(self): - # write data - gfile.makedirs(self.default_dataset2.path) - meta_path = os.path.join(self.default_dataset2.path, '_META') - meta_data = { - 'dtypes': { - 'f01': 'bigint' - }, - 'samples': [ - [1], - [0], - ], - } - with gfile.GFile(meta_path, 'w') as f: - f.write(json.dumps(meta_data)) - - features_path = os.path.join(self.default_dataset2.path, '_FEATURES') - features_data = { - 'f01': { - 'count': '2', - 'mean': '0.0015716767309123998', - 'stddev': '0.03961485047808605', - 'min': '0', - 'max': '1', - 'missing_count': '0' - } - } - with gfile.GFile(features_path, 'w') as f: - f.write(json.dumps(features_data)) - - hist_path = os.path.join(self.default_dataset2.path, '_HIST') - hist_data = { - "f01": { - "x": [ - 0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, - 0.6000000000000001, 
0.7000000000000001, 0.8, 0.9, 1 - ], - "y": [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19] - } - } - with gfile.GFile(hist_path, 'w') as f: - f.write(json.dumps(hist_data)) - - response = self.client.get('/api/v2/datasets/2/preview') - self.assertEqual(response.status_code, 200) - preview_data = self.get_response_data(response) - meta_data['metrics'] = features_data - self.assertEqual(preview_data, meta_data, 'should has preview data') - - feat_name = 'f01' - feature_response = self.client.get( - f'/api/v2/datasets/2/feature_metrics?name={feat_name}') - self.assertEqual(response.status_code, 200) - feature_data = self.get_response_data(feature_response) - self.assertEqual( - feature_data, { - 'name': feat_name, - 'metrics': features_data.get(feat_name, {}), - 'hist': hist_data.get(feat_name, {}) - }, 'should has feature data') - - @patch('fedlearner_webconsole.dataset.apis.datetime') - def test_post_datasets(self, mock_datetime): - mock_datetime.now = MagicMock( - return_value=datetime(2020, 6, 8, 6, 6, 6)) - name = 'test post dataset' - dataset_type = DatasetType.STREAMING.value - comment = 'test comment' - create_response = self.post_helper('/api/v2/datasets', - data={ - 'name': name, - 'dataset_type': dataset_type, - 'comment': comment, - 'project_id': 1, - }) - self.assertEqual(create_response.status_code, HTTPStatus.OK) - created_dataset = self.get_response_data(create_response) - - dataset_path = os.path.join( - tempfile.gettempdir(), 'dataset/20200608_060606_test-post-dataset') - self.assertEqual( - { - 'id': 3, - 'name': 'test post dataset', - 'dataset_type': dataset_type, - 'comment': comment, - 'path': dataset_path, - 'created_at': mock.ANY, - 'updated_at': mock.ANY, - 'deleted_at': None, - 'data_batches': [], - 'project_id': 1, - }, created_dataset) - # patch datasets - updated_comment = 'updated comment' - put_response = self.patch_helper('/api/v2/datasets/3', - data={'comment': updated_comment}) - updated_dataset = self.get_response_data(put_response) - self.assertEqual( - { - 'id': 3, - 'name': 'test post dataset', - 'dataset_type': dataset_type, - 'comment': updated_comment, - 'path': dataset_path, - 'created_at': mock.ANY, - 'updated_at': mock.ANY, - 'deleted_at': None, - 'data_batches': [], - 'project_id': 1, - }, updated_dataset) - - @patch('fedlearner_webconsole.dataset.apis.scheduler.wakeup') - def test_post_batches(self, mock_wakeup): - dataset_id = self.default_dataset1.id - event_time = int( - datetime(2020, 6, 8, 6, 8, 8, tzinfo=timezone.utc).timestamp()) - files = ['/data/upload/1.csv', '/data/upload/2.csv'] - move = False - comment = 'test post comment' - create_response = self.post_helper( - f'/api/v2/datasets/{dataset_id}/batches', - data={ - 'event_time': event_time, - 'files': files, - 'move': move, - 'comment': comment - }) - self.assertEqual(create_response.status_code, HTTPStatus.OK) - created_data_batch = self.get_response_data(create_response) - - self.maxDiff = None - self.assertEqual( - { - 'id': 1, - 'dataset_id': 1, - 'comment': comment, - 'event_time': event_time, - 'created_at': mock.ANY, - 'updated_at': mock.ANY, - 'deleted_at': None, - 'file_size': 0, - 'move': False, - 'num_file': 2, - 'num_imported_file': 0, - 'path': '/data/dataset/123/batch/20200608_060808', - 'state': 'NEW', - 'details': { - 'files': [{ - 'destination_path': - '/data/dataset/123/batch/20200608_060808/1.csv', - 'error_message': '', - 'size': '0', - 'source_path': '/data/upload/1.csv', - 'state': 'UNSPECIFIED' - }, { - 'destination_path': - '/data/dataset/123/batch/20200608_060808/2.csv', 
- 'error_message': '', - 'size': '0', - 'source_path': '/data/upload/2.csv', - 'state': 'UNSPECIFIED' - }] - } - }, created_data_batch) - mock_wakeup.assert_called_once_with( - data_batch_ids=[created_data_batch['id']]) - - -class FilesApiTest(BaseTestCase): - class Config(BaseTestCase.Config): - STORAGE_ROOT = tempfile.gettempdir() - - def setUp(self): - super().setUp() - # Create a temporary directory - self._tempdir = os.path.join(tempfile.gettempdir(), 'upload') - os.makedirs(self._tempdir, exist_ok=True) - subdir = Path(self._tempdir).joinpath('s') - subdir.mkdir() - Path(self._tempdir).joinpath('f1.txt').write_text('f1') - Path(self._tempdir).joinpath('f2.txt').write_text('f2f2') - subdir.joinpath('s3.txt').write_text('s3s3s3') - - # Mocks os.stat - self._orig_os_stat = os.stat - - def fake_stat(path, *arg, **kwargs): - return self._get_file_stat(self._orig_os_stat, path) - - gfile.stat = fake_stat - - def tearDown(self): - os.stat = self._orig_os_stat - # Remove the directory after the test - shutil.rmtree(self._tempdir) - super().tearDown() - - def _get_temp_path(self, file_path: str = None) -> str: - return str(Path(self._tempdir, file_path or '').absolute()) - - def _get_file_stat(self, orig_os_stat, path): - if path == self._get_temp_path('f1.txt') or \ - path == self._get_temp_path('f2.txt') or \ - path == self._get_temp_path('s/s3.txt'): - return FakeFileStatistics(2, 1613982390 * 1e9) - else: - return orig_os_stat(path) - - def test_get_default_storage_root(self): - get_response = self.get_helper('/api/v2/files') - self.assertEqual(get_response.status_code, HTTPStatus.OK) - files = self.get_response_data(get_response) - self.assertEqual(sorted(files, key=lambda f: f['path']), [ - { - 'path': self._get_temp_path('f1.txt'), - 'size': 2, - 'mtime': 1613982390 - }, - { - 'path': self._get_temp_path('f2.txt'), - 'size': 2, - 'mtime': 1613982390 - }, - { - 'path': self._get_temp_path('s/s3.txt'), - 'size': 2, - 'mtime': 1613982390 - }, - ]) - - def test_get_specified_directory(self): - dir = self._get_temp_path('s') - get_response = self.get_helper(f'/api/v2/files?directory={dir}') - self.assertEqual(get_response.status_code, HTTPStatus.OK) - files = self.get_response_data(get_response) - self.assertEqual(files, [ - { - 'path': self._get_temp_path('s/s3.txt'), - 'size': 2, - 'mtime': 1613982390 - }, - ]) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/db_test.py b/web_console_v2/api/test/fedlearner_webconsole/db_test.py deleted file mode 100644 index d49a1983a..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/db_test.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import os -import unittest - -from fedlearner_webconsole.db import get_database_uri, _turn_db_timezone_to_utc -from fedlearner_webconsole.proto import common_pb2 - - -class EngineSessionTest(unittest.TestCase): - def test_turn_db_timezone_to_utc(self): - sqlite_uri = 'sqlite:///app.db' - self.assertEqual(_turn_db_timezone_to_utc(sqlite_uri), - 'sqlite:///app.db') - - mysql_uri_naive = 'mysql+pymysql://root:root@localhost:33600/fedlearner' - self.assertEqual( - _turn_db_timezone_to_utc(mysql_uri_naive), - 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\'' - ) - - mysql_uri_with_init_command = 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=HELLO' - self.assertEqual( - _turn_db_timezone_to_utc(mysql_uri_with_init_command), - 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\';HELLO' - ) - - mysql_uri_with_other_args = 'mysql+pymysql://root:root@localhost:33600/fedlearner?charset=utf8mb4' - self.assertEqual( - _turn_db_timezone_to_utc(mysql_uri_with_other_args), - 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\'&&charset=utf8mb4' - ) - - mysql_uri_with_set_time_zone = 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B08:00\'' - self.assertEqual( - _turn_db_timezone_to_utc(mysql_uri_with_set_time_zone), - 'mysql+pymysql://root:root@localhost:33600/fedlearner?init_command=SET SESSION time_zone=\'%2B00:00\'' - ) - - def test_get_database_uri(self): - # test with environmental variable - os.environ[ - 'SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:root@localhost:33600/fedlearner' - self.assertTrue(get_database_uri().startswith( - 'mysql+pymysql://root:root@localhost:33600/fedlearner')) - - # test with fallback options - os.environ.pop('SQLALCHEMY_DATABASE_URI') - self.assertTrue(get_database_uri().startswith('sqlite:///')) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/exceptions_test.py b/web_console_v2/api/test/fedlearner_webconsole/exceptions_test.py deleted file mode 100644 index 6e7af1a3c..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/exceptions_test.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import unittest - -from http import HTTPStatus -from fedlearner_webconsole.exceptions import (InvalidArgumentException, - NotFoundException) - - -class ExceptionsTest(unittest.TestCase): - - def test_invalid_argument_exception(self): - """Checks if the information of the exception is correct.""" - exception = InvalidArgumentException(['123', 'df']) - self.assertEqual(exception.status_code, HTTPStatus.BAD_REQUEST) - self.assertEqual( - exception.to_dict(), { - 'code': 400, - 'message': 'Invalid argument or payload.', - 'details': [ - '123', - 'df', - ] - }) - - def test_not_found_exception(self): - exception1 = NotFoundException('User A not found.') - self.assertEqual(exception1.status_code, HTTPStatus.NOT_FOUND) - self.assertEqual( - exception1.to_dict(), { - 'code': 404, - 'message': 'User A not found.', - }) - exception2 = NotFoundException() - self.assertEqual(exception2.status_code, HTTPStatus.NOT_FOUND) - self.assertEqual( - exception2.to_dict(), { - 'code': 404, - 'message': 'Resource not found.', - }) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/job/metrics_test.py b/web_console_v2/api/test/fedlearner_webconsole/job/metrics_test.py deleted file mode 100644 index 6aeff6379..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/job/metrics_test.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import time -import unittest -from http import HTTPStatus - -from testing.common import BaseTestCase, TestAppProcess - -from fedlearner_webconsole.proto import workflow_definition_pb2 -from fedlearner_webconsole.db import db -from fedlearner_webconsole.workflow.models import Workflow -from fedlearner_webconsole.job.models import Job, JobType -from fedlearner_webconsole.job.metrics import JobMetricsBuilder - - -class JobMetricsBuilderTest(BaseTestCase): - class Config(BaseTestCase.Config): - ES_HOST = '' - ES_PORT = 80 - - class FollowerConfig(Config): - GRPC_LISTEN_PORT = 4990 - - def test_data_join_metrics(self): - job = Job( - name='multi-indices-test27', - job_type=JobType.DATA_JOIN) - import json - print(json.dumps(JobMetricsBuilder(job).plot_metrics())) - - def test_nn_metrics(self): - job = Job( - name='automl-2782410011', - job_type=JobType.NN_MODEL_TRANINING) - print(JobMetricsBuilder(job).plot_metrics()) - - def test_peer_metrics(self): - proc = TestAppProcess( - JobMetricsBuilderTest, - 'follower_test_peer_metrics', - JobMetricsBuilderTest.FollowerConfig) - proc.start() - self.leader_test_peer_metrics() - proc.terminate() - - def leader_test_peer_metrics(self): - self.setup_project( - 'leader', - JobMetricsBuilderTest.FollowerConfig.GRPC_LISTEN_PORT) - workflow = Workflow( - name='test-workflow', - project_id=1) - db.session.add(workflow) - db.session.commit() - - while True: - resp = self.get_helper( - '/api/v2/workflows/1/peer_workflows' - '/0/jobs/test-job/metrics') - if resp.status_code == HTTPStatus.OK: - break - time.sleep(1) - - def follower_test_peer_metrics(self): - self.setup_project( - 'follower', - JobMetricsBuilderTest.Config.GRPC_LISTEN_PORT) - workflow = Workflow( - name='test-workflow', - project_id=1, - metric_is_public=True) - workflow.set_job_ids([1]) - db.session.add(workflow) - job = Job( - name='automl-2782410011', - job_type=JobType.NN_MODEL_TRANINING, - workflow_id=1, - project_id=1, - config=workflow_definition_pb2.JobDefinition( - name='test-job' - ).SerializeToString()) - db.session.add(job) - db.session.commit() - - while True: - time.sleep(1) - - -if __name__ == '__main__': - # no es in test env skip this test - # unittest.main() - pass diff --git a/web_console_v2/api/test/fedlearner_webconsole/job/service_test.py b/web_console_v2/api/test/fedlearner_webconsole/job/service_test.py deleted file mode 100644 index 8161ea56c..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/job/service_test.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
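Both peer-metrics halves above poll with unbounded `while True` loops, which hangs the whole suite if the peer never comes up. A bounded variant (a hypothetical helper, not part of the deleted file) keeps the same shape but fails fast:

```python
import time
from http import HTTPStatus


def poll_until_ok(get_helper, url, timeout_seconds=30):
    """Poll `url` via the test client until it returns 200 or time runs out."""
    deadline = time.monotonic() + timeout_seconds
    while time.monotonic() < deadline:
        resp = get_helper(url)
        if resp.status_code == HTTPStatus.OK:
            return resp
        time.sleep(1)
    raise TimeoutError(f'{url} did not return 200 within {timeout_seconds}s')
```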
- -# coding: utf-8 - -from unittest.mock import patch -from testing.common import BaseTestCase - -from fedlearner_webconsole.proto import workflow_definition_pb2 -from fedlearner_webconsole.db import db -from fedlearner_webconsole.workflow.models import Workflow -from fedlearner_webconsole.job.models import Job, JobDependency, JobType, JobState -from fedlearner_webconsole.job.service import JobService - - -class JobServiceTest(BaseTestCase): - - def setUp(self): - super().setUp() - workflow_0 = Workflow(id=0, name='test-workflow-0', project_id=0) - workflow_1 = Workflow(id=1, name='test-workflow-1', project_id=0) - db.session.add_all([workflow_0, workflow_1]) - - config = workflow_definition_pb2.JobDefinition( - name='test-job').SerializeToString() - job_0 = Job(id=0, - name='raw_data_0', - job_type=JobType.RAW_DATA, - state=JobState.STARTED, - workflow_id=0, - project_id=0, - config=config) - job_1 = Job(id=1, - name='raw_data_1', - job_type=JobType.RAW_DATA, - state=JobState.COMPLETED, - workflow_id=0, - project_id=0, - config=config) - job_2 = Job(id=2, - name='data_join_0', - job_type=JobType.DATA_JOIN, - state=JobState.WAITING, - workflow_id=0, - project_id=0, - config=config) - job_3 = Job(id=3, - name='data_join_1', - job_type=JobType.DATA_JOIN, - state=JobState.COMPLETED, - workflow_id=1, - project_id=0, - config=config) - job_4 = Job(id=4, - name='train_job_0', - job_type=JobType.NN_MODEL_TRANINING, - state=JobState.WAITING, - workflow_id=1, - project_id=0, - config=config) - db.session.add_all([job_0, job_1, job_2, job_3, job_4]) - - job_dep_0 = JobDependency(src_job_id=job_0.id, - dst_job_id=job_2.id, - dep_index=0) - job_dep_1 = JobDependency(src_job_id=job_1.id, - dst_job_id=job_2.id, - dep_index=1) - job_dep_2 = JobDependency(src_job_id=job_3.id, - dst_job_id=job_4.id, - dep_index=0) - - db.session.add_all([job_dep_0, job_dep_1, job_dep_2]) - db.session.commit() - - def test_is_ready(self): - job_0 = db.session.query(Job).get(0) - job_2 = db.session.query(Job).get(2) - job_4 = db.session.query(Job).get(4) - job_service = JobService(db.session) - self.assertTrue(job_service.is_ready(job_0)) - self.assertFalse(job_service.is_ready(job_2)) - self.assertTrue(job_service.is_ready(job_4)) - - @patch('fedlearner_webconsole.job.models.Job.is_flapp_failed') - @patch('fedlearner_webconsole.job.models.Job.is_flapp_complete') - def test_update_running_state(self, mock_is_complete, mock_is_failed): - job_0 = db.session.query(Job).get(0) - job_2 = db.session.query(Job).get(2) - mock_is_complete.return_value = True - job_service = JobService(db.session) - job_service.update_running_state(job_0.name) - self.assertEqual(job_0.state, JobState.COMPLETED) - self.assertTrue(job_service.is_ready(job_2)) - job_0.state = JobState.STARTED - mock_is_complete.return_value = False - mock_is_failed.return_value = True - job_service.update_running_state(job_0.name) - self.assertEqual(job_0.state, JobState.FAILED) - - diff --git a/web_console_v2/api/test/fedlearner_webconsole/job/yaml_formatter_test.py b/web_console_v2/api/test/fedlearner_webconsole/job/yaml_formatter_test.py deleted file mode 100644 index 8ad1e6269..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/job/yaml_formatter_test.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import unittest -import tarfile -import base64 -from io import BytesIO -from google.protobuf.json_format import ParseDict -from fedlearner_webconsole.job.yaml_formatter import format_yaml, code_dict_encode, generate_self_dict -from fedlearner_webconsole.job.models import Job, JobState -from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition -from testing.common import BaseTestCase - - -class YamlFormatterTest(BaseTestCase): - def test_format_with_phs(self): - project = { - 'variables[0]': - {'storage_root_dir': 'root_dir'} - - } - workflow = { - 'jobs': { - 'raw_data_job': {'name': 'raw_data123'} - } - } - yaml = format_yaml(""" - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables[0].storage_root_dir}/raw_data/${workflow.jobs.raw_data_job.name}" - } - """, project=project, workflow=workflow) - self.assertEqual(yaml, """ - { - "name": "OUTPUT_BASE_DIR", - "value": "root_dir/raw_data/raw_data123" - } - """) - - self.assertEqual(format_yaml('$project.variables[0].storage_root_dir', - project=project), - project['variables[0]']['storage_root_dir']) - - def test_format_with_no_ph(self): - self.assertEqual(format_yaml('{a: 123, b: 234}'), - '{a: 123, b: 234}') - - def test_format_yaml_unknown_ph(self): - x = { - 'y': 123 - } - with self.assertRaises(RuntimeError) as cm: - format_yaml('$x.y is $i.j.k', x=x) - self.assertEqual(str(cm.exception), 'Unknown placeholder: i.j.k') - with self.assertRaises(RuntimeError) as cm: - format_yaml('$x.y is ${i.j}', x=x) - self.assertEqual(str(cm.exception), 'Unknown placeholder: i.j') - - def test_encode_code(self): - test_data = {'test/a.py': 'awefawefawefawefwaef', - 'test1/b.py': 'asdfasd', - 'c.py': '', - 'test/d.py': 'asdf'} - code_base64 = code_dict_encode(test_data) - code_dict = {} - if code_base64.startswith('base64://'): - tar_binary = BytesIO(base64.b64decode(code_base64[9:])) - with tarfile.open(fileobj=tar_binary) as tar: - for file in tar.getmembers(): - code_dict[file.name] = str(tar.extractfile(file).read(), - encoding='utf-8') - self.assertEqual(code_dict, test_data) - - def test_generate_self_dict(self): - config = { - 'variables': [ - { - 'name': 'namespace', - 'value': 'leader' - }, - { - 'name': 'basic_envs', - 'value': '{}' - }, - { - 'name': 'storage_root_dir', - 'value': '/' - }, - { - 'name': 'EGRESS_URL', - 'value': '127.0.0.1:1991' - } - ] - } - job = Job(name='aa', project_id=1, workflow_id=1, state=JobState.NEW) - job.set_config(ParseDict(config, JobDefinition())) - self.assertEqual(generate_self_dict(job), - {'id': None, 'name': 'aa', - 'job_type': None, 'state': 'NEW', 'config': - {'expert_mode': False, - 'variables': [ - { - 'name': 'namespace', - 'value': 'leader', - 'access_mode': 'UNSPECIFIED', - 'widget_schema': '', - 'value_type': 'STRING'}, - { - 'name': 'basic_envs', - 'value': '{}', - 'access_mode': 'UNSPECIFIED', - 'widget_schema': '', - 'value_type': 'STRING'}, - { - 'name': 'storage_root_dir', - 'value': '/', - 'access_mode': 'UNSPECIFIED', - 'widget_schema': '', - 'value_type': 'STRING'}, - { - 'name': 'EGRESS_URL', - 'value': '127.0.0.1:1991', - 
'access_mode': 'UNSPECIFIED', - 'widget_schema': '', - 'value_type': 'STRING'}], - 'name': '', - 'job_type': 'UNSPECIFIED', - 'is_federated': False, - 'dependencies': [], - 'yaml_template': ''}, - 'is_disabled': None, 'workflow_id': 1, 'project_id': 1, 'flapp_snapshot': None, - 'pods_snapshot': None, 'error_message': None, 'created_at': None, 'updated_at': None, - 'deleted_at': None, 'pods': [], 'complete_at': None, - 'variables': {'namespace': 'leader', 'basic_envs': '{}', 'storage_root_dir': '/', - 'EGRESS_URL': '127.0.0.1:1991'}} - ) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/k8s/models_test.py b/web_console_v2/api/test/fedlearner_webconsole/k8s/models_test.py deleted file mode 100644 index 8086a1488..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/k8s/models_test.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import unittest -from datetime import datetime, timezone - -from fedlearner_webconsole.k8s.models import PodType, PodState, ContainerState, PodCondition, Pod, FlAppState, FlApp - - -class PodTypeTest(unittest.TestCase): - def test_from_string(self): - self.assertEqual(PodType.from_value('master'), PodType.MASTER) - self.assertEqual(PodType.from_value('Ps'), PodType.PS) - self.assertEqual(PodType.from_value('WORKER'), - PodType.WORKER) - - def test_from_unknown(self): - self.assertEqual(PodType.from_value('hhhhhhhh'), - PodType.UNKNOWN) - self.assertEqual(PodType.from_value(1), - PodType.UNKNOWN) - - -class PodStateTest(unittest.TestCase): - def test_from_string(self): - self.assertEqual(PodState.from_value('Running'), PodState.RUNNING) - self.assertEqual(PodState.from_value('Unknown'), PodState.UNKNOWN) - - def test_from_unknown(self): - self.assertEqual(PodState.from_value('hhhhhhhh'), - PodState.UNKNOWN) - - -class ContainerStateTest(unittest.TestCase): - def test_get_message(self): - state = ContainerState(state='haha', - message='test message', - reason='test reason') - self.assertEqual(state.get_message(), 'haha:test reason') - self.assertEqual(state.get_message(private=True), 'haha:test message') - state.message = None - self.assertEqual(state.get_message(), 'haha:test reason') - self.assertEqual(state.get_message(private=True), 'haha:test reason') - - -class PodConditionTest(unittest.TestCase): - def test_get_message(self): - cond = PodCondition(cond_type='t1', - message='test message', - reason='test reason') - self.assertEqual(cond.get_message(), 't1:test reason') - self.assertEqual(cond.get_message(private=True), 't1:test message') - cond.message = None - self.assertEqual(cond.get_message(), 't1:test reason') - self.assertEqual(cond.get_message(private=True), 't1:test reason') - - -class PodTest(unittest.TestCase): - def test_to_dict(self): - pod = Pod(name='this-is-a-pod', - state=PodState.RUNNING, - pod_type=PodType.WORKER, - pod_ip='172.10.0.20', - 
container_states=[ContainerState( - state='h1', - message='test message' - )], - pod_conditions=[PodCondition( - cond_type='h2', - reason='test reason' - )]) - self.assertEqual(pod.to_dict(include_private_info=True), - { - 'name': 'this-is-a-pod', - 'pod_type': 'WORKER', - 'state': 'RUNNING', - 'pod_ip': '172.10.0.20', - 'message': 'h1:test message, h2:test reason' - }) - - def test_from_json(self): - json = { - 'metadata': { - 'name': 'test-pod', - 'labels': { - 'app-name': 'u244777dac51949c5b2b-data-join-job', - 'fl-replica-type': 'master' - }, - }, - 'status': { - 'pod_ip': '172.10.0.20', - 'phase': 'Running', - 'conditions': [ - { - 'type': 'Failed', - 'reason': 'Test reason' - } - ], - 'containerStatuses': [ - { - 'containerID': 'docker://034eaf58d4e24581232832661636da9949b6e2fb056398939fc2c0f2809d4c64', - 'image': 'artifact.bytedance.com/fedlearner/fedlearner:438d603', - 'state': { - 'running': { - 'message': 'Test message' - } - } - } - ] - } - } - expected_pod = Pod( - name='test-pod', - state=PodState.RUNNING, - pod_type=PodType.MASTER, - pod_ip='172.10.0.20', - container_states=[ - ContainerState( - state='running', - message='Test message' - ) - ], - pod_conditions=[ - PodCondition( - cond_type='Failed', - reason='Test reason' - ) - ] - ) - self.assertEqual(Pod.from_json(json), expected_pod) - - -class FlAppStateTest(unittest.TestCase): - def test_from_string(self): - self.assertEqual(FlAppState.from_value('FLStateComplete'), - FlAppState.COMPLETED) - self.assertEqual(FlAppState.from_value('Unknown'), FlAppState.UNKNOWN) - - def test_from_unknown(self): - self.assertEqual(FlAppState.from_value('hhh123hhh'), - FlAppState.UNKNOWN) - - -class FlAppTest(unittest.TestCase): - def test_from_json(self): - json = { - 'status': { - 'appState': 'FLStateComplete', - 'completionTime': '2021-04-26T08:33:45Z', - 'flReplicaStatus': { - 'Master': { - 'failed': { - 'test-pod1': {} - } - }, - 'Worker': { - 'succeeded': { - 'test-pod2': {}, - 'test-pod3': {} - } - } - } - } - } - completed_at = int(datetime(2021, 4, 26, 8, 33, 45, tzinfo=timezone.utc).timestamp()) - expected_flapp = FlApp( - state=FlAppState.COMPLETED, - completed_at=completed_at, - pods=[ - Pod( - name='test-pod1', - state=PodState.FAILED_AND_FREED, - pod_type=PodType.MASTER - ), - Pod( - name='test-pod2', - state=PodState.SUCCEEDED_AND_FREED, - pod_type=PodType.WORKER - ), - Pod( - name='test-pod3', - state=PodState.SUCCEEDED_AND_FREED, - pod_type=PodType.WORKER - ) - ] - ) - self.assertEqual(FlApp.from_json(json), expected_flapp) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/mmgr/model_test.py b/web_console_v2/api/test/fedlearner_webconsole/mmgr/model_test.py deleted file mode 100644 index 84c8644e2..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/mmgr/model_test.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
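The `from_json` tests above double as documentation of the FLApp pod status layout. Condensed, the parsing they assert amounts to the sketch below, written in terms of the constructors the test itself imports; the real classmethod may differ in details:

```python
from fedlearner_webconsole.k8s.models import (Pod, PodState, PodType,
                                              ContainerState, PodCondition)


def pod_from_json(raw: dict) -> Pod:
    """Sketch of Pod.from_json reconstructed from the expected_pod assertion."""
    status = raw.get('status', {})
    return Pod(
        name=raw['metadata']['name'],
        # 'phase' drives the pod state; the fl-replica-type label the type.
        state=PodState.from_value(status.get('phase', 'Unknown')),
        pod_type=PodType.from_value(
            raw['metadata']['labels'].get('fl-replica-type', 'unknown')),
        pod_ip=status.get('pod_ip'),
        container_states=[
            ContainerState(state=state_name, **fields)
            for cs in status.get('containerStatuses', [])
            for state_name, fields in cs.get('state', {}).items()],
        pod_conditions=[
            PodCondition(cond_type=cond['type'], reason=cond.get('reason'))
            for cond in status.get('conditions', [])])
```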
- -# coding: utf-8 - -import unittest -from unittest.mock import MagicMock, patch - -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db, get_session -from fedlearner_webconsole.mmgr.models import Model -from fedlearner_webconsole.mmgr.models import ModelState -from fedlearner_webconsole.mmgr.service import ModelService -from fedlearner_webconsole.job.models import Job, JobType, JobState -from fedlearner_webconsole.utils.k8s_cache import Event, EventType, ObjectType - - -class ModelTest(BaseTestCase): - @patch( - 'fedlearner_webconsole.mmgr.service.ModelService.get_checkpoint_path') - def setUp(self, mock_get_checkpoint_path): - super().setUp() - self.model_service = ModelService(db.session) - self.train_job = Job(name='train-job', - job_type=JobType.NN_MODEL_TRANINING, - workflow_id=1, - project_id=1) - self.eval_job = Job(name='eval-job', - job_type=JobType.NN_MODEL_EVALUATION, - workflow_id=1, - project_id=1) - mock_get_checkpoint_path.return_value = 'output' - self.model_service.create(job=self.train_job, parent_job_name=None) - model = db.session.query(Model).filter_by( - job_name=self.train_job.name).one() - self.model_service.create(job=self.eval_job, - parent_job_name=model.job_name) - db.session.add(self.train_job) - db.session.add(self.eval_job) - db.session.commit() - - @patch('fedlearner_webconsole.mmgr.service.ModelService.plot_metrics') - def test_on_job_update(self, mock_plot_metrics: MagicMock): - mock_plot_metrics.return_value = 'plot metrics return' - - # TODO: change get_session to db.session_scope - with get_session(db.engine) as session: - model = session.query(Model).filter_by( - job_name=self.train_job.name).one() - self.assertEqual(model.state, ModelState.COMMITTED.value) - - train_job = session.query(Job).filter_by(name='train-job').one() - train_job.state = JobState.STARTED - session.commit() - - # TODO: change get_session to db.session_scope - with get_session(db.engine) as session: - train_job = session.query(Job).filter_by(name='train-job').one() - train_job.state = JobState.STARTED - model = session.query(Model).filter_by( - job_name=self.train_job.name).one() - model_service = ModelService(session) - - model_service.on_job_update(train_job) - self.assertEqual(model.state, ModelState.RUNNING.value) - session.commit() - - # TODO: change get_session to db.session_scope - with get_session(db.engine) as session: - train_job = session.query(Job).filter_by(name='train-job').one() - train_job.state = JobState.COMPLETED - model = session.query(Model).filter_by( - job_name=self.train_job.name).one() - model_service = ModelService(session) - - model_service.on_job_update(train_job) - self.assertEqual(model.state, ModelState.SUCCEEDED.value) - session.commit() - - # TODO: change get_session to db.session_scope - with get_session(db.engine) as session: - train_job = session.query(Job).filter_by(name='train-job').one() - train_job.state = JobState.FAILED - model = session.query(Model).filter_by( - job_name=self.train_job.name).one() - model_service = ModelService(session) - - model_service.on_job_update(train_job) - self.assertEqual(model.state, ModelState.FAILED.value) - session.commit() - - def test_hook(self): - train_job = Job(id=0, - state=JobState.STARTED, - name='nn-train', - job_type=JobType.NN_MODEL_TRANINING, - workflow_id=0, - project_id=0) - db.session.add(train_job) - db.session.commit() - event = Event(flapp_name='nn-train', - event_type=EventType.ADDED, - obj_type=ObjectType.FLAPP, - obj_dict={}) - 
self.model_service.workflow_hook(train_job) - model = Model.query.filter_by(job_name='nn-train').one() - self.assertEqual(model.state, ModelState.COMMITTED.value) - - event.event_type = EventType.MODIFIED - train_job.state = JobState.STARTED - self.model_service.k8s_watcher_hook(event) - self.assertEqual(model.state, ModelState.RUNNING.value) - - train_job.state = JobState.COMPLETED - self.model_service.k8s_watcher_hook(event) - self.assertEqual(model.state, ModelState.SUCCEEDED.value) - - train_job.state = JobState.STARTED - self.model_service.k8s_watcher_hook(event) - self.assertEqual(model.state, ModelState.RUNNING.value) - self.assertEqual(model.version, 2) - - train_job.state = JobState.STOPPED - self.model_service.k8s_watcher_hook(event) - self.assertEqual(model.state, ModelState.PAUSED.value) - db.session.rollback() - - def test_api(self): - resp = self.get_helper('/api/v2/models/1') - data = self.get_response_data(resp) - self.assertEqual(data.get('id'), 1) - - resp = self.get_helper('/api/v2/models') - model_list = self.get_response_data(resp) - self.assertEqual(len(model_list), 1) - - model = Model.query.first() - model.state = ModelState.FAILED.value - db.session.add(model) - db.session.commit() - self.delete_helper('/api/v2/models/1') - resp = self.get_helper('/api/v2/models/1') - data = self.get_response_data(resp) - self.assertEqual(data.get('state'), ModelState.DROPPED.value) - - def test_get_eval(self): - model = Model.query.filter_by(job_name=self.train_job.name).one() - self.assertEqual(len(model.get_eval_model()), 1) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/project/add_on_test.py b/web_console_v2/api/test/fedlearner_webconsole/project/add_on_test.py deleted file mode 100644 index 56f3e9cfa..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/project/add_on_test.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
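`test_on_job_update` and `test_hook` above walk a model through its whole lifecycle. The transitions they assert reduce to a simple job-state to model-state mapping (an assumption about `ModelService` internals, stated here only to summarize the test):

```python
from fedlearner_webconsole.job.models import JobState
from fedlearner_webconsole.mmgr.models import ModelState

# Job state observed on update  ->  model state recorded, per the asserts above.
JOB_TO_MODEL_STATE = {
    JobState.STARTED: ModelState.RUNNING,
    JobState.COMPLETED: ModelState.SUCCEEDED,
    JobState.FAILED: ModelState.FAILED,
    JobState.STOPPED: ModelState.PAUSED,
}
```

Note the extra wrinkle the hook test pins down: re-entering STARTED after a successful run does not just flip the state back to RUNNING, it also bumps `model.version` (the `version == 2` assertion).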
- -# coding: utf-8 - -import os -import unittest -from base64 import b64decode, b64encode -from fedlearner_webconsole.project.add_on import parse_certificates - - -class AddOnTest(unittest.TestCase): - - def test_parse_certificates(self): - file_names = [ - 'client/client.pem', 'client/client.key', 'client/intermediate.pem', 'client/root.pem', - 'server/server.pem', 'server/server.key', 'server/intermediate.pem', 'server/root.pem' - ] - with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'test.tar.gz'), 'rb') as file: - certificates = parse_certificates(b64encode(file.read())) - for file_name in file_names: - self.assertEqual(str(b64decode(certificates.get(file_name)), encoding='utf-8'), - 'test {}'.format(file_name)) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/project/apis_test.py b/web_console_v2/api/test/fedlearner_webconsole/project/apis_test.py deleted file mode 100644 index c91401d73..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/project/apis_test.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os -import json -import unittest - -from base64 import b64encode -from http import HTTPStatus -from google.protobuf.json_format import ParseDict -from unittest.mock import patch, MagicMock - -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db -from fedlearner_webconsole.project.models import Project -from fedlearner_webconsole.project.add_on import parse_certificates, verify_certificates -from fedlearner_webconsole.proto.project_pb2 import Project as ProjectProto, \ - CertificateStorage -from fedlearner_webconsole.workflow.models import Workflow - - -class ProjectApiTest(BaseTestCase): - - def setUp(self): - super().setUp() - with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'test.tar.gz'), 'rb') as file: - self.TEST_CERTIFICATES = str(b64encode(file.read()), encoding='utf-8') - self.default_project = Project() - self.default_project.name = 'test-self.default_project' - self.default_project.set_config(ParseDict({ - 'participants': [ - { - 'name': 'test-participant', - 'domain_name': 'fl-test.com', - 'url': '127.0.0.1:32443' - } - ], - 'variables': [ - { - 'name': 'test', - 'value': 'test' - } - ] - }, ProjectProto())) - self.default_project.set_certificate(ParseDict({ - 'domain_name_to_cert': {'fl-test.com': - {'certs': - parse_certificates(self.TEST_CERTIFICATES)}} - }, CertificateStorage())) - self.default_project.comment = 'test comment' - db.session.add(self.default_project) - workflow = Workflow(name='workflow_key_get1', - project_id=1) - db.session.add(workflow) - db.session.commit() - - def test_get_project(self): - get_response = self.get_helper( - '/api/v2/projects/{}'.format(1) - ) - self.assertEqual(get_response.status_code, HTTPStatus.OK) - queried_project = json.loads(get_response.data).get('data') - 
self.assertEqual(queried_project, self.default_project.to_dict()) - - def test_get_not_found_project(self): - get_response = self.get_helper( - '/api/v2/projects/{}'.format(1000) - ) - self.assertEqual(get_response.status_code, HTTPStatus.NOT_FOUND) - - @patch('fedlearner_webconsole.project.apis.verify_certificates') - def test_post_project(self, mock_verify_certificates): - mock_verify_certificates.return_value = (True, '') - name = 'test-post-project' - config = { - 'participants': [ - { - 'name': 'test-post-participant', - 'domain_name': 'fl-test-post.com', - 'url': '127.0.0.1:32443', - 'certificates': self.TEST_CERTIFICATES - } - ], - 'variables': [ - { - 'name': 'test-post', - 'value': 'test' - } - ] - } - comment = 'test post project' - create_response = self.post_helper( - '/api/v2/projects', - data={ - 'name': name, - 'config': config, - 'comment': comment - }) - self.assertEqual(create_response.status_code, HTTPStatus.OK) - created_project = json.loads(create_response.data).get('data') - - queried_project = Project.query.filter_by(name=name).first() - self.assertEqual(created_project, queried_project.to_dict()) - - mock_verify_certificates.assert_called_once_with( - parse_certificates(self.TEST_CERTIFICATES)) - - def test_post_conflict_name_project(self): - config = { - 'participants': { - 'fl-test-post.com': { - 'name': 'test-post-participant', - 'url': '127.0.0.1:32443', - 'certificates': self.TEST_CERTIFICATES - } - }, - 'variables': [ - { - 'name': 'test-post', - 'value': 'test' - } - ] - } - create_response = self.post_helper( - '/api/v2/projects', - data={ - 'name': self.default_project.name, - 'config': config, - 'comment': '' - }) - self.assertEqual(create_response.status_code, HTTPStatus.BAD_REQUEST) - - def test_list_project(self): - list_response = self.get_helper('/api/v2/projects') - project_list = json.loads(list_response.data).get('data') - self.assertEqual(len(project_list), 1) - for project in project_list: - queried_project = Project.query.filter_by( - name=project['name']).first() - result = queried_project.to_dict() - result['num_workflow'] = 1 - self.assertEqual(project, result) - - def test_update_project(self): - updated_name = 'updated name' - updated_comment = 'updated comment' - update_response = self.patch_helper( - '/api/v2/projects/{}'.format(1), - data={ - 'participant_name': updated_name, - 'comment': updated_comment - }) - self.assertEqual(update_response.status_code, HTTPStatus.OK) - queried_project = Project.query.filter_by(id=1).first() - participant = queried_project.get_config().participants[0] - self.assertEqual(participant.name, updated_name) - self.assertEqual(queried_project.comment, updated_comment) - - def test_update_not_found_project(self): - updated_comment = 'updated comment' - update_response = self.patch_helper( - '/api/v2/projects/{}'.format(1000), - data={ - 'comment': updated_comment - }) - self.assertEqual(update_response.status_code, HTTPStatus.NOT_FOUND) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/project/models_test.py b/web_console_v2/api/test/fedlearner_webconsole/project/models_test.py deleted file mode 100644 index f6e13658a..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/project/models_test.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import unittest - -from fedlearner_webconsole.project.models import Project -from fedlearner_webconsole.proto import common_pb2, project_pb2 -from testing.common import BaseTestCase - - -class ProjectTest(BaseTestCase): - def test_get_namespace_fallback(self): - project = Project() - self.assertEqual(project.get_namespace(), 'default') - - project.set_config(project_pb2.Project( - variables=[ - common_pb2.Variable( - name='test_name', - value='test_value' - ) - ] - )) - self.assertEqual(project.get_namespace(), 'default') - - def test_get_namespace_from_variables(self): - project = Project() - project.set_config(project_pb2.Project( - variables=[ - common_pb2.Variable( - name='namespace', - value='haha' - ) - ] - )) - - self.assertEqual(project.get_namespace(), 'haha') - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/project/test.tar.gz b/web_console_v2/api/test/fedlearner_webconsole/project/test.tar.gz deleted file mode 100644 index 4558fd7fa..000000000 Binary files a/web_console_v2/api/test/fedlearner_webconsole/project/test.tar.gz and /dev/null differ diff --git a/web_console_v2/api/test/fedlearner_webconsole/rpc/client_test.py b/web_console_v2/api/test/fedlearner_webconsole/rpc/client_test.py deleted file mode 100644 index b2200e4ea..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/rpc/client_test.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
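The two cases above fix the behaviour of `Project.get_namespace()`: a variable literally named `namespace` wins, and anything else falls back to `'default'`. A sketch of that lookup over the proto config (illustrative only, and assuming an unset config is represented as `None`):

```python
def get_namespace(config) -> str:
    """Return the 'namespace' variable from a project_pb2.Project config,
    falling back to 'default' when the config or the variable is absent."""
    if config is not None:
        for variable in config.variables:
            if variable.name == 'namespace':
                return variable.value
    return 'default'
```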
- -# coding: utf-8 -import unittest -from unittest.mock import patch - -import grpc_testing -from grpc import StatusCode -from grpc.framework.foundation import logging_pool - -from testing.common import NoWebServerTestCase - -from fedlearner_webconsole.proto.service_pb2 import DESCRIPTOR -from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.project.models import Project as ProjectModel -from fedlearner_webconsole.job.models import Job -from fedlearner_webconsole.proto.common_pb2 import (GrpcSpec, Status, - StatusCode as - FedLearnerStatusCode) -from fedlearner_webconsole.proto.project_pb2 import Project, Participant -from fedlearner_webconsole.proto.service_pb2 import (CheckConnectionRequest, - ProjAuthInfo) -from fedlearner_webconsole.proto.service_pb2 import CheckConnectionResponse, \ - CheckJobReadyResponse, CheckJobReadyRequest - -TARGET_SERVICE = DESCRIPTOR.services_by_name['WebConsoleV2Service'] - - -class RpcClientTest(NoWebServerTestCase): - _TEST_PROJECT_NAME = 'test-project' - _TEST_RECEIVER_NAME = 'test-receiver' - _TEST_URL = 'localhost:123' - _TEST_AUTHORITY = 'test-authority' - _X_HOST_HEADER_KEY = 'x-host' - _TEST_X_HOST = 'default.fedlearner.webconsole' - _TEST_SELF_DOMAIN_NAME = 'fl-test-self.com' - - @classmethod - def setUpClass(cls): - - grpc_spec = GrpcSpec( - authority=cls._TEST_AUTHORITY, - extra_headers={cls._X_HOST_HEADER_KEY: cls._TEST_X_HOST}) - participant = Participant(name=cls._TEST_RECEIVER_NAME, - domain_name='fl-test.com', - grpc_spec=grpc_spec) - project_config = Project(name=cls._TEST_PROJECT_NAME, - token='test-auth-token', - participants=[participant], - variables=[{ - 'name': 'EGRESS_URL', - 'value': cls._TEST_URL - }]) - job = Job(name='test-job') - - cls._participant = participant - cls._project_config = project_config - cls._project = ProjectModel(name=cls._TEST_PROJECT_NAME) - cls._project.set_config(project_config) - cls._job = job - - def setUp(self): - self._client_execution_thread_pool = logging_pool.pool(1) - - # Builds a testing channel - self._fake_channel = grpc_testing.channel( - DESCRIPTOR.services_by_name.values(), - grpc_testing.strict_real_time()) - self._build_channel_patcher = patch( - 'fedlearner_webconsole.rpc.client._build_channel') - self._mock_build_channel = self._build_channel_patcher.start() - self._mock_build_channel.return_value = self._fake_channel - self._client = RpcClient(self._project_config, self._participant) - - self._mock_build_channel.assert_called_once_with( - self._TEST_URL, self._TEST_AUTHORITY) - - def tearDown(self): - self._build_channel_patcher.stop() - self._client_execution_thread_pool.shutdown(wait=False) - - def test_check_connection(self): - call = self._client_execution_thread_pool.submit( - self._client.check_connection) - - invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( - TARGET_SERVICE.methods_by_name['CheckConnection']) - - self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), - invocation_metadata) - self.assertEqual( - request, - CheckConnectionRequest(auth_info=ProjAuthInfo( - project_name=self._project_config.name, - target_domain=self._participant.domain_name, - auth_token=self._project_config.token))) - - expected_status = Status(code=FedLearnerStatusCode.STATUS_SUCCESS, - msg='test') - rpc.terminate(response=CheckConnectionResponse(status=expected_status), - code=StatusCode.OK, - trailing_metadata=(), - details=None) - self.assertEqual(call.result().status, expected_status) - - def test_check_job_ready(self): - call = 
self._client_execution_thread_pool.submit( - self._client.check_job_ready, self._job.name) - - invocation_metadata, request, rpc = self._fake_channel.take_unary_unary( - TARGET_SERVICE.methods_by_name['CheckJobReady']) - - self.assertIn((self._X_HOST_HEADER_KEY, self._TEST_X_HOST), - invocation_metadata) - self.assertEqual( - request, - CheckJobReadyRequest( - job_name=self._job.name, - auth_info=ProjAuthInfo( - project_name=self._project_config.name, - target_domain=self._participant.domain_name, - auth_token=self._project_config.token))) - - expected_status = Status(code=FedLearnerStatusCode.STATUS_SUCCESS, - msg='test') - rpc.terminate(response=CheckJobReadyResponse(status=expected_status), - code=StatusCode.OK, - trailing_metadata=(), - details=None) - self.assertEqual(call.result().status, expected_status) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/scheduler/scheduler_func_test.py b/web_console_v2/api/test/fedlearner_webconsole/scheduler/scheduler_func_test.py deleted file mode 100644 index a9c0d1d6a..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/scheduler/scheduler_func_test.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import unittest -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db -from fedlearner_webconsole.job.models import JobState, Job, JobType -from fedlearner_webconsole.scheduler.scheduler import _get_waiting_jobs - - -class SchedulerFuncTestCase(BaseTestCase): - def test_get_waiting_jobs(self): - db.session.add(Job(name='testtes', state=JobState.STOPPED, - job_type=JobType.DATA_JOIN, - workflow_id=1, - project_id=1)) - db.session.add(Job(name='testtest', state=JobState.WAITING, - job_type=JobType.DATA_JOIN, - workflow_id=1, - project_id=1)) - db.session.commit() - self.assertEqual(_get_waiting_jobs(), [2]) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/scheduler/scheduler_test.py b/web_console_v2/api/test/fedlearner_webconsole/scheduler/scheduler_test.py deleted file mode 100644 index 95caafac4..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/scheduler/scheduler_test.py +++ /dev/null @@ -1,240 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
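The RPC tests above repeat the same take/terminate dance against the `grpc_testing` channel: submit the client call on a worker thread, capture the queued invocation, then answer it. Factored out (a hypothetical helper built only from calls already used in the tests), it reads:

```python
from grpc import StatusCode


def serve_one_unary_unary(fake_channel, method_descriptor, response):
    """Answer the next queued unary-unary invocation on a grpc_testing channel.

    Returns the captured (invocation_metadata, request) so the caller can
    assert on headers and payload, mirroring the pairs in the tests above.
    """
    invocation_metadata, request, rpc = fake_channel.take_unary_unary(
        method_descriptor)
    rpc.terminate(response=response,
                  code=StatusCode.OK,
                  trailing_metadata=(),
                  details=None)
    return invocation_metadata, request
```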
- -# coding: utf-8 - -import os -import time -import copy -import unittest -import secrets -import logging -from http import HTTPStatus - -from envs import Envs -from testing.common import BaseTestCase -from fedlearner_webconsole.proto.common_pb2 import CreateJobFlag -from fedlearner_webconsole.job.models import Job - -from testing.common import multi_process_test - -ROLE = os.environ.get('TEST_ROLE', 'leader') - - -class LeaderConfig(object): - SQLALCHEMY_DATABASE_URI = f'sqlite:///{Envs.BASE_DIR}/leader.db' - SQLALCHEMY_TRACK_MODIFICATIONS = False - JWT_SECRET_KEY = secrets.token_urlsafe(64) - PROPAGATE_EXCEPTIONS = True - LOGGING_LEVEL = logging.DEBUG - GRPC_LISTEN_PORT = 3990 - START_COMPOSER = False - - -class FollowerConfig(object): - SQLALCHEMY_DATABASE_URI = f'sqlite:///{Envs.BASE_DIR}/follower.db' - SQLALCHEMY_TRACK_MODIFICATIONS = False - JWT_SECRET_KEY = secrets.token_urlsafe(64) - PROPAGATE_EXCEPTIONS = True - LOGGING_LEVEL = logging.DEBUG - GRPC_LISTEN_PORT = 4990 - START_COMPOSER = False - - -class WorkflowTest(BaseTestCase): - class Config(LeaderConfig): - pass - - @classmethod - def setUpClass(self): - os.environ['FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL'] = '1' - - def setUp(self): - super().setUp() - self._wf_template = { - 'group_alias': - 'test-template', - 'job_definitions': [{ - 'is_federated': True, - 'name': - 'job1', - 'variables': [{ - 'name': 'x', - 'value': '1', - 'access_mode': 3 - }] - }, { - 'is_federated': True, - 'name': - 'job2', - 'variables': [{ - 'name': 'y', - 'value': '2', - 'access_mode': 2 - }] - }] - } - - def leader_test_workflow(self): - self.setup_project('leader', FollowerConfig.GRPC_LISTEN_PORT) - cwf_resp = self.post_helper('/api/v2/workflows', - data={ - 'name': 'test-workflow', - 'project_id': 1, - 'forkable': True, - 'config': self._wf_template, - }) - self.assertEqual(cwf_resp.status_code, HTTPStatus.CREATED) - - self._check_workflow_state(1, 'READY', 'INVALID', 'READY') - - # test update - patch_config = copy.deepcopy(self._wf_template) - patch_config['job_definitions'][1]['variables'][0]['value'] = '4' - resp = self.patch_helper('/api/v2/workflows/1', - data={ - 'config': patch_config, - }) - self.assertEqual(resp.status_code, HTTPStatus.OK) - - resp = self.get_helper('/api/v2/workflows/1') - self.assertEqual(resp.status_code, HTTPStatus.OK) - ret_wf = resp.json['data']['config'] - self.assertEqual(ret_wf['job_definitions'][1]['variables'][0]['value'], - '4') - - # test update remote - patch_config['job_definitions'][0]['variables'][0]['value'] = '5' - resp = self.patch_helper('/api/v2/workflows/1/peer_workflows', - data={ - 'config': patch_config, - }) - self.assertEqual(resp.status_code, HTTPStatus.OK) - - resp = self.get_helper('/api/v2/workflows/1/peer_workflows') - self.assertEqual(resp.status_code, HTTPStatus.OK) - ret_wf = list(resp.json['data'].values())[0]['config'] - self.assertEqual(ret_wf['job_definitions'][0]['variables'][0]['value'], - '5') - - # test fork - cwf_resp = self.post_helper('/api/v2/workflows', - data={ - 'name': - 'test-workflow2', - 'project_id': - 1, - 'forkable': - True, - 'forked_from': - 1, - 'create_job_flags': [ - CreateJobFlag.REUSE, - CreateJobFlag.NEW, - ], - 'peer_create_job_flags': [ - CreateJobFlag.NEW, - CreateJobFlag.REUSE, - ], - 'config': - self._wf_template, - 'fork_proposal_config': { - 'job_definitions': [{ - 'variables': [{ - 'name': 'x', - 'value': '2' - }] - }, { - 'variables': [{ - 'name': 'y', - 'value': '3' - }] - }] - } - }) - - self.assertEqual(cwf_resp.status_code, 
HTTPStatus.CREATED) - self._check_workflow_state(2, 'READY', 'INVALID', 'READY') - - resp = self.patch_helper('/api/v2/workflows/2', - data={ - 'state': 'INVALID', - }) - self._check_workflow_state(2, 'INVALID', 'INVALID', 'READY') - - def follower_test_workflow(self): - self.setup_project('follower', LeaderConfig.GRPC_LISTEN_PORT) - self._check_workflow_state(1, 'NEW', 'READY', 'PARTICIPANT_PREPARE') - - self.put_helper('/api/v2/workflows/1', - data={ - 'forkable': True, - 'config': self._wf_template, - }) - self._check_workflow_state(1, 'READY', 'INVALID', 'READY') - self.assertEqual(len(Job.query.filter(Job.workflow_id == 1).all()), 2) - - # test fork - json = self._check_workflow_state(2, 'READY', 'INVALID', 'READY') - self.assertEqual(len(Job.query.all()), 3) - self.assertEqual(json['data']['create_job_flags'], [ - CreateJobFlag.NEW, - CreateJobFlag.REUSE, - ]) - self.assertEqual(json['data']['peer_create_job_flags'], [ - CreateJobFlag.REUSE, - CreateJobFlag.NEW, - ]) - jobs = json['data']['config']['job_definitions'] - self.assertEqual(jobs[0]['variables'][0]['value'], '2') - self.assertEqual(jobs[1]['variables'][0]['value'], '2') - - resp = self.patch_helper('/api/v2/workflows/2', - data={ - 'state': 'INVALID', - }) - self._check_workflow_state(2, 'INVALID', 'INVALID', 'READY') - - def _check_workflow_state(self, - workflow_id, - state, - target_state, - transaction_state, - max_retries=10): - cnt = 0 - while True: - time.sleep(1) - cnt = cnt + 1 - if cnt > max_retries: - self.fail(f'workflow [{workflow_id}] state is unexpected') - resp = self.get_helper(f'/api/v2/workflows/{workflow_id}') - if resp.status_code != HTTPStatus.OK: - continue - if resp.json['data']['state'] == state and \ - resp.json['data']['target_state'] == target_state and \ - resp.json['data']['transaction_state'] == transaction_state: - return resp.json - - -if __name__ == '__main__': - multi_process_test([{ - 'class': WorkflowTest, - 'method': 'leader_test_workflow', - 'config': LeaderConfig - }, { - 'class': WorkflowTest, - 'method': 'follower_test_workflow', - 'config': FollowerConfig - }]) - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/scheduler/workflow_commit_test.py b/web_console_v2/api/test/fedlearner_webconsole/scheduler/workflow_commit_test.py deleted file mode 100644 index b94468c09..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/scheduler/workflow_commit_test.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
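The fork assertions in the leader and follower halves above encode a single invariant: each side's `peer_create_job_flags` must equal the other side's own `create_job_flags`. Spelled out as a checker (hypothetical, not in the deleted file), over the workflow dicts the API returns:

```python
def assert_fork_flags_mirrored(leader_wf: dict, follower_wf: dict):
    """Check the invariant the two test processes assert from opposite ends."""
    assert follower_wf['create_job_flags'] == leader_wf['peer_create_job_flags']
    assert follower_wf['peer_create_job_flags'] == leader_wf['create_job_flags']
```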
- -# coding: utf-8 -import os -import time -import unittest -from google.protobuf.json_format import ParseDict -from unittest.mock import patch -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db -from fedlearner_webconsole.job.models import JobState -from fedlearner_webconsole.project.models import Project -from fedlearner_webconsole.workflow.models import Workflow, WorkflowState -from fedlearner_webconsole.scheduler.transaction import TransactionState -from fedlearner_webconsole.scheduler.scheduler import \ - scheduler -from fedlearner_webconsole.proto import project_pb2 -from workflow_template_test import make_workflow_template - - -class WorkflowsCommitTest(BaseTestCase): - class Config(BaseTestCase.Config): - START_GRPC_SERVER = False - START_SCHEDULER = True - - @classmethod - def setUpClass(self): - os.environ['FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL'] = '1' - - def setUp(self): - super().setUp() - # Inserts project - config = { - 'participants': [{ - 'name': 'party_leader', - 'url': '127.0.0.1:5000', - 'domain_name': 'fl-leader.com', - 'grpc_spec': { - 'authority': 'fl-leader.com' - } - }], - 'variables': [{ - 'name': 'namespace', - 'value': 'leader' - }, { - 'name': 'basic_envs', - 'value': '{}' - }, { - 'name': 'storage_root_dir', - 'value': '/' - }, { - 'name': 'EGRESS_URL', - 'value': '127.0.0.1:1991' - }] - } - project = Project( - name='test', - config=ParseDict(config, - project_pb2.Project()).SerializeToString()) - db.session.add(project) - db.session.commit() - - @staticmethod - def _wait_until(cond, retry_times: int = 5): - for _ in range(retry_times): - time.sleep(5) - db.session.expire_all() - if cond(): - return - - def test_workflow_commit(self): - # test the committing stage for workflow creating - workflow_def = make_workflow_template() - workflow = Workflow( - id=20, - name='job_test1', - comment='这是一个测试工作流', - config=workflow_def.SerializeToString(), - project_id=1, - forkable=True, - state=WorkflowState.NEW, - target_state=WorkflowState.READY, - transaction_state=TransactionState.PARTICIPANT_COMMITTING) - db.session.add(workflow) - db.session.commit() - scheduler.wakeup(20) - self._wait_until( - lambda: Workflow.query.get(20).state == WorkflowState.READY) - workflow = Workflow.query.get(20) - self.assertEqual(len(workflow.get_jobs()), 2) - self.assertEqual(workflow.get_jobs()[0].state, JobState.NEW) - self.assertEqual(workflow.get_jobs()[1].state, JobState.NEW) - - # test the committing stage for workflow running - workflow.target_state = WorkflowState.RUNNING - workflow.transaction_state = TransactionState.PARTICIPANT_COMMITTING - db.session.commit() - scheduler.wakeup(20) - self._wait_until( - lambda: Workflow.query.get(20).state == WorkflowState.RUNNING) - workflow = Workflow.query.get(20) - self._wait_until( - lambda: workflow.get_jobs()[0].state == JobState.STARTED) - self.assertEqual(workflow.get_jobs()[1].state, JobState.WAITING) - workflow = Workflow.query.get(20) - for job in workflow.owned_jobs: - job.state = JobState.COMPLETED - self.assertEqual(workflow.to_dict()['state'], 'COMPLETED') - workflow.get_jobs()[0].state = JobState.FAILED - self.assertEqual(workflow.to_dict()['state'], 'FAILED') - # test the committing stage for workflow stopping - workflow.target_state = WorkflowState.STOPPED - workflow.transaction_state = TransactionState.PARTICIPANT_COMMITTING - for job in workflow.owned_jobs: - job.state = JobState.STARTED - db.session.commit() - scheduler.wakeup(20) - self._wait_until( - lambda: 
Workflow.query.get(20).state == WorkflowState.STOPPED) - workflow = Workflow.query.get(20) - self._wait_until( - lambda: workflow.get_jobs()[0].state == JobState.STOPPED) - self.assertEqual(workflow.get_jobs()[1].state, JobState.STOPPED) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/scheduler/workflow_template_test.py b/web_console_v2/api/test/fedlearner_webconsole/scheduler/workflow_template_test.py deleted file mode 100644 index 7b041d3a7..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/scheduler/workflow_template_test.py +++ /dev/null @@ -1,738 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -from google.protobuf.json_format import MessageToDict -from fedlearner_webconsole.proto.workflow_definition_pb2 import ( - WorkflowDefinition, JobDefinition, JobDependency -) -from fedlearner_webconsole.proto.common_pb2 import ( - Variable -) - - -def make_workflow_template(): - workflow = WorkflowDefinition( - group_alias='test_template', - is_left=True, - variables=[ - Variable( - name='image_version', - value='v1.5-rc3', - access_mode=Variable.PEER_READABLE), - Variable( - name='num_partitions', - value='4', - access_mode=Variable.PEER_WRITABLE), - ], - job_definitions=[ - JobDefinition( - name='raw_data_job', - job_type=JobDefinition.RAW_DATA, - is_federated=False, - variables=[ - Variable( - name='input_dir', - value='/app/deploy/integrated_test/tfrecord_raw_data', - access_mode=Variable.PRIVATE), - Variable( - name='file_wildcard', - value='*.rd', - access_mode=Variable.PRIVATE), - Variable( - name='batch_size', - value='1024', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='input_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='output_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='master_cpu', - value='2', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='master_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_cpu', - value='2', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - ], - yaml_template='''{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.raw_data_job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "cleanPodPolicy": "All", - "flReplicaSpecs": { - "Master": { - "pair": false, - "replicas": 1, - "template": { - "spec": { - "containers": [ - { - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_master.sh" - ], - "env": [ - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - ${system.basic_envs}, - ${project.variables.basic_envs}, - { - "name": "APPLICATION_ID", - "value": 
"${workflow.jobs.raw_data_job.name}" - }, - { - "name": "DATA_PORTAL_NAME", - "value": "${workflow.jobs.raw_data_job.name}" - }, - { - "name": "OUTPUT_PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "INPUT_BASE_DIR", - "value": "${workflow.jobs.raw_data_job.variables.input_dir}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/raw_data/${workflow.jobs.raw_data_job.name}" - }, - { - "name": "RAW_DATA_PUBLISH_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw_data_job.name}" - }, - { - "name": "DATA_PORTAL_TYPE", - "value": "Streaming" - }, - { - "name": "FILE_WILDCARD", - "value": "${workflow.jobs.raw_data_job.variables.file_wildcard}" - } - ], - "image": "hub.docker.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw_data_job.variables.master_cpu}", - "memory": "${workflow.jobs.raw_data_job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw_data_job.variables.master_cpu}", - "memory": "${workflow.jobs.raw_data_job.variables.master_mem}" - } - }, - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ] - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "restartPolicy": "Never", - "volumes": [ - { - "name": "data", - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - } - } - ] - } - } - }, - "Worker": { - "pair": false, - "replicas": ${workflow.variables.num_partitions}, - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_worker.sh" - ], - "env": [ - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - ${system.basic_envs}, - ${project.variables.basic_envs}, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "limits.memory" - } - } - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw_data_job.name}" - }, - { - "name": "BATCH_SIZE", - "value": "${workflow.jobs.raw_data_job.variables.batch_size}" - }, - { - "name": "INPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw_data_job.variables.input_format}" - }, - { - "name": "COMPRESSED_TYPE" - }, - { - "name": "OUTPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw_data_job.variables.output_format}" - } - ], - "image": "hub.docker.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw_data_job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw_data_job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw_data_job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw_data_job.variables.worker_mem}" - } - }, - "volumeMounts": [ - { - 
"mountPath": "/data", - "name": "data" - } - ] - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "restartPolicy": "Never", - "volumes": [ - { - "name": "data", - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - } - } - ] - } - } - } - }, - "peerSpecs": { - "Leader": { - "peerURL": "" - } - }, - "role": "Follower" - } -} - ''' - ), - JobDefinition( - name='data_join_job', - job_type=JobDefinition.DATA_JOIN, - is_federated=True, - variables=[ - Variable( - name='master_cpu', - value='2', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='master_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_cpu', - value='2', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='role', - value='Follower', - access_mode=Variable.PEER_WRITABLE), - ], - dependencies=[ - JobDependency(source='raw_data_job') - ], - yaml_template=''' -{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.data_join_job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "cleanPodPolicy": "All", - "flReplicaSpecs": { - "Master": { - "pair": true, - "replicas": 1, - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/app/deploy/scripts/data_join/run_data_join_master.sh" - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "env": [ - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - ${system.basic_envs}, - ${project.variables.basic_envs}, - { - "name": "ROLE", - "value": "${workflow.jobs.data_join_job.variables.role}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data_join_job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data_join_job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "limits.memory" - } - } - }, - { - "name": "BATCH_MODE", - "value": "--batch_mode" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.jobs.raw_data_job.variables.num_partitions}" - }, - { - "name": "START_TIME", - "value": "0" - }, - { - "name": "END_TIME", - "value": "999999999999" - }, - { - "name": "NEGATIVE_SAMPLING_RATE", - "value": "1.0" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.data_join_job.name}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.data_join_job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.jobs.raw_data_job.variables.num_partitions}" - } - ], - "image": "hub.docker.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "resources": { - "limits": { - "cpu": 
"${workflow.jobs.data_join_job.variables.master_cpu}", - "memory": "${workflow.jobs.data_join_job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data_join_job.variables.master_cpu}", - "memory": "${workflow.jobs.data_join_job.variables.master_mem}" - } - }, - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ] - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "restartPolicy": "Never", - "volumes": [ - { - "name": "data", - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - } - } - ] - } - } - }, - "Worker": { - "pair": true, - "replicas": ${workflow.jobs.raw_data_job.variables.num_partitions}, - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/app/deploy/scripts/data_join/run_data_join_worker.sh" - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "env": [ - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - ${system.basic_envs}, - ${project.variables.basic_envs}, - { - "name": "ROLE", - "value": "${workflow.jobs.data_join_job.variables.role}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data_join_job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data_join_job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "divisor": "0", - "resource": "limits.memory" - } - } - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.jobs.raw_data_job.variables.num_partitions}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.data_join_job.name}" - }, - { - "name": "DATA_BLOCK_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "DATA_BLOCK_DUMP_THRESHOLD", - "value": "65536" - }, - { - "name": "EXAMPLE_ID_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "EXAMPLE_ID_DUMP_THRESHOLD", - "value": "65536" - }, - { - "name": "EXAMPLE_ID_BATCH_SIZE", - "value": "4096" - }, - { - "name": "MAX_FLYING_EXAMPLE_ID", - "value": "307152" - }, - { - "name": "MIN_MATCHING_WINDOW", - "value": "2048" - }, - { - "name": "MAX_MATCHING_WINDOW", - "value": "8192" - }, - { - "name": "RAW_DATA_ITER", - "value": "${workflow.jobs.raw_data_job.variables.output_format}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw_data_job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.jobs.raw_data_job.variables.num_partitions}" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:5b499dd", - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data_join_job.variables.master_cpu}", - "memory": "${workflow.jobs.data_join_job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data_join_job.variables.master_cpu}", - "memory": 
"${workflow.jobs.data_join_job.variables.master_mem}" - } - }, - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ] - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "restartPolicy": "Never", - "volumes": [ - { - "name": "data", - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - } - } - ] - } - } - } - }, - "peerSpecs": { - "Follower": { - "authority": "external.name", - "extraHeaders": { - "x-host": "leader.flapp.operator" - }, - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - } - }, - "role": "Leader" - } -} - ''' - ) - ]) - - return workflow -import json -if __name__ == '__main__': - print(json.dumps(MessageToDict( - make_workflow_template(), - preserving_proto_field_name=True, - including_default_value_fields=True))) diff --git a/web_console_v2/api/test/fedlearner_webconsole/setting/apis_test.py b/web_console_v2/api/test/fedlearner_webconsole/setting/apis_test.py deleted file mode 100644 index 8b1ce5654..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/setting/apis_test.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import logging -import unittest -from http import HTTPStatus -from types import SimpleNamespace -from unittest.mock import patch, MagicMock - -from testing.common import BaseTestCase -from fedlearner_webconsole.setting.apis import _POD_NAMESPACE - - -class SettingsApiTest(BaseTestCase): - class Config(BaseTestCase.Config): - START_GRPC_SERVER = False - START_SCHEDULER = False - - def setUp(self): - super().setUp() - self._deployment = SimpleNamespace( - **{ - 'metadata': - SimpleNamespace(**{ - 'name': 'fedlearner-web-console-v2', - 'namespace': 'testns' - }), - 'spec': - SimpleNamespace( - **{ - 'template': - SimpleNamespace( - **{ - 'spec': - SimpleNamespace( - **{ - 'containers': [ - SimpleNamespace( - **{'image': 'fedlearner:test'}) - ] - }) - }) - }) - }) - self._system_pods = SimpleNamespace( - **{ - 'items': [ - SimpleNamespace( - **{ - 'metadata': - SimpleNamespace( - **{'name': 'fake-fedlearner-web-console-v2-1'}) - }), - SimpleNamespace( - **{ - 'metadata': - SimpleNamespace( - **{'name': 'fake-fedlearner-web-console-v2-2'}) - }), - ] - }) - self._system_pod_log = 'log1\nlog2' - self._mock_k8s_client = MagicMock() - self._mock_k8s_client.get_deployment = MagicMock( - return_value=self._deployment) - self._mock_k8s_client.get_pods = MagicMock( - return_value=self._system_pods) - self._mock_k8s_client.get_pod_log = MagicMock( - return_value=self._system_pod_log) - self.signin_as_admin() - - @patch('fedlearner_webconsole.setting.apis._POD_NAMESPACE', 'testns') - def test_get_settings(self): - with patch('fedlearner_webconsole.setting.apis.k8s_client', - self._mock_k8s_client): - response_data = self.get_response_data( - self.get_helper('/api/v2/settings')) - self.assertEqual(response_data, - {'webconsole_image': 'fedlearner:test'}) - 
self._mock_k8s_client.get_deployment.assert_called_with( - name='fedlearner-web-console-v2', namespace='testns') - - def test_update_image(self): - self._mock_k8s_client.create_or_update_deployment = MagicMock() - with patch('fedlearner_webconsole.setting.apis.k8s_client', - self._mock_k8s_client): - resp = self.patch_helper( - '/api/v2/settings', - data={'webconsole_image': 'test-new-image'}) - self.assertEqual(resp.status_code, HTTPStatus.OK) - _, kwargs = self._mock_k8s_client.create_or_update_deployment.call_args - self.assertEqual(kwargs['spec'].template.spec.containers[0].image, - 'test-new-image') - self.assertEqual(kwargs['name'], self._deployment.metadata.name) - self.assertEqual(kwargs['namespace'], - self._deployment.metadata.namespace) - - def test_get_system_pods(self): - with patch('fedlearner_webconsole.setting.apis.k8s_client', - self._mock_k8s_client): - resp = self.get_helper('/api/v2/system_pods/name') - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(self.get_response_data(resp), [ - 'fake-fedlearner-web-console-v2-1', - 'fake-fedlearner-web-console-v2-2' - ]) - - def test_get_system_pods_log(self): - fake_pod_name = 'fake-fedlearner-web-console-v2-1' - with patch('fedlearner_webconsole.setting.apis.k8s_client', - self._mock_k8s_client): - resp = self.get_helper( - '/api/v2/system_pods/{}/logs?tail_lines={}'.format( - fake_pod_name, 100)) - self.assertEqual(resp.status_code, HTTPStatus.OK) - self.assertEqual(self.get_response_data(resp), ['log1', 'log2']) - self._mock_k8s_client.get_pod_log.assert_called_with( - name=fake_pod_name, namespace=_POD_NAMESPACE, tail_lines=100) - - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/__init__.py b/web_console_v2/api/test/fedlearner_webconsole/sparkapp/__init__.py deleted file mode 100644 index 3e28547fe..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 diff --git a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/apis_test.py b/web_console_v2/api/test/fedlearner_webconsole/sparkapp/apis_test.py deleted file mode 100644 index 711425562..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/apis_test.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os -import unittest -import base64 -from unittest import mock - -from unittest.mock import MagicMock, patch -from os.path import dirname -from fedlearner_webconsole.sparkapp.schema import SparkAppInfo - -from testing.common import BaseTestCase -from envs import Envs - -BASE_DIR = Envs.BASE_DIR - - -class SparkAppApiTest(BaseTestCase): - def setUp(self): - super().setUp() - self._upload_path = os.path.join(BASE_DIR, 'test') - self._upload_path_patcher = patch( - 'fedlearner_webconsole.sparkapp.service.UPLOAD_PATH', - self._upload_path) - self._upload_path_patcher.start() - - def tearDown(self): - self._upload_path_patcher.stop() - super().tearDown() - - @patch( - 'fedlearner_webconsole.sparkapp.service.SparkAppService.submit_sparkapp' - ) - def test_submit_sparkapp(self, mock_submit_sparkapp: MagicMock): - mock_submit_sparkapp.return_value = SparkAppInfo() - tarball_file_path = os.path.join( - BASE_DIR, 'test/fedlearner_webconsole/test_data/sparkapp.tar') - with open(tarball_file_path, 'rb') as f: - files_bin = f.read() - - self.post_helper( - '/api/v2/sparkapps', { - 'name': 'fl-transformer-yaml', - 'files': base64.b64encode(files_bin).decode(), - 'image_url': 'dockerhub.com', - 'driver_config': { - 'cores': 1, - 'memory': '200m', - 'core_limit': '4000m', - }, - 'executor_config': { - 'cores': 1, - 'memory': '200m', - 'instances': 5, - }, - 'command': ['data.csv', 'data.rd'], - 'main_application': '${prefix}/convertor.py' - }).json - - mock_submit_sparkapp.assert_called_once() - _, kwargs = mock_submit_sparkapp.call_args - self.assertEqual(kwargs['config'].name, 'fl-transformer-yaml') - - @patch( - 'fedlearner_webconsole.sparkapp.service.SparkAppService.get_sparkapp_info' - ) - def test_get_sparkapp_info(self, mock_get_sparkapp: MagicMock): - mock_get_sparkapp.return_value = SparkAppInfo() - - self.get_helper('/api/v2/sparkapps/fl-transformer-yaml').json - - mock_get_sparkapp.assert_called_once_with('fl-transformer-yaml') - - @patch( - 'fedlearner_webconsole.sparkapp.service.SparkAppService.delete_sparkapp' - ) - def test_delete_sparkapp(self, mock_delete_sparkapp: MagicMock): - mock_delete_sparkapp.return_value = SparkAppInfo() - resp = self.delete_helper('/api/v2/sparkapps/fl-transformer-yaml').json - mock_delete_sparkapp.assert_called_once_with('fl-transformer-yaml') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/schema_test.py b/web_console_v2/api/test/fedlearner_webconsole/sparkapp/schema_test.py deleted file mode 100644 index 21a0f35ca..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/schema_test.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -# coding: utf-8 -import unittest - -from fedlearner_webconsole.sparkapp.schema import SparkAppConfig, SparkAppInfo, SparkPodConfig - - -class SparkAppSchemaTest(unittest.TestCase): - def test_spark_pod_config(self): - inputs = { - 'cores': 1, - 'memory': '200m', - 'core_limit': '4000m', - 'envs': { - 'HELLO': '1' - } - } - spark_pod_config: SparkPodConfig = SparkPodConfig.from_dict(inputs) - config = spark_pod_config.build_config() - self.assertDictEqual( - config, { - 'cores': 1, - 'memory': '200m', - 'coreLimit': '4000m', - 'env': [{ - 'name': 'HELLO', - 'value': '1' - }] - }) - - def test_sparkapp_config(self): - inputs = { - 'name': 'test', - 'files': bytes(100), - 'image_url': 'dockerhub.com', - 'driver_config': { - 'cores': 1, - 'memory': '200m', - 'core_limit': '4000m', - 'envs': { - 'HELLO': '1' - } - }, - 'executor_config': { - 'cores': 1, - 'memory': '200m', - 'instances': 5, - 'envs': { - 'HELLO': '1' - } - }, - 'command': ['hhh', 'another'], - 'main_application': '${prefix}/main.py' - } - sparkapp_config: SparkAppConfig = SparkAppConfig.from_dict(inputs) - config = sparkapp_config.build_config('./test') - self.assertEqual(config['spec']['mainApplicationFile'], - './test/main.py') - self.assertNotIn('instances', config['spec']['driver']) - - def test_sparkapp_info(self): - resp = { - 'apiVersion': 'sparkoperator.k8s.io/v1beta2', - 'kind': 'SparkApplication', - 'metadata': { - 'creationTimestamp': '2021-05-18T08:59:16Z', - 'generation': 1, - 'name': 'fl-transformer-yaml', - 'namespace': 'fedlearner', - 'resourceVersion': '432649442', - 'selfLink': - '/apis/sparkoperator.k8s.io/v1beta2/namespaces/fedlearner/sparkapplications/fl-transformer-yaml', - 'uid': '52d66d27-b7b7-11eb-b9df-b8599fdb0aac' - }, - 'spec': { - 'arguments': ['data.csv', 'data_tfrecords/'], - 'driver': { - 'coreLimit': '4000m', - 'cores': 1, - 'labels': { - 'version': '3.0.0' - }, - 'memory': '512m', - 'serviceAccount': 'spark', - }, - 'dynamicAllocation': { - 'enabled': False - }, - 'executor': { - 'cores': 1, - 'instances': 1, - 'labels': { - 'version': '3.0.0' - }, - 'memory': '512m', - }, - 'image': 'dockerhub.com', - 'imagePullPolicy': 'Always', - 'mainApplicationFile': 'transformer.py', - 'mode': 'cluster', - 'pythonVersion': '3', - 'restartPolicy': { - 'type': 'Never' - }, - 'sparkConf': { - 'spark.shuffle.service.enabled': 'false' - }, - 'sparkVersion': '3.0.0', - 'type': 'Python', - }, - 'status': { - 'applicationState': { - 'state': 'COMPLETED' - }, - 'driverInfo': { - 'podName': 'fl-transformer-yaml-driver', - 'webUIAddress': '11.249.131.12:4040', - 'webUIPort': 4040, - 'webUIServiceName': 'fl-transformer-yaml-ui-svc' - }, - 'executionAttempts': 1, - 'executorState': { - 'fl-transformer-yaml-bdc15979a314310b-exec-1': 'PENDING', - 'fl-transformer-yaml-bdc15979a314310b-exec-2': 'COMPLETED' - }, - 'lastSubmissionAttemptTime': '2021-05-18T10:31:13Z', - 'sparkApplicationId': 'spark-a380bfd520164d828a334bcb3a6404f9', - 'submissionAttempts': 1, - 'submissionID': '5bc7e2e7-cc0f-420c-8bc7-138b651a1dde', - 'terminationTime': '2021-05-18T10:32:08Z' - } - } - - sparkapp_info = SparkAppInfo.from_k8s_resp(resp) - self.assertTrue(sparkapp_info.namespace, 'fedlearner') - self.assertTrue(sparkapp_info.name, 'fl-transformer-yaml') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/service_test.py b/web_console_v2/api/test/fedlearner_webconsole/sparkapp/service_test.py deleted file mode 100644 index d59b57520..000000000 --- 
a/web_console_v2/api/test/fedlearner_webconsole/sparkapp/service_test.py +++ /dev/null @@ -1,296 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os -import shutil -import tempfile -import unittest - -from unittest.mock import MagicMock, patch -from os.path import dirname - -from envs import Envs -from fedlearner_webconsole.sparkapp.schema import SparkAppConfig -from fedlearner_webconsole.sparkapp.service import SparkAppService - -BASE_DIR = Envs.BASE_DIR - - -class SparkAppServiceTest(unittest.TestCase): - def setUp(self) -> None: - super().setUp() - self._upload_path = os.path.join(BASE_DIR, 'test-spark') - os.makedirs(self._upload_path) - self._patch_upload_path = patch( - 'fedlearner_webconsole.sparkapp.service.UPLOAD_PATH', - self._upload_path) - self._patch_upload_path.start() - self._sparkapp_service = SparkAppService() - - def tearDown(self) -> None: - self._patch_upload_path.stop() - shutil.rmtree(self._upload_path) - return super().tearDown() - - def _get_tar_file_path(self) -> str: - return os.path.join( - BASE_DIR, 'test/fedlearner_webconsole/test_data/sparkapp.tar') - - def test_get_sparkapp_upload_path(self): - existable, sparkapp_path = self._sparkapp_service._get_sparkapp_upload_path( - 'test') - self.assertFalse(existable) - - os.makedirs(sparkapp_path) - existable, _ = self._sparkapp_service._get_sparkapp_upload_path('test') - self.assertTrue(existable) - - def test_copy_files_to_target_filesystem(self): - _, sparkapp_path = self._sparkapp_service._get_sparkapp_upload_path( - 'test') - self._sparkapp_service._clear_and_make_an_empty_dir(sparkapp_path) - files_path = self._get_tar_file_path() - with tempfile.TemporaryDirectory() as temp_dir: - file_name = files_path.rsplit('/', 1)[-1] - temp_file_path = os.path.join(temp_dir, file_name) - shutil.copy(files_path, temp_file_path) - self._sparkapp_service._copy_files_to_target_filesystem( - source_filesystem_path=temp_file_path, - target_filesystem_path=sparkapp_path) - - self.assertTrue( - os.path.exists(os.path.join(sparkapp_path, 'convertor.py'))) - - @patch( - 'fedlearner_webconsole.utils.k8s_client.k8s_client.create_sparkapplication' - ) - def test_submit_sparkapp(self, mock_create_sparkapp: MagicMock): - mock_create_sparkapp.return_value = { - 'apiVersion': 'sparkoperator.k8s.io/v1beta2', - 'kind': 'SparkApplication', - 'metadata': { - 'creationTimestamp': '2021-05-18T08:59:16Z', - 'generation': 1, - 'name': 'fl-transformer-yaml', - 'namespace': 'fedlearner', - 'resourceVersion': '432649442', - 'selfLink': - '/apis/sparkoperator.k8s.io/v1beta2/namespaces/fedlearner/sparkapplications/fl-transformer-yaml', - 'uid': '52d66d27-b7b7-11eb-b9df-b8599fdb0aac' - }, - 'spec': { - 'arguments': ['data.csv', 'data_tfrecords/'], - 'driver': { - 'coreLimit': '4000m', - 'cores': 1, - 'labels': { - 'version': '3.0.0' - }, - 'memory': '512m', - 'serviceAccount': 'spark', - }, - 'dynamicAllocation': { - 'enabled': False - }, - 'executor': { - 
'cores': 1, - 'instances': 1, - 'labels': { - 'version': '3.0.0' - }, - 'memory': '512m', - }, - 'image': 'dockerhub.com', - 'imagePullPolicy': 'Always', - 'mainApplicationFile': 'transformer.py', - 'mode': 'cluster', - 'pythonVersion': '3', - 'restartPolicy': { - 'type': 'Never' - }, - 'sparkConf': { - 'spark.shuffle.service.enabled': 'false' - }, - 'sparkVersion': '3.0.0', - 'type': 'Python', - }, - 'status': { - 'applicationState': { - 'state': 'COMPLETED' - }, - 'driverInfo': { - 'podName': 'fl-transformer-yaml-driver', - 'webUIAddress': '11.249.131.12:4040', - 'webUIPort': 4040, - 'webUIServiceName': 'fl-transformer-yaml-ui-svc' - }, - 'executionAttempts': 1, - 'executorState': { - 'fl-transformer-yaml-bdc15979a314310b-exec-1': 'PENDING', - 'fl-transformer-yaml-bdc15979a314310b-exec-2': 'COMPLETED' - }, - 'lastSubmissionAttemptTime': '2021-05-18T10:31:13Z', - 'sparkApplicationId': 'spark-a380bfd520164d828a334bcb3a6404f9', - 'submissionAttempts': 1, - 'submissionID': '5bc7e2e7-cc0f-420c-8bc7-138b651a1dde', - 'terminationTime': '2021-05-18T10:32:08Z' - } - } - - tarball_file_path = os.path.join( - BASE_DIR, 'test/fedlearner_webconsole/test_data/sparkapp.tar') - with open(tarball_file_path, 'rb') as f: - files_bin = f.read() - - inputs = { - 'name': 'fl-transformer-yaml', - 'files': files_bin, - 'image_url': 'dockerhub.com', - 'driver_config': { - 'cores': 1, - 'memory': '200m', - 'coreLimit': '4000m', - }, - 'executor_config': { - 'cores': 1, - 'memory': '200m', - 'instances': 5, - }, - 'command': ['data.csv', 'data.rd'], - 'main_application': '${prefix}/convertor.py' - } - config = SparkAppConfig.from_dict(inputs) - resp = self._sparkapp_service.submit_sparkapp(config) - - self.assertTrue( - os.path.exists( - os.path.join(self._upload_path, 'sparkapp', - 'fl-transformer-yaml', 'convertor.py'))) - mock_create_sparkapp.assert_called_once() - self.assertTrue(resp.namespace, 'fedlearner') - - @patch( - 'fedlearner_webconsole.utils.k8s_client.k8s_client.get_sparkapplication' - ) - def test_get_sparkapp_info(self, mock_get_sparkapp: MagicMock): - mock_get_sparkapp.return_value = { - 'apiVersion': 'sparkoperator.k8s.io/v1beta2', - 'kind': 'SparkApplication', - 'metadata': { - 'creationTimestamp': '2021-05-18T08:59:16Z', - 'generation': 1, - 'name': 'fl-transformer-yaml', - 'namespace': 'fedlearner', - 'resourceVersion': '432649442', - 'selfLink': - '/apis/sparkoperator.k8s.io/v1beta2/namespaces/fedlearner/sparkapplications/fl-transformer-yaml', - 'uid': '52d66d27-b7b7-11eb-b9df-b8599fdb0aac' - }, - 'spec': { - 'arguments': ['data.csv', 'data_tfrecords/'], - 'driver': { - 'coreLimit': '4000m', - 'cores': 1, - 'labels': { - 'version': '3.0.0' - }, - 'memory': '512m', - 'serviceAccount': 'spark', - }, - 'dynamicAllocation': { - 'enabled': False - }, - 'executor': { - 'cores': 1, - 'instances': 1, - 'labels': { - 'version': '3.0.0' - }, - 'memory': '512m', - }, - 'image': 'dockerhub.com', - 'imagePullPolicy': 'Always', - 'mainApplicationFile': 'transformer.py', - 'mode': 'cluster', - 'pythonVersion': '3', - 'restartPolicy': { - 'type': 'Never' - }, - 'sparkConf': { - 'spark.shuffle.service.enabled': 'false' - }, - 'sparkVersion': '3.0.0', - 'type': 'Python', - }, - 'status': { - 'applicationState': { - 'state': 'COMPLETED' - }, - 'driverInfo': { - 'podName': 'fl-transformer-yaml-driver', - 'webUIAddress': '11.249.131.12:4040', - 'webUIPort': 4040, - 'webUIServiceName': 'fl-transformer-yaml-ui-svc' - }, - 'executionAttempts': 1, - 'executorState': { - 'fl-transformer-yaml-bdc15979a314310b-exec-1': 
'PENDING', - 'fl-transformer-yaml-bdc15979a314310b-exec-2': 'COMPLETED' - }, - 'lastSubmissionAttemptTime': '2021-05-18T10:31:13Z', - 'sparkApplicationId': 'spark-a380bfd520164d828a334bcb3a6404f9', - 'submissionAttempts': 1, - 'submissionID': '5bc7e2e7-cc0f-420c-8bc7-138b651a1dde', - 'terminationTime': '2021-05-18T10:32:08Z' - } - } - - resp = self._sparkapp_service.get_sparkapp_info('fl-transformer-yaml') - - mock_get_sparkapp.assert_called_once() - self.assertTrue(resp.namespace, 'fedlearner') - - @patch( - 'fedlearner_webconsole.sparkapp.service.SparkAppService._get_sparkapp_upload_path' - ) - @patch('fedlearner_webconsole.utils.file_manager.FileManager.remove') - @patch( - 'fedlearner_webconsole.utils.k8s_client.k8s_client.delete_sparkapplication' - ) - def test_delete_sparkapp(self, mock_delete_sparkapp: MagicMock, - mock_file_mananger_remove: MagicMock, - mock_upload_path: MagicMock): - mock_delete_sparkapp.return_value = { - 'kind': 'Status', - 'apiVersion': 'v1', - 'metadata': {}, - 'status': 'Success', - 'details': { - 'name': 'fl-transformer-yaml', - 'group': 'sparkoperator.k8s.io', - 'kind': 'sparkapplications', - 'uid': '52d66d27-b7b7-11eb-b9df-b8599fdb0aac' - } - } - mock_upload_path.return_value = (True, 'test') - resp = self._sparkapp_service.delete_sparkapp( - name='fl-transformer-yaml') - mock_delete_sparkapp.assert_called_once() - mock_file_mananger_remove.assert_called_once() - self.assertTrue(resp.name, 'fl-transformer-yaml') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/code.tar.gz b/web_console_v2/api/test/fedlearner_webconsole/test_data/code.tar.gz deleted file mode 100644 index 6e9b56c57..000000000 Binary files a/web_console_v2/api/test/fedlearner_webconsole/test_data/code.tar.gz and /dev/null differ diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_FEATURES b/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_FEATURES deleted file mode 100644 index c4b56a721..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_FEATURES +++ /dev/null @@ -1 +0,0 @@ -{"f00312": {"count": "12089", "mean": "0.0015716767309123998", "stddev": "0.03961485047808605", "min": "0", "max": "1", "missing_count": "0"}, "f00207": {"count": "12089", "mean": "7.969889982628836", "stddev": "6.298925249171136", "min": "0", "max": "17", "missing_count": "0"}, "f00197": {"count": "12089", "mean": "5.790387955993051E-4", "stddev": "0.02405725220983047", "min": "0", "max": "1", "missing_count": "0"}, "f00021": {"count": "12089", "mean": "22.46943502357515", "stddev": "62.78511338652187", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00380": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00129": {"count": "12089", "mean": "127.65687815369344", "stddev": "110.84802347935845", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00009": {"count": "12089", "mean": "50.4625692778559", "stddev": "88.88581381823657", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00333": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880672", "min": "0", "max": "1", "missing_count": "0"}, "f00036": {"count": "12089", "mean": "40.138555711804116", "stddev": "81.46548446285466", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00000": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00135": {"count": 
"12089", "mean": "26.622549425097194", "stddev": "68.94643988549662", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00213": {"count": "12089", "mean": "7.0170402845562085", "stddev": "6.28331287275781", "min": "0", "max": "17", "missing_count": "0"}, "f00389": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00108": {"count": "12089", "mean": "11.317561419472247", "stddev": "45.850510883253484", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00098": {"count": "12089", "mean": "98.65836711059642", "stddev": "113.51899823220415", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00120": {"count": "12089", "mean": "75.51890148068492", "stddev": "104.405877965072", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00306": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880672", "min": "0", "max": "1", "missing_count": "0"}, "f00015": {"count": "12089", "mean": "146.97046902142444", "stddev": "107.57419770880752", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00234": {"count": "12089", "mean": "7.249234841591529", "stddev": "5.960881019889561", "min": "0", "max": "16", "missing_count": "0"}, "f00228": {"count": "12089", "mean": "0.30962031598974277", "stddev": "0.9148967149961357", "min": "0", "max": "4", "missing_count": "0"}, "f00114": {"count": "12089", "mean": "0.8296798742658615", "stddev": "11.826621946672537", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00296": {"count": "12089", "mean": "0.44412275622466707", "stddev": "1.2207702582037445", "min": "0", "max": "5", "missing_count": "0"}, "f00327": {"count": "12089", "mean": "0.004384150880966168", "stddev": "0.06607035037106973", "min": "0", "max": "1", "missing_count": "0"}, "f00315": {"count": "12089", "mean": "0.01091901728844404", "stddev": "0.1039263478660159", "min": "0", "max": "1", "missing_count": "0"}, "f00201": {"count": "12089", "mean": "1.6287534121929026", "stddev": "2.84809905187513", "min": "0", "max": "9", "missing_count": "0"}, "f00170": {"count": "12089", "mean": "1.573579286955083", "stddev": "16.69196932487125", "min": "0.0", "max": "255.0", "missing_count": "0"}, "raw_id": {"count": "12089", "mean": "6044.0", "stddev": "3489.938036699219", "min": "0", "max": "12088", "missing_count": "0"}, "f00278": {"count": "12089", "mean": "1.6543965588551577E-4", "stddev": "0.012861802735756753", "min": "0", "max": "1", "missing_count": "0"}, "f00117": {"count": "12089", "mean": "37.35991397137894", "stddev": "81.07098158275089", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00191": {"count": "12089", "mean": "22.46099760112499", "stddev": "62.86460287162058", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00216": {"count": "12089", "mean": "2.7689635205558774", "stddev": "4.064744417560437", "min": "0", "max": "13", "missing_count": "0"}, "f00185": {"count": "12089", "mean": "136.2555215485152", "stddev": "109.1495475656237", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00071": {"count": "12089", "mean": "115.2010091819009", "stddev": "113.15610412333592", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00284": {"count": "12089", "mean": "0.018198362147406732", "stddev": "0.13367370667048667", "min": "0", "max": "1", "missing_count": "0"}, "f00299": {"count": "12089", "mean": "0.06650674166597734", "stddev": "0.3520912962783987", "min": "0", "max": "2", "missing_count": "0"}, "f00179": {"count": "12089", "mean": "116.24807676400033", "stddev": "112.10561248326009", 
"min": "0.0", "max": "255.0", "missing_count": "0"}, "f00362": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00309": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00018": {"count": "12089", "mean": "121.69418479609563", "stddev": "110.4481389455011", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00383": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00377": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00086": {"count": "12089", "mean": "0.5124493341053851", "stddev": "9.554020953390804", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00300": {"count": "12089", "mean": "0.018446521631235006", "stddev": "0.13456502272330065", "min": "0", "max": "1", "missing_count": "0"}, "f00290": {"count": "12089", "mean": "0.8305070725452891", "stddev": "1.8063905280636319", "min": "0", "max": "7", "missing_count": "0"}, "f00003": {"count": "12089", "mean": "0.16982380676648193", "stddev": "5.260742308809774", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00092": {"count": "12089", "mean": "64.64372570105054", "stddev": "99.94772315175193", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00102": {"count": "12089", "mean": "124.01604764662089", "stddev": "111.03878118521327", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00344": {"count": "12089", "mean": "3.3087931177103153E-4", "stddev": "0.018187830935519945", "min": "0", "max": "1", "missing_count": "0"}, "f00095": {"count": "12089", "mean": "84.91827280999256", "stddev": "108.35978233028061", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00365": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00068": {"count": "12089", "mean": "101.33484986351229", "stddev": "110.4669075685077", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00272": {"count": "12089", "mean": "0.29133923401439327", "stddev": "0.9105375835637824", "min": "0", "max": "4", "missing_count": "0"}, "f00167": {"count": "12089", "mean": "0.020597237157746712", "stddev": "1.6019271796631058", "min": "0.0", "max": "128.0", "missing_count": "0"}, "f00371": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00194": {"count": "12089", "mean": "0.4916039374638101", "stddev": "8.90504079934277", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00080": {"count": "12089", "mean": "7.424269997518405", "stddev": "37.33719005251688", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00074": {"count": "12089", "mean": "122.3101166349574", "stddev": "111.195396244587", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00089": {"count": "12089", "mean": "24.765985606749936", "stddev": "66.94669354107191", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00293": {"count": "12089", "mean": "1.0765985606749937", "stddev": "2.2088405666718938", "min": "0", "max": "8", "missing_count": "0"}, "f00287": {"count": "12089", "mean": "0.2876995615849119", "stddev": "0.9106955529735481", "min": "0", "max": "4", "missing_count": "0"}, "f00152": {"count": "12089", "mean": "96.18181818181819", "stddev": "112.25217366871706", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00188": {"count": "12089", "mean": "85.78013069732815", "stddev": "106.30064004700931", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00251": 
{"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00350": {"count": "12089", "mean": "0.0010753577632558525", "stddev": "0.03277636700490423", "min": "0", "max": "1", "missing_count": "0"}, "f00173": {"count": "12089", "mean": "52.889486309868474", "stddev": "93.34585106719244", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00053": {"count": "12089", "mean": "2.0326743320373892", "stddev": "19.627575883927236", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00359": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00266": {"count": "12089", "mean": "3.797419141368186", "stddev": "4.630854277554116", "min": "0", "max": "13", "missing_count": "0"}, "f00386": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00353": {"count": "12089", "mean": "8.271982794275787E-4", "stddev": "0.028750346035965654", "min": "0", "max": "1", "missing_count": "0"}, "f00062": {"count": "12089", "mean": "25.966333030027297", "stddev": "68.53716697947962", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00248": {"count": "12089", "mean": "0.030854495822648688", "stddev": "0.1729305341578591", "min": "0", "max": "1", "missing_count": "0"}, "f00275": {"count": "12089", "mean": "0.023078831996029447", "stddev": "0.15016013013366547", "min": "0", "max": "1", "missing_count": "0"}, "f00347": {"count": "12089", "mean": "9.099181073703367E-4", "stddev": "0.030152369101126467", "min": "0", "max": "1", "missing_count": "0"}, "f00326": {"count": "12089", "mean": "0.0076102241707337245", "stddev": "0.08690761437388105", "min": "0", "max": "1", "missing_count": "0"}, "f00374": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00233": {"count": "12089", "mean": "7.51261477376127", "stddev": "6.336175108889156", "min": "0", "max": "17", "missing_count": "0"}, "f00077": {"count": "12089", "mean": "42.78674828356357", "stddev": "84.18417527697498", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00332": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880672", "min": "0", "max": "1", "missing_count": "0"}, "f00155": {"count": "12089", "mean": "109.07047729340724", "stddev": "110.81584969724295", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00035": {"count": "12089", "mean": "24.930846223839854", "stddev": "66.37760252845683", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00161": {"count": "12089", "mean": "69.5351145669617", "stddev": "101.00911072125321", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00056": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00083": {"count": "12089", "mean": "0.059144676979071886", "stddev": "3.061344888196", "min": "0.0", "max": "223.0", "missing_count": "0"}, "f00269": {"count": "12089", "mean": "1.6582016709405245", "stddev": "2.929709427278822", "min": "0", "max": "10", "missing_count": "0"}, "f00134": {"count": "12089", "mean": "43.70609645131938", "stddev": "85.1577461190761", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00182": {"count": "12089", "mean": "112.46033584250145", "stddev": "110.37074173570745", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00041": {"count": "12089", "mean": "122.2917528331541", "stddev": "110.3844621437854", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00368": {"count": "12089", "mean": "0.0", 
"stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00176": {"count": "12089", "mean": "116.19058648358012", "stddev": "111.5950606075992", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00140": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00260": {"count": "12089", "mean": "2.6654810157994873", "stddev": "3.8407366321952625", "min": "0", "max": "12", "missing_count": "0"}, "f00281": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00254": {"count": "12089", "mean": "0.004053271569195136", "stddev": "0.06353877960213003", "min": "0", "max": "1", "missing_count": "0"}, "f00149": {"count": "12089", "mean": "97.22565969062785", "stddev": "110.34982443353475", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00023": {"count": "12089", "mean": "4.017950202663578", "stddev": "26.91164598600779", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00158": {"count": "12089", "mean": "135.2027462982877", "stddev": "109.24927327835356", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00017": {"count": "12089", "mean": "143.10224170733724", "stddev": "107.52214831526439", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00050": {"count": "12089", "mean": "15.526759864339482", "stddev": "52.80830936027394", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00236": {"count": "12089", "mean": "7.231698238067665", "stddev": "5.998910757868576", "min": "0", "max": "16", "missing_count": "0"}, "f00143": {"count": "12089", "mean": "9.34130201009182", "stddev": "41.454427254989774", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00044": {"count": "12089", "mean": "135.7823641326826", "stddev": "109.00787635707023", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00038": {"count": "12089", "mean": "81.47365373480024", "stddev": "104.10037134701264", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00242": {"count": "12089", "mean": "3.188849367193316", "stddev": "4.430056635612022", "min": "0", "max": "13", "missing_count": "0"}, "f00116": {"count": "12089", "mean": "19.636032757051865", "stddev": "60.33937808755006", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00137": {"count": "12089", "mean": "5.4586814459425925", "stddev": "31.331337049123192", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00257": {"count": "12089", "mean": "0.3081313590867731", "stddev": "0.9153993013019526", "min": "0", "max": "4", "missing_count": "0"}, "f00320": {"count": "12089", "mean": "0.06278434940855324", "stddev": "0.3479336134540428", "min": "0", "max": "2", "missing_count": "0"}, "f00164": {"count": "12089", "mean": "13.817271900074449", "stddev": "49.910045947034355", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00215": {"count": "12089", "mean": "3.963685995533129", "stddev": "4.840533295725966", "min": "0", "max": "14", "missing_count": "0"}, "f00308": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00122": {"count": "12089", "mean": "82.65232856315659", "stddev": "108.00734853519677", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00341": {"count": "12089", "mean": "1.6543965588551577E-4", "stddev": "0.012861802735756756", "min": "0", "max": "1", "missing_count": "0"}, "f00335": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00329": {"count": "12089", "mean": "9.099181073703367E-4", "stddev": 
"0.03015236910112646", "min": "0", "max": "1", "missing_count": "0"}, "f00221": {"count": "12089", "mean": "0.018363801803292248", "stddev": "0.1342686257655253", "min": "0", "max": "1", "missing_count": "0"}, "f00356": {"count": "12089", "mean": "2.4815948382827364E-4", "stddev": "0.015751775297162093", "min": "0", "max": "1", "missing_count": "0"}, "f00059": {"count": "12089", "mean": "1.670609645131938", "stddev": "16.617605185022406", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00065": {"count": "12089", "mean": "67.83968897344694", "stddev": "99.88982320561412", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00314": {"count": "12089", "mean": "0.006783025891306146", "stddev": "0.08208272523502139", "min": "0", "max": "1", "missing_count": "0"}, "f00263": {"count": "12089", "mean": "4.683017619323352", "stddev": "5.112106252543775", "min": "0", "max": "14", "missing_count": "0"}, "f00323": {"count": "12089", "mean": "0.021672594921002566", "stddev": "0.14561815681841794", "min": "0", "max": "1", "missing_count": "0"}, "f00203": {"count": "12089", "mean": "5.274712548597899", "stddev": "5.491439999810407", "min": "0", "max": "15", "missing_count": "0"}, "f00026": {"count": "12089", "mean": "0.39308462238398545", "stddev": "7.996566426227542", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00391": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00125": {"count": "12089", "mean": "91.59459012325254", "stddev": "112.05550695302972", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00032": {"count": "12089", "mean": "2.2057242120936387", "stddev": "19.17599219583217", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00193": {"count": "12089", "mean": "3.351311109272893", "stddev": "23.793676933625665", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00047": {"count": "12089", "mean": "93.57473736454628", "stddev": "107.85198969607525", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00110": {"count": "12089", "mean": "0.9364711721399619", "stddev": "12.433034422804973", "min": "0.0", "max": "253.0", "missing_count": "0"}, "f00245": {"count": "12089", "mean": "0.6315658863429564", "stddev": "1.5311403179365195", "min": "0", "max": "6", "missing_count": "0"}, "f00146": {"count": "12089", "mean": "69.49028042021672", "stddev": "101.90158284684969", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00005": {"count": "12089", "mean": "2.562742989494582", "stddev": "20.401310971721", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00104": {"count": "12089", "mean": "81.10786665563735", "stddev": "104.36877459445203", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00218": {"count": "12089", "mean": "0.8396889734469353", "stddev": "1.8901700948075482", "min": "0", "max": "7", "missing_count": "0"}, "f00131": {"count": "12089", "mean": "116.78550748614443", "stddev": "109.83853395976982", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00317": {"count": "12089", "mean": "0.02109355612540326", "stddev": "0.1437022136462013", "min": "0", "max": "1", "missing_count": "0"}, "f00224": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00230": {"count": "12089", "mean": "2.335842501447597", "stddev": "3.590911567257245", "min": "0", "max": "11", "missing_count": "0"}, "f00239": {"count": "12089", "mean": "5.6979071883530485", "stddev": "5.581541304147346", "min": "0", "max": "15", "missing_count": "0"}, "f00094": {"count": 
"12089", "mean": "77.82893539581438", "stddev": "105.52048530589909", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00119": {"count": "12089", "mean": "68.77905533956489", "stddev": "101.61313020983097", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00302": {"count": "12089", "mean": "0.006038547439821325", "stddev": "0.0774763184270929", "min": "0", "max": "1", "missing_count": "0"}, "f00292": {"count": "12089", "mean": "1.0894201340061214", "stddev": "2.213914558722607", "min": "0", "max": "8", "missing_count": "0"}, "f00011": {"count": "12089", "mean": "100.26718504425511", "stddev": "109.47690624367854", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00338": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00020": {"count": "12089", "mean": "48.47075854082224", "stddev": "87.6721375293745", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00175": {"count": "12089", "mean": "100.43262470014062", "stddev": "110.3676589106749", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00206": {"count": "12089", "mean": "8.096451319381256", "stddev": "6.449682666545672", "min": "0", "max": "18", "missing_count": "0"}, "f00029": {"count": "12089", "mean": "0.01869468111506328", "stddev": "2.05547997606503", "min": "0.0", "max": "226.0", "missing_count": "0"}, "f00274": {"count": "12089", "mean": "0.07279344858962693", "stddev": "0.3603832071918777", "min": "0", "max": "2", "missing_count": "0"}, "f00128": {"count": "12089", "mean": "114.94449499545041", "stddev": "112.13157547309869", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00373": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00196": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00295": {"count": "12089", "mean": "0.6386797915460336", "stddev": "1.546000832656912", "min": "0", "max": "6", "missing_count": "0"}, "f00305": {"count": "12089", "mean": "4.135991397137894E-4", "stddev": "0.020333771833917866", "min": "0", "max": "1", "missing_count": "0"}, "f00289": {"count": "12089", "mean": "0.6302423690958723", "stddev": "1.4983772214095004", "min": "0", "max": "6", "missing_count": "0"}, "f00212": {"count": "12089", "mean": "6.817271900074448", "stddev": "5.974489070884012", "min": "0", "max": "16", "missing_count": "0"}, "f00181": {"count": "12089", "mean": "109.2416246174208", "stddev": "110.54913441390448", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00076": {"count": "12089", "mean": "70.24402349243114", "stddev": "100.58430391863487", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00280": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00014": {"count": "12089", "mean": "140.2613946562991", "stddev": "109.92329691158204", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00388": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00227": {"count": "12089", "mean": "0.07750847878236414", "stddev": "0.36544744196793927", "min": "0", "max": "2", "missing_count": "0"}, "f00082": {"count": "12089", "mean": "0.8301761932335181", "stddev": "11.676183114704884", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00008": {"count": "12089", "mean": "28.923153279841177", "stddev": "70.18855997812747", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00113": {"count": "12089", "mean": "0.020928116469517744", "stddev": 
"1.8185770446835063", "min": "0.0", "max": "197.0", "missing_count": "0"}, "f00107": {"count": "12089", "mean": "20.359665811895113", "stddev": "60.86079945589884", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00097": {"count": "12089", "mean": "95.71230043841508", "stddev": "112.9203359237509", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00311": {"count": "12089", "mean": "3.3087931177103153E-4", "stddev": "0.018187830935519955", "min": "0", "max": "1", "missing_count": "0"}, "f00184": {"count": "12089", "mean": "132.06774753908513", "stddev": "109.61468807022078", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00079": {"count": "12089", "mean": "13.677309951195301", "stddev": "50.07519014387303", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00101": {"count": "12089", "mean": "122.54561998511043", "stddev": "111.69458255685223", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00002": {"count": "12089", "mean": "0.007527504342790967", "stddev": "0.6438064000663923", "min": "0.0", "max": "69.0", "missing_count": "0"}, "f00070": {"count": "12089", "mean": "111.54016047646621", "stddev": "112.96571914608131", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00064": {"count": "12089", "mean": "53.12217718587145", "stddev": "92.07991097584222", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00209": {"count": "12089", "mean": "7.506162627181736", "stddev": "6.335218435677862", "min": "0", "max": "17", "missing_count": "0"}, "f00283": {"count": "12089", "mean": "0.00703118537513442", "stddev": "0.08356030986125715", "min": "0", "max": "1", "missing_count": "0"}, "f00277": {"count": "12089", "mean": "0.002233435354454463", "stddev": "0.047208383503186485", "min": "0", "max": "1", "missing_count": "0"}, "f00091": {"count": "12089", "mean": "54.057821159731986", "stddev": "93.49125294106308", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00163": {"count": "12089", "mean": "27.330465712631316", "stddev": "69.49360582065684", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00058": {"count": "12089", "mean": "0.32682604020183637", "stddev": "7.428200945373068", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00190": {"count": "12089", "mean": "39.01877740094301", "stddev": "80.64309515978327", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00200": {"count": "12089", "mean": "0.6344610803209529", "stddev": "1.5439106575916022", "min": "0", "max": "6", "missing_count": "0"}, "f00376": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00199": {"count": "12089", "mean": "0.16171726362809166", "stddev": "0.6209476669547512", "min": "0", "max": "3", "missing_count": "0"}, "f00085": {"count": "12089", "mean": "0.02034907767391844", "stddev": "1.4125452537479488", "min": "0.0", "max": "133.0", "missing_count": "0"}, "f00298": {"count": "12089", "mean": "0.09305980643560262", "stddev": "0.38333800268403145", "min": "0", "max": "2", "missing_count": "0"}, "f00157": {"count": "12089", "mean": "135.45338737695425", "stddev": "109.17069463834251", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00361": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00256": {"count": "12089", "mean": "0.09256348746794607", "stddev": "0.3823779459828604", "min": "0", "max": "2", "missing_count": "0"}, "f00262": {"count": "12089", "mean": "4.334270824716684", "stddev": "4.9958968239833395", "min": "0", "max": "14", "missing_count": "0"}, 
"f00355": {"count": "12089", "mean": "4.135991397137894E-4", "stddev": "0.020333771833917845", "min": "0", "max": "1", "missing_count": "0"}, "f00178": {"count": "12089", "mean": "120.58689717925387", "stddev": "112.42989836060832", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00382": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00364": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00358": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880676", "min": "0", "max": "1", "missing_count": "0"}, "f00067": {"count": "12089", "mean": "92.11828935395815", "stddev": "107.880515215011", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00271": {"count": "12089", "mean": "0.4656299114897841", "stddev": "1.2226481099427282", "min": "0", "max": "5", "missing_count": "0"}, "f00286": {"count": "12089", "mean": "0.15824303085449581", "stddev": "0.616900267416377", "min": "0", "max": "3", "missing_count": "0"}, "f00244": {"count": "12089", "mean": "1.0981884357680536", "stddev": "2.2035418049877964", "min": "0", "max": "8", "missing_count": "0"}, "f00379": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00337": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00151": {"count": "12089", "mean": "96.78128877491935", "stddev": "111.93369626697235", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00259": {"count": "12089", "mean": "1.6998097443957316", "stddev": "2.9808512186328775", "min": "0", "max": "10", "missing_count": "0"}, "f00073": {"count": "12089", "mean": "126.48788154520639", "stddev": "110.94063872421407", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00172": {"count": "12089", "mean": "29.05037637521714", "stddev": "72.62815547455598", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00166": {"count": "12089", "mean": "0.6783025891306146", "stddev": "10.775627874334255", "min": "0.0", "max": "253.0", "missing_count": "0"}, "f00385": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00088": {"count": "12089", "mean": "11.935892133344362", "stddev": "47.22589878357941", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00139": {"count": "12089", "mean": "0.04020183638018033", "stddev": "2.305908122408037", "min": "0.0", "max": "153.0", "missing_count": "0"}, "f00343": {"count": "12089", "mean": "2.4815948382827364E-4", "stddev": "0.0157517752971621", "min": "0", "max": "1", "missing_count": "0"}, "f00052": {"count": "12089", "mean": "4.207378608652494", "stddev": "28.074171257801822", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00046": {"count": "12089", "mean": "122.67954338654975", "stddev": "110.99635048312369", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00187": {"count": "12089", "mean": "110.35875589378774", "stddev": "110.55640335116942", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00238": {"count": "12089", "mean": "6.148316651501365", "stddev": "5.678318834411271", "min": "0", "max": "15", "missing_count": "0"}, "f00250": {"count": "12089", "mean": "9.099181073703367E-4", "stddev": "0.030152369101126463", "min": "0", "max": "1", "missing_count": "0"}, "f00370": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00265": {"count": "12089", "mean": "4.134006121267268", "stddev": 
"4.719032514814078", "min": "0", "max": "13", "missing_count": "0"}, "f00145": {"count": "12089", "mean": "48.23409711307801", "stddev": "90.33569256677949", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00028": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00169": {"count": "12089", "mean": "0.061295392505583586", "stddev": "3.047245179854696", "min": "0.0", "max": "185.0", "missing_count": "0"}, "f00232": {"count": "12089", "mean": "5.942426999751841", "stddev": "5.896385905213651", "min": "0", "max": "16", "missing_count": "0"}, "f00352": {"count": "12089", "mean": "9.926379353130945E-4", "stddev": "0.03149181864169439", "min": "0", "max": "1", "missing_count": "0"}, "f00127": {"count": "12089", "mean": "101.6755728348085", "stddev": "112.17723446501321", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00055": {"count": "12089", "mean": "0.05327156919513607", "stddev": "2.648309231570656", "min": "0.0", "max": "190.0", "missing_count": "0"}, "f00310": {"count": "12089", "mean": "1.6543965588551577E-4", "stddev": "0.012861802735756758", "min": "0", "max": "1", "missing_count": "0"}, "f00154": {"count": "12089", "mean": "99.49805608404334", "stddev": "110.42238643156286", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00049": {"count": "12089", "mean": "31.03631400446687", "stddev": "73.15778023500965", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00253": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00367": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00331": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880676", "min": "0", "max": "1", "missing_count": "0"}, "f00325": {"count": "12089", "mean": "0.012573413847299198", "stddev": "0.11142867759187992", "min": "0", "max": "1", "missing_count": "0"}, "f00034": {"count": "12089", "mean": "13.855571180411944", "stddev": "49.713397507452996", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00247": {"count": "12089", "mean": "0.1573331127471255", "stddev": "0.6182713526833413", "min": "0", "max": "3", "missing_count": "0"}, "f00319": {"count": "12089", "mean": "0.029944577715278354", "stddev": "0.17044149441025006", "min": "0", "max": "1", "missing_count": "0"}, "f00160": {"count": "12089", "mean": "94.73463479195964", "stddev": "108.36629716102476", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00346": {"count": "12089", "mean": "8.271982794275787E-4", "stddev": "0.02875034603596567", "min": "0", "max": "1", "missing_count": "0"}, "f00040": {"count": "12089", "mean": "113.14327074199686", "stddev": "109.84226245861849", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00148": {"count": "12089", "mean": "93.87616841756969", "stddev": "109.01563028272538", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00226": {"count": "12089", "mean": "0.008437422450161304", "stddev": "0.09147089407626231", "min": "0", "max": "1", "missing_count": "0"}, "f00061": {"count": "12089", "mean": "14.143188022168914", "stddev": "50.65214934645993", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00268": {"count": "12089", "mean": "2.3289767557283483", "stddev": "3.5853110900424756", "min": "0", "max": "11", "missing_count": "0"}, "f00133": {"count": "12089", "mean": "65.5060799073538", "stddev": "99.12986246813963", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00022": {"count": "12089", "mean": 
"8.979650922326082", "stddev": "39.77712456709073", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00142": {"count": "12089", "mean": "1.3251716436429812", "stddev": "15.47932255385905", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00037": {"count": "12089", "mean": "59.9388700471503", "stddev": "94.72699453234434", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00241": {"count": "12089", "mean": "4.451567540739515", "stddev": "5.214539351483691", "min": "0", "max": "15", "missing_count": "0"}, "f00334": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880672", "min": "0", "max": "1", "missing_count": "0"}, "f00229": {"count": "12089", "mean": "1.086442220200182", "stddev": "2.2141075688107272", "min": "0", "max": "8", "missing_count": "0"}, "f00043": {"count": "12089", "mean": "132.4968152866242", "stddev": "109.93326299918938", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00121": {"count": "12089", "mean": "78.94383323682686", "stddev": "105.7465897141154", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00001": {"count": "12089", "mean": "0.01323517247084126", "stddev": "1.057570263408246", "min": "0.0", "max": "99.0", "missing_count": "0"}, "f00115": {"count": "12089", "mean": "6.736785507486144", "stddev": "34.6562065366746", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00307": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00016": {"count": "12089", "mean": "149.63578459756803", "stddev": "106.31345045889744", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00220": {"count": "12089", "mean": "0.0876002977913806", "stddev": "0.3770193023195932", "min": "0", "max": "2", "missing_count": "0"}, "f00214": {"count": "12089", "mean": "5.902638762511374", "stddev": "5.87517974874268", "min": "0", "max": "16", "missing_count": "0"}, "f00109": {"count": "12089", "mean": "5.193977996525767", "stddev": "30.916493868435143", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00328": {"count": "12089", "mean": "0.0023988750103399784", "stddev": "0.048921553367709744", "min": "0", "max": "1", "missing_count": "0"}, "f00136": {"count": "12089", "mean": "14.2381503846472", "stddev": "50.832117307731565", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00340": {"count": "12089", "mean": "8.271982794275788E-5", "stddev": "0.009095044141880667", "min": "0", "max": "1", "missing_count": "0"}, "f00349": {"count": "12089", "mean": "9.926379353130945E-4", "stddev": "0.031491818641694355", "min": "0", "max": "1", "missing_count": "0"}, "f00100": {"count": "12089", "mean": "111.91397137893954", "stddev": "112.72662611143275", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00235": {"count": "12089", "mean": "7.245016130366449", "stddev": "5.975173808089956", "min": "0", "max": "16", "missing_count": "0"}, "f00313": {"count": "12089", "mean": "0.003556952601538589", "stddev": "0.05953649215335385", "min": "0", "max": "1", "missing_count": "0"}, "f00208": {"count": "12089", "mean": "7.521631235007031", "stddev": "6.319573669900177", "min": "0", "max": "17", "missing_count": "0"}, "f00186": {"count": "12089", "mean": "129.9644304739846", "stddev": "109.8452037535058", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00217": {"count": "12089", "mean": "1.6715195632393085", "stddev": "2.9719709285257014", "min": "0", "max": "10", "missing_count": "0"}, "f00025": {"count": "12089", "mean": "1.1352469186864091", "stddev": "14.074838590340523", "min": 
"0.0", "max": "255.0", "missing_count": "0"}, "f00390": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00384": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00093": {"count": "12089", "mean": "71.79386218876665", "stddev": "103.22560088342621", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00285": {"count": "12089", "mean": "0.06733393994540492", "stddev": "0.35404327285513265", "min": "0", "max": "2", "missing_count": "0"}, "f00103": {"count": "12089", "mean": "108.62867069236496", "stddev": "109.98131124810823", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00202": {"count": "12089", "mean": "3.416080734552072", "stddev": "4.423602482890862", "min": "0", "max": "13", "missing_count": "0"}, "f00192": {"count": "12089", "mean": "11.229299363057326", "stddev": "44.94778055349097", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00031": {"count": "12089", "mean": "0.5988088344776243", "stddev": "9.70173242684006", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00124": {"count": "12089", "mean": "88.7223922574241", "stddev": "111.03041701868655", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00130": {"count": "12089", "mean": "130.69021424435437", "stddev": "110.07541968527855", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00019": {"count": "12089", "mean": "85.66663909339069", "stddev": "105.4276935007043", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00087": {"count": "12089", "mean": "3.809744395731657", "stddev": "25.920786926965643", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00301": {"count": "12089", "mean": "0.009926379353130945", "stddev": "0.09913959537480871", "min": "0", "max": "1", "missing_count": "0"}, "f00010": {"count": "12089", "mean": "76.15460335842502", "stddev": "102.68836546035861", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00004": {"count": "12089", "mean": "0.7785590205972371", "stddev": "10.986462860696165", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00223": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00118": {"count": "12089", "mean": "55.26817768219042", "stddev": "94.35202600920245", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00291": {"count": "12089", "mean": "1.07858383654562", "stddev": "2.198672689278667", "min": "0", "max": "8", "missing_count": "0"}, "f00322": {"count": "12089", "mean": "0.026966663909339068", "stddev": "0.1619926962870867", "min": "0", "max": "1", "missing_count": "0"}, "f00316": {"count": "12089", "mean": "0.015551327653238481", "stddev": "0.12373661690904557", "min": "0", "max": "1", "missing_count": "0"}, "f00013": {"count": "12089", "mean": "131.31838861775168", "stddev": "110.71027810705532", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00189": {"count": "12089", "mean": "61.11332616428158", "stddev": "96.67351803163615", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00195": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00112": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00081": {"count": "12089", "mean": "3.595913640499628", "stddev": "25.670929304588366", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00007": {"count": "12089", "mean": "14.629415170816445", "stddev": "50.59485801451282", "min": "0.0", "max": "255.0", 
"missing_count": "0"}, "f00211": {"count": "12089", "mean": "6.821904210439242", "stddev": "5.978061865979983", "min": "0", "max": "16", "missing_count": "0"}, "f00180": {"count": "12089", "mean": "111.31235007031185", "stddev": "111.11268456059636", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00366": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00075": {"count": "12089", "mean": "100.55488460584002", "stddev": "109.45989244366011", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00069": {"count": "12089", "mean": "107.7127140375548", "stddev": "112.3468540877581", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00288": {"count": "12089", "mean": "0.4475969889982629", "stddev": "1.2037283759312305", "min": "0", "max": "5", "missing_count": "0"}, "f00174": {"count": "12089", "mean": "78.69972702456779", "stddev": "105.22183163444049", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00387": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00205": {"count": "12089", "mean": "8.0", "stddev": "6.331562444720613", "min": "0", "max": "17", "missing_count": "0"}, "f00060": {"count": "12089", "mean": "5.7965919430887585", "stddev": "32.18877971708031", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00304": {"count": "12089", "mean": "0.0015716767309123998", "stddev": "0.03961485047808601", "min": "0", "max": "1", "missing_count": "0"}, "f00273": {"count": "12089", "mean": "0.15973198775746547", "stddev": "0.6187933067584905", "min": "0", "max": "3", "missing_count": "0"}, "f00294": {"count": "12089", "mean": "0.8423360079411035", "stddev": "1.8570588236371768", "min": "0", "max": "7", "missing_count": "0"}, "f00168": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00372": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00267": {"count": "12089", "mean": "3.1091074530564975", "stddev": "4.261348449492594", "min": "0", "max": "13", "missing_count": "0"}, "f00106": {"count": "12089", "mean": "34.45950864422202", "stddev": "76.81328321940498", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00096": {"count": "12089", "mean": "90.9721234179833", "stddev": "111.33220332628122", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00360": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00369": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00183": {"count": "12089", "mean": "120.96525767226404", "stddev": "110.18288051322584", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00141": {"count": "12089", "mean": "0.033005211349160395", "stddev": "1.9509163733029178", "min": "0.0", "max": "166.0", "missing_count": "0"}, "f00090": {"count": "12089", "mean": "40.18032922491521", "stddev": "83.35755041571151", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00354": {"count": "12089", "mean": "5.790387955993051E-4", "stddev": "0.024057252209830484", "min": "0", "max": "1", "missing_count": "0"}, "f00249": {"count": "12089", "mean": "0.009512780213417156", "stddev": "0.09707248168499928", "min": "0", "max": "1", "missing_count": "0"}, "f00084": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0.0", "max": "0.0", "missing_count": "0"}, "f00063": {"count": "12089", "mean": "38.99966912068823", "stddev": "82.02909344030543", "min": 
"0.0", "max": "255.0", "missing_count": "0"}, "f00198": {"count": "12089", "mean": "0.01166349573992886", "stddev": "0.10737044395610035", "min": "0", "max": "1", "missing_count": "0"}, "f00297": {"count": "12089", "mean": "0.18661593183886177", "stddev": "0.6334876659516991", "min": "0", "max": "3", "missing_count": "0"}, "f00156": {"count": "12089", "mean": "123.76226321449252", "stddev": "110.6793588639301", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00255": {"count": "12089", "mean": "0.02291339234014393", "stddev": "0.14963362224724616", "min": "0", "max": "1", "missing_count": "0"}, "f00078": {"count": "12089", "mean": "24.413847299197617", "stddev": "66.22492190582788", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00282": {"count": "12089", "mean": "0.0010753577632558525", "stddev": "0.032776367004904286", "min": "0", "max": "1", "missing_count": "0"}, "f00177": {"count": "12089", "mean": "122.51269749358922", "stddev": "111.89599689676162", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00042": {"count": "12089", "mean": "128.07585408222351", "stddev": "110.366261006918", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00381": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00240": {"count": "12089", "mean": "5.473571014972289", "stddev": "5.788460172878884", "min": "0", "max": "16", "missing_count": "0"}, "f00276": {"count": "12089", "mean": "0.009264620729588882", "stddev": "0.0958099517944918", "min": "0", "max": "1", "missing_count": "0"}, "f00099": {"count": "12089", "mean": "103.48225659690628", "stddev": "113.22365623216328", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00162": {"count": "12089", "mean": "46.30143105302341", "stddev": "87.10377794599654", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00348": {"count": "12089", "mean": "0.0012407974191413682", "stddev": "0.03520455028234003", "min": "0", "max": "1", "missing_count": "0"}, "f00057": {"count": "12089", "mean": "0.01646124576060882", "stddev": "1.2833803824909489", "min": "0.0", "max": "107.0", "missing_count": "0"}, "f00261": {"count": "12089", "mean": "3.9408553230209282", "stddev": "4.861386978193615", "min": "0", "max": "14", "missing_count": "0"}, "f00375": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00039": {"count": "12089", "mean": "99.77177599470593", "stddev": "108.31829419146803", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00237": {"count": "12089", "mean": "6.5093059806435605", "stddev": "5.6578573995671455", "min": "0", "max": "15", "missing_count": "0"}, "f00051": {"count": "12089", "mean": "7.6584498304243525", "stddev": "37.189378357535595", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00243": {"count": "12089", "mean": "2.0367276036065847", "stddev": "3.369183505988024", "min": "0", "max": "11", "missing_count": "0"}, "f00357": {"count": "12089", "mean": "1.6543965588551577E-4", "stddev": "0.012861802735756751", "min": "0", "max": "1", "missing_count": "0"}, "f00150": {"count": "12089", "mean": "97.63123500703118", "stddev": "111.40678708057216", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00270": {"count": "12089", "mean": "0.8702953097857556", "stddev": "1.8930413308456493", "min": "0", "max": "7", "missing_count": "0"}, "f00024": {"count": "12089", "mean": "2.2891885184878817", "stddev": "20.550395266526383", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00165": {"count": "12089", "mean": 
"4.835552982049798", "stddev": "29.151866610566092", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00159": {"count": "12089", "mean": "119.84432128381172", "stddev": "109.38668553919553", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00363": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00258": {"count": "12089", "mean": "0.8446521631235007", "stddev": "1.8386759859743027", "min": "0", "max": "7", "missing_count": "0"}, "f00123": {"count": "12089", "mean": "85.5991397137894", "stddev": "109.8586042350572", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00378": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00321": {"count": "12089", "mean": "0.06253618992472496", "stddev": "0.34809715324227564", "min": "0", "max": "2", "missing_count": "0"}, "f00336": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00342": {"count": "12089", "mean": "1.6543965588551577E-4", "stddev": "0.012861802735756756", "min": "0", "max": "1", "missing_count": "0"}, "f00045": {"count": "12089", "mean": "136.13441972040698", "stddev": "109.50079499209558", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00264": {"count": "12089", "mean": "4.694102076267681", "stddev": "5.135851246319809", "min": "0", "max": "14", "missing_count": "0"}, "f00072": {"count": "12089", "mean": "120.74977252047316", "stddev": "112.17942783302091", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00171": {"count": "12089", "mean": "10.538754239391181", "stddev": "43.87517272978043", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00222": {"count": "12089", "mean": "0.00272975432211101", "stddev": "0.05217784942384286", "min": "0", "max": "1", "missing_count": "0"}, "f00030": {"count": "12089", "mean": "0.10141450905782116", "stddev": "3.6217098742574323", "min": "0.0", "max": "254.0", "missing_count": "0"}, "f00066": {"count": "12089", "mean": "80.45983952353379", "stddev": "104.43754476777885", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00138": {"count": "12089", "mean": "0.6825213003556952", "stddev": "10.372143610013369", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00144": {"count": "12089", "mean": "26.195963272396394", "stddev": "68.96982859941298", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00279": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00153": {"count": "12089", "mean": "96.34485896269335", "stddev": "111.13053348852189", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00012": {"count": "12089", "mean": "118.59442468359666", "stddev": "111.38255919311455", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00111": {"count": "12089", "mean": "0.03391512945653073", "stddev": "1.5822396665547114", "min": "0.0", "max": "102.0", "missing_count": "0"}, "f00027": {"count": "12089", "mean": "0.011001737116386797", "stddev": "0.7069626758862559", "min": "0.0", "max": "52.0", "missing_count": "0"}, "f00033": {"count": "12089", "mean": "6.480602200347423", "stddev": "33.72465553421396", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00147": {"count": "12089", "mean": "85.32881131607246", "stddev": "107.40750641089546", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00246": {"count": "12089", "mean": "0.2963851435189015", "stddev": "0.9128581829116996", "min": "0", "max": "4", "missing_count": "0"}, "f00132": {"count": 
"12089", "mean": "90.81727190007445", "stddev": "107.22421003303963", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00345": {"count": "12089", "mean": "4.135991397137894E-4", "stddev": "0.02033377183391784", "min": "0", "max": "1", "missing_count": "0"}, "f00204": {"count": "12089", "mean": "6.807510960377202", "stddev": "5.992888723745428", "min": "0", "max": "16", "missing_count": "0"}, "f00048": {"count": "12089", "mean": "58.81553478368765", "stddev": "94.13977835736269", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00231": {"count": "12089", "mean": "4.466208950285384", "stddev": "5.257014839826631", "min": "0", "max": "15", "missing_count": "0"}, "f00303": {"count": "12089", "mean": "0.0038051120853668623", "stddev": "0.06157066504311279", "min": "0", "max": "1", "missing_count": "0"}, "f00126": {"count": "12089", "mean": "94.28621060468194", "stddev": "112.25243505297904", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00006": {"count": "12089", "mean": "6.519315079824634", "stddev": "33.39763014346889", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00330": {"count": "12089", "mean": "4.135991397137894E-4", "stddev": "0.02033377183391786", "min": "0", "max": "1", "missing_count": "0"}, "f00339": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00219": {"count": "12089", "mean": "0.3002729754322111", "stddev": "0.913988335427884", "min": "0", "max": "4", "missing_count": "0"}, "f00225": {"count": "12089", "mean": "2.4815948382827364E-4", "stddev": "0.015751775297162093", "min": "0", "max": "1", "missing_count": "0"}, "f00252": {"count": "12089", "mean": "0.0", "stddev": "0.0", "min": "0", "max": "0", "missing_count": "0"}, "f00324": {"count": "12089", "mean": "0.017371163867979156", "stddev": "0.13065534290273315", "min": "0", "max": "1", "missing_count": "0"}, "f00351": {"count": "12089", "mean": "9.926379353130945E-4", "stddev": "0.03149181864169435", "min": "0", "max": "1", "missing_count": "0"}, "f00210": {"count": "12089", "mean": "6.837786417404252", "stddev": "5.996903149368257", "min": "0", "max": "16", "missing_count": "0"}, "f00105": {"count": "12089", "mean": "54.79915625775499", "stddev": "92.83279267076065", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00054": {"count": "12089", "mean": "0.6340474811812391", "stddev": "10.467706738001793", "min": "0.0", "max": "255.0", "missing_count": "0"}, "f00318": {"count": "12089", "mean": "0.025891306146083218", "stddev": "0.15881760877519568", "min": "0", "max": "1", "missing_count": "0"}} \ No newline at end of file diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_HIST b/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_HIST deleted file mode 100644 index 0f7948d5c..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_HIST +++ /dev/null @@ -1 +0,0 @@ -{"f00312": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19]}, "f00207": {"x": [0.0, 1.7, 3.4, 5.1, 6.8, 8.5, 10.2, 11.9, 13.6, 15.299999999999999, 17], "y": [3415, 748, 754, 387, 748, 750, 394, 747, 3098, 1048]}, "f00197": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12082, 0, 0, 0, 0, 0, 0, 0, 0, 7]}, "f00021": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10427, 222, 162, 127, 117, 
128, 110, 117, 132, 547]}, "f00380": {"x": [0, 0], "y": [12089]}, "f00129": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4384, 406, 404, 360, 370, 388, 402, 482, 598, 4295]}, "f00009": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8594, 338, 291, 261, 227, 265, 226, 228, 283, 1376]}, "f00333": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00036": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9249, 339, 243, 199, 187, 209, 167, 185, 224, 1087]}, "f00000": {"x": [0.0, 0.0], "y": [12089]}, "f00135": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10241, 171, 147, 137, 133, 139, 135, 123, 173, 690]}, "f00213": {"x": [0.0, 1.7, 3.4, 5.1, 6.8, 8.5, 10.2, 11.9, 13.6, 15.299999999999999, 17], "y": [4165, 766, 751, 366, 770, 744, 397, 740, 2591, 799]}, "f00389": {"x": [0, 0], "y": [12089]}, "f00108": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11250, 108, 61, 67, 63, 76, 77, 56, 69, 262]}, "f00098": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6319, 316, 277, 240, 225, 246, 208, 264, 399, 3595]}, "f00120": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7339, 319, 340, 269, 231, 263, 256, 279, 326, 2467]}, "f00306": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00015": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3377, 401, 401, 370, 404, 445, 467, 562, 593, 5069]}, "f00234": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [3781, 753, 387, 759, 375, 752, 749, 384, 3120, 1029]}, "f00228": {"x": [0.0, 0.4, 0.8, 1.2000000000000002, 1.6, 2.0, 2.4000000000000004, 2.8000000000000003, 3.2, 3.6, 4], "y": [10609, 0, 346, 0, 0, 380, 0, 379, 0, 375]}, "f00114": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [12015, 13, 6, 11, 11, 9, 5, 3, 1, 15]}, "f00296": {"x": [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5], "y": [10485, 0, 93, 0, 377, 0, 390, 0, 368, 376]}, "f00327": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12036, 0, 0, 0, 0, 0, 0, 0, 0, 53]}, "f00315": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11957, 0, 0, 0, 0, 0, 0, 0, 0, 132]}, "f00201": {"x": [0.0, 0.9, 1.8, 2.7, 3.6, 4.5, 5.4, 6.3, 7.2, 8.1, 9], "y": [8544, 145, 385, 373, 376, 380, 381, 394, 791, 320]}, "f00170": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11961, 16, 9, 17, 15, 16, 7, 13, 9, 26]}, "raw_id": {"x": [0.0, 1208.8, 2417.6, 3626.3999999999996, 4835.2, 6044.0, 7252.799999999999, 8461.6, 9670.4, 10879.199999999999, 12088], "y": [1209, 1209, 1209, 1209, 1208, 1209, 1209, 1209, 1209, 1209]}, "f00278": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 0, 2]}, "f00117": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9624, 207, 204, 128, 135, 162, 156, 178, 208, 1087]}, "f00191": {"x": [0.0, 25.5, 51.0, 76.5, 
102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10446, 183, 142, 135, 152, 136, 104, 129, 115, 547]}, "f00216": {"x": [0.0, 1.3, 2.6, 3.9000000000000004, 5.2, 6.5, 7.800000000000001, 9.1, 10.4, 11.700000000000001, 13], "y": [7562, 378, 377, 757, 374, 385, 765, 407, 694, 390]}, "f00185": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3845, 467, 409, 375, 370, 470, 464, 475, 601, 4613]}, "f00071": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5239, 390, 338, 305, 269, 302, 333, 397, 497, 4019]}, "f00284": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11869, 0, 0, 0, 0, 0, 0, 0, 0, 220]}, "f00299": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11659, 0, 0, 0, 0, 56, 0, 0, 0, 374]}, "f00179": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5063, 410, 380, 296, 336, 387, 333, 423, 475, 3986]}, "f00362": {"x": [0, 0], "y": [12089]}, "f00309": {"x": [0, 0], "y": [12089]}, "f00018": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4620, 464, 405, 320, 377, 439, 395, 519, 580, 3970]}, "f00383": {"x": [0, 0], "y": [12089]}, "f00377": {"x": [0, 0], "y": [12089]}, "f00086": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12045, 4, 11, 2, 3, 5, 6, 0, 4, 9]}, "f00300": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11866, 0, 0, 0, 0, 0, 0, 0, 0, 223]}, "f00290": {"x": [0.0, 0.7, 1.4, 2.0999999999999996, 2.8, 3.5, 4.199999999999999, 4.8999999999999995, 5.6, 6.3, 7], "y": [9557, 265, 387, 0, 377, 372, 0, 580, 375, 176]}, "f00003": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [12073, 5, 2, 0, 2, 1, 1, 3, 0, 2]}, "f00092": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8001, 311, 258, 211, 212, 234, 217, 264, 296, 2085]}, "f00102": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4550, 452, 386, 348, 341, 422, 415, 453, 546, 4176]}, "f00344": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12085, 0, 0, 0, 0, 0, 0, 0, 0, 4]}, "f00095": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6925, 292, 291, 274, 213, 238, 304, 337, 369, 2846]}, "f00365": {"x": [0, 0], "y": [12089]}, "f00068": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5826, 394, 362, 364, 291, 326, 312, 407, 432, 3375]}, "f00272": {"x": [0.0, 0.4, 0.8, 1.2000000000000002, 1.6, 2.0, 2.4000000000000004, 2.8000000000000003, 3.2, 3.6, 4], "y": [10828, 0, 128, 0, 0, 379, 0, 380, 0, 374]}, "f00167": {"x": [0.0, 12.8, 25.6, 38.400000000000006, 51.2, 64.0, 76.80000000000001, 89.60000000000001, 102.4, 115.2, 128.0], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 0, 2]}, "f00371": {"x": [0, 0], "y": [12089]}, "f00194": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [12044, 7, 5, 4, 6, 8, 4, 5, 1, 5]}, "f00080": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11529, 73, 56, 43, 37, 54, 35, 43, 61, 158]}, "f00074": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4684, 413, 
369, 345, 357, 379, 395, 475, 601, 4071]}, "f00089": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10356, 175, 151, 124, 121, 128, 120, 112, 138, 664]}, "f00293": {"x": [0.0, 0.8, 1.6, 2.4000000000000004, 3.2, 4.0, 4.800000000000001, 5.6000000000000005, 6.4, 7.2, 8], "y": [9360, 89, 376, 411, 0, 350, 369, 468, 440, 226]}, "f00287": {"x": [0.0, 0.4, 0.8, 1.2000000000000002, 1.6, 2.0, 2.4000000000000004, 2.8000000000000003, 3.2, 3.6, 4], "y": [10874, 0, 85, 0, 0, 375, 0, 377, 0, 378]}, "f00152": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6389, 320, 295, 220, 241, 267, 256, 299, 376, 3426]}, "f00188": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6646, 371, 358, 301, 293, 320, 336, 359, 423, 2682]}, "f00251": {"x": [0, 0], "y": [12089]}, "f00350": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12076, 0, 0, 0, 0, 0, 0, 0, 0, 13]}, "f00173": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8701, 252, 245, 174, 192, 227, 178, 213, 223, 1684]}, "f00053": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11931, 21, 22, 9, 11, 10, 16, 16, 10, 43]}, "f00359": {"x": [0, 0], "y": [12089]}, "f00266": {"x": [0.0, 1.3, 2.6, 3.9000000000000004, 5.2, 6.5, 7.800000000000001, 9.1, 10.4, 11.700000000000001, 13], "y": [6426, 381, 387, 780, 346, 377, 761, 733, 1125, 773]}, "f00386": {"x": [0, 0], "y": [12089]}, "f00353": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12079, 0, 0, 0, 0, 0, 0, 0, 0, 10]}, "f00062": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10295, 181, 141, 124, 124, 135, 129, 102, 143, 715]}, "f00248": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11716, 0, 0, 0, 0, 0, 0, 0, 0, 373]}, "f00275": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11810, 0, 0, 0, 0, 0, 0, 0, 0, 279]}, "f00347": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12078, 0, 0, 0, 0, 0, 0, 0, 0, 11]}, "f00326": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11997, 0, 0, 0, 0, 0, 0, 0, 0, 92]}, "f00374": {"x": [0, 0], "y": [12089]}, "f00233": {"x": [0.0, 1.7, 3.4, 5.1, 6.8, 8.5, 10.2, 11.9, 13.6, 15.299999999999999, 17], "y": [3797, 752, 745, 375, 776, 739, 386, 776, 2760, 983]}, "f00077": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9160, 289, 233, 196, 184, 211, 207, 195, 208, 1206]}, "f00332": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00155": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5344, 444, 415, 336, 328, 347, 321, 425, 567, 3562]}, "f00035": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10265, 241, 181, 133, 120, 129, 116, 109, 160, 635]}, "f00161": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7603, 358, 320, 243, 249, 237, 266, 312, 376, 2125]}, "f00056": {"x": [0.0, 0.0], "y": [12089]}, "f00083": {"x": [0.0, 22.3, 44.6, 
66.9, 89.2, 111.5, 133.8, 156.1, 178.4, 200.70000000000002, 223.0], "y": [12084, 1, 0, 0, 1, 0, 1, 1, 0, 1]}, "f00269": {"x": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], "y": [8553, 155, 368, 380, 367, 383, 373, 380, 603, 527]}, "f00134": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9129, 273, 206, 203, 201, 223, 183, 199, 247, 1225]}, "f00182": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5098, 474, 399, 367, 361, 386, 384, 432, 527, 3661]}, "f00041": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4554, 413, 500, 374, 362, 394, 415, 499, 548, 4030]}, "f00368": {"x": [0, 0], "y": [12089]}, "f00176": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5030, 395, 406, 330, 326, 394, 346, 429, 519, 3914]}, "f00140": {"x": [0.0, 0.0], "y": [12089]}, "f00260": {"x": [0.0, 1.2, 2.4, 3.5999999999999996, 4.8, 6.0, 7.199999999999999, 8.4, 9.6, 10.799999999999999, 12], "y": [7557, 377, 379, 376, 388, 750, 373, 706, 766, 417]}, "f00281": {"x": [0, 0], "y": [12089]}, "f00254": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12040, 0, 0, 0, 0, 0, 0, 0, 0, 49]}, "f00149": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6118, 349, 407, 286, 273, 303, 313, 392, 373, 3275]}, "f00023": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11765, 44, 43, 26, 34, 23, 32, 23, 17, 82]}, "f00158": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3887, 485, 416, 417, 357, 405, 444, 502, 645, 4531]}, "f00017": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3486, 436, 431, 395, 386, 461, 460, 565, 668, 4801]}, "f00050": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10907, 154, 129, 115, 73, 85, 78, 89, 109, 350]}, "f00236": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [3801, 734, 377, 771, 366, 757, 753, 402, 3034, 1094]}, "f00143": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11390, 78, 57, 54, 58, 92, 48, 62, 55, 195]}, "f00044": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3848, 456, 452, 360, 394, 462, 458, 486, 610, 4563]}, "f00038": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6753, 429, 404, 326, 293, 322, 328, 360, 427, 2447]}, "f00242": {"x": [0.0, 1.3, 2.6, 3.9000000000000004, 5.2, 6.5, 7.800000000000001, 9.1, 10.4, 11.700000000000001, 13], "y": [7207, 370, 359, 759, 380, 370, 757, 427, 396, 1064]}, "f00116": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10727, 138, 94, 99, 93, 121, 105, 98, 88, 526]}, "f00137": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11649, 54, 60, 39, 36, 39, 42, 31, 42, 97]}, "f00257": {"x": [0.0, 0.4, 0.8, 1.2000000000000002, 1.6, 2.0, 2.4000000000000004, 2.8000000000000003, 3.2, 3.6, 4], "y": [10630, 0, 325, 0, 0, 380, 0, 376, 0, 378]}, "f00320": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11706, 0, 0, 0, 0, 7, 0, 0, 0, 376]}, "f00164": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11034, 132, 120, 75, 79, 94, 91, 
80, 80, 304]}, "f00215": {"x": [0.0, 1.4, 2.8, 4.199999999999999, 5.6, 7.0, 8.399999999999999, 9.799999999999999, 11.2, 12.6, 14], "y": [6427, 378, 757, 382, 375, 764, 378, 1075, 1007, 546]}, "f00308": {"x": [0, 0], "y": [12089]}, "f00122": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7084, 300, 256, 233, 226, 254, 275, 315, 345, 2801]}, "f00341": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 0, 2]}, "f00335": {"x": [0, 0], "y": [12089]}, "f00329": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12078, 0, 0, 0, 0, 0, 0, 0, 0, 11]}, "f00221": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11867, 0, 0, 0, 0, 0, 0, 0, 0, 222]}, "f00356": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12086, 0, 0, 0, 0, 0, 0, 0, 0, 3]}, "f00059": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11936, 27, 23, 15, 19, 19, 10, 5, 9, 26]}, "f00065": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7664, 367, 300, 263, 231, 275, 297, 292, 365, 2035]}, "f00314": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12007, 0, 0, 0, 0, 0, 0, 0, 0, 82]}, "f00263": {"x": [0.0, 1.4, 2.8, 4.199999999999999, 5.6, 7.0, 8.399999999999999, 9.799999999999999, 11.2, 12.6, 14], "y": [5669, 379, 768, 379, 372, 755, 369, 1327, 1358, 713]}, "f00323": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11827, 0, 0, 0, 0, 0, 0, 0, 0, 262]}, "f00203": {"x": [0.0, 1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5, 15], "y": [5301, 387, 737, 389, 753, 384, 769, 353, 2301, 715]}, "f00026": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12054, 4, 3, 6, 6, 2, 3, 4, 4, 3]}, "f00391": {"x": [0, 0], "y": [12089]}, "f00125": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6739, 269, 253, 186, 216, 260, 242, 278, 346, 3300]}, "f00032": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11890, 36, 32, 17, 19, 19, 17, 15, 11, 33]}, "f00193": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11795, 51, 34, 31, 32, 31, 23, 26, 14, 52]}, "f00047": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6134, 437, 419, 252, 318, 402, 348, 383, 467, 2929]}, "f00110": {"x": [0.0, 25.3, 50.6, 75.9, 101.2, 126.5, 151.8, 177.1, 202.4, 227.70000000000002, 253.0], "y": [12009, 9, 7, 7, 11, 10, 12, 11, 5, 8]}, "f00245": {"x": [0.0, 0.6, 1.2, 1.7999999999999998, 2.4, 3.0, 3.5999999999999996, 4.2, 4.8, 5.3999999999999995, 6], "y": [10023, 178, 0, 381, 0, 380, 372, 0, 463, 292]}, "f00146": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7706, 290, 298, 216, 249, 292, 222, 273, 306, 2237]}, "f00005": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11851, 48, 30, 29, 25, 27, 17, 11, 15, 36]}, "f00104": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6824, 434, 338, 302, 300, 355, 322, 338, 404, 2472]}, "f00218": {"x": [0.0, 
0.7, 1.4, 2.0999999999999996, 2.8, 3.5, 4.199999999999999, 4.8999999999999995, 5.6, 6.3, 7], "y": [9782, 59, 370, 0, 371, 376, 0, 382, 418, 331]}, "f00131": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4821, 449, 403, 413, 342, 448, 456, 462, 508, 3787]}, "f00317": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11834, 0, 0, 0, 0, 0, 0, 0, 0, 255]}, "f00224": {"x": [0, 0], "y": [12089]}, "f00230": {"x": [0.0, 1.1, 2.2, 3.3000000000000003, 4.4, 5.5, 6.6000000000000005, 7.700000000000001, 8.8, 9.9, 11], "y": [7933, 385, 380, 373, 406, 352, 383, 375, 600, 902]}, "f00239": {"x": [0.0, 1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5, 15], "y": [4928, 370, 766, 359, 760, 380, 756, 372, 2554, 844]}, "f00094": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7248, 323, 294, 259, 240, 260, 288, 277, 324, 2576]}, "f00119": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7742, 293, 319, 211, 235, 251, 232, 275, 334, 2197]}, "f00302": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12016, 0, 0, 0, 0, 0, 0, 0, 0, 73]}, "f00292": {"x": [0.0, 0.8, 1.6, 2.4000000000000004, 3.2, 4.0, 4.800000000000001, 5.6000000000000005, 6.4, 7.2, 8], "y": [9263, 181, 384, 372, 0, 381, 381, 441, 458, 228]}, "f00011": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5764, 446, 429, 313, 341, 377, 338, 393, 448, 3240]}, "f00338": {"x": [0, 0], "y": [12089]}, "f00020": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8724, 361, 277, 231, 204, 236, 228, 251, 255, 1322]}, "f00175": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5905, 376, 382, 313, 287, 329, 343, 401, 408, 3345]}, "f00206": {"x": [0.0, 1.8, 3.6, 5.4, 7.2, 9.0, 10.8, 12.6, 14.4, 16.2, 18], "y": [3412, 753, 757, 747, 395, 747, 758, 1390, 1996, 1134]}, "f00029": {"x": [0.0, 22.6, 45.2, 67.80000000000001, 90.4, 113.0, 135.60000000000002, 158.20000000000002, 180.8, 203.4, 226.0], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00274": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11586, 0, 0, 0, 0, 126, 0, 0, 0, 377]}, "f00128": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5125, 424, 393, 318, 305, 344, 326, 414, 524, 3916]}, "f00373": {"x": [0, 0], "y": [12089]}, "f00196": {"x": [0, 0], "y": [12089]}, "f00295": {"x": [0.0, 0.6, 1.2, 1.7999999999999998, 2.4, 3.0, 3.5999999999999996, 4.2, 4.8, 5.3999999999999995, 6], "y": [9997, 203, 0, 382, 0, 376, 376, 0, 408, 347]}, "f00305": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12084, 0, 0, 0, 0, 0, 0, 0, 0, 5]}, "f00289": {"x": [0.0, 0.6, 1.2, 1.7999999999999998, 2.4, 3.0, 3.5999999999999996, 4.2, 4.8, 5.3999999999999995, 6], "y": [9920, 285, 0, 373, 0, 379, 376, 0, 589, 167]}, "f00212": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [4178, 741, 372, 780, 354, 761, 768, 388, 2747, 1000]}, "f00181": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5291, 472, 414, 347, 321, 397, 356, 396, 500, 3595]}, "f00076": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7483, 
408, 326, 247, 281, 296, 271, 305, 364, 2108]}, "f00280": {"x": [0, 0], "y": [12089]}, "f00014": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3738, 434, 464, 340, 369, 391, 391, 517, 510, 4935]}, "f00388": {"x": [0, 0], "y": [12089]}, "f00227": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11527, 0, 0, 0, 0, 187, 0, 0, 0, 375]}, "f00082": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12014, 7, 17, 4, 9, 7, 10, 7, 6, 8]}, "f00008": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9951, 267, 217, 165, 163, 154, 153, 135, 180, 704]}, "f00113": {"x": [0.0, 19.7, 39.4, 59.099999999999994, 78.8, 98.5, 118.19999999999999, 137.9, 157.6, 177.29999999999998, 197.0], "y": [12087, 1, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00107": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10633, 174, 106, 97, 108, 144, 86, 106, 116, 519]}, "f00097": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6501, 292, 266, 208, 224, 247, 223, 273, 388, 3467]}, "f00311": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12085, 0, 0, 0, 0, 0, 0, 0, 0, 4]}, "f00184": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4057, 463, 429, 354, 401, 429, 411, 484, 696, 4365]}, "f00079": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11075, 131, 78, 77, 83, 89, 87, 67, 87, 315]}, "f00101": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4706, 406, 413, 326, 308, 358, 391, 466, 538, 4177]}, "f00002": {"x": [0.0, 6.9, 13.8, 20.700000000000003, 27.6, 34.5, 41.400000000000006, 48.300000000000004, 55.2, 62.1, 69.0], "y": [12086, 2, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00070": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5411, 368, 362, 308, 278, 334, 326, 337, 451, 3914]}, "f00064": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8548, 333, 249, 220, 218, 225, 211, 235, 291, 1559]}, "f00209": {"x": [0.0, 1.7, 3.4, 5.1, 6.8, 8.5, 10.2, 11.9, 13.6, 15.299999999999999, 17], "y": [3802, 737, 750, 380, 760, 776, 367, 768, 2732, 1017]}, "f00283": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12004, 0, 0, 0, 0, 0, 0, 0, 0, 85]}, "f00277": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12062, 0, 0, 0, 0, 0, 0, 0, 0, 27]}, "f00091": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8587, 262, 261, 207, 183, 237, 172, 252, 280, 1648]}, "f00163": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10179, 165, 161, 155, 140, 159, 129, 136, 152, 713]}, "f00058": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12061, 3, 3, 3, 3, 4, 4, 3, 3, 2]}, "f00190": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9354, 268, 252, 206, 189, 197, 179, 168, 219, 1057]}, "f00200": {"x": [0.0, 0.6, 1.2, 1.7999999999999998, 2.4, 3.0, 3.5999999999999996, 4.2, 4.8, 5.3999999999999995, 6], "y": [10018, 198, 0, 362, 0, 403, 359, 0, 391, 
358]}, "f00376": {"x": [0, 0], "y": [12089]}, "f00199": {"x": [0.0, 0.3, 0.6, 0.8999999999999999, 1.2, 1.5, 1.7999999999999998, 2.1, 2.4, 2.6999999999999997, 3], "y": [11267, 0, 0, 67, 0, 0, 377, 0, 0, 378]}, "f00085": {"x": [0.0, 13.3, 26.6, 39.900000000000006, 53.2, 66.5, 79.80000000000001, 93.10000000000001, 106.4, 119.7, 133.0], "y": [12086, 0, 1, 0, 0, 1, 0, 0, 0, 1]}, "f00298": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11342, 0, 0, 0, 0, 369, 0, 0, 0, 378]}, "f00157": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3882, 449, 425, 384, 392, 441, 424, 521, 647, 4524]}, "f00361": {"x": [0, 0], "y": [12089]}, "f00256": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11346, 0, 0, 0, 0, 367, 0, 0, 0, 376]}, "f00262": {"x": [0.0, 1.4, 2.8, 4.199999999999999, 5.6, 7.0, 8.399999999999999, 9.799999999999999, 11.2, 12.6, 14], "y": [6052, 373, 760, 371, 387, 761, 364, 1167, 1212, 642]}, "f00355": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12084, 0, 0, 0, 0, 0, 0, 0, 0, 5]}, "f00178": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4830, 437, 381, 304, 340, 379, 356, 390, 449, 4223]}, "f00382": {"x": [0, 0], "y": [12089]}, "f00364": {"x": [0, 0], "y": [12089]}, "f00358": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00067": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6256, 386, 391, 343, 324, 331, 365, 335, 416, 2942]}, "f00271": {"x": [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5], "y": [10223, 0, 366, 0, 367, 0, 378, 0, 380, 375]}, "f00286": {"x": [0.0, 0.3, 0.6, 0.8999999999999999, 1.2, 1.5, 1.7999999999999998, 2.1, 2.4, 2.6999999999999997, 3], "y": [11300, 0, 0, 36, 0, 0, 382, 0, 0, 371]}, "f00244": {"x": [0.0, 0.8, 1.6, 2.4000000000000004, 3.2, 4.0, 4.800000000000001, 5.6000000000000005, 6.4, 7.2, 8], "y": [9118, 345, 363, 374, 0, 383, 386, 369, 601, 150]}, "f00379": {"x": [0, 0], "y": [12089]}, "f00337": {"x": [0, 0], "y": [12089]}, "f00151": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6312, 320, 285, 259, 275, 274, 261, 322, 391, 3390]}, "f00259": {"x": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], "y": [8358, 337, 383, 366, 385, 372, 379, 382, 391, 736]}, "f00073": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4424, 441, 415, 338, 328, 423, 411, 474, 566, 4269]}, "f00172": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10154, 149, 158, 111, 128, 134, 118, 173, 154, 810]}, "f00166": {"x": [0.0, 25.3, 50.6, 75.9, 101.2, 126.5, 151.8, 177.1, 202.4, 227.70000000000002, 253.0], "y": [12032, 5, 10, 5, 5, 9, 3, 7, 2, 11]}, "f00385": {"x": [0, 0], "y": [12089]}, "f00088": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11223, 93, 83, 61, 65, 79, 70, 64, 58, 293]}, "f00139": {"x": [0.0, 15.3, 30.6, 45.900000000000006, 61.2, 76.5, 91.80000000000001, 107.10000000000001, 122.4, 137.70000000000002, 153.0], "y": [12084, 1, 1, 0, 0, 0, 0, 0, 1, 2]}, "f00343": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12086, 0, 0, 0, 0, 0, 0, 0, 0, 3]}, "f00052": {"x": [0.0, 
25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11759, 52, 35, 24, 24, 33, 19, 21, 37, 85]}, "f00046": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4627, 410, 429, 346, 368, 383, 379, 491, 583, 4073]}, "f00187": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5262, 459, 383, 331, 332, 364, 383, 438, 551, 3586]}, "f00238": {"x": [0.0, 1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5, 15], "y": [4555, 357, 758, 382, 752, 376, 756, 417, 2569, 1167]}, "f00250": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12078, 0, 0, 0, 0, 0, 0, 0, 0, 11]}, "f00370": {"x": [0, 0], "y": [12089]}, "f00265": {"x": [0.0, 1.3, 2.6, 3.9000000000000004, 5.2, 6.5, 7.800000000000001, 9.1, 10.4, 11.700000000000001, 13], "y": [6045, 388, 368, 756, 379, 393, 749, 926, 1318, 767]}, "f00145": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9002, 229, 222, 141, 173, 199, 187, 201, 233, 1502]}, "f00028": {"x": [0.0, 0.0], "y": [12089]}, "f00169": {"x": [0.0, 18.5, 37.0, 55.5, 74.0, 92.5, 111.0, 129.5, 148.0, 166.5, 185.0], "y": [12083, 1, 1, 0, 0, 0, 0, 1, 1, 2]}, "f00232": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [4920, 755, 380, 754, 370, 760, 769, 413, 2147, 821]}, "f00352": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12077, 0, 0, 0, 0, 0, 0, 0, 0, 12]}, "f00127": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5959, 375, 377, 256, 263, 273, 285, 361, 422, 3518]}, "f00055": {"x": [0.0, 19.0, 38.0, 57.0, 76.0, 95.0, 114.0, 133.0, 152.0, 171.0, 190.0], "y": [12083, 0, 1, 1, 2, 0, 0, 0, 1, 1]}, "f00310": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 0, 2]}, "f00154": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5935, 413, 389, 270, 301, 368, 321, 351, 420, 3321]}, "f00049": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9852, 272, 225, 154, 162, 168, 134, 126, 148, 848]}, "f00253": {"x": [0, 0], "y": [12089]}, "f00367": {"x": [0, 0], "y": [12089]}, "f00331": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00325": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11937, 0, 0, 0, 0, 0, 0, 0, 0, 152]}, "f00034": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11018, 146, 117, 100, 91, 86, 70, 58, 87, 316]}, "f00247": {"x": [0.0, 0.3, 0.6, 0.8999999999999999, 1.2, 1.5, 1.7999999999999998, 2.1, 2.4, 2.6999999999999997, 3], "y": [11319, 0, 0, 15, 0, 0, 378, 0, 0, 377]}, "f00319": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11727, 0, 0, 0, 0, 0, 0, 0, 0, 362]}, "f00160": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6121, 435, 358, 294, 322, 376, 316, 395, 480, 2992]}, "f00346": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12079, 0, 0, 0, 0, 0, 0, 0, 0, 10]}, "f00040": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 
127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5033, 400, 476, 364, 364, 427, 434, 427, 517, 3647]}, "f00148": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6238, 375, 369, 319, 314, 298, 331, 372, 388, 3085]}, "f00226": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11987, 0, 0, 0, 0, 0, 0, 0, 0, 102]}, "f00061": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11035, 129, 90, 89, 88, 97, 76, 69, 95, 321]}, "f00268": {"x": [0.0, 1.1, 2.2, 3.3000000000000003, 4.4, 5.5, 6.6000000000000005, 7.700000000000001, 8.8, 9.9, 11], "y": [7944, 375, 382, 369, 377, 382, 379, 375, 647, 859]}, "f00133": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7831, 348, 304, 261, 234, 246, 251, 256, 360, 1998]}, "f00022": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11351, 119, 106, 67, 64, 65, 39, 36, 51, 191]}, "f00142": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11984, 13, 9, 10, 12, 9, 9, 11, 13, 19]}, "f00037": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7989, 403, 368, 247, 259, 261, 254, 275, 370, 1663]}, "f00241": {"x": [0.0, 1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5, 15], "y": [6047, 382, 749, 378, 763, 373, 767, 380, 1767, 483]}, "f00334": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00229": {"x": [0.0, 0.8, 1.6, 2.4000000000000004, 3.2, 4.0, 4.800000000000001, 5.6000000000000005, 6.4, 7.2, 8], "y": [9283, 177, 381, 360, 0, 381, 373, 390, 566, 178]}, "f00043": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4046, 460, 445, 382, 381, 377, 440, 497, 599, 4462]}, "f00121": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7163, 327, 295, 290, 254, 279, 251, 305, 325, 2600]}, "f00001": {"x": [0.0, 9.9, 19.8, 29.700000000000003, 39.6, 49.5, 59.400000000000006, 69.3, 79.2, 89.10000000000001, 99.0], "y": [12087, 0, 0, 0, 0, 0, 1, 0, 0, 1]}, "f00115": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11554, 61, 59, 52, 55, 60, 39, 39, 46, 124]}, "f00307": {"x": [0, 0], "y": [12089]}, "f00016": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3154, 446, 438, 391, 422, 460, 418, 526, 708, 5126]}, "f00220": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11406, 0, 0, 0, 0, 307, 0, 0, 0, 376]}, "f00214": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [4926, 745, 393, 738, 383, 755, 762, 398, 2257, 732]}, "f00109": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11696, 39, 30, 37, 33, 49, 31, 37, 43, 94]}, "f00328": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12060, 0, 0, 0, 0, 0, 0, 0, 0, 29]}, "f00136": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11038, 109, 99, 87, 81, 94, 86, 90, 98, 307]}, "f00340": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12088, 0, 0, 0, 0, 0, 0, 0, 0, 1]}, "f00349": {"x": [0.0, 0.1, 0.2, 
0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12077, 0, 0, 0, 0, 0, 0, 0, 0, 12]}, "f00100": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5363, 398, 372, 293, 294, 303, 337, 395, 416, 3918]}, "f00235": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [3778, 761, 382, 753, 393, 751, 747, 462, 2973, 1089]}, "f00313": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12046, 0, 0, 0, 0, 0, 0, 0, 0, 43]}, "f00208": {"x": [0.0, 1.7, 3.4, 5.1, 6.8, 8.5, 10.2, 11.9, 13.6, 15.299999999999999, 17], "y": [3793, 767, 733, 393, 749, 769, 353, 783, 2729, 1020]}, "f00186": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4193, 442, 423, 362, 346, 413, 462, 503, 661, 4284]}, "f00217": {"x": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], "y": [8623, 67, 379, 376, 377, 385, 371, 380, 485, 646]}, "f00025": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11988, 15, 20, 11, 6, 10, 5, 6, 9, 19]}, "f00390": {"x": [0, 0], "y": [12089]}, "f00384": {"x": [0, 0], "y": [12089]}, "f00093": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7596, 327, 256, 249, 230, 228, 252, 280, 358, 2313]}, "f00285": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11653, 0, 0, 0, 0, 58, 0, 0, 0, 378]}, "f00103": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5336, 433, 356, 361, 355, 378, 414, 448, 544, 3464]}, "f00202": {"x": [0.0, 1.3, 2.6, 3.9000000000000004, 5.2, 6.5, 7.800000000000001, 9.1, 10.4, 11.700000000000001, 13], "y": [6805, 386, 377, 747, 375, 393, 754, 882, 888, 482]}, "f00192": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11234, 92, 92, 76, 70, 79, 69, 75, 70, 232]}, "f00031": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12033, 11, 6, 9, 9, 4, 4, 3, 2, 8]}, "f00124": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6866, 258, 257, 191, 249, 258, 242, 254, 343, 3171]}, "f00130": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4186, 409, 402, 365, 366, 448, 452, 478, 621, 4362]}, "f00019": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6535, 433, 390, 275, 316, 397, 345, 375, 428, 2595]}, "f00087": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11773, 44, 39, 30, 33, 39, 12, 29, 20, 70]}, "f00301": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11969, 0, 0, 0, 0, 0, 0, 0, 0, 120]}, "f00010": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7104, 403, 375, 295, 270, 344, 305, 323, 361, 2309]}, "f00004": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12010, 18, 19, 7, 8, 4, 4, 3, 6, 10]}, "f00223": {"x": [0, 0], "y": [12089]}, "f00118": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8533, 253, 269, 203, 182, 239, 208, 218, 275, 1709]}, "f00291": {"x": [0.0, 0.8, 1.6, 2.4000000000000004, 3.2, 4.0, 4.800000000000001, 
5.6000000000000005, 6.4, 7.2, 8], "y": [9310, 140, 375, 383, 0, 378, 369, 499, 431, 204]}, "f00322": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11763, 0, 0, 0, 0, 0, 0, 0, 0, 326]}, "f00316": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11901, 0, 0, 0, 0, 0, 0, 0, 0, 188]}, "f00013": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4169, 402, 453, 375, 377, 417, 391, 501, 459, 4545]}, "f00189": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8107, 299, 298, 257, 219, 235, 258, 268, 320, 1828]}, "f00195": {"x": [0.0, 0.0], "y": [12089]}, "f00112": {"x": [0.0, 0.0], "y": [12089]}, "f00081": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11816, 27, 22, 21, 30, 40, 21, 19, 26, 67]}, "f00007": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10927, 174, 149, 95, 99, 74, 78, 96, 86, 311]}, "f00211": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [4173, 749, 373, 765, 363, 766, 749, 383, 2766, 1002]}, "f00180": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5213, 468, 396, 317, 338, 423, 311, 419, 483, 3721]}, "f00366": {"x": [0, 0], "y": [12089]}, "f00075": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5770, 490, 347, 302, 331, 397, 323, 417, 492, 3220]}, "f00069": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5619, 356, 326, 310, 280, 326, 337, 368, 447, 3720]}, "f00288": {"x": [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5], "y": [10378, 0, 200, 0, 382, 0, 383, 0, 432, 314]}, "f00174": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [7158, 305, 325, 249, 278, 294, 257, 330, 373, 2520]}, "f00387": {"x": [0, 0], "y": [12089]}, "f00205": {"x": [0.0, 1.7, 3.4, 5.1, 6.8, 8.5, 10.2, 11.9, 13.6, 15.299999999999999, 17], "y": [3418, 743, 757, 374, 756, 761, 395, 749, 2987, 1149]}, "f00060": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11619, 78, 52, 40, 30, 54, 36, 37, 27, 116]}, "f00304": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12070, 0, 0, 0, 0, 0, 0, 0, 0, 19]}, "f00273": {"x": [0.0, 0.3, 0.6, 0.8999999999999999, 1.2, 1.5, 1.7999999999999998, 2.1, 2.4, 2.6999999999999997, 3], "y": [11285, 0, 0, 53, 0, 0, 375, 0, 0, 376]}, "f00294": {"x": [0.0, 0.7, 1.4, 2.0999999999999996, 2.8, 3.5, 4.199999999999999, 4.8999999999999995, 5.6, 6.3, 7], "y": [9641, 189, 373, 0, 381, 372, 0, 384, 546, 203]}, "f00168": {"x": [0.0, 0.0], "y": [12089]}, "f00372": {"x": [0, 0], "y": [12089]}, "f00267": {"x": [0.0, 1.3, 2.6, 3.9000000000000004, 5.2, 6.5, 7.800000000000001, 9.1, 10.4, 11.700000000000001, 13], "y": [7182, 379, 398, 749, 369, 371, 754, 596, 855, 436]}, "f00106": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9670, 270, 196, 140, 185, 185, 162, 177, 194, 910]}, "f00096": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6688, 311, 265, 229, 227, 246, 257, 283, 335, 3248]}, "f00360": {"x": [0, 0], "y": [12089]}, "f00369": {"x": [0, 0], "y": [12089]}, "f00183": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 
204.0, 229.5, 255.0], "y": [4581, 546, 421, 348, 379, 423, 382, 469, 575, 3965]}, "f00141": {"x": [0.0, 16.6, 33.2, 49.800000000000004, 66.4, 83.0, 99.60000000000001, 116.20000000000002, 132.8, 149.4, 166.0], "y": [12085, 0, 0, 1, 0, 2, 0, 0, 0, 1]}, "f00090": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9452, 221, 189, 147, 169, 162, 165, 176, 221, 1187]}, "f00354": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12082, 0, 0, 0, 0, 0, 0, 0, 0, 7]}, "f00249": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11974, 0, 0, 0, 0, 0, 0, 0, 0, 115]}, "f00084": {"x": [0.0, 0.0], "y": [12089]}, "f00063": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [9480, 243, 186, 182, 154, 158, 185, 160, 192, 1149]}, "f00198": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11948, 0, 0, 0, 0, 0, 0, 0, 0, 141]}, "f00297": {"x": [0.0, 0.3, 0.6, 0.8999999999999999, 1.2, 1.5, 1.7999999999999998, 2.1, 2.4, 2.6999999999999997, 3], "y": [10963, 0, 0, 374, 0, 0, 374, 0, 0, 378]}, "f00156": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4527, 475, 409, 360, 340, 411, 376, 459, 645, 4087]}, "f00255": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11812, 0, 0, 0, 0, 0, 0, 0, 0, 277]}, "f00078": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10356, 204, 153, 113, 121, 147, 108, 97, 129, 661]}, "f00282": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12076, 0, 0, 0, 0, 0, 0, 0, 0, 13]}, "f00177": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4723, 404, 353, 334, 360, 405, 376, 411, 518, 4205]}, "f00042": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4282, 425, 468, 374, 372, 381, 462, 488, 521, 4316]}, "f00381": {"x": [0, 0], "y": [12089]}, "f00240": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [5291, 762, 371, 759, 384, 762, 745, 450, 1867, 698]}, "f00276": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11977, 0, 0, 0, 0, 0, 0, 0, 0, 112]}, "f00099": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5984, 304, 310, 279, 257, 278, 251, 324, 403, 3699]}, "f00162": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8968, 271, 259, 188, 208, 200, 214, 214, 257, 1310]}, "f00348": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12074, 0, 0, 0, 0, 0, 0, 0, 0, 15]}, "f00057": {"x": [0.0, 10.7, 21.4, 32.099999999999994, 42.8, 53.5, 64.19999999999999, 74.89999999999999, 85.6, 96.3, 107.0], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 1, 1]}, "f00261": {"x": [0.0, 1.4, 2.8, 4.199999999999999, 5.6, 7.0, 8.399999999999999, 9.799999999999999, 11.2, 12.6, 14], "y": [6432, 368, 761, 381, 387, 747, 379, 1024, 1069, 541]}, "f00375": {"x": [0, 0], "y": [12089]}, "f00039": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [5703, 470, 412, 383, 320, 403, 401, 401, 485, 3111]}, "f00237": {"x": [0.0, 1.5, 
3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5, 15], "y": [4173, 382, 735, 393, 744, 398, 765, 372, 3016, 1111]}, "f00051": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11475, 97, 58, 63, 57, 52, 37, 46, 46, 158]}, "f00243": {"x": [0.0, 1.1, 2.2, 3.3000000000000003, 4.4, 5.5, 6.6000000000000005, 7.700000000000001, 8.8, 9.9, 11], "y": [8314, 395, 374, 370, 373, 374, 391, 371, 390, 737]}, "f00357": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 0, 2]}, "f00150": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6158, 350, 355, 292, 294, 299, 264, 318, 350, 3409]}, "f00270": {"x": [0.0, 0.7, 1.4, 2.0999999999999996, 2.8, 3.5, 4.199999999999999, 4.8999999999999995, 5.6, 6.3, 7], "y": [9462, 368, 372, 0, 379, 375, 0, 379, 401, 353]}, "f00024": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11902, 27, 31, 12, 14, 21, 8, 12, 16, 46]}, "f00165": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11691, 51, 46, 36, 43, 35, 41, 42, 24, 80]}, "f00159": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4615, 454, 453, 393, 382, 458, 424, 506, 588, 3816]}, "f00363": {"x": [0, 0], "y": [12089]}, "f00258": {"x": [0.0, 0.7, 1.4, 2.0999999999999996, 2.8, 3.5, 4.199999999999999, 4.8999999999999995, 5.6, 6.3, 7], "y": [9543, 281, 392, 0, 370, 369, 0, 378, 622, 134]}, "f00123": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6984, 294, 266, 220, 218, 245, 210, 279, 358, 3015]}, "f00378": {"x": [0, 0], "y": [12089]}, "f00321": {"x": [0.0, 0.2, 0.4, 0.6000000000000001, 0.8, 1.0, 1.2000000000000002, 1.4000000000000001, 1.6, 1.8, 2], "y": [11711, 0, 0, 0, 0, 0, 0, 0, 0, 378]}, "f00336": {"x": [0, 0], "y": [12089]}, "f00342": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12087, 0, 0, 0, 0, 0, 0, 0, 0, 2]}, "f00045": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [3911, 454, 408, 329, 363, 435, 424, 508, 679, 4578]}, "f00264": {"x": [0.0, 1.4, 2.8, 4.199999999999999, 5.6, 7.0, 8.399999999999999, 9.799999999999999, 11.2, 12.6, 14], "y": [5678, 372, 753, 379, 393, 737, 380, 1236, 1402, 759]}, "f00072": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4799, 453, 387, 342, 317, 322, 368, 425, 502, 4174]}, "f00171": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11296, 74, 84, 56, 83, 89, 54, 64, 60, 229]}, "f00222": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12056, 0, 0, 0, 0, 0, 0, 0, 0, 33]}, "f00030": {"x": [0.0, 25.4, 50.8, 76.19999999999999, 101.6, 127.0, 152.39999999999998, 177.79999999999998, 203.2, 228.6, 254.0], "y": [12077, 3, 2, 3, 1, 2, 0, 0, 0, 1]}, "f00066": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6902, 393, 322, 331, 295, 348, 295, 328, 393, 2482]}, "f00138": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [12021, 17, 9, 7, 6, 9, 3, 5, 5, 7]}, "f00144": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [10286, 176, 139, 112, 141, 136, 126, 123, 122, 728]}, "f00279": {"x": [0, 0], "y": [12089]}, "f00153": {"x": [0.0, 25.5, 51.0, 76.5, 
102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6259, 340, 350, 234, 286, 286, 279, 363, 389, 3303]}, "f00012": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [4872, 402, 424, 321, 351, 415, 387, 454, 468, 3995]}, "f00111": {"x": [0.0, 10.2, 20.4, 30.599999999999998, 40.8, 51.0, 61.199999999999996, 71.39999999999999, 81.6, 91.8, 102.0], "y": [12083, 0, 1, 0, 1, 1, 1, 0, 0, 2]}, "f00027": {"x": [0.0, 5.2, 10.4, 15.600000000000001, 20.8, 26.0, 31.200000000000003, 36.4, 41.6, 46.800000000000004, 52.0], "y": [12086, 0, 0, 0, 0, 1, 0, 0, 0, 2]}, "f00033": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11558, 86, 59, 58, 44, 45, 35, 42, 35, 127]}, "f00147": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6773, 352, 334, 276, 270, 303, 268, 333, 402, 2778]}, "f00246": {"x": [0.0, 0.4, 0.8, 1.2000000000000002, 1.6, 2.0, 2.4000000000000004, 2.8000000000000003, 3.2, 3.6, 4], "y": [10771, 0, 186, 0, 0, 377, 0, 377, 0, 378]}, "f00132": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6290, 443, 373, 301, 323, 374, 327, 360, 447, 2851]}, "f00345": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12084, 0, 0, 0, 0, 0, 0, 0, 0, 5]}, "f00204": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [4185, 737, 380, 748, 374, 774, 742, 385, 2768, 996]}, "f00048": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8094, 379, 315, 252, 252, 305, 249, 296, 263, 1684]}, "f00231": {"x": [0.0, 1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5, 15], "y": [6046, 395, 738, 377, 763, 382, 748, 379, 1725, 536]}, "f00303": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12043, 0, 0, 0, 0, 0, 0, 0, 0, 46]}, "f00126": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [6511, 338, 288, 220, 208, 241, 237, 292, 369, 3385]}, "f00006": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [11528, 92, 84, 57, 50, 47, 40, 43, 31, 117]}, "f00330": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12084, 0, 0, 0, 0, 0, 0, 0, 0, 5]}, "f00339": {"x": [0, 0], "y": [12089]}, "f00219": {"x": [0.0, 0.4, 0.8, 1.2000000000000002, 1.6, 2.0, 2.4000000000000004, 2.8000000000000003, 3.2, 3.6, 4], "y": [10726, 0, 230, 0, 0, 377, 0, 378, 0, 378]}, "f00225": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12086, 0, 0, 0, 0, 0, 0, 0, 0, 3]}, "f00252": {"x": [0, 0], "y": [12089]}, "f00324": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11879, 0, 0, 0, 0, 0, 0, 0, 0, 210]}, "f00351": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [12077, 0, 0, 0, 0, 0, 0, 0, 0, 12]}, "f00210": {"x": [0.0, 1.6, 3.2, 4.800000000000001, 6.4, 8.0, 9.600000000000001, 11.200000000000001, 12.8, 14.4, 16], "y": [4173, 741, 380, 763, 379, 746, 764, 372, 2690, 1081]}, "f00105": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 204.0, 229.5, 255.0], "y": [8423, 354, 259, 224, 214, 257, 237, 234, 274, 1613]}, "f00054": {"x": [0.0, 25.5, 51.0, 76.5, 102.0, 127.5, 153.0, 178.5, 
204.0, 229.5, 255.0], "y": [12038, 1, 6, 8, 6, 6, 5, 8, 3, 8]}, "f00318": {"x": [0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1], "y": [11776, 0, 0, 0, 0, 0, 0, 0, 0, 313]}} \ No newline at end of file diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_META b/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_META deleted file mode 100644 index a8399694e..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/test_data/dataset_metainfo/_META +++ /dev/null @@ -1 +0,0 @@ -{"dtypes": {"f00312": "bigint", "f00207": "bigint", "f00197": "bigint", "f00021": "float", "f00380": "bigint", "f00129": "float", "f00009": "float", "f00333": "bigint", "f00036": "float", "f00000": "float", "f00135": "float", "f00213": "bigint", "f00389": "bigint", "f00108": "float", "f00098": "float", "f00120": "float", "f00306": "bigint", "f00015": "float", "f00234": "bigint", "f00228": "bigint", "f00114": "float", "f00296": "bigint", "f00327": "bigint", "f00315": "bigint", "f00201": "bigint", "f00170": "float", "raw_id": "bigint", "f00278": "bigint", "f00117": "float", "f00191": "float", "f00216": "bigint", "f00185": "float", "f00071": "float", "f00284": "bigint", "f00299": "bigint", "f00179": "float", "f00362": "bigint", "f00309": "bigint", "f00018": "float", "f00383": "bigint", "f00377": "bigint", "f00086": "float", "f00300": "bigint", "f00290": "bigint", "f00003": "float", "f00092": "float", "f00102": "float", "f00344": "bigint", "f00095": "float", "f00365": "bigint", "f00068": "float", "f00272": "bigint", "f00167": "float", "f00371": "bigint", "f00194": "float", "f00080": "float", "f00074": "float", "f00089": "float", "f00293": "bigint", "f00287": "bigint", "f00152": "float", "f00188": "float", "f00251": "bigint", "f00350": "bigint", "f00173": "float", "f00053": "float", "f00359": "bigint", "f00266": "bigint", "f00386": "bigint", "f00353": "bigint", "f00062": "float", "f00248": "bigint", "f00275": "bigint", "f00347": "bigint", "f00326": "bigint", "f00374": "bigint", "f00233": "bigint", "f00077": "float", "f00332": "bigint", "f00155": "float", "f00035": "float", "f00161": "float", "f00056": "float", "f00083": "float", "f00269": "bigint", "f00134": "float", "f00182": "float", "f00041": "float", "f00368": "bigint", "f00176": "float", "f00140": "float", "f00260": "bigint", "f00281": "bigint", "f00254": "bigint", "f00149": "float", "f00023": "float", "f00158": "float", "f00017": "float", "f00050": "float", "f00236": "bigint", "f00143": "float", "f00044": "float", "f00038": "float", "f00242": "bigint", "f00116": "float", "f00137": "float", "f00257": "bigint", "f00320": "bigint", "f00164": "float", "f00215": "bigint", "f00308": "bigint", "f00122": "float", "f00341": "bigint", "f00335": "bigint", "f00329": "bigint", "f00221": "bigint", "f00356": "bigint", "f00059": "float", "f00065": "float", "f00314": "bigint", "f00263": "bigint", "f00323": "bigint", "f00203": "bigint", "f00026": "float", "f00391": "bigint", "f00125": "float", "f00032": "float", "f00193": "float", "f00047": "float", "f00110": "float", "f00245": "bigint", "f00146": "float", "f00005": "float", "f00104": "float", "f00218": "bigint", "f00131": "float", "f00317": "bigint", "f00224": "bigint", "f00230": "bigint", "f00239": "bigint", "f00094": "float", "f00119": "float", "f00302": "bigint", "f00292": "bigint", "f00011": "float", "f00338": "bigint", "f00020": "float", "f00175": "float", "f00206": "bigint", "f00029": "float", "f00274": "bigint", 
"f00128": "float", "f00373": "bigint", "f00196": "bigint", "f00295": "bigint", "f00305": "bigint", "f00289": "bigint", "f00212": "bigint", "f00181": "float", "f00076": "float", "f00280": "bigint", "f00014": "float", "f00388": "bigint", "f00227": "bigint", "f00082": "float", "f00008": "float", "f00113": "float", "f00107": "float", "f00097": "float", "f00311": "bigint", "f00184": "float", "f00079": "float", "f00101": "float", "f00002": "float", "f00070": "float", "f00064": "float", "f00209": "bigint", "f00283": "bigint", "f00277": "bigint", "f00091": "float", "f00163": "float", "f00058": "float", "f00190": "float", "f00200": "bigint", "f00376": "bigint", "f00199": "bigint", "f00085": "float", "f00298": "bigint", "f00157": "float", "f00361": "bigint", "f00256": "bigint", "f00262": "bigint", "f00355": "bigint", "f00178": "float", "f00382": "bigint", "f00364": "bigint", "f00358": "bigint", "f00067": "float", "f00271": "bigint", "f00286": "bigint", "f00244": "bigint", "f00379": "bigint", "f00337": "bigint", "f00151": "float", "f00259": "bigint", "f00073": "float", "f00172": "float", "f00166": "float", "f00385": "bigint", "f00088": "float", "f00139": "float", "f00343": "bigint", "f00052": "float", "f00046": "float", "f00187": "float", "f00238": "bigint", "f00250": "bigint", "f00370": "bigint", "f00265": "bigint", "f00145": "float", "f00028": "float", "f00169": "float", "f00232": "bigint", "f00352": "bigint", "f00127": "float", "f00055": "float", "f00310": "bigint", "f00154": "float", "f00049": "float", "f00253": "bigint", "f00367": "bigint", "f00331": "bigint", "f00325": "bigint", "f00034": "float", "f00247": "bigint", "f00319": "bigint", "f00160": "float", "f00346": "bigint", "f00040": "float", "f00148": "float", "f00226": "bigint", "f00061": "float", "f00268": "bigint", "f00133": "float", "f00022": "float", "f00142": "float", "f00037": "float", "f00241": "bigint", "f00334": "bigint", "f00229": "bigint", "f00043": "float", "f00121": "float", "f00001": "float", "f00115": "float", "f00307": "bigint", "f00016": "float", "f00220": "bigint", "f00214": "bigint", "f00109": "float", "f00328": "bigint", "f00136": "float", "f00340": "bigint", "f00349": "bigint", "f00100": "float", "f00235": "bigint", "f00313": "bigint", "f00208": "bigint", "f00186": "float", "f00217": "bigint", "f00025": "float", "f00390": "bigint", "f00384": "bigint", "f00093": "float", "f00285": "bigint", "f00103": "float", "f00202": "bigint", "f00192": "float", "f00031": "float", "f00124": "float", "f00130": "float", "f00019": "float", "f00087": "float", "f00301": "bigint", "f00010": "float", "f00004": "float", "f00223": "bigint", "f00118": "float", "f00291": "bigint", "f00322": "bigint", "f00316": "bigint", "f00013": "float", "f00189": "float", "f00195": "float", "f00112": "float", "f00081": "float", "f00007": "float", "f00211": "bigint", "f00180": "float", "f00366": "bigint", "f00075": "float", "f00069": "float", "f00288": "bigint", "f00174": "float", "f00387": "bigint", "f00205": "bigint", "f00060": "float", "f00304": "bigint", "f00273": "bigint", "f00294": "bigint", "f00168": "float", "f00372": "bigint", "f00267": "bigint", "f00106": "float", "f00096": "float", "f00360": "bigint", "f00369": "bigint", "f00183": "float", "f00141": "float", "f00090": "float", "f00354": "bigint", "f00249": "bigint", "f00084": "float", "f00063": "float", "f00198": "bigint", "f00297": "bigint", "f00156": "float", "f00255": "bigint", "f00078": "float", "f00282": "bigint", "f00177": "float", "f00042": "float", "f00381": "bigint", "f00240": "bigint", 
"f00276": "bigint", "f00099": "float", "f00162": "float", "f00348": "bigint", "f00057": "float", "f00261": "bigint", "f00375": "bigint", "f00039": "float", "f00237": "bigint", "f00051": "float", "f00243": "bigint", "f00357": "bigint", "f00150": "float", "f00270": "bigint", "f00024": "float", "f00165": "float", "f00159": "float", "f00363": "bigint", "f00258": "bigint", "f00123": "float", "f00378": "bigint", "f00321": "bigint", "f00336": "bigint", "f00342": "bigint", "f00045": "float", "f00264": "bigint", "f00072": "float", "f00171": "float", "f00222": "bigint", "f00030": "float", "f00066": "float", "f00138": "float", "f00144": "float", "f00279": "bigint", "f00153": "float", "f00012": "float", "f00111": "float", "f00027": "float", "f00033": "float", "f00147": "float", "f00246": "bigint", "f00132": "float", "f00345": "bigint", "f00204": "bigint", "f00048": "float", "f00231": "bigint", "f00303": "bigint", "f00126": "float", "f00006": "float", "f00330": "bigint", "f00339": "bigint", "f00219": "bigint", "f00225": "bigint", "f00252": "bigint", "f00324": "bigint", "f00351": "bigint", "f00210": "bigint", "f00105": "float", "f00054": "float", "f00318": "bigint"}, "count": 12089, "sample": [[0, 7, 0, 0.0, 0, 0.0, 0.0, 0, 5.0, 0.0, 252.0, 0, 0, 49.0, 203.0, 241.0, 0, 255.0, 1, 0, 0.0, 0, 0, 0, 6, 0.0, 0, 0, 3.0, 0.0, 0, 0.0, 208.0, 0, 0, 252.0, 0, 0, 228.0, 0, 0, 0.0, 0, 0, 0.0, 252.0, 173.0, 0, 16.0, 0, 59.0, 0, 0.0, 0, 0.0, 0.0, 252.0, 0.0, 0, 0, 244.0, 0.0, 0, 0, 231.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 3, 7.0, 0, 29.0, 0.0, 89.0, 0.0, 0.0, 0, 252.0, 3.0, 228.0, 0, 205.0, 0.0, 0, 0, 0, 18.0, 0.0, 0.0, 253.0, 0.0, 0, 0.0, 252.0, 227.0, 0, 0.0, 0.0, 0, 0, 37.0, 0, 0, 17.0, 0, 0, 0, 0, 0, 0.0, 252.0, 0, 0, 0, 12, 0.0, 0, 200.0, 0.0, 0.0, 117.0, 0.0, 0, 252.0, 0.0, 252.0, 0, 72.0, 0, 0, 4, 0, 180.0, 252.0, 0, 0, 53.0, 0, 0.0, 245.0, 9, 0.0, 0, 65.0, 0, 0, 0, 0, 0, 0, 124.0, 87.0, 0, 253.0, 0, 0, 0.0, 0.0, 0.0, 49.0, 21.0, 0, 0.0, 0.0, 129.0, 0.0, 18.0, 78.0, 2, 0, 0, 135.0, 180.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 0.0, 0, 0, 0, 0, 252.0, 0, 0, 0, 125.0, 0, 0, 0, 0, 0, 170.0, 0, 252.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 231.0, 0.0, 0, 0, 0, 0, 105.0, 0.0, 0.0, 6, 0, 216.0, 0.0, 0, 126.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 243.0, 88.0, 0, 0.0, 0, 241.0, 0.0, 0.0, 54.0, 0, 0, 2, 242.0, 106.0, 0.0, 0.0, 0, 253.0, 0, 0, 0.0, 0, 223.0, 0, 0, 247.0, 0, 0, 3, 0.0, 0, 0.0, 0, 0, 252.0, 0, 252.0, 10, 0.0, 0.0, 53.0, 14.0, 35.0, 0.0, 0, 10.0, 0.0, 0, 136.0, 0, 0, 0, 253.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 252.0, 0, 252.0, 0.0, 0, 252.0, 0, 14, 0.0, 0, 0, 0, 0.0, 0, 0, 66.0, 0.0, 0, 0, 0.0, 0.0, 5.0, 0, 0, 0.0, 6.0, 0, 0, 0.0, 0, 0.0, 0, 216.0, 170.0, 0, 0, 0, 253.0, 180.0, 0, 0.0, 0, 0, 252.0, 0, 0.0, 0, 0, 73.0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 252.0, 0, 252.0, 0.0, 0, 0.0, 252.0, 0.0, 0.0, 0, 252.0, 179.0, 0.0, 0.0, 0.0, 242.0, 0, 163.0, 0, 13, 6.0, 7, 0, 252.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 184.0, 0.0, 0], [0, 8, 0, 0.0, 0, 252.0, 222.0, 0, 45.0, 0.0, 0.0, 9, 0, 0.0, 0.0, 0.0, 0, 252.0, 13, 0, 0.0, 0, 0, 0, 6, 0.0, 1, 0, 0.0, 0.0, 0, 252.0, 44.0, 0, 0, 0.0, 0, 0, 177.0, 0, 0, 0.0, 0, 4, 0.0, 0.0, 252.0, 0, 0.0, 0, 44.0, 0, 0.0, 0, 0.0, 0.0, 252.0, 0.0, 0, 3, 0.0, 0.0, 0, 0, 61.0, 0.0, 0, 6, 0, 0, 0.0, 0, 0, 0, 0, 0, 14, 0.0, 0, 98.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 92.0, 253.0, 0, 29.0, 0.0, 9, 0, 0, 0.0, 0.0, 252.0, 252.0, 0.0, 13, 0.0, 253.0, 253.0, 0, 0.0, 0.0, 2, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 31.0, 0, 11, 0, 12, 0.0, 0, 0.0, 0.0, 0.0, 74.0, 0.0, 0, 5.0, 0.0, 0.0, 0, 74.0, 0, 0, 9, 10, 0.0, 0.0, 0, 1, 
252.0, 0, 0.0, 252.0, 7, 0.0, 0, 86.0, 0, 0, 0, 0, 5, 13, 18.0, 0.0, 0, 253.0, 0, 0, 0.0, 45.0, 0.0, 0.0, 0.0, 0, 252.0, 0.0, 252.0, 0.0, 44.0, 0.0, 11, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 252.0, 0, 0, 11, 0, 0.0, 0, 0, 0, 52.0, 0, 2, 0, 0, 0, 0.0, 8, 252.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 253.0, 65.0, 13, 0, 0, 8, 0.0, 0.0, 0.0, 13, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 253.0, 9.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 223.0, 2, 0, 6, 253.0, 0.0, 0.0, 0.0, 0, 252.0, 0, 5, 0.0, 0, 0.0, 0, 0, 15.0, 13, 0, 7, 243.0, 0, 0.0, 0, 0, 0.0, 0, 74.0, 10, 0.0, 0.0, 0.0, 252.0, 0.0, 0.0, 0, 252.0, 0.0, 0, 0.0, 3, 0, 0, 252.0, 0.0, 0.0, 0.0, 0.0, 0.0, 13, 0.0, 0, 74.0, 44.0, 4, 183.0, 0, 7, 0.0, 0, 0, 0, 0.0, 0, 3, 0.0, 0.0, 0, 0, 239.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 242.0, 0, 0.0, 0, 0.0, 255.0, 0, 7, 0, 0.0, 0.0, 0, 0.0, 11, 0, 253.0, 12, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 74.0, 0, 5, 0.0, 0, 0, 0, 0, 253.0, 9, 143.0, 0.0, 0, 0.0, 123.0, 0.0, 0.0, 0, 0.0, 252.0, 0.0, 0.0, 0.0, 75.0, 0, 0.0, 0, 7, 0.0, 12, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 14, 0.0, 0.0, 0], [0, 10, 0, 0.0, 0, 254.0, 0.0, 0, 0.0, 0.0, 0.0, 2, 0, 0.0, 0.0, 0.0, 0, 254.0, 15, 0, 0.0, 0, 0, 0, 0, 0.0, 2, 0, 0.0, 0.0, 0, 139.0, 1.0, 0, 0, 0.0, 0, 0, 171.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 214.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 219.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 10, 0, 0, 0.0, 0, 0, 0, 0, 0, 16, 0.0, 0, 240.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 206.0, 89.0, 0, 215.0, 0.0, 1, 0, 0, 0.0, 0.0, 34.0, 254.0, 0.0, 15, 0.0, 240.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 7, 0, 9, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 19.0, 0.0, 0.0, 0, 0.0, 0, 0, 0, 11, 0.0, 0.0, 0, 0, 110.0, 0, 0.0, 254.0, 11, 0.0, 0, 254.0, 0, 0, 0, 0, 0, 10, 89.0, 0.0, 0, 254.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 254.0, 0.0, 254.0, 0.0, 0.0, 0.0, 16, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 254.0, 0, 0, 5, 0, 36.0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0, 254.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 171.0, 0.0, 14, 0, 0, 12, 0.0, 0.0, 0.0, 5, 0, 138.0, 0.0, 0, 25.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 73.0, 90.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 93.0, 0.0, 0.0, 0.0, 0, 254.0, 0, 0, 0.0, 0, 0.0, 0, 0, 254.0, 15, 0, 13, 8.0, 0, 0.0, 0, 0, 0.0, 0, 28.0, 4, 0.0, 0.0, 0.0, 116.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0, 0, 0, 254.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14, 51.0, 0, 31.0, 0.0, 0, 164.0, 0, 16, 0.0, 0, 0, 0, 0.0, 0, 3, 0.0, 0.0, 0, 0, 254.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 254.0, 0, 0.0, 0, 63.0, 89.0, 0, 0, 0, 7.0, 0.0, 0, 0.0, 5, 0, 0.0, 14, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 254.0, 13, 128.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 246.0, 0.0, 0.0, 0.0, 177.0, 0, 0.0, 0, 15, 0.0, 7, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 15, 0.0, 0.0, 0], [0, 2, 0, 22.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 16, 0, 0.0, 0.0, 0.0, 0, 254.0, 15, 0, 0.0, 3, 0, 0, 0, 0.0, 3, 0, 0.0, 0.0, 9, 137.0, 0.0, 0, 0, 0.0, 0, 0, 254.0, 0, 0, 0.0, 0, 4, 0.0, 0.0, 192.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 254.0, 0.0, 5, 0, 0.0, 254.0, 0, 0, 0.0, 0.0, 0, 12, 0, 0, 0.0, 0, 0, 0, 0, 0, 16, 254.0, 0, 0.0, 0.0, 250.0, 0.0, 0.0, 8, 116.0, 0.0, 50.0, 0, 209.0, 0.0, 7, 0, 0, 126.0, 0.0, 188.0, 254.0, 2.0, 9, 0.0, 253.0, 0.0, 13, 0.0, 0.0, 0, 0, 0.0, 14, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 13, 0, 8, 0.0, 0, 0.0, 0.0, 0.0, 254.0, 0.0, 0, 0.0, 0.0, 254.0, 0, 254.0, 0, 0, 0, 14, 0.0, 0.0, 0, 5, 181.0, 0, 140.0, 24.0, 3, 0.0, 0, 0.0, 0, 0, 4, 0, 2, 12, 0.0, 254.0, 0, 254.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 23.0, 0.0, 4.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 
3.0, 0, 0, 13, 0, 15.0, 0, 0, 0, 0.0, 2, 0, 3, 0, 0, 0.0, 2, 200.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 254.0, 254.0, 9, 0, 0, 12, 0.0, 0.0, 0.0, 15, 0, 0.0, 0.0, 0, 0.0, 191.0, 0, 0, 0, 0, 0.0, 0, 0, 254.0, 0, 30.0, 25.0, 0, 0.0, 11, 254.0, 0.0, 0.0, 0.0, 14, 0, 0, 155.0, 0.0, 0.0, 0.0, 0, 254.0, 0, 15, 0.0, 0, 0.0, 0, 0, 0.0, 9, 0, 0, 254.0, 4, 0.0, 0, 0, 0.0, 0, 254.0, 0, 0.0, 0.0, 0.0, 141.0, 252.0, 0.0, 0, 58.0, 0.0, 0, 0.0, 3, 0, 0, 254.0, 209.0, 0.0, 0.0, 0.0, 0.0, 6, 0.0, 0, 254.0, 0.0, 1, 0.0, 0, 16, 0.0, 0, 0, 4, 0.0, 0, 12, 154.0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 1, 0.0, 0, 118.0, 0, 254.0, 73.0, 0, 15, 0, 0.0, 61.0, 0, 0.0, 13, 0, 0.0, 8, 0.0, 8, 0, 86.0, 4, 0.0, 0.0, 254.0, 0, 0, 0.0, 0, 0, 0, 0, 254.0, 13, 91.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 234.0, 0.0, 0.0, 0.0, 0.0, 0, 254.0, 0, 15, 254.0, 6, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 1, 254.0, 0.0, 0], [0, 6, 0, 0.0, 0, 220.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 228.0, 247.0, 0, 0.0, 12, 0, 0.0, 0, 0, 0, 0, 0.0, 4, 0, 0.0, 0.0, 0, 0.0, 254.0, 0, 0, 0.0, 0, 0, 254.0, 0, 0, 0.0, 0, 0, 0.0, 214.0, 154.0, 0, 254.0, 0, 203.0, 0, 0.0, 0, 0.0, 33.0, 254.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0.0, 160.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 13, 223.0, 0, 244.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 254.0, 0.0, 0, 4.0, 0.0, 0, 0, 0, 0.0, 67.0, 0.0, 254.0, 225.0, 8, 0.0, 98.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 146.0, 0, 0, 0, 0, 0, 0.0, 31.0, 0, 0, 0, 14, 0.0, 0, 28.0, 0.0, 0.0, 208.0, 0.0, 0, 255.0, 0.0, 0.0, 0, 0.0, 0, 0, 0, 0, 254.0, 254.0, 0, 0, 0.0, 0, 0.0, 206.0, 7, 0.0, 0, 254.0, 0, 0, 0, 0, 0, 0, 179.0, 223.0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 240.0, 0, 64.0, 52.0, 254.0, 0.0, 212.0, 9.0, 14, 0, 0, 137.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 50.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 137.0, 0, 0, 0, 0, 0, 0.0, 0, 254.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 241.0, 254.0, 0.0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 12, 0, 216.0, 0.0, 0, 137.0, 207.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 49.0, 0, 0.0, 0, 0.0, 20.0, 0.0, 0.0, 0, 0, 0, 35.0, 179.0, 0.0, 0.0, 0, 39.0, 0, 0, 0.0, 0, 0.0, 0, 0, 254.0, 12, 0, 11, 0.0, 0, 39.0, 0, 0, 254.0, 0, 50.0, 9, 0.0, 0.0, 60.0, 12.0, 56.0, 0.0, 0, 0.0, 0.0, 0, 185.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3, 8.0, 0, 251.0, 203.0, 0, 254.0, 0, 9, 0.0, 0, 0, 0, 0.0, 0, 0, 0.0, 254.0, 0, 0, 247.0, 0.0, 9.0, 0, 0, 0.0, 0.0, 0, 0, 232.0, 0, 127.0, 0, 0.0, 7.0, 0, 0, 0, 250.0, 0.0, 0, 0.0, 0, 0, 0.0, 4, 254.0, 0, 0, 0.0, 0, 124.0, 0.0, 0.0, 0, 0, 67.0, 0, 0, 0, 0, 254.0, 0, 254.0, 0.0, 0, 0.0, 82.0, 0.0, 0.0, 0, 4.0, 0.0, 0.0, 0.0, 0.0, 222.0, 0, 0.0, 0, 7, 157.0, 7, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 9, 0.0, 0.0, 0], [0, 15, 0, 0.0, 0, 31.0, 5.0, 0, 0.0, 0.0, 193.0, 15, 0, 0.0, 0.0, 253.0, 0, 245.0, 14, 0, 0.0, 0, 0, 0, 0, 0.0, 5, 0, 0.0, 122.0, 11, 253.0, 0.0, 0, 0, 236.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 253.0, 0.0, 0, 189.0, 0, 188.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0.0, 253.0, 0, 0, 12.0, 0.0, 0, 4, 0, 0, 0.0, 0, 0, 0, 0, 0, 15, 0.0, 0, 131.0, 0.0, 253.0, 0.0, 0.0, 0, 205.0, 252.0, 192.0, 0, 253.0, 0.0, 1, 0, 0, 253.0, 0.0, 253.0, 115.0, 0.0, 14, 0.0, 0.0, 253.0, 2, 0.0, 0.0, 0, 0, 0.0, 12, 0, 186.0, 0, 0, 0, 0, 0, 0.0, 220.0, 0, 12, 0, 8, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 218.0, 0.0, 0.0, 4, 174.0, 0, 0, 0, 13, 253.0, 225.0, 0, 0, 217.0, 0, 0.0, 253.0, 15, 0.0, 0, 0.0, 0, 0, 0, 0, 0, 14, 222.0, 0.0, 0, 250.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 253.0, 0.0, 0.0, 0.0, 0.0, 47.0, 15, 0, 0, 107.0, 94.0, 0.0, 253.0, 0, 0, 0, 0.0, 0, 222.0, 0, 0, 5, 0, 253.0, 0, 0, 0, 253.0, 0, 0, 1, 0, 0, 59.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 253.0, 13, 0, 0, 
6, 48.0, 0.0, 0.0, 9, 0, 0.0, 0.0, 0, 128.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 253.0, 0, 253.0, 253.0, 0, 0.0, 3, 205.0, 0.0, 0.0, 124.0, 4, 0, 0, 0.0, 253.0, 0.0, 0.0, 0, 245.0, 0, 14, 0.0, 0, 58.0, 0, 0, 0.0, 14, 0, 15, 253.0, 5, 0.0, 0, 0, 253.0, 0, 0.0, 2, 0.0, 0.0, 0.0, 42.0, 0.0, 0.0, 0, 41.0, 0.0, 0, 41.0, 0, 0, 0, 253.0, 253.0, 0.0, 0.0, 0.0, 0.0, 14, 222.0, 0, 0.0, 25.0, 0, 152.0, 0, 15, 0.0, 0, 0, 0, 0.0, 0, 4, 0.0, 13.0, 0, 0, 253.0, 0.0, 0.0, 0, 0, 0.0, 11.0, 0, 0, 131.0, 0, 0.0, 0, 253.0, 105.0, 0, 10, 0, 0.0, 253.0, 0, 0.0, 5, 0, 253.0, 13, 0.0, 2, 0, 150.0, 0, 0.0, 0.0, 253.0, 0, 0, 22.0, 0, 0, 0, 0, 0.0, 12, 0.0, 0.0, 0, 0.0, 253.0, 0.0, 0.0, 0, 0.0, 253.0, 0.0, 0.0, 0.0, 253.0, 0, 205.0, 0, 14, 0.0, 5, 0, 0.0, 0.0, 0, 0, 1, 0, 0, 0, 0, 14, 0.0, 0.0, 0], [0, 10, 0, 0.0, 0, 233.0, 0.0, 0, 0.0, 0.0, 0.0, 15, 0, 0.0, 0.0, 0.0, 0, 253.0, 14, 2, 0.0, 0, 0, 0, 8, 0.0, 6, 0, 0.0, 0.0, 4, 253.0, 7.0, 0, 0, 21.0, 0, 0, 253.0, 0, 0, 0.0, 0, 6, 0.0, 0.0, 253.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 253.0, 0.0, 3, 4, 0.0, 138.0, 0, 0, 211.0, 0.0, 0, 11, 0, 0, 0.0, 0, 0, 0, 0, 0, 15, 59.0, 0, 37.0, 0.0, 0.0, 0.0, 0.0, 2, 0.0, 151.0, 87.0, 0, 59.0, 0.0, 10, 0, 0, 0.0, 0.0, 253.0, 253.0, 0.0, 14, 0.0, 253.0, 0.0, 10, 0.0, 0.0, 3, 0, 0.0, 7, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 12, 0, 13, 0.0, 0, 0.0, 0.0, 0.0, 253.0, 0.0, 0, 0.0, 0.0, 253.0, 0, 253.0, 0, 0, 10, 13, 0.0, 0.0, 0, 3, 201.0, 0, 49.0, 211.0, 15, 0.0, 0, 32.0, 0, 0, 1, 0, 5, 14, 36.0, 250.0, 0, 253.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 253.0, 0.0, 238.0, 0.0, 0.0, 0.0, 15, 0, 0, 0.0, 0.0, 0.0, 0.0, 3, 0, 0, 0.0, 0, 253.0, 0, 1, 12, 0, 36.0, 0, 0, 0, 0.0, 0, 2, 0, 0, 0, 0.0, 9, 253.0, 66.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 253.0, 253.0, 13, 0, 0, 11, 0.0, 0.0, 0.0, 14, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 138.0, 0, 87.0, 0.0, 0, 0.0, 7, 6.0, 0.0, 0.0, 0.0, 11, 0, 7, 248.0, 0.0, 0.0, 0.0, 0, 253.0, 0, 14, 0.0, 0, 0.0, 0, 0, 62.0, 14, 0, 11, 253.0, 0, 0.0, 0, 0, 0.0, 0, 253.0, 11, 0.0, 0.0, 0.0, 253.0, 230.0, 0.0, 0, 18.0, 0.0, 0, 0.0, 7, 0, 0, 253.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14, 26.0, 0, 253.0, 0.0, 5, 211.0, 0, 15, 0.0, 0, 0, 2, 0.0, 0, 7, 0.0, 0.0, 0, 0, 222.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 203.0, 0, 0.0, 0, 36.0, 87.0, 0, 14, 0, 0.0, 0.0, 0, 0.0, 12, 0, 36.0, 13, 0.0, 3, 0, 0.0, 0, 0.0, 0.0, 253.0, 0, 6, 0.0, 0, 0, 0, 0, 253.0, 12, 152.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 253.0, 0.0, 0.0, 0.0, 0.0, 0, 150.0, 0, 14, 138.0, 13, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 14, 60.0, 0.0, 0], [0, 15, 0, 101.0, 0, 79.0, 0.0, 0, 0.0, 0.0, 0.0, 8, 0, 0.0, 244.0, 0.0, 0, 0.0, 12, 0, 0.0, 5, 0, 0, 0, 0.0, 7, 0, 0.0, 0.0, 4, 0.0, 253.0, 0, 2, 253.0, 0, 0, 253.0, 0, 0, 0.0, 1, 0, 0.0, 0.0, 30.0, 0, 0.0, 0, 0.0, 4, 0.0, 0, 0.0, 0.0, 253.0, 0.0, 3, 0, 253.0, 0.0, 0, 0, 0.0, 0.0, 0, 11, 0, 0, 0.0, 0, 1, 0, 0, 0, 5, 0.0, 0, 137.0, 0.0, 0.0, 0.0, 0.0, 9, 0.0, 141.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 182.0, 0.0, 14, 0.0, 198.0, 0.0, 12, 0.0, 0.0, 0, 0, 0.0, 7, 0, 58.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 8, 0, 0, 0.0, 0, 253.0, 0.0, 0.0, 247.0, 0.0, 6, 0.0, 0.0, 0.0, 2, 0.0, 0, 0, 0, 13, 0.0, 0.0, 1, 2, 0.0, 0, 251.0, 0.0, 12, 0.0, 2, 197.0, 0, 0, 4, 0, 0, 8, 253.0, 0.0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 224.0, 0, 8.0, 0.0, 239.0, 0.0, 99.0, 0.0, 15, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 2, 0.0, 0, 0, 4, 0, 216.0, 0, 0, 0, 0.0, 5, 0, 7, 0, 0, 253.0, 0, 253.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 253.0, 0.0, 13, 0, 0, 11, 0.0, 0.0, 0.0, 0, 0, 253.0, 0.0, 0, 242.0, 0.0, 0, 0, 0, 0, 0.0, 2, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 10, 0.0, 0.0, 0.0, 0.0, 13, 0, 0, 99.0, 
0.0, 0.0, 0.0, 0, 22.0, 0, 8, 0.0, 0, 0.0, 0, 0, 253.0, 14, 0, 15, 0.0, 2, 0.0, 0, 0, 0.0, 0, 23.0, 0, 0.0, 0.0, 213.0, 0.0, 253.0, 0.0, 1, 0.0, 0.0, 0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14, 253.0, 0, 191.0, 0.0, 0, 0.0, 0, 1, 0.0, 0, 3, 3, 0.0, 0, 11, 0.0, 117.0, 0, 0, 62.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 3, 16.0, 0, 0.0, 0, 0.0, 3.0, 0, 14, 0, 253.0, 0.0, 0, 0.0, 0, 0, 0.0, 13, 0.0, 10, 0, 86.0, 7, 0.0, 0.0, 0.0, 0, 0, 169.0, 0, 0, 0, 0, 253.0, 9, 253.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 253.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3, 0.0, 0, 0, 129.0, 0, 1, 253.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 14, 0.0, 0.0, 0], [0, 0, 0, 3.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 15, 0, 0.0, 195.0, 0.0, 0, 253.0, 0, 0, 0.0, 0, 0, 0, 0, 0.0, 8, 0, 0.0, 0.0, 11, 223.0, 192.0, 0, 0, 0.0, 0, 0, 253.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0.0, 253.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0, 253.0, 0, 184.0, 0.0, 253.0, 0.0, 0.0, 0, 253.0, 253.0, 61.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 253.0, 4.0, 0, 0.0, 176.0, 42.0, 6, 0.0, 0.0, 0, 0, 0.0, 12, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 172.0, 0.0, 0.0, 247.0, 0.0, 1, 0.0, 0.0, 112.0, 1, 0.0, 0, 0, 0, 2, 0.0, 0.0, 0, 0, 253.0, 0, 169.0, 0.0, 0, 0.0, 0, 0.0, 0, 0, 0, 0, 0, 14, 144.0, 203.0, 0, 253.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 56.0, 0, 243.0, 0.0, 0.0, 0.0, 59.0, 0.0, 2, 0, 0, 0.0, 0.0, 0.0, 96.0, 0, 0, 0, 0.0, 0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0.0, 0, 0, 3, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 110.0, 250.0, 1, 0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 17.0, 0.0, 0, 253.0, 195.0, 0, 0, 0, 0, 0.0, 0, 0, 164.0, 0, 61.0, 0.0, 0, 0.0, 0, 248.0, 0.0, 0.0, 0.0, 7, 0, 0, 83.0, 0.0, 0.0, 0.0, 0, 253.0, 0, 14, 0.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 223.0, 6, 0.0, 0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 253.0, 0.0, 0, 174.0, 0.0, 0, 0.0, 0, 0, 0, 253.0, 253.0, 0.0, 0.0, 0.0, 0.0, 14, 0.0, 0, 50.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0.0, 0, 0, 229.0, 0.0, 0, 0, 253.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 62.0, 0, 42.0, 0, 0.0, 61.0, 0, 6, 0, 57.0, 253.0, 0, 0.0, 0, 0, 61.0, 0, 0.0, 6, 0, 0.0, 0, 0.0, 0.0, 85.0, 0, 0, 0.0, 0, 0, 0, 0, 79.0, 0, 30.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 192.0, 253.0, 0.0, 0.0, 0.0, 0.0, 0, 58.0, 0, 0, 253.0, 0, 0, 204.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 7, 253.0, 0.0, 0], [0, 0, 0, 0.0, 0, 72.0, 0.0, 0, 0.0, 0.0, 0.0, 15, 0, 0.0, 40.0, 0.0, 0, 0.0, 0, 0, 0.0, 2, 0, 0, 0, 0.0, 9, 0, 0.0, 0.0, 0, 254.0, 254.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 252.0, 0, 0.0, 0, 142.0, 0, 0.0, 0, 0.0, 0.0, 253.0, 0.0, 4, 0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 7, 0, 0, 0.0, 0, 0, 0, 0, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 223.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 252.0, 0.0, 0.0, 0, 0.0, 61.0, 0.0, 0, 0.0, 0.0, 0, 2, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 203.0, 0, 0, 0, 7, 0.0, 0.0, 0, 4, 113.0, 0, 0.0, 0.0, 0, 0.0, 0, 0.0, 0, 0, 4, 0, 0, 9, 0.0, 0.0, 0, 61.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 102.0, 0.0, 172.0, 0.0, 243.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 152.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 62.0, 0, 0, 0, 0, 0, 0.0, 0, 254.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 1, 0.0, 0, 252.0, 0.0, 0, 0.0, 7, 0.0, 0.0, 0.0, 0.0, 8, 0, 0, 102.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 7, 0.0, 0, 0.0, 0, 0, 111.0, 0, 0, 0, 213.0, 0, 0.0, 0, 0, 0.0, 0, 203.0, 0, 0.0, 0.0, 0.0, 253.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 3, 1, 0, 224.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2, 0.0, 0, 82.0, 203.0, 0, 
0.0, 0, 0, 0.0, 0, 0, 6, 0.0, 0, 11, 0.0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0.0, 0, 0.0, 102.0, 0, 14, 0, 71.0, 0.0, 0, 0.0, 0, 0, 152.0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 81.0, 0, 0, 0.0, 0, 2, 0, 0, 41.0, 0, 253.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 253.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.0, 0], [0, 9, 0, 0.0, 0, 215.0, 0.0, 0, 0.0, 0.0, 0.0, 14, 0, 0.0, 0.0, 0.0, 0, 252.0, 13, 0, 0.0, 0, 0, 0, 3, 0.0, 10, 0, 0.0, 0.0, 0, 253.0, 53.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 3, 0.0, 0.0, 253.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 253.0, 0.0, 0, 2, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 5, 0, 0, 0.0, 0, 0, 0, 0, 0, 14, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 113.0, 0.0, 0, 255.0, 0.0, 10, 0, 0, 0.0, 0.0, 253.0, 63.0, 0.0, 13, 0.0, 253.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 11, 0, 12, 0.0, 0, 0.0, 0.0, 0.0, 12.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 89.0, 0, 0, 6, 11, 0.0, 0.0, 0, 0, 112.0, 0, 0.0, 253.0, 5, 0.0, 0, 19.0, 0, 0, 0, 0, 5, 13, 114.0, 0.0, 0, 252.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 253.0, 0.0, 227.0, 0.0, 0.0, 0.0, 15, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 252.0, 0, 0, 11, 0, 0.0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 7, 252.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 114.0, 15.0, 12, 0, 0, 9, 0.0, 0.0, 0.0, 14, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 4, 0, 2, 229.0, 0.0, 0.0, 0.0, 0, 239.0, 0, 6, 0.0, 0, 0.0, 0, 0, 22.0, 13, 0, 13, 204.0, 0, 0.0, 0, 0, 0.0, 0, 136.0, 10, 0.0, 0.0, 0.0, 253.0, 0.0, 0.0, 0, 25.0, 0.0, 0, 0.0, 3, 0, 0, 174.0, 0.0, 0.0, 0.0, 0.0, 0.0, 13, 63.0, 0, 27.0, 0.0, 4, 92.0, 0, 6, 0.0, 0, 0, 0, 0.0, 0, 4, 0.0, 0.0, 0, 0, 222.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 107.0, 0, 0.0, 0, 27.0, 126.0, 0, 8, 0, 0.0, 0.0, 0, 0.0, 11, 0, 0.0, 13, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 167.0, 0, 2, 0.0, 0, 0, 0, 0, 253.0, 9, 177.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 112.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 14, 0.0, 12, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 13, 0.0, 0.0, 0], [0, 6, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 10, 0, 0.0, 0.0, 0.0, 0, 0.0, 15, 0, 0.0, 0, 0, 0, 0, 0.0, 11, 0, 0.0, 0.0, 0, 254.0, 0.0, 0, 0, 0.0, 0, 0, 179.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 100.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 80.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 4, 0, 0, 0.0, 0, 0, 0, 0, 0, 11, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 254.0, 0.0, 0, 0, 0, 5.0, 0.0, 254.0, 0.0, 0.0, 15, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 10, 0, 2, 0.0, 0, 0.0, 0.0, 0.0, 239.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 239.0, 0, 0, 0, 14, 0.0, 0.0, 0, 0, 0.0, 0, 0.0, 64.0, 11, 0.0, 0, 0.0, 0, 0, 0, 0, 0, 15, 0.0, 20.0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 188.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 150.0, 0, 0, 5, 0, 38.0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 80.0, 20.0, 15, 0, 0, 7, 0.0, 0.0, 0.0, 3, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 140.0, 0, 0.0, 1, 0.0, 0.0, 0.0, 0.0, 4, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 3, 0.0, 0, 0.0, 0, 0, 0.0, 15, 0, 5, 209.0, 0, 0.0, 0, 0, 0.0, 0, 239.0, 0, 0.0, 0.0, 0.0, 234.0, 239.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 12, 0.0, 0, 244.0, 0.0, 0, 0.0, 0, 17, 0.0, 0, 0, 0, 0.0, 0, 2, 0.0, 0.0, 0, 0, 34.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 3.0, 0, 0.0, 0, 181.0, 0.0, 0, 9, 0, 0.0, 0.0, 0, 0.0, 3, 0, 0.0, 14, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 129.0, 0, 0, 0.0, 0, 0, 0, 0, 0.0, 9, 0.0, 0.0, 0, 0.0, 0.0, 
0.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0, 0.0, 0, 11, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 8, 0.0, 0.0, 0], [0, 5, 0, 0.0, 0, 252.0, 76.0, 0, 0.0, 0.0, 0.0, 12, 0, 0.0, 0.0, 0.0, 0, 252.0, 13, 0, 0.0, 2, 0, 0, 0, 0.0, 12, 0, 0.0, 0.0, 0, 253.0, 47.0, 0, 0, 0.0, 0, 0, 128.0, 0, 0, 0.0, 0, 5, 0.0, 0.0, 252.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 252.0, 0.0, 5, 0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 11, 0, 0, 0.0, 0, 0, 0, 0, 0, 7, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 2, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 252.0, 252.0, 0.0, 9, 0.0, 253.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 11, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 178.0, 0, 0, 0, 12, 0.0, 0.0, 0, 6, 221.0, 0, 0.0, 0.0, 12, 0.0, 0, 90.0, 0, 0, 2, 0, 1, 13, 0.0, 0.0, 0, 86.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 222.0, 0.0, 252.0, 0.0, 0.0, 0.0, 3, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 252.0, 0, 0, 11, 0, 154.0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0, 252.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 148.0, 0.0, 13, 0, 0, 10, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 5, 0.0, 0.0, 0.0, 0.0, 7, 0, 0, 100.0, 0.0, 0.0, 0.0, 0, 252.0, 0, 5, 0.0, 0, 0.0, 0, 0, 234.0, 13, 0, 2, 243.0, 0, 0.0, 0, 0, 0.0, 0, 199.0, 0, 0.0, 0.0, 0.0, 252.0, 0.0, 0.0, 0, 211.0, 0.0, 0, 0.0, 6, 0, 0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10, 0.0, 0, 74.0, 0.0, 0, 0.0, 0, 7, 0.0, 0, 0, 4, 0.0, 0, 10, 0.0, 0.0, 0, 0, 36.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 122.0, 0, 0.0, 0, 87.0, 0.0, 0, 13, 0, 34.0, 0.0, 0, 0.0, 5, 0, 0.0, 9, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 126.0, 0, 0, 0.0, 0, 0, 0, 0, 253.0, 11, 252.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 43.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 4, 0.0, 0.0, 0], [0, 16, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 11, 0, 0.0, 0.0, 0.0, 0, 87.0, 4, 0, 0.0, 0, 0, 0, 0, 0.0, 13, 0, 0.0, 116.0, 8, 48.0, 0.0, 0, 0, 254.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 249.0, 0, 192.0, 2, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 124.0, 16.0, 0, 0, 0.0, 0.0, 0, 4, 0, 0, 0.0, 0, 0, 0, 0, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 4, 0.0, 76.0, 252.0, 0, 0.0, 0.0, 0, 0, 0, 196.0, 0.0, 0.0, 0.0, 0.0, 9, 0.0, 0.0, 35.0, 13, 0.0, 0.0, 0, 0, 0.0, 8, 0, 254.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 4, 0.0, 0.0, 0.0, 6, 0.0, 0, 0, 0, 14, 242.0, 0.0, 0, 0, 18.0, 0, 0.0, 0.0, 8, 0.0, 0, 0.0, 0, 0, 0, 0, 0, 14, 116.0, 0.0, 0, 196.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 112.0, 0.0, 0.0, 0.0, 0.0, 0.0, 16, 0, 0, 0.0, 0.0, 0.0, 116.0, 0, 0, 0, 0.0, 0, 0.0, 0, 0, 0, 0, 254.0, 0, 0, 0, 254.0, 3, 0, 5, 0, 0, 250.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 14, 0, 0, 3, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 254.0, 0.0, 0, 0.0, 4, 0.0, 0.0, 0.0, 0.0, 14, 0, 0, 0.0, 221.0, 0.0, 0.0, 0, 3.0, 2, 10, 0.0, 0, 0.0, 0, 0, 0.0, 7, 0, 16, 0.0, 10, 0.0, 0, 0, 77.0, 0, 0.0, 0, 116.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0, 0, 0, 254.0, 116.0, 0.0, 0.0, 0.0, 0.0, 9, 196.0, 0, 0.0, 101.0, 0, 0.0, 0, 2, 0.0, 0, 0, 0, 0.0, 0, 4, 0.0, 118.0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0.0, 0, 45.0, 121.0, 0, 15, 0, 0.0, 0.0, 0, 0.0, 0, 0, 212.0, 14, 0.0, 11, 0, 254.0, 3, 0.0, 0.0, 0.0, 0, 0, 228.0, 0, 0, 0, 0, 0.0, 3, 0.0, 0.0, 0, 0.0, 124.0, 0.0, 0.0, 0, 0.0, 204.0, 0.0, 0.0, 0.0, 0.0, 2, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 4, 0, 0, 0, 0, 12, 0.0, 0.0, 0], [0, 14, 0, 107.0, 0, 156.0, 0.0, 0, 92.0, 0.0, 0.0, 14, 0, 0.0, 252.0, 253.0, 0, 85.0, 14, 0, 0.0, 0, 0, 0, 0, 0.0, 14, 0, 
0.0, 0.0, 0, 252.0, 253.0, 0, 0, 246.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 252.0, 252.0, 0, 252.0, 0, 252.0, 0, 0.0, 0, 0.0, 0.0, 252.0, 0.0, 0, 0, 71.0, 53.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 14, 133.0, 0, 146.0, 2.0, 231.0, 0.0, 0.0, 0, 188.0, 249.0, 252.0, 0, 252.0, 0.0, 0, 0, 0, 252.0, 0.0, 252.0, 0.0, 0.0, 13, 0.0, 246.0, 192.0, 3, 0.0, 0.0, 0, 0, 0.0, 4, 0, 194.0, 0, 0, 0, 0, 0, 0.0, 252.0, 0, 0, 0, 12, 0.0, 0, 0.0, 0.0, 0.0, 249.0, 0.0, 0, 85.0, 0.0, 253.0, 0, 253.0, 0, 0, 0, 13, 253.0, 253.0, 0, 0, 85.0, 0, 253.0, 252.0, 15, 0.0, 0, 120.0, 0, 0, 0, 0, 0, 13, 232.0, 253.0, 0, 85.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 252.0, 0, 252.0, 0.0, 252.0, 0.0, 252.0, 252.0, 14, 0, 0, 215.0, 0.0, 0.0, 72.0, 0, 0, 0, 0.0, 0, 252.0, 0, 0, 0, 0, 253.0, 0, 0, 0, 252.0, 0, 0, 0, 0, 0, 57.0, 0, 252.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 232.0, 244.0, 12, 0, 0, 0, 0.0, 0.0, 0.0, 13, 0, 62.0, 0.0, 0, 71.0, 63.0, 0, 0, 0, 0, 0.0, 0, 0, 160.0, 0, 252.0, 252.0, 0, 0.0, 0, 253.0, 0.0, 0.0, 127.0, 11, 0, 0, 253.0, 253.0, 0.0, 0.0, 0, 57.0, 0, 12, 0.0, 0, 0.0, 0, 0, 252.0, 13, 0, 14, 252.0, 0, 0.0, 0, 0, 252.0, 0, 252.0, 4, 0.0, 0.0, 53.0, 253.0, 211.0, 0.0, 0, 15.0, 0.0, 0, 85.0, 0, 0, 0, 85.0, 51.0, 0.0, 0.0, 0.0, 0.0, 14, 249.0, 0, 252.0, 252.0, 0, 85.0, 0, 14, 0.0, 0, 0, 0, 0.0, 0, 0, 9.0, 252.0, 0, 0, 253.0, 0.0, 0.0, 0, 0, 0.0, 92.0, 0, 0, 211.0, 0, 0.0, 0, 252.0, 252.0, 0, 13, 0, 253.0, 231.0, 0, 0.0, 0, 0, 252.0, 12, 0.0, 0, 0, 129.0, 0, 0.0, 0.0, 252.0, 0, 0, 106.0, 0, 0, 0, 0, 232.0, 0, 252.0, 0.0, 0, 0.0, 253.0, 0.0, 0.0, 0, 0.0, 85.0, 0.0, 0.0, 0.0, 252.0, 0, 255.0, 0, 13, 253.0, 4, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 13, 203.0, 0.0, 0], [0, 0, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 24.0, 0, 0, 0.0, 5, 1, 0, 0, 0.0, 15, 0, 0.0, 0.0, 5, 0.0, 0.0, 0, 2, 0.0, 0, 0, 253.0, 0, 0, 0.0, 1, 0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0.0, 4, 0.0, 0, 0.0, 0.0, 54.0, 0.0, 5, 0, 0.0, 112.0, 0, 0, 0.0, 0.0, 0, 5, 0, 0, 0.0, 0, 0, 0, 1, 0, 0, 167.0, 0, 0.0, 0.0, 253.0, 0.0, 0.0, 5, 106.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 245.0, 0.0, 4, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 2, 1, 0, 0.0, 0, 0.0, 0.0, 0.0, 253.0, 0.0, 6, 0.0, 0.0, 248.0, 5, 0.0, 0, 0, 0, 0, 0.0, 0.0, 1, 1, 0.0, 0, 32.0, 0.0, 0, 0.0, 1, 0.0, 0, 0, 6, 0, 0, 0, 0.0, 253.0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 175.0, 0, 0, 0, 0.0, 2, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0.0, 4, 0, 5, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 246.0, 0.0, 0, 0, 0, 8, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0.0, 34.0, 0, 0, 0, 1, 0.0, 0, 0, 112.0, 0, 0.0, 0.0, 0, 0.0, 5, 253.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 117.0, 0, 0, 0.0, 1, 0.0, 0, 0, 0.0, 0, 0, 0, 0.0, 9, 0.0, 0, 0, 0.0, 0, 102.0, 0, 0.0, 0.0, 0.0, 0.0, 135.0, 0.0, 1, 0.0, 0.0, 0, 0.0, 0, 1, 0, 0.0, 253.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 235.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 3, 6, 0.0, 0, 5, 5.0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 3, 0.0, 0, 2.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 129.0, 0, 0.0, 0, 0, 0.0, 2, 0.0, 0, 0, 0.0, 4, 0.0, 0.0, 0.0, 0, 0, 0.0, 0, 2, 0, 0, 104.0, 9, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2, 196.0, 0, 0, 169.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 1, 0, 0, 253.0, 0.0, 0], [0, 3, 0, 0.0, 0, 207.0, 0.0, 0, 0.0, 0.0, 0.0, 14, 0, 0.0, 0.0, 0.0, 0, 96.0, 13, 0, 0.0, 0, 0, 0, 0, 0.0, 16, 0, 0.0, 0.0, 0, 253.0, 0.0, 0, 0, 0.0, 0, 0, 128.0, 0, 0, 0.0, 0, 5, 0.0, 0.0, 252.0, 0, 0.0, 0, 0.0, 0, 0.0, 0, 0.0, 0.0, 252.0, 0.0, 6, 0, 0.0, 0.0, 0, 0, 0.0, 
0.0, 0, 11, 0, 0, 0.0, 0, 0, 0, 0, 0, 3, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 3, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 252.0, 252.0, 0.0, 5, 0.0, 70.0, 0.0, 2, 0.0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 11, 0, 0, 0.0, 0, 0.0, 0.0, 0.0, 64.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 116.0, 0, 0, 0, 12, 0.0, 0.0, 0, 6, 221.0, 0, 0.0, 0.0, 3, 0.0, 0, 0.0, 0, 0, 2, 0, 1, 13, 0.0, 0.0, 0, 23.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 138.0, 0.0, 207.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 248.0, 0, 0, 11, 0, 0.0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0, 223.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 253.0, 53.0, 8, 0, 0, 10, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 6, 0.0, 0.0, 0.0, 0.0, 10, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 221.0, 0, 8, 0.0, 0, 0.0, 0, 0, 0.0, 11, 0, 0, 253.0, 0, 0.0, 0, 0, 0.0, 0, 116.0, 0, 0.0, 0.0, 0.0, 252.0, 0.0, 0.0, 0, 26.0, 0.0, 0, 0.0, 6, 0, 0, 137.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6, 0.0, 0, 116.0, 0.0, 0, 0.0, 0, 1, 0.0, 0, 0, 4, 0.0, 0, 9, 0.0, 0.0, 0, 0, 5.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 64.0, 0, 0.0, 0, 0.0, 0.0, 0, 13, 0, 0.0, 0.0, 0, 0.0, 2, 0, 0.0, 4, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 116.0, 0, 0, 0.0, 0, 0, 0, 0, 253.0, 11, 25.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 210.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 1, 0.0, 0.0, 0], [0, 15, 0, 0.0, 0, 252.0, 0.0, 0, 0.0, 0.0, 0.0, 8, 0, 0.0, 0.0, 0.0, 0, 217.0, 13, 3, 0.0, 0, 0, 0, 7, 0.0, 17, 0, 0.0, 0.0, 0, 237.0, 0.0, 0, 0, 110.0, 0, 0, 252.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 252.0, 0, 0.0, 0, 104.0, 0, 0.0, 0, 0.0, 0.0, 253.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 79.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 14, 0.0, 0, 63.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 191.0, 231.0, 0, 0.0, 0.0, 5, 0, 0, 0.0, 0.0, 252.0, 241.0, 0.0, 10, 0.0, 0.0, 144.0, 0, 0.0, 0.0, 3, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 0.0, 0, 4, 0, 10, 0.0, 0, 0.0, 0.0, 0.0, 144.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 144.0, 0, 0, 9, 9, 0.0, 0.0, 0, 0, 253.0, 0, 0.0, 0.0, 11, 0.0, 0, 176.0, 0, 0, 0, 0, 0, 11, 109.0, 0.0, 0, 252.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 253.0, 0.0, 201.0, 0.0, 0.0, 0.0, 14, 0, 0, 0.0, 0.0, 0.0, 0.0, 4, 0, 0, 0.0, 0, 252.0, 0, 2, 4, 0, 0.0, 0, 0, 0, 145.0, 0, 0, 0, 0, 0, 0.0, 6, 182.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 252.0, 0.0, 9, 0, 0, 0, 0.0, 0.0, 0.0, 13, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 252.0, 0.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 7, 0.0, 0.0, 0.0, 0.0, 0, 215.0, 0, 0, 0.0, 0, 0.0, 0, 0, 21.0, 14, 0, 14, 62.0, 0, 0.0, 0, 0, 0.0, 0, 144.0, 10, 0.0, 0.0, 0.0, 252.0, 144.0, 0.0, 0, 105.0, 0.0, 0, 0.0, 0, 0, 0, 252.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14, 109.0, 0, 145.0, 0.0, 0, 109.0, 0, 11, 0.0, 0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 255.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 0, 0, 237.0, 0, 0.0, 0, 0.0, 108.0, 0, 5, 0, 0.0, 0.0, 0, 0.0, 5, 0, 253.0, 9, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 62.0, 0, 6, 0.0, 0, 0, 0, 0, 181.0, 2, 0.0, 0.0, 0, 0.0, 84.0, 0.0, 0.0, 0, 0.0, 252.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 10, 0.0, 12, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 13, 0.0, 0.0, 0], [0, 0, 0, 0.0, 0, 189.0, 254.0, 0, 254.0, 0.0, 0.0, 0, 0, 59.0, 252.0, 59.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0, 0.0, 18, 0, 0.0, 0.0, 0, 0.0, 99.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 0, 0, 0.0, 187.0, 254.0, 0, 251.0, 0, 0.0, 0, 0.0, 0, 0.0, 254.0, 207.0, 0.0, 0, 0, 151.0, 0.0, 0, 0, 0.0, 254.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 0, 0, 0, 254.0, 0, 144.0, 64.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 168.0, 0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 0, 228.0, 0, 0, 0, 0, 0, 0.0, 
253.0, 0, 0, 0, 0, 13.0, 0, 254.0, 0.0, 0.0, 40.0, 0.0, 0, 0.0, 0.0, 227.0, 0, 62.0, 0, 0, 0, 0, 254.0, 0.0, 0, 0, 151.0, 0, 0.0, 0.0, 0, 0.0, 0, 213.0, 0, 0, 0, 0, 0, 0, 0.0, 254.0, 0, 0.0, 0, 0, 111.0, 184.0, 0.0, 119.0, 245.0, 0, 0.0, 254.0, 254.0, 0.0, 76.0, 254.0, 0, 0, 0, 6.0, 0.0, 0.0, 0.0, 0, 0, 0, 0.0, 0, 0.0, 0, 0, 0, 0, 0.0, 0, 0, 0, 67.0, 0, 0, 0, 0, 0, 124.0, 0, 207.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 216.0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 254.0, 0.0, 0, 151.0, 72.0, 0, 0, 0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 249.0, 0, 0, 0, 0.0, 129.0, 0.0, 0.0, 0, 0.0, 0, 0, 8.0, 0, 0.0, 0, 0, 254.0, 0, 0, 0, 0.0, 0, 22.0, 0, 0, 254.0, 0, 248.0, 0, 0.0, 0.0, 254.0, 88.0, 0.0, 0.0, 0, 233.0, 0.0, 0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 227.0, 4.0, 0, 0.0, 0, 238.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 0, 0, 0.0, 0, 0, 153.0, 245.0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 147.0, 0, 0, 16.0, 0, 254.0, 0, 0.0, 0.0, 0, 0, 0, 254.0, 0.0, 0, 0.0, 0, 0, 0.0, 0, 168.0, 0, 0, 26.0, 0, 13.0, 0.0, 0.0, 0, 0, 254.0, 0, 0, 0, 0, 0.0, 0, 167.0, 0.0, 0, 0.0, 141.0, 0.0, 0.0, 0, 151.0, 26.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0, 60.0, 0, 0, 254.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 227.0, 218.0, 0], [0, 0, 0, 0.0, 0, 11.0, 253.0, 0, 253.0, 0.0, 0.0, 12, 0, 0.0, 0.0, 0.0, 0, 253.0, 10, 4, 0.0, 4, 0, 1, 8, 0.0, 19, 0, 0.0, 0.0, 7, 68.0, 245.0, 1, 0, 0.0, 0, 0, 253.0, 0, 0, 0.0, 0, 6, 0.0, 0.0, 253.0, 0, 0.0, 0, 81.0, 0, 0.0, 0, 0.0, 0.0, 253.0, 0.0, 5, 4, 0.0, 253.0, 0, 0, 42.0, 0.0, 0, 11, 0, 0, 0.0, 0, 0, 0, 0, 0, 11, 0.0, 0, 0.0, 124.0, 15.0, 0.0, 0.0, 8, 0.0, 0.0, 253.0, 0, 0.0, 0.0, 10, 0, 0, 0.0, 0.0, 186.0, 253.0, 0.0, 4, 0.0, 253.0, 253.0, 12, 0.0, 0.0, 4, 2, 0.0, 12, 0, 0.0, 0, 0, 0, 0, 0, 0.0, 171.0, 0, 12, 0, 6, 0.0, 0, 0.0, 0.0, 0.0, 36.0, 0.0, 0, 0.0, 0.0, 42.0, 0, 253.0, 1, 0, 10, 11, 0.0, 0.0, 0, 7, 253.0, 0, 41.0, 0.0, 0, 0.0, 0, 0.0, 0, 0, 4, 0, 5, 6, 0.0, 0.0, 0, 253.0, 0, 2, 0.0, 238.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 42.0, 0.0, 245.0, 81.0, 0, 1, 0, 0.0, 0.0, 0.0, 0.0, 6, 0, 2, 0.0, 0, 19.0, 0, 2, 12, 0, 0.0, 0, 0, 0, 143.0, 1, 3, 1, 0, 0, 0.0, 9, 246.0, 42.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 253.0, 253.0, 10, 0, 0, 11, 0.0, 0.0, 0.0, 11, 0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0, 0, 0, 0.0, 0, 1, 253.0, 0, 253.0, 0.0, 0, 0.0, 10, 12.0, 0.0, 0.0, 253.0, 13, 0, 7, 253.0, 0.0, 0.0, 0.0, 0, 253.0, 0, 14, 0.0, 0, 0.0, 0, 0, 0.0, 10, 0, 0, 253.0, 2, 0.0, 0, 0, 0.0, 2, 218.0, 9, 0.0, 0.0, 0.0, 162.0, 217.0, 0.0, 0, 253.0, 0.0, 0, 0.0, 7, 0, 1, 253.0, 15.0, 0.0, 0.0, 0.0, 77.0, 5, 0.0, 0, 143.0, 86.0, 5, 36.0, 0, 0, 0.0, 0, 0, 5, 0.0, 0, 11, 0.0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0, 0, 0.0, 40.0, 0, 1, 0.0, 1, 0.0, 0, 0.0, 253.0, 0, 14, 0, 0.0, 0.0, 0, 0.0, 12, 0, 253.0, 4, 0.0, 6, 0, 0.0, 4, 0.0, 0.0, 253.0, 0, 6, 0.0, 0, 2, 0, 0, 253.0, 12, 245.0, 15.0, 0, 0.0, 245.0, 0.0, 0.0, 0, 0.0, 253.0, 0.0, 0.0, 0.0, 0.0, 0, 215.0, 0, 2, 0.0, 13, 0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.0, 1]]} \ No newline at end of file diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/sparkapp.tar b/web_console_v2/api/test/fedlearner_webconsole/test_data/sparkapp.tar deleted file mode 100644 index c7d93f210..000000000 Binary files a/web_console_v2/api/test/fedlearner_webconsole/test_data/sparkapp.tar and /dev/null differ diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/workflow_config.json b/web_console_v2/api/test/fedlearner_webconsole/test_data/workflow_config.json deleted file mode 100644 index 3e3d675a1..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/test_data/workflow_config.json +++ 
/dev/null @@ -1,58 +0,0 @@ -{ - "group_alias": "test_workflow", - "is_left": true, - "variables": [ - { - "name": "v1", - "value": "value1", - "access_mode": "PRIVATE", - "widget_schema": "" - }, - { - "name": "v2", - "value": "value2", - "access_mode": "PEER_READABLE", - "widget_schema": "" - }, - { - "name": "v3", - "value": "value3", - "access_mode": "PEER_WRITABLE", - "widget_schema": "" - } - ], - "job_definitions": [ - { - "name": "data-import", - "job_type": "RAW_DATA", - "is_federated": false, - "yaml_template": "data-import-yaml", - "variables": [], - "dependencies": [] - }, - { - "name": "data-join", - "job_type": "PSI_DATA_JOIN", - "is_federated": true, - "yaml_template": "data-join-yaml", - "variables": [], - "dependencies": [ - { - "source": "data-import" - } - ] - }, - { - "name": "training", - "job_type": "TREE_MODEL_TRAINING", - "is_federated": true, - "yaml_template": "training-yaml", - "variables": [], - "dependencies": [ - { - "source": "data-join" - } - ] - } - ] -} diff --git a/web_console_v2/api/test/fedlearner_webconsole/test_data/workflow_config_right.json b/web_console_v2/api/test/fedlearner_webconsole/test_data/workflow_config_right.json deleted file mode 100644 index b4a8f0856..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/test_data/workflow_config_right.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "group_alias": "test_workflow", - "is_left": false, - "variables": [ - { - "name": "v1", - "value": "value1", - "access_mode": "PRIVATE", - "widget_schema": "" - }, - { - "name": "v2", - "value": "value2", - "access_mode": "PEER_READABLE", - "widget_schema": "" - }, - { - "name": "v3", - "value": "value3", - "access_mode": "PEER_WRITABLE", - "widget_schema": "" - } - ], - "job_definitions": [ - { - "name": "data-import", - "job_type": "RAW_DATA", - "is_federated": false, - "yaml_template": "data-import-yaml", - "variables": [], - "dependencies": [] - }, - { - "name": "data-join", - "job_type": "PSI_DATA_JOIN", - "is_federated": true, - "yaml_template": "data-join-yaml", - "variables": [], - "dependencies": [ - { - "source": "data-import" - } - ] - }, - { - "name": "training", - "job_type": "TREE_MODEL_TRAINING", - "is_federated": true, - "yaml_template": "training-yaml", - "variables": [], - "dependencies": [ - { - "source": "data-join" - } - ] - } - ] -} diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/base64_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/base64_test.py deleted file mode 100644 index 2667e1025..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/base64_test.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import unittest - -from fedlearner_webconsole.utils.base64 import base64encode, base64decode - - -class Base64Test(unittest.TestCase): - def test_base64encode(self): - self.assertEqual(base64encode('hello 1@2'), 'aGVsbG8gMUAy') - self.assertEqual(base64encode('😈'), '8J+YiA==') - - def test_base64decode(self): - self.assertEqual(base64decode('aGVsbG8gMUAy'), 'hello 1@2') - self.assertEqual(base64decode('JjEzOVlUKiYm'), '&139YT*&&') - - def test_base64_encode_and_decode(self): - self.assertEqual(base64decode(base64encode('test')), 'test') - self.assertEqual(base64encode(base64decode('aGVsbG8gMUAy')), - 'aGVsbG8gMUAy') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/decorators_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/decorators_test.py deleted file mode 100644 index 5a6db55db..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/decorators_test.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -import os -import grpc -import unittest -from unittest.mock import MagicMock, patch - -from fedlearner_webconsole.auth.models import User, Role -from fedlearner_webconsole.utils.decorators import retry_fn, admin_required, jwt_required -from fedlearner_webconsole.exceptions import UnauthorizedException - -@retry_fn(retry_times=2, needed_exceptions=[grpc.RpcError]) -def some_unstable_connect(client): - res = client() - if res['status'] != 0: - raise grpc.RpcError() - else: - return res['data'] - -@admin_required -def some_authorized_login(): - return 1 - - -class DecoratorsTest(unittest.TestCase): - @staticmethod - def generator_helper(inject_res): - for r in inject_res: - yield r - - def test_retry_fn(self): - res = [{ - 'status': -1, - 'data': 'hhhhhh' - }, { - 'status': -1, - 'data': 'hhhh' - }] - - client = MagicMock() - client.side_effect = res - with self.assertRaises(grpc.RpcError): - some_unstable_connect(client=client) - - res = [{'status': -1, 'data': 'hhhhhh'}, {'status': 0, 'data': 'hhhh'}] - client = MagicMock() - client.side_effect = res - self.assertTrue(some_unstable_connect(client=client) == 'hhhh') - - - @patch('fedlearner_webconsole.utils.decorators.get_current_user') - def test_admin_required(self, mock_get_current_user): - admin = User(id=0, username='adamin', password='admin', role=Role.ADMIN) - user = User(id=1, username='ada', password='ada', role=Role.USER) - mock_get_current_user.return_value = admin - self.assertTrue(some_authorized_login() == 1) - - mock_get_current_user.return_value = user - self.assertRaises(UnauthorizedException, some_authorized_login) - -if __name__ == '__main__': - unittest.main() - diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/file_manager_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/file_manager_test.py deleted file mode 100644 index f32bf303e..000000000 --- 
a/web_console_v2/api/test/fedlearner_webconsole/utils/file_manager_test.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os -import shutil -import tempfile -import unittest - -from collections import namedtuple -from pathlib import Path -from tensorflow.io import gfile - -from fedlearner_webconsole.utils.file_manager import GFileFileManager, FileManager, File - -FakeFileStatistics = namedtuple('FakeFileStatistics', ['length', 'mtime_nsec']) - - -class GFileFileManagerTest(unittest.TestCase): - - _F1_SIZE = 3 - _F2_SIZE = 4 - _S1_SIZE = 55 - _F1_MTIME = 1613982390 - _F2_MTIME = 1613982391 - _S1_MTIME = 1613982392 - - def _get_file_stat(self, orig_os_stat, path): - gfile_stat = FakeFileStatistics(2, 1613982390 * 1e9) - if path == self._get_temp_path('f1.txt') or \ - path == self._get_temp_path('subdir/f1.txt'): - gfile_stat = FakeFileStatistics(self._F1_SIZE, - self._F1_MTIME * 1e9) - return gfile_stat - elif path == self._get_temp_path('f2.txt') or \ - path == self._get_temp_path('f3.txt'): - gfile_stat = FakeFileStatistics(self._F2_SIZE, - self._F2_MTIME * 1e9) - return gfile_stat - elif path == self._get_temp_path('subdir/s1.txt'): - gfile_stat = FakeFileStatistics(self._S1_SIZE, - self._S1_MTIME * 1e9) - return gfile_stat - else: - return orig_os_stat(path) - - def setUp(self): - # Create a temporary directory - self._test_dir = tempfile.mkdtemp() - subdir = Path(self._test_dir).joinpath('subdir') - subdir.mkdir(exist_ok=True) - Path(self._test_dir).joinpath('f1.txt').write_text('xxx') - Path(self._test_dir).joinpath('f2.txt').write_text('xxx') - subdir.joinpath('s1.txt').write_text('xxx') - - # Mocks os.stat - self._orig_os_stat = os.stat - - def fake_stat(path, *arg, **kwargs): - return self._get_file_stat(self._orig_os_stat, path) - - gfile.stat = fake_stat - - self._fm = GFileFileManager() - - def tearDown(self): - os.stat = self._orig_os_stat - # Remove the directory after the test - shutil.rmtree(self._test_dir) - - def _get_temp_path(self, file_path: str = None) -> str: - return str(Path(self._test_dir, file_path or '').absolute()) - - def test_can_handle(self): - self.assertTrue(self._fm.can_handle('/data/abc')) - self.assertFalse(self._fm.can_handle('data')) - - def test_ls(self): - # List file - self.assertEqual(self._fm.ls(self._get_temp_path('f1.txt')), [ - File(path=self._get_temp_path('f1.txt'), - size=self._F1_SIZE, - mtime=self._F1_MTIME) - ]) - # List folder - self.assertEqual( - sorted(self._fm.ls(self._get_temp_path()), - key=lambda file: file.path), - sorted([ - File(path=self._get_temp_path('f1.txt'), - size=self._F1_SIZE, - mtime=self._F1_MTIME), - File(path=self._get_temp_path('f2.txt'), - size=self._F2_SIZE, - mtime=self._F2_MTIME) - ], - key=lambda file: file.path)) - # List folder recursively - self.assertEqual( - sorted(self._fm.ls(self._get_temp_path(), recursive=True), - key=lambda file: file.path), - sorted([ - 
File(path=self._get_temp_path('f1.txt'), - size=self._F1_SIZE, - mtime=self._F1_MTIME), - File(path=self._get_temp_path('f2.txt'), - size=self._F2_SIZE, - mtime=self._F2_MTIME), - File(path=self._get_temp_path('subdir/s1.txt'), - size=self._S1_SIZE, - mtime=self._S1_MTIME), - ], - key=lambda file: file.path)) - - def test_move(self): - # Moves to another folder - self._fm.move(self._get_temp_path('f1.txt'), - self._get_temp_path('subdir/')) - self.assertEqual( - sorted(self._fm.ls(self._get_temp_path('subdir')), - key=lambda file: file.path), - sorted([ - File(path=self._get_temp_path('subdir/s1.txt'), - size=self._S1_SIZE, - mtime=self._S1_MTIME), - File(path=self._get_temp_path('subdir/f1.txt'), - size=self._F1_SIZE, - mtime=self._F1_MTIME), - ], - key=lambda file: file.path)) - # Renames - self._fm.move(self._get_temp_path('f2.txt'), - self._get_temp_path('f3.txt')) - with self.assertRaises(ValueError): - self._fm.ls(self._get_temp_path('f2.txt')) - self.assertEqual(self._fm.ls(self._get_temp_path('f3.txt')), [ - File(path=self._get_temp_path('f3.txt'), - size=self._F2_SIZE, - mtime=self._F2_MTIME) - ]) - - def test_remove(self): - self._fm.remove(self._get_temp_path('f1.txt')) - self._fm.remove(self._get_temp_path('subdir')) - self.assertEqual(self._fm.ls(self._get_temp_path(), recursive=True), [ - File(path=self._get_temp_path('f2.txt'), - size=self._F2_SIZE, - mtime=self._F2_MTIME) - ]) - - def test_copy(self): - self._fm.copy(self._get_temp_path('f1.txt'), - self._get_temp_path('subdir')) - self.assertEqual(self._fm.ls(self._get_temp_path('f1.txt')), [ - File(path=self._get_temp_path('f1.txt'), - size=self._F1_SIZE, - mtime=self._F1_MTIME) - ]) - self.assertEqual(self._fm.ls(self._get_temp_path('subdir/f1.txt')), [ - File(path=self._get_temp_path('subdir/f1.txt'), - size=self._F1_SIZE, - mtime=self._F1_MTIME) - ]) - - def test_mkdir(self): - self._fm.mkdir(os.path.join(self._get_temp_path(), 'subdir2')) - self.assertTrue(os.path.isdir(self._get_temp_path('subdir2'))) - - def test_read(self): - content = self._fm.read(self._get_temp_path('f1.txt')) - self.assertEqual('xxx', content) - - -class FileManagerTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - fake_fm = 'testing.fake_file_manager:FakeFileManager' - os.environ['CUSTOMIZED_FILE_MANAGER'] = fake_fm - - @classmethod - def tearDownClass(cls): - del os.environ['CUSTOMIZED_FILE_MANAGER'] - - def setUp(self): - self._fm = FileManager() - - def test_can_handle(self): - self.assertTrue(self._fm.can_handle('fake://123')) - # Falls back to default manager - self.assertTrue(self._fm.can_handle('/data/123')) - self.assertFalse(self._fm.can_handle('unsupported:///123')) - - def test_ls(self): - self.assertEqual(self._fm.ls('fake://data'), [{ - 'path': 'fake://data/f1.txt', - 'size': 0 - }]) - - def test_move(self): - self.assertTrue(self._fm.move('fake://move/123', 'fake://move/234')) - self.assertFalse( - self._fm.move('fake://do_not_move/123', 'fake://move/234')) - # No file manager can handle this - self.assertRaises(RuntimeError, - lambda: self._fm.move('hdfs://123', 'fake://abc')) - - def test_remove(self): - self.assertTrue(self._fm.remove('fake://remove/123')) - self.assertFalse(self._fm.remove('fake://do_not_remove/123')) - # No file manager can handle this - self.assertRaises(RuntimeError, - lambda: self._fm.remove('unsupported://123')) - - def test_copy(self): - self.assertTrue(self._fm.copy('fake://copy/123', 'fake://copy/234')) - self.assertFalse( - self._fm.copy('fake://do_not_copy/123', 'fake://copy/234')) - # 
No file manager can handle this - self.assertRaises(RuntimeError, - lambda: self._fm.copy('hdfs://123', 'fake://abc')) - - def test_mkdir(self): - self.assertTrue(self._fm.mkdir('fake://mkdir/123')) - self.assertFalse(self._fm.mkdir('fake://do_not_mkdir/123')) - # No file manager can handle this - self.assertRaises(RuntimeError, - lambda: self._fm.mkdir('unsupported:///123')) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/k8s_client_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/k8s_client_test.py deleted file mode 100644 index 19ffd9fc0..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/k8s_client_test.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import unittest -from unittest.mock import MagicMock - -from kubernetes.client import ApiException - -from fedlearner_webconsole.utils.k8s_client import K8sClient - - -class K8sClientTest(unittest.TestCase): - def setUp(self): - self._k8s_client = K8sClient() - - def test_delete_flapp(self): - mock_crds = MagicMock() - self._k8s_client.crds = mock_crds - # Test delete successfully - mock_crds.delete_namespaced_custom_object = MagicMock() - self._k8s_client.delete_flapp('test_flapp') - mock_crds.delete_namespaced_custom_object.assert_called_once_with( - group='fedlearner.k8s.io', - name='test_flapp', - namespace='default', - plural='flapps', - version='v1alpha1') - # Tests that the flapp has been deleted - mock_crds.delete_namespaced_custom_object = MagicMock( - side_effect=ApiException(status=404)) - self._k8s_client.delete_flapp('test_flapp2') - self.assertEqual(mock_crds.delete_namespaced_custom_object.call_count, - 1) - # Tests with other exceptions - mock_crds.delete_namespaced_custom_object = MagicMock( - side_effect=ApiException(status=500)) - with self.assertRaises(RuntimeError): - self._k8s_client.delete_flapp('test_flapp3') - self.assertEqual(mock_crds.delete_namespaced_custom_object.call_count, - 3) - - def test_create_flapp(self): - test_yaml = { - 'metadata': { - 'name': 'test app' - } - } - mock_crds = MagicMock() - self._k8s_client.crds = mock_crds - # Test create successfully - mock_crds.create_namespaced_custom_object = MagicMock() - self._k8s_client.create_flapp(test_yaml) - mock_crds.create_namespaced_custom_object.assert_called_once_with( - group='fedlearner.k8s.io', - namespace='default', - plural='flapps', - version='v1alpha1', - body=test_yaml) - # Test that flapp exists - mock_crds.create_namespaced_custom_object = MagicMock( - side_effect=[ApiException(status=409), None]) - self._k8s_client.delete_flapp = MagicMock() - self._k8s_client.create_flapp(test_yaml) - self._k8s_client.delete_flapp.assert_called_once_with('test app') - self.assertEqual(mock_crds.create_namespaced_custom_object.call_count, - 2) - # Test with other exceptions - mock_crds.create_namespaced_custom_object = MagicMock( - 
side_effect=ApiException(status=114)) - with self.assertRaises(ApiException): - self._k8s_client.create_flapp(test_yaml) - self.assertEqual(mock_crds.create_namespaced_custom_object.call_count, - 3) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/kibana_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/kibana_test.py deleted file mode 100644 index a5478d693..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/kibana_test.py +++ /dev/null @@ -1,37 +0,0 @@ -import unittest - -from fedlearner_webconsole.exceptions import UnauthorizedException -from fedlearner_webconsole.utils.kibana import Kibana - - -class KibanaTest(unittest.TestCase): - def test_auth(self): - self.assertRaises(UnauthorizedException, - Kibana._check_authorization, 'tags.1') - self.assertRaises(UnauthorizedException, - Kibana._check_authorization, 'tags.1:2') - self.assertRaises(UnauthorizedException, - Kibana._check_authorization, 'x:3 and y:4', {'x'}) - self.assertRaises(UnauthorizedException, - Kibana._check_authorization, - 'x:3 OR y:4 AND z:5', {'x', 'z'}) - try: - Kibana._check_authorization('x:1', {'x'}) - Kibana._check_authorization('x:1 AND y:2 OR z:3', {'x', 'y', 'z'}) - Kibana._check_authorization('x:1 oR y:2 aNd z:3', {'x', 'y', 'z'}) - Kibana._check_authorization('*', {'x', '*'}) - Kibana._check_authorization(None, None) - except UnauthorizedException: - self.fail() - - def test_parse_time(self): - dt1 = 0 - dt2 = 60 * 60 * 24 - args = {'start_time': dt1, 'end_time': dt2} - st, et = Kibana._parse_start_end_time(args) - self.assertEqual(st, '1970-01-01T00:00:00Z') - self.assertEqual(et, '1970-01-02T00:00:00Z') - st, et = Kibana._parse_start_end_time({'start_time': -1, - 'end_time': -1}) - self.assertEqual(st, 'now-5y') - self.assertEqual(et, 'now') diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/metrics_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/metrics_test.py deleted file mode 100644 index 4d6172fde..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/metrics_test.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import logging -import unittest - -from fedlearner_webconsole.utils import metrics -from fedlearner_webconsole.utils.metrics import _DefaultMetricsHandler, MetricsHandler - - -class _FakeMetricsHandler(MetricsHandler): - - def emit_counter(self, name, value: int, tags: dict = None): - logging.info(f'[Test][Counter] {name} - {value}') - - def emit_store(self, name, value: int, tags: dict = None): - logging.info(f'[Test][Store] {name} - {value}') - - -class DefaultMetricsHandler(unittest.TestCase): - def setUp(self): - self._handler = _DefaultMetricsHandler() - - def test_emit_counter(self): - with self.assertLogs() as cm: - self._handler.emit_counter('test', 1) - self._handler.emit_counter('test2', 2) - logs = [r.msg for r in cm.records] - self.assertEqual(logs, [ - '[Metric][Counter] test: 1', - '[Metric][Counter] test2: 2']) - - def test_emit_store(self): - with self.assertLogs() as cm: - self._handler.emit_store('test', 199) - self._handler.emit_store('test2', 299) - logs = [r.msg for r in cm.records] - self.assertEqual(logs, [ - '[Metric][Store] test: 199', - '[Metric][Store] test2: 299']) - - -class ClientTest(unittest.TestCase): - def setUp(self): - metrics.add_handler(_FakeMetricsHandler()) - - def tearDown(self): - metrics.reset_handlers() - - def test_emit_counter(self): - with self.assertLogs() as cm: - metrics.emit_counter('test', 1) - logs = [r.msg for r in cm.records] - self.assertEqual(logs, [ - '[Metric][Counter] test: 1', - '[Test][Counter] test - 1']) - - def test_emit_store(self): - with self.assertLogs() as cm: - metrics.emit_store('test', 199) - logs = [r.msg for r in cm.records] - self.assertEqual(logs, [ - '[Metric][Store] test: 199', - '[Test][Store] test - 199']) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/mixins_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/mixins_test.py deleted file mode 100644 index b9573b561..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/mixins_test.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import unittest - -from sqlalchemy import Column, Integer -from sqlalchemy.ext.declarative import declarative_base - -from fedlearner_webconsole.utils.mixins import from_dict_mixin, to_dict_mixin - -Base = declarative_base() - - -@to_dict_mixin() -class DeclarativeClass(Base): - __tablename__ = 'just_a_test' - - test = Column(Integer, primary_key=True) - - -@to_dict_mixin(to_dict_fields=['hhh']) -@from_dict_mixin(from_dict_fields=['hhh'], required_fields=['hhh']) -class SpecifyColumnsClass(object): - def __init__(self) -> None: - self.hhh = None - self.not_include = None - - -class MixinsTest(unittest.TestCase): - def test_to_dict_declarative_api(self): - obj = DeclarativeClass() - res = obj.to_dict() - self.assertEqual(len(res), 1) - self.assertTrue('test' in res) - - def test_to_dict_specify_columns(self): - obj = SpecifyColumnsClass() - obj.hhh = 'hhh' - res = obj.to_dict() - self.assertEqual(len(res), 1) - self.assertTrue('hhh' in res) - - def test_from_dict(self): - inputs_pass = {'hhh': 4, 'hhhh': 1} - inputs_raise = {'hhhh': 1} - - obj = SpecifyColumnsClass.from_dict(inputs_pass) - self.assertEqual(obj.hhh, 4) - with self.assertRaises(ValueError): - obj = SpecifyColumnsClass.from_dict(inputs_raise) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/utils/system_envs_test.py b/web_console_v2/api/test/fedlearner_webconsole/utils/system_envs_test.py deleted file mode 100644 index f90183a59..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/utils/system_envs_test.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import os -import unittest -from unittest.mock import patch - -from fedlearner_webconsole.utils.system_envs import get_system_envs - - -class SystemEnvsTest(unittest.TestCase): - @patch.dict(os.environ, { - 'ES_HOST': 'test es host', - 'ES_PORT': '9200', - 'DB_HOST': 'test db host', - 'DB_PORT': '3306', - 'DB_DATABASE': 'fedlearner', - 'DB_USERNAME': 'username', - 'DB_PASSWORD': 'password', - 'KVSTORE_TYPE': 'mysql', - 'ETCD_NAME': 'fedlearner', - 'ETCD_ADDR': 'fedlearner-stack-etcd.default.svc.cluster.local:2379', - 'ETCD_BASE_DIR': 'fedlearner' - }) - def test_get_system_envs(self): - self.assertEqual( - get_system_envs(), - '{"name": "POD_IP", "valueFrom": {"fieldRef": {"fieldPath": "status.podIP"}}},' - '{"name": "POD_NAME", "valueFrom": {"fieldRef": {"fieldPath": "metadata.name"}}},' - '{"name": "CPU_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.cpu"}}},' - '{"name": "MEM_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.memory"}}},' - '{"name": "CPU_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.cpu"}}},' - '{"name": "MEM_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.memory"}}},' - '{"name": "ES_HOST", "value": "test es host"},' - '{"name": "ES_PORT", "value": "9200"},' - '{"name": "DB_HOST", "value": "test db host"},' - '{"name": "DB_PORT", "value": "3306"},' - '{"name": "DB_DATABASE", "value": "fedlearner"},' - '{"name": "DB_USERNAME", "value": "username"},' - '{"name": "DB_PASSWORD", "value": "password"},' - '{"name": "KVSTORE_TYPE", "value": "mysql"},' - '{"name": "ETCD_NAME", "value": "fedlearner"},' - '{"name": "ETCD_ADDR", "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379"},' - '{"name": "ETCD_BASE_DIR", "value": "fedlearner"}') - - def test_get_available_envs(self): - self.assertEqual( - get_system_envs(), - '{"name": "POD_IP", "valueFrom": {"fieldRef": {"fieldPath": "status.podIP"}}},' - '{"name": "POD_NAME", "valueFrom": {"fieldRef": {"fieldPath": "metadata.name"}}},' - '{"name": "CPU_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.cpu"}}},' - '{"name": "MEM_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.memory"}}},' - '{"name": "CPU_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.cpu"}}},' - '{"name": "MEM_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.memory"}}}') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow/apis_test.py b/web_console_v2/api/test/fedlearner_webconsole/workflow/apis_test.py deleted file mode 100644 index 2c6382658..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/workflow/apis_test.py +++ /dev/null @@ -1,474 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import logging -import random -import string -import time -import json -import unittest -from uuid import UUID -from http import HTTPStatus -from pathlib import Path -from unittest.mock import patch -from google.protobuf.json_format import ParseDict -from fedlearner_webconsole.composer.models import ItemStatus, SchedulerItem -from fedlearner_webconsole.db import db -from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition -from fedlearner_webconsole.project.models import Project -from fedlearner_webconsole.workflow.cronjob import WorkflowCronJobItem -from fedlearner_webconsole.workflow.models import Workflow, WorkflowState -from fedlearner_webconsole.job.models import Job, JobType, JobState -from fedlearner_webconsole.scheduler.transaction import TransactionState -from fedlearner_webconsole.proto.service_pb2 import GetWorkflowResponse -from fedlearner_webconsole.proto import project_pb2 -from fedlearner_webconsole.rpc.client import RpcClient -from fedlearner_webconsole.proto.common_pb2 import CreateJobFlag -from fedlearner_webconsole.workflow.apis import is_peer_job_inheritance_matched -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db_handler - -class WorkflowsApiTest(BaseTestCase): - class Config(BaseTestCase.Config): - START_GRPC_SERVER = False - START_SCHEDULER = False - - def setUp(self): - self.maxDiff = None - super().setUp() - # Inserts data - workflow1 = Workflow(name='workflow_key_get1', project_id=1) - workflow2 = Workflow(name='workflow_kay_get2', project_id=2) - workflow3 = Workflow(name='workflow_key_get3', project_id=2) - db.session.add(workflow1) - db.session.add(workflow2) - db.session.add(workflow3) - db.session.commit() - - def test_get_with_project(self): - response = self.get_helper('/api/v2/workflows?project=1') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = self.get_response_data(response) - self.assertEqual(len(data), 1) - self.assertEqual(data[0]['name'], 'workflow_key_get1') - - def test_get_with_keyword(self): - response = self.get_helper('/api/v2/workflows?keyword=key') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = self.get_response_data(response) - self.assertEqual(len(data), 2) - self.assertEqual(data[0]['name'], 'workflow_key_get1') - - def test_get_workflows(self): - time.sleep(1) - workflow = Workflow(name='last', project_id=1) - db.session.add(workflow) - db.session.flush() - db.session.commit() - response = self.get_helper('/api/v2/workflows') - data = self.get_response_data(response) - self.assertEqual(data[0]['name'], 'last') - - @patch('fedlearner_webconsole.workflow.apis.scheduler.wakeup') - @patch('fedlearner_webconsole.workflow.apis.uuid4') - def test_create_new_workflow(self, mock_uuid, mock_wakeup): - mock_uuid.return_value = UUID('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') - with open( - Path(__file__, '../../test_data/workflow_config.json').resolve( - )) as workflow_config: - config = json.load(workflow_config) - extra = ''.join( - random.choice(string.ascii_lowercase) for _ in range(10)) - # extra should be a valid json string so we mock one - extra = f'{{"parent_job_name":"{extra}"}}' - workflow = { - 'name': 'test-workflow', - 'project_id': 1234567, - 'forkable': True, - 'comment': 'test-comment', - 'config': config, - 'extra': extra - } - response = self.post_helper('/api/v2/workflows', data=workflow) - self.assertEqual(response.status_code, HTTPStatus.CREATED) - created_workflow = json.loads(response.data).get('data') - # 
Check scheduler - mock_wakeup.assert_called_once_with(created_workflow['id']) - self.assertIsNotNone(created_workflow['id']) - self.assertIsNotNone(created_workflow['created_at']) - self.assertIsNotNone(created_workflow['updated_at']) - del created_workflow['id'] - del created_workflow['created_at'] - del created_workflow['updated_at'] - del created_workflow['start_at'] - del created_workflow['stop_at'] - self.assertEqual( - created_workflow, { - 'batch_update_interval': -1, - 'name': 'test-workflow', - 'project_id': 1234567, - 'extra': extra, - 'forkable': True, - 'forked_from': None, - 'metric_is_public': False, - 'comment': 'test-comment', - 'state': 'NEW', - 'target_state': 'READY', - 'transaction_state': 'READY', - 'transaction_err': None, - 'create_job_flags': [1, 1, 1], - 'peer_create_job_flags': None, - 'job_ids': [], - 'last_triggered_batch': None, - 'recur_at': None, - 'recur_type': 'NONE', - 'trigger_dataset': None, - 'uuid': f'u{mock_uuid().hex[:19]}' - }) - # Check DB - self.assertEqual(len(Workflow.query.all()), 4) - - # Post again - mock_wakeup.reset_mock() - response = self.post_helper('/api/v2/workflows', data=workflow) - self.assertEqual(response.status_code, HTTPStatus.CONFLICT) - # Check mock - mock_wakeup.assert_not_called() - # Check DB - self.assertEqual(len(Workflow.query.all()), 4) - - @patch('fedlearner_webconsole.workflow.apis.composer.get_item_status') - @patch('fedlearner_webconsole.workflow.apis.composer.collect') - @patch('fedlearner_webconsole.workflow.apis.scheduler.wakeup') - def test_post_batch_update_interval_job(self, mock_wakeup, mock_collect, - mock_get_item_status): - mock_get_item_status.return_value = None - with open( - Path(__file__, '../../test_data/workflow_config.json').resolve( - )) as workflow_config: - config = json.load(workflow_config) - workflow = { - 'name': 'test-workflow-left', - 'project_id': 1234567, - 'forkable': True, - 'config': config, - 'batch_update_interval': 10, - } - response = self.post_helper('/api/v2/workflows', data=workflow) - self.assertEqual(response.status_code, HTTPStatus.CREATED) - - with open( - Path(__file__, '../../test_data/workflow_config_right.json'). - resolve()) as workflow_config: - config = json.load(workflow_config) - workflow = { - 'name': 'test-workflow-right', - 'project_id': 1234567, - 'forkable': True, - 'config': config, - 'batch_update_interval': 10, - } - response = self.post_helper('/api/v2/workflows', data=workflow) - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - - mock_collect.assert_called() - mock_wakeup.assert_called() - - def test_fork_workflow(self): - # TODO: insert into db first, and then copy it.
- pass - - -class WorkflowApiTest(BaseTestCase): - def test_put_successfully(self): - config = { - 'participants': [{ - 'name': 'party_leader', - 'url': '127.0.0.1:5000', - 'domain_name': 'fl-leader.com' - }], - 'variables': [{ - 'name': 'namespace', - 'value': 'leader' - }, { - 'name': 'basic_envs', - 'value': '{}' - }, { - 'name': 'storage_root_dir', - 'value': '/' - }, { - 'name': 'EGRESS_URL', - 'value': '127.0.0.1:1991' - }] - } - project = Project( - name='test', - config=ParseDict(config, - project_pb2.Project()).SerializeToString()) - db.session.add(project) - workflow = Workflow( - name='test-workflow', - project_id=1, - state=WorkflowState.NEW, - transaction_state=TransactionState.PARTICIPANT_PREPARE, - target_state=WorkflowState.READY) - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - response = self.put_helper(f'/api/v2/workflows/{workflow.id}', - data={ - 'forkable': True, - 'config': { - 'group_alias': 'test-template' - }, - 'comment': 'test comment' - }) - self.assertEqual(response.status_code, HTTPStatus.OK) - - updated_workflow = Workflow.query.get(workflow.id) - self.assertIsNotNone(updated_workflow.config) - self.assertTrue(updated_workflow.forkable) - self.assertEqual(updated_workflow.comment, 'test comment') - self.assertEqual(updated_workflow.target_state, WorkflowState.READY) - - def test_put_resetting(self): - workflow = Workflow( - name='test-workflow', - project_id=123, - config=WorkflowDefinition( - group_alias='test-template').SerializeToString(), - state=WorkflowState.NEW, - ) - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - response = self.put_helper(f'/api/v2/workflows/{workflow.id}', - data={ - 'forkable': True, - 'config': { - 'group_alias': 'test-template' - }, - }) - self.assertEqual(response.status_code, HTTPStatus.CONFLICT) - - @patch('fedlearner_webconsole.workflow.apis.scheduler.wakeup') - def test_patch_successfully(self, mock_wakeup): - workflow = Workflow( - name='test-workflow', - project_id=123, - config=WorkflowDefinition().SerializeToString(), - forkable=False, - state=WorkflowState.READY, - ) - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - response = self.patch_helper(f'/api/v2/workflows/{workflow.id}', - data={'target_state': 'RUNNING'}) - self.assertEqual(response.status_code, HTTPStatus.OK) - patched_data = json.loads(response.data).get('data') - self.assertEqual(patched_data['id'], workflow.id) - self.assertEqual(patched_data['state'], 'READY') - self.assertEqual(patched_data['target_state'], 'RUNNING') - # Checks DB - patched_workflow = Workflow.query.get(workflow.id) - self.assertEqual(patched_workflow.target_state, WorkflowState.RUNNING) - # Checks scheduler - mock_wakeup.assert_called_once_with(workflow.id) - - @patch('fedlearner_webconsole.workflow.apis.scheduler.wakeup') - def test_patch_invalid_target_state(self, mock_wakeup): - workflow = Workflow(name='test-workflow', - project_id=123, - config=WorkflowDefinition().SerializeToString(), - forkable=False, - state=WorkflowState.READY, - target_state=WorkflowState.RUNNING) - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - response = self.patch_helper(f'/api/v2/workflows/{workflow.id}', - data={'target_state': 'READY'}) - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - self.assertEqual( - json.loads(response.data).get('details'), - 'Another transaction is in progress [1]') - # Checks DB - patched_workflow = 
Workflow.query.get(workflow.id) - self.assertEqual(patched_workflow.state, WorkflowState.READY) - self.assertEqual(patched_workflow.target_state, WorkflowState.RUNNING) - # Checks scheduler - mock_wakeup.assert_not_called() - - @patch('fedlearner_webconsole.workflow.apis.composer.get_item_status') - @patch('fedlearner_webconsole.workflow.apis.composer.patch_item_attr') - @patch('fedlearner_webconsole.workflow.apis.composer.finish') - @patch('fedlearner_webconsole.workflow.apis.composer.collect') - def test_patch_batch_update_interval(self, mock_collect, mock_finish, - mock_patch_item, - mock_get_item_status): - mock_get_item_status.side_effect = [None, ItemStatus.ON] - workflow = Workflow( - name='test-workflow-left', - project_id=123, - config=WorkflowDefinition(is_left=True).SerializeToString(), - forkable=False, - state=WorkflowState.STOPPED, - ) - batch_update_interval = 1 - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - # test create cronjob - response = self.patch_helper( - f'/api/v2/workflows/{workflow.id}', - data={'batch_update_interval': batch_update_interval}) - self.assertEqual(response.status_code, HTTPStatus.OK) - - mock_collect.assert_called_with( - name=f'workflow_cron_job_{workflow.id}', - items=[WorkflowCronJobItem(workflow.id)], - metadata={}, - interval=batch_update_interval * 60) - - # patch new interval time for cronjob - batch_update_interval = 2 - response = self.patch_helper( - f'/api/v2/workflows/{workflow.id}', - data={'batch_update_interval': batch_update_interval}) - self.assertEqual(response.status_code, HTTPStatus.OK) - mock_patch_item.assert_called_with( - name=f'workflow_cron_job_{workflow.id}', - key='interval_time', - value=batch_update_interval * 60) - - # test stop cronjob - response = self.patch_helper(f'/api/v2/workflows/{workflow.id}', - data={'batch_update_interval': -1}) - self.assertEqual(response.status_code, HTTPStatus.OK) - mock_finish.assert_called_with(name=f'workflow_cron_job_{workflow.id}') - - workflow = Workflow( - name='test-workflow-right', - project_id=456, - config=WorkflowDefinition(is_left=False).SerializeToString(), - forkable=False, - state=WorkflowState.STOPPED, - ) - db.session.add(workflow) - db.session.commit() - db.session.refresh(workflow) - - response = self.patch_helper(f'/api/v2/workflows/{workflow.id}', - data={'batch_update_interval': 1}) - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - - def test_patch_not_found(self): - response = self.patch_helper('/api/v2/workflows/1', - data={'target_state': 'RUNNING'}) - self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) - - def test_patch_create_job_flags(self): - with db_handler.session_scope() as session: - workflow, job = add_fake_workflow(session) - response = self.patch_helper(f'/api/v2/workflows/{workflow.id}', - data={'create_job_flags': [3]}) - self.assertEqual(response.status_code, HTTPStatus.OK) - patched_job = Job.query.get(job.id) - self.assertEqual(patched_job.is_disabled, True) - response = self.patch_helper(f'/api/v2/workflows/{workflow.id}', - data={'create_job_flags': [1]}) - self.assertEqual(response.status_code, HTTPStatus.OK) - patched_job = Job.query.get(job.id) - self.assertEqual(patched_job.is_disabled, False) - - # TODO: Move it to service_test - @patch('fedlearner_webconsole.rpc.client.RpcClient.get_workflow') - def test_is_peer_job_inheritance_matched(self, mock_get_workflow): - peer_job_0 = JobDefinition(name='raw-data-job') - peer_job_1 = JobDefinition(name='train-job', is_federated=True) - 
peer_config = WorkflowDefinition() - peer_config.job_definitions.extend([peer_job_0, peer_job_1]) - resp = GetWorkflowResponse(config=peer_config) - mock_get_workflow.return_value = resp - - job_0 = JobDefinition(name='train-job', is_federated=True) - config = WorkflowDefinition(job_definitions=[job_0]) - - project = Project() - participant = project_pb2.Participant() - project.set_config(project_pb2.Project(participants=[participant])) - workflow0 = Workflow(project=project) - workflow0.set_config(config) - db.session.add(workflow0) - db.session.commit() - db.session.flush() - workflow1 = Workflow(project=project, forked_from=workflow0.id) - workflow1.set_config(config) - workflow1.set_create_job_flags([CreateJobFlag.REUSE]) - workflow1.set_peer_create_job_flags( - [CreateJobFlag.NEW, CreateJobFlag.REUSE]) - - self.assertTrue(is_peer_job_inheritance_matched(workflow1)) - - workflow1.set_create_job_flags([CreateJobFlag.NEW]) - self.assertFalse(is_peer_job_inheritance_matched(workflow1)) - - def test_is_local(self): - with db_handler.session_scope() as session: - workflow, job = add_fake_workflow(session) - self.assertTrue(workflow.is_local()) - config = workflow.get_config() - config.job_definitions[ - 0].is_federated = True - workflow.set_config(config) - self.assertFalse(workflow.is_local()) - - -def add_fake_workflow(session): - wd = WorkflowDefinition() - jd = wd.job_definitions.add() - workflow = Workflow( - name='test-workflow', - project_id=123, - config=wd.SerializeToString(), - forkable=False, - state=WorkflowState.READY, - ) - session.add(workflow) - session.flush() - job = Job( - name='test_job', - job_type=JobType(1), - config=jd.SerializeToString(), - workflow_id=workflow.id, - project_id=123, - state=JobState.STOPPED, - is_disabled=False) - session.add(job) - session.flush() - workflow.job_ids = str(job.id) - session.commit() - return workflow, job - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow/cronjob_test.py b/web_console_v2/api/test/fedlearner_webconsole/workflow/cronjob_test.py deleted file mode 100644 index f33014a5d..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/workflow/cronjob_test.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import unittest - -from time import sleep -from sqlalchemy import and_ - -from testing.common import BaseTestCase -from fedlearner_webconsole.db import db -from fedlearner_webconsole.workflow.models import Workflow, WorkflowState -from fedlearner_webconsole.workflow.cronjob import WorkflowCronJob, WorkflowCronJobItem -from fedlearner_webconsole.composer.models import Context, RunnerStatus, SchedulerItem, SchedulerRunner -from fedlearner_webconsole.composer.composer import ComposerConfig -from fedlearner_webconsole.composer.interface import ItemType - - -class CronJobTest(BaseTestCase): - """Disable for now, hacking!!!!
- - Hopefully it will be enabled again! - """ - def setUp(self): - super(CronJobTest, self).setUp() - self.test_id = 8848 - workflow = Workflow(id=self.test_id, state=WorkflowState.RUNNING) - db.session.add(workflow) - db.session.commit() - - @unittest.skip('waiting for refactor of transaction state') - def test_cronjob_alone(self): - cronjob = WorkflowCronJob(task_id=self.test_id) - context = Context(data={}, internal={}, db_engine=db.engine) - cronjob.start(context) - status, output = cronjob.result(context) - self.assertEqual(status, RunnerStatus.DONE) - self.assertTrue(output['msg'] is not None) - - @unittest.skip('waiting for refactor of transaction state') - def test_cronjob_with_composer(self): - config = ComposerConfig( - runner_fn={ItemType.WORKFLOW_CRON_JOB.value: WorkflowCronJob}, - name='test_cronjob') - with self.composer_scope(config=config) as composer: - item_name = f'workflow_cronjob_{self.test_id}' - composer.collect(name=item_name, - items=[WorkflowCronJobItem(self.test_id)], - metadata={}, - interval=10) - sleep(20) - runners = SchedulerRunner.query.filter( - and_(SchedulerRunner.item_id == SchedulerItem.id, - SchedulerItem.name == item_name)).all() - self.assertEqual(len(runners), 2) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/apis_test.py b/web_console_v2/api/test/fedlearner_webconsole/workflow_template/apis_test.py deleted file mode 100644 index 128fd9bd8..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/apis_test.py +++ /dev/null @@ -1,241 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -# coding: utf-8 -import json -import unittest -from http import HTTPStatus - -from fedlearner_webconsole.db import db -from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition -from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateKind -from fedlearner_webconsole.workflow_template.apis import dict_to_workflow_definition -from testing.common import BaseTestCase - - -class WorkflowTemplatesApiTest(BaseTestCase): - class Config(BaseTestCase.Config): - START_GRPC_SERVER = False - START_SCHEDULER = False - - def setUp(self): - super().setUp() - # Inserts data - template1 = WorkflowTemplate(name='t1', - comment='comment for t1', - group_alias='g1', - is_left=True) - template1.set_config( - WorkflowDefinition( - group_alias='g1', - is_left=True, - )) - template2 = WorkflowTemplate(name='t2', - group_alias='g2', - is_left=False) - template2.set_config( - WorkflowDefinition( - group_alias='g2', - is_left=False, - )) - - template3 = WorkflowTemplate( - name='t3', - group_alias='g3', - is_left=True, - kind=WorkflowTemplateKind.PRESET_DATAJOIN.value) - template3.set_config( - WorkflowDefinition( - group_alias='g3', - is_left=False, - )) - - db.session.add(template1) - db.session.add(template2) - db.session.add(template3) - db.session.commit() - - def test_get_with_group_alias(self): - response = self.get_helper('/api/v2/workflow_templates?group_alias=g1') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = json.loads(response.data).get('data') - self.assertEqual(len(data), 1) - self.assertEqual(data[0]['name'], 't1') - - def test_get_with_group_alias_with_is_left(self): - response = self.get_helper( - '/api/v2/workflow_templates?group_alias=g1&is_left=1') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = json.loads(response.data).get('data') - self.assertEqual(len(data), 1) - self.assertEqual(data[0]['name'], 't1') - response = self.get_helper( - '/api/v2/workflow_templates?group_alias=g1&is_left=0') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = json.loads(response.data).get('data') - self.assertEqual(len(data), 0) - - def test_get_all_templates(self): - response = self.get_helper('/api/v2/workflow_templates') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = json.loads(response.data).get('data') - self.assertEqual(len(data), 3) - - def test_post_without_required_arguments(self): - response = self.post_helper('/api/v2/workflow_templates', data={}) - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - self.assertEqual( - json.loads(response.data).get('details'), - {'name': 'name is empty'}) - - response = self.post_helper('/api/v2/workflow_templates', - data={ - 'name': 'test', - 'comment': 'test-comment', - 'config': { - 'is_left': True - } - }) - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - self.assertEqual( - json.loads(response.data).get('details'), - {'config.group_alias': 'config.group_alias is required'}) - - response = self.post_helper('/api/v2/workflow_templates', - data={ - 'name': 'test', - 'comment': 'test-comment', - 'config': { - 'group_alias': 'g222', - } - }) - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - self.assertEqual( - json.loads(response.data).get('details'), - {'config.is_left': 'config.is_left is required'}) - - def test_post_successfully(self): - template_name = 'test-nb-template' - expected_template = WorkflowTemplate.query.filter_by( - name=template_name).first() - 
self.assertIsNone(expected_template) - - response = self.post_helper('/api/v2/workflow_templates', - data={ - 'name': template_name, - 'comment': 'test-comment', - 'config': { - 'group_alias': 'g222', - 'is_left': True - }, - 'kind': 1, - }) - self.assertEqual(response.status_code, HTTPStatus.CREATED) - data = json.loads(response.data).get('data') - # Checks DB - expected_template = WorkflowTemplate.query.filter_by( - name=template_name).first() - self.assertEqual(expected_template.name, template_name) - self.assertEqual(expected_template.comment, 'test-comment') - self.assertEqual( - expected_template.config, - WorkflowDefinition(group_alias='g222', - is_left=True).SerializeToString()) - expected_template_dict = { - 'comment': 'test-comment', - 'config': { - 'group_alias': 'g222', - 'is_left': True, - 'job_definitions': [], - 'variables': [] - }, - 'editor_info': { - 'yaml_editor_infos': {} - }, - 'group_alias': 'g222', - 'is_left': True, - 'name': 'test-nb-template', - 'id': 4, - 'kind': 1, - } - self.assertEqual(data, expected_template_dict) - - def test_get_workflow_template(self): - response = self.get_response_data( - self.get_helper('/api/v2/workflow_templates/1')) - self.assertEqual(response['name'], 't1') - - def test_delete_workflow_template(self): - response = self.delete_helper('/api/v2/workflow_templates/1') - self.assertEqual(response.status_code, HTTPStatus.OK) - response = self.delete_helper('/api/v2/workflow_templates/1') - self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) - - def test_put_workflow_template(self): - data = { - 'name': 'test_put', - 'comment': 'test-comment', - 'config': { - 'group_alias': 'g222', - 'is_left': True - } - } - response = self.put_helper('/api/v2/workflow_templates/1', data=data) - self.assertEqual(response.status_code, HTTPStatus.OK) - expected_template = WorkflowTemplate.query.filter_by(id=1).first() - self.assertEqual(expected_template.name, data['name']) - self.assertEqual(expected_template.comment, data['comment']) - self.assertEqual(expected_template.group_alias, - data['config']['group_alias']) - self.assertEqual(expected_template.is_left, data['config']['is_left']) - - def test_dict_to_workflow_definition(self): - config = { - 'variables': [{ - 'name': 'code', - 'value': '{"asdf.py": "asdf"}', - 'value_type': 'CODE' - }] - } - proto = dict_to_workflow_definition(config) - self.assertTrue(isinstance(proto.variables[0].value, str)) - - def test_get_code(self): - response = self.get_helper( - '/api/v2/codes?code_path=test/fedlearner_webconsole/test_data/code.tar.gz' - ) - self.assertEqual(response.status_code, HTTPStatus.OK) - data = json.loads(response.data) - self.assertEqual( - { - 'test/a.py': 'awefawefawefawefwaef', - 'test1/b.py': 'asdfasd', - 'c.py': '', - 'test/d.py': 'asdf' - }, data['data']) - response = self.get_helper( - '/api/v2/codes?code_path=../test_data/code.tar.g1') - self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST) - - def test_get_with_kind(self): - response = self.get_helper( - '/api/v2/workflow_templates?from=preset_datajoin') - self.assertEqual(response.status_code, HTTPStatus.OK) - data = json.loads(response.data).get('data') - self.assertEqual(len(data), 1) - self.assertEqual(data[0]['name'], 't3') - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/slots_formater_test.py b/web_console_v2/api/test/fedlearner_webconsole/workflow_template/slots_formater_test.py deleted file mode 100644 index d796a5625..000000000 --- 
a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/slots_formater_test.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import unittest -from fedlearner_webconsole.proto.workflow_definition_pb2 import Slot -from fedlearner_webconsole.workflow_template.slots_formatter import format_yaml, generate_yaml_template - - -class SlotFormatterTest(unittest.TestCase): - def test_format_yaml(self): - slots = {'Slot_prs': 'prs', - 'Slot_prs1': 'prs1', - 'dada': 'paopaotang'} - yaml = '${Slot_prs} a${asdf} ${Slot_prs1}' - self.assertEqual(format_yaml(yaml, **slots), - 'prs a${asdf} prs1') - - def test_generate_yaml_template(self): - slots = {'Slot_prs': Slot(reference_type=Slot.ReferenceType.DEFAULT, default='prs'), - 'Slot_prs1': Slot(reference_type=Slot.ReferenceType.PROJECT, reference='project.variables.namespace')} - yaml = '${Slot_prs} a${asdf} ${Slot_prs1}' - self.assertEqual(generate_yaml_template(yaml, slots), - 'prs a${asdf} ${project.variables.namespace}') - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/template_validator_test.py b/web_console_v2/api/test/fedlearner_webconsole/workflow_template/template_validator_test.py deleted file mode 100644 index 317a8da5e..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/template_validator_test.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import unittest -from fedlearner_webconsole.workflow_template.template_validaor\ - import check_workflow_definition -from test_template_left import make_workflow_template - - -class TemplateValidatorTest(unittest.TestCase): - - - def test_check_workflow_definition(self): - workflow_definition = make_workflow_template() - check_workflow_definition(workflow_definition) - - def test_check_more_json_wrong(self): - yaml_template_more_comma = '{"a": "aa", "b":"" ,}' - workflow_definition = make_workflow_template() - workflow_definition.job_definitions[0].yaml_template = \ - yaml_template_more_comma - with self.assertRaises(ValueError): - check_workflow_definition(workflow_definition) - - def test_check_more_placeholder(self): - workflow_definition = make_workflow_template() - yaml_template_more_placeholder = '{"a": "${workflow.variables.nobody}"}' - workflow_definition.job_definitions[0].yaml_template = \ - yaml_template_more_placeholder - with self.assertRaises(ValueError): - check_workflow_definition(workflow_definition) - - def test_check_wrong_placeholder(self): - workflow_definition = make_workflow_template() - yaml_template_wrong_placeholder = '{"a": "${workflow.xx!x.nobody}"}' - workflow_definition.job_definitions[0].yaml_template =\ - yaml_template_wrong_placeholder - with self.assertRaises(ValueError): - check_workflow_definition(workflow_definition) - - -if __name__ == '__main__': - unittest.main() diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/test_template_left.py b/web_console_v2/api/test/fedlearner_webconsole/workflow_template/test_template_left.py deleted file mode 100644 index b52bb9f10..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/test_template_left.py +++ /dev/null @@ -1,1332 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -from google.protobuf.json_format import MessageToDict -from fedlearner_webconsole.proto.workflow_definition_pb2 import ( - WorkflowDefinition, JobDefinition, JobDependency -) -from fedlearner_webconsole.proto.common_pb2 import ( - Variable -) - - -def make_workflow_template(): - workflow = WorkflowDefinition( - group_alias='test_template', - is_left=True, - variables=[ - Variable( - name='image_version', - value='v1.5-rc3', - access_mode=Variable.PEER_READABLE), - Variable( - name='num_partitions', - value='4', - access_mode=Variable.PEER_WRITABLE), - ], - job_definitions=[ - JobDefinition( - name='raw-data-job', - job_type=JobDefinition.RAW_DATA, - is_federated=False, - variables=[ - Variable( - name='input_dir', - value='/app/deploy/integrated_test/tfrecord_raw_data', - access_mode=Variable.PRIVATE), - Variable( - name='file_wildcard', - value='*.rd', - access_mode=Variable.PRIVATE), - Variable( - name='batch_size', - value='1024', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='input_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='output_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='master_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='master_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - ], - yaml_template='''{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.raw-data-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Follower", - "peerSpecs": { - "Leader": { - "peerURL": "", - "authority": "" - } - }, - "cleanPodPolicy": "All", - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw-data-job.variables.master_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw-data-job.variables.master_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.master_mem}" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_master.sh" - ], - "args": [], - "env": [ - ${system.basic_envs}, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_NAME", - "value": "${workflow.jobs.raw-data-job.name}" 
- }, - { - "name": "OUTPUT_PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "INPUT_BASE_DIR", - "value": "${workflow.jobs.raw-data-job.variables.input_dir}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/raw_data/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "RAW_DATA_PUBLISH_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_TYPE", - "value": "Streaming" - }, - { - "name": "FILE_WILDCARD", - "value": "${workflow.jobs.raw-data-job.variables.file_wildcard}" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false, - "replicas": 1 - }, - "Worker": { - "replicas": 4, - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_worker.sh" - ], - "args": [], - "env": [ - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - ${system.basic_envs}, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "BATCH_SIZE", - "value": "${workflow.jobs.raw-data-job.variables.batch_size}" - }, - { - "name": "INPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw-data-job.variables.input_format}" - }, - { - "name": "COMPRESSED_TYPE", - "value": "" - }, - { - "name": "OUTPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw-data-job.variables.output_format}" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - 
"persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false - } - } - } -} - ''' - ), - JobDefinition( - name='data-join-job', - job_type=JobDefinition.DATA_JOIN, - is_federated=True, - variables=[ - Variable( - name='master_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='master_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_cpu', - value='4000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - ], - dependencies=[ - JobDependency(source='raw-data-job') - ], - yaml_template=''' -{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.data-join-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Leader", - "cleanPodPolicy": "All", - "peerSpecs": { - "Follower": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "BATCH_MODE", - "value": "--batch_mode" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "START_TIME", - "value": "0" - }, - { - "name": "END_TIME", - "value": "999999999999" - }, - { - "name": "NEGATIVE_SAMPLING_RATE", - "value": "1.0" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": 
[ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/data_join/run_data_join_master.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data-join-job.variables.master_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data-join-job.variables.master_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.master_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": 1 - }, - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_BLOCK_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "DATA_BLOCK_DUMP_THRESHOLD", - "value": "65536" - }, - { - "name": "EXAMPLE_ID_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "EXAMPLE_ID_DUMP_THRESHOLD", - "value": "65536" - }, - { - "name": "EXAMPLE_ID_BATCH_SIZE", - "value": "4096" - }, - { - "name": "MAX_FLYING_EXAMPLE_ID", - "value": "307152" - }, - { - "name": "MIN_MATCHING_WINDOW", - "value": "2048" - }, - { - "name": "MAX_MATCHING_WINDOW", - "value": "8192" - }, - { - "name": "RAW_DATA_ITER", - "value": "${workflow.jobs.raw-data-job.variables.output_format}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": 
"artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/data_join/run_data_join_worker.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": ${workflow.variables.num_partitions} - } - } - } -} - ''' - ), - JobDefinition( - name='train-job', - job_type=JobDefinition.NN_MODEL_TRANINING, - is_federated=True, - variables=[ - Variable( - name='code_key', - value='/app/deploy/integrated_test/code_key/criteo-train-2.tar.gz', - access_mode=Variable.PRIVATE - ) - ], - dependencies=[ - JobDependency(source='data-join-job') - ], - yaml_template=''' - { - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.train-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Follower", - "cleanPodPolicy": "All", - "peerSpecs": { - "Leader": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - 
"volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/trainer/run_trainer_master.sh" - ], - "args": [], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "2Gi" - }, - "requests": { - "cpu": "1000m", - "memory": "2Gi" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "replicas": 1, - "pair": false - }, - "PS": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/trainer/run_trainer_ps.sh" - ], - "args": [], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "4Gi" - }, - "requests": { - "cpu": "1000m", - "memory": "2Gi" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": false, - "replicas": 1 - }, - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - 
} - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "CODE_KEY", - "value": "${workflow.jobs.train-job.variables.code_key}" - }, - { - "name": "SAVE_CHECKPOINT_STEPS", - "value": "1000" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - }, - { - "containerPort": 50052, - "name": "tf-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/trainer/run_trainer_worker.sh" - ], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "4Gi" - }, - "requests": { - "cpu": "1000m", - "memory": "2Gi" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": ${workflow.variables.num_partitions} - } - } - } -} - ''' - ) - ]) - - return workflow - - -import json - -if __name__ == '__main__': - print(json.dumps(MessageToDict( - make_workflow_template(), - preserving_proto_field_name=True, - including_default_value_fields=True))) diff --git a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/test_template_right.py b/web_console_v2/api/test/fedlearner_webconsole/workflow_template/test_template_right.py deleted file mode 100644 index 052a91d8d..000000000 --- a/web_console_v2/api/test/fedlearner_webconsole/workflow_template/test_template_right.py +++ /dev/null @@ -1,1352 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -from google.protobuf.json_format import MessageToDict -from fedlearner_webconsole.proto.workflow_definition_pb2 import ( - WorkflowDefinition, JobDefinition, JobDependency -) -from fedlearner_webconsole.proto.common_pb2 import ( - Variable -) - - -def make_workflow_template(): - workflow = WorkflowDefinition( - group_alias='test_template', - is_left=False, - variables=[ - Variable( - name='image_version', - value='v1.5-rc3', - access_mode=Variable.PEER_READABLE), - Variable( - name='num_partitions', - value='4', - access_mode=Variable.PEER_WRITABLE), - ], - job_definitions=[ - JobDefinition( - name='raw-data-job', - job_type=JobDefinition.RAW_DATA, - is_federated=False, - variables=[ - Variable( - name='input_dir', - value='/app/deploy/integrated_test/tfrecord_raw_data', - access_mode=Variable.PRIVATE), - Variable( - name='file_wildcard', - value='*.rd', - access_mode=Variable.PRIVATE), - Variable( - name='batch_size', - value='1024', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='input_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='output_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='master_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='master_mem', - value='3Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - ], - yaml_template='''{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.raw-data-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Follower", - "peerSpecs": { - "Leader": { - "peerURL": "", - "authority": "" - } - }, - "cleanPodPolicy": "All", - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw-data-job.variables.master_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw-data-job.variables.master_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.master_mem}" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_master.sh" - ], - "args": [], - "env": [ - ${system.basic_envs}, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - 
"value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_NAME", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "OUTPUT_PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "INPUT_BASE_DIR", - "value": "${workflow.jobs.raw-data-job.variables.input_dir}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/raw_data/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "RAW_DATA_PUBLISH_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_TYPE", - "value": "Streaming" - }, - { - "name": "FILE_WILDCARD", - "value": "${workflow.jobs.raw-data-job.variables.file_wildcard}" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false, - "replicas": 1 - }, - "Worker": { - "replicas": 4, - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_worker.sh" - ], - "args": [], - "env": [ - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - ${system.basic_envs}, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "BATCH_SIZE", - "value": "${workflow.jobs.raw-data-job.variables.batch_size}" - }, - { - "name": "INPUT_DATA_FORMAT", - "value": 
"${workflow.jobs.raw-data-job.variables.input_format}" - }, - { - "name": "COMPRESSED_TYPE", - "value": "" - }, - { - "name": "OUTPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw-data-job.variables.output_format}" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false - } - } - } -} - ''' - ), - JobDefinition( - name='data-join-job', - job_type=JobDefinition.DATA_JOIN, - is_federated=True, - variables=[ - Variable( - name='master_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='master_mem', - value='2Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_cpu', - value='3000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - ], - dependencies=[ - JobDependency(source='raw-data-job') - ], - yaml_template=''' -{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.data-join-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Follower", - "cleanPodPolicy": "All", - "peerSpecs": { - "Leader": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "BATCH_MODE", - "value": "--batch_mode" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "START_TIME", - "value": "0" - }, - { - "name": 
"END_TIME", - "value": "999999999999" - }, - { - "name": "NEGATIVE_SAMPLING_RATE", - "value": "1.0" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_SOURCE_NAME", - "value": "${workflow.jobs.data-join-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/data_join/run_data_join_master.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data-join-job.variables.master_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.master_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data-join-job.variables.master_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.master_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": 1 - }, - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_BLOCK_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "DATA_BLOCK_DUMP_THRESHOLD", - "value": "65536" - }, - { - "name": "EXAMPLE_ID_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "EXAMPLE_ID_DUMP_THRESHOLD", - "value": "65536" - }, - { - "name": "EXAMPLE_ID_BATCH_SIZE", - "value": "4096" - }, - { - "name": "MAX_FLYING_EXAMPLE_ID", - "value": "307152" - }, - { - "name": 
"MIN_MATCHING_WINDOW", - "value": "2048" - }, - { - "name": "MAX_MATCHING_WINDOW", - "value": "8192" - }, - { - "name": "RAW_DATA_ITER", - "value": "${workflow.jobs.raw-data-job.variables.output_format}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "DATA_SOURCE_NAME", - "value": "${workflow.jobs.raw-data-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/data_join/run_data_join_worker.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": ${workflow.variables.num_partitions} - } - } - } -} - ''' - ), - JobDefinition( - name='train-job', - job_type=JobDefinition.NN_MODEL_TRANINING, - is_federated=True, - variables=[ - Variable( - name='code_key', - value='/app/deploy/integrated_test/code_key/criteo-train-2.tar.gz', - access_mode=Variable.PRIVATE - ) - ], - dependencies=[ - JobDependency(source='data-join-job') - ], - yaml_template=''' - { - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.train-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Leader", - "cleanPodPolicy": "All", - "peerSpecs": { - "Follower": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": 
"ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "TRAINING_NAME", - "value": "${workflow.jobs.train-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/trainer/run_trainer_master.sh" - ], - "args": [], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "2Gi" - }, - "requests": { - "cpu": "2000m", - "memory": "2Gi" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "replicas": 1, - "pair": false - }, - "PS": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "TRAINING_NAME", - "value": "${workflow.jobs.train-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - 
"containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/trainer/run_trainer_ps.sh" - ], - "args": [], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "4Gi" - }, - "requests": { - "cpu": "2000m", - "memory": "4Gi" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": false, - "replicas": 1 - }, - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - } - }, - { - "name": "POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "CPU_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.cpu" - } - } - }, - { - "name": "MEM_REQUEST", - "valueFrom": { - "resourceFieldRef": { - "resource": "requests.memory" - } - } - }, - { - "name": "CPU_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.cpu" - } - } - }, - { - "name": "MEM_LIMIT", - "valueFrom": { - "resourceFieldRef": { - "resource": "limits.memory" - } - } - }, - { - "name": "ETCD_NAME", - "value": "fedlearner" - }, - { - "name": "ETCD_ADDR", - "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379" - }, - { - "name": "ETCD_BASE_DIR", - "value": "fedlearner" - }, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "CODE_KEY", - "value": "${workflow.jobs.train-job.variables.code_key}" - }, - { - "name": "SAVE_CHECKPOINT_STEPS", - "value": "1000" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "TRAINING_NAME", - "value": "${workflow.jobs.train-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - }, - { - "containerPort": 50052, - "name": "tf-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/trainer/run_trainer_worker.sh" - ], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "4Gi" - }, - "requests": { - "cpu": "2000m", - "memory": "4Gi" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": ${workflow.variables.num_partitions} - } - } - } -} - ''' - ) - ]) - - return workflow - - -import json - -if __name__ == '__main__': - 
print(json.dumps(MessageToDict( - make_workflow_template(), - preserving_proto_field_name=True, - including_default_value_fields=True))) diff --git a/web_console_v2/api/testing/BUILD.bazel b/web_console_v2/api/testing/BUILD.bazel new file mode 100644 index 000000000..718542fae --- /dev/null +++ b/web_console_v2/api/testing/BUILD.bazel @@ -0,0 +1,70 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "helpers_lib", + srcs = ["helpers.py"], + imports = [".."], + visibility = ["//visibility:public"], +) + +py_library( + name = "fake_lib", + testonly = True, + srcs = [ + "dataset.py", + "fake_file_manager.py", + "fake_model_job_config.py", + "fake_remote_serving.py", + "fake_time_patcher.py", + ], + imports = [".."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/composer:composer_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_service_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/scheduler:base_executor_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset/scheduler:consts_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/serving:remote_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_time_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:utils_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@com_google_protobuf//:protobuf_python", + "@common_freezegun//:pkg", + ], +) + +py_library( + name = "common_lib", + testonly = True, + srcs = ["common.py"], + imports = [".."], + deps = [ + ":no_web_server_test_case_lib", + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:app_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/auth:services_lib", + "//web_console_v2/api/fedlearner_webconsole/composer:composer_lib", + "//web_console_v2/api/fedlearner_webconsole/iam:client_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/scheduler:scheduler_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_base64_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@common_flask_testing//:pkg", + ], +) + +py_library( + name = "no_web_server_test_case_lib", + testonly = True, + srcs = ["no_web_server_test_case.py"], + imports = [".."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "@common_sqlalchemy//:pkg", + ], +) diff --git a/web_console_v2/api/testing/__init__.py b/web_console_v2/api/testing/__init__.py index 3e28547fe..c13b80f8f 100644 --- a/web_console_v2/api/testing/__init__.py +++ b/web_console_v2/api/testing/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/web_console_v2/api/testing/common.py b/web_console_v2/api/testing/common.py index fff83b5a0..d011abd7a 100644 --- a/web_console_v2/api/testing/common.py +++ b/web_console_v2/api/testing/common.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. 
All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,40 +13,33 @@ # limitations under the License. # coding: utf-8 -import contextlib import json import logging import unittest import secrets from http import HTTPStatus import multiprocessing as mp +from typing import Dict, List, Union +from unittest.mock import patch -from flask import Flask from flask_testing import TestCase -from fedlearner_webconsole.composer.composer import Composer, ComposerConfig -from fedlearner_webconsole.db import db_handler as db, get_database_uri + +from envs import Envs +from fedlearner_webconsole.auth.services import UserService +from fedlearner_webconsole.composer.composer import composer +from fedlearner_webconsole.db import db from fedlearner_webconsole.app import create_app +from fedlearner_webconsole.iam.client import create_iams_for_user from fedlearner_webconsole.initial_db import initial_db +from fedlearner_webconsole.participant.models import Participant from fedlearner_webconsole.scheduler.scheduler import scheduler -# NOTE: the following models imported is intended to be analyzed by SQLAlchemy -from fedlearner_webconsole.auth.models import Role, User, State -from fedlearner_webconsole.composer.models import SchedulerItem, SchedulerRunner, OptimisticLock -from fedlearner_webconsole.utils.base64 import base64encode - +from fedlearner_webconsole.utils.pp_base64 import base64encode +from testing.no_web_server_test_case import NoWebServerTestCase -def create_all_tables(database_uri: str = None): - if database_uri: - db.rebind(database_uri) - # If there's a db file due to some reason, remove it first. - if db.metadata.tables.values(): - db.drop_all() - db.create_all() +class BaseTestCase(NoWebServerTestCase, TestCase): - -class BaseTestCase(TestCase): - class Config(object): - SQLALCHEMY_DATABASE_URI = get_database_uri() + class Config(NoWebServerTestCase.Config): SQLALCHEMY_TRACK_MODIFICATIONS = False JWT_SECRET_KEY = secrets.token_urlsafe(64) PROPAGATE_EXCEPTIONS = True @@ -54,32 +47,35 @@ class Config(object): TESTING = True ENV = 'development' GRPC_LISTEN_PORT = 1990 - START_COMPOSER = False + START_K8S_WATCHER = False def create_app(self): - create_all_tables(self.__class__.Config.SQLALCHEMY_DATABASE_URI) - initial_db() app = create_app(self.__class__.Config) return app def setUp(self): super().setUp() + initial_db() self.signin_helper() + with db.session_scope() as session: + users = UserService(session).get_all_users() + for user in users: + create_iams_for_user(user) def tearDown(self): self.signout_helper() scheduler.stop() - db.drop_all() + composer.stop() super().tearDown() - def get_response_data(self, response): + def get_response_data(self, response) -> dict: return json.loads(response.data).get('data') def signin_as_admin(self): self.signout_helper() - self.signin_helper(username='admin', password='fl@123.') + self.signin_helper(username='admin', password='fl@12345.') - def signin_helper(self, username='ada', password='fl@123.'): + def signin_helper(self, username='ada', password='fl@12345.'): resp = self.client.post('/api/v2/auth/signin', data=json.dumps({ 'username': username, @@ -105,7 +101,7 @@ def _get_headers(self, use_auth=True): def get_helper(self, url, use_auth=True): return self.client.get(url, headers=self._get_headers(use_auth)) - def post_helper(self, url, data, use_auth=True): + def post_helper(self, url, data=None, use_auth=True): return self.client.post(url, 
data=json.dumps(data), content_type='application/json', @@ -126,44 +122,43 @@ def patch_helper(self, url, data, use_auth=True): def delete_helper(self, url, use_auth=True): return self.client.delete(url, headers=self._get_headers(use_auth)) + def assertResponseDataEqual(self, response, expected_data: Union[Dict, List], ignore_fields=None): + """Asserts that the data in the response equals expected_data. + + It is essentially a comparison between two dicts; if ignore_fields is + specified, those fields are ignored in the response.""" + actual_data = self.get_response_data(response) + assert type(actual_data) is type(expected_data), 'different type for response data and expected data!' + self.assertPartiallyEqual(actual_data, expected_data, ignore_fields) + def setup_project(self, role, peer_port): if role == 'leader': peer_role = 'follower' else: peer_role = 'leader' - + patch.object(Envs, 'DEBUG', True).start() + patch.object(Envs, 'GRPC_SERVER_URL', f'127.0.0.1:{peer_port}').start() name = 'test-project' - config = { - 'participants': [{ - 'name': f'party_{peer_role}', - 'url': f'127.0.0.1:{peer_port}', - 'domain_name': f'fl-{peer_role}.com' - }], - 'variables': [{ - 'name': 'EGRESS_URL', - 'value': f'127.0.0.1:{peer_port}' - }] - } - create_response = self.post_helper('/api/v2/projects', - data={ - 'name': name, - 'config': config, - }) - self.assertEqual(create_response.status_code, HTTPStatus.OK) + with db.session_scope() as session: + participant = Participant(name=f'party_{peer_role}', + host='127.0.0.1', + port=peer_port, + domain_name=f'fl-{peer_role}.com') + session.add(participant) + session.commit() + + create_response = self.post_helper('/api/v2/projects', data={ + 'name': name, + 'participant_ids': [1], + }) + self.assertEqual(create_response.status_code, HTTPStatus.CREATED) return json.loads(create_response.data).get('data') - @contextlib.contextmanager - def composer_scope(self, config: ComposerConfig): - with self.app.app_context(): - composer = Composer(config=config) - composer.run(db.engine) - yield composer - composer.stop() - class TestAppProcess(mp.get_context('spawn').Process): + def __init__(self, test_class, method, config=None, result_queue=None): - super(TestAppProcess, self).__init__() + super().__init__() self._test_class = test_class self._method = method self._app_config = config @@ -177,10 +172,7 @@ def run(self): for h in logging.getLogger().handlers[:]: logging.getLogger().removeHandler(h) h.close() - logging.basicConfig( - level=logging.DEBUG, - format= - 'SPAWN:%(filename)s %(lineno)s %(levelname)s - %(message)s') + logging.basicConfig(level=logging.DEBUG, format='SPAWN:%(filename)s %(lineno)s %(levelname)s - %(message)s') if self._app_config: self._test_class.Config = self._app_config test = self._test_class(self._method) @@ -194,6 +186,7 @@ def new_tear_down(*args, **kwargs): for other_q in self.other_process_queues: other_q.put(None) # check if the test succeeded, then wait for the others to finish + # pylint: disable=protected-access if not test._outcome.errors: # wait for others for i in range(len(self.other_process_queues)): @@ -207,45 +200,30 @@ result = suite.run(result) if result.errors: for method, err in result.errors: - print( - '======================================================================' - ) - - print('ERROR:', method) - print( - '----------------------------------------------------------------------' - ) - print(err) - print( - '----------------------------------------------------------------------' - ) + 
logging.error('======================================================================') + + logging.error(f'TestAppProcess ERROR: {method}') + logging.error('----------------------------------------------------------------------') + logging.error(err) + logging.error('----------------------------------------------------------------------') if result.failures: for method, fail in result.failures: - print( - '======================================================================' - ) - print('FAIL:', method) - print( - '----------------------------------------------------------------------' - ) - print(fail) - print( - '----------------------------------------------------------------------' - ) + logging.error('======================================================================') + logging.error(f'TestAppProcess FAIL: {method}') + logging.error('----------------------------------------------------------------------') + logging.error(fail) + logging.error('----------------------------------------------------------------------') assert result.wasSuccessful() self._result_queue.put(True) - except Exception as err: - logging.error('expected happened %s', err) + except Exception: + logging.exception('exception happened') self._result_queue.put(False) raise def multi_process_test(test_list): result_queue = mp.get_context('spawn').Queue() - proc_list = [ - TestAppProcess(t['class'], t['method'], t['config'], result_queue) - for t in test_list - ] + proc_list = [TestAppProcess(t['class'], t['method'], t['config'], result_queue) for t in test_list] for p in proc_list: for other_p in proc_list: @@ -265,16 +243,3 @@ def multi_process_test(test_list): p.join() if p.exitcode != 0: raise Exception(f'Subprocess failed: number {i}') - - -class NoWebServerTestCase(unittest.TestCase): - class Config(object): - SQLALCHEMY_DATABASE_URI = get_database_uri() - - def setUp(self) -> None: - super().setUp() - create_all_tables(self.__class__.Config.SQLALCHEMY_DATABASE_URI) - - def tearDown(self) -> None: - db.drop_all() - return super().tearDown() \ No newline at end of file diff --git a/web_console_v2/api/testing/composer/BUILD.bazel b/web_console_v2/api/testing/composer/BUILD.bazel new file mode 100644 index 000000000..3a629825e --- /dev/null +++ b/web_console_v2/api/testing/composer/BUILD.bazel @@ -0,0 +1,15 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "composer", + testonly = True, + srcs = ["common.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/composer:common_lib", + "//web_console_v2/api/fedlearner_webconsole/utils:pp_time_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) diff --git a/web_console_v2/api/testing/composer/common.py b/web_console_v2/api/testing/composer/common.py new file mode 100644 index 000000000..334b82526 --- /dev/null +++ b/web_console_v2/api/testing/composer/common.py @@ -0,0 +1,40 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +from typing import Tuple + +from fedlearner_webconsole.composer.context import RunnerContext +from fedlearner_webconsole.composer.interface import IRunnerV2 +from fedlearner_webconsole.composer.models import RunnerStatus +from fedlearner_webconsole.proto.composer_pb2 import RunnerOutput +from fedlearner_webconsole.utils import pp_time + + +class TestRunner(IRunnerV2): + + def __init__(self, with_exception=False): + self._with_exception = with_exception + self.context = None + + def run(self, context: RunnerContext) -> Tuple[RunnerStatus, RunnerOutput]: + logging.info(f'[test_runner] context: {context}') + self.context = context + pp_time.sleep(2) + if self._with_exception: + raise RuntimeError('fake error') + if context.index == 3: + return RunnerStatus.FAILED, RunnerOutput(error_message='index is 3') + return RunnerStatus.DONE, RunnerOutput() diff --git a/web_console_v2/api/testing/dataset.py b/web_console_v2/api/testing/dataset.py new file mode 100644 index 000000000..24a0148e7 --- /dev/null +++ b/web_console_v2/api/testing/dataset.py @@ -0,0 +1,98 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from datetime import datetime +from typing import List, Optional +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.dataset.job_configer.base_configer import BaseConfiger +from fedlearner_webconsole.dataset.scheduler.base_executor import BaseExecutor +from fedlearner_webconsole.dataset.scheduler.consts import ExecutorResult +from fedlearner_webconsole.proto.dataset_pb2 import DatasetJobGlobalConfigs +from fedlearner_webconsole.proto import common_pb2 +from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition, WorkflowDefinition +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.utils.workflow import zip_workflow_variables + + +class FakeDatasetJobConfiger(BaseConfiger): + + def get_config(self) -> WorkflowDefinition: + return WorkflowDefinition( + variables=[Variable(name='hello', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=1))], + job_definitions=[ + JobDefinition(variables=[ + Variable( + name='hello_from_job', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=3)) + ]) + ]) + + @property + def user_variables(self) -> List[common_pb2.Variable]: + return list(zip_workflow_variables(self.get_config())) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + del result_dataset_uuid + return global_configs + + +class FakeFederatedDatasetJobConfiger(BaseConfiger): + + def get_config(self) -> WorkflowDefinition: + return WorkflowDefinition( + variables=[Variable(name='hello', value_type=Variable.ValueType.NUMBER, typed_value=Value(number_value=1))], + job_definitions=[ + JobDefinition(variables=[ + Variable(name='hello_from_job', + value_type=Variable.ValueType.NUMBER, + typed_value=Value(number_value=3)) + ], + is_federated=True) + ]) + + @property + def user_variables(self) -> List[common_pb2.Variable]: + return list(zip_workflow_variables(self.get_config())) + + def auto_config_variables(self, global_configs: DatasetJobGlobalConfigs) -> DatasetJobGlobalConfigs: + return global_configs + + def config_local_variables(self, + global_configs: DatasetJobGlobalConfigs, + result_dataset_uuid: str, + event_time: Optional[datetime] = None) -> DatasetJobGlobalConfigs: + del result_dataset_uuid + return global_configs + + +class FakeExecutor(BaseExecutor): + + def get_item_ids(self) -> List[int]: + return [1, 2, 3, 4] + + def run_item(self, item_id: int) -> ExecutorResult: + if item_id == 1: + return ExecutorResult.SUCCEEDED + if item_id == 2: + return ExecutorResult.FAILED + if item_id == 3: + return ExecutorResult.SKIP + raise RuntimeError('run fake executor failed') diff --git a/web_console_v2/api/testing/fake_file_manager.py b/web_console_v2/api/testing/fake_file_manager.py index 4a672d327..98af7e4d8 100644 --- a/web_console_v2/api/testing/fake_file_manager.py +++ b/web_console_v2/api/testing/fake_file_manager.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -19,12 +19,12 @@ class FakeFileManager(FileManagerBase): + def can_handle(self, path: str) -> bool: return path.startswith('fake://') - def ls(self, path: str, recursive=False) -> List[Dict]: - return [{'path': 'fake://data/f1.txt', - 'size': 0}] + def ls(self, path: str, recursive=False, include_directory=False) -> List[Dict]: + return [{'path': 'fake://data/f1.txt', 'size': 0}] def move(self, source: str, destination: str) -> bool: return source.startswith('fake://move') diff --git a/web_console_v2/api/testing/fake_model_job_config.py b/web_console_v2/api/testing/fake_model_job_config.py new file mode 100644 index 000000000..0dd95b0af --- /dev/null +++ b/web_console_v2/api/testing/fake_model_job_config.py @@ -0,0 +1,57 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.protobuf.struct_pb2 import Value + +from fedlearner_webconsole.mmgr.models import ModelJobType +from fedlearner_webconsole.workflow_template.utils import make_variable +from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition, JobDefinition +from fedlearner_webconsole.proto.common_pb2 import Variable +from fedlearner_webconsole.proto.mmgr_pb2 import ModelJobGlobalConfig, ModelJobConfig + + +def get_workflow_config(model_job_type: ModelJobType): + if model_job_type == ModelJobType.TRAINING: + job_type = JobDefinition.JobType.TREE_MODEL_TRAINING + mode = 'train' + else: + job_type = JobDefinition.JobType.TREE_MODEL_EVALUATION + mode = 'eval' + return WorkflowDefinition(job_definitions=[ + JobDefinition(name='train-job', + job_type=job_type, + variables=[ + make_variable(name='mode', typed_value=mode), + Variable(name='data_source'), + Variable(name='data_path'), + Variable(name='file_wildcard'), + ], + yaml_template='{}') + ]) + + +def get_global_config() -> ModelJobGlobalConfig: + global_config = ModelJobGlobalConfig(dataset_uuid='uuid', model_uuid='model-uuid') + config = ModelJobConfig() + config.variables.extend( + [Variable(name='max_iters', typed_value=Value(number_value=4), value_type=Variable.ValueType.NUMBER)]) + global_config.global_config['test'].MergeFrom(config) + config = ModelJobConfig() + config.variables.extend([ + Variable(name='max_iters', typed_value=Value(number_value=4), value_type=Variable.ValueType.NUMBER), + Variable(name='worker_cpu', typed_value=Value(string_value='2000m'), value_type=Variable.ValueType.STRING) + ]) + global_config.global_config['peer'].MergeFrom(config) + return global_config diff --git a/web_console_v2/api/testing/fake_remote_serving.py b/web_console_v2/api/testing/fake_remote_serving.py new file mode 100644 index 000000000..d68934d7b --- /dev/null +++ b/web_console_v2/api/testing/fake_remote_serving.py @@ -0,0 +1,40 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Optional + +from fedlearner_webconsole.proto.serving_pb2 import RemoteDeployConfig, RemoteDeployState +from fedlearner_webconsole.serving.remote import IRemoteServing + + +class FakeRemoteServing(IRemoteServing): + + SERVING_PLATFORM = 'unittest_mock' + DEPLOY_URL = 'test_deploy_url' + + def deploy_model(self, creator: str, config: RemoteDeployConfig) -> Optional[int]: + return 1 + + def get_deploy_url(self, config: RemoteDeployConfig) -> str: + return self.DEPLOY_URL + + def validate_config(self, config: RemoteDeployConfig) -> bool: + pass + + def get_deploy_status(self, config: RemoteDeployConfig) -> RemoteDeployState: + return RemoteDeployState.REMOTE_DEPLOY_READY + + def undeploy_model(self, config: RemoteDeployConfig): + pass diff --git a/web_console_v2/api/testing/fake_time_patcher.py b/web_console_v2/api/testing/fake_time_patcher.py new file mode 100644 index 000000000..3485975db --- /dev/null +++ b/web_console_v2/api/testing/fake_time_patcher.py @@ -0,0 +1,53 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import time +from datetime import datetime, timezone +from unittest.mock import patch +import freezegun + + +class FakeTimePatcher: + + def start(self, initial_time: datetime = datetime.now(timezone.utc)): + + def _prod_fake_sleep(seconds: int): + time.sleep(seconds / 1000) + + self.sleep_path = patch('fedlearner_webconsole.utils.pp_time.sleep', _prod_fake_sleep) + self.sleep_path.start() + # Freezegun will freeze all datetime used; + # sometimes it's desired to ignore FreezeGun behaviour for particular packages. + # (It will ignore some builtin packages such as Thread by default.) + freezegun.configure(extend_ignore_list=['tensorflow']) + self.freezer = freezegun.freeze_time(initial_time) + self.fake_clock = self.freezer.start() + # Freezegun cannot patch func.now automatically, + # so you should add a patcher like the following to patch func.now. + # TODO(xiangyuxuan.prs): remove func.now in composer model. 
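+        # (Assumption: func.now here is SQLAlchemy's func.now(), which normally
+        # renders as a server-side NOW() and so is not intercepted by freezegun;
+        # patching it to datetime.now keeps these timestamps on the frozen clock.)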
+ self.patch_func_now = patch('fedlearner_webconsole.composer.composer.func.now', datetime.now) + self.patch_func_now.start() + self.patch_model_func_now = patch('fedlearner_webconsole.composer.composer_service.func.now', datetime.now) + self.patch_model_func_now.start() + + def stop(self): + self.patch_func_now.stop() + self.patch_model_func_now.stop() + self.freezer.stop() + self.sleep_path.stop() + + def interrupt(self, seconds: int): + self.fake_clock.tick(seconds) + time.sleep(2) diff --git a/web_console_v2/api/testing/helpers.py b/web_console_v2/api/testing/helpers.py new file mode 100644 index 000000000..d5cbe6b79 --- /dev/null +++ b/web_console_v2/api/testing/helpers.py @@ -0,0 +1,43 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import json +from types import SimpleNamespace +from typing import Dict + + +def to_simple_namespace(x: Dict) -> SimpleNamespace: + """A helper function to convert a dict to a SimpleNamespace. + + Sample usage: + ``` + d = {'a': {'b': 123}} + sn = to_simple_namespace(d) + print(sn.a.b) + ``` + """ + return json.loads(json.dumps(x), object_hook=lambda o: SimpleNamespace(**o)) + + +class FakeResponse: + + def __init__(self, json_data, status_code, content=None, headers=None): + self.json_data = json_data + self.status_code = status_code + self.content = content + self.headers = headers or {} + + def json(self): + return self.json_data diff --git a/web_console_v2/api/testing/no_web_server_test_case.py b/web_console_v2/api/testing/no_web_server_test_case.py new file mode 100644 index 000000000..1164ecd50 --- /dev/null +++ b/web_console_v2/api/testing/no_web_server_test_case.py @@ -0,0 +1,88 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import unittest +from typing import Dict, Any, List, Optional, Union + +from itertools import zip_longest +from sqlalchemy.dialects import mysql +from sqlalchemy.orm import Query + +from envs import Envs +from fedlearner_webconsole.db import db, turn_db_timezone_to_utc +from uuid import uuid4 + + +def create_all_tables(database_uri: str = None): + if database_uri: + db.rebind(database_uri) + + # If there's a db file due to some reason, remove it first. + if db.metadata.tables.values(): + db.drop_all() + db.create_all() + + +class NoWebServerTestCase(unittest.TestCase): + """A base test case class which does not depend on API server. 
+ """ + + class Config(object): + SQLALCHEMY_DATABASE_URI = \ + f'sqlite:///{os.path.join(Envs.BASE_DIR, f"{uuid4()}-app.db?check_same_thread=False")}' + + def setUp(self) -> None: + super().setUp() + create_all_tables(turn_db_timezone_to_utc(self.__class__.Config.SQLALCHEMY_DATABASE_URI)) + + def tearDown(self) -> None: + db.drop_all() + return super().tearDown() + + @classmethod + def generate_mysql_statement(cls, query: Query) -> str: + # Uses mysql dialect for testing, and `literal_binds` to inline the parameters. + return str(query.statement.compile(dialect=mysql.dialect(), compile_kwargs={'literal_binds': True})) + + def assertDictPartiallyEqual(self, + actual_dict: Dict[Any, Any], + expected_dict: Dict[Any, Any], + ignore_fields: Optional[List[Any]] = None): + """Asserts if the data in dict equals to expected_data. + we ignore ignore_fields in dict. + """ + if ignore_fields is None: + ignore_fields = [] + # Shallow copy + actual_dict = actual_dict.copy() + expected_dict = expected_dict.copy() + for field in ignore_fields: + if field in actual_dict.keys(): + del actual_dict[field] + if field in expected_dict.keys(): + del expected_dict[field] + + self.assertDictEqual(actual_dict, expected_dict) + + def assertPartiallyEqual(self, + actual_data: Union[Dict, List], + expected_data: Union[Dict, List], + ignore_fields: Optional[List] = None): + if isinstance(actual_data, list): + for a, e in zip_longest(actual_data, expected_data, fillvalue={}): + self.assertDictPartiallyEqual(a, e, ignore_fields) + return + self.assertDictPartiallyEqual(actual_data, expected_data, ignore_fields) diff --git a/web_console_v2/api/testing/rpc/BUILD.bazel b/web_console_v2/api/testing/rpc/BUILD.bazel new file mode 100644 index 000000000..479f7655f --- /dev/null +++ b/web_console_v2/api/testing/rpc/BUILD.bazel @@ -0,0 +1,27 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "client_lib", + testonly = True, + srcs = ["client.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/testing:no_web_server_test_case_lib", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + "@common_grpcio_testing//:pkg", + ], +) + +py_library( + name = "service_lib", + testonly = True, + srcs = ["service.py"], + imports = ["../.."], + deps = [ + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto/testing:py_proto", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + ], +) diff --git a/web_console_v2/api/testing/rpc/client.py b/web_console_v2/api/testing/rpc/client.py new file mode 100644 index 000000000..b15e05e83 --- /dev/null +++ b/web_console_v2/api/testing/rpc/client.py @@ -0,0 +1,96 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import socket +from concurrent import futures +from contextlib import contextmanager +from typing import Optional, List, Callable +from unittest.mock import patch + +import grpc +from grpc import ServerInterceptor +from grpc.framework.foundation import logging_pool + +from testing.no_web_server_test_case import NoWebServerTestCase +from grpc_testing._channel._multi_callable import UnaryUnary + + +def _get_free_port() -> int: + with socket.socket() as sock: + sock.bind(('', 0)) + return sock.getsockname()[1] + + +@contextmanager +def testing_channel(register_service: Callable[[grpc.Server], None], + server_interceptors: Optional[List[ServerInterceptor]] = None): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=1), interceptors=server_interceptors or []) + register_service(server) + port = _get_free_port() + server.add_insecure_port(f'[::]:{port}') + server.start() + + channel = grpc.insecure_channel(target=f'localhost:{port}') + try: + yield channel + finally: + server.stop(None) + + +class RpcClientTestCase(NoWebServerTestCase): + + @classmethod + def setUpClass(cls): + super().setUpClass() + # This is a bug in grpcio-testing: its interface differs from grpcio's + # TODO(linfan.fine): contribute this to grpcio-testing repo + origin_with_call = UnaryUnary.with_call + + def new_with_call(self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None): + return origin_with_call(self=self, + request=request, + timeout=timeout, + metadata=metadata, + credentials=credentials) + + cls._with_call_patcher = patch('grpc_testing._channel._multi_callable.UnaryUnary.with_call', new=new_with_call) + cls._with_call_patcher.start() + + @classmethod + def tearDownClass(cls): + cls._with_call_patcher.stop() + super().tearDownClass() + + def setUp(self): + super().setUp() + self.client_execution_pool = logging_pool.pool(1) + + def tearDown(self): + self.client_execution_pool.shutdown(wait=False) + super().tearDown() + + +class FakeRpcError(grpc.RpcError): + + def __init__(self, code, details): + super().__init__() + self.code = lambda: code + self.details = lambda: details diff --git a/web_console_v2/api/testing/rpc/service.py b/web_console_v2/api/testing/rpc/service.py new file mode 100644 index 000000000..97ec7736d --- /dev/null +++ b/web_console_v2/api/testing/rpc/service.py @@ -0,0 +1,30 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import grpc + +from fedlearner_webconsole.proto.testing import service_pb2_grpc +from fedlearner_webconsole.proto.testing.service_pb2 import FakeUnaryUnaryResponse, FakeStreamStreamResponse, \ + FakeUnaryUnaryRequest + + +class TestService(service_pb2_grpc.TestServiceServicer): + + def FakeUnaryUnary(self, request: FakeUnaryUnaryRequest, context: grpc.ServicerContext): + return FakeUnaryUnaryResponse() + + def FakeStreamStream(self, request_iterator, context): + for _ in request_iterator: + yield FakeStreamStreamResponse() diff --git a/web_console_v2/api/testing/test_data/BUILD.bazel b/web_console_v2/api/testing/test_data/BUILD.bazel new file mode 100644 index 000000000..d6a888578 --- /dev/null +++ b/web_console_v2/api/testing/test_data/BUILD.bazel @@ -0,0 +1,19 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +filegroup( + name = "test_data", + testonly = True, + srcs = glob(["**/*"]), + visibility = ["//visibility:public"], +) + +py_library( + name = "test_data_lib", + testonly = True, + srcs = [ + "es_query_result.py", + ], + imports = ["../.."], +) diff --git a/web_console_v2/api/testing/test_data/algorithm/BUILD.bazel b/web_console_v2/api/testing/test_data/algorithm/BUILD.bazel new file mode 100644 index 000000000..582f1cffd --- /dev/null +++ b/web_console_v2/api/testing/test_data/algorithm/BUILD.bazel @@ -0,0 +1,6 @@ +filegroup( + name = "algorithm", + testonly = True, + srcs = glob(["**/*"]), + visibility = ["//visibility:public"], +) diff --git a/web_console_v2/api/testing/test_data/algorithm/e2e_test/follower/main.py b/web_console_v2/api/testing/test_data/algorithm/e2e_test/follower/main.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/api/testing/test_data/algorithm/e2e_test/leader/main.py b/web_console_v2/api/testing/test_data/algorithm/e2e_test/leader/main.py new file mode 100644 index 000000000..ca07d4cbb --- /dev/null +++ b/web_console_v2/api/testing/test_data/algorithm/e2e_test/leader/main.py @@ -0,0 +1 @@ +import tensorflow diff --git a/web_console_v2/api/testing/test_data/es_query_result.py b/web_console_v2/api/testing/test_data/es_query_result.py new file mode 100644 index 000000000..9725700a1 --- /dev/null +++ b/web_console_v2/api/testing/test_data/es_query_result.py @@ -0,0 +1,4995 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +fake_es_query_nn_metrics_result = { + 'took': 6, + 'timed_out': False, + '_shards': { + 'total': 4, + 'successful': 4, + 'skipped': 0, + 'failed': 0 + }, + 'hits': { + 'total': { + 'value': 4000, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + }, + 'aggregations': { + 'acc': { + 'doc_count': 2000, + 'PROCESS_TIME': { + 'buckets': [{ + 'key_as_string': '2022-02-17T10:27:30.000Z', + 'key': 1645093650000, + 'doc_count': 169, + 'VALUE': { + 'value': 0.37631335140332667 + } + }, { + 'key_as_string': '2022-02-17T10:27:35.000Z', + 'key': 1645093655000, + 'doc_count': 270, + 'VALUE': { + 'value': 0.6482393520849722 + } + }, { + 'key_as_string': '2022-02-17T10:27:40.000Z', + 'key': 1645093660000, + 'doc_count': 290, + 'VALUE': { + 'value': 0.749889914331765 + } + }, { + 'key_as_string': '2022-02-17T10:27:45.000Z', + 'key': 1645093665000, + 'doc_count': 260, + 'VALUE': { + 'value': 0.7920331122783514 + } + }, { + 'key_as_string': '2022-02-17T10:27:50.000Z', + 'key': 1645093670000, + 'doc_count': 119, + 'VALUE': { + 'value': 0.8848890877571427 + } + }, { + 'key_as_string': '2022-02-17T10:27:55.000Z', + 'key': 1645093675000, + 'doc_count': 240, + 'VALUE': { + 'value': 0.8932028951744239 + } + }, { + 'key_as_string': '2022-02-17T10:28:00.000Z', + 'key': 1645093680000, + 'doc_count': 160, + 'VALUE': { + 'value': 0.8983024559915066 + } + }, { + 'key_as_string': '2022-02-17T10:28:05.000Z', + 'key': 1645093685000, + 'doc_count': 160, + 'VALUE': { + 'value': 0.9003030106425285 + } + }, { + 'key_as_string': '2022-02-17T10:28:10.000Z', + 'key': 1645093690000, + 'doc_count': 150, + 'VALUE': { + 'value': 0.9026716228326161 + } + }, { + 'key_as_string': '2022-02-17T10:28:15.000Z', + 'key': 1645093695000, + 'doc_count': 182, + 'VALUE': { + 'value': 0.9047519653053074 + } + }], + 'interval': '1s' + } + }, + 'loss': { + 'doc_count': 2000, + 'PROCESS_TIME': { + 'buckets': [{ + 'key_as_string': '2022-02-17T10:27:30.000Z', + 'key': 1645093650000, + 'doc_count': 169, + 'VALUE': { + 'value': 1.8112774487783219 + } + }, { + 'key_as_string': '2022-02-17T10:27:35.000Z', + 'key': 1645093655000, + 'doc_count': 270, + 'VALUE': { + 'value': 0.8499700573859391 + } + }, { + 'key_as_string': '2022-02-17T10:27:40.000Z', + 'key': 1645093660000, + 'doc_count': 290, + 'VALUE': { + 'value': 0.5077963560819626 + } + }, { + 'key_as_string': '2022-02-17T10:27:45.000Z', + 'key': 1645093665000, + 'doc_count': 260, + 'VALUE': { + 'value': 0.4255857397157412 + } + }, { + 'key_as_string': '2022-02-17T10:27:50.000Z', + 'key': 1645093670000, + 'doc_count': 119, + 'VALUE': { + 'value': 0.3902850116000456 + } + }, { + 'key_as_string': '2022-02-17T10:27:55.000Z', + 'key': 1645093675000, + 'doc_count': 240, + 'VALUE': { + 'value': 0.3689204063266516 + } + }, { + 'key_as_string': '2022-02-17T10:28:00.000Z', + 'key': 1645093680000, + 'doc_count': 160, + 'VALUE': { + 'value': 0.34096595416776837 + } + }, { + 'key_as_string': '2022-02-17T10:28:05.000Z', + 'key': 1645093685000, + 'doc_count': 160, + 'VALUE': { + 'value': 0.3247630867641419 + } + }, { + 'key_as_string': '2022-02-17T10:28:10.000Z', + 'key': 1645093690000, + 'doc_count': 150, + 'VALUE': { + 'value': 0.3146447554727395 + } + }, { + 'key_as_string': '2022-02-17T10:28:15.000Z', + 'key': 1645093695000, + 'doc_count': 182, + 'VALUE': { + 'value': 0.3103061146461047 + } + }], + 'interval': '1s' + } + }, + 'abs': { + 'doc_count': 0, + 'PROCESS_TIME': { + 'buckets': [], + 'interval': '1s' + } + }, + 'mse': { + 'doc_count': 0, + 'PROCESS_TIME': { + 'buckets': [], + 'interval': 
'1s' + } + }, + 'auc': { + 'doc_count': 0, + 'PROCESS_TIME': { + 'buckets': [], + 'interval': '1s' + } + } + } +} + +fake_es_query_tree_metrics_result = { + 'took': 2, + 'timed_out': False, + '_shards': { + 'total': 4, + 'successful': 4, + 'skipped': 0, + 'failed': 0 + }, + 'hits': { + 'total': { + 'value': 100, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + }, + 'aggregations': { + 'ACC': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'u5l-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 0.857, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'eJl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 0.862, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'jJl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 0.868, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'PJp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 0.872, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424992] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'v5p-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 0.886, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'oJp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 0.883, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '95t-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 0.884, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'OZt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 0.895, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '6pt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 0.896, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'UJx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 0.902, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'FN': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'xJl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 128.0, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'gZl-Bn8BDVkhsHlSTmZo', 
+ '_score': None, + '_source': { + 'value': 123.0, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'lZl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 116.0, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'RZp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 115.0, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'yJp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 102.0, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'qZp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 104.0, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'AJt-Bn8BDVkhsHlSqyvd', + '_score': None, + '_source': { + 'value': 105.0, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Qpt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 99.0, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '85t-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 97.0, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448705] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'WZx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 93.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'KS': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'wJl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 0.47770564314760644, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'fZl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 0.5349813321918623, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'kZl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 0.5469192171410906, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'QZp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 0.5596894247461416, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'xJp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 0.5992009702504102, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'pZp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 0.6175715202967825, + 'tags': { + 'iteration': 6 + } + 
}, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '_Jt-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 0.6366317091151221, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Ppt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 0.6989964566835509, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '75t-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 0.7088535349932226, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'VZx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 0.7418848541057288, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'RECALL': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'vZl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 0.40186915887850466, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'epl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 0.4252336448598131, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'jpl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 0.45794392523364486, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Ppp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 0.46261682242990654, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424992] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'wZp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 0.5233644859813084, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'opp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 0.514018691588785, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '-Zt-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 0.5093457943925234, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'O5t-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 0.5373831775700935, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '7Jt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 0.5467289719626168, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Upx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 0.5654205607476636, + 'tags': 
{ + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'FP': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'w5l-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 15.0, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'gJl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 15.0, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'lJl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 16.0, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'RJp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 13.0, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'x5p-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 12.0, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'qJp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 13.0, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '_5t-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 11.0, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'QZt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 6.0, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '8pt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 7.0, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448705] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'WJx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 5.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'F1': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'vpl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 0.546031746031746, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'e5l-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 0.56875, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'j5l-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 0.5975609756097561, + 'tags': { + 'iteration': 3 + } + }, 
+ 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'P5p-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 0.607361963190184, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'wpp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 0.6627218934911242, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'o5p-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 0.6528189910979227, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '-pt-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 0.6526946107784432, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'PJt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 0.6865671641791044, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '7Zt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 0.6923076923076923, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'U5x-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 0.711764705882353, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'PRECISION': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'vJl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 0.8514851485148515, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'eZl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 0.8584905660377359, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'jZl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 0.8596491228070176, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'PZp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 0.8839285714285714, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424992] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'wJp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 0.9032258064516129, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'oZp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 0.8943089430894309, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '-Jt-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 0.9083333333333333, + 'tags': { + 
'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Opt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 0.9504132231404959, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '65t-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 0.9435483870967742, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'UZx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 0.9603174603174603, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'AUC': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'v5l-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 0.8011640626857863, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'fJl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 0.8377684240565029, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'kJl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 0.8533328577203871, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'QJp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 0.860663242253454, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'w5p-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 0.8797977455946351, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'pJp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 0.8921428741290338, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '-5t-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 0.9041610187629308, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'PZt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 0.9179270409740553, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '7pt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 0.928827495184419, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'VJx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 0.9439282062257736, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'ABS': { + 'doc_count': 0, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 
'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'TN': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'wpl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 771.0, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'f5l-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 771.0, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'k5l-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 770.0, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Q5p-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 773.0, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'xpp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 774.0, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'p5p-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 773.0, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '_pt-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 775.0, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'QJt-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 780.0, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '8Zt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 779.0, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'V5x-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 781.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'TP': { + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'wZl-Bn8BDVkhsHlSOwtF', + '_score': None, + '_source': { + 'value': 86.0, + 'tags': { + 'iteration': 1 + } + }, + 'sort': [1645081410370] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'fpl-Bn8BDVkhsHlSTmZo', + '_score': None, + '_source': { + 'value': 91.0, + 'tags': { + 'iteration': 2 + } + }, + 'sort': [1645081415270] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 
'kpl-Bn8BDVkhsHlSYcPm', + '_score': None, + '_source': { + 'value': 98.0, + 'tags': { + 'iteration': 3 + } + }, + 'sort': [1645081420260] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Qpp-Bn8BDVkhsHlSdBpj', + '_score': None, + '_source': { + 'value': 99.0, + 'tags': { + 'iteration': 4 + } + }, + 'sort': [1645081424993] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'xZp-Bn8BDVkhsHlShnT8', + '_score': None, + '_source': { + 'value': 112.0, + 'tags': { + 'iteration': 5 + } + }, + 'sort': [1645081429754] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'ppp-Bn8BDVkhsHlSmdSC', + '_score': None, + '_source': { + 'value': 110.0, + 'tags': { + 'iteration': 6 + } + }, + 'sort': [1645081434496] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '_Zt-Bn8BDVkhsHlSqyrd', + '_score': None, + '_source': { + 'value': 109.0, + 'tags': { + 'iteration': 7 + } + }, + 'sort': [1645081439195] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'P5t-Bn8BDVkhsHlSvn4y', + '_score': None, + '_source': { + 'value': 115.0, + 'tags': { + 'iteration': 8 + } + }, + 'sort': [1645081443888] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': '8Jt-Bn8BDVkhsHlS0csC', + '_score': None, + '_source': { + 'value': 117.0, + 'tags': { + 'iteration': 9 + } + }, + 'sort': [1645081448704] + }, { + '_index': 'metrics_v2-2022.02.16-000010', + '_type': '_doc', + '_id': 'Vpx-Bn8BDVkhsHlS4y3-', + '_score': None, + '_source': { + 'value': 121.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1645081453564] + }] + } + } + } + }, + 'MSRE': { + 'doc_count': 0, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'MSE': { + 'doc_count': 0, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + } + } +} + +fake_es_query_eval_tree_metrics_result = { + 'took': 202, + 'timed_out': False, + '_shards': { + 'total': 4, + 'successful': 4, + 'skipped': 0, + 'failed': 0 + }, + 'hits': { + 'total': { + 'value': 80, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + }, + 'aggregations': { + 'ACC': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'R8d4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 0.792236328125, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'pqV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 0.810791015625, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'sUV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 0.796142578125, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + 
'_type': '_doc', + '_id': 'u0V5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 0.7973116377785638, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '2qh5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 0.806396484375, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Ucd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 0.80224609375, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'ieN5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 0.802734375, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '5Kh5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 0.8078552175587216, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'FN': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'UMd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 746.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'r6V4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 692.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'ukV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 740.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'xEV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 504.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '46h5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 681.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Wsd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 705.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'kuN5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 701.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '7ah5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 429.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'KS': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 
'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'TMd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 0.3645519339138261, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'q6V4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 0.3804289511482969, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'tkV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 0.4052552047443612, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'wEV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 0.3894954124503266, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '36h5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 0.3670764787932738, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Vsd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 0.3761368377138089, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'juN5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 0.3470963824157124, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '6ah5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 0.377164202014282, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'RECALL': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Scd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 0.2021390374331551, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'qKV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 0.22681564245810057, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 's0V4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 0.20600858369098712, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'vUV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 0.2112676056338028, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '3Kh5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 0.2348314606741573, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'U8d5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 0.21666666666666667, + 'tags': { + 'iteration': 10 + } + }, + 'sort': 
[1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'i-N5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 0.2043132803632236, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '5qh5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 0.2393617021276596, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'FP': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'T8d4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 105.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'rqV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 83.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'uUV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 95.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'w0V5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 69.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '4qh5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 112.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Wcd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 105.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'keN5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 107.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '7Kh5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 70.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'F1': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Ssd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 0.3075671277461351, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'qaV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 0.3437764606265876, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'tEV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 0.3150123051681706, + 'tags': { + 
'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'vkV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 0.3202846975088967, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '3ah5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 0.34516928158546656, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'VMd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 0.32499999999999996, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'jON5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 0.30821917808219174, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '56h5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 0.3511053315994798, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'PRECISION': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'SMd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 0.6428571428571429, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'p6V4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 0.7097902097902098, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'skV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 0.6689895470383276, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'vEV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 0.6617647058823529, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '26h5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 0.6510903426791277, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Usd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 0.65, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'iuN5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 0.627177700348432, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '5ah5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 0.6585365853658537, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] 
+ } + } + } + }, + 'AUC': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'S8d4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 0.7464538569159221, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'qqV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 0.749997294839776, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'tUV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 0.7596473266848613, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'v0V5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 0.760564095521739, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '3qh5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 0.7533225973771089, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Vcd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 0.7539168752607426, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'jeN5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 0.7379395321660138, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '6Kh5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 0.7488383167104478, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'ABS': { + 'doc_count': 0, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'TN': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Tsd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 3056.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'raV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 3118.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'uEV4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 3069.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'wkV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 2119.0, + 'tags': { + 'iteration': 10 + } + 
}, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '4ah5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 3094.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'WMd5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 3091.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'kON5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 3108.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '66h5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 1963.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'TP': { + 'doc_count': 8, + 'EVAL': { + 'doc_count': 8, + 'TOP': { + 'hits': { + 'total': { + 'value': 8, + 'relation': 'eq' + }, + 'max_score': + None, + 'hits': [{ + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'Tcd4kX8Bft9MydwO744D', + '_score': None, + '_source': { + 'value': 189.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413096191] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'rKV4kX8BPdYgu5bg9fKY', + '_score': None, + '_source': { + 'value': 203.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413097876] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 't0V4kX8BHocuEQgl-vL9', + '_score': None, + '_source': { + 'value': 192.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413099258] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'wUV5kX8BHocuEQglAfIo', + '_score': None, + '_source': { + 'value': 135.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413100836] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '4Kh5kX8BdTI5PJt-CJEU', + '_score': None, + '_source': { + 'value': 209.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413102609] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'V8d5kX8Bft9MydwODo6l', + '_score': None, + '_source': { + 'value': 195.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413104290] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': 'j-N5kX8BxCxOvEoxFTIT', + '_score': None, + '_source': { + 'value': 180.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413105935] + }, { + '_index': 'metrics_v2-2022.03.12-000035', + '_type': '_doc', + '_id': '6qh5kX8BdTI5PJt-GZHE', + '_score': None, + '_source': { + 'value': 135.0, + 'tags': { + 'iteration': 10 + } + }, + 'sort': [1647413107136] + }] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'MSRE': { + 'doc_count': 0, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + }, + 'MSE': { + 'doc_count': 0, + 'EVAL': { + 
'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + } + } + } + } + } +} + +fake_es_query_tree_metrics_result_v2 = { + 'took': 38, + 'timed_out': False, + '_shards': { + 'total': 72, + 'successful': 72, + 'skipped': 0, + 'failed': 0 + }, + 'hits': { + 'total': { + 'value': 160, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [] + }, + 'aggregations': { + 'ACC': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'viMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.857, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'zyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.895, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '3SMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.868, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '7yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.862, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '-iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.883, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'UxAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.886, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'WRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.872, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'XhAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.884, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'dBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.896, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 
'FCMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.acc': 0.902, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'FN': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'qiMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 116, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'qyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 105, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'zCMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 102, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '5yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 123, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '7SMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 97, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'VxAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 99, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'ZRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 115, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'bRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 128, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'bhAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 104, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'FSMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fn': 93, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'KS': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': 
[], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'qSMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.5469192171410906, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'siMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.6989964566835509, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'uCMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.5349813321918623, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'uSMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.7088535349932226, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '0iMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.5992009702504102, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '8yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.47770564314760644, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '9CMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.6175715202967825, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'BiMeHoEBwUXAbMGy8jt4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.6366317091151221, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'cRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.5596894247461416, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'DSMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.ks': 0.7418848541057288, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'RECALL': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 
'wyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.40186915887850466, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'ySMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.5373831775700935, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '8SMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.4252336448598131, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '8iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.45794392523364486, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '-SMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.46261682242990654, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '-yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.5467289719626168, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'VhAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.5233644859813084, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'axAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.5093457943925234, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'dRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.514018691588785, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'ECMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.recall': 0.5654205607476636, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'FP': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'pyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 15, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, 
+ { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'tCMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 16, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'wCMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 6, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '1CMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 15, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '2yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 13, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '5iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 12, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'AyMeHoEBwUXAbMGy8jt4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 13, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'TxAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 11, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'eBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 7, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'EiMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.fp': 5, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'F1': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'pCMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.5975609756097561, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'sCMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.6627218934911242, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'xCMeHoEBwUXAbMGy8jph', + 
'_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.6528189910979227, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'yiMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.6923076923076923, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '0CMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.546031746031746, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '1iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.56875, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'BCMeHoEBwUXAbMGy8jt4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.6526946107784432, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'bBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.607361963190184, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'dhAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.6865671641791044, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'CyMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.f1': 0.711764705882353, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'PRECISION': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'oyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.8584905660377359, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '1SMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.8596491228070176, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '3iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.9083333333333333, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 
'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '5CMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.8839285714285714, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '5SMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.9435483870967742, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '8CMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.8943089430894309, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '9yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.9032258064516129, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '-CMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.9504132231404959, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'XxAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.8514851485148515, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'DyMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.precision': 0.9603174603174603, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'AUC': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'pSMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.8797977455946351, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'sSMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.9041610187629308, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'xSMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.860663242253454, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '0SMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.8533328577203871, + 'labels': { + 
'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '1yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.928827495184419, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'BSMeHoEBwUXAbMGy8jt4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.8377684240565029, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'VBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.8921428741290338, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'YBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.8011640626857863, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'ZBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.9179270409740553, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'DCMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.auc': 0.9439282062257736, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'ABS': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + }, + 'TN': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'syMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 775, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'yyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 770, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '_iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 771, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'AiMeHoEBwUXAbMGy8jt4', + '_score': 3.7822056, + '_source': { + 
'values.model.train.tree_vertical.tn': 779, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'TRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 774, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'WhAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 771, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'YhAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 773, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'YxAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 780, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'dxAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 773, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'ESMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tn': 781, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'TP': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 10, + 'TOP': { + 'hits': { + 'total': { + 'value': 10, + 'relation': 'eq' + }, + 'max_score': + 3.7822056, + 'hits': [ + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'uiMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 99, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 4, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'vyMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 115, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 8, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '0yMeHoEBwUXAbMGy8jph', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 86, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 1, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '2iMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 117, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 9, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '6yMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 98, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 
'iteration': 3, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': '_CMeHoEBwUXAbMGy8jp4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 91, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 2, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'ASMeHoEBwUXAbMGy8jt4', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 109, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 7, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'YRAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 112, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 5, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'aBAeHoEBFisd-m428sSP', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 110, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 6, + }, + }, + }, + { + '_index': 'apm-7.16.0-metric-000002', + '_type': '_doc', + '_id': 'DiMeHoEBwUXAbMGy-DsO', + '_score': 3.7822056, + '_source': { + 'values.model.train.tree_vertical.tp': 121, + 'labels': { + 'k8s_job_name': 'ue1d22f070d634ae494d-tree-model', + 'iteration': 10, + }, + }, + }, + ], + } + }, + }, + }, + 'MSRE': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + }, + 'MSE': { + 'doc_count': 160, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + }, + }, +} + +fake_es_query_nn_metrics_result_v2 = { + 'aggregations': { + 'LOSS': { + 'meta': {}, + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 1, + 'TOP': { + 'hits': { + 'total': { + 'value': 1, + 'relation': 'eq' + }, + 'max_score': + 3.902454, + 'hits': [{ + '_index': 'apm-7.16.0-metric-000001', + '_type': '_doc', + '_id': 'ffB9sIIBX3IR_A7a36a2', + '_score': 3.902454, + '_source': { + '@timestamp': '2022-08-18T10:26:54.467Z', + 'values.model.train.nn_vertical.loss': 5.694229602813721, + 'labels': { + 'k8s_job_name': 'u30d9521ce4b34c1f8f0-nn-model' + }, + }, + }], + } + }, + }, + }, + 'AUC': { + 'meta': {}, + 'doc_count': 10, + 'EVAL': { + 'doc_count': 0, + 'TOP': { + 'hits': { + 'total': { + 'value': 0, + 'relation': 'eq' + }, + 'max_score': None, + 'hits': [], + } + }, + }, + 'TRAIN': { + 'doc_count': 1, + 'TOP': { + 'hits': { + 'total': { + 'value': 1, + 'relation': 'eq' + }, + 'max_score': + 3.902454, + 'hits': [{ + '_index': 'apm-7.16.0-metric-000001', + '_type': '_doc', + '_id': 'fvB9sIIBX3IR_A7a36a2', + '_score': 3.902454, + '_source': { + '@timestamp': '2022-08-18T10:26:54.467Z', + 'values.model.train.nn_vertical.auc': 0.6585884094238281, + 'labels': { + 'k8s_job_name': 'u30d9521ce4b34c1f8f0-nn-model' + }, + }, + }], + } + }, + 
}, + }, + }, +} diff --git a/web_console_v2/api/testing/test_data/expected_image_preview.json b/web_console_v2/api/testing/test_data/expected_image_preview.json new file mode 100644 index 000000000..b87845f69 --- /dev/null +++ b/web_console_v2/api/testing/test_data/expected_image_preview.json @@ -0,0 +1,559 @@ +{ + "dtypes":[ + { + "key":"file_name", + "value":"string" + }, + { + "key":"width", + "value":"int" + }, + { + "key":"height", + "value":"int" + }, + { + "key":"nChannels", + "value":"int" + }, + { + "key":"mode", + "value":"int" + }, + { + "key":"name", + "value":"string" + }, + { + "key":"created_at", + "value":"string" + }, + { + "key":"caption", + "value":"string" + }, + { + "key":"label", + "value":"string" + } + ], + "sample":[ + [ + "000000050576.jpg", + 640, + 480, + 3, + 16, + "000000050576.jpg", + "2021-08-30", + "A tow truck loading a bank security truck by a building.", + "B" + ], + [ + "000000240137.jpg", + 480, + 640, + 3, + 16, + "000000240137.jpg", + "2021-08-30", + "A dog leaping in the air to grab a frisbee from its owner on a snowy day.", + "B" + ], + [ + "000000457131.jpg", + 640, + 467, + 3, + 16, + "000000457131.jpg", + "2021-08-30", + "a craft is flying behind a tree line", + "C" + ], + [ + "000000181047.jpg", + 640, + 478, + 3, + 16, + "000000181047.jpg", + "2021-08-30", + "A meal with bread and dipping sauce is on a table.", + "B" + ], + [ + "000000206504.jpg", + 640, + 418, + 3, + 16, + "000000206504.jpg", + "2021-08-30", + "An aircraft is parked in a plane hanger.", + "C" + ], + [ + "000000517851.jpg", + 425, + 640, + 3, + 16, + "000000517851.jpg", + "2021-08-30", + "A large bear sitting on a tree stump.", + "A" + ], + [ + "000000137150.jpg", + 640, + 426, + 3, + 16, + "000000137150.jpg", + "2021-08-30", + "A woman carries a basket of bananas on her head while some men stand around.", + "A" + ], + [ + "000000451824.jpg", + 640, + 640, + 3, + 16, + "000000451824.jpg", + "2021-08-30", + "a queen size bed in a small room with a slanted ceiling and a wooden desk", + "C" + ], + [ + "000000068023.jpg", + 640, + 427, + 3, + 16, + "000000068023.jpg", + "2021-08-30", + "A zebra walks through green and brown grass.", + "B" + ], + [ + "000000249815.jpg", + 640, + 480, + 3, + 16, + "000000249815.jpg", + "2021-08-30", + "A street full of bike racks filled with bicycles.", + "A" + ], + [ + "000000229893.jpg", + 640, + 480, + 3, + 16, + "000000229893.jpg", + "2021-08-30", + "Children near a train on a train track.", + "D" + ], + [ + "000000005756.jpg", + 640, + 361, + 3, + 16, + "000000005756.jpg", + "2021-08-30", + "A group of people holding umbrellas looking at graffiti.", + "A" + ], + [ + "000000008181.jpg", + 640, + 480, + 3, + 16, + "000000008181.jpg", + "2021-08-30", + "A motorcycle is parked on a gravel lot", + "C" + ], + [ + "000000018425.jpg", + 640, + 480, + 3, + 16, + "000000018425.jpg", + "2021-08-30", + "Two giraffe grazing on tree leaves under a hazy sky.", + "B" + ], + [ + "000000421116.jpg", + 427, + 640, + 3, + 16, + "000000421116.jpg", + "2021-08-30", + "a couple of motorcycles parked next to each other on the side of a street.", + "B" + ], + [ + "000000398781.jpg", + 640, + 331, + 3, + 16, + "000000398781.jpg", + "2021-08-30", + "A man in a wetsuit is on the beach with a surfboard.", + "B" + ], + [ + "000000511307.jpg", + 495, + 640, + 3, + 16, + "000000511307.jpg", + "2021-08-30", + "A dog snuggled under covers in a bed.", + "D" + ], + [ + "000000410836.jpg", + 640, + 428, + 3, + 16, + "000000410836.jpg", + "2021-08-30", + "A tablet by a monitor, 
keyboard and laptop on a desk.", + "D" + ], + [ + "000000457337.jpg", + 640, + 480, + 3, + 16, + "000000457337.jpg", + "2021-08-30", + "A white and gray cat is stretching on a picnic table. ", + "D" + ], + [ + "000000383379.jpg", + 640, + 425, + 3, + 16, + "000000383379.jpg", + "2021-08-30", + "A classic train sits motionless on train tracks. ", + "D" + ] + ], + "num_example":50, + "metrics":{ + "file_name":{ + "count":"50", + "mean":null, + "stddev":null, + "min":"000000005756.jpg", + "max":"000000562222.jpg", + "missing_count":"0" + }, + "width":{ + "count":"50", + "mean":"585.82", + "stddev":"90.52560580367552", + "min":"333", + "max":"640", + "missing_count":"0" + }, + "height":{ + "count":"50", + "mean":"473.58", + "stddev":"98.5165665132064", + "min":"320", + "max":"640", + "missing_count":"0" + }, + "nChannels":{ + "count":"50", + "mean":"3.0", + "stddev":"0.0", + "min":"3", + "max":"3", + "missing_count":"0" + }, + "mode":{ + "count":"50", + "mean":"16.0", + "stddev":"0.0", + "min":"16", + "max":"16", + "missing_count":"0" + }, + "name":{ + "count":"50", + "mean":null, + "stddev":null, + "min":"000000005756.jpg", + "max":"000000562222.jpg", + "missing_count":"0" + }, + "created_at":{ + "count":"50", + "mean":null, + "stddev":null, + "min":"2021-08-30T16:52:15.501516", + "max":"2021-08-30T16:52:15.501516", + "missing_count":"0" + }, + "caption":{ + "count":"50", + "mean":null, + "stddev":null, + "min":"A baby laying on its belly in front of a laptop.", + "max":"young girl laying in bed next to a brown teddy bear.", + "missing_count":"0" + }, + "label":{ + "count":"50", + "mean":null, + "stddev":null, + "min":"A", + "max":"D", + "missing_count":"0" + } + }, + "images":[ + { + "name":"000000050576.jpg", + "file_name":"000000050576.jpg", + "width":640, + "height":480, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000050576.png" + }, + { + "name":"000000240137.jpg", + "file_name":"000000240137.jpg", + "width":480, + "height":640, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000240137.png" + }, + { + "name":"000000457131.jpg", + "file_name":"000000457131.jpg", + "width":640, + "height":467, + "created_at":"2021-08-30", + "annotation":{ + "label":"C" + }, + "path":"/fake_dir/000000457131.png" + }, + { + "name":"000000181047.jpg", + "file_name":"000000181047.jpg", + "width":640, + "height":478, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000181047.png" + }, + { + "name":"000000206504.jpg", + "file_name":"000000206504.jpg", + "width":640, + "height":418, + "created_at":"2021-08-30", + "annotation":{ + "label":"C" + }, + "path":"/fake_dir/000000206504.png" + }, + { + "name":"000000517851.jpg", + "file_name":"000000517851.jpg", + "width":425, + "height":640, + "created_at":"2021-08-30", + "annotation":{ + "label":"A" + }, + "path":"/fake_dir/000000517851.png" + }, + { + "name":"000000137150.jpg", + "file_name":"000000137150.jpg", + "width":640, + "height":426, + "created_at":"2021-08-30", + "annotation":{ + "label":"A" + }, + "path":"/fake_dir/000000137150.png" + }, + { + "name":"000000451824.jpg", + "file_name":"000000451824.jpg", + "width":640, + "height":640, + "created_at":"2021-08-30", + "annotation":{ + "label":"C" + }, + "path":"/fake_dir/000000451824.png" + }, + { + "name":"000000068023.jpg", + "file_name":"000000068023.jpg", + "width":640, + "height":427, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000068023.png" 
+ }, + { + "name":"000000249815.jpg", + "file_name":"000000249815.jpg", + "width":640, + "height":480, + "created_at":"2021-08-30", + "annotation":{ + "label":"A" + }, + "path":"/fake_dir/000000249815.png" + }, + { + "name":"000000229893.jpg", + "file_name":"000000229893.jpg", + "width":640, + "height":480, + "created_at":"2021-08-30", + "annotation":{ + "label":"D" + }, + "path":"/fake_dir/000000229893.png" + }, + { + "name":"000000005756.jpg", + "file_name":"000000005756.jpg", + "width":640, + "height":361, + "created_at":"2021-08-30", + "annotation":{ + "label":"A" + }, + "path":"/fake_dir/000000005756.png" + }, + { + "name":"000000008181.jpg", + "file_name":"000000008181.jpg", + "width":640, + "height":480, + "created_at":"2021-08-30", + "annotation":{ + "label":"C" + }, + "path":"/fake_dir/000000008181.png" + }, + { + "name":"000000018425.jpg", + "file_name":"000000018425.jpg", + "width":640, + "height":480, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000018425.png" + }, + { + "name":"000000421116.jpg", + "file_name":"000000421116.jpg", + "width":427, + "height":640, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000421116.png" + }, + { + "name":"000000398781.jpg", + "file_name":"000000398781.jpg", + "width":640, + "height":331, + "created_at":"2021-08-30", + "annotation":{ + "label":"B" + }, + "path":"/fake_dir/000000398781.png" + }, + { + "name":"000000511307.jpg", + "file_name":"000000511307.jpg", + "width":495, + "height":640, + "created_at":"2021-08-30", + "annotation":{ + "label":"D" + }, + "path":"/fake_dir/000000511307.png" + }, + { + "name":"000000410836.jpg", + "file_name":"000000410836.jpg", + "width":640, + "height":428, + "created_at":"2021-08-30", + "annotation":{ + "label":"D" + }, + "path":"/fake_dir/000000410836.png" + }, + { + "name":"000000457337.jpg", + "file_name":"000000457337.jpg", + "width":640, + "height":480, + "created_at":"2021-08-30", + "annotation":{ + "label":"D" + }, + "path":"/fake_dir/000000457337.png" + }, + { + "name":"000000383379.jpg", + "file_name":"000000383379.jpg", + "width":640, + "height":425, + "created_at":"2021-08-30", + "annotation":{ + "label":"D" + }, + "path":"/fake_dir/000000383379.png" + } + ] + } \ No newline at end of file diff --git a/web_console_v2/api/testing/test_data/hello.py b/web_console_v2/api/testing/test_data/hello.py new file mode 100644 index 000000000..801442938 --- /dev/null +++ b/web_console_v2/api/testing/test_data/hello.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + +def hello(): + return 1 diff --git a/web_console_v2/api/testing/test_data/image_meta.json b/web_console_v2/api/testing/test_data/image_meta.json new file mode 100644 index 000000000..34b6a71d9 --- /dev/null +++ b/web_console_v2/api/testing/test_data/image_meta.json @@ -0,0 +1,465 @@ +{ + "dtypes": [ + { + "key": "file_name", + "value": "string" + }, + { + "key": "width", + "value": "int" + }, + { + "key": "height", + "value": "int" + }, + { + "key": "nChannels", + "value": "int" + }, + { + "key": "mode", + "value": "int" + }, + { + "key": "name", + "value": "string" + }, + { + "key": "created_at", + "value": "string" + }, + { + "key": "caption", + "value": "string" + }, + { + "key": "label", + "value": "string" + } + ], + "label_count": [ + { + "label": "B", + "count": 19 + }, + { + "label": "D", + "count": 11 + }, + { + "label": "C", + "count": 10 + }, + { + "label": "A", + "count": 10 + } + ], + "count": 50, + "sample": [ + [ + "000000050576.jpg", + 640, + 480, + 3, + 16, + "000000050576.jpg", + "2021-08-30T16:52:15.501516", + "A tow truck loading a bank security truck by a building.", + "B" + ], + [ + "000000240137.jpg", + 480, + 640, + 3, + 16, + "000000240137.jpg", + "2021-08-30T16:52:15.501516", + "A dog leaping in the air to grab a frisbee from its owner on a snowy day.", + "B" + ], + [ + "000000457131.jpg", + 640, + 467, + 3, + 16, + "000000457131.jpg", + "2021-08-30T16:52:15.501516", + "a craft is flying behind a tree line", + "C" + ], + [ + "000000181047.jpg", + 640, + 478, + 3, + 16, + "000000181047.jpg", + "2021-08-30T16:52:15.501516", + "A meal with bread and dipping sauce is on a table.", + "B" + ], + [ + "000000206504.jpg", + 640, + 418, + 3, + 16, + "000000206504.jpg", + "2021-08-30T16:52:15.501516", + "An aircraft is parked in a plane hanger.", + "C" + ], + [ + "000000517851.jpg", + 425, + 640, + 3, + 16, + "000000517851.jpg", + "2021-08-30T16:52:15.501516", + "A large bear sitting on a tree stump.", + "A" + ], + [ + "000000137150.jpg", + 640, + 426, + 3, + 16, + "000000137150.jpg", + "2021-08-30T16:52:15.501516", + "A woman carries a basket of bananas on her head while some men stand around.", + "A" + ], + [ + "000000451824.jpg", + 640, + 640, + 3, + 16, + "000000451824.jpg", + "2021-08-30T16:52:15.501516", + "a queen size bed in a small room with a slanted ceiling and a wooden desk", + "C" + ], + [ + "000000068023.jpg", + 640, + 427, + 3, + 16, + "000000068023.jpg", + "2021-08-30T16:52:15.501516", + "A zebra walks through green and brown grass.", + "B" + ], + [ + "000000249815.jpg", + 640, + 480, + 3, + 16, + "000000249815.jpg", + "2021-08-30T16:52:15.501516", + "A street full of bike racks filled with bicycles.", + "A" + ], + [ + "000000229893.jpg", + 640, + 480, + 3, + 16, + "000000229893.jpg", + "2021-08-30T16:52:15.501516", + "Children near a train on a train track.", + "D" + ], + [ + "000000005756.jpg", + 640, + 361, + 3, + 16, + "000000005756.jpg", + "2021-08-30T16:52:15.501516", + "A group of people holding umbrellas looking at graffiti.", + "A" + ], + [ + "000000008181.jpg", + 640, + 480, + 3, + 16, + "000000008181.jpg", + "2021-08-30T16:52:15.501516", + "A motorcycle is parked on a gravel lot", + "C" + ], + [ + "000000018425.jpg", + 640, + 480, + 3, + 16, + "000000018425.jpg", + "2021-08-30T16:52:15.501516", + "Two giraffe grazing on tree leaves under a hazy sky.", + "B" + ], + [ + "000000421116.jpg", + 427, + 640, + 3, + 16, + "000000421116.jpg", + "2021-08-30T16:52:15.501516", + "a couple of motorcycles parked next to each other on the side of a 
street.", + "B" + ], + [ + "000000398781.jpg", + 640, + 331, + 3, + 16, + "000000398781.jpg", + "2021-08-30T16:52:15.501516", + "A man in a wetsuit is on the beach with a surfboard.", + "B" + ], + [ + "000000511307.jpg", + 495, + 640, + 3, + 16, + "000000511307.jpg", + "2021-08-30T16:52:15.501516", + "A dog snuggled under covers in a bed.", + "D" + ], + [ + "000000410836.jpg", + 640, + 428, + 3, + 16, + "000000410836.jpg", + "2021-08-30T16:52:15.501516", + "A tablet by a monitor, keyboard and laptop on a desk.", + "D" + ], + [ + "000000457337.jpg", + 640, + 480, + 3, + 16, + "000000457337.jpg", + "2021-08-30T16:52:15.501516", + "A white and gray cat is stretching on a picnic table. ", + "D" + ], + [ + "000000383379.jpg", + 640, + 425, + 3, + 16, + "000000383379.jpg", + "2021-08-30T16:52:15.501516", + "A classic train sits motionless on train tracks. ", + "D" + ] + ], + "features": { + "file_name": { + "count": "50", + "mean": null, + "stddev": null, + "min": "000000005756.jpg", + "max": "000000562222.jpg", + "missing_count": "0" + }, + "width": { + "count": "50", + "mean": "585.82", + "stddev": "90.52560580367552", + "min": "333", + "max": "640", + "missing_count": "0" + }, + "height": { + "count": "50", + "mean": "473.58", + "stddev": "98.5165665132064", + "min": "320", + "max": "640", + "missing_count": "0" + }, + "nChannels": { + "count": "50", + "mean": "3.0", + "stddev": "0.0", + "min": "3", + "max": "3", + "missing_count": "0" + }, + "mode": { + "count": "50", + "mean": "16.0", + "stddev": "0.0", + "min": "16", + "max": "16", + "missing_count": "0" + }, + "name": { + "count": "50", + "mean": null, + "stddev": null, + "min": "000000005756.jpg", + "max": "000000562222.jpg", + "missing_count": "0" + }, + "created_at": { + "count": "50", + "mean": null, + "stddev": null, + "min": "2021-08-30T16:52:15.501516", + "max": "2021-08-30T16:52:15.501516", + "missing_count": "0" + }, + "caption": { + "count": "50", + "mean": null, + "stddev": null, + "min": "A baby laying on its belly in front of a laptop.", + "max": "young girl laying in bed next to a brown teddy bear.", + "missing_count": "0" + }, + "label": { + "count": "50", + "mean": null, + "stddev": null, + "min": "A", + "max": "D", + "missing_count": "0" + } + }, + "hist": { + "width": { + "x": [ + 333.0, + 363.7, + 394.4, + 425.1, + 455.8, + 486.5, + 517.2, + 547.9, + 578.6, + 609.3, + 640.0 + ], + "y": [ + 1, + 1, + 1, + 4, + 3, + 4, + 0, + 0, + 0, + 36 + ] + }, + "height": { + "x": [ + 320.0, + 352.0, + 384.0, + 416.0, + 448.0, + 480.0, + 512.0, + 544.0, + 576.0, + 608.0, + 640.0 + ], + "y": [ + 4, + 4, + 2, + 16, + 2, + 11, + 0, + 0, + 0, + 11 + ] + }, + "nChannels": { + "x": [ + 3.0, + 3.0, + 3.0, + 3.0, + 3.0, + 3.0, + 3.0, + 3.0, + 3.0, + 3.0, + 3.0 + ], + "y": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 50 + ] + }, + "mode": { + "x": [ + 16.0, + 16.0, + 16.0, + 16.0, + 16.0, + 16.0, + 16.0, + 16.0, + 16.0, + 16.0, + 16.0 + ], + "y": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 50 + ] + } + } +} \ No newline at end of file diff --git a/web_console_v2/api/testing/test_data/saved_model.pbtxt b/web_console_v2/api/testing/test_data/saved_model.pbtxt new file mode 100644 index 000000000..eef74fb0d --- /dev/null +++ b/web_console_v2/api/testing/test_data/saved_model.pbtxt @@ -0,0 +1,3192 @@ +saved_model_schema_version: 1 +meta_graphs { + meta_info_def { + stripped_op_list { + op { + name: "Add" + input_arg { + name: "x" + type_attr: "T" + } + input_arg { + name: "y" + type_attr: "T" + } + output_arg { + name: "z" + type_attr: 
"T" + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_BFLOAT16 + type: DT_HALF + type: DT_FLOAT + type: DT_DOUBLE + type: DT_UINT8 + type: DT_INT8 + type: DT_INT16 + type: DT_INT32 + type: DT_INT64 + type: DT_COMPLEX64 + type: DT_COMPLEX128 + type: DT_STRING + } + } + } + } + op { + name: "Assign" + input_arg { + name: "ref" + type_attr: "T" + is_ref: true + } + input_arg { + name: "value" + type_attr: "T" + } + output_arg { + name: "output_ref" + type_attr: "T" + is_ref: true + } + attr { + name: "T" + type: "type" + } + attr { + name: "validate_shape" + type: "bool" + default_value { + b: true + } + } + attr { + name: "use_locking" + type: "bool" + default_value { + b: true + } + } + allows_uninitialized_input: true + } + op { + name: "BiasAdd" + input_arg { + name: "value" + type_attr: "T" + } + input_arg { + name: "bias" + type_attr: "T" + } + output_arg { + name: "output" + type_attr: "T" + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_FLOAT + type: DT_DOUBLE + type: DT_INT32 + type: DT_UINT8 + type: DT_INT16 + type: DT_INT8 + type: DT_COMPLEX64 + type: DT_INT64 + type: DT_QINT8 + type: DT_QUINT8 + type: DT_QINT32 + type: DT_BFLOAT16 + type: DT_UINT16 + type: DT_COMPLEX128 + type: DT_HALF + type: DT_UINT32 + type: DT_UINT64 + } + } + } + attr { + name: "data_format" + type: "string" + default_value { + s: "NHWC" + } + allowed_values { + list { + s: "NHWC" + s: "NCHW" + } + } + } + } + op { + name: "ConcatV2" + input_arg { + name: "values" + type_attr: "T" + number_attr: "N" + } + input_arg { + name: "axis" + type_attr: "Tidx" + } + output_arg { + name: "output" + type_attr: "T" + } + attr { + name: "N" + type: "int" + has_minimum: true + minimum: 2 + } + attr { + name: "T" + type: "type" + } + attr { + name: "Tidx" + type: "type" + default_value { + type: DT_INT32 + } + allowed_values { + list { + type: DT_INT32 + type: DT_INT64 + } + } + } + } + op { + name: "Const" + output_arg { + name: "output" + type_attr: "dtype" + } + attr { + name: "value" + type: "tensor" + } + attr { + name: "dtype" + type: "type" + } + } + op { + name: "Identity" + input_arg { + name: "input" + type_attr: "T" + } + output_arg { + name: "output" + type_attr: "T" + } + attr { + name: "T" + type: "type" + } + } + op { + name: "MatMul" + input_arg { + name: "a" + type_attr: "T" + } + input_arg { + name: "b" + type_attr: "T" + } + output_arg { + name: "product" + type_attr: "T" + } + attr { + name: "transpose_a" + type: "bool" + default_value { + b: false + } + } + attr { + name: "transpose_b" + type: "bool" + default_value { + b: false + } + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_BFLOAT16 + type: DT_HALF + type: DT_FLOAT + type: DT_DOUBLE + type: DT_INT32 + type: DT_INT64 + type: DT_COMPLEX64 + type: DT_COMPLEX128 + } + } + } + } + op { + name: "MergeV2Checkpoints" + input_arg { + name: "checkpoint_prefixes" + type: DT_STRING + } + input_arg { + name: "destination_prefix" + type: DT_STRING + } + attr { + name: "delete_old_dirs" + type: "bool" + default_value { + b: true + } + } + is_stateful: true + } + op { + name: "Mul" + input_arg { + name: "x" + type_attr: "T" + } + input_arg { + name: "y" + type_attr: "T" + } + output_arg { + name: "z" + type_attr: "T" + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_BFLOAT16 + type: DT_HALF + type: DT_FLOAT + type: DT_DOUBLE + type: DT_UINT8 + type: DT_INT8 + type: DT_UINT16 + type: DT_INT16 + type: DT_INT32 + type: DT_INT64 + type: DT_COMPLEX64 
+ type: DT_COMPLEX128 + } + } + } + is_commutative: true + } + op { + name: "NoOp" + } + op { + name: "Pack" + input_arg { + name: "values" + type_attr: "T" + number_attr: "N" + } + output_arg { + name: "output" + type_attr: "T" + } + attr { + name: "N" + type: "int" + has_minimum: true + minimum: 1 + } + attr { + name: "T" + type: "type" + } + attr { + name: "axis" + type: "int" + default_value { + i: 0 + } + } + } + op { + name: "ParseExample" + input_arg { + name: "serialized" + type: DT_STRING + } + input_arg { + name: "names" + type: DT_STRING + } + input_arg { + name: "sparse_keys" + type: DT_STRING + number_attr: "Nsparse" + } + input_arg { + name: "dense_keys" + type: DT_STRING + number_attr: "Ndense" + } + input_arg { + name: "dense_defaults" + type_list_attr: "Tdense" + } + output_arg { + name: "sparse_indices" + type: DT_INT64 + number_attr: "Nsparse" + } + output_arg { + name: "sparse_values" + type_list_attr: "sparse_types" + } + output_arg { + name: "sparse_shapes" + type: DT_INT64 + number_attr: "Nsparse" + } + output_arg { + name: "dense_values" + type_list_attr: "Tdense" + } + attr { + name: "Nsparse" + type: "int" + has_minimum: true + } + attr { + name: "Ndense" + type: "int" + has_minimum: true + } + attr { + name: "sparse_types" + type: "list(type)" + has_minimum: true + allowed_values { + list { + type: DT_FLOAT + type: DT_INT64 + type: DT_STRING + } + } + } + attr { + name: "Tdense" + type: "list(type)" + has_minimum: true + allowed_values { + list { + type: DT_FLOAT + type: DT_INT64 + type: DT_STRING + } + } + } + attr { + name: "dense_shapes" + type: "list(shape)" + has_minimum: true + } + } + op { + name: "Placeholder" + output_arg { + name: "output" + type_attr: "dtype" + } + attr { + name: "dtype" + type: "type" + } + attr { + name: "shape" + type: "shape" + default_value { + shape { + unknown_rank: true + } + } + } + } + op { + name: "PlaceholderWithDefault" + input_arg { + name: "input" + type_attr: "dtype" + } + output_arg { + name: "output" + type_attr: "dtype" + } + attr { + name: "dtype" + type: "type" + } + attr { + name: "shape" + type: "shape" + } + } + op { + name: "RandomUniform" + input_arg { + name: "shape" + type_attr: "T" + } + output_arg { + name: "output" + type_attr: "dtype" + } + attr { + name: "seed" + type: "int" + default_value { + i: 0 + } + } + attr { + name: "seed2" + type: "int" + default_value { + i: 0 + } + } + attr { + name: "dtype" + type: "type" + allowed_values { + list { + type: DT_HALF + type: DT_BFLOAT16 + type: DT_FLOAT + type: DT_DOUBLE + } + } + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_INT32 + type: DT_INT64 + } + } + } + is_stateful: true + } + op { + name: "Relu" + input_arg { + name: "features" + type_attr: "T" + } + output_arg { + name: "activations" + type_attr: "T" + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_FLOAT + type: DT_DOUBLE + type: DT_INT32 + type: DT_UINT8 + type: DT_INT16 + type: DT_INT8 + type: DT_INT64 + type: DT_BFLOAT16 + type: DT_UINT16 + type: DT_HALF + type: DT_UINT32 + type: DT_UINT64 + type: DT_QINT8 + } + } + } + } + op { + name: "RestoreV2" + input_arg { + name: "prefix" + type: DT_STRING + } + input_arg { + name: "tensor_names" + type: DT_STRING + } + input_arg { + name: "shape_and_slices" + type: DT_STRING + } + output_arg { + name: "tensors" + type_list_attr: "dtypes" + } + attr { + name: "dtypes" + type: "list(type)" + has_minimum: true + minimum: 1 + } + is_stateful: true + } + op { + name: "SaveV2" + input_arg { + name: 
"prefix" + type: DT_STRING + } + input_arg { + name: "tensor_names" + type: DT_STRING + } + input_arg { + name: "shape_and_slices" + type: DT_STRING + } + input_arg { + name: "tensors" + type_list_attr: "dtypes" + } + attr { + name: "dtypes" + type: "list(type)" + has_minimum: true + minimum: 1 + } + is_stateful: true + } + op { + name: "ShardedFilename" + input_arg { + name: "basename" + type: DT_STRING + } + input_arg { + name: "shard" + type: DT_INT32 + } + input_arg { + name: "num_shards" + type: DT_INT32 + } + output_arg { + name: "filename" + type: DT_STRING + } + } + op { + name: "StringJoin" + input_arg { + name: "inputs" + type: DT_STRING + number_attr: "N" + } + output_arg { + name: "output" + type: DT_STRING + } + attr { + name: "N" + type: "int" + has_minimum: true + minimum: 1 + } + attr { + name: "separator" + type: "string" + default_value { + s: "" + } + } + } + op { + name: "Sub" + input_arg { + name: "x" + type_attr: "T" + } + input_arg { + name: "y" + type_attr: "T" + } + output_arg { + name: "z" + type_attr: "T" + } + attr { + name: "T" + type: "type" + allowed_values { + list { + type: DT_BFLOAT16 + type: DT_HALF + type: DT_FLOAT + type: DT_DOUBLE + type: DT_UINT8 + type: DT_INT8 + type: DT_UINT16 + type: DT_INT16 + type: DT_INT32 + type: DT_INT64 + type: DT_COMPLEX64 + type: DT_COMPLEX128 + } + } + } + } + op { + name: "VariableV2" + output_arg { + name: "ref" + type_attr: "dtype" + is_ref: true + } + attr { + name: "shape" + type: "shape" + } + attr { + name: "dtype" + type: "type" + } + attr { + name: "container" + type: "string" + default_value { + s: "" + } + } + attr { + name: "shared_name" + type: "string" + default_value { + s: "" + } + } + is_stateful: true + } + } + tags: "serve" + tensorflow_version: "1.15.2" + tensorflow_git_version: "v1.15.0-92-g5d80e1e8e6" + } + graph_def { + node { + name: "examples" + op: "Placeholder" + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "shape" + value { + shape { + unknown_rank: true + } + } + } + } + node { + name: "ParseExample/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } + } + node { + name: "ParseExample/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } + } + node { + name: "ParseExample/ParseExample/names" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } + } + node { + name: "ParseExample/ParseExample/dense_keys_0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "example_id" + } + } + } + } + node { + name: "ParseExample/ParseExample/dense_keys_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "x" + } + } + } + } + node { + name: "ParseExample/ParseExample" + op: "ParseExample" + input: "examples" + input: "ParseExample/ParseExample/names" + input: "ParseExample/ParseExample/dense_keys_0" + input: "ParseExample/ParseExample/dense_keys_1" + input: "ParseExample/Const" + input: "ParseExample/Const_1" + attr { + key: "Ndense" + value { + i: 2 + } + } + attr { + key: "Nsparse" + value { + i: 0 + } + } + attr { + key: "Tdense" + value { + list { + type: DT_STRING + type: DT_FLOAT + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + dim { + size: 392 + } + } + } + } + } + attr { + key: "dense_shapes" + value { + list { + shape { + } + shape { + dim { + size: 392 + } + } + } + } + } + attr { + key: "sparse_types" + value { + list { + } + } + } + } + node { + name: "act1_f" + op: "Placeholder" + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + unknown_rank: true + } + } + } + } + node { + name: "w1l/Initializer/random_uniform/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\210\001\000\000\200\000\000\000" + } + } + } + } + node { + name: "w1l/Initializer/random_uniform/min" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -0.01 + } + } + } + } + node { + name: "w1l/Initializer/random_uniform/max" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.01 + } + } + } + } + node { + name: "w1l/Initializer/random_uniform/RandomUniform" + op: "RandomUniform" + input: "w1l/Initializer/random_uniform/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } + } + node { + name: "w1l/Initializer/random_uniform/sub" + op: "Sub" + input: "w1l/Initializer/random_uniform/max" + input: "w1l/Initializer/random_uniform/min" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node { + name: "w1l/Initializer/random_uniform/mul" + op: 
"Mul" + input: "w1l/Initializer/random_uniform/RandomUniform" + input: "w1l/Initializer/random_uniform/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + } + node { + name: "w1l/Initializer/random_uniform" + op: "Add" + input: "w1l/Initializer/random_uniform/mul" + input: "w1l/Initializer/random_uniform/min" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + } + node { + name: "w1l" + op: "VariableV2" + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + attr { + key: "shared_name" + value { + s: "" + } + } + } + node { + name: "w1l/Assign" + op: "Assign" + input: "w1l" + input: "w1l/Initializer/random_uniform" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "w1l/read" + op: "Identity" + input: "w1l" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + } + node { + name: "b1l/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@b1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 128 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 128 + } + } + float_val: 0.0 + } + } + } + } + node { + name: "b1l" + op: "VariableV2" + attr { + key: "_class" + value { + list { + s: "loc:@b1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 128 + } + } + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 128 + } + } + } + } + attr { + key: "shared_name" + value { + s: "" + } + } + } + node { + name: "b1l/Assign" + op: "Assign" + input: "b1l" + input: "b1l/Initializer/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@b1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 128 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "b1l/read" + op: "Identity" + input: "b1l" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { 
+ key: "_class" + value { + list { + s: "loc:@b1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 128 + } + } + } + } + } + } + node { + name: "w2/Initializer/random_uniform/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\001\000\000\n\000\000\000" + } + } + } + } + node { + name: "w2/Initializer/random_uniform/min" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -0.01 + } + } + } + } + node { + name: "w2/Initializer/random_uniform/max" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.01 + } + } + } + } + node { + name: "w2/Initializer/random_uniform/RandomUniform" + op: "RandomUniform" + input: "w2/Initializer/random_uniform/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } + } + node { + name: "w2/Initializer/random_uniform/sub" + op: "Sub" + input: "w2/Initializer/random_uniform/max" + input: "w2/Initializer/random_uniform/min" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node { + name: "w2/Initializer/random_uniform/mul" + op: "Mul" + input: "w2/Initializer/random_uniform/RandomUniform" + input: "w2/Initializer/random_uniform/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + } + node { + name: "w2/Initializer/random_uniform" + op: "Add" + input: "w2/Initializer/random_uniform/mul" + input: "w2/Initializer/random_uniform/min" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + } + node { + name: "w2" + op: "VariableV2" + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { 
+ dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + attr { + key: "shared_name" + value { + s: "" + } + } + } + node { + name: "w2/Assign" + op: "Assign" + input: "w2" + input: "w2/Initializer/random_uniform" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "w2/read" + op: "Identity" + input: "w2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + } + node { + name: "b2/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@b2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 10 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 10 + } + } + float_val: 0.0 + } + } + } + } + node { + name: "b2" + op: "VariableV2" + attr { + key: "_class" + value { + list { + s: "loc:@b2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 10 + } + } + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 10 + } + } + } + } + attr { + key: "shared_name" + value { + s: "" + } + } + } + node { + name: "b2/Assign" + op: "Assign" + input: "b2" + input: "b2/Initializer/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@b2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 10 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "b2/read" + op: "Identity" + input: "b2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@b2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 10 + } + } + } + } + } + } + node { + name: "MatMul" + op: "MatMul" + input: "ParseExample/ParseExample:1" + input: "w1l/read" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 128 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } + } + node { + name: "BiasAdd" + op: "BiasAdd" + input: "MatMul" + input: "b1l/read" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 128 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } + } + node { + name: "Relu" + op: "Relu" + input: "BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 128 + } + } + } + } + } + } + node { + name: "concat/axis" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } + } + node { + name: "concat" + op: "ConcatV2" + input: "Relu" + input: "act1_f" + input: "concat/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: -1 + } + } + } + } + } + } + node { + name: "MatMul_1" + op: "MatMul" + input: "concat" + input: "w2/read" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 10 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } + } + node { + name: "BiasAdd_1" + op: "BiasAdd" + input: "MatMul_1" + input: "b2/read" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 10 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } + } + node { + name: "save/filename/input" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "model" + } + } + } + } + node { + name: "save/filename" + op: "PlaceholderWithDefault" + input: "save/filename/input" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "save/Const" + op: "PlaceholderWithDefault" + input: "save/filename" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "save/StringJoin/inputs_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "_temp_6d206bff29b74af6822ca72c4313110b/part" + } + } + } + } + node { + name: "save/StringJoin" + op: "StringJoin" + input: "save/Const" + input: "save/StringJoin/inputs_1" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "separator" + value { + s: "" + } + } + } + node { + name: "save/num_shards" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } + } + node { + name: "save/ShardedFilename/shard" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } + } + node { + name: "save/ShardedFilename" + op: "ShardedFilename" + input: 
"save/StringJoin" + input: "save/ShardedFilename/shard" + input: "save/num_shards" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node { + name: "save/SaveV2/tensor_names" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 4 + } + } + string_val: "b1l" + string_val: "b2" + string_val: "w1l" + string_val: "w2" + } + } + } + } + node { + name: "save/SaveV2/shape_and_slices" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 4 + } + } + string_val: "" + string_val: "" + string_val: "" + string_val: "" + } + } + } + } + node { + name: "save/SaveV2" + op: "SaveV2" + input: "save/ShardedFilename" + input: "save/SaveV2/tensor_names" + input: "save/SaveV2/shape_and_slices" + input: "b1l" + input: "b2" + input: "w1l" + input: "w2" + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + } + } + } + } + node { + name: "save/control_dependency" + op: "Identity" + input: "save/ShardedFilename" + input: "^save/SaveV2" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_class" + value { + list { + s: "loc:@save/ShardedFilename" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node { + name: "save/MergeV2Checkpoints/checkpoint_prefixes" + op: "Pack" + input: "save/ShardedFilename" + input: "^save/control_dependency" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } + } + node { + name: "save/MergeV2Checkpoints" + op: "MergeV2Checkpoints" + input: "save/MergeV2Checkpoints/checkpoint_prefixes" + input: "save/Const" + attr { + key: "delete_old_dirs" + value { + b: true + } + } + } + node { + name: "save/Identity" + op: "Identity" + input: "save/Const" + input: "^save/MergeV2Checkpoints" + input: "^save/control_dependency" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node { + name: "save/RestoreV2/tensor_names" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 4 + } + } + string_val: "b1l" + string_val: "b2" + string_val: "w1l" + string_val: "w2" + } + } + } + } + node { + name: "save/RestoreV2/shape_and_slices" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 4 + } + } + string_val: "" + string_val: "" + string_val: "" + string_val: "" + } + } + } + } + node { + name: "save/RestoreV2" + op: "RestoreV2" + input: "save/Const" + input: "save/RestoreV2/tensor_names" + input: 
"save/RestoreV2/shape_and_slices" + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + } + } + } + } + node { + name: "save/Assign" + op: "Assign" + input: "b1l" + input: "save/RestoreV2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@b1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 128 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "save/Assign_1" + op: "Assign" + input: "b2" + input: "save/RestoreV2:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@b2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 10 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "save/Assign_2" + op: "Assign" + input: "w1l" + input: "save/RestoreV2:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w1l" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 392 + } + dim { + size: 128 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "save/Assign_3" + op: "Assign" + input: "w2" + input: "save/RestoreV2:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@w2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 256 + } + dim { + size: 10 + } + } + } + } + } + attr { + key: "use_locking" + value { + b: true + } + } + attr { + key: "validate_shape" + value { + b: true + } + } + } + node { + name: "save/restore_shard" + op: "NoOp" + input: "^save/Assign" + input: "^save/Assign_1" + input: "^save/Assign_2" + input: "^save/Assign_3" + } + node { + name: "save/restore_all" + op: "NoOp" + input: "^save/restore_shard" + } + library { + } + versions { + producer: 134 + } + } + saver_def { + filename_tensor_name: "save/Const:0" + save_tensor_name: "save/Identity:0" + restore_op_name: "save/restore_all" + max_to_keep: 5 + sharded: true + keep_checkpoint_every_n_hours: 10000.0 + version: V2 + } + collection_def { + key: "trainable_variables" + value { + bytes_list { + value: "\n\005w1l:0\022\nw1l/Assign\032\nw1l/read:02 w1l/Initializer/random_uniform:08\001" + value: "\n\005b1l:0\022\nb1l/Assign\032\nb1l/read:02\027b1l/Initializer/zeros:08\001" + value: "\n\004w2:0\022\tw2/Assign\032\tw2/read:02\037w2/Initializer/random_uniform:08\001" + value: "\n\004b2:0\022\tb2/Assign\032\tb2/read:02\026b2/Initializer/zeros:08\001" + } + } + } + collection_def { + key: "variables" + value { + bytes_list { + value: "\n\005w1l:0\022\nw1l/Assign\032\nw1l/read:02 w1l/Initializer/random_uniform:08\001" + value: "\n\005b1l:0\022\nb1l/Assign\032\nb1l/read:02\027b1l/Initializer/zeros:08\001" + value: "\n\004w2:0\022\tw2/Assign\032\tw2/read:02\037w2/Initializer/random_uniform:08\001" + value: 
"\n\004b2:0\022\tb2/Assign\032\tb2/read:02\026b2/Initializer/zeros:08\001" + } + } + } + signature_def { + key: "serving_default" + value { + inputs { + key: "act1_f" + value { + name: "act1_f:0" + dtype: DT_FLOAT + tensor_shape { + unknown_rank: true + } + } + } + inputs { + key: "examples" + value { + name: "examples:0" + dtype: DT_STRING + tensor_shape { + unknown_rank: true + } + } + } + outputs { + key: "output" + value { + name: "BiasAdd_1:0" + dtype: DT_FLOAT + tensor_shape { + dim { + size: -1 + } + dim { + size: 10 + } + } + } + } + method_name: "tensorflow/serving/predict" + } + } +} diff --git a/web_console_v2/api/testing/test_data/sparkapp.tar b/web_console_v2/api/testing/test_data/sparkapp.tar new file mode 100644 index 000000000..a77195a21 Binary files /dev/null and b/web_console_v2/api/testing/test_data/sparkapp.tar differ diff --git a/web_console_v2/api/testing/test_data/test_sso.json b/web_console_v2/api/testing/test_data/test_sso.json new file mode 100644 index 000000000..725915207 --- /dev/null +++ b/web_console_v2/api/testing/test_data/test_sso.json @@ -0,0 +1,30 @@ +[ + { + "name": "test", + "icon_url": "", + "display_name": "test", + "protocol_type": "oauth", + "oauth": { + "client_id": "test_client_id", + "authorize_url": "authorize_url", + "access_token_url": "access_token_url", + "user_info_url": "user_info_url", + "logout_url": "logout_url", + "secret": "secret", + "username_key": "username", + "email_key": "email" + } + }, + { + "name": "test_cas", + "display_name": "test", + "icon_url": "", + "protocol_type": "cas", + "cas": { + "cas_server_url": "https://test-sso.asdf.net/cas", + "service_url": "https://fl-v2-test.asdf.net/v2/sso-callback/test_cas", + "login_route": "/login", + "validate_route": "/serviceValidate" + } + } +] \ No newline at end of file diff --git a/web_console_v2/api/testing/test_data/tfrecord_test.xx.aaa.data b/web_console_v2/api/testing/test_data/tfrecord_test.xx.aaa.data new file mode 100644 index 000000000..af9b03968 Binary files /dev/null and b/web_console_v2/api/testing/test_data/tfrecord_test.xx.aaa.data differ diff --git a/web_console_v2/api/testing/test_data/workflow_config.json b/web_console_v2/api/testing/test_data/workflow_config.json new file mode 100644 index 000000000..fd22905c9 --- /dev/null +++ b/web_console_v2/api/testing/test_data/workflow_config.json @@ -0,0 +1,57 @@ +{ + "group_alias": "test_workflow", + "variables": [ + { + "name": "v1", + "value": "value1", + "access_mode": "PRIVATE", + "widget_schema": "" + }, + { + "name": "v2", + "value": "value2", + "access_mode": "PEER_READABLE", + "widget_schema": "" + }, + { + "name": "v3", + "value": "value3", + "access_mode": "PEER_WRITABLE", + "widget_schema": "" + } + ], + "job_definitions": [ + { + "name": "data-import", + "job_type": "RAW_DATA", + "is_federated": false, + "yaml_template": "{}", + "variables": [], + "dependencies": [] + }, + { + "name": "data-join", + "job_type": "PSI_DATA_JOIN", + "is_federated": true, + "yaml_template": "{}", + "variables": [], + "dependencies": [ + { + "source": "data-import" + } + ] + }, + { + "name": "training", + "job_type": "TREE_MODEL_TRAINING", + "is_federated": true, + "yaml_template": "{}", + "variables": [], + "dependencies": [ + { + "source": "data-join" + } + ] + } + ] +} \ No newline at end of file diff --git a/web_console_v2/api/testing/test_data/workflow_config_right.json b/web_console_v2/api/testing/test_data/workflow_config_right.json new file mode 100644 index 000000000..4e7ac43d4 --- /dev/null +++ 
b/web_console_v2/api/testing/test_data/workflow_config_right.json @@ -0,0 +1,57 @@ +{ + "group_alias": "test_workflow", + "variables": [ + { + "name": "v1", + "value": "value1", + "access_mode": "PRIVATE", + "widget_schema": "" + }, + { + "name": "v2", + "value": "value2", + "access_mode": "PEER_READABLE", + "widget_schema": "" + }, + { + "name": "v3", + "value": "value3", + "access_mode": "PEER_WRITABLE", + "widget_schema": "" + } + ], + "job_definitions": [ + { + "name": "data-import", + "job_type": "RAW_DATA", + "is_federated": false, + "yaml_template": "data-import-yaml", + "variables": [], + "dependencies": [] + }, + { + "name": "data-join", + "job_type": "PSI_DATA_JOIN", + "is_federated": true, + "yaml_template": "data-join-yaml", + "variables": [], + "dependencies": [ + { + "source": "data-import" + } + ] + }, + { + "name": "training", + "job_type": "TREE_MODEL_TRAINING", + "is_federated": true, + "yaml_template": "training-yaml", + "variables": [], + "dependencies": [ + { + "source": "data-join" + } + ] + } + ] +} \ No newline at end of file diff --git a/web_console_v2/api/testing/workflow_template/BUILD.bazel b/web_console_v2/api/testing/workflow_template/BUILD.bazel new file mode 100644 index 000000000..e5f6279f1 --- /dev/null +++ b/web_console_v2/api/testing/workflow_template/BUILD.bazel @@ -0,0 +1,16 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +py_library( + name = "workflow_template", + testonly = True, + srcs = [ + "test_template_left.py", + ], + imports = ["../.."], + deps = [ + "//web_console_v2/api/fedlearner_webconsole/workflow_template:service_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + ], +) diff --git a/web_console_v2/api/testing/workflow_template/__init__.py b/web_console_v2/api/testing/workflow_template/__init__.py index 3e28547fe..c13b80f8f 100644 --- a/web_console_v2/api/testing/workflow_template/__init__.py +++ b/web_console_v2/api/testing/workflow_template/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. +# Copyright 2023 The FedLearner Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/web_console_v2/api/testing/workflow_template/psi_join_tree_model_no_label.py b/web_console_v2/api/testing/workflow_template/psi_join_tree_model_no_label.py deleted file mode 100644 index 88dde9379..000000000 --- a/web_console_v2/api/testing/workflow_template/psi_join_tree_model_no_label.py +++ /dev/null @@ -1,728 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 -import json - -from google.protobuf.json_format import MessageToDict -from fedlearner_webconsole.proto.workflow_definition_pb2 import ( - WorkflowDefinition, JobDefinition, JobDependency -) -from fedlearner_webconsole.proto.common_pb2 import ( - Variable -) - - -def make_workflow_template(): - workflow = WorkflowDefinition( - group_alias='psi_join_tree_model', - is_left=True, - variables=[ - Variable( - name='image_version', - value='v1.5-rc3', - access_mode=Variable.PEER_READABLE), - Variable( - name='num_partitions', - value='2', - access_mode=Variable.PEER_WRITABLE), - ], - job_definitions=[ - JobDefinition( - name='raw-data-job', - job_type=JobDefinition.RAW_DATA, - is_federated=False, - variables=[ - Variable( - name='input_dir', - value='/app/deploy/integrated_test/tfrecord_raw_data', - access_mode=Variable.PRIVATE), - Variable( - name='file_wildcard', - value='*.rd', - access_mode=Variable.PRIVATE), - Variable( - name='batch_size', - value='1024', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='input_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='worker_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - ], - yaml_template='''{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.raw-data-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "cleanPodPolicy": "All", - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "1000m", - "memory": "2Gi" - }, - "requests": { - "cpu": "1000m", - "memory": "2Gi" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_master.sh" - ], - "args": [], - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_NAME", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "OUTPUT_PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "INPUT_BASE_DIR", - "value": "${workflow.jobs.raw-data-job.variables.input_dir}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/raw_data/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "RAW_DATA_PUBLISH_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_TYPE", - "value": "PSI" - }, - { - "name": "FILE_WILDCARD", - "value": "${workflow.jobs.raw-data-job.variables.file_wildcard}" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": 
"Never" - } - }, - "pair": false, - "replicas": 1 - }, - "Worker": { - "replicas": ${workflow.variables.num_partitions}, - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_worker.sh" - ], - "args": [], - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "BATCH_SIZE", - "value": "${workflow.jobs.raw-data-job.variables.batch_size}" - }, - { - "name": "INPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw-data-job.variables.input_format}" - }, - { - "name": "COMPRESSED_TYPE", - "value": "" - }, - { - "name": "OUTPUT_DATA_FORMAT", - "value": "TF_RECORD" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false - } - } - } -} - ''' - ), - JobDefinition( - name='data-join-job', - job_type=JobDefinition.PSI_DATA_JOIN, - is_federated=True, - variables=[ - Variable( - name='worker_cpu', - value='4000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='rsa_public_key_path', - value='', - access_mode=Variable.PRIVATE), - ], - dependencies=[ - JobDependency(source='raw-data-job') - ], - yaml_template=''' -{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.data-join-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Follower", - "cleanPodPolicy": "All", - "peerSpecs": { - "Follower": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": 
"RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "START_TIME", - "value": "0" - }, - { - "name": "END_TIME", - "value": "999999999999" - }, - { - "name": "NEGATIVE_SAMPLING_RATE", - "value": "1.0" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh" - ], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "3Gi" - }, - "requests": { - "cpu": "2000m", - "memory": "3Gi" - } - }, - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": 1 - }, - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "RSA_KEY_PATH", - "value": "${workflow.jobs.data-join-job.rsa_public_key_path}" - }, - { - "name": "PSI_RAW_DATA_ITER", - "value": "TF_RECORD" - }, - { - "name": "PSI_OUTPUT_BUILDER", - "value": "TF_RECORD" - }, - { - "name": "DATA_BLOCK_BUILDER", - "value": "TF_RECORD" - }, - { - "name": "DATA_BLOCK_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "DATA_BLOCK_DUMP_THRESHOLD", - "value": "524288" - }, - { - "name": "EXAMPLE_ID_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "EXAMPLE_ID_DUMP_THRESHOLD", - "value": "524288" - }, - { - "name": "EXAMPLE_JOINER", - "value": "SORT_RUN_JOINER" - }, - { - "name": "SIGN_RPC_TIMEOUT_MS", - "value": "128000" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": 
"${workflow.jobs.data-join-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": ${workflow.variables.num_partitions} - } - } - } -} - ''' - ), - JobDefinition( - name='train-job', - job_type=JobDefinition.TREE_MODEL_TRAINING, - is_federated=True, - variables=[ - Variable( - name='worker_cpu', - value='4000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='8Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='num_parallel', - value='4', - access_mode=Variable.PEER_WRITABLE), - ], - dependencies=[ - JobDependency(source='data-join-job') - ], - yaml_template=''' - { - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.train-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Follower", - "cleanPodPolicy": "All", - "peerSpecs": { - "Leader": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "MODE", - "value": "train" - }, - { - "name": "NUM_PARALLEL", - "value": "${workflow.jobs.train-job.variables.num_parallel}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/trainer/run_tree_worker.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.train-job.variables.worker_cpu}", - "memory": "${workflow.jobs.train-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.train-job.variables.worker_cpu}", - "memory": "${workflow.jobs.train-job.variables.worker_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": 1 - } - } - } -} - ''' - ) - ]) - - return workflow - - -if __name__ == '__main__': - 
print(json.dumps(MessageToDict( - make_workflow_template(), - preserving_proto_field_name=True, - including_default_value_fields=True))) diff --git a/web_console_v2/api/testing/workflow_template/psi_join_tree_model_with_label.py b/web_console_v2/api/testing/workflow_template/psi_join_tree_model_with_label.py deleted file mode 100644 index a872671b3..000000000 --- a/web_console_v2/api/testing/workflow_template/psi_join_tree_model_with_label.py +++ /dev/null @@ -1,748 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import json - -from google.protobuf.json_format import MessageToDict -from fedlearner_webconsole.proto.workflow_definition_pb2 import ( - WorkflowDefinition, JobDefinition, JobDependency -) -from fedlearner_webconsole.proto.common_pb2 import ( - Variable -) - - -def make_workflow_template(): - workflow = WorkflowDefinition( - group_alias='psi_join_tree_model', - is_left=False, - variables=[ - Variable( - name='image_version', - value='v1.5-rc3', - access_mode=Variable.PEER_READABLE), - Variable( - name='num_partitions', - value='2', - access_mode=Variable.PEER_WRITABLE), - ], - job_definitions=[ - JobDefinition( - name='raw-data-job', - job_type=JobDefinition.RAW_DATA, - is_federated=False, - variables=[ - Variable( - name='input_dir', - value='/app/deploy/integrated_test/tfrecord_raw_data', - access_mode=Variable.PRIVATE), - Variable( - name='file_wildcard', - value='*.rd', - access_mode=Variable.PRIVATE), - Variable( - name='batch_size', - value='1024', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='input_format', - value='TF_RECORD', - access_mode=Variable.PRIVATE), - Variable( - name='worker_cpu', - value='2000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - ], - yaml_template='''{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.raw-data-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "cleanPodPolicy": "All", - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "1000m", - "memory": "2Gi" - }, - "requests": { - "cpu": "1000m", - "memory": "2Gi" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_master.sh" - ], - "args": [], - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": 
"APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_NAME", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "OUTPUT_PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "INPUT_BASE_DIR", - "value": "${workflow.jobs.raw-data-job.variables.input_dir}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/raw_data/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "RAW_DATA_PUBLISH_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "DATA_PORTAL_TYPE", - "value": "PSI" - }, - { - "name": "FILE_WILDCARD", - "value": "${workflow.jobs.raw-data-job.variables.file_wildcard}" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false, - "replicas": 1 - }, - "Worker": { - "replicas": ${workflow.variables.num_partitions}, - "template": { - "spec": { - "containers": [ - { - "resources": { - "limits": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.raw-data-job.variables.worker_cpu}", - "memory": "${workflow.jobs.raw-data-job.variables.worker_mem}" - } - }, - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "command": [ - "/app/deploy/scripts/data_portal/run_data_portal_worker.sh" - ], - "args": [], - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.raw-data-job.name}" - }, - { - "name": "BATCH_SIZE", - "value": "${workflow.jobs.raw-data-job.variables.batch_size}" - }, - { - "name": "INPUT_DATA_FORMAT", - "value": "${workflow.jobs.raw-data-job.variables.input_format}" - }, - { - "name": "COMPRESSED_TYPE", - "value": "" - }, - { - "name": "OUTPUT_DATA_FORMAT", - "value": "TF_RECORD" - } - ], - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow" - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ], - "restartPolicy": "Never" - } - }, - "pair": false - } - } - } -} - ''' - ), - JobDefinition( - name='data-join-job', - job_type=JobDefinition.PSI_DATA_JOIN, - is_federated=True, - variables=[ - Variable( - name='worker_cpu', - value='4000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='4Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='rsa_private_key_path', - value='', - access_mode=Variable.PRIVATE), - ], - dependencies=[ - JobDependency(source='raw-data-job') - ], - yaml_template=''' -{ - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - 
"name": "${workflow.jobs.data-join-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Leader", - "cleanPodPolicy": "All", - "peerSpecs": { - "Follower": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Master": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - }, - { - "name": "START_TIME", - "value": "0" - }, - { - "name": "END_TIME", - "value": "999999999999" - }, - { - "name": "NEGATIVE_SAMPLING_RATE", - "value": "1.0" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh" - ], - "resources": { - "limits": { - "cpu": "2000m", - "memory": "3Gi" - }, - "requests": { - "cpu": "2000m", - "memory": "3Gi" - } - }, - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": 1 - }, - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "ROLE", - "value": "follower" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.data-join-job.name}" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": "${project.variables.storage_root_dir}/data_source/${workflow.jobs.data-join-job.name}" - }, - { - "name": "RSA_KEY_PATH", - "value": "${workflow.jobs.data-join-job.rsa_private_key_path}" - }, - { - "name": "RSA_PRIVATE_KEY_PATH", - "value": "${workflow.jobs.data-join-job.rsa_private_key_path}" - }, - { - "name": "PSI_RAW_DATA_ITER", - "value": "TF_RECORD" - }, - { - "name": "PSI_OUTPUT_BUILDER", - "value": "TF_RECORD" - }, - { - "name": "DATA_BLOCK_BUILDER", - "value": 
"TF_RECORD" - }, - { - "name": "DATA_BLOCK_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "DATA_BLOCK_DUMP_THRESHOLD", - "value": "524288" - }, - { - "name": "EXAMPLE_ID_DUMP_INTERVAL", - "value": "600" - }, - { - "name": "EXAMPLE_ID_DUMP_THRESHOLD", - "value": "524288" - }, - { - "name": "EXAMPLE_JOINER", - "value": "SORT_RUN_JOINER" - }, - { - "name": "SIGN_RPC_TIMEOUT_MS", - "value": "128000" - }, - { - "name": "RAW_DATA_SUB_DIR", - "value": "portal_publish_dir/${workflow.jobs.raw-data-job.name}" - }, - { - "name": "PARTITION_NUM", - "value": "${workflow.variables.num_partitions}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.data-join-job.variables.worker_cpu}", - "memory": "${workflow.jobs.data-join-job.variables.worker_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": ${workflow.variables.num_partitions} - } - } - } -} - ''' - ), - JobDefinition( - name='train-job', - job_type=JobDefinition.TREE_MODEL_TRAINING, - is_federated=True, - variables=[ - Variable( - name='worker_cpu', - value='4000m', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='worker_mem', - value='8Gi', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='send_scores_to_follower', - value='True', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='send_metrics_to_follower', - value='True', - access_mode=Variable.PEER_WRITABLE), - Variable( - name='num_parallel', - value='4', - access_mode=Variable.PEER_WRITABLE), - ], - dependencies=[ - JobDependency(source='data-join-job') - ], - yaml_template=''' - { - "apiVersion": "fedlearner.k8s.io/v1alpha1", - "kind": "FLApp", - "metadata": { - "name": "${workflow.jobs.train-job.name}", - "namespace": "${project.variables.namespace}" - }, - "spec": { - "role": "Leader", - "cleanPodPolicy": "All", - "peerSpecs": { - "Leader": { - "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80", - "authority": "${project.participants[0].egress_domain}", - "extraHeaders": { - "x-host": "default.fedlearner.operator" - } - } - }, - "flReplicaSpecs": { - "Worker": { - "template": { - "spec": { - "restartPolicy": "Never", - "containers": [ - { - "env": [ - ${system.basic_envs}, - { - "name": "EGRESS_URL", - "value": "fedlearner-stack-ingress-nginx-controller.default.svc.cluster.local:80" - }, - { - "name": "EGRESS_HOST", - "value": "${project.participants[0].egress_host}" - }, - { - "name": "EGRESS_DOMAIN", - "value": "${project.participants[0].egress_domain}" - }, - { - "name": "APPLICATION_ID", - "value": "${workflow.jobs.train-job.name}" - }, - { - "name": "STORAGE_ROOT_PATH", - "value": "${project.variables.storage_root_dir}" - }, - { - "name": "ROLE", - "value": "leader" - }, - { - "name": "OUTPUT_BASE_DIR", - "value": 
"${project.variables.storage_root_dir}/job_output/${workflow.jobs.train-job.name}" - }, - { - "name": "MODE", - "value": "train" - }, - { - "name": "SEND_SCORES_TO_FOLLOWER", - "value": "${workflow.jobs.train-job.variables.send_scores_to_follower}" - }, - { - "name": "SEND_METRICS_TO_FOLLOWER", - "value": "${workflow.jobs.train-job.variables.send_metrics_to_follower}" - }, - { - "name": "NUM_PARALLEL", - "value": "${workflow.jobs.train-job.variables.num_parallel}" - }, - { - "name": "DATA_SOURCE", - "value": "${workflow.jobs.data-join-job.name}" - } - ], - "imagePullPolicy": "IfNotPresent", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "data" - } - ], - "image": "artifact.bytedance.com/fedlearner/fedlearner:${workflow.variables.image_version}", - "ports": [ - { - "containerPort": 50051, - "name": "flapp-port" - } - ], - "command": [ - "/app/deploy/scripts/wait4pair_wrapper.sh" - ], - "args": [ - "/app/deploy/scripts/trainer/run_tree_worker.sh" - ], - "resources": { - "limits": { - "cpu": "${workflow.jobs.train-job.variables.worker_cpu}", - "memory": "${workflow.jobs.train-job.variables.worker_mem}" - }, - "requests": { - "cpu": "${workflow.jobs.train-job.variables.worker_cpu}", - "memory": "${workflow.jobs.train-job.variables.worker_mem}" - } - } - } - ], - "imagePullSecrets": [ - { - "name": "regcred" - } - ], - "volumes": [ - { - "persistentVolumeClaim": { - "claimName": "pvc-fedlearner-default" - }, - "name": "data" - } - ] - } - }, - "pair": true, - "replicas": 1 - } - } - } -} - ''' - ) - ]) - - return workflow - - -if __name__ == '__main__': - print(json.dumps(MessageToDict( - make_workflow_template(), - preserving_proto_field_name=True, - including_default_value_fields=True))) diff --git a/web_console_v2/api/testing/workflow_template/test_template_left.py b/web_console_v2/api/testing/workflow_template/test_template_left.py new file mode 100644 index 000000000..dd09a0ab2 --- /dev/null +++ b/web_console_v2/api/testing/workflow_template/test_template_left.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# coding: utf-8 +import json +from google.protobuf.json_format import MessageToDict +from fedlearner_webconsole.proto.workflow_definition_pb2 import (WorkflowDefinition, JobDefinition, JobDependency) +from fedlearner_webconsole.proto.common_pb2 import (Variable) + + +def make_workflow_template(): + workflow = WorkflowDefinition(group_alias='test_template', + variables=[ + Variable(name='image_version', + value='v1.5-rc3', + access_mode=Variable.PEER_READABLE), + Variable(name='num_partitions', value='4', access_mode=Variable.PEER_WRITABLE), + ], + job_definitions=[ + JobDefinition(name='raw-data-job', + job_type=JobDefinition.RAW_DATA, + is_federated=False, + variables=[ + Variable(name='input_dir', + value='/app/deploy/integrated_test/tfrecord_raw_data', + access_mode=Variable.PRIVATE), + Variable(name='file_wildcard', + value='*.rd', + access_mode=Variable.PRIVATE), + Variable(name='batch_size', + value='1024', + access_mode=Variable.PEER_WRITABLE), + Variable(name='input_format', + value='TF_RECORD', + access_mode=Variable.PRIVATE), + Variable(name='output_format', + value='TF_RECORD', + access_mode=Variable.PRIVATE), + Variable(name='master_cpu', + value='2000m', + access_mode=Variable.PEER_WRITABLE), + Variable(name='master_mem', + value='3Gi', + access_mode=Variable.PEER_WRITABLE), + Variable(name='worker_cpu', + value='2000m', + access_mode=Variable.PEER_WRITABLE), + Variable(name='worker_mem', + value='4Gi', + access_mode=Variable.PEER_WRITABLE), + ], + yaml_template="""{'metadata':{'name': self.name,'labels':{}}, + self.variables.master_cpu: self.variables.master_mem, + '1': workflow.variables.image_version, + '2': workflow.jobs['raw-data-job'].variables.batch_size, + project.participants[0].egress_domain: project.variables.storage_root_path, + project.id: project.name, + workflow.id: workflow.uuid, + workflow.name: workflow.creator + ,}"""), + JobDefinition(name='data-join-job', + job_type=JobDefinition.DATA_JOIN, + is_federated=True, + variables=[ + Variable(name='master_cpu', + value='2000m', + access_mode=Variable.PEER_WRITABLE), + Variable(name='master_mem', + value='3Gi', + access_mode=Variable.PEER_WRITABLE), + Variable(name='worker_cpu', + value='4000m', + access_mode=Variable.PEER_WRITABLE), + Variable(name='worker_mem', + value='4Gi', + access_mode=Variable.PEER_WRITABLE), + ], + dependencies=[JobDependency(source='raw-data-job')], + yaml_template="""{'metadata':{'name': self.name,'labels':{}}, + self.variables.master_cpu: self.variables.master_mem, + '1': workflow.variables.image_version, + '2': workflow.jobs['raw-data-job'].variables.batch_size}""") + ]) + + return workflow + + +if __name__ == '__main__': + print( + json.dumps( + MessageToDict(make_workflow_template(), + preserving_proto_field_name=True, + including_default_value_fields=True))) diff --git a/web_console_v2/api/tests/BUILD.bazel b/web_console_v2/api/tests/BUILD.bazel new file mode 100644 index 000000000..220b68c47 --- /dev/null +++ b/web_console_v2/api/tests/BUILD.bazel @@ -0,0 +1,32 @@ +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +sh_test( + name = "ci", + srcs = [ + "ci.sh", + ], + data = [ + "//web_console_v2/api/cmds:flask_cli_bin", + ], + env = { + "APM_SERVER_ENDPOINT": "/dev/null", + "FLASK_APP": "web_console_v2/api/command:app", + }, +) + +py_test( + name = "meta_yaml_test", + srcs = [ + "meta_yaml_test.py", + ], + data = [ + "//web_console_v2/client/src/jobMetaDatas:srcs", + ], + imports = [".."], + main = "meta_yaml_test.py", + deps = [ + 
"//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:service_lib", + "//web_console_v2/api/testing:common_lib", + ], +) diff --git a/web_console_v2/api/tests/ci.sh b/web_console_v2/api/tests/ci.sh new file mode 100755 index 000000000..f6ae76fd5 --- /dev/null +++ b/web_console_v2/api/tests/ci.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -ex + +# Run CLI tests +web_console_v2/api/cmds/flask_cli_bin routes + +# Run migrations tests +migration_heads=$( + web_console_v2/api/cmds/flask_cli_bin \ + db heads -d web_console_v2/api/migrations | wc -l +) +[[ $migration_heads -eq 1 ]] diff --git a/web_console_v2/api/tests/meta_yaml_test.py b/web_console_v2/api/tests/meta_yaml_test.py new file mode 100644 index 000000000..3689f1e14 --- /dev/null +++ b/web_console_v2/api/tests/meta_yaml_test.py @@ -0,0 +1,58 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +import os +import json +import unittest +from unittest.mock import patch + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.workflow_template.service import dict_to_workflow_definition,\ + dict_to_editor_info, _format_template_with_yaml_editor +from testing.common import BaseTestCase + + +class MetaYamlTest(BaseTestCase): + + @patch('fedlearner_webconsole.setting.service.SettingService.get_application_version') + def test_meta_yaml(self, mock_version): + mock_version.return_value.version.version = '2.2.2.2' + # test if meta_yaml in frontend is in right form + editor_info = {'yaml_editor_infos': {}} + config = {'job_definitions': []} + meta_yaml_path = 'web_console_v2/client/src/jobMetaDatas' + for meta_yaml_file in os.listdir(meta_yaml_path): + job_name = os.path.splitext(meta_yaml_file)[0] + file_suffix = os.path.splitext(meta_yaml_file)[1] + if file_suffix == '.metayml': + with open(f'{meta_yaml_path}/{job_name}.metayml', 'r', encoding='utf-8') as f: + metayml = f.read() + with open(f'{meta_yaml_path}/{job_name}.json', 'r', encoding='utf-8') as f: + slots = json.load(f) + else: + continue + editor_info['yaml_editor_infos'][job_name] = {} + editor_info['yaml_editor_infos'][job_name]['slots'] = slots + editor_info['yaml_editor_infos'][job_name]['meta_yaml'] = metayml + config['job_definitions'].append({'name': job_name, 'easy_mode': True}) + + editor_info_proto = dict_to_editor_info(editor_info) + template_proto = dict_to_workflow_definition(config) + with db.session_scope() as session: + _format_template_with_yaml_editor(template_proto, editor_info_proto, session) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/api/tools/BUILD.bazel b/web_console_v2/api/tools/BUILD.bazel new file mode 100644 index 000000000..851865c23 --- /dev/null +++ b/web_console_v2/api/tools/BUILD.bazel @@ -0,0 +1,28 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//web_console_v2/api:console_api_package"]) + +# TODO(wangsen.0914): splits this lib into 
subpackages +py_library( + name = "lib", + srcs = glob( + ["**/*.py"], + ), + imports = [".."], + deps = [ + "//web_console_v2/api:envs_lib", + "//web_console_v2/api/fedlearner_webconsole:app_lib", + "//web_console_v2/api/fedlearner_webconsole:db_lib", + "//web_console_v2/api/fedlearner_webconsole:initial_db_lib", + "//web_console_v2/api/fedlearner_webconsole/dataset:models_lib", + "//web_console_v2/api/fedlearner_webconsole/job:models_lib", + "//web_console_v2/api/fedlearner_webconsole/mmgr:models_lib", + "//web_console_v2/api/fedlearner_webconsole/participant:models_lib", + "//web_console_v2/api/fedlearner_webconsole/project:models_lib", + "//web_console_v2/api/fedlearner_webconsole/serving:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow:models_lib", + "//web_console_v2/api/fedlearner_webconsole/workflow_template:models_lib", + "//web_console_v2/api/protocols/fedlearner_webconsole/proto:py_proto", + "@common_sqlalchemy//:pkg", + ], +) diff --git a/web_console_v2/api/tools/dataset_migration/dataset_importer_migration/dataset_file_migration.py b/web_console_v2/api/tools/dataset_migration/dataset_importer_migration/dataset_file_migration.py new file mode 100644 index 000000000..ecb9c4a12 --- /dev/null +++ b/web_console_v2/api/tools/dataset_migration/dataset_importer_migration/dataset_file_migration.py @@ -0,0 +1,65 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
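A note ahead of the module body below: dataset_file_migration.py copies each dataset's `rds` folder into its single data batch path and then removes the source. A local sketch of that copy-then-remove contract, with `os`/`shutil` standing in for `tf.io.gfile` (an assumption that only local paths are involved; the real script goes through gfile so HDFS and object-store paths also work):

```python
import os
import shutil
import tempfile

def _remove_local(path: str):
    # Mirrors _remove: tolerate missing paths, handle files and directories.
    if not os.path.exists(path):
        return
    if os.path.isdir(path):
        shutil.rmtree(path)
    else:
        os.remove(path)

def _copy_local(src: str, dst: str):
    # Mirrors _copy_file: destination is replaced, directories copied recursively.
    _remove_local(dst)
    if os.path.isdir(src):
        shutil.copytree(src, dst)
    else:
        shutil.copy(src, dst)

root = tempfile.mkdtemp()
rds = os.path.join(root, 'rds')
os.makedirs(rds)
open(os.path.join(rds, 'part-0'), 'w').close()
batch = os.path.join(root, 'batch')
_copy_local(rds, batch)
_remove_local(rds)
assert os.path.exists(os.path.join(batch, 'part-0')) and not os.path.exists(rds)
```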
+# + +import os +import logging +from tensorflow.io import gfile +# pylint: disable=unused-import +import tensorflow_io + +from fedlearner_webconsole.db import db + + +def _remove(path: str): + if not gfile.exists(path): + return + if gfile.isdir(path): + gfile.rmtree(path) + return + gfile.remove(path) + + +def _copy_file(src: str, dst: str): + _remove(dst) + if not gfile.isdir(src): + gfile.copy(src, dst) + return + gfile.makedirs(dst) + sub_srcs = gfile.glob(os.path.join(src, '*')) + for sub_src in sub_srcs: + _copy_file(sub_src, os.path.join(dst, os.path.basename(sub_src))) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') + engine = db.engine + datasets = engine.execute( + 'SELECT id, name, path FROM datasets_v2 WHERE meta_info = \'{"state": "ANALYZED"}\'').fetchall() + logging.info(f'[migration] get all available datasets, total num is {len(datasets)}') + for dataset in datasets: + data_batches = engine.execute(f'SELECT path FROM data_batches_v2 WHERE dataset_id = {dataset[0]}').fetchall() + if len(data_batches) != 1: + logging.warning(f'[migration] dataset {dataset[1]} has more than one data_batch, skip!') + continue + data_batch_path, = data_batches[0] + + rds_path = os.path.join(dataset[2], 'rds') + if not gfile.isdir(rds_path): + logging.warning(f'[migration] dataset {dataset[1]} has no rds folder, skip!') + continue + logging.info(f'[migration] start to migrate dataset {dataset[1]}, dataset path: {dataset[2]}') + _copy_file(rds_path, data_batch_path) + _remove(rds_path) + logging.info(f'[migration] migrated dataset {dataset[1]}, dataset path: {dataset[2]} successfully') diff --git a/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/README.md b/web_console_v2/api/tools/dataset_migration/dataset_importer_migration/migrate.sh b/web_console_v2/api/tools/dataset_migration/dataset_importer_migration/migrate.sh new file mode 100644 index 000000000..f8aebdc97 --- /dev/null +++ b/web_console_v2/api/tools/dataset_migration/dataset_importer_migration/migrate.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +ROOT_DIRECTORY=$1 +/bin/sh $ROOT_DIRECTORY/runtime_env.sh + +python3 $ROOT_DIRECTORY/tools/dataset_importer_migration/dataset_file_migration.py \ No newline at end of file diff --git a/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/README.md b/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/README.md new file mode 100644 index 000000000..2049ed1a3 --- /dev/null +++ b/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/README.md @@ -0,0 +1,10 @@ +# Run inside the platform +``` +export PRE_START_HOOK=hook:before_app_start +FLASK_APP=command:app flask migrate-dataset-job-name +``` + +# Run outside the platform +``` +FLASK_APP=command:app flask migrate-dataset-job-name +``` \ No newline at end of file diff --git a/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/dataset_job_name_migration.py b/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/dataset_job_name_migration.py new file mode 100644 index 000000000..68a208722 --- /dev/null +++ b/web_console_v2/api/tools/dataset_migration/dataset_job_name_migration/dataset_job_name_migration.py @@ -0,0 +1,39 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging + +from fedlearner_webconsole.db import db +from fedlearner_webconsole.dataset.models import DatasetJob + + +def migrate_dataset_job_name(): + logging.info('[migration]: start to migrate dataset_job name from result dataset name') + with db.session_scope() as session: + dataset_job_ids = session.query(DatasetJob.id).filter(DatasetJob.name.is_(None)).all() + logging.info(f'[migration]: {len(dataset_job_ids)} dataset_jobs need to migrate name') + failed_ids = [] + for dataset_job_id in dataset_job_ids: + try: + with db.session_scope() as session: + dataset_job: DatasetJob = session.query(DatasetJob).get(dataset_job_id) + dataset_job.name = dataset_job.output_dataset.name + session.commit() + except Exception as e: # pylint: disable=broad-except + failed_ids.append(dataset_job_id) + logging.error(f'[migration]: migrate dataset_job {dataset_job_id} failed: {str(e)}') + if failed_ids: + logging.error(f'[migration]: {failed_ids} failed') + logging.info('[migration]: finish dataset_job name migration') diff --git a/web_console_v2/api/tools/json_formatter.py b/web_console_v2/api/tools/json_formatter.py new file mode 100644 index 000000000..b6fd7ca03 --- /dev/null +++ b/web_console_v2/api/tools/json_formatter.py @@ -0,0 +1,76 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# pylint: disable=redefined-outer-name +import argparse +import glob +import json +import os + +import sys +from pathlib import Path + + +def _format_json(content: str) -> str: + # set ensure_ascii to false to support chinese characters + return json.dumps(json.loads(content), ensure_ascii=False, indent=4, sort_keys=True) + + +def format_json_file(file_path: str, check_only: bool = False) -> bool: + """Formats or checks json file format. 
+ + Returns: + True if the file should be formatted + """ + original = Path(file_path).read_text(encoding='utf-8') + formatted = _format_json(original) + should_format = original != formatted + if should_format and not check_only: + Path(file_path).write_text(formatted, encoding='utf-8') + return should_format + + +def format_json_files(file_wildcard: str, check_only: bool = False) -> bool: + if Path(file_wildcard).is_dir(): + all_files = [] + for root, dirs, files in os.walk(file_wildcard): + for file in files: + all_files.append(os.path.join(root, file)) + else: + all_files = glob.glob(file_wildcard, recursive=True) + + any_formatted = False + for f in all_files: + formatted = format_json_file(f, check_only) + if formatted: + if check_only: + print(f'JSON format is invalid: {f}') + else: + print(f'{f} is formatted') + any_formatted = any_formatted or formatted + return any_formatted + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Json formatter') + parser.add_argument('-c', '--check_only', action='store_true', help='Whether to check json format only') + parser.add_argument('files', type=str, nargs='+', help='list of json file wildcards') + args = parser.parse_args() + + formatted = False + for file_wildcard in args.files: + # Call first, then combine: `or` short-circuits, and every wildcard must be processed. + formatted = format_json_files(file_wildcard, args.check_only) or formatted + if formatted and args.check_only: + sys.exit(1) diff --git a/web_console_v2/api/tools/local_runner/app_a.py b/web_console_v2/api/tools/local_runner/app_a.py deleted file mode 100644 index a9f9cb314..000000000 --- a/web_console_v2/api/tools/local_runner/app_a.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os -import logging - -from envs import Envs -from fedlearner_webconsole.app import create_app -from tools.local_runner.initial_db import init_db - -BASE_DIR = os.path.abspath(os.path.dirname(__file__)) - - -class Config(object): - SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASE_DIR, 'app_a.db') - MYSQL_CHARSET = 'utf8mb4' - SQLALCHEMY_TRACK_MODIFICATIONS = False - JSON_AS_ASCII = False - JWT_SECRET_KEY = 'secret' - PROPAGATE_EXCEPTIONS = True - LOGGING_LEVEL = logging.INFO - GRPC_LISTEN_PORT = 1993 - JWT_ACCESS_TOKEN_EXPIRES = 86400 - STORAGE_ROOT = Envs.STORAGE_ROOT - - START_GRPC_SERVER = True - START_SCHEDULER = True - START_COMPOSER = True - - -app = create_app(Config) - - -@app.cli.command('create-db') -def create_db(): - init_db(1991, 'fl-demo2.com') diff --git a/web_console_v2/api/tools/local_runner/app_b.py b/web_console_v2/api/tools/local_runner/app_b.py deleted file mode 100644 index e9f7cb612..000000000 --- a/web_console_v2/api/tools/local_runner/app_b.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
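json_formatter.py above relies on formatting being a fixed point: check-only mode flags a file exactly when formatting would change it, so an already-formatted file must re-format to itself. A quick sanity sketch using the same `json.dumps` arguments as its `_format_json`:

```python
import json

def fmt(content: str) -> str:
    # Same arguments as _format_json: stable key order, readable unicode.
    return json.dumps(json.loads(content), ensure_ascii=False, indent=4, sort_keys=True)

raw = '{"b": 1, "a": {"c": [1, 2]}}'
once = fmt(raw)
assert fmt(once) == once  # already-formatted files are reported as clean
print(once)
```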
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 -import os -import logging - -from envs import Envs -from fedlearner_webconsole.app import create_app -from tools.local_runner.initial_db import init_db - -BASE_DIR = os.path.abspath(os.path.dirname(__file__)) - - -class Config(object): - SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASE_DIR, 'app_b.db') - MYSQL_CHARSET = 'utf8mb4' - SQLALCHEMY_TRACK_MODIFICATIONS = False - JSON_AS_ASCII = False - JWT_SECRET_KEY = 'secret' - PROPAGATE_EXCEPTIONS = True - LOGGING_LEVEL = logging.INFO - GRPC_LISTEN_PORT = 1991 - JWT_ACCESS_TOKEN_EXPIRES = 86400 - STORAGE_ROOT = Envs.STORAGE_ROOT - - START_GRPC_SERVER = True - START_SCHEDULER = True - START_COMPOSER = False - - -app = create_app(Config) - - -@app.cli.command('create-db') -def create_db(): - init_db(1993, 'fl-demo1.com') diff --git a/web_console_v2/api/tools/local_runner/initial_db.py b/web_console_v2/api/tools/local_runner/initial_db.py deleted file mode 100644 index 44a510ffd..000000000 --- a/web_console_v2/api/tools/local_runner/initial_db.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 - - - -from google.protobuf.json_format import ParseDict - -from fedlearner_webconsole.db import db -from fedlearner_webconsole.auth.models import User -from fedlearner_webconsole.proto import project_pb2 -from fedlearner_webconsole.project.models import Project - - -def init_db(port, domain_name): - db.create_all() - user = User(username='ada') - user.set_password('ada') - db.session.add(user) - config = { - 'name': 'test', - 'participants': [ - { - 'name': f'{domain_name}', - 'url': f'127.0.0.1:{port}', - 'domain_name': f'{domain_name}', - 'grpc_spec': { - 'authority': f'{domain_name[:-4]}-client-auth.com' - } - } - ], - 'variables': [ - { - 'name': 'namespace', - 'value': 'default' - }, - { - 'name': 'storage_root_dir', - 'value': '/data' - }, - { - 'name': 'EGRESS_URL', - 'value': f'127.0.0.1:{port}' - } - - ] - } - project = Project(name='test', - config=ParseDict(config, - project_pb2.Project()).SerializeToString()) - db.session.add(project) - db.session.commit() diff --git a/web_console_v2/api/tools/local_runner/run_a.sh b/web_console_v2/api/tools/local_runner/run_a.sh deleted file mode 100755 index 6622b5f4c..000000000 --- a/web_console_v2/api/tools/local_runner/run_a.sh +++ /dev/null @@ -1,11 +0,0 @@ -export PYTHONPATH=$PYTHONPATH:"../../" -export FLASK_APP=app_a:app -export FLASK_ENV=development -flask create-db -export K8S_CONFIG_PATH=$1 -export FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL=10 -export SQLALCHEMY_DATABASE_URI="sqlite:///app_a.db" -export FEATURE_MODEL_WORKFLOW_HOOK=True -export FEATURE_MODEL_K8S_HOOK=True -export ES_READ_HOST=172.21.8.76 # aliyun-demo1 fedlearner-stack-elasticsearch-client -flask run --host=0.0.0.0 --no-reload --eager-loading -p 9001 diff --git a/web_console_v2/api/tools/local_runner/run_b.sh b/web_console_v2/api/tools/local_runner/run_b.sh deleted file mode 100755 index 340f94763..000000000 --- a/web_console_v2/api/tools/local_runner/run_b.sh +++ /dev/null @@ -1,11 +0,0 @@ -export PYTHONPATH=$PYTHONPATH:"../../" -export FLASK_APP=app_b:app -export FLASK_ENV=development -flask create-db -export K8S_CONFIG_PATH=$1 -export FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL=1 -export SQLALCHEMY_DATABASE_URI="sqlite:///app_b.db" -export FEATURE_MODEL_WORKFLOW_HOOK=True -export FEATURE_MODEL_K8S_HOOK=True -export ES_READ_HOST=172.21.14.199 # aliyun-demo2 fedlearner-stack-elasticsearch-client -flask run --host=0.0.0.0 --no-reload --eager-loading -p 9002 diff --git a/web_console_v2/api/tools/migrate_connect_to_test/migrate_connect_to_test.py b/web_console_v2/api/tools/migrate_connect_to_test/migrate_connect_to_test.py new file mode 100644 index 000000000..d96090928 --- /dev/null +++ b/web_console_v2/api/tools/migrate_connect_to_test/migrate_connect_to_test.py @@ -0,0 +1,59 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +from sqlalchemy.orm import Session +from envs import Envs +from fedlearner_webconsole.db import db +from fedlearner_webconsole.participant.models import Participant +from fedlearner_webconsole.participant.k8s_utils import create_or_update_participant_in_k8s +from fedlearner_webconsole.k8s.k8s_client import k8s_client + + +def _migrate_participant(session: Session): + # Defensively delete dirty data first. + deleted_count = session.query(Participant).filter_by(domain_name='fl-bytedance-test.com').delete() + logging.info(f'Deleting {deleted_count} rows whose `domain_name` is `fl-bytedance-test.com`') + + participant = session.query(Participant).filter_by(domain_name='fl-bytedance.com').first() + if participant is None: + error_msg = 'Failed to find participant whose `domain_name` is `fl-bytedance.com`' + logging.error(error_msg) + raise RuntimeError(error_msg) + + logging.info('Updating domain_name from `fl-bytedance.com` to `fl-bytedance-test.com`') + participant.domain_name = 'fl-bytedance-test.com' + participant.host = 'bytedance-test.fedlearner.net' + participant.port = 443 + + +def migrate_connect_to_test(): + logging.basicConfig(level=logging.DEBUG) + with db.session_scope() as session: + _migrate_participant(session) + + logging.info('Updating ingress and service resources in kubernetes...') + create_or_update_participant_in_k8s('fl-bytedance-test.com', + 'bytedance-test.fedlearner.net', + 443, + namespace=Envs.K8S_NAMESPACE) + k8s_client.delete_service(name='fl-bytedance') + k8s_client.delete_ingress(name='fl-bytedance-client-auth') + session.commit() + logging.info('Congratulations! Migration is done.') + + +if __name__ == '__main__': + migrate_connect_to_test() diff --git a/web_console_v2/api/tools/project_cleanup.py b/web_console_v2/api/tools/project_cleanup.py new file mode 100644 index 000000000..06dd46fa0 --- /dev/null +++ b/web_console_v2/api/tools/project_cleanup.py @@ -0,0 +1,118 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
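migrate_connect_to_test.py above deletes any rows already carrying the target domain name before renaming, so a failed run can simply be retried. A generic sketch of that delete-then-update pattern (with a hypothetical mapped `model` argument rather than the repo's `Participant`; illustration only):

```python
from sqlalchemy.orm import Session

def rename_domain(session: Session, model, old: str, new: str) -> None:
    """Defensive rename: clearing the target name first makes reruns idempotent."""
    # Drop any rows already carrying the new name (possibly from a failed run).
    session.query(model).filter_by(domain_name=new).delete()
    row = session.query(model).filter_by(domain_name=old).first()
    if row is None:
        raise RuntimeError(f'no row with domain_name={old!r}')
    row.domain_name = new
```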
+#
+
+import argparse
+import logging
+import sys
+
+from sqlalchemy import or_
+from sqlalchemy.orm import Session
+
+from fedlearner_webconsole.dataset.models import Dataset
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.job.models import Job, JobDependency
+from fedlearner_webconsole.mmgr.models import Model, ModelJob, ModelJobGroup
+from fedlearner_webconsole.participant.models import ProjectParticipant
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.serving.models import ServingModel, ServingDeployment, ServingNegotiator
+from fedlearner_webconsole.workflow.models import Workflow
+
+
+class _ProjectCleaner(object):
+
+    def __init__(self, session: Session, project_id: int):
+        self._session = session
+        self.project_id = project_id
+
+    def _delete_rows(self, model: db.Model):
+        count = self._session.query(model).filter_by(project_id=self.project_id).delete()
+        logging.info(f'Deleted {count} rows from {model.__tablename__}')
+
+    def _delete_datasets(self):
+        datasets = self._session.query(Dataset).filter_by(project_id=self.project_id).all()
+        dataset_count = 0
+        for dataset in datasets:
+            batch_count = 0
+            for batch in dataset.data_batches:
+                self._session.delete(batch)
+                batch_count += 1
+            logging.info(f'Deleted {batch_count} batches for dataset {dataset.id}')
+            if dataset.parent_dataset_job:
+                dataset_job_id = dataset.parent_dataset_job.id
+                self._session.delete(dataset.parent_dataset_job)
+                logging.info(f'Deleted dataset job {dataset_job_id} for dataset {dataset.id}')
+            self._session.delete(dataset)
+            dataset_count += 1
+        logging.info(f'Deleted {dataset_count} rows from {Dataset.__tablename__}')
+
+    def _delete_workflows(self):
+        jobs = self._session.query(Job).filter_by(project_id=self.project_id).all()
+        job_count = 0
+        for job in jobs:
+            job_deps = self._session.query(JobDependency).filter(
+                or_(JobDependency.src_job_id == job.id, JobDependency.dst_job_id == job.id)).all()
+            job_dep_count = 0
+            for job_dep in job_deps:
+                self._session.delete(job_dep)
+                job_dep_count += 1
+            logging.info(f'Deleted {job_dep_count} job deps for job {job.id}')
+            self._session.delete(job)
+            job_count += 1
+        logging.info(f'Deleted {job_count} rows from {Job.__tablename__}')
+        self._delete_rows(Workflow)
+
+    def run(self):
+        # Cleans up model metadata
+        self._delete_rows(ModelJob)
+        self._delete_rows(Model)
+        self._delete_rows(ModelJobGroup)
+        # TODO(hangweiqiang): clean up algorithms
+        # Cleans up serving-related metadata
+        self._delete_rows(ServingModel)
+        self._delete_rows(ServingDeployment)
+        self._delete_rows(ServingNegotiator)
+        # Cleans up datasets
+        self._delete_datasets()
+        # Cleans up workflows
+        self._delete_workflows()
+        # Cleans up participants
+        self._delete_rows(ProjectParticipant)
+        # Deletes the project itself
+        count = self._session.query(Project).filter_by(id=self.project_id).delete()
+        logging.info(f'Deleted {count} rows from {Project.__tablename__}')
+
+
+def delete_project(project_id: int):
+    """Deletes a project and its related resources' metadata.
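+
+    Usage sketch (flags per the argparse setup below; assumes it is run from
+    web_console_v2/api so the fedlearner_webconsole package is importable):
+        python tools/project_cleanup.py --project_id=123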
+
+    TODO(linfan.fine): clean up resources on the disk."""
+    with db.session_scope() as session:
+        project = session.query(Project).get(project_id)
+        project_name = project.name if project is not None else '[NOT EXISTING]'
+        print(f'You are deleting project (id: {project_id} - name: {project_name}), y/n?')
+        confirm = input()
+        if confirm not in ['y', 'Y', 'yes', 'YES', 'Yes']:
+            return
+        _ProjectCleaner(session, project_id).run()
+        session.commit()
+        logging.info(f'Project {project_id} cleaned up')
+
+
+if __name__ == '__main__':
+    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+    parser = argparse.ArgumentParser(description='Project cleanup')
+    parser.add_argument('--project_id', type=int, required=True, help='ID of the project to clean up')
+    args = parser.parse_args()
+    delete_project(args.project_id)
diff --git a/web_console_v2/api/tools/project_lookup.py b/web_console_v2/api/tools/project_lookup.py
new file mode 100644
index 000000000..d9a6d6663
--- /dev/null
+++ b/web_console_v2/api/tools/project_lookup.py
@@ -0,0 +1,48 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import logging
+import sys
+from typing import List
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.project.models import Project
+from fedlearner_webconsole.utils.hooks import pre_start_hook
+
+
+def lookup(var_names: List[str]):
+    """Finds all projects which contain any of the specified variables."""
+    with db.session_scope() as session:
+        projects = session.query(Project).all()
+        for project in projects:
+            filtered_variables = [var for var in project.get_variables() if var.name in var_names]
+            if filtered_variables:
+                logging.info('============================')
+                logging.info(f'Project ID: {project.id}')
+                logging.info(f'Project name: {project.name}')
+                for var in filtered_variables:
+                    logging.info(f'Var name: {var.name}, Var value: {var.typed_value}')
+
+
+if __name__ == '__main__':
+    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+
+    parser = argparse.ArgumentParser(description='Project lookup')
+    parser.add_argument('--variables', nargs='+', type=str, required=True, help='Variables to look up')
+    args = parser.parse_args()
+
+    pre_start_hook()
+    lookup(args.variables)
diff --git a/web_console_v2/api/tools/start_db.sh b/web_console_v2/api/tools/start_db.sh
new file mode 100644
index 000000000..108efd61f
--- /dev/null
+++ b/web_console_v2/api/tools/start_db.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+PORT=33600
+
+is_existed=$(which mysql)
+if [[ $is_existed == "" ]]
+then
+    echo "Please install mysql first"
+    exit 1
+fi
+
+docker rm -f mysql-fedlearner &> /dev/null
+docker run -itd --name mysql-fedlearner -p $PORT:3306 --rm -e MYSQL_ROOT_PASSWORD=root mysql:8.0.22 --default-authentication-plugin=mysql_native_password
+
+while :
+do
+    mysql -h 0.0.0.0 --port $PORT -uroot -proot -e "CREATE DATABASE IF NOT EXISTS fedlearner;" &> /dev/null
+    if [ $? -eq 0 ]
+    then
+        break
+    fi
+    # Waits briefly before retrying while the container boots.
+    sleep 1
+done
+
+echo "Please run:"
+echo export SQLALCHEMY_DATABASE_URI="mysql+pymysql://root:root@localhost:$PORT/fedlearner"
+
diff --git a/web_console_v2/api/tools/variable_finder.py b/web_console_v2/api/tools/variable_finder.py
new file mode 100644
index 000000000..14a992cc7
--- /dev/null
+++ b/web_console_v2/api/tools/variable_finder.py
@@ -0,0 +1,70 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""A tool to find workflows/templates related to a variable.
+
+Run inside the deployment (the pre-start hook is required there):
+```
+export PRE_START_HOOK=hook:before_app_start
+FLASK_APP=command:app flask find-variable
+```
+
+Otherwise:
+```
+FLASK_APP=command:app flask find-variable
+```
+"""
+import logging
+from typing import Union
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.proto.workflow_definition_pb2 import WorkflowDefinition
+from fedlearner_webconsole.workflow.models import Workflow, WorkflowState
+from fedlearner_webconsole.workflow_template.models import WorkflowTemplate, WorkflowTemplateRevision
+
+
+def _contains_var(config: Union[WorkflowDefinition, None], var_name: str) -> bool:
+    if not config:
+        return False
+    for var in config.variables:
+        if var.name == var_name:
+            return True
+    for job in config.job_definitions:
+        for var in job.variables:
+            if var.name == var_name:
+                return True
+    return False
+
+
+def find(variable_name: str):
+    with db.session_scope() as session:
+        templates = session.query(WorkflowTemplate.id).all()
+        for tid, *_ in templates:
+            template: WorkflowTemplate = session.query(WorkflowTemplate).get(tid)
+            if _contains_var(template.get_config(), variable_name):
+                logging.info(f'[Found variable {variable_name}] template id: {template.id}, name: {template.name}')
+
+        revisions = session.query(WorkflowTemplateRevision.id).all()
+        for rid, *_ in revisions:
+            revision: WorkflowTemplateRevision = session.query(WorkflowTemplateRevision).get(rid)
+            if _contains_var(revision.get_config(), variable_name):
+                logging.info(
+                    f'[Found variable {variable_name}] revision id: {revision.id}, template id: {revision.template_id}')
+
+        workflows = session.query(Workflow.id).filter(
+            Workflow.state.in_((WorkflowState.NEW, WorkflowState.READY, WorkflowState.RUNNING))).all()
+        for wid, *_ in workflows:
+            workflow: Workflow = session.query(Workflow).get(wid)
+            if _contains_var(workflow.get_config(), variable_name):
+                logging.info(f'[Found variable {variable_name}] workflow id: {workflow.id}, name: {workflow.name},'
+                             f' state: {workflow.get_state_for_frontend()}')
diff --git a/web_console_v2/api/tools/workflow_migration/README.md b/web_console_v2/api/tools/workflow_migration/README.md
new file mode 100644
index 000000000..a17455f0f
--- /dev/null
+++ b/web_console_v2/api/tools/workflow_migration/README.md
@@ -0,0 +1,10 @@
+# Run inside the deployment
+```
+export PRE_START_HOOK=hook:before_app_start
+FLASK_APP=command:app flask migrate-workflow-completed-failed-state
+```
+
+# Run outside the deployment
+```
+FLASK_APP=command:app flask migrate-workflow-completed-failed-state
+```
\ No newline at end of file
diff --git a/web_console_v2/api/tools/workflow_migration/workflow_completed_failed.py b/web_console_v2/api/tools/workflow_migration/workflow_completed_failed.py
new file mode 100644
index 000000000..e2d0e672a
--- /dev/null
+++ b/web_console_v2/api/tools/workflow_migration/workflow_completed_failed.py
@@ -0,0 +1,44 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
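+#
+
+# Invoked via the Flask CLI (commands per the README in this directory; the
+# PRE_START_HOOK line applies only when running inside the deployment):
+#   export PRE_START_HOOK=hook:before_app_start
+#   FLASK_APP=command:app flask migrate-workflow-completed-failed-state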
+
+import logging
+
+from fedlearner_webconsole.db import db
+from fedlearner_webconsole.workflow.models import Workflow, WorkflowState
+
+
+def migrate_workflow_completed_failed_state():
+    logging.info('[migration]: start migrating workflow COMPLETED and FAILED states')
+    with db.session_scope() as session:
+        w_ids = session.query(Workflow.id).filter_by(state=WorkflowState.STOPPED).all()
+        logging.info(f'[migration]: {len(w_ids)} STOPPED workflows need to be migrated')
+    failed_ids = []
+    for w_id, *_ in w_ids:
+        try:
+            with db.session_scope() as session:
+                workflow = session.query(Workflow).get(w_id)
+                if workflow.is_finished():
+                    logging.info(f'[migration]: {workflow.name} workflow state changed to COMPLETED')
+                    workflow.state = WorkflowState.COMPLETED
+                elif workflow.is_failed():
+                    logging.info(f'[migration]: {workflow.name} workflow state changed to FAILED')
+                    workflow.state = WorkflowState.FAILED
+                session.commit()
+        except Exception as e:  # pylint: disable=broad-except
+            failed_ids.append(w_id)
+            # Logs the id instead of the name: `workflow` may be unbound if the query failed.
+            logging.error(f'[migration]: workflow {w_id} state change failed: {str(e)}')
+    if failed_ids:
+        logging.error(f'[migration]: {failed_ids} failed')
+    logging.info('[migration]: finished migrating workflow COMPLETED and FAILED states')
diff --git a/web_console_v2/client/.env.development b/web_console_v2/client/.env.development
new file mode 100644
index 000000000..b8929c007
--- /dev/null
+++ b/web_console_v2/client/.env.development
@@ -0,0 +1,3 @@
+REACT_APP_ENABLE_IMAGE_VERSION_PAGE=true
+REACT_APP_ENABLE_MODEL_FAVOURIT=true
+THEME=normal
diff --git a/web_console_v2/client/.env.production b/web_console_v2/client/.env.production
new file mode 100644
index 000000000..bb9a4eb69
--- /dev/null
+++ b/web_console_v2/client/.env.production
@@ -0,0 +1,4 @@
+REACT_APP_ENABLE_IMAGE_VERSION_PAGE=true
+REACT_APP_ENABLE_MODEL_FAVOURIT=false
+THEME=normal
+GENERATE_SOURCEMAP=false
diff --git a/web_console_v2/client/.eslintignore b/web_console_v2/client/.eslintignore
index 896035c6a..5b842b7c4 100644
--- a/web_console_v2/client/.eslintignore
+++ b/web_console_v2/client/.eslintignore
@@ -1,3 +1,9 @@
 .vscode/*
 config/*
 scripts/*
+src/libs/*.js
+dumi/*
+build/*
+coverage/*
+node_modules/*
+public/
diff --git a/web_console_v2/client/.eslintrc.js b/web_console_v2/client/.eslintrc.js
index a3677e834..b81b908a8 100644
--- a/web_console_v2/client/.eslintrc.js
+++ b/web_console_v2/client/.eslintrc.js
@@ -33,6 +33,13 @@ module.exports = {
     '@typescript-eslint/semi': 'off',
     '@typescript-eslint/explicit-module-boundary-types': 'off',
     'prettier/prettier': ['warn', {}, { usePrettierrc: true }],
+    'prefer-const': [
+      'error',
+      {
+        destructuring: 'any',
+        ignoreReadBeforeAssign: false,
+      },
+    ],
   },
   overrides: [
     {
diff --git a/web_console_v2/client/.npmrc b/web_console_v2/client/.npmrc
index b8bf3987d..3c1748897 100644
--- a/web_console_v2/client/.npmrc
+++ b/web_console_v2/client/.npmrc
@@ -1,2 +1,2 @@
 ; Force to use official registry
-registry=https://registry.npmjs.org/
+registry=https://registry.npmjs.org
diff --git a/web_console_v2/client/.prettierignore b/web_console_v2/client/.prettierignore
new file mode 100644
index 000000000..0a2d570ac
--- /dev/null
+++ b/web_console_v2/client/.prettierignore
@@ -0,0 +1,7 @@
+.vscode-test/
+node_modules/
+build/
+dumi/src/.umi/
+dumi/src/.umi-production/
+public/
+coverage/
\ No newline at end of file
diff --git a/web_console_v2/client/.vscodeignore b/web_console_v2/client/.vscodeignore
new file mode 100644
index 000000000..220cbd713
--- /dev/null
+++ 
b/web_console_v2/client/.vscodeignore @@ -0,0 +1,5 @@ +.vscode +build +coverage +node_modules +dumi/src/.umi diff --git a/web_console_v2/client/BUILD.bazel b/web_console_v2/client/BUILD.bazel new file mode 100644 index 000000000..821338eb2 --- /dev/null +++ b/web_console_v2/client/BUILD.bazel @@ -0,0 +1,5 @@ +filegroup( + name = "srcs", + srcs = glob(["**/*"]), + visibility = ["//visibility:public"], +) diff --git a/web_console_v2/client/config/env.js b/web_console_v2/client/config/env.js index b65d2ed8a..5259ff27f 100644 --- a/web_console_v2/client/config/env.js +++ b/web_console_v2/client/config/env.js @@ -86,6 +86,9 @@ function getClientEnvironment(publicUrl) { // which is why it's disabled by default. // It is defined here so it is available in the webpackHotDevClient. FAST_REFRESH: process.env.FAST_REFRESH !== 'false', + // Theme env variable is used in src/styles/index.js. + // It is defined which theme we can use. + THEME: process.env.THEME || 'normal', }, ); // Stringify all values so we can feed into webpack DefinePlugin diff --git a/web_console_v2/client/config/webpack.config.js b/web_console_v2/client/config/webpack.config.js index bdf53a75e..ff172fc9d 100644 --- a/web_console_v2/client/config/webpack.config.js +++ b/web_console_v2/client/config/webpack.config.js @@ -25,7 +25,8 @@ const ModuleNotFoundPlugin = require('react-dev-utils/ModuleNotFoundPlugin'); const ForkTsCheckerWebpackPlugin = require('react-dev-utils/ForkTsCheckerWebpackPlugin'); const typescriptFormatter = require('react-dev-utils/typescriptFormatter'); const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin'); -const AntdDayjsWebpackPlugin = require('antd-dayjs-webpack-plugin'); +const ArcoWebpackPlugin = require('@arco-design/webpack-plugin'); +const MonacoWebpackPlugin = require('monaco-editor-webpack-plugin'); const postcssNormalize = require('postcss-normalize'); @@ -70,6 +71,11 @@ const hasJsxRuntime = (() => { } })(); +const themeEnvToArcoThemeLibNameMap = { + normal: '@arco-themes/react-privacy-computing', + bioland: '@arco-themes/react-privacy-computing-bioland', +}; + // This is the production and development configuration. // It is focused on developer experience, fast rebuilds, and a minimal bundle. module.exports = function (webpackEnv) { @@ -178,7 +184,7 @@ module.exports = function (webpackEnv) { ); }; - return { + const config = { mode: isEnvProduction ? 'production' : isEnvDevelopment && 'development', // Stop compilation early in production bail: isEnvProduction, @@ -230,6 +236,7 @@ module.exports = function (webpackEnv) { minimizer: [ // This is only used in production mode new TerserPlugin({ + extractComments: false, terserOptions: { parse: { // We want terser to parse ecma 8 code. 
However, we don't want it @@ -252,6 +259,8 @@ module.exports = function (webpackEnv) { // Pending further investigation: // https://github.com/terser-js/terser/issues/120 inline: 2, + drop_console: true, + drop_debugger: true, }, mangle: { safari10: true, @@ -295,6 +304,24 @@ module.exports = function (webpackEnv) { splitChunks: { chunks: 'all', name: false, + cacheGroups: { + monacoEditor: { + chunks: 'async', + name: () => 'monaco.editor', + priority: 4, + test: /[\\/]node_modules[\\/]monaco-editor/, + enforce: true, + reuseExistingChunk: true, + }, + mpld3: { + chunks: 'async', + name: 'mpld3', + priority: 4, + test: /[\\/]node_modules[\\/]mpld3/, + enforce: true, + reuseExistingChunk: true, + }, + } }, // Keep the runtime chunk separated to enable long term caching // https://twitter.com/wSokra/status/969679223278505985 @@ -319,9 +346,6 @@ module.exports = function (webpackEnv) { .map((ext) => `.${ext}`) .filter((ext) => useTypeScript || !ext.includes('ts')), alias: { - // Support React Native Web - // https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/ - 'react-native': 'react-native-web', // Allows for better profiling with ReactDevTools ...(isEnvProductionProfile && { 'react-dom$': 'react-dom/profiling', @@ -360,6 +384,10 @@ module.exports = function (webpackEnv) { // match the requirements. When no loader matches it will fall // back to the "file" loader at the end of the loader list. oneOf: [ + { + test: /\.metayml$/i, + loader: require.resolve('raw-loader'), + }, // TODO: Merge this config once `image/avif` is in the mime-db // https://github.com/jshttp/mime-db { @@ -428,6 +456,9 @@ module.exports = function (webpackEnv) { presets: [ [require.resolve('babel-preset-react-app/dependencies'), { helpers: true }], ], + plugins: [ + '@babel/plugin-proposal-class-properties' + ], cacheDirectory: true, // See #6846 for context on why cacheCompression is disabled cacheCompression: false, @@ -508,6 +539,19 @@ module.exports = function (webpackEnv) { ), sideEffects: true, }, + { + test: lessModuleRegex, + use: getStyleLoaders( + { + importLoaders: 3, + sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment, + modules: { + getLocalIdent: getCSSModuleLocalIdent, + }, + }, + 'less-loader', + ), + }, // Adds support for CSS Modules, but using SASS // using the extension .module.scss or .module.sass { @@ -546,7 +590,13 @@ module.exports = function (webpackEnv) { ], }, plugins: [ - new AntdDayjsWebpackPlugin(), + new ArcoWebpackPlugin({ + theme: themeEnvToArcoThemeLibNameMap[process.env.THEME || 'normal'], + }), + new MonacoWebpackPlugin({ + languages: ['json', 'python', 'shell', 'javascript', 'go', 'yaml'], + publicPath: isEnvProduction ? '/v2/static/js/' : '/', + }), getHtmlPluginConfig('index'), // getHtmlPluginConfig('login'), // Inlines the webpack runtime script. This script is too small to warrant @@ -654,10 +704,6 @@ module.exports = function (webpackEnv) { : undefined, tsconfig: paths.appTsConfig, reportFiles: [ - // This one is specifically to match during CI tests, - // as micromatch doesn't match - // '../cra-template-typescript/template/src/App.tsx' - // otherwise. 
'../**/src/**/*.{ts,tsx}', '**/src/**/*.{ts,tsx}', '!**/src/**/__tests__/**', @@ -704,4 +750,6 @@ module.exports = function (webpackEnv) { // our own hints via the FileSizeReporter performance: false, }; + + return config; }; diff --git a/web_console_v2/client/config/webpackDevServer.config.js b/web_console_v2/client/config/webpackDevServer.config.js index d08a8fac5..8a211fd4b 100644 --- a/web_console_v2/client/config/webpackDevServer.config.js +++ b/web_console_v2/client/config/webpackDevServer.config.js @@ -6,6 +6,7 @@ const ignoredFiles = require('react-dev-utils/ignoredFiles'); const redirectServedPath = require('react-dev-utils/redirectServedPathMiddleware'); const paths = require('./paths'); const getHttpsConfig = require('./getHttpsConfig'); +const { createProxyMiddleware } = require('http-proxy-middleware'); const host = process.env.HOST || '0.0.0.0'; const sockHost = process.env.WDS_SOCKET_HOST; @@ -111,6 +112,14 @@ module.exports = function (proxy, allowedHost) { // This registers user provided middleware for proxy reasons require(paths.proxySetup)(app); } + + app.use( + '/mock/20021', + createProxyMiddleware({ + target: 'xxx', + changeOrigin: true, + }), + ); }, after(app) { // Redirect to `PUBLIC_URL` or `homepage` from `package.json` if url not match diff --git a/web_console_v2/client/docs/KNOWN_ISSUES.md b/web_console_v2/client/docs/KNOWN_ISSUES.md deleted file mode 100644 index 7b1432398..000000000 --- a/web_console_v2/client/docs/KNOWN_ISSUES.md +++ /dev/null @@ -1,7 +0,0 @@ -# Known issues - -## Development - -1. Build stuck at `Compiling...` - -Sometime you changed files' structure will lead this happen, just ctrl+c quit process and restart building should get it works. diff --git a/web_console_v2/client/dumi/config/config.js b/web_console_v2/client/dumi/config/config.js new file mode 100644 index 000000000..1fded1233 --- /dev/null +++ b/web_console_v2/client/dumi/config/config.js @@ -0,0 +1,37 @@ +/* eslint-disable import/no-anonymous-default-export */ +import { resolve } from 'path'; + +export default { + title: 'fedlearner', + locales: [ + ['zh-CN', '中文'], + ['en-US', 'English'], + ], + chainWebpack(memo) { + memo.plugins.delete('copy'); + memo.resolve.modules.add(resolve(__dirname, '../../src')); + }, + alias: { + src: resolve(__dirname, '../../src'), + components: resolve(__dirname, '../../src/components'), + assets: resolve(__dirname, '../../src/assets'), + styles: resolve(__dirname, '../../src/styles'), + typings: resolve(__dirname, '../../src/typings'), + shared: resolve(__dirname, '../../src/shared'), + i18n: resolve(__dirname, '../../src/i18n'), + services: resolve(__dirname, '../../src/services'), + stores: resolve(__dirname, '../../src/stores'), + }, + apiParser: { + propFilter: { + skipNodeModules: true, + }, + shouldExtractLiteralValuesFromEnum: true, + shouldExtractValuesFromUnion: true, + }, + define: { + // Force enable mock data + 'process.env.REACT_APP_ENABLE_FULLY_MOCK': 'true', + 'process.env.IS_DUMI_ENV': 'true', + }, +}; diff --git a/web_console_v2/client/dumi/docs/Display/AlgorithmDrawer.md b/web_console_v2/client/dumi/docs/Display/AlgorithmDrawer.md new file mode 100644 index 000000000..c873d3dcc --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/AlgorithmDrawer.md @@ -0,0 +1,151 @@ +# AlgorithmDrawer + +基于 [Arco Drawer](https://arco.design/react/components/modal#api) 封装,展示某个算法版本具体信息的 Drawer 组件 + +> 注意 ⚠️: 因为组件内部引入了 `react-query` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + + + +```tsx | pure +type AlgorithmParameter = { + name: string; + value: 
string; + required: boolean; + display_name: string; + comment: string; + value_type: ValueType; +}; +``` + +## 常规使用 + +需要传递算法项目 algorithmProjectId 和算法版本 algorithmId 2 个 ID,组件会内部进行异步请求获取数据 + +```tsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import AlgorithmDrawer from 'components/AlgorithmDrawer'; + +export default () => { + const [visible, setVisible] = useState(false); + + return ( + + + + { + setVisible(false); + }} + /> + + + ); +}; +``` + +## parameterVariables + +如果需要显示额外的超参数的话,可以通过 parameterVariables 这个 props 来实现 + +如果设置`isAppendParameterVariables = true`,他会在原来的超参数数组上,额外增加你所传递的超参数 + +如果设置`isAppendParameterVariables = false`,会只显示 parameterVariables  中的超参数 + +```tsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import AlgorithmDrawer from 'components/AlgorithmDrawer'; + +export default () => { + const [visible, setVisible] = useState(false); + + return ( + + + + { + setVisible(false); + }} + parameterVariables={[ + { + name: 'extraField', + required: true, + value: '', + display_name: '', + comment: '', + value_type: 'STRING', + }, + ]} + /> + + + ); +}; +``` + +## 子组件 + +### Button + +为了方便起见,封装了点击按钮后,展示 Drawer 的逻辑,支持传递 text 来指定 button 的文案,并提供 children 来高度自定义孩子节点(在组件内部已经封装了 onClick 逻辑) + + + +```tsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import AlgorithmDrawer from 'components/AlgorithmDrawer'; + +export default () => { + return ( + + + { + setVisible(false); + }} + /> +
+ { + setVisible(false); + }} + > + Custom children + +
+
+ ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Display/CodePreview.md b/web_console_v2/client/dumi/docs/Display/CodePreview.md new file mode 100644 index 000000000..22ef90fc4 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/CodePreview.md @@ -0,0 +1,244 @@ +# CodePreview + +展示算法文件的组件,具有文件目录树和代码编辑器(只读)的功能,支持同步/异步两种模式 + + + +## 常规使用 + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +const fileData = { + 'owner.py': '# coding: utf-8\n', + 'leader/main.py': + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n 
flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'leader/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', + 'follower/main.py': + "# coding: utf-8\n# encoding=utf8\nimport logging\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\nimport os\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n 
help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'y': tf.constant(0, shape=[1])}\n return features, labels\n \n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['embedding'] = tf.placeholder(dtype=tf.float32, shape=[1, 16], name='embedding')\n receiver_tensors = {\n 'embedding': features['embedding']\n }\n return tf.estimator.export.ServingInputReceiver(\n features, receiver_tensors)\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n xavier_initializer = tf.glorot_normal_initializer()\n\n fc1_size = 16\n with tf.variable_scope('follower'):\n w1f = tf.get_variable('w1f', shape=[\n fc1_size, 1], dtype=tf.float32, initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable(\n 'b1f', shape=[1], dtype=tf.float32, initializer=tf.zeros_initializer())\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding = model.recv('embedding', tf.float32, require_grad=True)\n else:\n embedding = features['embedding']\n \n logits = tf.nn.bias_add(tf.matmul(embedding, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['y'], tf.float32)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n print('==============================================================')\n print(tf.shape(y))\n print(tf.shape(pred))\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=[logging_hook])\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'follower/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 
535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', +}; + +export default () => ( + <> + + +); +``` + +## FileData + +fileData 只在同步模式下有效(isAsyncMode = false),作为默认的展示数据 + +有数据,fileData 有值 + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +const fileData = { + 'owner.py': '# coding: utf-8\n', + 'leader/main.py': + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n 
flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'leader/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', + 'follower/main.py': + "# coding: utf-8\n# encoding=utf8\nimport logging\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\nimport os\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n 
help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'y': tf.constant(0, shape=[1])}\n return features, labels\n \n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['embedding'] = tf.placeholder(dtype=tf.float32, shape=[1, 16], name='embedding')\n receiver_tensors = {\n 'embedding': features['embedding']\n }\n return tf.estimator.export.ServingInputReceiver(\n features, receiver_tensors)\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n xavier_initializer = tf.glorot_normal_initializer()\n\n fc1_size = 16\n with tf.variable_scope('follower'):\n w1f = tf.get_variable('w1f', shape=[\n fc1_size, 1], dtype=tf.float32, initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable(\n 'b1f', shape=[1], dtype=tf.float32, initializer=tf.zeros_initializer())\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding = model.recv('embedding', tf.float32, require_grad=True)\n else:\n embedding = features['embedding']\n \n logits = tf.nn.bias_add(tf.matmul(embedding, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['y'], tf.float32)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n print('==============================================================')\n print(tf.shape(y))\n print(tf.shape(pred))\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=[logging_hook])\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'follower/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 
535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', +}; + +export default () => ( + <> + + +); +``` + +无数据, fileData 为空对象或者 undefined + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +const fileData = {}; + +export default () => ( + <> + + +); +``` + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +export default () => ( + <> + + +); +``` + +## IsAsyncMode + +在异步模式(IsAsyncMode = true)下,需要提供以下 2 个函数,再内部自行调用接口,并转换格式。 + +``` +getFileTreeList?: () => Promise; +getFile?: (filePath: string) => Promise; +``` + +1. `getFileTreeList`,用于获取文件目录树的内容 +2. `getFile`,用于获取文件的内容,他接收一个文件路径作为参数,例如 'leader/main.py' + +`getFileTreeList` 返回 Promise,而且 ResolvedValue 为以下格式的数组 + +```tsx | pure +interface FileTreeNode { + filename: string; + path: string; + /** File size */ + size: number; + /** Last Time Modified */ + mtime: number; + is_directory: boolean; + files: FileTreeNode[]; +} +``` + +`getFile` 返回 Promise,而且 ResolvedValue 为文件内容的字符串 + +为了方便调用,不用每次都输入`getFileTreeList`/`getFile`,封装了[AlgorithmProject](#algorithmproject)和[Algorithm](#algorithm) 2 个组件 + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +import { + fetchAlgorithmProjectFileTreeList, + fetchAlgorithmProjectFileContentDetail, +} from 'services/algorithm'; + +const testId = 3; + +export default () => ( + <> + fetchAlgorithmProjectFileTreeList(testId).then((res) => res.data)} + getFile={(filePath: string) => + fetchAlgorithmProjectFileContentDetail(testId, { + path: filePath, + }).then((res) => res.data.content) + } + /> + +); +``` + +## Height + +容器的高度,默认为`480px` + +height = 1024 + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +export default () => ( + <> + + +); +``` + +## 子组件 + +### AlgorithmProject + +为了方便外部调用,封装了与 AlgorithmProject 相关的方法,只需要传 AlgorithmProject 的 id 即可,默认开启异步模式 + +```jsx | pure +export function getAlgorithmProjectProps(props: { id: ID }) { + const { id } = props; + + return { + id: id, + isAsyncMode: true, + getFileTreeList: () => fetchAlgorithmProjectFileTreeList(id!).then((res) => res.data), + getFile: (filePath: string) => + fetchAlgorithmProjectFileContentDetail(id!, { + path: filePath, + }).then((res) => res.data.content), + }; +} +``` + +```tsx +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +export default () => ( + <> + + +); +``` + +### Algorithm + +为了方便外部调用,封装了与 Algorithm 相关的方法,只需要传 Algorithm 的 id 即可,默认开启异步模式 + +```jsx | pure +export function getAlgorithmProps(props: { id: ID }) { + const { id } = props; + + return { + id: id, + isAsyncMode: true, + getFileTreeList: () => fetchAlgorithmFileTreeList(id!).then((res) => res.data), + getFile: (filePath: string) => + fetchAlgorithmFileContentDetail(id!, { + path: filePath, + }).then((res) => res.data.content), + }; +} +``` + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodePreview from 'components/CodePreview'; + +export default () => ( + <> + + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/ConfusionMatrix.md b/web_console_v2/client/dumi/docs/Display/ConfusionMatrix.md new file mode 100644 index 000000000..8506ae6eb --- 
/dev/null
+++ b/web_console_v2/client/dumi/docs/Display/ConfusionMatrix.md
@@ -0,0 +1,56 @@
+# ConfusionMatrix
+
+A confusion matrix display component.
+
+From top to bottom and left to right, the cells are `tp`, `fn`, `fp`, `tn`.
+
+
+
+## Basic usage
+
+```tsx
+import React from 'react';
+import ConfusionMatrix from 'components/ConfusionMatrix';
+
+export default () => (
+
+);
+```
+
+## Normalization options
+
+By default, `percentValueList` is derived from `valueList` via `formatPercentValueList` and is used to display the normalized data; see [Basic usage](#basic-usage).
+
+The default `formatPercentValueList` function:
+
+```tsx | pure
+export const defaultFormatPercentValueList = (valueList: number[]) => {
+  const total = valueList.reduce((acc: number, cur: number) => acc + cur, 0);
+  return valueList.map((num) => ((num / total) * 100).toFixed(2) + '%');
+};
+```
+
+If `percentValueList` is passed in explicitly, only `percentValueList` is displayed.
+
+```tsx
+import React from 'react';
+import ConfusionMatrix from 'components/ConfusionMatrix';
+
+export default () => (
+
+);
+```
+
+## isEmpty
+
+Forces the empty-data layout.
+
+```tsx
+import React from 'react';
+import ConfusionMatrix from 'components/ConfusionMatrix';
+
+export default () => ;
+```
diff --git a/web_console_v2/client/dumi/docs/Display/FeatureImportance.md b/web_console_v2/client/dumi/docs/Display/FeatureImportance.md
new file mode 100644
index 000000000..368b05494
--- /dev/null
+++ b/web_console_v2/client/dumi/docs/Display/FeatureImportance.md
@@ -0,0 +1,84 @@
+# FeatureImportance
+
+A component for displaying feature importance.
+
+
+
+```tsx | pure
+type Item = {
+  label: string;
+  value: any;
+};
+```
+
+## Basic usage
+
+```tsx
+import React from 'react';
+import FeatureImportance from 'components/FeatureImportance';
+
+const data = [
+  { label: 'test_13', value: 0.7 },
+  { label: 'test_14', value: 0.6 },
+  { label: 'test_15', value: 0.5 },
+  { label: 'test_16', value: 0.4 },
+  { label: 'peer-1', value: 0.3 },
+  { label: 'peer-2', value: 0.3 },
+  { label: 'age', value: 0.3 },
+  { label: 'overall_score', value: 0.3 },
+  { label: 'test_17', value: 0.3 },
+  { label: 'salary', value: 0.2 },
+  { label: 'test_19', value: 0.2 },
+  { label: 'peer-3', value: 0.1 },
+  { label: 'education', value: 0.1 },
+  { label: 'height', value: 0.1 },
+  { label: 'peer-0', value: 0.08 },
+];
+
+export default () => (
+ +
+); +``` + +## xTickFormatter + +可以指定 x 轴的格式化函数,例如可以把 x 轴显示为百分比 + +默认 `xTickFormatter` 的函数 + +```tsx | pure +function defaultXTickFormatter(val: any) { + return val; +} +``` + +```tsx +import React from 'react'; +import FeatureImportance from 'components/FeatureImportance'; + +const data = [ + { label: 'test_13', value: 0.7 }, + { label: 'test_14', value: 0.6 }, + { label: 'test_15', value: 0.5 }, + { label: 'test_16', value: 0.4 }, + { label: 'peer-1', value: 0.3 }, + { label: 'peer-2', value: 0.3 }, + { label: 'age', value: 0.3 }, + { label: 'overall_score', value: 0.3 }, + { label: 'test_17', value: 0.3 }, + { label: 'salary', value: 0.2 }, + { label: 'test_19', value: 0.2 }, + { label: 'peer-3', value: 0.1 }, + { label: 'education', value: 0.1 }, + { label: 'height', value: 0.1 }, + { label: 'peer-0', value: 0.08 }, +]; + +export default () => ( +
+ `${value * 100}%`} /> +
+); +``` diff --git a/web_console_v2/client/dumi/docs/Display/InfoItem.md b/web_console_v2/client/dumi/docs/Display/InfoItem.md new file mode 100644 index 000000000..40036ab3c --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/InfoItem.md @@ -0,0 +1,123 @@ +# InfoItem + +显示标题 + Tag 内容的组件,其中标题充当 Header,内容充当 Footer,并带有 Tag 效果 + + + +## 常规使用 + +```jsx +import React from 'react'; +import InfoItem from 'components/InfoItem'; + +export default () => ( + <> +
+ + + +
+ +); +``` + +## IsBlock + +```jsx +import React from 'react'; +import InfoItem from 'components/InfoItem'; + +export default () => ( + <> + + + + +); +``` + +## Value + +value 可以传入任意 React 组件 + +例如,要实现 Copy 功能,支持 Copy 拷贝的组件 + +```jsx +import React from 'react'; +import InfoItem from 'components/InfoItem'; +import ClickToCopy from 'components/ClickToCopy'; +import { Copy } from 'components/IconPark'; + +export default () => ( + <> + + value1 + + + } + /> + +); +``` + +## IsInputMode + +isInputMode = true 时,value 类似 defaultValue 的作用 + +配合 onInputBlur 可以获取 input 的内容 + +```jsx +import React from 'react'; +import InfoItem from 'components/InfoItem'; + +export default () => ( + <> + { + console.log(val); + }} + /> + +); +``` + +## OnClick + +isInputMode = true 时,onClick 不会生效 + +```jsx +import React from 'react'; +import InfoItem from 'components/InfoItem'; + +export default () => ( + <> + { + console.log(val); + }} + onClick={(val) => { + alert('value1'); + }} + /> + + { + alert('value2'); + }} + /> + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/Label.md b/web_console_v2/client/dumi/docs/Display/Label.md new file mode 100644 index 000000000..e1c59147d --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/Label.md @@ -0,0 +1,94 @@ +# Label + +展示纯文本的组件  + +> 注意 ⚠️: 此组件是在项目后期创建的,所以代码中可能存在很多硬编码的文本组件,以后为了统一管理,可能会逐一替换。 + + + +## Label + +普通文本 + +```jsx +import React from 'react'; +import { Label } from 'styles/elements'; + +export default () => ( + <> + + + + + +); +``` + +### LabelStrong + +强调文本 + +与 Label 的 props 相同,只是默认值不一样 + +``` +fontSize: props.fontSize || 12, +fontColor: props.fontColor || 'var(--textColorStrong)', +fontWeight: props.fontWeight || 500, +``` + +```jsx +import React from 'react'; +import { LabelStrong } from 'styles/elements'; + +export default () => ( + <> + LabelStrong + +); +``` + +### LabelTint + +次要的小文本 + +与 Label 的 props 相同,只是默认值不一样 + +``` +fontSize: props.fontSize || 12, +fontColor: props.fontColor || 'var(--textColorSecondary)', +fontWeight: props.fontWeight || 400, +``` + +```jsx +import React from 'react'; +import { LabelTint } from 'styles/elements'; + +export default () => ( + <> + LabelTint + +); +``` + +### LabelForm + +表单文本 + +与 Label 的 props 相同,只是默认值不一样 + +``` +fontSize: props.fontSize || 13, +fontColor: props.fontColor || 'rgba(0, 0, 0, 0.85)', +fontWeight: props.fontWeight || 400, +``` + +```jsx +import React from 'react'; +import { LabelForm } from 'styles/elements'; + +export default () => ( + <> + LabelForm + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/MoreActions.md b/web_console_v2/client/dumi/docs/Display/MoreActions.md new file mode 100644 index 000000000..05850fd3f --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/MoreActions.md @@ -0,0 +1,293 @@ +# MoreActions + +显示更多操作的组件,click 上去会显示具体的操作,同时带有禁用和禁用提示 disabled/disabledTip 的功能 + + + +### ActionItem + +```jsx | pure +interface ActionItem { + /** Display Label */ + label: string; + onClick?: () => void; + /** Sometimes you need to disable the button */ + disabled?: boolean; + /** Sometimes you want a hint when the button is disabled */ + disabledTip?: string; + /** Danger button style, red color */ + danger?: boolean; +} +``` + +## 常规使用 + +如果不传入 children 的话,默认显示`...` + +```tsx +import React from 'react'; +import MoreActions, { ActionItem } from 'components/MoreActions'; + +const actionList: ActionItem[] = [ + { + label: 'Delete', + onClick: () => { + alert('Delete'); + }, + }, + { + label: 'Log', + onClick: () => { + alert('Log'); + }, + }, + { + label: 
'Refetch', + onClick: () => { + alert('Refetch'); + }, + }, + { + label: 'Disabled', + disabled: true, + onClick: () => { + alert('Disabled'); + }, + }, + { + label: 'Disabled with tip', + disabled: true, + disabledTip: 'tip', + onClick: () => { + alert('Disabled with tip'); + }, + }, + { + label: 'Danger', + onClick: () => { + alert('Danger'); + }, + danger: true, + }, + { + label: 'Danger with disabled', + onClick: () => { + alert('Danger with disabled'); + }, + danger: true, + disabled: true, + }, +]; + +export default () => ( + <> + + +); +``` + +## RenderContent + +```tsx +import React from 'react'; +import MoreActions from 'components/MoreActions'; + +const actionList: ActionItem[] = [ + { + label: 'Delete', + onClick: () => { + alert('Delete'); + }, + }, + { + label: 'Log', + onClick: () => { + alert('Log'); + }, + }, + { + label: 'Refetch', + onClick: () => { + alert('Refetch'); + }, + }, + { + label: 'Disabled', + disabled: true, + onClick: () => { + alert('Disabled'); + }, + }, + { + label: 'Disabled with tip', + disabled: true, + disabledTip: 'tip', + onClick: () => { + alert('Disabled with tip'); + }, + }, + { + label: 'Danger', + onClick: () => { + alert('Danger'); + }, + danger: true, + }, + { + label: 'Danger with disabled', + onClick: () => { + alert('Danger with disabled'); + }, + danger: true, + disabled: true, + }, +]; + +export default () => ( + <> + { + return actionList.map((item, index) => { + return ( +
<div key={index} onClick={item.onClick}>
  {/* 假设的还原:原包裹元素与属性在提取时丢失 */}
  {index + 1}. {item.label}
</div>
+ ); + }); + }} + /> + +); +``` + +## Children + +```tsx +import React from 'react'; +import MoreActions, { ActionItem } from 'components/MoreActions'; + +const actionList: ActionItem[] = [ + { + label: 'Delete', + onClick: () => { + alert('Delete'); + }, + }, + { + label: 'Log', + onClick: () => { + alert('Log'); + }, + }, + { + label: 'Refetch', + onClick: () => { + alert('Refetch'); + }, + }, + { + label: 'Disabled', + disabled: true, + onClick: () => { + alert('Disabled'); + }, + }, + { + label: 'Disabled with tip', + disabled: true, + disabledTip: 'tip', + onClick: () => { + alert('Disabled with tip'); + }, + }, + { + label: 'Danger', + onClick: () => { + alert('Danger'); + }, + danger: true, + }, + { + label: 'Danger with disabled', + onClick: () => { + alert('Danger with disabled'); + }, + danger: true, + disabled: true, + }, +]; + +export default () => ( + <> + Click me + +); +``` + +## ZIndex + +支持修改容器的 z-index,默认为 z-index 为 var(--zIndexLessThanModal),目前该值为 999 + +```tsx +import React from 'react'; +import MoreActions, { ActionItem } from 'components/MoreActions'; + +const actionList: ActionItem[] = [ + { + label: 'Delete', + onClick: () => { + alert('Delete'); + }, + }, + { + label: 'Log', + onClick: () => { + alert('Log'); + }, + }, + { + label: 'Refetch', + onClick: () => { + alert('Refetch'); + }, + }, + { + label: 'Disabled', + disabled: true, + onClick: () => { + alert('Disabled'); + }, + }, + { + label: 'Disabled with tip', + disabled: true, + disabledTip: 'tip', + onClick: () => { + alert('Disabled with tip'); + }, + }, + { + label: 'Danger', + onClick: () => { + alert('Danger'); + }, + danger: true, + }, + { + label: 'Danger with disabled', + onClick: () => { + alert('Danger with disabled'); + }, + danger: true, + disabled: true, + }, +]; + +export default () => ( + <> + + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/PropertyList.md b/web_console_v2/client/dumi/docs/Display/PropertyList.md new file mode 100644 index 000000000..3fb0af023 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/PropertyList.md @@ -0,0 +1,330 @@ +# PropertyList + +显示属性列表的组件 + + + +### VariableAccessMode + +```ts | pure +enum VariableAccessMode { + UNSPECIFIED = 'UNSPECIFIED', + PRIVATE = 'PRIVATE', + PEER_READABLE = 'PEER_READABLE', + PEER_WRITABLE = 'PEER_WRITABLE', +} +``` + +### PropertyItem + +```ts | pure +type PropertyItem = { + /** Display label */ + label: string; + /** Display value */ + value: any; + /** Is hidden */ + hidden?: boolean; + /** Access mode */ + accessMode?: VariableAccessMode; +}; +``` + +## 常规使用 + +默认一行两个 + +```tsx +import React from 'react'; +import PropertyList from 'components/PropertyList'; + +const properties = [ + { + label: 'ID', + value: '12345', + }, + { + label: 'State', + value: 'READY', + }, + { + label: 'Type', + value: 'NN Model', + }, + { + label: 'Replicas', + value: '1/10', + }, + { + label: 'Created Time', + value: '2021-04-21 18:00:23', + }, + { + label: 'Updated Time', + value: '2021-04-21 22:00:23', + }, + { + label: 'Deleted Time', + value: '2021-04-21 23:00:23', + }, +]; + +export default () => ( + <> + + +); +``` + +## Cols + +```tsx +import React from 'react'; +import PropertyList from 'components/PropertyList'; + +const properties = [ + { + label: 'ID', + value: '12345', + }, + { + label: 'State', + value: 'READY', + }, + { + label: 'Type', + value: 'NN Model', + }, + { + label: 'Replicas', + value: '1/10', + }, + { + label: 'Created Time', + value: '2021-04-21 18:00:23', + }, + { + label: 'Updated Time', + value: 
'2021-04-21 22:00:23', + }, + { + label: 'Deleted Time', + value: '2021-04-21 23:00:23', + }, +]; + +export default () => ( + <> +
{/* 假设的还原:原 <PropertyList /> 标签在提取时丢失,cols 属性按本节标题推测 */}
cols = 3
<PropertyList properties={properties} cols={3} />
cols = 4
<PropertyList properties={properties} cols={4} />
cols = 5
<PropertyList properties={properties} cols={5} />
+ + +); +``` + +## InitialVisibleRows + +```tsx +import React from 'react'; +import PropertyList from 'components/PropertyList'; + +const properties = [ + { + label: 'ID', + value: '12345', + }, + { + label: 'State', + value: 'READY', + }, + { + label: 'Type', + value: 'NN Model', + }, + { + label: 'Replicas', + value: '1/10', + }, + { + label: 'Created Time', + value: '2021-04-21 18:00:23', + }, + { + label: 'Updated Time', + value: '2021-04-21 22:00:23', + }, + { + label: 'Deleted Time', + value: '2021-04-21 23:00:23', + }, +]; + +export default () => ( + <> +
{/* 假设的还原:原 <PropertyList /> 标签在提取时丢失 */}
initialVisibleRows = 1
<PropertyList properties={properties} initialVisibleRows={1} />
initialVisibleRows = 2
<PropertyList properties={properties} initialVisibleRows={2} />
initialVisibleRows = 3
<PropertyList properties={properties} initialVisibleRows={3} />
+ + +); +``` + +## Hidden + +```tsx +import React from 'react'; +import PropertyList from 'components/PropertyList'; + +const properties = [ + { + label: 'ID', + value: '12345', + }, + { + label: 'State', + value: 'READY', + }, + { + label: 'Type', + value: 'NN Model', + }, + { + label: 'Replicas', + value: '1/10', + }, + { + label: 'Created Time', + value: '2021-04-21 18:00:23', + hidden: true, + }, + { + label: 'Updated Time', + value: '2021-04-21 22:00:23', + hidden: true, + }, + { + label: 'Deleted Time', + value: '2021-04-21 23:00:23', + }, +]; + +export default () => ( + <> + + +); +``` + +## AccessMode + +- VariableAccessMode.PEER_WRITABLE 为对侧可编辑 +- VariableAccessMode.PEER_READABLE 为对侧可见 +- VariableAccessMode.PRIVATE 为对侧不可见 + +```tsx +import React from 'react'; +import PropertyList from 'components/PropertyList'; +import { VariablePermissionLegend } from 'components/VariblePermission'; + +enum VariableAccessMode { + UNSPECIFIED = 'UNSPECIFIED', + PRIVATE = 'PRIVATE', + PEER_READABLE = 'PEER_READABLE', + PEER_WRITABLE = 'PEER_WRITABLE', +} + +const properties = [ + { + label: 'ID', + value: '12345', + accessMode: VariableAccessMode.UNSPECIFIED, + }, + { + label: 'State', + value: 'READY', + accessMode: VariableAccessMode.PRIVATE, + }, + { + label: 'Type', + value: 'NN Model', + accessMode: VariableAccessMode.PEER_READABLE, + }, + { + label: 'Replicas', + value: '1/10', + accessMode: VariableAccessMode.PEER_WRITABLE, + }, + { + label: 'Created Time', + value: '2021-04-21 18:00:23', + }, + { + label: 'Updated Time', + value: '2021-04-21 22:00:23', + }, + { + label: 'Deleted Time', + value: '2021-04-21 23:00:23', + }, +]; + +export default () => ( + <> + + + +); +``` + +## Align + +css align-items 属性的代理 + +垂直居中 align="center" + +```tsx +import React from 'react'; +import PropertyList from 'components/PropertyList'; + +const properties = [ + { + label: 'ID', + value: '12345', + }, + { + label: 'State', + value: 'READY', + }, + { + label: 'Type', + value: 'NN Model', + }, + { + label: 'Replicas', + value: '1/10', + }, + { + label: 'Created Time', + value: '2021-04-21 18:00:23', + }, + { + label: 'Updated Time', + value: '2021-04-21 22:00:23', + }, + { + label: 'Deleted Time', + value: '2021-04-21 23:00:23', + }, +]; + +export default () => ( + <> + + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/StateIndicator.md b/web_console_v2/client/dumi/docs/Display/StateIndicator.md new file mode 100644 index 000000000..e7540ec34 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/StateIndicator.md @@ -0,0 +1,198 @@ +# StateIndicator + +显示状态的组件,具有显示颜色圆点 + 状态文本 + 提示语 + Hover 操作+Tag 模式的功能,常与 Table 配合,作为某个 Col 展示 + + + +### StateTypes + +```jsx | pure +type StateTypes = + | 'processing' + | 'success' + | 'warning' + | 'error' + | 'default' + | 'gold' + | 'lime' + | 'unknown' + | 'pending_accept' + | 'deleted'; +``` + +### ActionItem + +```jsx | pure +interface ActionItem { + /** Display Label */ + label: string; + onClick?: () => void; + /** Sometimes you need to disable the button */ + disabled?: boolean; + /** Sometimes you want a hint when the button is disabled */ + disabledTip?: string; + /** Danger button style, red color */ + danger?: boolean; +} +``` + +## 常规使用 + +```jsx +import React from 'react'; +import StateIndicator from 'components/StateIndicator'; + +export default () => ( + <> + + + + + + + + + + +); +``` + +## Tip + +tip 会默认居中(Placement top),注意容器 width 宽度 + +```jsx +import React from 'react'; +import StateIndicator from 'components/StateIndicator'; + +export 
default () => ( + <> +
{/* 假设的还原:原 <StateIndicator /> 标签在提取时丢失;按 StateTypes 逐一列出,text/tip 属性为推测用法 */}
<StateIndicator type="processing" text="processing" tip="I am a tip" />
<StateIndicator type="success" text="success" tip="I am a tip" />
<StateIndicator type="warning" text="warning" tip="I am a tip" />
<StateIndicator type="error" text="error" tip="I am a tip" />
<StateIndicator type="default" text="default" tip="I am a tip" />
<StateIndicator type="gold" text="gold" tip="I am a tip" />
<StateIndicator type="lime" text="lime" tip="I am a tip" />
<StateIndicator type="unknown" text="unknown" tip="I am a tip" />
<StateIndicator type="pending_accept" text="pending_accept" tip="I am a tip" />
<StateIndicator type="deleted" text="deleted" tip="I am a tip" />
+ +); +``` + +## ActionList + +Hover 上去会显示具体的操作 + +```jsx +import React from 'react'; +import StateIndicator from 'components/StateIndicator'; + +const actionList = [ + { + label: 'Delete', + onClick: () => { + alert('Delete'); + }, + }, + { + label: 'Log', + onClick: () => { + alert('Log'); + }, + }, + { + label: 'Refetch', + onClick: () => { + alert('Refetch'); + }, + isLoading: true, + }, +]; + +export default () => ( + <> + + + + + + + + + + +); +``` + +## Tag + +Tag 单纯是[Arco Tag](https://arco.design/react/components/tag#api)组件,没有 tip/actionList 的功能 + +```jsx +import React from 'react'; +import StateIndicator from 'components/StateIndicator'; + +export default () => ( + <> + + + + + + + + + + +); +``` + +## AfterText + +afterText 支持传递一个字符串,它会默认显示一个[Arco Button](https://arco.design/react/components/button#api)组件,并点击后会调用`onAfterTextClick`回调 + +```jsx +import React from 'react'; +import StateIndicator from 'components/StateIndicator'; + +export default () => ( + <> + alert('publish')} + /> + +); +``` + +afterText 也支持传递一个 React.ReactNode 类型(优先级比 string 字符串低),自定义渲染后面的内容,此时`onAfterTextClick`失效 + +```jsx +import React from 'react'; +import StateIndicator from 'components/StateIndicator'; + +export default () => ( + <> + I'm custom afterText} + /> + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/StatisticList.md b/web_console_v2/client/dumi/docs/Display/StatisticList.md new file mode 100644 index 000000000..891b06622 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/StatisticList.md @@ -0,0 +1,97 @@ +# StatisticList + +显示统计数字列表的组件,基于 [Arco Card](https://arco.design/react/components/card#api) 和 [TitleWithIcon](/display/title-with-icon) 封装 + + + +```tsx | pure +type OptionItem = { + /** Display title */ + text: string; + /** Display value */ + value: string | number; + /** Tip */ + tip?: string; +}; +``` + +## 常规使用 + +```tsx +import React from 'react'; +import StatisticList from 'components/StatisticList'; + +const data = [ + { + text: 'AUC ROC', + value: 0.75316, + tip: 'AUC ROC', + }, + { + text: 'Accuracy', + value: 0.75316, + tip: 'Accuracy', + }, + { + text: 'Precision', + value: 0.5, + tip: 'Precision', + }, + { + text: 'Recall', + value: 0.8, + tip: 'Recall', + }, + { + text: 'F1 score', + value: 0.8, + }, + { + text: 'Log loss', + value: 0.7, + }, + { + text: 'Metrics1', + value: 0.12345, + }, + { + text: 'Metrics2', + value: 0.54321, + }, + { + text: 'Metrics3', + value: 0.888888, + }, +]; + +export default () => ( +
<div>
  {/* 假设的还原:原示例 JSX 在提取时丢失,cols 属性按本节标题推测 */}
  cols = 6
  <StatisticList data={data} cols={6} />
  cols = 3
  <StatisticList data={data} cols={3} />
</div>
+); +``` + +## 子组件 + +### NumberItem + +```tsx | pure +type NumberItemProps = { + value?: any; + className?: string; +} & TitleWithIconProps; +``` + +```jsx +import React from 'react'; +import { NumberItem } from 'components/StatisticList'; + +export default () => [ + , +
, + , +]; +``` diff --git a/web_console_v2/client/dumi/docs/Display/TitleWithIcon.md b/web_console_v2/client/dumi/docs/Display/TitleWithIcon.md new file mode 100644 index 000000000..e8e101bf7 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/TitleWithIcon.md @@ -0,0 +1,106 @@ +# TitleWithIcon + +显示标题 + Icon 的组件,带有 tip 提示功能 + + + +## 常规使用 + +```jsx +import React from 'react'; +import TitleWithIcon from 'components/TitleWithIcon'; + +export default () => ( + <> + + +); +``` + +## IsShowIcon + +```jsx +import React from 'react'; +import TitleWithIcon from 'components/TitleWithIcon'; + +export default () => ( + <> + + +); +``` + +## IsLeftIcon + +```jsx +import React from 'react'; +import TitleWithIcon from 'components/TitleWithIcon'; + +export default () => ( + <> + + + +); +``` + +## Tip + +isShowIcon = false 时,不显示 tip + +```jsx +import React from 'react'; +import TitleWithIcon from 'components/TitleWithIcon'; + +export default () => ( + <> + + + + +); +``` + +## Icon + +```jsx +import React from 'react'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { InfoCircle } from 'components/IconPark'; + +export default () => ( + <> + + + +); +``` + +## TextColor + +```jsx +import React from 'react'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { InfoCircle } from 'components/IconPark'; + +export default () => ( + <> + + + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/VariblePermission.md b/web_console_v2/client/dumi/docs/Display/VariblePermission.md new file mode 100644 index 000000000..f2784b037 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/VariblePermission.md @@ -0,0 +1,96 @@ +# VariblePermission + +显示权限 icon 的组件,常用于工作流详情页中 + +目前有 3 种权限 + +- VariableAccessMode.PEER_WRITABLE `对侧可编辑` +- VariableAccessMode.PEER_READABLE `对侧可见` +- VariableAccessMode.PRIVATE `对侧不可见` + +```js | pure +import { VariableAccessMode } from 'typings/variable'; + +export enum VariableAccessMode { + UNSPECIFIED = 'UNSPECIFIED', + PRIVATE = 'PRIVATE', + PEER_READABLE = 'PEER_READABLE', + PEER_WRITABLE = 'PEER_WRITABLE', +} +``` + + + +## 子组件 + +### Writable + +VariableAccessMode.PEER_WRITABLE 对侧可编辑 + +```tsx +import React from 'react'; +import VariblePermission from 'components/VariblePermission'; + +export default () => ( + <> +
desc = false
{/* 假设用法:原组件标签在提取时丢失,子组件名按本节标题推测 */}
<VariblePermission.Writable />
desc = true
<VariblePermission.Writable desc={true} />
+ + +); +``` + +### Readable + +VariableAccessMode.PEER_READABLE `对侧可见` + +```tsx +import React from 'react'; +import VariblePermission from 'components/VariblePermission'; + +export default () => ( + <> +
desc = false
{/* 假设用法,同 Writable */}
<VariblePermission.Readable />
desc = true
<VariblePermission.Readable desc={true} />
+ + +); +``` + +### Private + +VariableAccessMode.PRIVATE `对侧不可见` + +```tsx +import React from 'react'; +import VariblePermission from 'components/VariblePermission'; + +export default () => ( + <> +
desc = false
{/* 假设用法,同 Writable */}
<VariblePermission.Private />
desc = true
<VariblePermission.Private desc={true} />
+ + +); +``` + +### VariablePermissionLegend + +```tsx +import React from 'react'; +import { VariablePermissionLegend } from 'components/VariblePermission'; + +export default () => ( + <> +
desc = false
{/* 假设的还原:原 <VariablePermissionLegend /> 标签在提取时丢失,prefix 属性按标注推测 */}
<VariablePermissionLegend />
desc = true
<VariablePermissionLegend desc={true} />
desc = true + prefix = "对侧"
<VariablePermissionLegend desc={true} prefix="对侧" />
+ + +); +``` diff --git a/web_console_v2/client/dumi/docs/Display/WhichAlgorithm.md b/web_console_v2/client/dumi/docs/Display/WhichAlgorithm.md new file mode 100644 index 000000000..bf0b40fb4 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/WhichAlgorithm.md @@ -0,0 +1,74 @@ +# WhichAlgorithm + +展示算法名称的组件,通过传入算法的 id,来显示对应的算法名称 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + + + +## 常规使用 + +如果找不到 id 对应的算法的话,会显示`-` + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichAlgorithm from 'components/WhichAlgorithm'; + +export default () => { + return ( + + +
id 为 1 的算法(假设存在)名称为:
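{/* 假设用法:原 <WhichAlgorithm /> 标签在提取时丢失,按上文"传入算法的 id"补回 */}
<WhichAlgorithm id={1} />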
+
+
);
};
```

## formatter

格式化算法名称的函数

默认的 `formatter` 函数如下

```tsx | pure
function defaultFormatter(algorithm: Algorithm) {
  return `${algorithm.name} (V${algorithm.version})`;
}
```

下面是自定义 `formatter` 的例子

```jsx
import React, { useState } from 'react';
import { QueryClientProvider } from 'react-query';
import queryClient from 'shared/queryClient';
import { RecoilRoot } from 'recoil';

import WhichAlgorithm from 'components/WhichAlgorithm';

export default () => {
  // 假设的还原:包裹结构按 import 推断(原 JSX 标签在提取时丢失)
  return (
    <QueryClientProvider client={queryClient}>
      <RecoilRoot>
        <div>
          id 为 1 的算法(假设存在)名称为:
          <WhichAlgorithm
            id={1}
            formatter={(algorithm) => {
              return `__${algorithm.name}__`;
            }}
          />
        </div>
      </RecoilRoot>
    </QueryClientProvider>
+ ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Display/WhichDataset.md b/web_console_v2/client/dumi/docs/Display/WhichDataset.md new file mode 100644 index 000000000..a8750bccf --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/WhichDataset.md @@ -0,0 +1,126 @@ +# WhichDataset + +展示数据集名称的组件,通过传入数据集的 id,来显示对应的数据集名称 + +数据集有 4 种,`原始数据集`、`结果数据集`、`合作伙伴数据集`、`求交数据集` + +其中合作伙伴数据集只能通过[WhichParticipantDataset](/display/which-participant-dataset)来获取 + +其中求交数据集只能通过[WhichDataset.IntersectionDataset](#intersectiondataset)来获取 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + +内部封装了获取所有数据集列表的接口的逻辑,可以从 Cache 中,根据 id 找到对应的数据集 + + + +## 常规使用 + +如果找不到 id 对应的数据集(原始+结果)的话,会显示`-` + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichDataset from 'components/WhichDataset'; + +export default () => { + return ( + + +
+ id 为 1 的数据集(假设存在)名称为: + +
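{/* 假设用法:原 <WhichDataset /> 标签在提取时丢失,按上文"传入数据集的 id"补回 */}
<WhichDataset id={1} />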
+
+ id 为 110 的数据集(假设不存在)名称为: + +
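{/* 同上,假设用法 */}
<WhichDataset id={110} />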
+
+
+ ); +}; +``` + +## loading + +强制显示 loading + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichDataset from 'components/WhichDataset'; + +export default () => { + return ( + + + + + + ); +}; +``` + +## 子组件 + +### UUID + +根据 UUID 来寻找数据集(原始+结果) + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichDataset from 'components/WhichDataset'; + +export default () => { + return ( + + +
+ uuid 为 8 的数据集(假设不存在)名称为: + +
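{/* 假设用法:原标签在提取时丢失;WhichDataset.UUID 子组件写法与 uuid 属性为按本节说明推测 */}
<WhichDataset.UUID uuid="8" />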
+
+
+ ); +}; +``` + +### IntersectionDataset + +求交数据集 + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichDataset from 'components/WhichDataset'; + +export default () => { + return ( + + +
+ id 为 1 的求交数据集(假设存在)名称为: + +
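{/* 假设用法:原标签在提取时丢失;子组件名 WhichDataset.IntersectionDataset 见上文说明 */}
<WhichDataset.IntersectionDataset id={1} />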
+
+ id 为 110 的求交数据集(假设不存在)名称为: + +
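{/* 同上,假设用法 */}
<WhichDataset.IntersectionDataset id={110} />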
+
+
+ ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Display/WhichModel.md b/web_console_v2/client/dumi/docs/Display/WhichModel.md new file mode 100644 index 000000000..38ddeba92 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/WhichModel.md @@ -0,0 +1,74 @@ +# WhichModel + +展示模型名称的组件,通过传入模型的 id,来显示对应的模型名称 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + + + +## 常规使用 + +如果找不到 id 对应的模型的话,会显示`-` + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichModel from 'components/WhichModel'; + +export default () => { + return ( + + +
id 为 1 的模型(假设存在)名称为:
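{/* 假设用法:原 <WhichModel /> 标签在提取时丢失,按上文"传入模型的 id"补回 */}
<WhichModel id={1} />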
+
+
+ ); +}; +``` + +## formatter + +格式化模型名称的函数 + +默认 `formatter` 的函数 + +```tsx | pure +function defaultFormatter(model: Model) { + return model.name; +} +``` + +下面是自定义`formatter`的例子 + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichModel from 'components/WhichModel'; + +export default () => { + return ( + + +
id 为 1 的模型(假设存在)名称为:
{/* 假设的还原:原起始标签在提取时丢失 */}
<WhichModel
  id={1}
  formatter={(model) => {
    return `__${model.name}__`;
  }}
/>
+
+
+ ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Display/WhichParticipant.md b/web_console_v2/client/dumi/docs/Display/WhichParticipant.md new file mode 100644 index 000000000..4013f0e5c --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/WhichParticipant.md @@ -0,0 +1,54 @@ +# WhichParticipant + +展示合作伙伴名称的组件,通过传入合作伙伴的 id,来显示对应的合作伙伴名称 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + +内部封装了获取所有合作伙伴列表的接口的逻辑,可以从 Cache 中,根据 id 找到对应的合作伙伴 + + + +## 常规使用 + +如果找不到 id 对应的合作伙伴的话,会显示`--` + +```jsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; + +import WhichParticipant from 'components/WhichParticipant'; + +export default () => { + return ( + +
+ id 为 1 的合作伙伴(假设存在)名称为: + +
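{/* 假设用法:原 <WhichParticipant /> 标签在提取时丢失 */}
<WhichParticipant id={1} />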
+
+ id 为 110 的合作伙伴(假设不存在)名称为: + +
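{/* 同上,假设用法 */}
<WhichParticipant id={110} />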
+
+ ); +}; +``` + +## loading + +强制显示 loading + +```jsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; + +import WhichParticipant from 'components/WhichParticipant'; + +export default () => { + return ( + + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Display/WhichParticipantDataset.md b/web_console_v2/client/dumi/docs/Display/WhichParticipantDataset.md new file mode 100644 index 000000000..6f59438e1 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/WhichParticipantDataset.md @@ -0,0 +1,58 @@ +# WhichParticipantDataset + +展示合作伙伴数据集名称的组件,通过传入合作数据集的 uuid,来显示对应的数据集名称 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + +内部封装了获取所有数据集列表的接口的逻辑,可以从 Cache 中,根据 uuid 找到对应的数据集 + + + +## 常规使用 + +如果找不到 uuid 对应的合作伙伴数据集的话,会显示`-` + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichParticipantDataset from 'components/WhichParticipantDataset'; + +export default () => { + return ( + + +
+ uuid 为 u26af7e549f30473382a 的数据集(假设存在)名称为: + +
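{/* 假设用法:原 <WhichParticipantDataset /> 标签在提取时丢失,uuid 取自上文 */}
<WhichParticipantDataset uuid="u26af7e549f30473382a" />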
+
+
+ ); +}; +``` + +## loading + +强制显示 loading + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import WhichParticipantDataset from 'components/WhichParticipantDataset'; + +export default () => { + return ( + + + + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Display/WhichProject.md b/web_console_v2/client/dumi/docs/Display/WhichProject.md new file mode 100644 index 000000000..ee4cb5802 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Display/WhichProject.md @@ -0,0 +1,54 @@ +# WhichProject + +展示工作区名称的组件,通过传入工作区的 id,来显示对应的工作区名称 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + +内部封装了获取所有工作区列表的接口的逻辑,可以从 Cache 中,根据 id 找到对应的工作区 + + + +## 常规使用 + +如果找不到 id 对应的工作区的话,会显示`--` + +```jsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; + +import WhichProject from 'components/WhichProject'; + +export default () => { + return ( + +
+ id 为 1 的工作区(假设存在)名称为: + +
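{/* 假设用法:原 <WhichProject /> 标签在提取时丢失 */}
<WhichProject id={1} />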
+
+ id 为 110 的工作区(假设不存在)名称为: + +
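{/* 同上,假设用法 */}
<WhichProject id={110} />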
+
+ ); +}; +``` + +## loading + +强制显示 loading + +```jsx +import React, { useState } from 'react'; +import { RecoilRoot } from 'recoil'; + +import WhichProject from 'components/WhichProject'; + +export default () => { + return ( + + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Feedback/ButtonWithModalConfirm.md b/web_console_v2/client/dumi/docs/Feedback/ButtonWithModalConfirm.md new file mode 100644 index 000000000..e8d730be5 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Feedback/ButtonWithModalConfirm.md @@ -0,0 +1,64 @@ +# ButtonWithModalConfirm + +带有二次确认功能(对话框)的 Button 组件,基于[Arco Button](xxx) 和 [Modal.confirm](/feedback/modal#modalconfirm) 封装 + +props 可以传任意 Button 的 props + + + +## 常规使用 + +```tsx +import React from 'react'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; + +export default () => ( + <> + { + alert('cancel'); + }} + > + Cancel + + { + alert('confirm'); + }} + > + confirm + + +); +``` + +## IsShowConfirmModal + +```tsx +import React from 'react'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; + +export default () => ( + <> + { + alert('Default title/content'); + }} + > + Default title/content + + { + alert('Custom title/content'); + }} + > + Custom title/content + + +); +``` diff --git a/web_console_v2/client/dumi/docs/Feedback/ButtonWithPopconfirm.md b/web_console_v2/client/dumi/docs/Feedback/ButtonWithPopconfirm.md new file mode 100644 index 000000000..4d34ce75b --- /dev/null +++ b/web_console_v2/client/dumi/docs/Feedback/ButtonWithPopconfirm.md @@ -0,0 +1,26 @@ +# ButtonWithPopconfirm + +带有二次确认功能(气泡框)的 Button 组件,基于[Arco Button](xxx) 和 [Arco Popconfirm](xxx) 封装 + + + +## 常规使用 + +```tsx +import React from 'react'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; + +export default () => ( + <> + { + console.log('confirm'); + }} + onCancel={() => { + console.log('cancel'); + }} + /> + +); +``` diff --git a/web_console_v2/client/dumi/docs/Feedback/Modal.md b/web_console_v2/client/dumi/docs/Feedback/Modal.md new file mode 100644 index 000000000..e3310e69a --- /dev/null +++ b/web_console_v2/client/dumi/docs/Feedback/Modal.md @@ -0,0 +1,106 @@ +# Modal + +基于 [Arco Modal](https://arco.design/react/components/modal#api) 封装,并在`styles/global.less`中覆盖了 Modal 相关静态方法`Modal.confirm` 的样式(带有`custom-modal`),尽量保持和 UX 图一致 + +把 config 冗余的配置抽离出来,导出`withConfirmProps`/`withDeleteProps` 2 个函数 + +## API + +支持传入 [Arco Modal](https://arco.design/react/components/modal#api) 原有的 props,并且增加一个自定义的静态方法`Modal.delete(props: ModalFuncProps)` + +### withConfirmProps + +```jsx | pure +export function withConfirmProps(props: ConfirmProps) { + return { + className: CUSTOM_CLASS_NAME, + zIndex: Z_INDEX_GREATER_THAN_HEADER, + okText: i18n.t('confirm'), + cancelText: i18n.t('cancel'), + ...props, + }; +} +``` + +### withDeleteProps + +```jsx | pure +export function withDeleteProps(props: ConfirmProps) { + return withConfirmProps({ + okText: i18n.t('delete'), + okButtonProps: { + status: 'danger', + }, + ...props, + }); +} +``` + +```jsx | pure +// Custom method +MyModal.delete = (props: ModalFuncProps) => { + return Modal.confirm(withDeleteProps(props)); +}; +``` + +## Modal.delete + +```jsx +import React from 'react'; +import Modal from 'components/Modal'; + +export default () => { + return ( + <> + + + ); +}; +``` + +## Modal.confirm + +```jsx +import React from 'react'; +import Modal from 'components/Modal'; + +export default () => { + return ( + <> + + + ); +}; +``` diff --git 
a/web_console_v2/client/dumi/docs/Form/BlockRadio.md b/web_console_v2/client/dumi/docs/Form/BlockRadio.md new file mode 100644 index 000000000..531d216b1 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/BlockRadio.md @@ -0,0 +1,523 @@ +# BlockRadio + +块状单选框 + + + +### Option + +每个 item 格式定义 + +```jsx | pure +type Option = { + /** form value */ + value: any, + /** display label */ + label: string, + disabled?: boolean, + /** extra data, one of the function(renderBlockInner) arguments */ + data?: any, + /** extra tip, only work in 's options prop */ + tip?: string, +}; +``` + +## 常规用法 + +默认占用一整行,平分,gap = 16 + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + /> + + ); +}; +``` + +## Gap + +gap = 64 + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + /> + + ); +}; +``` + +## IsCenter + +单选项展示内容是否居中显示 + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + /> + + ); +}; +``` + +## Disabled + +disabled = true + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + /> + + ); +}; +``` + +## IsVertical + +isVertical = true + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + /> + + ); +}; +``` + +## IsOneHalfMode + +isOneHalfMode = true + +```jsx +import React, { useState } from 'react'; +import { Grid, Form, Input } from '@arco-design/web-react'; +import BlockRadio from 'components/_base/BlockRadio'; +const { Row, Col } = Grid; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> +
+ { + setValue(val); + }} + /> + + + + + + + + + + + + + + + ); +}; +``` + +isOneHalfMode = false + +```jsx +import React, { useState } from 'react'; +import { Grid, Form, Input } from '@arco-design/web-react'; +import BlockRadio from 'components/_base/BlockRadio'; +const { Row, Col } = Grid; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> +
+ { + setValue(val); + }} + /> + + + + + + + + + + + + + + + ); +}; +``` + +## BeforeChange + +beforeChange 可以拦截 onChange 的事件触发,如果返回 beforeChange 返回 `false` 或者 `Promise.resolve(false)` 的话,则不会触发 onChange 事件 + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; +import Modal from 'components/Modal'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + beforeChange={() => { + return new Promise((resolve) => { + Modal.confirm({ + title: '确认变更', + content: '确认这样做吗?', + onOk() { + resolve(true); + }, + onCancel() { + resolve(false); + }, + }); + }); + }} + /> + + ); +}; +``` + +## RenderBlockInner + +自定义 inner 内容渲染 + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + renderBlockInner={(item, { label, data, isActive }) => { + return ( +
<div>
  {/* 假设的还原:原包裹元素在提取时丢失 */}
  {label}
  <div>footer</div>
</div>
+ ); + }} + /> + + ); +}; +``` + +## 子组件 + +### BlockRadio.WithTip + +重写 renderBlockInner,达到能显示 tip 的效果 + +```jsx | pure +const WithTip: FC = (props) => { + return ( + { + return ( + + + {item.tip} + + ); + }} + {...props} + /> + ); +}; +BlockRadio.WithTip = WithTip; +``` + +BlockRadio.WithTip 的 options 比 BlockRadio 多了一个 tip 字段 + +```jsx +import React, { useState } from 'react'; +import BlockRadio from 'components/_base/BlockRadio'; + +const options = [ + { + value: '1', + label: 'label1', + tip: 'tip1', + }, + { + value: '2', + label: 'label2', + tip: 'tip2', + }, + { + value: '3', + label: 'label3', + tip: 'tip3', + }, + { + value: '4', + label: 'label4', + tip: 'tip4', + }, +]; + +export default () => { + const [value, setValue] = useState(options[0].value); + return ( + <> + { + setValue(val); + }} + /> + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/CheckboxWithPopconfirm.md b/web_console_v2/client/dumi/docs/Form/CheckboxWithPopconfirm.md new file mode 100644 index 000000000..697aea9bf --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/CheckboxWithPopconfirm.md @@ -0,0 +1,27 @@ +# CheckboxWithPopconfirm + +带有二次确认功能的 Checkbox 组件,基于[Arco Checkbox](xxx) 和 [Arco Popconfirm](xxx)封装 + + + +## 常规使用 + +```tsx +import React, { useState } from 'react'; +import CheckboxWithPopconfirm from 'components/CheckboxWithPopconfirm'; + +export default () => { + const [value, setValue] = useState(false); + return ( + { + console.log(val); + setValue(val); + }} + /> + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/CheckboxWithTooltip.md b/web_console_v2/client/dumi/docs/Form/CheckboxWithTooltip.md new file mode 100644 index 000000000..6800ba19f --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/CheckboxWithTooltip.md @@ -0,0 +1,27 @@ +# CheckboxWithTooltip + +带有 Tooptip 提示功能的 Checkbox 组件,基于[Arco Checkbox](xxx) 和 [Arco Tooltip](xxx)封装 + + + +## 常规使用 + +```tsx +import React, { useState } from 'react'; +import CheckboxWithTooltip from 'components/CheckboxWithTooltip'; + +export default () => { + const [value, setValue] = useState(false); + return ( + { + console.log(val); + setValue(val); + }} + /> + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/CodeEditor.md b/web_console_v2/client/dumi/docs/Form/CodeEditor.md new file mode 100644 index 000000000..49203271d --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/CodeEditor.md @@ -0,0 +1,100 @@ +# CodeEditor + +代码编辑器 + +基于 [monaco-react](https://github.com/suren-atoyan/monaco-react) 封装,增加了`grey`主题色,更改了部分样式 + + + +也支持传入 [monaco-react](https://github.com/suren-atoyan/monaco-react) 原有的 props + +## 常规用法 + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodeEditor from 'components/CodeEditor'; + +const pyText = + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', 
shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n"; + +export default () => { + return ( + <> + { + console.log(val); + }} + /> + + ); +}; +``` + +## IsReadOnly + +isReadOnly = true + +```tsx +/** + * compact: true + */ +import React from 'react'; +import CodeEditor from 'components/CodeEditor'; + +const pyText = + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, 
initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n"; + +export default () => { + return ( + <> + { + console.log(val); + }} + isReadOnly={true} + /> + + ); +}; +``` + +## Theme + +theme = grey + +```tsx +/** + * iframe: true + * compact: true + */ +import React from 'react'; +import CodeEditor from 'components/CodeEditor'; + +const pyText = + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, 
initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n"; + +export default () => { + return ( + <> + { + console.log(val); + }} + theme="grey" + /> + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/ConfigForm.md b/web_console_v2/client/dumi/docs/Form/ConfigForm.md new file mode 100644 index 000000000..0e5ce249c --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/ConfigForm.md @@ -0,0 +1,210 @@ +# ConfigForm + +动态渲染 [Form.Item](https://arco.design/react/components/form#formitem) 配置的表单组,支持[Collapse](https://arco.design/react/components/collapse#api) 包裹部分 Form.Item + + + +### ItemProps + +```ts | pure +type ItemProps = { + componentType?: 'Input' | 'TextArea' | 'InputNumber'; + componentProps?: object; + render?: (props: Omit) => React.ReactNode; +} & FormItemProps; +``` + +## 常规用法 + +```tsx +import React from 'react'; +import ConfigForm from 'components/ConfigForm'; + +const formItemList = [ + { + label: '学习率', + field: 'learning_rate', + componentType: 'InputNumber', + }, + { + label: '迭代数', + field: 'max_iters', + componentType: 'InputNumber', + }, + { + label: '深度', + field: 'max_depth', + componentType: 'InputNumber', + }, + { + label: 'L2惩罚系数', + field: 'l2_regularization', + componentType: 'InputNumber', + }, + { + label: '最大分箱数量', + field: 'max_bins', + componentType: 'InputNumber', + }, + { + label: '线程池大小', + field: 'num_parallel', + componentType: 'InputNumber', + }, +]; +const collapseFormItemList = [ + { + label: '高级配置1', + field: 'p1', + componentType: 'Input', + }, + { + label: '高级配置2', + field: 'p2', + componentType: 'TextArea', + }, + { + label: '高级配置3', + field: 'p3', + }, + { + label: '高级配置4', + field: 'p4', + }, +]; + +export default () => { + return ( + { + console.log(values); + }} + /> + ); +}; +``` + +## Cols + +支持指定每行渲染多少个 Form.Item,取值范围为 cols = 1 | 2 | 3 | 4 | 6 | 8 | 12 + +```tsx +import React from 'react'; +import ConfigForm from 'components/ConfigForm'; + +const formItemList = [ + { + label: '学习率', + field: 'learning_rate', + componentType: 'InputNumber', + }, + { + label: '迭代数', + field: 'max_iters', + componentType: 'InputNumber', + }, + { + label: '深度', + field: 'max_depth', + componentType: 'InputNumber', + }, + { + label: 'L2惩罚系数', + field: 'l2_regularization', + componentType: 'InputNumber', + }, + { + label: '最大分箱数量', + field: 'max_bins', + componentType: 'InputNumber', + }, + { + label: '线程池大小', + field: 'num_parallel', + 
componentType: 'InputNumber', + }, +]; +const collapseFormItemList = [ + { + label: '高级配置1', + field: 'p1', + componentType: 'Input', + }, + { + label: '高级配置2', + field: 'p2', + componentType: 'TextArea', + }, + { + label: '高级配置3', + field: 'p3', + }, + { + label: '高级配置4', + field: 'p4', + }, +]; + +export default () => { + return ( + { + console.log(values); + }} + /> + ); +}; +``` + +## Render + +componentType 只支持 3 种组件,`Input`、`TextArea`、`InputNumber`,分别为 Arco 的 Input,Input.TextArea,InputNumber 组件,如果需要自定义渲染 Form.Item 包裹的组件的话,可以传入 render 函数,他只接受 1 个参数`componentProps` + +render 的优先级比 componentType 高 + +```tsx +import React from 'react'; +import { Switch } from '@arco-design/web-react'; +import ConfigForm from 'components/ConfigForm'; + +const formItemList = [ + { + label: '切换1', + field: 'switch1', + render(props) { + return ; + }, + }, + { + label: '切换(禁用)', + field: 'switch2', + componentProps: { + disabled: true, + }, + render(props) { + return ; + }, + }, +]; + +export default () => { + return ( + { + console.log(values); + }} + /> + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/DataSourceSelect.md b/web_console_v2/client/dumi/docs/Form/DataSourceSelect.md new file mode 100644 index 000000000..f3573ef1c --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/DataSourceSelect.md @@ -0,0 +1,42 @@ +# DataSourceSelect + +数据源下拉框,内部封装了获取数据源列表的网络请求 + +> 注意 ⚠️: 因为 `` 内部引入了 `react-query` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + +# API + + + +其他与 [Arco Select](https://arco.design/react/components/select#api) 一样 + +## 常规用法 + +默认展示所有数据源 + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import DataSourceSelect from 'components/DataSourceSelect'; + +export default () => { + return ( + + + { + console.log(val); + }} + /> + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/DatasetSelect.md b/web_console_v2/client/dumi/docs/Form/DatasetSelect.md new file mode 100644 index 000000000..8394f0b0c --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/DatasetSelect.md @@ -0,0 +1,135 @@ +# DatasetSelect + +数据集下拉框,内部封装了获取数据集列表的网络请求,并自定义了 children 渲染布局 + +> 注意 ⚠️: 因为 `` 内部引入了 `react-query` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + + + +> 注意 ⚠️: value 和 onChange 的值是 `Dataset` 类型,不是 any(dumi 不支持外部 type,所以显示成 any?),具体字段请查看 `import { Dataset } from 'typings/dataset'` + + + +## 常规用法 + +默认展示所有数据集 + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import DatasetSelect from 'components/DatasetSelect'; + +export default () => { + return ( + + + { + console.log(val); + }} + allowClear + /> + + + ); +}; +``` + +## Kind + +根据 kind 来过滤数据集 + +``` +0 - training dataset +1 - test dataset +2 - predict dataset +``` + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import DatasetSelect from 'components/DatasetSelect'; + +export default () => { + return ( + + +
Training dataset
+ { + console.log(val); + }} + allowClear + kind={0} + /> +
Evaluation dataset
+ { + console.log(val); + }} + allowClear + kind={1} + /> +
Prediction dataset
+ { + console.log(val); + }} + allowClear + kind={2} + /> +
+
+ ); +}; +``` + +## 无数据 + +无数据的情况下,会显示文本`暂无数据集 去创建`,点击`去创建`,会跳转到数据集列表页面 + +## 子组件 + +### DatasetPathSelect + +特殊处理 value 和 onChange,提取 dataset.path 字符串 + +```jsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; + +import { DatasetPathSelect } from 'components/DatasetSelect'; + +export default () => { + return ( + + + { + console.log(val); + }} + allowClear + /> + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/DoubleSelect.md b/web_console_v2/client/dumi/docs/Form/DoubleSelect.md new file mode 100644 index 000000000..26aff999a --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/DoubleSelect.md @@ -0,0 +1,357 @@ +# DoubleSelect + +双下拉框 + +> 注意 ⚠️: 因为 `` 内部引入了 `react-query` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + + + +### OptionItem + +每个 item 格式定义 + +```ts | pure +type OptionItem = { + /** Display label */ + label: string | number; + /** Form value */ + value: any; + disabled?: boolean; +}; +``` + + + +## 常规用法 + +```tsx +import React, { useState } from 'react'; +import DoubleSelect from 'components/DoubleSelect'; + +const leftOptionList = [ + { + label: 'left label1', + value: 'left value1', + }, + { + label: 'left label2', + value: 'left value2', + }, + { + label: 'left label3', + value: 'left value3', + }, +]; + +const rightOptionList = [ + { + label: 'right label1', + value: 'right value1', + }, + { + label: 'right label2', + value: 'right value2', + }, + { + label: 'right label3', + value: 'right value3', + }, +]; + +export default () => { + const [value, setValue] = useState(); + return ( + <> + { + console.log(val); + setValue(val); + }} + /> + + ); +}; +``` + +## IsClearRightValueAfterLeftSelectChange + +isClearRightValueAfterLeftSelectChange = true + +左边的下拉框选中后,会清空右边的下拉框的 value + +```tsx +import React, { useState } from 'react'; +import DoubleSelect from 'components/DoubleSelect'; + +const leftOptionList = [ + { + label: 'left label1', + value: 'left value1', + }, + { + label: 'left label2', + value: 'left value2', + }, + { + label: 'left label3', + value: 'left value3', + }, +]; + +const rightOptionList = [ + { + label: 'right label1', + value: 'right value1', + }, + { + label: 'right label2', + value: 'right value2', + }, + { + label: 'right label3', + value: 'right value3', + }, +]; + +export default () => { + const [value, setValue] = useState(); + return ( + <> + { + console.log(val); + setValue(val); + }} + isClearRightValueAfterLeftSelectChange={true} + /> + + ); +}; +``` + +## LeftLabel/RightLabel + +```tsx +import React, { useState } from 'react'; +import DoubleSelect from 'components/DoubleSelect'; + +const leftOptionList = [ + { + label: 'left label1', + value: 'left value1', + }, + { + label: 'left label2', + value: 'left value2', + }, + { + label: 'left label3', + value: 'left value3', + }, +]; + +const rightOptionList = [ + { + label: 'right label1', + value: 'right value1', + }, + { + label: 'right label2', + value: 'right value2', + }, + { + label: 'right label3', + value: 'right value3', + }, +]; + +export default () => { + const [value, setValue] = useState(); + return ( + <> + { + console.log(val); + setValue(val); + }} + leftLabel="leftLabel" + rightLabel="rightLabel" + /> + + ); +}; +``` + +## 子组件 + +### ModelSelect + +`ModelSelect` 内部封装了获取模型集列表(下拉框)/模型列表(右下拉框)的网络请求 + +isDisabledLinkage = false,联动,会根据选中的模型集来过滤出模型列表,最终呈现在右下拉框上 + +```tsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 
'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; +import DoubleSelect from 'components/DoubleSelect'; + +export default () => { + const [value, setValue] = useState(); + return ( + + + { + console.log(val); + setValue(val); + }} + leftField="model_set_id" + rightField="model_id" + leftLabel="模型集" + rightLabel="模型" + /> + + + ); +}; +``` + +isDisabledLinkage = true, 禁用联动,右下拉框直接显示所有模型 + +```tsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; +import DoubleSelect from 'components/DoubleSelect'; + +export default () => { + const [value, setValue] = useState(); + return ( + + + { + console.log(val); + setValue(val); + }} + leftField="model_set_id" + rightField="model_id" + leftLabel="模型集" + rightLabel="模型" + isDisabledLinkage={true} + /> + + + ); +}; +``` + +### AlgorithmSelect + +`AlgorithmSelect` 内部封装了获取算法项目(下拉框)/算法版本(右下拉框)的网络请求,支持传递 `algorithmProjectTypeList` 数组来过滤算法类型,类型为 `EnumAlgorithmProjectType` + +value 的格式为 `AlgorithmSelectValue` + +选中算法版本后,如果该算法含有超参数的话,会展示超参数列表,并支持修改该超参数的 value + +```ts +type AlgorithmSelectValue = { + algorithmProjectId: ID; + algorithmId: ID; + config?: AlgorithmParameter[]; + path?: string; +}; + +type AlgorithmParameter = { + name: string; + value: string; + required: boolean; + display_name: string; + comment: string; + value_type: ValueType; +}; + +enum EnumAlgorithmProjectType { + UNSPECIFIED = 'UNSPECIFIED', + TREE_VERTICAL = 'TREE_VERTICAL', + TREE_HORIZONTAL = 'TREE_HORIZONTAL', + NN_VERTICAL = 'NN_VERTICAL', + NN_HORIZONTAL = 'NN_HORIZONTAL', + NN_LOCAL = 'NN_LOCAL', +} +``` + +```tsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; +import DoubleSelect from 'components/DoubleSelect'; + +export default () => { + const [value, setValue] = useState(); + return ( + + + { + console.log(val); + setValue(val); + }} + leftLabel="算法项目" + rightLabel="算法版本" + /> + + + ); +}; +``` + +### ModelJobGroupSelect + +`ModelJobGroupSelect` 内部封装了获取模型评估和预测的 Job 的网络请求,支持传入 `type` 来根据算法类型过滤 Job Group 列表。 + +`type` 的类型为 `'NN_VERTICAL' | 'NN_HORIZONTAL'`。 + +一个简单的例子: + +```tsx +import React, { useState } from 'react'; +import { QueryClientProvider } from 'react-query'; +import queryClient from 'shared/queryClient'; +import { RecoilRoot } from 'recoil'; +import DoubleSelect from 'components/DoubleSelect'; + +export default () => { + const [value, setValue] = useState(); + return ( + + + { + console.log(val); + setValue(val); + }} + /> + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/FileUpload.md b/web_console_v2/client/dumi/docs/Form/FileUpload.md new file mode 100644 index 000000000..68e2ca66d --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/FileUpload.md @@ -0,0 +1,100 @@ +# FileUpload + +基于 [Arco Upload](xxx) 封装,具有限制文件大小,上传文件成功/失败回调功能 + +> 注意 ⚠️: 目前组件内部逻辑是根据 `action = /api/v2/files` 返回的结构来处理的,如果 action 换成其他地址可能会存在问题 + + + +也支持传入 [Arco Upload](xxx) 原有的 props + +### UploadFileType + +```jsx | pure +enum UploadFileType { + Dataset = 'dataset' +} +``` + +### UploadFile + +```jsx | pure +interface UploadFile { + uid: string; + size: number; + name: string; + fileName?: string; + lastModified?: number; + lastModifiedDate?: Date; + url?: string; + status?: UploadFileStatus; + percent?: number; + thumbUrl?: string; + originFileObj: RcFile; + response?: 
T; + error?: any; + linkProps?: any; + type: string; + xhr?: T; + preview?: string; +} +``` + +### UploadChangeParam + +```jsx | pure +interface UploadChangeParam { + file: T; + fileList: UploadFile[]; + event?: { + percent: number; + }; +} +``` + +## 常规用法 + +```tsx +import React, { useState } from 'react'; +import FileUpload, { UploadFileType } from 'components/FileUpload'; + +export default () => { + const [value, setValue] = useState([]); + return ( + <> + console.log(info)} + onError={(error) => console.log(error)} + /> + + ); +}; +``` + +## action + +接口使用了 arco upload 组件演示的地址 + +```tsx +import React, { useState } from 'react'; +import FileUpload, { UploadFileType } from 'components/FileUpload'; + +export default () => { + const [value, setValue] = useState([]); + return ( + <> + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/InputGroup.md b/web_console_v2/client/dumi/docs/Form/InputGroup.md new file mode 100644 index 000000000..8ac018b3d --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/InputGroup.md @@ -0,0 +1,199 @@ +# InputGroup + +栅格布局的表单组 + + + +### TColumn + +```typescript +interface TColumn { + /** INPUT for input[type="text"], INPUT_NUMBER for input[type="number"], TEXT for plain value display */ + type: 'INPUT' | 'INPUT_NUMBER' | 'TEXT'; + /** column title */ + title: string; + /** column tooltip */ + tooltip?: string; + /** column data field name */ + dataIndex: string; + /** column width proportion (the total is 24) */ + span: number; + /** the placeholder of current column's input item */ + placeholder?: string; + /** the unit text of current column's input item */ + unitLabel?: string; + /** validation rules of current column's input item */ + rules?: RulesProps[]; + /** precision of number(when type is 'INPUT_NUMBER') */ + precision?: number; + /** minimum value of number(when type is 'INPUT_NUMBER') */ + min?: number; + /** maximum value of number(when type is 'INPUT_NUMBER') */ + max?: number; + /** it is the same with InputNumber of ArcoDesign(when type is 'INPUT_NUMBER') */ + mode?: 'button' | 'embed'; + /** provide a way to process output value, the type of output would be limited to the same type of value. 
*/ + formatValue?: () => string | number; + /** disabled component */ + disabled?: boolean; +} +``` + +> 各个列的 span 总和应该等于 24,否则会抛出一个 error。这也是 ArcoDesign 栅格系统的每行 span 总和。 + +## 常规用法 + +用 `columns` 定义列,用 `onChange` 监听值变化。 + +```jsx +import React from 'react'; +import InputGroup from 'components/InputGroup'; + +const columns = [ + { + title: 'first name', + dataIndex: 'firstName', + span: 10, + type: 'INPUT', + }, + { + title: 'last name', + dataIndex: 'lastName', + span: 10, + type: 'INPUT', + tooltip: "I'm tooltip", + }, + { + title: 'age', + dataIndex: 'age', + span: 4, + type: 'INPUT_NUMBER', + mode: 'button', + min: 1, + max: 100, + }, +]; + +export default () => { + return ( + { + console.log(values); + }} + /> + ); +}; +``` + +## 受控模式 + +在使用了 `value` 属性后,组件就处于受控模式,必须通过修改 `value` 属性来控制组件的值。 + +在此模式下,添加行和删除行都会调用 `onChange`,如果外部接受在 `onChange` 中传入的值后,**不用**它来重新设置 `value`,那组件的值就**不会**发生变化。 + +```jsx +import React, { useState } from 'react'; +import InputGroup from 'components/InputGroup'; + +const columns = [ + { + title: 'first name', + dataIndex: 'firstName', + span: 9, + type: 'INPUT', + }, + { + title: 'last name', + dataIndex: 'lastName', + span: 9, + type: 'INPUT', + }, + { + title: 'age', + dataIndex: 'age', + span: 6, + type: 'INPUT_NUMBER', + mode: 'button', + min: 1, + max: 100, + }, +]; + +const initialValue = [ + { + firstName: 'Steve', + lastName: 'Curry', + age: 34, + }, + { + firstName: 'LerBron', + lastName: 'James', + age: 37, + }, + { + firstName: 'Bryant', + lastName: 'Kobe', + age: 45, + }, +]; + +export default () => { + const [value, setValue] = useState(initialValue); + return ( + { + setValue(newValue); + }} + /> + ); +}; +``` + +## 子组件 + +### CpuInput + +CPU 输入器,虽然显示的单位是`Core`,但是组件内部已经转换了一层,组件期望输入输出的单位都是`m`,转为公式为`1Core = 1000m` + +```tsx +import React, { useState } from 'react'; +import { CpuInput } from 'components/InputGroup/NumberTextInput'; + +export default () => { + const [value, setValue] = useState('2000m'); + return ( + { + console.log(newValue); + setValue(newValue); + }} + /> + ); +}; +``` + +### MemInput + +内存输入器,组件期望输入输出的单位都是`Gi` + +```tsx +import React, { useState } from 'react'; +import { MemInput } from 'components/InputGroup/NumberTextInput'; + +export default () => { + const [value, setValue] = useState('1Gi'); + return ( + { + console.log(newValue); + setValue(newValue); + }} + /> + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/MultiSelect.md b/web_console_v2/client/dumi/docs/Form/MultiSelect.md new file mode 100644 index 000000000..9031b1846 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/MultiSelect.md @@ -0,0 +1,157 @@ +# MultiSelect + +基于 [Arco Select](xxx) 封装,面向 UX 设计图的多选下拉框,具有全选功能,并显示当前选中的个数 + + + +也支持传入 [Arco Select](xxx) 原有的 props + +### Option + +每个 item 格式定义 + +```jsx | pure +type OptionItem = { + /** Display label */ + label: string, + /** Form value */ + value: any, +}; +``` + +## 常规用法 + +```jsx +import React, { useState } from 'react'; +import i18n from '../../../src/i18n/index.ts'; +import MultiSelect from 'components/MultiSelect'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState([]); + return ( + <> + { + setValue(val); + }} + allowClear + /> + + ); +}; +``` + +## IsHideHeader + +```jsx +import React, { useState } from 'react'; +import i18n from '../../../src/i18n/index.ts'; +import MultiSelect from 
'components/MultiSelect'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState([]); + return ( + <> + { + setValue(val); + }} + allowClear + isHideHeader={true} + /> + + ); +}; +``` + +## IsHideIndex + +```jsx +import React, { useState } from 'react'; +import i18n from '../../../src/i18n/index.ts'; +import MultiSelect from 'components/MultiSelect'; + +const options = [ + { + value: '1', + label: 'label1', + }, + { + value: '2', + label: 'label2', + }, + { + value: '3', + label: 'label3', + }, + { + value: '4', + label: 'label4', + }, +]; + +export default () => { + const [value, setValue] = useState([]); + return ( + <> + { + setValue(val); + }} + allowClear + isHideIndex={true} + /> + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Form/ResourceConfig.md b/web_console_v2/client/dumi/docs/Form/ResourceConfig.md new file mode 100644 index 000000000..5886b0112 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Form/ResourceConfig.md @@ -0,0 +1,90 @@ +# ResourceConfig + +资源配置组件,基于 [Arco Collapse](https://arco.design/react/components/collapse#api) 和 [InputGroup](/form/input-group) 封装 + +不同的 `algorithmType` 算法类型,会显示不同的布局,目前有 2 大类,Tree 算法类型和 NN 算法类型。组件默认显示 Tree 算法类型的布局 + + + +### Option + +```tsx | pure +type Value = { + resource_type: ResourceTemplateType | `${ResourceTemplateType}`; + master_cpu?: string; + master_mem?: string; + master_replicas?: string; + ps_cpu?: string; + ps_mem?: string; + ps_replicas?: string; + worker_cpu?: string; + worker_mem?: string; + worker_replicas?: string; +}; + +enum ResourceTemplateType { + HIGH = 'high', + MEDIUM = 'medium', + LOW = 'low', + CUSTOM = 'custom', +} + +enum AlgorithmType { + TREE_VERTICAL = 'TREE_VERTICAL', + NN_VERTICAL = 'NN_VERTICAL', + NN_HORIZONTAL = 'NN_HORIZONTAL', +} +``` + +## 常规用法 + +```tsx +import React, { useState } from 'react'; +import ResourceConfig from 'components/ResourceConfig'; + +export default () => { + return ( + { + console.log('val', val); + }} + /> + ); +}; +``` + +## 受控模式 + +```tsx +import React, { useState } from 'react'; +import ResourceConfig from 'components/ResourceConfig'; + +export default () => { + const [value, setValue] = useState({ + master_cpu: '1000m', + master_mem: '64Gi', + master_replicas: '2', + ps_cpu: '4000m', + ps_mem: '16Gi', + ps_replicas: '3', + resource_type: 'custom', + worker_cpu: '4000m', + worker_mem: '128Gi', + worker_replicas: '1', + }); + return ( + <> + { + console.log('val', val); + setValue(val); + }} + /> + + ); +}; +``` \ No newline at end of file diff --git a/web_console_v2/client/dumi/docs/Layout/GridRow.md b/web_console_v2/client/dumi/docs/Layout/GridRow.md new file mode 100644 index 000000000..e5fd0f0b4 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Layout/GridRow.md @@ -0,0 +1,77 @@ +# GridRow + +Row component with ability to specify gap between items + + + +# 默认行为 + +默认垂直居中,水平居左,gap 为 0 + +```jsx +import React from 'react'; +import GridRow from 'components/_base/GridRow'; + +export default () => ( + <> + +
Children
+
Children
+
Children
+
+ +); +``` + +## 水平居中 + +```jsx +import React from 'react'; +import GridRow from 'components/_base/GridRow'; + +export default () => ( + <> + +
Children
+
Children
+
Children
+
+ +); +``` + +## Gap + +gap = 14 + +```jsx +import React from 'react'; +import GridRow from 'components/_base/GridRow'; + +export default () => ( + <> + +
Children
+
Children
+
Children
+
+ +); +``` + +gap = 28 + +```jsx +import React from 'react'; +import GridRow from 'components/_base/GridRow'; + +export default () => ( + <> + +
Children
+
Children
+
Children
+
+ +); +``` diff --git a/web_console_v2/client/dumi/docs/Layout/SharedPageLayout.md b/web_console_v2/client/dumi/docs/Layout/SharedPageLayout.md new file mode 100644 index 000000000..e6395c331 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Layout/SharedPageLayout.md @@ -0,0 +1,401 @@ +# SharedPageLayout + +统一的 Content 页面布局,含有标题/中心标题/padding 等配置 + +> 注意 ⚠️: 因为 `` 内部引入了 `recoil` 相关的函数,所以 Demo 代码中用 `` 作为根组件,防止报错。 + +> 主要原因是 `removeSidebar` 这个 props 属性导致,他设置`recoil`某个`atom`下的变量`hideSidebar`为 true。 + +> 现在最新隐藏侧边栏的方案(最新方案是在 React-Router 上配置 Layout 布局)已经不需要这个`removeSidebar`,所以使用了`@deprecated`进行标记,以后会把这个属性删除。 + + + +## 默认 + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## Title + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## CenterTitle + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## Tip + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## CardPadding + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## isHideHeader + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## IsNestSpinFlexContainer + +When isNestSpinFlexContainer is true + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import { Spin } from '@arco-design/web-react'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + + +
Children
+
Children
+
Children
+
+
+
+); +``` + +When isNestSpinFlexContainer is false + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import { Spin } from '@arco-design/web-react'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + + +
Children
+
Children
+
Children
+
+
+
+); +``` + +## contentWrapByCard + +When contentWrapByCard is true + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +When contentWrapByCard is false + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + +
Children
+
Children
+
Children
+
+
+); +``` + +## isShowFixedBottomLayout + +isShowFixedBottomLayout 为 true 时,会显示固定在底部的 footer 布局,默认自带 2 个按钮(文案可以通过`bottomOkText`/`bottomCancelText`来更改),一个 tip 提示组件(文案可以通过`bottomTip`来更改,内部实际上使用[TitleWithIcon](/display/title-with-icon)来实现) + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + alert('ok')} + onBottomCancelClick={() => alert('cancel')} + > +
Children
+
Children
+
Children
+
+
+); +``` + +也可以通过`renderFixedBottomLayout`来自定义具体的渲染内容 + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; + +export default () => ( + + { + return
I am a custom render layout
; + }} + > +
Children
+
Children
+
Children
+
+
+); +``` + +## 子组件 + +### 常量 + +`PAGE_SECTION_PADDING` 被用作为 `cardPadding` 的默认值 + +```jsx | pure +export const PAGE_SECTION_PADDING = 20; +``` + +### RemovePadding + +用作抵消 `SharedPageLayout` 卡片布局默认的 padding + +```jsx | pure +export const RemovePadding = styled.div` + margin: -${PAGE_SECTION_PADDING}px -${PAGE_SECTION_PADDING}px 0; +`; +``` + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout, { RemovePadding } from 'components/SharedPageLayout'; + +export default () => ( + + + +
Children
+
+ +
Children
+
Children
+ + +
Children
+
+
+
+); +``` + +在项目中,常常与 Tabs 配合 + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import { Tabs } from '@arco-design/web-react'; +import SharedPageLayout, { RemovePadding } from 'components/SharedPageLayout'; + +export default () => ( + + + + + + + + +
Children
+
Children
+
Children
+
+
+); +``` + +### FormHeader + +在 `RemovePadding`的基础上,显示卡片布局的标题 + +```jsx | pure +export const FormHeader = styled.h3` + display: flex; + height: 46px; + font-size: 16px; + line-height: 24px; + padding: 12px 20px; + border-bottom: 1px solid var(--lineColor); + margin: -${PAGE_SECTION_PADDING}px -${PAGE_SECTION_PADDING}px 0; +`; +``` + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout, { FormHeader } from 'components/SharedPageLayout'; + +export default () => ( + + + Card title +
Children
+
Children
+
Children
+
+
+); +``` diff --git a/web_console_v2/client/dumi/docs/Misc/ClickToCopy.md b/web_console_v2/client/dumi/docs/Misc/ClickToCopy.md new file mode 100644 index 000000000..b6fb74d6d --- /dev/null +++ b/web_console_v2/client/dumi/docs/Misc/ClickToCopy.md @@ -0,0 +1,55 @@ +# ClickToCopy + +Copy 拷贝文本的组件 + +除了组件的形式,也可以使用核心 API `copyToClipboard` + +```js +import { copyToClipboard } from 'shared/helpers'; + +const isOK = copyToClipboard(text); + +if (isOK) { + message.success('Copied success!'); +} else { + message.error('Copied fail!'); +} +``` + + + +## 常规使用 + +目前需要手动从 props 传入所需要拷贝的文本,注意容器 width 宽度,它会影响最终的点击范围 + +```jsx +import React from 'react'; +import ClickToCopy from 'components/ClickToCopy'; + +export default () => ( + <> +
+ Click me to Copy +
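+        {/* the string that is actually copied is supplied through props; the children above are only the visible click target, and the container width bounds the clickable area */}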
+ +); +``` + +## Tip + +`successTip` 为 copy 拷贝成功的提示文案 + +`failTip` 为 copy 拷贝失败的提示文案 + +```jsx +import React from 'react'; +import ClickToCopy from 'components/ClickToCopy'; + +export default () => ( + <> + + Click me to Copy + + +); +``` diff --git a/web_console_v2/client/dumi/docs/Misc/CodeEditorModal.md b/web_console_v2/client/dumi/docs/Misc/CodeEditorModal.md new file mode 100644 index 000000000..11bd35df9 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Misc/CodeEditorModal.md @@ -0,0 +1,228 @@ +# CodeEditorModal + +代码编辑器组件,具有文件目录树和代码编辑器和标签 Tab 的功能,支持同步/异步两种模式 + +导出多种类型组件 + +1. BaseCodeEditor, 核心代码 +2. BaseCodeEditor.AlgorithmProject, 为了方便外部调用,封装了与 AlgorithmProject 相关的方法,只需要传 AlgorithmProject 的 id 即可,默认开启异步模式 +3. BaseCodeEditor.Algorithm, 为了方便外部调用,封装了与 Algorithm 相关的方法,只需要传 Algorithm 的 id 即可,默认开启异步模式 + +4. CodeEditorModal,在 BaseCodeEditor 的基础上,用 Modal 包括起来,全屏显示 +5. [CodeEditorModal.AlgorithmProject](#algorithmproject), 为了方便外部调用,封装了与 AlgorithmProject 相关的方法,只需要传 AlgorithmProject 的 id 即可,默认开启异步模式 +6. [CodeEditorModal.Algorithm](#algorithm), 为了方便外部调用,封装了与 Algorithm 相关的方法,只需要传 Algorithm 的 id 即可,默认开启异步模式 +7. [CodeEditorModal.AlgorithmProjectFormButton](#algorithmprojectformbutton),在 CodeEditorModal.AlgorithmProject 的基础上,根据编辑算法页面的需要,封装了按钮组件,点击即可显示全屏幕的代码编辑器,只需要传 AlgorithmProject 的 id 即可,默认开启异步模式 + +在同步模式(isAsyncMode = false)下,数据从 `initialFileData` 中获取初始值,每次进行文件的操作时,`不会`调用接口,在点击右上角的`保存`按钮时,会把当前最新的 fileData 数据传递出去,给外部使用。点击`重置`按钮,会自动恢复成 `initialFileData` 的数据 + +在异步模式(isAsyncMode = true)下,每进行文件操作时,都会在内部`调用接口`来保存文件内容,包括新增文件,删除文件,重命名文件,编辑文件内容等 + + + +## 基础用法 + +### 同步模式 + +```tsx +import React, { useState } from 'react'; +import CodeEditorModal from 'components/CodeEditorModal'; + +const fileData = { + 'owner.py': '# coding: utf-8\n', + 'leader/main.py': + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, 
initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'leader/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', + 'follower/main.py': + "# coding: utf-8\n# encoding=utf8\nimport logging\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\nimport os\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'y': tf.constant(0, shape=[1])}\n return features, labels\n \n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['embedding'] = tf.placeholder(dtype=tf.float32, shape=[1, 16], name='embedding')\n receiver_tensors = {\n 'embedding': features['embedding']\n }\n return tf.estimator.export.ServingInputReceiver(\n features, receiver_tensors)\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n xavier_initializer = tf.glorot_normal_initializer()\n\n fc1_size = 16\n with tf.variable_scope('follower'):\n w1f = tf.get_variable('w1f', shape=[\n fc1_size, 1], dtype=tf.float32, initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable(\n 'b1f', shape=[1], dtype=tf.float32, initializer=tf.zeros_initializer())\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding = model.recv('embedding', tf.float32, require_grad=True)\n else:\n embedding = features['embedding']\n \n logits = tf.nn.bias_add(tf.matmul(embedding, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['y'], tf.float32)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n print('==============================================================')\n print(tf.shape(y))\n print(tf.shape(pred))\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=[logging_hook])\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'follower/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', +}; + +export default () => { + const [visible, setVisible] = useState(false); + + return ( + <> + + { + setVisible(false); + }} + onSave={(finalFiledata) => { + console.log(finalFiledata); + }} + /> + + ); +}; +``` + +### 异步模式 + +在异步模式(IsAsyncMode = true)下,需要提供以下 2 个函数,再内部自行调用接口,并转换格式 + +``` +getFileTreeList?: () => Promise; +getFile?: 
(filePath: string) => Promise; +``` + +1. `getFileTreeList`,用于获取文件目录树的内容 +2. `getFile`,用于获取文件的内容,他接收一个文件路径作为参数,例如 'leader/main.py' + +`getFileTreeList` 返回 Promise,而且 ResolvedValue 为以下格式的数组 + +```tsx | pure +interface FileTreeNode { + filename: string; + path: string; + /** File size */ + size: number; + /** Last Time Modified */ + mtime: number; + is_directory: boolean; + files: FileTreeNode[]; +} +``` + +`getFile` 返回 Promise,而且 ResolvedValue 为文件内容的字符串 + +为了方便调用,不用每次都输入`getFileTreeList`/`getFile`,封装了[AlgorithmProject](#algorithmproject)和[Algorithm](#algorithm) 2 个组件 + +```tsx +import React, { useState } from 'react'; +import CodeEditorModal from 'components/CodeEditorModal'; + +import { + fetchAlgorithmProjectFileTreeList, + fetchAlgorithmProjectFileContentDetail, +} from 'services/algorithm'; + +export default () => { + const [visible, setVisible] = useState(false); + + return ( + <> + + { + setVisible(false); + }} + getFileTreeList={() => fetchAlgorithmProjectFileTreeList(3).then((res) => res.data)} + getFile={(filePath: string) => + fetchAlgorithmProjectFileContentDetail(3, { + path: filePath, + }).then((res) => res.data.content) + } + /> + + ); +}; +``` + +## 子组件 + +### AlgorithmProject + +为了方便外部调用,封装了与 AlgorithmProject 相关的方法,只需要传 AlgorithmProject 的 id 即可,默认开启异步模式 + +```tsx +import React, { useState } from 'react'; +import CodeEditorModal from 'components/CodeEditorModal'; + +export default () => { + const [visible, setVisible] = useState(false); + + return ( + <> + + { + setVisible(false); + }} + /> + + ); +}; +``` + +### Algorithm + +为了方便外部调用,封装了与 Algorithm 相关的方法,只需要传 Algorithm 的 id 即可,默认开启异步模式 + +```tsx +import React, { useState } from 'react'; +import CodeEditorModal from 'components/CodeEditorModal'; + +export default () => { + const [visible, setVisible] = useState(false); + + return ( + <> + + { + setVisible(false); + }} + /> + + ); +}; +``` + +### AlgorithmProjectFormButton + +在 CodeEditorModal.AlgorithmProject 的基础上,根据编辑算法页面的需要,封装了按钮组件,点击即可显示全屏幕的代码编辑器,只需要传 AlgorithmProject 的 id 即可,默认开启异步模式 + +可以额外设置设置 width 和 height + + + +```tsx +import React from 'react'; +import CodeEditorModal from 'components/CodeEditorModal'; + +export default () => { + return ( + <> + + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Misc/CountTime.md b/web_console_v2/client/dumi/docs/Misc/CountTime.md new file mode 100644 index 000000000..501c89a82 --- /dev/null +++ b/web_console_v2/client/dumi/docs/Misc/CountTime.md @@ -0,0 +1,165 @@ +# CountTime + +计时器组件,支持倒计时 CountDown / 计时 CountUp,有 renderProps 模式 + + + +## 常规使用 + +### CountUp + +```tsx +import React from 'react'; +import CountTime from 'components/CountTime'; + +export default () => ( + <> +
Count up begins at 10 seconds
+ + +); +``` + +### CountDown + +```tsx +import React from 'react'; +import CountTime from 'components/CountTime'; + +export default () => ( + <> +
Count down begins at 10 seconds
+ + +); +``` + +## IsOnlyShowSecond + +```tsx +import React from 'react'; +import CountTime from 'components/CountTime'; + +export default () => ( + <> +
Count down begins at 100 seconds
+ + +); +``` + +## IsStatic + +isStatic 为 true 时,停止计时器 + +```tsx +import React from 'react'; +import CountTime from 'components/CountTime'; + +export default () => ( + <> +
Count down begins at 10 seconds
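+      {/* isStatic={true} freezes the timer, so the countdown stays at its current value */}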
+ + +); +``` + +根据这个特性,可以动态控制计时器的开始和暂停 + +```tsx +import React, { useState } from 'react'; +import CountTime from 'components/CountTime'; + +export default () => { + const [isStatic, setIsStatic] = useState(false); + + return ( + <> +
Count down begins at 10 seconds
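+      {/* toggling isStatic with the button below pauses and resumes the countdown */}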
+ + + + + ); +}; +``` + +## IsResetOnChange + +当 isResetOnChange = true,并且 isStatic 从 false 变为 true 时,会重置计时器为传入的 time,并停止计时器 + +```tsx +import React, { useState } from 'react'; +import CountTime from 'components/CountTime'; + +export default () => { + const [isStatic, setIsStatic] = useState(false); + + return ( + <> +
Count down begins at 10 seconds
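+      {/* with isResetOnChange, flipping isStatic from false to true resets the timer to the initial `time` and stops it */}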
+ + + + + ); +}; +``` + +## IsRenderPropsMode + +当 isRenderPropsMode = true,可以使用 render props 模式,此时可以在 children 中传入一个函数,格式为`(formatted, noFormattedTime) => React.ReactNode` + +```tsx +import React from 'react'; +import CountTime from 'components/CountTime'; + +export default () => { + return ( + <> +
Count down begins at 10 seconds
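+      {/* in render-props mode, children is a function: (formattedTime, time) => React.ReactNode */}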
+ + {(formattedTime: any, time: any) => { + return
current is: {time}s
; + }} +
+ + ); +}; +``` + +## OnCountDownFinish + +当 isCountDown = true,并且倒计时首次进入 0 秒时,会触发 1 次 onCountDownFinish 函数 + +```tsx +import React from 'react'; +import CountTime from 'components/CountTime'; + +export default () => { + return ( + <> +
Count down begins at 10 seconds
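+      {/* onCountDownFinish fires exactly once, the first time the countdown reaches 0s */}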
+ { + console.log('onCountDownFinish'); + }} + /> + + ); +}; +``` diff --git a/web_console_v2/client/dumi/docs/Navigation/BackButton.md b/web_console_v2/client/dumi/docs/Navigation/BackButton.md new file mode 100644 index 000000000..c6079396e --- /dev/null +++ b/web_console_v2/client/dumi/docs/Navigation/BackButton.md @@ -0,0 +1,92 @@ +# BackButton + +后退组件,带有二次确定对话框(默认关闭),需要自己实现 onClick 方法,常用于 ``的`title`上 + +> 注意 ⚠️: isShowConfirmModal 为 true 时,onClick 方法只会在点击二次确定对话框的`确定`按钮时才会触发 + + + +## 常规使用 + +```tsx +import React from 'react'; +import BackButton from 'components/BackButton'; + +export default () => ( + <> + { + alert('back'); + }} + > + back + + +); +``` + +## IsShowConfirmModal + +当 isShowConfirmModal = true 时,点击组件时,会出现一个用 [Modal.confirm](/feedback/modal#modalconfirm) 封装的二次确定对话框,点击确定时,会触发原来的 onClick 方法 + +```tsx +import React from 'react'; +import BackButton from 'components/BackButton'; + +export default () => ( + <> +
default modal title/content
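+      {/* with isShowConfirmModal, onClick fires only after the user confirms in the dialog */}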
+ { + alert('back'); + }} + > + back + +
custom modal title/content
+ { + alert('back'); + }} + > + back + + +); +``` + +## 与 SharedPageLayout 配合 + +```tsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; + +export default () => ( + + { + alert('back'); + }} + > + back + + } + > +
Children
+
Children
+
Children
+
+
+); +``` diff --git a/web_console_v2/client/dumi/docs/Navigation/BreadcrumbLink.md b/web_console_v2/client/dumi/docs/Navigation/BreadcrumbLink.md new file mode 100644 index 000000000..6f945de2f --- /dev/null +++ b/web_console_v2/client/dumi/docs/Navigation/BreadcrumbLink.md @@ -0,0 +1,70 @@ +# BreadcrumbLink + +面包屑组件 + + + +```ts +type path = { + /** Display label or i18n key */ + label: string; + /** Link */ + to?: string; +}; +``` + +## 常规使用 + +```tsx +import React from 'react'; +import BreadcrumbLink from 'components/BreadcrumbLink'; + +const paths = [ + { + label: 'navigation', + to: '/navigation', + }, + { + label: 'breadcrumb-link', + to: '/navigation/breadcrumb-link', + }, +]; + +export default () => ( + <> + + +); +``` + +## 与 SharedPageLayout 配合 + +```jsx +/** + * compact: true + */ +import React from 'react'; +import { RecoilRoot } from 'recoil'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BreadcrumbLink from 'components/BreadcrumbLink'; + +const paths = [ + { + label: 'navigation', + to: '/navigation', + }, + { + label: 'breadcrumb-link', + to: '/navigation/breadcrumb-link', + }, +]; +export default () => ( + + }> +
Children
+
Children
+
Children
+
+
+); +``` diff --git a/web_console_v2/client/dumi/docs/index.md b/web_console_v2/client/dumi/docs/index.md new file mode 100644 index 000000000..de7bdb221 --- /dev/null +++ b/web_console_v2/client/dumi/docs/index.md @@ -0,0 +1,3 @@ +# Index + +这里是隐私计算业务组件文档库 diff --git a/web_console_v2/client/dumi/src/global.less b/web_console_v2/client/dumi/src/global.less new file mode 100644 index 000000000..2b20a7580 --- /dev/null +++ b/web_console_v2/client/dumi/src/global.less @@ -0,0 +1,13 @@ +@import '~@arco-design/web-react/dist/css/arco.min.css'; + +@import '../../src/styles/themes/normal/normal.css'; +@import '../../src/styles/variables/normal.less'; +@import '../../src/styles/global.less'; + +body { + background-color: #fff; +} + +.arco-drawer-wrapper { + top: 0 !important; +} diff --git a/web_console_v2/client/jest.config.js b/web_console_v2/client/jest.config.js index 542d0ac68..8828be6fd 100644 --- a/web_console_v2/client/jest.config.js +++ b/web_console_v2/client/jest.config.js @@ -4,16 +4,15 @@ module.exports = { 'src/**/*.{ts,tsx}', '!src/**/*.d.ts', '!src/i18n/index.ts', - '!src/i18n/resources/modules/*.ts', - '!src/components/**/*.tsx', + '!src/i18n/resources/**/*.ts', '!src/stores/**/*.ts', '!src/typings/*.ts', '!src/views/**/*.tsx', - '!src/shared/variablePresets.ts', - '!src/shared/file.ts', - '!src/services/mocks/**/*.ts', - '!src/App.ts', - '!src/index.ts', + '!src/services/**/*.ts', + '!src/libs/*.ts', + '!src/App.tsx', + '!src/index.tsx', + '!src/components/IconPark/**/*.{ts,tsx}', ], setupFiles: ['react-app-polyfill/jsdom'], setupFilesAfterEnv: ['/tests/setup.ts'], @@ -32,9 +31,10 @@ module.exports = { transformIgnorePatterns: [ '[/\\\\]node_modules[/\\\\].+\\.(js|jsx|mjs|cjs|ts|tsx)$', '^.+\\.module\\.(css|sass|scss|less)$', - '/node_modules/(?!antd|@ant-design|rc-.+?|@babel/runtime).+(js|jsx)$', + '/node_modules/(?!antd|lodash-es|@ant-design|rc-.+?|@babel/runtime).+(js|jsx|ts|tsx)$', + 'node_modules/(?!(monaco-editor)/)', ], - modulePaths: [], + modulePaths: ['/src/'], moduleNameMapper: { '^react-native$': 'react-native-web', '^.+\\.module\\.(css|sass|scss|less)$': 'identity-obj-proxy', @@ -44,9 +44,13 @@ module.exports = { 'services/(.*)': '/src/services/$1', 'typings/(.*)': '/src/typings/$1', 'views/(.*)': '/src/views/$1', - 'components/(.*)': ['/src/components/$1', '@ant-design/icons'], + // It will replace lodash-es with the commonjs version during testing runtime. 
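+      // e.g. `import { debounce } from 'lodash-es'` in app code resolves to the
+      // CommonJS `lodash` package while tests run, so Jest does not need to
+      // transform the ESM sources of `lodash-es` inside node_modules.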
+ '^lodash-es$': 'lodash', + '^components/(.*)': '/src/components/$1', 'i18n/(.*)': '/src/i18n/$1', i18n: '/src/i18n/index.ts', + 'stores/(.*)': '/src/stores/$1', + 'assets/(.*)': '/src/assets/$1', }, moduleFileExtensions: [ 'web.js', diff --git a/web_console_v2/client/package.json b/web_console_v2/client/package.json index 0aa988a8a..bb5548351 100644 --- a/web_console_v2/client/package.json +++ b/web_console_v2/client/package.json @@ -9,25 +9,36 @@ "test": "node scripts/test.js", "test:coverage": "npx jest --coverage", "lint": "eslint '*/**/*.{js,ts,tsx}' --fix", - "pritter": "npx prettier --write ./src" + "lint:prod": "npx cross-env NODE_ENV=production eslint '*/**/*.{js,ts,tsx}' --fix", + "pritter": "npx prettier --write ./src", + "build:theme": "node scripts/multiLessVarsTransform.js", + "build:theme:watch": "cross-env NODE_ENV=development node scripts/multiLessVarsTransform.js", + "dumi": "cross-env APP_ROOT=dumi dumi dev", + "dumi-build": "cross-env APP_ROOT=dumi dumi build", + "ts:check": "tsc" }, "dependencies": { - "@ant-design/icons": "^4.6.2", - "@formily/antd": "^1.3.8", - "@formily/antd-components": "^1.3.8", - "@monaco-editor/react": "^4.0.11", - "@welldone-software/why-did-you-render": "^6.1.1", - "antd": "^4.14.0", + "@arco-design/web-react": "2.28.2", + "@arco-themes/react-privacy-computing": "0.0.4", + "@arco-themes/react-privacy-computing-bioland": "0.0.4", + "@formily/core": "^2.2.12", + "@formily/react": "^2.2.12", + "@monaco-editor/react": "^4.4.6", "axios": "^0.21.0", - "chart.js": "^3.2.1", "classnames": "^2.2.6", - "dayjs": "^1.9.7", + "dayjs": "^1.10.8", + "debounce-promise": "^3.1.2", "i18next": "^19.8.3", "ip-port-regex": "^2.0.0", "keyboardjs": "^2.6.4", "lodash-es": "^4.17.15", + "monaco-editor": "^0.34.1", + "mpld3": "0.5.2", "pubsub-js": "^1.9.2", + "qs": "^6.10.1", "rc-menu": "^8.10.6", + "rc-upload": "^4.3.1", + "re-resizable": "^6.9.0", "react": "^17.0.1", "react-app-polyfill": "^2.0.0", "react-chartjs-2": "^3.0.3", @@ -40,32 +51,38 @@ "react-router": "^5.2.0", "react-router-dom": "^5.2.0", "react-use": "^15.3.4", - "recoil": "^0.1.2", + "recoil": "0.3.1", "store2": "^2.12.0", "styled-components": "^5.2.1", "utility-types": "^3.10.0" }, "devDependencies": { + "@arco-design/webpack-plugin": "1.7.0", "@babel/core": "7.12.3", + "@babel/plugin-proposal-class-properties": "^7.18.6", "@pmmmwh/react-refresh-webpack-plugin": "0.4.2", + "@simbathesailor/use-what-changed": "^2.0.0", "@svgr/webpack": "5.4.0", "@testing-library/jest-dom": "^5.11.5", "@testing-library/react": "^11.1.1", + "@testing-library/react-hooks": "^7.0.2", "@testing-library/user-event": "^12.2.0", + "@types/chart.js": "^2.9.34", "@types/classnames": "^2.2.11", + "@types/debounce-promise": "^3.1.4", "@types/jest": "^26.0.15", "@types/keyboardjs": "^2.5.0", "@types/less": "^3.0.1", - "@types/lodash": "^4.14.164", + "@types/lodash-es": "^4.17.4", "@types/node": "^12.19.3", "@types/pubsub-js": "^1.8.1", + "@types/qs": "^6.9.7", "@types/react": "^16.9.55", "@types/react-dom": "^16.9.9", "@types/react-router-dom": "^5.1.6", "@types/styled-components": "^5.1.4", "@typescript-eslint/eslint-plugin": "^4.5.0", "@typescript-eslint/parser": "^4.5.0", - "antd-dayjs-webpack-plugin": "^1.0.6", "babel-eslint": "^10.1.0", "babel-jest": "^26.6.0", "babel-loader": "8.1.0", @@ -74,9 +91,12 @@ "bfj": "^7.0.2", "camelcase": "^6.1.0", "case-sensitive-paths-webpack-plugin": "2.3.0", + "chart.js": "^3.5.0", + "cross-env": "^7.0.3", "css-loader": "4.3.0", "dotenv": "8.2.0", "dotenv-expand": "5.1.0", + "dumi": "^1.1.30", 
"eslint": "^7.11.0", "eslint-config-prettier": "^6.15.0", "eslint-config-react-app": "^6.0.0", @@ -92,17 +112,19 @@ "file-loader": "6.1.1", "fs-extra": "^9.0.1", "html-webpack-plugin": "4.5.0", + "http-proxy-middleware": "^2.0.6", "identity-obj-proxy": "3.0.0", "jest": "26.6.0", "jest-circus": "26.6.0", "jest-resolve": "26.6.0", + "jest-styled-components": "^7.0.5", "jest-watch-typeahead": "0.6.1", "less": "^3.12.2", "less-loader": "^7.0.2", "less-vars-to-js": "^1.3.0", "lint-staged": "^10.5.1", - "lodash": "^4.17.21", "mini-css-extract-plugin": "0.11.3", + "monaco-editor-webpack-plugin": "^7.0.1", "optimize-css-assets-webpack-plugin": "5.0.4", "pnp-webpack-plugin": "1.6.4", "postcss-flexbugs-fixes": "4.2.1", @@ -111,6 +133,7 @@ "postcss-preset-env": "6.7.0", "postcss-safe-parser": "5.0.2", "prettier": "^2.1.2", + "raw-loader": "^4.0.2", "resolve": "1.18.1", "resolve-url-loader": "^3.1.2", "sass-loader": "8.0.2", @@ -126,7 +149,8 @@ "webpack-cli": "^4.5.0", "webpack-dev-server": "3.11.0", "webpack-manifest-plugin": "2.2.0", - "workbox-webpack-plugin": "5.1.4" + "workbox-webpack-plugin": "5.1.4", + "xhr-mock": "^2.5.1" }, "lint-staged": { "./src/**/*.{js,ts,tsx}": [ @@ -158,5 +182,6 @@ "presets": [ "react-app" ] - } + }, + "proxy": "xxx" } diff --git a/web_console_v2/client/pnpm-lock.yaml b/web_console_v2/client/pnpm-lock.yaml index 2e8578d93..5aa8d9689 100644 --- a/web_console_v2/client/pnpm-lock.yaml +++ b/web_console_v2/client/pnpm-lock.yaml @@ -1,23 +1,166 @@ +lockfileVersion: 5.3 + +overrides: + styled-components: ^5 + +specifiers: + '@arco-design/web-react': 2.28.2 + '@arco-design/webpack-plugin': 1.7.0 + '@arco-themes/react-privacy-computing': 0.0.4 + '@arco-themes/react-privacy-computing-bioland': 0.0.4 + '@babel/core': 7.12.3 + '@babel/plugin-proposal-class-properties': ^7.18.6 + '@formily/core': ^2.2.12 + '@formily/react': ^2.2.12 + '@monaco-editor/react': ^4.4.6 + '@pmmmwh/react-refresh-webpack-plugin': 0.4.2 + '@simbathesailor/use-what-changed': ^2.0.0 + '@svgr/webpack': 5.4.0 + '@testing-library/jest-dom': ^5.11.5 + '@testing-library/react': ^11.1.1 + '@testing-library/react-hooks': ^7.0.2 + '@testing-library/user-event': ^12.2.0 + '@types/chart.js': ^2.9.34 + '@types/classnames': ^2.2.11 + '@types/debounce-promise': ^3.1.4 + '@types/jest': ^26.0.15 + '@types/keyboardjs': ^2.5.0 + '@types/less': ^3.0.1 + '@types/lodash-es': ^4.17.4 + '@types/node': ^12.19.3 + '@types/pubsub-js': ^1.8.1 + '@types/qs': ^6.9.7 + '@types/react': ^16.9.55 + '@types/react-dom': ^16.9.9 + '@types/react-router-dom': ^5.1.6 + '@types/styled-components': ^5.1.4 + '@typescript-eslint/eslint-plugin': ^4.5.0 + '@typescript-eslint/parser': ^4.5.0 + axios: ^0.21.0 + babel-eslint: ^10.1.0 + babel-jest: ^26.6.0 + babel-loader: 8.1.0 + babel-plugin-named-asset-import: ^0.3.7 + babel-preset-react-app: ^10.0.0 + bfj: ^7.0.2 + camelcase: ^6.1.0 + case-sensitive-paths-webpack-plugin: 2.3.0 + chart.js: ^3.5.0 + classnames: ^2.2.6 + cross-env: ^7.0.3 + css-loader: 4.3.0 + dayjs: ^1.10.8 + debounce-promise: ^3.1.2 + dotenv: 8.2.0 + dotenv-expand: 5.1.0 + dumi: ^1.1.30 + eslint: ^7.11.0 + eslint-config-prettier: ^6.15.0 + eslint-config-react-app: ^6.0.0 + eslint-plugin-flowtype: ^5.2.0 + eslint-plugin-import: ^2.22.1 + eslint-plugin-jest: ^24.1.0 + eslint-plugin-jsx-a11y: ^6.3.1 + eslint-plugin-prettier: ^3.1.4 + eslint-plugin-react: ^7.21.5 + eslint-plugin-react-hooks: ^4.2.0 + eslint-plugin-testing-library: ^3.9.2 + eslint-webpack-plugin: ^2.1.0 + file-loader: 6.1.1 + fs-extra: ^9.0.1 + html-webpack-plugin: 4.5.0 + 
http-proxy-middleware: ^2.0.6 + i18next: ^19.8.3 + identity-obj-proxy: 3.0.0 + ip-port-regex: ^2.0.0 + jest: 26.6.0 + jest-circus: 26.6.0 + jest-resolve: 26.6.0 + jest-styled-components: ^7.0.5 + jest-watch-typeahead: 0.6.1 + keyboardjs: ^2.6.4 + less: ^3.12.2 + less-loader: ^7.0.2 + less-vars-to-js: ^1.3.0 + lint-staged: ^10.5.1 + lodash-es: ^4.17.15 + mini-css-extract-plugin: 0.11.3 + monaco-editor: ^0.34.1 + monaco-editor-webpack-plugin: ^7.0.1 + mpld3: 0.5.2 + optimize-css-assets-webpack-plugin: 5.0.4 + pnp-webpack-plugin: 1.6.4 + postcss-flexbugs-fixes: 4.2.1 + postcss-loader: 3.0.0 + postcss-normalize: 8.0.1 + postcss-preset-env: 6.7.0 + postcss-safe-parser: 5.0.2 + prettier: ^2.1.2 + pubsub-js: ^1.9.2 + qs: ^6.10.1 + raw-loader: ^4.0.2 + rc-menu: ^8.10.6 + rc-upload: ^4.3.1 + re-resizable: ^6.9.0 + react: ^17.0.1 + react-app-polyfill: ^2.0.0 + react-chartjs-2: ^3.0.3 + react-dev-utils: ^11.0.0 + react-dom: ^17.0.1 + react-flow-renderer: ^9.1.1 + react-i18next: ^11.7.3 + react-query: ^3.9.8 + react-refresh: ^0.8.3 + react-router: ^5.2.0 + react-router-dom: ^5.2.0 + react-use: ^15.3.4 + recoil: 0.3.1 + resolve: 1.18.1 + resolve-url-loader: ^3.1.2 + sass-loader: 8.0.2 + semver: 7.3.2 + store2: ^2.12.0 + strip-json-comments: ^3.1.1 + style-loader: 1.3.0 + styled-components: ^5 + terser-webpack-plugin: 4.2.3 + ts-pnp: 1.2.0 + tsconfig-paths-webpack-plugin: ^3.3.0 + typescript: ^4.0.5 + url-loader: 4.1.1 + utility-types: ^3.10.0 + webpack: 4.44.2 + webpack-cli: ^4.5.0 + webpack-dev-server: 3.11.0 + webpack-manifest-plugin: 2.2.0 + workbox-webpack-plugin: 5.1.4 + xhr-mock: ^2.5.1 + dependencies: - '@ant-design/icons': 4.6.2_react-dom@17.0.2+react@17.0.2 - '@formily/antd': 1.3.13_b13a12cfbb184a60a7e1275d8262e450 - '@formily/antd-components': 1.3.13_b13a12cfbb184a60a7e1275d8262e450 - '@monaco-editor/react': 4.1.0_react-dom@17.0.2+react@17.0.2 - '@welldone-software/why-did-you-render': 6.1.1_react@17.0.2 - antd: 4.14.1_2235c505ed33ea6efd93d3050f896208 + '@arco-design/web-react': 2.28.2_d8837eed98748ff5a1dd894fdd80cd72 + '@arco-themes/react-privacy-computing': 0.0.4_@arco-design+web-react@2.28.2 + '@arco-themes/react-privacy-computing-bioland': 0.0.4_@arco-design+web-react@2.28.2 + '@formily/core': 2.2.12 + '@formily/react': 2.2.12_338bd5ec353cfb7439af722c4fbc028f + '@monaco-editor/react': 4.4.6_ec62f306aa7ee40038c222aca8db4940 axios: 0.21.1 - chart.js: 3.2.1 classnames: 2.2.6 - dayjs: 1.10.4 + dayjs: 1.10.8 + debounce-promise: 3.1.2 i18next: 19.9.2 ip-port-regex: 2.0.0 keyboardjs: 2.6.4 lodash-es: 4.17.21 + monaco-editor: 0.34.1 + mpld3: 0.5.2 pubsub-js: 1.9.3 + qs: 6.10.1 rc-menu: 8.10.6_react-dom@17.0.2+react@17.0.2 + rc-upload: 4.3.1_react-dom@17.0.2+react@17.0.2 + re-resizable: 6.9.0_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-app-polyfill: 2.0.0 - react-chartjs-2: 3.0.3_chart.js@3.2.1+react@17.0.2 + react-chartjs-2: 3.0.4_chart.js@3.5.0+react@17.0.2 react-dev-utils: 11.0.4 react-dom: 17.0.2_react@17.0.2 react-flow-renderer: 9.4.0_react-dom@17.0.2+react@17.0.2 @@ -27,31 +170,38 @@ dependencies: react-router: 5.2.0_react@17.0.2 react-router-dom: 5.2.0_react@17.0.2 react-use: 15.3.8_react-dom@17.0.2+react@17.0.2 - recoil: 0.1.3_react-dom@17.0.2+react@17.0.2 + recoil: 0.3.1_react-dom@17.0.2+react@17.0.2 store2: 2.12.0 styled-components: 5.2.1_react-dom@17.0.2+react@17.0.2 utility-types: 3.10.0 + devDependencies: + '@arco-design/webpack-plugin': 1.7.0_webpack@4.44.2 '@babel/core': 7.12.3 + '@babel/plugin-proposal-class-properties': 7.18.6_@babel+core@7.12.3 
'@pmmmwh/react-refresh-webpack-plugin': 0.4.2_d00fcc46a48175a4e289da7534b00e9a + '@simbathesailor/use-what-changed': 2.0.0_react@17.0.2 '@svgr/webpack': 5.4.0 '@testing-library/jest-dom': 5.11.10 '@testing-library/react': 11.2.5_react-dom@17.0.2+react@17.0.2 + '@testing-library/react-hooks': 7.0.2_react-dom@17.0.2+react@17.0.2 '@testing-library/user-event': 12.8.3 + '@types/chart.js': 2.9.34 '@types/classnames': 2.2.11 + '@types/debounce-promise': 3.1.4 '@types/jest': 26.0.22 '@types/keyboardjs': 2.5.0 '@types/less': 3.0.2 - '@types/lodash': 4.14.168 + '@types/lodash-es': 4.17.4 '@types/node': 12.20.7 '@types/pubsub-js': 1.8.2 + '@types/qs': 6.9.7 '@types/react': 16.14.5 '@types/react-dom': 16.9.12 '@types/react-router-dom': 5.1.7 '@types/styled-components': 5.1.9 '@typescript-eslint/eslint-plugin': 4.19.0_821acdc8bc493ad1aa2628c9b724d688 '@typescript-eslint/parser': 4.19.0_eslint@7.23.0+typescript@4.2.3 - antd-dayjs-webpack-plugin: 1.0.6_dayjs@1.10.4 babel-eslint: 10.1.0_eslint@7.23.0 babel-jest: 26.6.3_@babel+core@7.12.3 babel-loader: 8.1.0_427212bc1158d185e577033f19ca0757 @@ -60,9 +210,12 @@ devDependencies: bfj: 7.0.2 camelcase: 6.2.0 case-sensitive-paths-webpack-plugin: 2.3.0 + chart.js: 3.5.0 + cross-env: 7.0.3 css-loader: 4.3.0_webpack@4.44.2 dotenv: 8.2.0 dotenv-expand: 5.1.0 + dumi: 1.1.30_ab15ddca82409ccfb2f88ffa3dfddc1b eslint: 7.23.0 eslint-config-prettier: 6.15.0_eslint@7.23.0 eslint-config-react-app: 6.0.0_2fb64cc94b95fae32741d239fe65ddca @@ -78,17 +231,19 @@ devDependencies: file-loader: 6.1.1_webpack@4.44.2 fs-extra: 9.1.0 html-webpack-plugin: 4.5.0_webpack@4.44.2 + http-proxy-middleware: 2.0.6 identity-obj-proxy: 3.0.0 jest: 26.6.0 jest-circus: 26.6.0 jest-resolve: 26.6.0 + jest-styled-components: 7.0.5_styled-components@5.2.1 jest-watch-typeahead: 0.6.1_jest@26.6.0 less: 3.13.1 less-loader: 7.3.0_less@3.13.1+webpack@4.44.2 less-vars-to-js: 1.3.0 lint-staged: 10.5.4 - lodash: 4.17.21 mini-css-extract-plugin: 0.11.3_webpack@4.44.2 + monaco-editor-webpack-plugin: 7.0.1_d758ab496f5c143a3f97c41b30684737 optimize-css-assets-webpack-plugin: 5.0.4_webpack@4.44.2 pnp-webpack-plugin: 1.6.4_typescript@4.2.3 postcss-flexbugs-fixes: 4.2.1 @@ -97,6 +252,7 @@ devDependencies: postcss-preset-env: 6.7.0 postcss-safe-parser: 5.0.2 prettier: 2.2.1 + raw-loader: 4.0.2_webpack@4.44.2 resolve: 1.18.1 resolve-url-loader: 3.1.2 sass-loader: 8.0.2_webpack@4.44.2 @@ -113,68 +269,100 @@ devDependencies: webpack-dev-server: 3.11.0_webpack-cli@4.6.0+webpack@4.44.2 webpack-manifest-plugin: 2.2.0_webpack@4.44.2 workbox-webpack-plugin: 5.1.4_webpack@4.44.2 -lockfileVersion: 5.2 -overrides: - styled-components: ^5 + xhr-mock: 2.5.1 + packages: - /@ant-design/colors/6.0.0: + + /@arco-design/color/0.4.0: + resolution: {integrity: sha512-s7p9MSwJgHeL8DwcATaXvWT3m2SigKpxx4JA1BGPHL4gfvaQsmQfrLBDpjOJFJuJ2jG2dMt3R3P8Pm9E65q18g==} dependencies: - '@ctrl/tinycolor': 3.4.0 + color: 3.2.1 dev: false - resolution: - integrity: sha512-qAZRvPzfdWHtfameEGP2Qvuf838NhergR35o+EuVyB5XvSA98xod5r4utvi4TJ3ywmevm290g9nsCG5MryrdWQ== - /@ant-design/icons-svg/4.1.0: - dev: false - resolution: - integrity: sha512-Fi03PfuUqRs76aI3UWYpP864lkrfPo0hluwGqh7NJdLhvH4iRDc3jbJqZIvRDLHKbXrvAfPPV3+zjUccfFvWOQ== - /@ant-design/icons/4.6.2_react-dom@17.0.2+react@17.0.2: + + /@arco-design/web-react/2.28.2_d8837eed98748ff5a1dd894fdd80cd72: + resolution: {integrity: sha512-GLVzw9c+j8feSOdOv1Qk/XrQSt40bjlZOYtnLcJ0re6lpNyxbqtTxVXeoP1SsZeUcgTiYoSUS4e+7HVXUsJSqA==} + peerDependencies: + react: '>=16' + react-dom: '>=16' dependencies: - 
'@ant-design/colors': 6.0.0 - '@ant-design/icons-svg': 4.1.0 - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 + '@arco-design/color': 0.4.0 + '@babel/runtime': 7.20.13 + b-tween: 0.3.3 + b-validate: 1.4.4 + compute-scroll-into-view: 1.0.20 + dayjs: 1.11.7 + lodash: 4.17.21 + number-precision: 1.6.0 react: 17.0.2 + react-dom: 17.0.2_react@17.0.2 + react-focus-lock: 2.9.3_5170878e5e8a60dfb58a26e1cbcc99ef + react-transition-group: 4.4.5_react-dom@17.0.2+react@17.0.2 + resize-observer-polyfill: 1.5.1 + scroll-into-view-if-needed: 2.2.20 + shallowequal: 1.1.0 + transitivePeerDependencies: + - '@types/react' dev: false - engines: - node: '>=8' + + /@arco-design/webpack-plugin/1.7.0_webpack@4.44.2: + resolution: {integrity: sha512-dRDXaNK9pzjTfxo6jQe2oZs9PLJIF6kovT5HJCM843XgdClSyvVvlZo+xtzK84Y+VRL+YE4zUt3Jb+3AEp5gqA==} peerDependencies: - react: '>=16.0.0' - react-dom: '*' - resolution: - integrity: sha512-QsBG2BxBYU/rxr2eb8b2cZ4rPKAPBpzAR+0v6rrZLp/lnyvflLH3tw1vregK+M7aJauGWjIGNdFmUfpAOtw25A== - /@ant-design/react-slick/0.28.2: + webpack: ^4.0.0 || ^5.0.0 dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - json2mq: 0.2.0 + babel-plugin-import: 1.13.6 + chalk: 4.1.2 lodash: 4.17.21 - resize-observer-polyfill: 1.5.1 + micromatch: 4.0.5 + webpack: 4.44.2_webpack-cli@4.6.0 + dev: true + + /@arco-themes/react-privacy-computing-bioland/0.0.4_@arco-design+web-react@2.28.2: + resolution: {integrity: sha512-3n/k43xXtnkiCgJqIoFVl1TBoj+ARP3JtInKBYz2hTqFaMb7W2M0L9Y451tOYv2qh0RqfZ514q7wcHcjsjgwDg==} + peerDependencies: + '@arco-design/web-react': ^2.23.5 + dependencies: + '@arco-design/web-react': 2.28.2_d8837eed98748ff5a1dd894fdd80cd72 dev: false - resolution: - integrity: sha512-nkrvXsO29pLToFaBb3MlJY4McaUFR4UHtXTz6A5HBzYmxH4SwKerX54mWdGc/6tKpHvS3vUwjEOt2T5XqZEo8Q== + + /@arco-themes/react-privacy-computing/0.0.4_@arco-design+web-react@2.28.2: + resolution: {integrity: sha512-g7DYkX06ylB34iNGGq0MgtKVUJfOeG2wYgTD6EiE5UXwYoP6WHqrk8xjXHyoIiq1uZy+S+lE+B74/drRuZrKxA==} + peerDependencies: + '@arco-design/web-react': ^2.23.5 + dependencies: + '@arco-design/web-react': 2.28.2_d8837eed98748ff5a1dd894fdd80cd72 + dev: false + /@babel/code-frame/7.10.4: + resolution: {integrity: sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==} dependencies: '@babel/highlight': 7.13.10 dev: false - resolution: - integrity: sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== + /@babel/code-frame/7.12.11: + resolution: {integrity: sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==} dependencies: '@babel/highlight': 7.13.10 dev: true - resolution: - integrity: sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== + /@babel/code-frame/7.12.13: + resolution: {integrity: sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==} dependencies: '@babel/highlight': 7.13.10 - resolution: - integrity: sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g== + + /@babel/code-frame/7.18.6: + resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/highlight': 7.18.6 + dev: true + /@babel/compat-data/7.13.12: + resolution: {integrity: 
sha512-3eJJ841uKxeV8dcN/2yGEUy+RfgQspPEgQat85umsE1rotuquQ2AbIub4S6j7c50a2d+4myc+zSlnXeIHrOnhQ==} dev: true - resolution: - integrity: sha512-3eJJ841uKxeV8dcN/2yGEUy+RfgQspPEgQat85umsE1rotuquQ2AbIub4S6j7c50a2d+4myc+zSlnXeIHrOnhQ== + /@babel/core/7.12.3: + resolution: {integrity: sha512-0qXcZYKZp3/6N2jKYVxZv0aNCsxTSVCiK72DTiTYZAu7sjg73W0/aynWjMbiGd87EQL4WyA8reiJVh92AVla9g==} + engines: {node: '>=6.9.0'} dependencies: '@babel/code-frame': 7.12.13 '@babel/generator': 7.13.9 @@ -192,31 +380,49 @@ packages: resolve: 1.18.1 semver: 5.7.1 source-map: 0.5.7 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-0qXcZYKZp3/6N2jKYVxZv0aNCsxTSVCiK72DTiTYZAu7sjg73W0/aynWjMbiGd87EQL4WyA8reiJVh92AVla9g== + /@babel/generator/7.13.9: + resolution: {integrity: sha512-mHOOmY0Axl/JCTkxTU6Lf5sWOg/v8nUa+Xkt4zMTftX0wqmb6Sh7J8gvcehBw7q0AhrhAR+FDacKjCZ2X8K+Sw==} dependencies: '@babel/types': 7.13.13 jsesc: 2.5.2 source-map: 0.5.7 - resolution: - integrity: sha512-mHOOmY0Axl/JCTkxTU6Lf5sWOg/v8nUa+Xkt4zMTftX0wqmb6Sh7J8gvcehBw7q0AhrhAR+FDacKjCZ2X8K+Sw== + + /@babel/generator/7.20.5: + resolution: {integrity: sha512-jl7JY2Ykn9S0yj4DQP82sYvPU+T3g0HFcWTqDLqiuA9tGRNIj9VfbtXGAYTTkyNEnQk1jkMGOdYka8aG/lulCA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.5 + '@jridgewell/gen-mapping': 0.3.2 + jsesc: 2.5.2 + dev: true + /@babel/helper-annotate-as-pure/7.12.13: + resolution: {integrity: sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw==} dependencies: '@babel/types': 7.13.13 - resolution: - integrity: sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw== + + /@babel/helper-annotate-as-pure/7.18.6: + resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.5 + dev: true + /@babel/helper-builder-binary-assignment-operator-visitor/7.12.13: + resolution: {integrity: sha512-CZOv9tGphhDRlVjVkAgm8Nhklm9RzSmWpX2my+t7Ua/KT616pEzXsQCjinzvkRvHWJ9itO4f296efroX23XCMA==} dependencies: '@babel/helper-explode-assignable-expression': 7.13.0 '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-CZOv9tGphhDRlVjVkAgm8Nhklm9RzSmWpX2my+t7Ua/KT616pEzXsQCjinzvkRvHWJ9itO4f296efroX23XCMA== + /@babel/helper-compilation-targets/7.13.13_@babel+core@7.12.3: + resolution: {integrity: sha512-q1kcdHNZehBwD9jYPh3WyXcsFERi39X4I59I3NadciWtNDyZ6x+GboOxncFK0kXlKIv6BJm5acncehXWUjWQMQ==} + peerDependencies: + '@babel/core': ^7.0.0 dependencies: '@babel/compat-data': 7.13.12 '@babel/core': 7.12.3 @@ -224,11 +430,11 @@ packages: browserslist: 4.16.3 semver: 6.3.0 dev: true + + /@babel/helper-create-class-features-plugin/7.13.11_@babel+core@7.12.3: + resolution: {integrity: sha512-ays0I7XYq9xbjCSvT+EvysLgfc3tOkwCULHjrnscGT3A9qD4sk3wXnJ3of0MAWsWGjdinFvajHU2smYuqXKMrw==} peerDependencies: '@babel/core': ^7.0.0 - resolution: - integrity: sha512-q1kcdHNZehBwD9jYPh3WyXcsFERi39X4I59I3NadciWtNDyZ6x+GboOxncFK0kXlKIv6BJm5acncehXWUjWQMQ== - /@babel/helper-create-class-features-plugin/7.13.11_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-function-name': 7.12.13 @@ -236,22 +442,42 @@ packages: '@babel/helper-optimise-call-expression': 7.12.13 '@babel/helper-replace-supers': 7.13.12 '@babel/helper-split-export-declaration': 7.12.13 + transitivePeerDependencies: + - supports-color dev: true + + 
/@babel/helper-create-class-features-plugin/7.20.5_@babel+core@7.12.3: + resolution: {integrity: sha512-3RCdA/EmEaikrhayahwToF0fpweU/8o2p8vhc1c/1kftHOdTKuC65kik/TLc+qfbS8JKw4qqJbne4ovICDhmww==} + engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - resolution: - integrity: sha512-ays0I7XYq9xbjCSvT+EvysLgfc3tOkwCULHjrnscGT3A9qD4sk3wXnJ3of0MAWsWGjdinFvajHU2smYuqXKMrw== + dependencies: + '@babel/core': 7.12.3 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-member-expression-to-functions': 7.18.9 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-replace-supers': 7.19.1 + '@babel/helper-split-export-declaration': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/helper-create-regexp-features-plugin/7.12.17_@babel+core@7.12.3: + resolution: {integrity: sha512-p2VGmBu9oefLZ2nQpgnEnG0ZlRPvL8gAGvPUMQwUdaE8k49rOMuZpOwdQoy5qJf6K8jL3bcAMhVUlHAjIgJHUg==} + peerDependencies: + '@babel/core': ^7.0.0 dependencies: '@babel/core': 7.12.3 '@babel/helper-annotate-as-pure': 7.12.13 regexpu-core: 4.7.1 dev: true - peerDependencies: - '@babel/core': ^7.0.0 - resolution: - integrity: sha512-p2VGmBu9oefLZ2nQpgnEnG0ZlRPvL8gAGvPUMQwUdaE8k49rOMuZpOwdQoy5qJf6K8jL3bcAMhVUlHAjIgJHUg== + /@babel/helper-define-polyfill-provider/0.1.5_@babel+core@7.12.3: + resolution: {integrity: sha512-nXuzCSwlJ/WKr8qxzW816gwyT6VZgiJG17zR40fou70yfAcqjoNyTLl/DQ+FExw5Hx5KNqshmN8Ldl/r2N7cTg==} + peerDependencies: + '@babel/core': ^7.4.0-0 dependencies: '@babel/core': 7.12.3 '@babel/helper-compilation-targets': 7.13.13_@babel+core@7.12.3 @@ -262,48 +488,84 @@ packages: lodash.debounce: 4.0.8 resolve: 1.18.1 semver: 6.3.0 + transitivePeerDependencies: + - supports-color dev: true - peerDependencies: - '@babel/core': ^7.4.0-0 - resolution: - integrity: sha512-nXuzCSwlJ/WKr8qxzW816gwyT6VZgiJG17zR40fou70yfAcqjoNyTLl/DQ+FExw5Hx5KNqshmN8Ldl/r2N7cTg== + + /@babel/helper-environment-visitor/7.18.9: + resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-explode-assignable-expression/7.13.0: + resolution: {integrity: sha512-qS0peLTDP8kOisG1blKbaoBg/o9OSa1qoumMjTK5pM+KDTtpxpsiubnCGP34vK8BXGcb2M9eigwgvoJryrzwWA==} dependencies: '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-qS0peLTDP8kOisG1blKbaoBg/o9OSa1qoumMjTK5pM+KDTtpxpsiubnCGP34vK8BXGcb2M9eigwgvoJryrzwWA== + /@babel/helper-function-name/7.12.13: + resolution: {integrity: sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==} dependencies: '@babel/helper-get-function-arity': 7.12.13 '@babel/template': 7.12.13 '@babel/types': 7.13.13 - resolution: - integrity: sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA== + + /@babel/helper-function-name/7.19.0: + resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.18.10 + '@babel/types': 7.20.5 + dev: true + /@babel/helper-get-function-arity/7.12.13: + resolution: {integrity: sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg==} dependencies: '@babel/types': 7.13.13 - resolution: - integrity: sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg== + 
/@babel/helper-hoist-variables/7.13.0: + resolution: {integrity: sha512-0kBzvXiIKfsCA0y6cFEIJf4OdzfpRuNk4+YTeHZpGGc666SATFKTz6sRncwFnQk7/ugJ4dSrCj6iJuvW4Qwr2g==} dependencies: '@babel/traverse': 7.13.13 '@babel/types': 7.13.13 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-hoist-variables/7.18.6: + resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.5 dev: true - resolution: - integrity: sha512-0kBzvXiIKfsCA0y6cFEIJf4OdzfpRuNk4+YTeHZpGGc666SATFKTz6sRncwFnQk7/ugJ4dSrCj6iJuvW4Qwr2g== + /@babel/helper-member-expression-to-functions/7.13.12: + resolution: {integrity: sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw==} dependencies: '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw== + + /@babel/helper-member-expression-to-functions/7.18.9: + resolution: {integrity: sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.5 + dev: true + /@babel/helper-module-imports/7.13.12: + resolution: {integrity: sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA==} dependencies: '@babel/types': 7.13.13 - resolution: - integrity: sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA== + + /@babel/helper-module-imports/7.18.6: + resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + dev: true + /@babel/helper-module-transforms/7.13.12: + resolution: {integrity: sha512-7zVQqMO3V+K4JOOj40kxiCrMf6xlQAkewBB0eu2b03OO/Q21ZutOzjpfD79A5gtE/2OWi1nv625MrDlGlkbknQ==} dependencies: '@babel/helper-module-imports': 7.13.12 '@babel/helper-replace-supers': 7.13.12 @@ -313,224 +575,313 @@ packages: '@babel/template': 7.12.13 '@babel/traverse': 7.13.13 '@babel/types': 7.13.13 + transitivePeerDependencies: + - supports-color dev: true - resolution: - integrity: sha512-7zVQqMO3V+K4JOOj40kxiCrMf6xlQAkewBB0eu2b03OO/Q21ZutOzjpfD79A5gtE/2OWi1nv625MrDlGlkbknQ== + /@babel/helper-optimise-call-expression/7.12.13: + resolution: {integrity: sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA==} dependencies: '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA== + + /@babel/helper-optimise-call-expression/7.18.6: + resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.5 + dev: true + /@babel/helper-plugin-utils/7.13.0: + resolution: {integrity: sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ==} + dev: true + + /@babel/helper-plugin-utils/7.20.2: + resolution: {integrity: sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==} + engines: {node: '>=6.9.0'} dev: true - resolution: - integrity: sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ== + /@babel/helper-remap-async-to-generator/7.13.0: + 
resolution: {integrity: sha512-pUQpFBE9JvC9lrQbpX0TmeNIy5s7GnZjna2lhhcHC7DzgBs6fWn722Y5cfwgrtrqc7NAJwMvOa0mKhq6XaE4jg==} dependencies: '@babel/helper-annotate-as-pure': 7.12.13 '@babel/helper-wrap-function': 7.13.0 '@babel/types': 7.13.13 + transitivePeerDependencies: + - supports-color dev: true - resolution: - integrity: sha512-pUQpFBE9JvC9lrQbpX0TmeNIy5s7GnZjna2lhhcHC7DzgBs6fWn722Y5cfwgrtrqc7NAJwMvOa0mKhq6XaE4jg== + /@babel/helper-replace-supers/7.13.12: + resolution: {integrity: sha512-Gz1eiX+4yDO8mT+heB94aLVNCL+rbuT2xy4YfyNqu8F+OI6vMvJK891qGBTqL9Uc8wxEvRW92Id6G7sDen3fFw==} dependencies: '@babel/helper-member-expression-to-functions': 7.13.12 '@babel/helper-optimise-call-expression': 7.12.13 '@babel/traverse': 7.13.13 '@babel/types': 7.13.13 - dev: true - resolution: - integrity: sha512-Gz1eiX+4yDO8mT+heB94aLVNCL+rbuT2xy4YfyNqu8F+OI6vMvJK891qGBTqL9Uc8wxEvRW92Id6G7sDen3fFw== + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-replace-supers/7.19.1: + resolution: {integrity: sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-member-expression-to-functions': 7.18.9 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/traverse': 7.20.5 + '@babel/types': 7.20.5 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/helper-simple-access/7.13.12: + resolution: {integrity: sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA==} dependencies: '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA== + /@babel/helper-skip-transparent-expression-wrappers/7.12.1: + resolution: {integrity: sha512-Mf5AUuhG1/OCChOJ/HcADmvcHM42WJockombn8ATJG3OnyiSxBK/Mm5x78BQWvmtXZKHgbjdGL2kin/HOLlZGA==} dependencies: '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-Mf5AUuhG1/OCChOJ/HcADmvcHM42WJockombn8ATJG3OnyiSxBK/Mm5x78BQWvmtXZKHgbjdGL2kin/HOLlZGA== + /@babel/helper-split-export-declaration/7.12.13: + resolution: {integrity: sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg==} dependencies: '@babel/types': 7.13.13 - resolution: - integrity: sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg== + + /@babel/helper-split-export-declaration/7.18.6: + resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.5 + dev: true + + /@babel/helper-string-parser/7.19.4: + resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-validator-identifier/7.12.11: - resolution: - integrity: sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== + resolution: {integrity: sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==} + + /@babel/helper-validator-identifier/7.19.1: + resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-validator-option/7.12.17: + resolution: {integrity: 
sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw==} dev: true - resolution: - integrity: sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw== + /@babel/helper-wrap-function/7.13.0: + resolution: {integrity: sha512-1UX9F7K3BS42fI6qd2A4BjKzgGjToscyZTdp1DjknHLCIvpgne6918io+aL5LXFcER/8QWiwpoY902pVEqgTXA==} dependencies: '@babel/helper-function-name': 7.12.13 '@babel/template': 7.12.13 '@babel/traverse': 7.13.13 '@babel/types': 7.13.13 + transitivePeerDependencies: + - supports-color dev: true - resolution: - integrity: sha512-1UX9F7K3BS42fI6qd2A4BjKzgGjToscyZTdp1DjknHLCIvpgne6918io+aL5LXFcER/8QWiwpoY902pVEqgTXA== + /@babel/helpers/7.13.10: + resolution: {integrity: sha512-4VO883+MWPDUVRF3PhiLBUFHoX/bsLTGFpFK/HqvvfBZz2D57u9XzPVNFVBTc0PW/CWR9BXTOKt8NF4DInUHcQ==} dependencies: '@babel/template': 7.12.13 '@babel/traverse': 7.13.13 '@babel/types': 7.13.13 + transitivePeerDependencies: + - supports-color dev: true - resolution: - integrity: sha512-4VO883+MWPDUVRF3PhiLBUFHoX/bsLTGFpFK/HqvvfBZz2D57u9XzPVNFVBTc0PW/CWR9BXTOKt8NF4DInUHcQ== + /@babel/highlight/7.13.10: + resolution: {integrity: sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==} dependencies: '@babel/helper-validator-identifier': 7.12.11 chalk: 2.4.2 js-tokens: 4.0.0 - resolution: - integrity: sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg== + + /@babel/highlight/7.18.6: + resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.19.1 + chalk: 2.4.2 + js-tokens: 4.0.0 + dev: true + /@babel/parser/7.13.13: - engines: - node: '>=6.0.0' + resolution: {integrity: sha512-OhsyMrqygfk5v8HmWwOzlYjJrtLaFhF34MrfG/Z73DgYCI6ojNUTUp2TYbtnjo8PegeJp12eamsNettCQjKjVw==} + engines: {node: '>=6.0.0'} + hasBin: true + + /@babel/parser/7.20.5: + resolution: {integrity: sha512-r27t/cy/m9uKLXQNWWebeCUHgnAZq0CpG1OwKRxzJMP1vpSU4bSIK2hq+/cp0bQxetkXx38n09rNu8jVkcK/zA==} + engines: {node: '>=6.0.0'} hasBin: true - resolution: - integrity: sha512-OhsyMrqygfk5v8HmWwOzlYjJrtLaFhF34MrfG/Z73DgYCI6ojNUTUp2TYbtnjo8PegeJp12eamsNettCQjKjVw== + dev: true + /@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/7.13.12_@babel+core@7.12.3: + resolution: {integrity: sha512-d0u3zWKcoZf379fOeJdr1a5WPDny4aOFZ6hlfKivgK0LY7ZxNfoaHL2fWwdGtHyVvra38FC+HVYkO+byfSA8AQ==} + peerDependencies: + '@babel/core': ^7.13.0 dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 '@babel/plugin-proposal-optional-chaining': 7.13.12_@babel+core@7.12.3 dev: true - peerDependencies: - '@babel/core': ^7.13.0 - resolution: - integrity: sha512-d0u3zWKcoZf379fOeJdr1a5WPDny4aOFZ6hlfKivgK0LY7ZxNfoaHL2fWwdGtHyVvra38FC+HVYkO+byfSA8AQ== + /@babel/plugin-proposal-async-generator-functions/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-rPBnhj+WgoSmgq+4gQUtXx/vOcU+UYtjy1AA/aeD61Hwj410fwYyqfUcRP3lR8ucgliVJL/G7sXcNUecC75IXA==} + peerDependencies: + '@babel/core': ^7.0.0-0 dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-remap-async-to-generator': 7.13.0 '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.12.3 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-proposal-class-properties/7.12.1_@babel+core@7.12.3: + 
resolution: {integrity: sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-rPBnhj+WgoSmgq+4gQUtXx/vOcU+UYtjy1AA/aeD61Hwj410fwYyqfUcRP3lR8ucgliVJL/G7sXcNUecC75IXA== - /@babel/plugin-proposal-class-properties/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-class-features-plugin': 7.13.11_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-proposal-class-properties/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-KnTDjFNC1g+45ka0myZNvSBFLhNCLN+GeGYLDEA8Oq7MZ6yMgfLoIRh86GRT0FjtJhZw8JyUskP9uvj5pHM9Zg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w== - /@babel/plugin-proposal-class-properties/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-class-features-plugin': 7.13.11_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-proposal-class-properties/7.18.6_@babel+core@7.12.3: + resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} + engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-KnTDjFNC1g+45ka0myZNvSBFLhNCLN+GeGYLDEA8Oq7MZ6yMgfLoIRh86GRT0FjtJhZw8JyUskP9uvj5pHM9Zg== + dependencies: + '@babel/core': 7.12.3 + '@babel/helper-create-class-features-plugin': 7.20.5_@babel+core@7.12.3 + '@babel/helper-plugin-utils': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/plugin-proposal-decorators/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-knNIuusychgYN8fGJHONL0RbFxLGawhXOJNLBk75TniTsZZeA+wdkDuv6wp4lGwzQEKjZi6/WYtnb3udNPmQmQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 dependencies: '@babel/core': 7.12.3 '@babel/helper-create-class-features-plugin': 7.13.11_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-decorators': 7.12.13_@babel+core@7.12.3 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-proposal-dynamic-import/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-ONWKj0H6+wIRCkZi9zSbZtE/r73uOhMVHh256ys0UzfM7I3d4n+spZNWjOnJv2gzopumP2Wxi186vI8N0Y2JyQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-knNIuusychgYN8fGJHONL0RbFxLGawhXOJNLBk75TniTsZZeA+wdkDuv6wp4lGwzQEKjZi6/WYtnb3udNPmQmQ== - /@babel/plugin-proposal-dynamic-import/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-export-namespace-from/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-INAgtFo4OnLN3Y/j0VwAgw3HDXcDtX+C/erMvWzuV9v71r7urb6iyMXu7eM9IgLr1ElLlOkaHjJ0SbCmdOQ3Iw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-ONWKj0H6+wIRCkZi9zSbZtE/r73uOhMVHh256ys0UzfM7I3d4n+spZNWjOnJv2gzopumP2Wxi186vI8N0Y2JyQ== - /@babel/plugin-proposal-export-namespace-from/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-export-namespace-from': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-json-strings/7.13.8_@babel+core@7.12.3: + resolution: 
{integrity: sha512-w4zOPKUFPX1mgvTmL/fcEqy34hrQ1CRcGxdphBc6snDnnqJ47EZDIyop6IwXzAC8G916hsIuXB2ZMBCExC5k7Q==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-INAgtFo4OnLN3Y/j0VwAgw3HDXcDtX+C/erMvWzuV9v71r7urb6iyMXu7eM9IgLr1ElLlOkaHjJ0SbCmdOQ3Iw== - /@babel/plugin-proposal-json-strings/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-logical-assignment-operators/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-aul6znYB4N4HGweImqKn59Su9RS8lbUIqxtXTOcAGtNIDczoEFv+l1EhmX8rUBp3G1jMjKJm8m0jXVp63ZpS4A==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-w4zOPKUFPX1mgvTmL/fcEqy34hrQ1CRcGxdphBc6snDnnqJ47EZDIyop6IwXzAC8G916hsIuXB2ZMBCExC5k7Q== - /@babel/plugin-proposal-logical-assignment-operators/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-nullish-coalescing-operator/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-nZY0ESiaQDI1y96+jk6VxMOaL4LPo/QDHBqL+SF3/vl6dHkTwHlOI8L4ZwuRBHgakRBw5zsVylel7QPbbGuYgg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-aul6znYB4N4HGweImqKn59Su9RS8lbUIqxtXTOcAGtNIDczoEFv+l1EhmX8rUBp3G1jMjKJm8m0jXVp63ZpS4A== - /@babel/plugin-proposal-nullish-coalescing-operator/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-nullish-coalescing-operator/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-iePlDPBn//UhxExyS9KyeYU7RM9WScAG+D3Hhno0PLJebAEpDZMocbDe64eqynhNAnwz/vZoL/q/QB2T1OH39A==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-nZY0ESiaQDI1y96+jk6VxMOaL4LPo/QDHBqL+SF3/vl6dHkTwHlOI8L4ZwuRBHgakRBw5zsVylel7QPbbGuYgg== - /@babel/plugin-proposal-nullish-coalescing-operator/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-numeric-separator/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-O1jFia9R8BUCl3ZGB7eitaAPu62TXJRHn7rh+ojNERCFyqRwJMTmhz+tJ+k0CwI6CLjX/ee4qW74FSqlq9I35w==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-iePlDPBn//UhxExyS9KyeYU7RM9WScAG+D3Hhno0PLJebAEpDZMocbDe64eqynhNAnwz/vZoL/q/QB2T1OH39A== - /@babel/plugin-proposal-numeric-separator/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-numeric-separator/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-MR7Ok+Af3OhNTCxYVjJZHS0t97ydnJZt/DbR4WISO39iDnhiD8XHrY12xuSJ90FFEGjir0Fzyyn7g/zY6hxbxA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-O1jFia9R8BUCl3ZGB7eitaAPu62TXJRHn7rh+ojNERCFyqRwJMTmhz+tJ+k0CwI6CLjX/ee4qW74FSqlq9I35w== - /@babel/plugin-proposal-numeric-separator/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.12.3 dev: true + + 
/@babel/plugin-proposal-object-rest-spread/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-DhB2EuB1Ih7S3/IRX5AFVgZ16k3EzfRbq97CxAVI1KSYcW+lexV8VZb7G7L8zuPVSdQMRn0kiBpf/Yzu9ZKH0g==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-MR7Ok+Af3OhNTCxYVjJZHS0t97ydnJZt/DbR4WISO39iDnhiD8XHrY12xuSJ90FFEGjir0Fzyyn7g/zY6hxbxA== - /@babel/plugin-proposal-object-rest-spread/7.13.8_@babel+core@7.12.3: dependencies: '@babel/compat-data': 7.13.12 '@babel/core': 7.12.3 @@ -539,265 +890,268 @@ packages: '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.12.3 '@babel/plugin-transform-parameters': 7.13.0_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-optional-catch-binding/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-0wS/4DUF1CuTmGo+NiaHfHcVSeSLj5S3e6RivPTg/2k3wOv3jO35tZ6/ZWsQhQMvdgI7CwphjQa/ccarLymHVA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-DhB2EuB1Ih7S3/IRX5AFVgZ16k3EzfRbq97CxAVI1KSYcW+lexV8VZb7G7L8zuPVSdQMRn0kiBpf/Yzu9ZKH0g== - /@babel/plugin-proposal-optional-catch-binding/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-optional-chaining/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-c2uRpY6WzaVDzynVY9liyykS+kVU+WRZPMPYpkelXH8KBt1oXoI89kPbZKKG/jDT5UK92FTW2fZkZaJhdiBabw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-0wS/4DUF1CuTmGo+NiaHfHcVSeSLj5S3e6RivPTg/2k3wOv3jO35tZ6/ZWsQhQMvdgI7CwphjQa/ccarLymHVA== - /@babel/plugin-proposal-optional-chaining/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-optional-chaining/7.13.12_@babel+core@7.12.3: + resolution: {integrity: sha512-fcEdKOkIB7Tf4IxrgEVeFC4zeJSTr78no9wTdBuZZbqF64kzllU0ybo2zrzm7gUQfxGhBgq4E39oRs8Zx/RMYQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-c2uRpY6WzaVDzynVY9liyykS+kVU+WRZPMPYpkelXH8KBt1oXoI89kPbZKKG/jDT5UK92FTW2fZkZaJhdiBabw== - /@babel/plugin-proposal-optional-chaining/7.13.12_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.12.3 dev: true + + /@babel/plugin-proposal-private-methods/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-MXyyKQd9inhx1kDYPkFRVOBXQ20ES8Pto3T7UZ92xj2mY0EVD8oAVzeyYuVfy/mxAdTSIayOvg+aVzcHV2bn6Q==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-fcEdKOkIB7Tf4IxrgEVeFC4zeJSTr78no9wTdBuZZbqF64kzllU0ybo2zrzm7gUQfxGhBgq4E39oRs8Zx/RMYQ== - /@babel/plugin-proposal-private-methods/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-class-features-plugin': 7.13.11_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-proposal-unicode-property-regex/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-XyJmZidNfofEkqFV5VC/bLabGmO5QzenPO/YOfGuEbgU+2sSwMmio3YLb4WtBgcmmdwZHyVyv8on77IUjQ5Gvg==} + engines: {node: '>=4'} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: 
sha512-MXyyKQd9inhx1kDYPkFRVOBXQ20ES8Pto3T7UZ92xj2mY0EVD8oAVzeyYuVfy/mxAdTSIayOvg+aVzcHV2bn6Q== - /@babel/plugin-proposal-unicode-property-regex/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-regexp-features-plugin': 7.12.17_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true - engines: - node: '>=4' + + /@babel/plugin-syntax-async-generators/7.8.4_@babel+core@7.12.3: + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-XyJmZidNfofEkqFV5VC/bLabGmO5QzenPO/YOfGuEbgU+2sSwMmio3YLb4WtBgcmmdwZHyVyv8on77IUjQ5Gvg== - /@babel/plugin-syntax-async-generators/7.8.4_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-bigint/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - /@babel/plugin-syntax-bigint/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-class-properties/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - /@babel/plugin-syntax-class-properties/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-decorators/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-Rw6aIXGuqDLr6/LoBBYE57nKOzQpz/aDkKlMqEwH+Vp0MXbG6H/TfRjaY343LKxzAKAMXIHsQ8JzaZKuDZ9MwA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== - /@babel/plugin-syntax-decorators/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-dynamic-import/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-Rw6aIXGuqDLr6/LoBBYE57nKOzQpz/aDkKlMqEwH+Vp0MXbG6H/TfRjaY343LKxzAKAMXIHsQ8JzaZKuDZ9MwA== - /@babel/plugin-syntax-dynamic-import/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-export-namespace-from/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== - /@babel/plugin-syntax-export-namespace-from/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-flow/7.12.13_@babel+core@7.12.3: + resolution: {integrity: 
sha512-J/RYxnlSLXZLVR7wTRsozxKT8qbsx1mNKJzXEEjQ0Kjx1ZACcyHgbanNWNCFtc36IzuWhYWPpvJFFoexoOWFmA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - /@babel/plugin-syntax-flow/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-import-meta/7.10.4_@babel+core@7.12.3: + resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-J/RYxnlSLXZLVR7wTRsozxKT8qbsx1mNKJzXEEjQ0Kjx1ZACcyHgbanNWNCFtc36IzuWhYWPpvJFFoexoOWFmA== - /@babel/plugin-syntax-import-meta/7.10.4_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-json-strings/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - /@babel/plugin-syntax-json-strings/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-jsx/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - /@babel/plugin-syntax-jsx/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-logical-assignment-operators/7.10.4_@babel+core@7.12.3: + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g== - /@babel/plugin-syntax-logical-assignment-operators/7.10.4_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-nullish-coalescing-operator/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - /@babel/plugin-syntax-nullish-coalescing-operator/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-numeric-separator/7.10.4_@babel+core@7.12.3: + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - /@babel/plugin-syntax-numeric-separator/7.10.4_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-object-rest-spread/7.8.3_@babel+core@7.12.3: + resolution: {integrity: 
sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - /@babel/plugin-syntax-object-rest-spread/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-optional-catch-binding/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - /@babel/plugin-syntax-optional-catch-binding/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-optional-chaining/7.8.3_@babel+core@7.12.3: + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - /@babel/plugin-syntax-optional-chaining/7.8.3_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-top-level-await/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-A81F9pDwyS7yM//KwbCSDqy3Uj4NMIurtplxphWxoYtNPov7cJsDkAFNNyVlIZ3jwGycVsurZ+LtOA8gZ376iQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - /@babel/plugin-syntax-top-level-await/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-syntax-typescript/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-cHP3u1JiUiG2LFDKbXnwVad81GvfyIOmCD6HIEId6ojrY0Drfy2q1jw7BwN7dE84+kTnBjLkXoL3IEy/3JPu2w==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-A81F9pDwyS7yM//KwbCSDqy3Uj4NMIurtplxphWxoYtNPov7cJsDkAFNNyVlIZ3jwGycVsurZ+LtOA8gZ376iQ== - /@babel/plugin-syntax-typescript/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-arrow-functions/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-96lgJagobeVmazXFaDrbmCLQxBysKu7U6Do3mLsx27gf5Dk85ezysrs2BZUpXD703U/Su1xTBDxxar2oa4jAGg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-cHP3u1JiUiG2LFDKbXnwVad81GvfyIOmCD6HIEId6ojrY0Drfy2q1jw7BwN7dE84+kTnBjLkXoL3IEy/3JPu2w== - /@babel/plugin-transform-arrow-functions/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-async-to-generator/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-3j6E004Dx0K3eGmhxVJxwwI89CTJrce7lg3UrtFuDAVQ/2+SJ/h/aSFOeE6/n0WB1GsOffsJp6MnPQNQ8nmwhg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-96lgJagobeVmazXFaDrbmCLQxBysKu7U6Do3mLsx27gf5Dk85ezysrs2BZUpXD703U/Su1xTBDxxar2oa4jAGg== - /@babel/plugin-transform-async-to-generator/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-module-imports': 7.13.12 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-remap-async-to-generator': 
7.13.0 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-block-scoped-functions/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-zNyFqbc3kI/fVpqwfqkg6RvBgFpC4J18aKKMmv7KdQ/1GgREapSJAykLMVNwfRGO3BtHj3YQZl8kxCXPcVMVeg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-3j6E004Dx0K3eGmhxVJxwwI89CTJrce7lg3UrtFuDAVQ/2+SJ/h/aSFOeE6/n0WB1GsOffsJp6MnPQNQ8nmwhg== - /@babel/plugin-transform-block-scoped-functions/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-block-scoping/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-Pxwe0iqWJX4fOOM2kEZeUuAxHMWb9nK+9oh5d11bsLoB0xMg+mkDpt0eYuDZB7ETrY9bbcVlKUGTOGWy7BHsMQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-zNyFqbc3kI/fVpqwfqkg6RvBgFpC4J18aKKMmv7KdQ/1GgREapSJAykLMVNwfRGO3BtHj3YQZl8kxCXPcVMVeg== - /@babel/plugin-transform-block-scoping/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-classes/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-9BtHCPUARyVH1oXGcSJD3YpsqRLROJx5ZNP6tN5vnk17N0SVf9WCtf8Nuh1CFmgByKKAIMstitKduoCmsaDK5g==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-Pxwe0iqWJX4fOOM2kEZeUuAxHMWb9nK+9oh5d11bsLoB0xMg+mkDpt0eYuDZB7ETrY9bbcVlKUGTOGWy7BHsMQ== - /@babel/plugin-transform-classes/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-annotate-as-pure': 7.12.13 @@ -807,129 +1161,135 @@ packages: '@babel/helper-replace-supers': 7.13.12 '@babel/helper-split-export-declaration': 7.12.13 globals: 11.12.0 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-computed-properties/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-RRqTYTeZkZAz8WbieLTvKUEUxZlUTdmL5KGMyZj7FnMfLNKV4+r5549aORG/mgojRmFlQMJDUupwAMiF2Q7OUg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-9BtHCPUARyVH1oXGcSJD3YpsqRLROJx5ZNP6tN5vnk17N0SVf9WCtf8Nuh1CFmgByKKAIMstitKduoCmsaDK5g== - /@babel/plugin-transform-computed-properties/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-destructuring/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-zym5em7tePoNT9s964c0/KU3JPPnuq7VhIxPRefJ4/s82cD+q1mgKfuGRDMCPL0HTyKz4dISuQlCusfgCJ86HA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-RRqTYTeZkZAz8WbieLTvKUEUxZlUTdmL5KGMyZj7FnMfLNKV4+r5549aORG/mgojRmFlQMJDUupwAMiF2Q7OUg== - /@babel/plugin-transform-destructuring/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-dotall-regex/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-foDrozE65ZFdUC2OfgeOCrEPTxdB3yjqxpXh8CH+ipd9CHd4s/iq81kcUpyH8ACGNEPdFqbtzfgzbT/ZGlbDeQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-zym5em7tePoNT9s964c0/KU3JPPnuq7VhIxPRefJ4/s82cD+q1mgKfuGRDMCPL0HTyKz4dISuQlCusfgCJ86HA== - /@babel/plugin-transform-dotall-regex/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-regexp-features-plugin': 7.12.17_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-duplicate-keys/7.12.13_@babel+core@7.12.3: + resolution: {integrity: 
sha512-NfADJiiHdhLBW3pulJlJI2NB0t4cci4WTZ8FtdIuNc2+8pslXdPtRRAEWqUY+m9kNOk2eRYbTAOipAxlrOcwwQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-foDrozE65ZFdUC2OfgeOCrEPTxdB3yjqxpXh8CH+ipd9CHd4s/iq81kcUpyH8ACGNEPdFqbtzfgzbT/ZGlbDeQ== - /@babel/plugin-transform-duplicate-keys/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-exponentiation-operator/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-fbUelkM1apvqez/yYx1/oICVnGo2KM5s63mhGylrmXUxK/IAXSIf87QIxVfZldWf4QsOafY6vV3bX8aMHSvNrA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-NfADJiiHdhLBW3pulJlJI2NB0t4cci4WTZ8FtdIuNc2+8pslXdPtRRAEWqUY+m9kNOk2eRYbTAOipAxlrOcwwQ== - /@babel/plugin-transform-exponentiation-operator/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-builder-binary-assignment-operator-visitor': 7.12.13 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-flow-strip-types/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-8hAtkmsQb36yMmEtk2JZ9JnVyDSnDOdlB+0nEGzIDLuK4yR3JcEjfuFPYkdEPSh8Id+rAMeBEn+X0iVEyho6Hg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-fbUelkM1apvqez/yYx1/oICVnGo2KM5s63mhGylrmXUxK/IAXSIf87QIxVfZldWf4QsOafY6vV3bX8aMHSvNrA== - /@babel/plugin-transform-flow-strip-types/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-flow': 7.12.13_@babel+core@7.12.3 dev: true + + /@babel/plugin-transform-for-of/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-IHKT00mwUVYE0zzbkDgNRP6SRzvfGCYsOxIRz8KsiaaHCcT9BWIkO+H9QRJseHBLOGBZkHUdHiqj6r0POsdytg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-8hAtkmsQb36yMmEtk2JZ9JnVyDSnDOdlB+0nEGzIDLuK4yR3JcEjfuFPYkdEPSh8Id+rAMeBEn+X0iVEyho6Hg== - /@babel/plugin-transform-for-of/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-function-name/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-6K7gZycG0cmIwwF7uMK/ZqeCikCGVBdyP2J5SKNCXO5EOHcqi+z7Jwf8AmyDNcBgxET8DrEtCt/mPKPyAzXyqQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-IHKT00mwUVYE0zzbkDgNRP6SRzvfGCYsOxIRz8KsiaaHCcT9BWIkO+H9QRJseHBLOGBZkHUdHiqj6r0POsdytg== - /@babel/plugin-transform-function-name/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-function-name': 7.12.13 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-literals/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-FW+WPjSR7hiUxMcKqyNjP05tQ2kmBCdpEpZHY1ARm96tGQCCBvXKnpjILtDplUnJ/eHZ0lALLM+d2lMFSpYJrQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-6K7gZycG0cmIwwF7uMK/ZqeCikCGVBdyP2J5SKNCXO5EOHcqi+z7Jwf8AmyDNcBgxET8DrEtCt/mPKPyAzXyqQ== - /@babel/plugin-transform-literals/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-member-expression-literals/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-kxLkOsg8yir4YeEPHLuO2tXP9R/gTjpuTOjshqSpELUN3ZAg2jfDnKUvzzJxObun38sw3wm4Uu69sX/zA7iRvg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-FW+WPjSR7hiUxMcKqyNjP05tQ2kmBCdpEpZHY1ARm96tGQCCBvXKnpjILtDplUnJ/eHZ0lALLM+d2lMFSpYJrQ== - 
/@babel/plugin-transform-member-expression-literals/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-modules-amd/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-EKy/E2NHhY/6Vw5d1k3rgoobftcNUmp9fGjb9XZwQLtTctsRBOTRO7RHHxfIky1ogMN5BxN7p9uMA3SzPfotMQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-kxLkOsg8yir4YeEPHLuO2tXP9R/gTjpuTOjshqSpELUN3ZAg2jfDnKUvzzJxObun38sw3wm4Uu69sX/zA7iRvg== - /@babel/plugin-transform-modules-amd/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-module-transforms': 7.13.12 '@babel/helper-plugin-utils': 7.13.0 babel-plugin-dynamic-import-node: 2.3.3 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-modules-commonjs/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-9QiOx4MEGglfYZ4XOnU79OHr6vIWUakIj9b4mioN8eQIoEh+pf5p/zEB36JpDFWA12nNMiRf7bfoRvl9Rn79Bw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-EKy/E2NHhY/6Vw5d1k3rgoobftcNUmp9fGjb9XZwQLtTctsRBOTRO7RHHxfIky1ogMN5BxN7p9uMA3SzPfotMQ== - /@babel/plugin-transform-modules-commonjs/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-module-transforms': 7.13.12 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-simple-access': 7.13.12 babel-plugin-dynamic-import-node: 2.3.3 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-modules-systemjs/7.13.8_@babel+core@7.12.3: + resolution: {integrity: sha512-hwqctPYjhM6cWvVIlOIe27jCIBgHCsdH2xCJVAYQm7V5yTMoilbVMi9f6wKg0rpQAOn6ZG4AOyvCqFF/hUh6+A==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-9QiOx4MEGglfYZ4XOnU79OHr6vIWUakIj9b4mioN8eQIoEh+pf5p/zEB36JpDFWA12nNMiRf7bfoRvl9Rn79Bw== - /@babel/plugin-transform-modules-systemjs/7.13.8_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-hoist-variables': 7.13.0 @@ -937,122 +1297,128 @@ packages: '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-validator-identifier': 7.12.11 babel-plugin-dynamic-import-node: 2.3.3 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-modules-umd/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-D/ILzAh6uyvkWjKKyFE/W0FzWwasv6vPTSqPcjxFqn6QpX3u8DjRVliq4F2BamO2Wee/om06Vyy+vPkNrd4wxw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-hwqctPYjhM6cWvVIlOIe27jCIBgHCsdH2xCJVAYQm7V5yTMoilbVMi9f6wKg0rpQAOn6ZG4AOyvCqFF/hUh6+A== - /@babel/plugin-transform-modules-umd/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-module-transforms': 7.13.12 '@babel/helper-plugin-utils': 7.13.0 + transitivePeerDependencies: + - supports-color dev: true - peerDependencies: - '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-D/ILzAh6uyvkWjKKyFE/W0FzWwasv6vPTSqPcjxFqn6QpX3u8DjRVliq4F2BamO2Wee/om06Vyy+vPkNrd4wxw== + /@babel/plugin-transform-named-capturing-groups-regex/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-Xsm8P2hr5hAxyYblrfACXpQKdQbx4m2df9/ZZSQ8MAhsadw06+jW7s9zsSw6he+mJZXRlVMyEnVktJo4zjk1WA==} + peerDependencies: + '@babel/core': ^7.0.0 dependencies: '@babel/core': 7.12.3 '@babel/helper-create-regexp-features-plugin': 7.12.17_@babel+core@7.12.3 dev: true - peerDependencies: - '@babel/core': ^7.0.0 - resolution: - integrity: sha512-Xsm8P2hr5hAxyYblrfACXpQKdQbx4m2df9/ZZSQ8MAhsadw06+jW7s9zsSw6he+mJZXRlVMyEnVktJo4zjk1WA== + 
/@babel/plugin-transform-new-target/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-/KY2hbLxrG5GTQ9zzZSc3xWiOy379pIETEhbtzwZcw9rvuaVV4Fqy7BYGYOWZnaoXIQYbbJ0ziXLa/sKcGCYEQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-object-super/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-JzYIcj3XtYspZDV8j9ulnoMPZZnF/Cj0LUxPOjR89BdBVx+zYJI9MdMIlUZjbXDX+6YVeS6I3e8op+qQ3BYBoQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-/KY2hbLxrG5GTQ9zzZSc3xWiOy379pIETEhbtzwZcw9rvuaVV4Fqy7BYGYOWZnaoXIQYbbJ0ziXLa/sKcGCYEQ== - /@babel/plugin-transform-object-super/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-replace-supers': 7.13.12 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-parameters/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-Jt8k/h/mIwE2JFEOb3lURoY5C85ETcYPnbuAJ96zRBzh1XHtQZfs62ChZ6EP22QlC8c7Xqr9q+e1SU5qttwwjw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-JzYIcj3XtYspZDV8j9ulnoMPZZnF/Cj0LUxPOjR89BdBVx+zYJI9MdMIlUZjbXDX+6YVeS6I3e8op+qQ3BYBoQ== - /@babel/plugin-transform-parameters/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-property-literals/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-nqVigwVan+lR+g8Fj8Exl0UQX2kymtjcWfMOYM1vTYEKujeyv2SkMgazf2qNcK7l4SDiKyTA/nHCPqL4e2zo1A==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-Jt8k/h/mIwE2JFEOb3lURoY5C85ETcYPnbuAJ96zRBzh1XHtQZfs62ChZ6EP22QlC8c7Xqr9q+e1SU5qttwwjw== - /@babel/plugin-transform-property-literals/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-react-constant-elements/7.13.13_@babel+core@7.12.3: + resolution: {integrity: sha512-SNJU53VM/SjQL0bZhyU+f4kJQz7bQQajnrZRSaU21hruG/NWY41AEM9AWXeXX90pYr/C2yAmTgI6yW3LlLrAUQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-nqVigwVan+lR+g8Fj8Exl0UQX2kymtjcWfMOYM1vTYEKujeyv2SkMgazf2qNcK7l4SDiKyTA/nHCPqL4e2zo1A== - /@babel/plugin-transform-react-constant-elements/7.13.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-react-display-name/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-MprESJzI9O5VnJZrL7gg1MpdqmiFcUv41Jc7SahxYsNP2kDkFqClxxTZq+1Qv4AFCamm+GXMRDQINNn+qrxmiA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-SNJU53VM/SjQL0bZhyU+f4kJQz7bQQajnrZRSaU21hruG/NWY41AEM9AWXeXX90pYr/C2yAmTgI6yW3LlLrAUQ== - /@babel/plugin-transform-react-display-name/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-react-display-name/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-cAzB+UzBIrekfYxyLlFqf/OagTvHLcVBb5vpouzkYkBclRPraiygVnafvAoipErZLI8ANv8Ecn6E/m5qPXD26w==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-MprESJzI9O5VnJZrL7gg1MpdqmiFcUv41Jc7SahxYsNP2kDkFqClxxTZq+1Qv4AFCamm+GXMRDQINNn+qrxmiA== - /@babel/plugin-transform-react-display-name/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + 
/@babel/plugin-transform-react-jsx-development/7.12.17_@babel+core@7.12.3: + resolution: {integrity: sha512-BPjYV86SVuOaudFhsJR1zjgxxOhJDt6JHNoD48DxWEIxUCAMjV1ys6DYw4SDYZh0b1QsS2vfIA9t/ZsQGsDOUQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-cAzB+UzBIrekfYxyLlFqf/OagTvHLcVBb5vpouzkYkBclRPraiygVnafvAoipErZLI8ANv8Ecn6E/m5qPXD26w== - /@babel/plugin-transform-react-jsx-development/7.12.17_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/plugin-transform-react-jsx': 7.13.12_@babel+core@7.12.3 dev: true + + /@babel/plugin-transform-react-jsx-self/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-FXYw98TTJ125GVCCkFLZXlZ1qGcsYqNQhVBQcZjyrwf8FEUtVfKIoidnO8S0q+KBQpDYNTmiGo1gn67Vti04lQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-BPjYV86SVuOaudFhsJR1zjgxxOhJDt6JHNoD48DxWEIxUCAMjV1ys6DYw4SDYZh0b1QsS2vfIA9t/ZsQGsDOUQ== - /@babel/plugin-transform-react-jsx-self/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-react-jsx-source/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-O5JJi6fyfih0WfDgIJXksSPhGP/G0fQpfxYy87sDc+1sFmsCS6wr3aAn+whbzkhbjtq4VMqLRaSzR6IsshIC0Q==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-FXYw98TTJ125GVCCkFLZXlZ1qGcsYqNQhVBQcZjyrwf8FEUtVfKIoidnO8S0q+KBQpDYNTmiGo1gn67Vti04lQ== - /@babel/plugin-transform-react-jsx-source/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-react-jsx/7.13.12_@babel+core@7.12.3: + resolution: {integrity: sha512-jcEI2UqIcpCqB5U5DRxIl0tQEProI2gcu+g8VTIqxLO5Iidojb4d77q+fwGseCvd8af/lJ9masp4QWzBXFE2xA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-O5JJi6fyfih0WfDgIJXksSPhGP/G0fQpfxYy87sDc+1sFmsCS6wr3aAn+whbzkhbjtq4VMqLRaSzR6IsshIC0Q== - /@babel/plugin-transform-react-jsx/7.13.12_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-annotate-as-pure': 7.12.13 @@ -1061,39 +1427,39 @@ packages: '@babel/plugin-syntax-jsx': 7.12.13_@babel+core@7.12.3 '@babel/types': 7.13.13 dev: true + + /@babel/plugin-transform-react-pure-annotations/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-RqeaHiwZtphSIUZ5I85PEH19LOSzxfuEazoY7/pWASCAIBuATQzpSVD+eT6MebeeZT2F4eSL0u4vw6n4Nm0Mjg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-jcEI2UqIcpCqB5U5DRxIl0tQEProI2gcu+g8VTIqxLO5Iidojb4d77q+fwGseCvd8af/lJ9masp4QWzBXFE2xA== - /@babel/plugin-transform-react-pure-annotations/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-annotate-as-pure': 7.12.13 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-regenerator/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-lxb2ZAvSLyJ2PEe47hoGWPmW22v7CtSl9jW8mingV4H2sEX/JOcrAj2nPuGWi56ERUm2bUpjKzONAuT6HCn2EA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-RqeaHiwZtphSIUZ5I85PEH19LOSzxfuEazoY7/pWASCAIBuATQzpSVD+eT6MebeeZT2F4eSL0u4vw6n4Nm0Mjg== - /@babel/plugin-transform-regenerator/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 regenerator-transform: 0.14.5 dev: true + + /@babel/plugin-transform-reserved-words/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-xhUPzDXxZN1QfiOy/I5tyye+TRz6lA7z6xaT4CLOjPRMVg1ldRf0LHw0TDBpYL4vG78556WuHdyO9oi5UmzZBg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: 
sha512-lxb2ZAvSLyJ2PEe47hoGWPmW22v7CtSl9jW8mingV4H2sEX/JOcrAj2nPuGWi56ERUm2bUpjKzONAuT6HCn2EA== - /@babel/plugin-transform-reserved-words/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-runtime/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-Ac/H6G9FEIkS2tXsZjL4RAdS3L3WHxci0usAnz7laPWUmFiGtj7tIASChqKZMHTSQTQY6xDbOq+V1/vIq3QrWg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-xhUPzDXxZN1QfiOy/I5tyye+TRz6lA7z6xaT4CLOjPRMVg1ldRf0LHw0TDBpYL4vG78556WuHdyO9oi5UmzZBg== - /@babel/plugin-transform-runtime/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-module-imports': 7.13.12 @@ -1101,87 +1467,89 @@ packages: resolve: 1.18.1 semver: 5.7.1 dev: true + + /@babel/plugin-transform-shorthand-properties/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-xpL49pqPnLtf0tVluuqvzWIgLEhuPpZzvs2yabUHSKRNlN7ScYU7aMlmavOeyXJZKgZKQRBlh8rHbKiJDraTSw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-Ac/H6G9FEIkS2tXsZjL4RAdS3L3WHxci0usAnz7laPWUmFiGtj7tIASChqKZMHTSQTQY6xDbOq+V1/vIq3QrWg== - /@babel/plugin-transform-shorthand-properties/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-spread/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-V6vkiXijjzYeFmQTr3dBxPtZYLPcUfY34DebOU27jIl2M/Y8Egm52Hw82CSjjPqd54GTlJs5x+CR7HeNr24ckg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-xpL49pqPnLtf0tVluuqvzWIgLEhuPpZzvs2yabUHSKRNlN7ScYU7aMlmavOeyXJZKgZKQRBlh8rHbKiJDraTSw== - /@babel/plugin-transform-spread/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 dev: true + + /@babel/plugin-transform-sticky-regex/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-Jc3JSaaWT8+fr7GRvQP02fKDsYk4K/lYwWq38r/UGfaxo89ajud321NH28KRQ7xy1Ybc0VUE5Pz8psjNNDUglg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-V6vkiXijjzYeFmQTr3dBxPtZYLPcUfY34DebOU27jIl2M/Y8Egm52Hw82CSjjPqd54GTlJs5x+CR7HeNr24ckg== - /@babel/plugin-transform-sticky-regex/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-template-literals/7.13.0_@babel+core@7.12.3: + resolution: {integrity: sha512-d67umW6nlfmr1iehCcBv69eSUSySk1EsIS8aTDX4Xo9qajAh6mYtcl4kJrBkGXuxZPEgVr7RVfAvNW6YQkd4Mw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-Jc3JSaaWT8+fr7GRvQP02fKDsYk4K/lYwWq38r/UGfaxo89ajud321NH28KRQ7xy1Ybc0VUE5Pz8psjNNDUglg== - /@babel/plugin-transform-template-literals/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-typeof-symbol/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-eKv/LmUJpMnu4npgfvs3LiHhJua5fo/CysENxa45YCQXZwKnGCQKAg87bvoqSW1fFT+HA32l03Qxsm8ouTY3ZQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-d67umW6nlfmr1iehCcBv69eSUSySk1EsIS8aTDX4Xo9qajAh6mYtcl4kJrBkGXuxZPEgVr7RVfAvNW6YQkd4Mw== - /@babel/plugin-transform-typeof-symbol/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-typescript/7.13.0_@babel+core@7.12.3: + resolution: {integrity: 
sha512-elQEwluzaU8R8dbVuW2Q2Y8Nznf7hnjM7+DSCd14Lo5fF63C9qNLbwZYbmZrtV9/ySpSUpkRpQXvJb6xyu4hCQ==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-eKv/LmUJpMnu4npgfvs3LiHhJua5fo/CysENxa45YCQXZwKnGCQKAg87bvoqSW1fFT+HA32l03Qxsm8ouTY3ZQ== - /@babel/plugin-transform-typescript/7.13.0_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-class-features-plugin': 7.13.11_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-syntax-typescript': 7.12.13_@babel+core@7.12.3 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/plugin-transform-unicode-escapes/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-0bHEkdwJ/sN/ikBHfSmOXPypN/beiGqjo+o4/5K+vxEFNPRPdImhviPakMKG4x96l85emoa0Z6cDflsdBusZbw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-elQEwluzaU8R8dbVuW2Q2Y8Nznf7hnjM7+DSCd14Lo5fF63C9qNLbwZYbmZrtV9/ySpSUpkRpQXvJb6xyu4hCQ== - /@babel/plugin-transform-unicode-escapes/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/plugin-transform-unicode-regex/7.12.13_@babel+core@7.12.3: + resolution: {integrity: sha512-mDRzSNY7/zopwisPZ5kM9XKCfhchqIYwAKRERtEnhYscZB79VRekuRSoYbN0+KVe3y8+q1h6A4svXtP7N+UoCA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-0bHEkdwJ/sN/ikBHfSmOXPypN/beiGqjo+o4/5K+vxEFNPRPdImhviPakMKG4x96l85emoa0Z6cDflsdBusZbw== - /@babel/plugin-transform-unicode-regex/7.12.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-create-regexp-features-plugin': 7.12.17_@babel+core@7.12.3 '@babel/helper-plugin-utils': 7.13.0 dev: true + + /@babel/preset-env/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-H8kxXmtPaAGT7TyBvSSkoSTUK6RHh61So05SyEbpmr0MCZrsNYn7mGMzzeYoOUCdHzww61k8XBft2TaES+xPLg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-mDRzSNY7/zopwisPZ5kM9XKCfhchqIYwAKRERtEnhYscZB79VRekuRSoYbN0+KVe3y8+q1h6A4svXtP7N+UoCA== - /@babel/preset-env/7.12.1_@babel+core@7.12.3: dependencies: '@babel/compat-data': 7.13.12 '@babel/core': 7.12.3 @@ -1250,12 +1618,14 @@ packages: '@babel/types': 7.13.13 core-js-compat: 3.9.1 semver: 5.7.1 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/preset-env/7.13.12_@babel+core@7.12.3: + resolution: {integrity: sha512-JzElc6jk3Ko6zuZgBtjOd01pf9yYDEIH8BcqVuYIuOkzOwDesoa/Nz4gIo4lBG6K861KTV9TvIgmFuT6ytOaAA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-H8kxXmtPaAGT7TyBvSSkoSTUK6RHh61So05SyEbpmr0MCZrsNYn7mGMzzeYoOUCdHzww61k8XBft2TaES+xPLg== - /@babel/preset-env/7.13.12_@babel+core@7.12.3: dependencies: '@babel/compat-data': 7.13.12 '@babel/core': 7.12.3 @@ -1327,12 +1697,14 @@ packages: babel-plugin-polyfill-regenerator: 0.1.6_@babel+core@7.12.3 core-js-compat: 3.9.1 semver: 6.3.0 + transitivePeerDependencies: + - supports-color dev: true + + /@babel/preset-modules/0.1.4_@babel+core@7.12.3: + resolution: {integrity: sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-JzElc6jk3Ko6zuZgBtjOd01pf9yYDEIH8BcqVuYIuOkzOwDesoa/Nz4gIo4lBG6K861KTV9TvIgmFuT6ytOaAA== - /@babel/preset-modules/0.1.4_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 @@ -1341,11 +1713,11 @@ packages: '@babel/types': 7.13.13 esutils: 2.0.3 dev: true + + /@babel/preset-react/7.12.1_@babel+core@7.12.3: + 
resolution: {integrity: sha512-euCExymHCi0qB9u5fKw7rvlw7AZSjw/NaB9h7EkdTt5+yHRrXdiRTh7fkG3uBPpJg82CqLfp1LHLqWGSCrab+g==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg== - /@babel/preset-react/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 @@ -1356,11 +1728,11 @@ packages: '@babel/plugin-transform-react-jsx-source': 7.12.13_@babel+core@7.12.3 '@babel/plugin-transform-react-pure-annotations': 7.12.1_@babel+core@7.12.3 dev: true + + /@babel/preset-react/7.13.13_@babel+core@7.12.3: + resolution: {integrity: sha512-gx+tDLIE06sRjKJkVtpZ/t3mzCDOnPG+ggHZG9lffUbX8+wC739x20YQc9V35Do6ZAxaUc/HhVHIiOzz5MvDmA==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-euCExymHCi0qB9u5fKw7rvlw7AZSjw/NaB9h7EkdTt5+yHRrXdiRTh7fkG3uBPpJg82CqLfp1LHLqWGSCrab+g== - /@babel/preset-react/7.13.13_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 @@ -1370,46 +1742,74 @@ packages: '@babel/plugin-transform-react-jsx-development': 7.12.17_@babel+core@7.12.3 '@babel/plugin-transform-react-pure-annotations': 7.12.1_@babel+core@7.12.3 dev: true + + /@babel/preset-typescript/7.12.1_@babel+core@7.12.3: + resolution: {integrity: sha512-hNK/DhmoJPsksdHuI/RVrcEws7GN5eamhi28JkO52MqIxU8Z0QpmiSOQxZHWOHV7I3P4UjHV97ay4TcamMA6Kw==} peerDependencies: '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-gx+tDLIE06sRjKJkVtpZ/t3mzCDOnPG+ggHZG9lffUbX8+wC739x20YQc9V35Do6ZAxaUc/HhVHIiOzz5MvDmA== - /@babel/preset-typescript/7.12.1_@babel+core@7.12.3: dependencies: '@babel/core': 7.12.3 '@babel/helper-plugin-utils': 7.13.0 '@babel/plugin-transform-typescript': 7.13.0_@babel+core@7.12.3 + transitivePeerDependencies: + - supports-color dev: true - peerDependencies: - '@babel/core': ^7.0.0-0 - resolution: - integrity: sha512-hNK/DhmoJPsksdHuI/RVrcEws7GN5eamhi28JkO52MqIxU8Z0QpmiSOQxZHWOHV7I3P4UjHV97ay4TcamMA6Kw== + /@babel/runtime-corejs3/7.13.10: + resolution: {integrity: sha512-x/XYVQ1h684pp1mJwOV4CyvqZXqbc8CMsMGUnAbuc82ZCdv1U63w5RSUzgDSXQHG5Rps/kiksH6g2D5BuaKyXg==} dependencies: core-js-pure: 3.9.1 regenerator-runtime: 0.13.7 dev: true - resolution: - integrity: sha512-x/XYVQ1h684pp1mJwOV4CyvqZXqbc8CMsMGUnAbuc82ZCdv1U63w5RSUzgDSXQHG5Rps/kiksH6g2D5BuaKyXg== + /@babel/runtime/7.12.1: + resolution: {integrity: sha512-J5AIf3vPj3UwXaAzb5j1xM4WAQDX3EMgemF8rjCP3SoW09LfRKAXQKt6CoVYl230P6iWdRcBbnLDDdnqWxZSCA==} dependencies: regenerator-runtime: 0.13.7 dev: true - resolution: - integrity: sha512-J5AIf3vPj3UwXaAzb5j1xM4WAQDX3EMgemF8rjCP3SoW09LfRKAXQKt6CoVYl230P6iWdRcBbnLDDdnqWxZSCA== + + /@babel/runtime/7.12.5: + resolution: {integrity: sha512-plcc+hbExy3McchJCEQG3knOsuh3HH+Prx1P6cLIkET/0dLuQDEnrT+s27Axgc9bqfsmNUNHfscgMUdBpC9xfg==} + dependencies: + regenerator-runtime: 0.13.9 + dev: true + /@babel/runtime/7.13.10: + resolution: {integrity: sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw==} dependencies: regenerator-runtime: 0.13.7 - resolution: - integrity: sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw== + + /@babel/runtime/7.14.8: + resolution: {integrity: sha512-twj3L8Og5SaCRCErB4x4ajbvBIVV77CGeFglHpeg5WC5FF8TZzBWXtTJ4MqaD9QszLYTtr+IsaAL2rEUevb+eg==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.9 + + /@babel/runtime/7.20.13: + resolution: {integrity: 
sha512-gt3PKXs0DBoL9xCvOIIZ2NEqAGZqHjAnmVbfQtB620V0uReIQutpel14KcneZuer7UioY8ALKZ7iocavvzTNFA==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.11 + dev: false + /@babel/template/7.12.13: + resolution: {integrity: sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA==} dependencies: '@babel/code-frame': 7.12.13 '@babel/parser': 7.13.13 '@babel/types': 7.13.13 - resolution: - integrity: sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA== + + /@babel/template/7.18.10: + resolution: {integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/parser': 7.20.5 + '@babel/types': 7.20.5 + dev: true + /@babel/traverse/7.13.13: + resolution: {integrity: sha512-CblEcwmXKR6eP43oQGG++0QMTtCjAsa3frUuzHoiIJWpaIIi8dwMyEFUJoXRLxagGqCK+jALRwIO+o3R9p/uUg==} dependencies: '@babel/code-frame': 7.12.13 '@babel/generator': 7.13.9 @@ -1419,10 +1819,12 @@ packages: '@babel/types': 7.13.13 debug: 4.3.1 globals: 11.12.0 + transitivePeerDependencies: + - supports-color dev: true - resolution: - integrity: sha512-CblEcwmXKR6eP43oQGG++0QMTtCjAsa3frUuzHoiIJWpaIIi8dwMyEFUJoXRLxagGqCK+jALRwIO+o3R9p/uUg== + /@babel/traverse/7.13.13_supports-color@5.5.0: + resolution: {integrity: sha512-CblEcwmXKR6eP43oQGG++0QMTtCjAsa3frUuzHoiIJWpaIIi8dwMyEFUJoXRLxagGqCK+jALRwIO+o3R9p/uUg==} dependencies: '@babel/code-frame': 7.12.13 '@babel/generator': 7.13.9 @@ -1432,73 +1834,105 @@ packages: '@babel/types': 7.13.13 debug: 4.3.1_supports-color@5.5.0 globals: 11.12.0 - dev: false - peerDependencies: - supports-color: '*' - resolution: - integrity: sha512-CblEcwmXKR6eP43oQGG++0QMTtCjAsa3frUuzHoiIJWpaIIi8dwMyEFUJoXRLxagGqCK+jALRwIO+o3R9p/uUg== + transitivePeerDependencies: + - supports-color + dev: false + + /@babel/traverse/7.20.5: + resolution: {integrity: sha512-WM5ZNN3JITQIq9tFZaw1ojLU3WgWdtkxnhM1AegMS+PvHjkM5IXjmYEGY7yukz5XS4sJyEf2VzWjI8uAavhxBQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.20.5 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.20.5 + '@babel/types': 7.20.5 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/types/7.13.13: + resolution: {integrity: sha512-kt+EpC6qDfIaqlP+DIbIJOclYy/A1YXs9dAf/ljbi+39Bcbc073H6jKVpXEr/EoIh5anGn5xq/yRVzKl+uIc9w==} dependencies: '@babel/helper-validator-identifier': 7.12.11 lodash: 4.17.21 to-fast-properties: 2.0.0 - resolution: - integrity: sha512-kt+EpC6qDfIaqlP+DIbIJOclYy/A1YXs9dAf/ljbi+39Bcbc073H6jKVpXEr/EoIh5anGn5xq/yRVzKl+uIc9w== + + /@babel/types/7.20.5: + resolution: {integrity: sha512-c9fst/h2/dcF7H+MJKZ2T0KjEQ8hY/BNnDk/H3XY8C4Aw/eWQXWn/lWntHF9ooUBnGmEvbfGrTgLWc+um0YDUg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.19.4 + '@babel/helper-validator-identifier': 7.19.1 + to-fast-properties: 2.0.0 + dev: true + + /@babel/types/7.20.7: + resolution: {integrity: sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.19.4 + '@babel/helper-validator-identifier': 7.19.1 + to-fast-properties: 2.0.0 + dev: true + /@bcoe/v8-coverage/0.2.3: + 
resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} dev: true - resolution: - integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + + /@bloomberg/record-tuple-polyfill/0.0.3: + resolution: {integrity: sha512-sBnCqW0nqofE47mxFnw+lvx6kzsQstwaQMVkh66qm/A6IlsnH7WsyGuVXTou8RF2wL4W7ybOoHPvP2WgIo6rhQ==} + dev: true + /@cnakazawa/watch/1.0.4: + resolution: {integrity: sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ==} + engines: {node: '>=0.1.95'} + hasBin: true dependencies: exec-sh: 0.3.6 minimist: 1.2.5 dev: true - engines: - node: '>=0.1.95' - hasBin: true - resolution: - integrity: sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ== + /@csstools/convert-colors/1.4.0: + resolution: {integrity: sha512-5a6wqoJV/xEdbRNKVo6I4hO3VjyDq//8q2f9I6PBAvMesJHFauXDorcNCsr9RzvsZnaWi5NYCcfyqP1QeFHFbw==} + engines: {node: '>=4.0.0'} dev: true - engines: - node: '>=4.0.0' - resolution: - integrity: sha512-5a6wqoJV/xEdbRNKVo6I4hO3VjyDq//8q2f9I6PBAvMesJHFauXDorcNCsr9RzvsZnaWi5NYCcfyqP1QeFHFbw== + /@csstools/normalize.css/10.1.0: + resolution: {integrity: sha512-ij4wRiunFfaJxjB0BdrYHIH8FxBJpOwNPhhAcunlmPdXudL1WQV1qoP9un6JsEBAgQH+7UXyyjh0g7jTxXK6tg==} dev: true - resolution: - integrity: sha512-ij4wRiunFfaJxjB0BdrYHIH8FxBJpOwNPhhAcunlmPdXudL1WQV1qoP9un6JsEBAgQH+7UXyyjh0g7jTxXK6tg== - /@ctrl/tinycolor/3.4.0: - dev: false - engines: - node: '>=10' - resolution: - integrity: sha512-JZButFdZ1+/xAfpguQHoabIXkcqRRKpMrWKBkpEZZyxfY9C1DpADFB8PEqGSTeFr135SaTRfKqGKx5xSCLI7ZQ== + /@discoveryjs/json-ext/0.5.2: + resolution: {integrity: sha512-HyYEUDeIj5rRQU2Hk5HTB2uHsbRQpF70nvMhVzi+VJR0X+xNEhjPui4/kBf3VeH/wqD28PT4sVOm8qqLjBrSZg==} + engines: {node: '>=10.0.0'} dev: true - engines: - node: '>=10.0.0' - resolution: - integrity: sha512-HyYEUDeIj5rRQU2Hk5HTB2uHsbRQpF70nvMhVzi+VJR0X+xNEhjPui4/kBf3VeH/wqD28PT4sVOm8qqLjBrSZg== + /@emotion/is-prop-valid/0.8.8: + resolution: {integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==} dependencies: '@emotion/memoize': 0.7.4 dev: false - resolution: - integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA== + /@emotion/memoize/0.7.4: + resolution: {integrity: sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==} dev: false - resolution: - integrity: sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw== + /@emotion/stylis/0.8.5: + resolution: {integrity: sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==} dev: false - resolution: - integrity: sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ== + /@emotion/unitless/0.7.5: + resolution: {integrity: sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==} dev: false - resolution: - integrity: sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== + /@eslint/eslintrc/0.4.0: + resolution: {integrity: sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog==} + engines: {node: ^10.12.0 || >=12.0.0} dependencies: ajv: 6.12.6 debug: 4.3.1 @@ -1509,216 +1943,149 @@ packages: js-yaml: 3.14.1 minimatch: 3.0.4 strip-json-comments: 3.1.1 + 
transitivePeerDependencies: + - supports-color dev: true - engines: - node: ^10.12.0 || >=12.0.0 - resolution: - integrity: sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog== - /@formily/antd-components/1.3.13_b13a12cfbb184a60a7e1275d8262e450: - dependencies: - '@ant-design/icons': 4.6.2_react-dom@17.0.2+react@17.0.2 - '@formily/antd': 1.3.13_b13a12cfbb184a60a7e1275d8262e450 - '@formily/react-schema-renderer': 1.3.13_d8837eed98748ff5a1dd894fdd80cd72 - '@formily/react-shared-components': 1.3.13 - '@formily/shared': 1.3.13 - antd: 4.14.1_2235c505ed33ea6efd93d3050f896208 - classnames: 2.2.6 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - styled-components: 5.2.1_react-dom@17.0.2+react@17.0.2 + + /@formily/core/2.2.12: + resolution: {integrity: sha512-79UjPFLm04zEvrrdIuQDdGG4g/Jdr78SKNfqGWDudHIgL+7pjcUn0usjspGiK2SP+f5ma/X7KMt2pktCQArHvw==} + engines: {npm: '>=3.0.0'} + dependencies: + '@formily/reactive': 2.2.12 + '@formily/shared': 2.2.12 + '@formily/validator': 2.2.12 dev: false - engines: - npm: '>=3.0.0' + + /@formily/json-schema/2.2.12_typescript@4.2.3: + resolution: {integrity: sha512-t1rKA748PVLcPpemlyXNhtGlgDNCMVJeFnxKDjrjU0vAMF3bnncQuwGVQzESvSX0e44u1PSCEFCPk7vymFNl4A==} + engines: {npm: '>=3.0.0'} peerDependencies: - '@types/react': '*' - antd: ^3.14.1 || ^4.0.0 - react: '>=16.8.0' - react-dom: '>=16.8.0' - styled-components: ^4.1.1 - resolution: - integrity: sha512-1QDTwXJ8srbJ1LjcQR5zdC1rFPk7m7vh4bfp0+mtyh6s1ifslnG2sJHESw9a7HqVDjEiVihLYcbUyvmDtVYnwQ== - /@formily/antd/1.3.13_b13a12cfbb184a60a7e1275d8262e450: - dependencies: - '@formily/react-schema-renderer': 1.3.13_f57bb6aef800468546cdc9a81ce5c019 - '@formily/react-shared-components': 1.3.13 - '@formily/shared': 1.3.13 - antd: 4.14.1_2235c505ed33ea6efd93d3050f896208 - classnames: 2.2.6 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-eva: 1.1.14 - react-stikky: 0.1.30 - rxjs: 6.6.7 - styled-components: 5.2.1_react-dom@17.0.2+react@17.0.2 + typescript: ^4.1.5 + dependencies: + '@formily/core': 2.2.12 + '@formily/reactive': 2.2.12 + '@formily/shared': 2.2.12 + typescript: 4.2.3 dev: false - engines: - npm: '>=3.0.0' - peerDependencies: - '@types/react': '*' - antd: ^3.14.1 || ^4.0.0 - react: '>=16.8.0' - react-dom: '>=16.8.0' - styled-components: ^4.1.1 - resolution: - integrity: sha512-xKXSkrlLI/jyf25/aVdxSYwXJ+WWYlagAz6dZzk6MVuXvH2SZHbCSEl7QvsFy8Up2UKVUysq0xD9WoSev7T/8w== - /@formily/core/1.3.13: - dependencies: - '@formily/shared': 1.3.13 - '@formily/validator': 1.3.13 - immer: 6.0.9 - dev: false - engines: - npm: '>=3.0.0' - peerDependencies: - scheduler: '>=0.11.2' - resolution: - integrity: sha512-Llwa0VbDOWptDgKz1l9gjn4V0Rrrx7cr7at2cDgBnH8qBCYPqvmsh9VBQ1mT/anvq1LbMoo8VoHG+CXTBa6UXA== - /@formily/react-schema-renderer/1.3.13_d8837eed98748ff5a1dd894fdd80cd72: - dependencies: - '@formily/core': 1.3.13 - '@formily/react': 1.3.13_d8837eed98748ff5a1dd894fdd80cd72 - '@formily/shared': 1.3.13 - '@formily/validator': 1.3.13 - '@types/react': 16.14.5 - hoist-non-react-statics: 3.3.2 - pascal-case: 2.0.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 + + /@formily/path/2.2.12: + resolution: {integrity: sha512-UZ7vL2Lj2S99YPqopKL6z53ShdtzJUOP1OROXKXRI0DkoDnpLYl3wtgUVUC1PNlOqF25K9ljw86Q5fVJMno6mw==} + engines: {npm: '>=3.0.0'} dev: false - engines: - npm: '>=3.0.0' + + /@formily/react/2.2.12_338bd5ec353cfb7439af722c4fbc028f: + resolution: {integrity: sha512-k8MZZ6Own3D8NdJs+/BDiTatyWY8cZ5x6Lmq/D/e8KtgVS88FB2cjgBh7vx31QYlsRxJR/27cfR77O+0OGzuzw==} + engines: {npm: '>=3.0.0'} 
peerDependencies: - '@types/react': ^16.8.23 + '@types/react': '>=16.8.0' + '@types/react-dom': '>=16.8.0' react: '>=16.8.0' react-dom: '>=16.8.0' - react-eva: ^1.1.7 - rxjs: ^6.5.1 - resolution: - integrity: sha512-2iFWqhv/EBwQOwsOAqBjI/mPnVTyymtumnMtkpis7conFK3NJstXnYzhRmYKRAV+41beSaKpesYu2HvtYrq1IA== - /@formily/react-schema-renderer/1.3.13_f57bb6aef800468546cdc9a81ce5c019: - dependencies: - '@formily/core': 1.3.13 - '@formily/react': 1.3.13_f57bb6aef800468546cdc9a81ce5c019 - '@formily/shared': 1.3.13 - '@formily/validator': 1.3.13 + react-is: '>=16.8.0' + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + dependencies: + '@formily/core': 2.2.12 + '@formily/json-schema': 2.2.12_typescript@4.2.3 + '@formily/reactive': 2.2.12 + '@formily/reactive-react': 2.2.12_f7ae02aba8ed3dfe5b262aea61025694 + '@formily/shared': 2.2.12 + '@formily/validator': 2.2.12 '@types/react': 16.14.5 + '@types/react-dom': 16.9.12 hoist-non-react-statics: 3.3.2 - pascal-case: 2.0.1 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - react-eva: 1.1.14 - rxjs: 6.6.7 + transitivePeerDependencies: + - typescript dev: false - engines: - npm: '>=3.0.0' + + /@formily/reactive-react/2.2.12_f7ae02aba8ed3dfe5b262aea61025694: + resolution: {integrity: sha512-jkH8j4q9Pw7Itppi6FF3YXF+J+GAMpWErfsmr8TlVsMBKs69hSP+Xet0CMrZga61bxMlq+UDylgQ7Dtuho57iA==} + engines: {npm: '>=3.0.0'} peerDependencies: - '@types/react': ^16.8.23 + '@types/react': '>=16.8.0' + '@types/react-dom': '>=16.8.0' react: '>=16.8.0' react-dom: '>=16.8.0' - react-eva: ^1.1.7 - rxjs: ^6.5.1 - resolution: - integrity: sha512-2iFWqhv/EBwQOwsOAqBjI/mPnVTyymtumnMtkpis7conFK3NJstXnYzhRmYKRAV+41beSaKpesYu2HvtYrq1IA== - /@formily/react-shared-components/1.3.13: - dependencies: - '@formily/shared': 1.3.13 - react-drag-listview: 0.1.8 - dev: false - engines: - npm: '>=3.0.0' - resolution: - integrity: sha512-6YSxgM0dAisMjg+77Lsi7uBl0jfXxVjZqRGKPssIinfFQiyVQg9JolYun/vsLnWWqCvFJabtxlikcpfhPI7j8A== - /@formily/react/1.3.13_d8837eed98748ff5a1dd894fdd80cd72: - dependencies: - '@formily/core': 1.3.13 - '@formily/shared': 1.3.13 + react-is: '>=16.8.0' + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + dependencies: + '@formily/reactive': 2.2.12 '@types/react': 16.14.5 + '@types/react-dom': 16.9.12 + hoist-non-react-statics: 3.3.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 dev: false - engines: - npm: '>=3.0.0' - peerDependencies: - '@types/react': ^16.8.23 - react: '>=16.8.0' - react-dom: '>=16.8.0' - react-eva: ^1.0.0-alpha.0 - rxjs: ^6.5.1 - resolution: - integrity: sha512-zbfCuGDvHBRnR4FwekOtU8KljJW2fb7aV7Lz3arMOhHYnUD1CoNasmpptPzNCi8SwkrNSJZ2dBu1ZSn7w4aM7Q== - /@formily/react/1.3.13_f57bb6aef800468546cdc9a81ce5c019: - dependencies: - '@formily/core': 1.3.13 - '@formily/shared': 1.3.13 - '@types/react': 16.14.5 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-eva: 1.1.14 - rxjs: 6.6.7 + + /@formily/reactive/2.2.12: + resolution: {integrity: sha512-iW3p0ENVblDQeThFh8d1QRjPOTpaTf8+LcTAPuwa2Gq6VzGfz3xpPGybgbvgkdcxesWWQGDMBDYoFtrCoyrvMQ==} + engines: {npm: '>=3.0.0'} dev: false - engines: - npm: '>=3.0.0' - peerDependencies: - '@types/react': ^16.8.23 - react: '>=16.8.0' - react-dom: '>=16.8.0' - react-eva: ^1.0.0-alpha.0 - rxjs: ^6.5.1 - resolution: - integrity: sha512-zbfCuGDvHBRnR4FwekOtU8KljJW2fb7aV7Lz3arMOhHYnUD1CoNasmpptPzNCi8SwkrNSJZ2dBu1ZSn7w4aM7Q== - /@formily/shared/1.3.13: - dependencies: - camel-case: 3.0.0 - cool-path: 0.1.32 - lower-case: 1.1.4 - scheduler: 0.19.1 - 
upper-case: 1.1.3 + + /@formily/shared/2.2.12: + resolution: {integrity: sha512-BhnG0aQ/4o2weDOig3IOm6A+8Pc3oQnjQky68FoK+NVoXQUkjTSdARYzMa7WzFK1uMzZWqeGvqx5FAs/HmlIgQ==} + engines: {npm: '>=3.0.0'} + dependencies: + '@formily/path': 2.2.12 + camel-case: 4.1.2 + lower-case: 2.0.2 + no-case: 3.0.4 + param-case: 3.0.4 + pascal-case: 3.1.2 + upper-case: 2.0.2 dev: false - engines: - npm: '>=3.0.0' - resolution: - integrity: sha512-pu0/8xJT39NOqI8vYRQik4DK5gLoUsQgA3HLWeTR0TzyNA/b0ozKpri2FMl3sp4aab+S+QOJiY5tpIyJ+feldA== - /@formily/validator/1.3.13: + + /@formily/validator/2.2.12: + resolution: {integrity: sha512-CldnZFZgXO1Aw1rPBMuJ5UueCpD2GdTnui0fbSVXJcP7SI9SfDmKePl4mU0SQli38IJN6YDtao9Qnc52eSOXMQ==} + engines: {npm: '>=3.0.0'} dependencies: - '@formily/shared': 1.3.13 + '@formily/shared': 2.2.12 dev: false - engines: - npm: '>=3.0.0' - resolution: - integrity: sha512-+nKNc8Tdi/7P/3MgURq1uMp11ExFGTeVsnHI0bOFBNs5JyI6853evpfIZwrk/G/wh8ev6KGztRKpTd9x136EVg== + /@hapi/address/2.1.4: + resolution: {integrity: sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==} deprecated: Moved to 'npm install @sideway/address' dev: true - resolution: - integrity: sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ== + /@hapi/bourne/1.3.2: + resolution: {integrity: sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==} deprecated: This version has been deprecated and is no longer supported or maintained dev: true - resolution: - integrity: sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA== + /@hapi/hoek/8.5.1: + resolution: {integrity: sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==} deprecated: This version has been deprecated and is no longer supported or maintained dev: true - resolution: - integrity: sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow== + /@hapi/joi/15.1.1: + resolution: {integrity: sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==} + deprecated: Switch to 'npm install joi' dependencies: '@hapi/address': 2.1.4 '@hapi/bourne': 1.3.2 '@hapi/hoek': 8.5.1 '@hapi/topo': 3.1.6 - deprecated: Switch to 'npm install joi' dev: true - resolution: - integrity: sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ== + /@hapi/topo/3.1.6: + resolution: {integrity: sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==} + deprecated: This version has been deprecated and is no longer supported or maintained dependencies: '@hapi/hoek': 8.5.1 - deprecated: This version has been deprecated and is no longer supported or maintained dev: true - resolution: - integrity: sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ== + /@istanbuljs/load-nyc-config/1.1.0: + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} dependencies: camelcase: 5.3.1 find-up: 4.1.0 @@ -1726,17 +2093,15 @@ packages: js-yaml: 3.14.1 resolve-from: 5.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + /@istanbuljs/schema/0.1.3: + resolution: {integrity: 
sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + /@jest/console/26.6.2: + resolution: {integrity: sha512-IY1R2i2aLsLr7Id3S6p2BA82GNWryt4oSvEXLAKc+L2zdi89dSkE8xC1C+0kpATG4JhBJREnQOH7/zmccM2B0g==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 '@types/node': 12.20.7 @@ -1745,11 +2110,10 @@ packages: jest-util: 26.6.2 slash: 3.0.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-IY1R2i2aLsLr7Id3S6p2BA82GNWryt4oSvEXLAKc+L2zdi89dSkE8xC1C+0kpATG4JhBJREnQOH7/zmccM2B0g== + /@jest/core/26.6.3: + resolution: {integrity: sha512-xvV1kKbhfUqFVuZ8Cyo+JPpipAHHAV3kcDBftiduK8EICXmTFddryy3P7NfZt8Pv37rA9nEJBKCCkglCPt/Xjw==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/console': 26.6.2 '@jest/reporters': 26.6.2 @@ -1779,23 +2143,27 @@ packages: rimraf: 3.0.2 slash: 3.0.0 strip-ansi: 6.0.0 - dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-xvV1kKbhfUqFVuZ8Cyo+JPpipAHHAV3kcDBftiduK8EICXmTFddryy3P7NfZt8Pv37rA9nEJBKCCkglCPt/Xjw== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /@jest/environment/26.6.2: + resolution: {integrity: sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/fake-timers': 26.6.2 '@jest/types': 26.6.2 '@types/node': 12.20.7 jest-mock: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA== + /@jest/fake-timers/26.6.2: + resolution: {integrity: sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 '@sinonjs/fake-timers': 6.0.1 @@ -1804,21 +2172,19 @@ packages: jest-mock: 26.6.2 jest-util: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA== + /@jest/globals/26.6.2: + resolution: {integrity: sha512-85Ltnm7HlB/KesBUuALwQ68YTU72w9H2xW9FjZ1eL1U3lhtefjjl5c2MiUbpXt/i6LaPRvoOFJ22yCBSfQ0JIA==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/environment': 26.6.2 '@jest/types': 26.6.2 expect: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-85Ltnm7HlB/KesBUuALwQ68YTU72w9H2xW9FjZ1eL1U3lhtefjjl5c2MiUbpXt/i6LaPRvoOFJ22yCBSfQ0JIA== + /@jest/reporters/26.6.2: + resolution: {integrity: sha512-h2bW53APG4HvkOnVMo8q3QXa6pcaNt1HkwVsOPMBV6LD/q9oSpxNSYZQYkAnjdMjrJ86UuYeLo+aEZClV6opnw==} + engines: {node: '>= 10.14.2'} dependencies: '@bcoe/v8-coverage': 0.2.3 '@jest/console': 26.6.2 @@ -1844,47 +2210,51 @@ packages: string-length: 4.0.2 terminal-link: 2.1.1 v8-to-istanbul: 7.1.0 - dev: true - engines: - node: '>= 10.14.2' optionalDependencies: node-notifier: 8.0.2 - resolution: - integrity: sha512-h2bW53APG4HvkOnVMo8q3QXa6pcaNt1HkwVsOPMBV6LD/q9oSpxNSYZQYkAnjdMjrJ86UuYeLo+aEZClV6opnw== + transitivePeerDependencies: + - supports-color + dev: true + /@jest/source-map/26.6.2: + resolution: {integrity: sha512-YwYcCwAnNmOVsZ8mr3GfnzdXDAl4LaenZP5z+G0c8bzC9/dugL8zRmxZzdoTl4IaS3CryS1uWnROLPFmb6lVvA==} + engines: {node: '>= 10.14.2'} dependencies: callsites: 3.1.0 
graceful-fs: 4.2.6 source-map: 0.6.1 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-YwYcCwAnNmOVsZ8mr3GfnzdXDAl4LaenZP5z+G0c8bzC9/dugL8zRmxZzdoTl4IaS3CryS1uWnROLPFmb6lVvA== + /@jest/test-result/26.6.2: + resolution: {integrity: sha512-5O7H5c/7YlojphYNrK02LlDIV2GNPYisKwHm2QTKjNZeEzezCbwYs9swJySv2UfPMyZ0VdsmMv7jIlD/IKYQpQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/console': 26.6.2 '@jest/types': 26.6.2 '@types/istanbul-lib-coverage': 2.0.3 collect-v8-coverage: 1.0.1 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-5O7H5c/7YlojphYNrK02LlDIV2GNPYisKwHm2QTKjNZeEzezCbwYs9swJySv2UfPMyZ0VdsmMv7jIlD/IKYQpQ== + /@jest/test-sequencer/26.6.3: + resolution: {integrity: sha512-YHlVIjP5nfEyjlrSr8t/YdNfU/1XEt7c5b4OxcXCjyRhjzLYu/rO69/WHPuYcbCWkz8kAeZVZp2N2+IOLLEPGw==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/test-result': 26.6.2 graceful-fs: 4.2.6 jest-haste-map: 26.6.2 jest-runner: 26.6.3 jest-runtime: 26.6.3 - dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-YHlVIjP5nfEyjlrSr8t/YdNfU/1XEt7c5b4OxcXCjyRhjzLYu/rO69/WHPuYcbCWkz8kAeZVZp2N2+IOLLEPGw== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /@jest/transform/26.6.2: + resolution: {integrity: sha512-E9JjhUgNzvuQ+vVAL21vlyfy12gP0GhazGgJC4h6qUt1jSdUXGWJ1wfu/X7Sd8etSgxV4ovT1pb9v5D6QW4XgA==} + engines: {node: '>= 10.14.2'} dependencies: '@babel/core': 7.12.3 '@jest/types': 26.6.2 @@ -1901,12 +2271,13 @@ packages: slash: 3.0.0 source-map: 0.6.1 write-file-atomic: 3.0.3 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-E9JjhUgNzvuQ+vVAL21vlyfy12gP0GhazGgJC4h6qUt1jSdUXGWJ1wfu/X7Sd8etSgxV4ovT1pb9v5D6QW4XgA== + /@jest/types/26.6.2: + resolution: {integrity: sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ==} + engines: {node: '>= 10.14.2'} dependencies: '@types/istanbul-lib-coverage': 2.0.3 '@types/istanbul-reports': 3.0.0 @@ -1914,76 +2285,102 @@ packages: '@types/yargs': 15.0.13 chalk: 4.1.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== - /@monaco-editor/loader/1.0.1: + + /@jridgewell/gen-mapping/0.3.2: + resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} + engines: {node: '>=6.0.0'} dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + '@jridgewell/trace-mapping': 0.3.17 + dev: true + + /@jridgewell/resolve-uri/3.1.0: + resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/set-array/1.1.2: + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/sourcemap-codec/1.4.14: + resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} + dev: true + + /@jridgewell/trace-mapping/0.3.17: + resolution: {integrity: sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@mapbox/hast-util-to-jsx/1.0.0: + 
resolution: {integrity: sha512-HJRp3qkr0uGIBFASzA8rVATLo6y/UoOMoD8eXsG8HVofk5Dokc9PV+dh266zYLZniYgtpJbc2+AKf1fNpsVqAA==} + engines: {node: '>=10'} + dependencies: + kebab-case: 1.0.1 + postcss: 7.0.35 + postcss-js: 2.0.3 + property-information: 5.6.0 + react-attr-converter: 0.3.1 + stringify-entities: 3.1.0 + stringify-object: 3.3.0 + dev: true + + /@monaco-editor/loader/1.3.2_monaco-editor@0.34.1: + resolution: {integrity: sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g==} + peerDependencies: + monaco-editor: '>= 0.21.0 < 1' + dependencies: + monaco-editor: 0.34.1 state-local: 1.0.7 dev: false + + /@monaco-editor/react/4.4.6_ec62f306aa7ee40038c222aca8db4940: + resolution: {integrity: sha512-Gr3uz3LYf33wlFE3eRnta4RxP5FSNxiIV9ENn2D2/rN8KgGAD8ecvcITRtsbbyuOuNkwbuHYxfeaz2Vr+CtyFA==} peerDependencies: - monaco-editor: '>= 0.21.0 < 1' - resolution: - integrity: sha512-hycGOhLqLYjnD0A/FHs56covEQWnDFrSnm/qLKkB/yoeayQ7ju+Vaj4SdTojGrXeY6jhMDx59map0+Jqwquh1Q== - /@monaco-editor/react/4.1.0_react-dom@17.0.2+react@17.0.2: + monaco-editor: '>= 0.25.0 < 1' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: - '@monaco-editor/loader': 1.0.1 - prop-types: 15.7.2 + '@monaco-editor/loader': 1.3.2_monaco-editor@0.34.1 + monaco-editor: 0.34.1 + prop-types: 15.8.1 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - state-local: 1.0.7 dev: false - peerDependencies: - monaco-editor: ^0.21.2 - react: ^16.8.0 || ^17.0.0 - react-dom: ^16.8.0 || ^17.0.0 - resolution: - integrity: sha512-Hh895v/KfGgckDLXq8sdDGT4xS89+2hbQOP1l57sLd2XlJycChdzPiCj02nQDIduLmUIVHittjaj1/xmy94C3A== + /@nodelib/fs.scandir/2.1.4: + resolution: {integrity: sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA==} + engines: {node: '>= 8'} dependencies: '@nodelib/fs.stat': 2.0.4 run-parallel: 1.2.0 - engines: - node: '>= 8' - resolution: - integrity: sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA== + /@nodelib/fs.stat/2.0.4: - engines: - node: '>= 8' - resolution: - integrity: sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q== + resolution: {integrity: sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q==} + engines: {node: '>= 8'} + /@nodelib/fs.walk/1.2.6: + resolution: {integrity: sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow==} + engines: {node: '>= 8'} dependencies: '@nodelib/fs.scandir': 2.1.4 fastq: 1.11.0 - engines: - node: '>= 8' - resolution: - integrity: sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow== + /@npmcli/move-file/1.1.2: + resolution: {integrity: sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==} + engines: {node: '>=10'} dependencies: mkdirp: 1.0.4 rimraf: 3.0.2 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg== + /@pmmmwh/react-refresh-webpack-plugin/0.4.2_d00fcc46a48175a4e289da7534b00e9a: - dependencies: - ansi-html: 0.0.7 - error-stack-parser: 2.0.6 - html-entities: 1.4.0 - native-url: 0.2.6 - react-refresh: 0.8.3 - schema-utils: 2.7.1 - source-map: 0.7.3 - webpack: 4.44.2_webpack-cli@4.6.0 - webpack-dev-server: 3.11.0_webpack-cli@4.6.0+webpack@4.44.2 - dev: true - engines: - node: '>= 10.x' + resolution: {integrity: 
sha512-Loc4UDGutcZ+Bd56hBInkm6JyjyCwWy4t2wcDXzN8EDPANgVRj0VP8Nxn0Zq2pc+WKauZwEivQgbDGg4xZO20A==} + engines: {node: '>= 10.x'} peerDependencies: '@types/webpack': 4.x react-refresh: ^0.8.3 @@ -2006,9 +2403,23 @@ packages: optional: true webpack-plugin-serve: optional: true - resolution: - integrity: sha512-Loc4UDGutcZ+Bd56hBInkm6JyjyCwWy4t2wcDXzN8EDPANgVRj0VP8Nxn0Zq2pc+WKauZwEivQgbDGg4xZO20A== + dependencies: + ansi-html: 0.0.7 + error-stack-parser: 2.0.6 + html-entities: 1.4.0 + native-url: 0.2.6 + react-refresh: 0.8.3 + schema-utils: 2.7.1 + source-map: 0.7.3 + webpack: 4.44.2_webpack-cli@4.6.0 + webpack-dev-server: 3.11.0_webpack-cli@4.6.0+webpack@4.44.2 + dev: true + /@rollup/plugin-node-resolve/7.1.3_rollup@1.32.1: + resolution: {integrity: sha512-RxtSL3XmdTAE2byxekYLnx+98kEUOrPHF/KRVjLH+DEIHy6kjIw7YINQzn+NXiH/NTrQLAwYs0GWB+csWygA9Q==} + engines: {node: '>= 8.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0 dependencies: '@rollup/pluginutils': 3.1.0_rollup@1.32.1 '@types/resolve': 0.0.8 @@ -2017,103 +2428,99 @@ packages: resolve: 1.18.1 rollup: 1.32.1 dev: true - engines: - node: '>= 8.0.0' - peerDependencies: - rollup: ^1.20.0||^2.0.0 - resolution: - integrity: sha512-RxtSL3XmdTAE2byxekYLnx+98kEUOrPHF/KRVjLH+DEIHy6kjIw7YINQzn+NXiH/NTrQLAwYs0GWB+csWygA9Q== + /@rollup/plugin-replace/2.4.2_rollup@1.32.1: + resolution: {integrity: sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg==} + peerDependencies: + rollup: ^1.20.0 || ^2.0.0 dependencies: '@rollup/pluginutils': 3.1.0_rollup@1.32.1 magic-string: 0.25.7 rollup: 1.32.1 dev: true - peerDependencies: - rollup: ^1.20.0 || ^2.0.0 - resolution: - integrity: sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== + /@rollup/pluginutils/3.1.0_rollup@1.32.1: + resolution: {integrity: sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==} + engines: {node: '>= 8.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0 dependencies: '@types/estree': 0.0.39 estree-walker: 1.0.1 picomatch: 2.2.2 rollup: 1.32.1 dev: true - engines: - node: '>= 8.0.0' + + /@simbathesailor/use-what-changed/2.0.0_react@17.0.2: + resolution: {integrity: sha512-ulBNrPSvfho9UN6zS2fii3AsdEcp2fMaKeqUZZeCNPaZbB6aXyTUhpEN9atjMAbu/eyK3AY8L4SYJUG62Ekocw==} peerDependencies: - rollup: ^1.20.0||^2.0.0 - resolution: - integrity: sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== + react: '>=16' + dependencies: + react: 17.0.2 + dev: true + /@sinonjs/commons/1.8.2: + resolution: {integrity: sha512-sruwd86RJHdsVf/AtBoijDmUqJp3B6hF/DGC23C+JaegnDHaZyewCjoVGTdg3J0uz3Zs7NnIT05OBOmML72lQw==} dependencies: type-detect: 4.0.8 dev: true - resolution: - integrity: sha512-sruwd86RJHdsVf/AtBoijDmUqJp3B6hF/DGC23C+JaegnDHaZyewCjoVGTdg3J0uz3Zs7NnIT05OBOmML72lQw== + /@sinonjs/fake-timers/6.0.1: + resolution: {integrity: sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==} dependencies: '@sinonjs/commons': 1.8.2 dev: true - resolution: - integrity: sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA== + /@surma/rollup-plugin-off-main-thread/1.4.2: + resolution: {integrity: sha512-yBMPqmd1yEJo/280PAMkychuaALyQ9Lkb5q1ck3mjJrFuEobIfhnQ4J3mbvBoISmR3SWMWV+cGB/I0lCQee79A==} dependencies: ejs: 2.7.4 magic-string: 0.25.7 dev: true - resolution: - integrity: sha512-yBMPqmd1yEJo/280PAMkychuaALyQ9Lkb5q1ck3mjJrFuEobIfhnQ4J3mbvBoISmR3SWMWV+cGB/I0lCQee79A== + 
/@svgr/babel-plugin-add-jsx-attribute/5.4.0: + resolution: {integrity: sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + /@svgr/babel-plugin-remove-jsx-attribute/5.4.0: + resolution: {integrity: sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + /@svgr/babel-plugin-remove-jsx-empty-expression/5.0.1: + resolution: {integrity: sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + /@svgr/babel-plugin-replace-jsx-attribute-value/5.0.1: + resolution: {integrity: sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + /@svgr/babel-plugin-svg-dynamic-title/5.4.0: + resolution: {integrity: sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + /@svgr/babel-plugin-svg-em-dimensions/5.4.0: + resolution: {integrity: sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + /@svgr/babel-plugin-transform-react-native-svg/5.4.0: + resolution: {integrity: sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + /@svgr/babel-plugin-transform-svg-component/5.5.0: + resolution: {integrity: sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + /@svgr/babel-preset/5.5.0: + resolution: {integrity: sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig==} + engines: {node: '>=10'} dependencies: '@svgr/babel-plugin-add-jsx-attribute': 5.4.0 '@svgr/babel-plugin-remove-jsx-attribute': 5.4.0 @@ -2124,50 +2531,49 @@ packages: '@svgr/babel-plugin-transform-react-native-svg': 5.4.0 '@svgr/babel-plugin-transform-svg-component': 5.5.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + /@svgr/core/5.5.0: + resolution: {integrity: 
sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ==} + engines: {node: '>=10'} dependencies: '@svgr/plugin-jsx': 5.5.0 camelcase: 6.2.0 cosmiconfig: 7.0.0 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + /@svgr/hast-util-to-babel-ast/5.5.0: + resolution: {integrity: sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ==} + engines: {node: '>=10'} dependencies: '@babel/types': 7.13.13 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + /@svgr/plugin-jsx/5.5.0: + resolution: {integrity: sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA==} + engines: {node: '>=10'} dependencies: '@babel/core': 7.12.3 '@svgr/babel-preset': 5.5.0 '@svgr/hast-util-to-babel-ast': 5.5.0 svg-parser: 2.0.4 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== + /@svgr/plugin-svgo/5.5.0: + resolution: {integrity: sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ==} + engines: {node: '>=10'} dependencies: cosmiconfig: 7.0.0 deepmerge: 4.2.2 svgo: 1.3.2 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + /@svgr/webpack/5.4.0: + resolution: {integrity: sha512-LjepnS/BSAvelnOnnzr6Gg0GcpLmnZ9ThGFK5WJtm1xOqdBE/1IACZU7MMdVzjyUkfFqGz87eRE4hFaSLiUwYg==} + engines: {node: '>=10'} dependencies: '@babel/core': 7.12.3 '@babel/plugin-transform-react-constant-elements': 7.13.13_@babel+core@7.12.3 @@ -2177,12 +2583,13 @@ packages: '@svgr/plugin-jsx': 5.5.0 '@svgr/plugin-svgo': 5.5.0 loader-utils: 2.0.0 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-LjepnS/BSAvelnOnnzr6Gg0GcpLmnZ9ThGFK5WJtm1xOqdBE/1IACZU7MMdVzjyUkfFqGz87eRE4hFaSLiUwYg== + /@testing-library/dom/7.30.1: + resolution: {integrity: sha512-RQUvqqq2lxTCOffhSNxpX/9fCoR+nwuQPmG5uhuuEH5KBAzNf2bK3OzBoWjm5zKM78SLjnGRAKt8hRjQA4E46A==} + engines: {node: '>=10'} dependencies: '@babel/code-frame': 7.12.13 '@babel/runtime': 7.13.10 @@ -2193,11 +2600,10 @@ packages: lz-string: 1.4.4 pretty-format: 26.6.2 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-RQUvqqq2lxTCOffhSNxpX/9fCoR+nwuQPmG5uhuuEH5KBAzNf2bK3OzBoWjm5zKM78SLjnGRAKt8hRjQA4E46A== + /@testing-library/jest-dom/5.11.10: + resolution: {integrity: sha512-FuKiq5xuk44Fqm0000Z9w0hjOdwZRNzgx7xGGxQYepWFZy+OYUMOT/wPI4nLYXCaVltNVpU1W/qmD88wLWDsqQ==} + engines: {node: '>=8', npm: '>=6', yarn: '>=1'} dependencies: '@babel/runtime': 7.13.10 '@types/testing-library__jest-dom': 5.9.5 @@ -2208,46 +2614,61 @@ packages: lodash: 4.17.21 redent: 3.0.0 dev: true - engines: - node: '>=8' - npm: '>=6' - yarn: '>=1' - resolution: - integrity: sha512-FuKiq5xuk44Fqm0000Z9w0hjOdwZRNzgx7xGGxQYepWFZy+OYUMOT/wPI4nLYXCaVltNVpU1W/qmD88wLWDsqQ== - /@testing-library/react/11.2.5_react-dom@17.0.2+react@17.0.2: + + /@testing-library/react-hooks/7.0.2_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: 
sha512-dYxpz8u9m4q1TuzfcUApqi8iFfR6R0FaMbr2hjZJy1uC8z+bO/K4v8Gs9eogGKYQop7QsrBTFkv/BCF7MzD2Cg==} + engines: {node: '>=12'} + peerDependencies: + react: '>=16.9.0' + react-dom: '>=16.9.0' + react-test-renderer: '>=16.9.0' + peerDependenciesMeta: + react-dom: + optional: true + react-test-renderer: + optional: true dependencies: - '@babel/runtime': 7.13.10 - '@testing-library/dom': 7.30.1 + '@babel/runtime': 7.14.8 + '@types/react': 16.14.5 + '@types/react-dom': 16.9.12 + '@types/react-test-renderer': 17.0.1 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 + react-error-boundary: 3.1.3_react@17.0.2 dev: true - engines: - node: '>=10' + + /@testing-library/react/11.2.5_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-yEx7oIa/UWLe2F2dqK0FtMF9sJWNXD+2PPtp39BvE0Kh9MJ9Kl0HrZAgEuhUJR+Lx8Di6Xz+rKwSdEPY2UV8ZQ==} + engines: {node: '>=10'} peerDependencies: react: '*' react-dom: '*' - resolution: - integrity: sha512-yEx7oIa/UWLe2F2dqK0FtMF9sJWNXD+2PPtp39BvE0Kh9MJ9Kl0HrZAgEuhUJR+Lx8Di6Xz+rKwSdEPY2UV8ZQ== - /@testing-library/user-event/12.8.3: dependencies: '@babel/runtime': 7.13.10 + '@testing-library/dom': 7.30.1 + react: 17.0.2 + react-dom: 17.0.2_react@17.0.2 dev: true - engines: - node: '>=10' - npm: '>=6' + + /@testing-library/user-event/12.8.3: + resolution: {integrity: sha512-IR0iWbFkgd56Bu5ZI/ej8yQwrkCv8Qydx6RzwbKz9faXazR/+5tvYKsZQgyXJiwgpcva127YO6JcWy7YlCfofQ==} + engines: {node: '>=10', npm: '>=6'} peerDependencies: '@testing-library/dom': '>=7.21.4' - resolution: - integrity: sha512-IR0iWbFkgd56Bu5ZI/ej8yQwrkCv8Qydx6RzwbKz9faXazR/+5tvYKsZQgyXJiwgpcva127YO6JcWy7YlCfofQ== + dependencies: + '@babel/runtime': 7.13.10 + dev: true + /@types/anymatch/1.3.1: + resolution: {integrity: sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA==} dev: true - resolution: - integrity: sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA== + /@types/aria-query/4.2.1: + resolution: {integrity: sha512-S6oPal772qJZHoRZLFc/XoZW2gFvwXusYUmXPXkgxJLuEk2vOt7jc4Yo6z/vtI0EBkbPBVrJJ0B+prLIKiWqHg==} dev: true - resolution: - integrity: sha512-S6oPal772qJZHoRZLFc/XoZW2gFvwXusYUmXPXkgxJLuEk2vOt7jc4Yo6z/vtI0EBkbPBVrJJ0B+prLIKiWqHg== + /@types/babel__core/7.1.14: + resolution: {integrity: sha512-zGZJzzBUVDo/eV6KgbE0f0ZI7dInEYvo12Rb70uNQDshC3SkRMb67ja0GgRHZgAX3Za6rhaWlvbDO8rrGyAb1g==} dependencies: '@babel/parser': 7.13.13 '@babel/types': 7.13.13 @@ -2255,180 +2676,186 @@ packages: '@types/babel__template': 7.4.0 '@types/babel__traverse': 7.11.1 dev: true - resolution: - integrity: sha512-zGZJzzBUVDo/eV6KgbE0f0ZI7dInEYvo12Rb70uNQDshC3SkRMb67ja0GgRHZgAX3Za6rhaWlvbDO8rrGyAb1g== + /@types/babel__generator/7.6.2: + resolution: {integrity: sha512-MdSJnBjl+bdwkLskZ3NGFp9YcXGx5ggLpQQPqtgakVhsWK0hTtNYhjpZLlWQTviGTvF8at+Bvli3jV7faPdgeQ==} dependencies: '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-MdSJnBjl+bdwkLskZ3NGFp9YcXGx5ggLpQQPqtgakVhsWK0hTtNYhjpZLlWQTviGTvF8at+Bvli3jV7faPdgeQ== + /@types/babel__template/7.4.0: + resolution: {integrity: sha512-NTPErx4/FiPCGScH7foPyr+/1Dkzkni+rHiYHHoTjvwou7AQzJkNeD60A9CXRy+ZEN2B1bggmkTMCDb+Mv5k+A==} dependencies: '@babel/parser': 7.13.13 '@babel/types': 7.13.13 dev: true - resolution: - integrity: sha512-NTPErx4/FiPCGScH7foPyr+/1Dkzkni+rHiYHHoTjvwou7AQzJkNeD60A9CXRy+ZEN2B1bggmkTMCDb+Mv5k+A== + /@types/babel__traverse/7.11.1: + resolution: {integrity: sha512-Vs0hm0vPahPMYi9tDjtP66llufgO3ST16WXaSTtDGEl9cewAl3AibmxWw6TINOqHPT9z0uABKAYjT9jNSg4npw==} dependencies: '@babel/types': 7.13.13 
dev: true - resolution: - integrity: sha512-Vs0hm0vPahPMYi9tDjtP66llufgO3ST16WXaSTtDGEl9cewAl3AibmxWw6TINOqHPT9z0uABKAYjT9jNSg4npw== + + /@types/chart.js/2.9.34: + resolution: {integrity: sha512-CtZVk+kh1IN67dv+fB0CWmCLCRrDJgqOj15qPic2B1VCMovNO6B7Vhf/TgPpNscjhAL1j+qUntDMWb9A4ZmPTg==} + dependencies: + moment: 2.29.1 + dev: true + /@types/classnames/2.2.11: + resolution: {integrity: sha512-2koNhpWm3DgWRp5tpkiJ8JGc1xTn2q0l+jUNUE7oMKXUf5NpI9AIdC4kbjGNFBdHtcxBD18LAksoudAVhFKCjw==} dev: true - resolution: - integrity: sha512-2koNhpWm3DgWRp5tpkiJ8JGc1xTn2q0l+jUNUE7oMKXUf5NpI9AIdC4kbjGNFBdHtcxBD18LAksoudAVhFKCjw== + /@types/d3-array/2.9.0: + resolution: {integrity: sha512-sdBMGfNvLUkBypPMEhOcKcblTQfgHbqbYrUqRE31jOwdDHBJBxz4co2MDAq93S4Cp++phk4UiwoEg/1hK3xXAQ==} dev: false - resolution: - integrity: sha512-sdBMGfNvLUkBypPMEhOcKcblTQfgHbqbYrUqRE31jOwdDHBJBxz4co2MDAq93S4Cp++phk4UiwoEg/1hK3xXAQ== + /@types/d3-axis/2.0.0: + resolution: {integrity: sha512-gUdlEwGBLl3tXGiBnBNmNzph9W3bCfa4tBgWZD60Z1eDQKTY4zyCAcZ3LksignGfKawYatmDYcBdjJ5h/54sqA==} dependencies: '@types/d3-selection': 2.0.0 dev: false - resolution: - integrity: sha512-gUdlEwGBLl3tXGiBnBNmNzph9W3bCfa4tBgWZD60Z1eDQKTY4zyCAcZ3LksignGfKawYatmDYcBdjJ5h/54sqA== + /@types/d3-brush/2.1.0: + resolution: {integrity: sha512-rLQqxQeXWF4ArXi81GlV8HBNwJw9EDpz0jcWvvzv548EDE4tXrayBTOHYi/8Q4FZ/Df8PGXFzxpAVQmJMjOtvQ==} dependencies: '@types/d3-selection': 2.0.0 dev: false - resolution: - integrity: sha512-rLQqxQeXWF4ArXi81GlV8HBNwJw9EDpz0jcWvvzv548EDE4tXrayBTOHYi/8Q4FZ/Df8PGXFzxpAVQmJMjOtvQ== + /@types/d3-chord/2.0.0: + resolution: {integrity: sha512-3nHsLY7lImpZlM/hrPeDqqW2a+lRXXoHsG54QSurDGihZAIE/doQlohs0evoHrWOJqXyn4A4xbSVEtXnMEZZiw==} dev: false - resolution: - integrity: sha512-3nHsLY7lImpZlM/hrPeDqqW2a+lRXXoHsG54QSurDGihZAIE/doQlohs0evoHrWOJqXyn4A4xbSVEtXnMEZZiw== + /@types/d3-color/2.0.1: + resolution: {integrity: sha512-u7LTCL7RnaavFSmob2rIAJLNwu50i6gFwY9cHFr80BrQURYQBRkJ+Yv47nA3Fm7FeRhdWTiVTeqvSeOuMAOzBQ==} dev: false - resolution: - integrity: sha512-u7LTCL7RnaavFSmob2rIAJLNwu50i6gFwY9cHFr80BrQURYQBRkJ+Yv47nA3Fm7FeRhdWTiVTeqvSeOuMAOzBQ== + /@types/d3-contour/2.0.0: + resolution: {integrity: sha512-PS9UO6zBQqwHXsocbpdzZFONgK1oRUgWtjjh/iz2vM06KaXLInLiKZ9e3OLBRerc1cU2uJYpO+8zOnb6frvCGQ==} dependencies: '@types/d3-array': 2.9.0 '@types/geojson': 7946.0.7 dev: false - resolution: - integrity: sha512-PS9UO6zBQqwHXsocbpdzZFONgK1oRUgWtjjh/iz2vM06KaXLInLiKZ9e3OLBRerc1cU2uJYpO+8zOnb6frvCGQ== + /@types/d3-delaunay/5.3.0: + resolution: {integrity: sha512-gJYcGxLu0xDZPccbUe32OUpeaNtd1Lz0NYJtko6ZLMyG2euF4pBzrsQXms67LHZCDFzzszw+dMhSL/QAML3bXw==} dev: false - resolution: - integrity: sha512-gJYcGxLu0xDZPccbUe32OUpeaNtd1Lz0NYJtko6ZLMyG2euF4pBzrsQXms67LHZCDFzzszw+dMhSL/QAML3bXw== + /@types/d3-dispatch/2.0.0: + resolution: {integrity: sha512-Sh0KW6z/d7uxssD7K4s4uCSzlEG/+SP+U47q098NVdOfFvUKNTvKAIV4XqjxsUuhE/854ARAREHOxkr9gQOCyg==} dev: false - resolution: - integrity: sha512-Sh0KW6z/d7uxssD7K4s4uCSzlEG/+SP+U47q098NVdOfFvUKNTvKAIV4XqjxsUuhE/854ARAREHOxkr9gQOCyg== + /@types/d3-drag/2.0.0: + resolution: {integrity: sha512-VaUJPjbMnDn02tcRqsHLRAX5VjcRIzCjBfeXTLGe6QjMn5JccB5Cz4ztMRXMJfkbC45ovgJFWuj6DHvWMX1thA==} dependencies: '@types/d3-selection': 2.0.0 dev: false - resolution: - integrity: sha512-VaUJPjbMnDn02tcRqsHLRAX5VjcRIzCjBfeXTLGe6QjMn5JccB5Cz4ztMRXMJfkbC45ovgJFWuj6DHvWMX1thA== + /@types/d3-dsv/2.0.1: + resolution: {integrity: sha512-wovgiG9Mgkr/SZ/m/c0m+RwrIT4ozsuCWeLxJyoObDWsie2DeQT4wzMdHZPR9Ya5oZLQT3w3uSl0NehG0+0dCA==} dev: false - resolution: - integrity: 
sha512-wovgiG9Mgkr/SZ/m/c0m+RwrIT4ozsuCWeLxJyoObDWsie2DeQT4wzMdHZPR9Ya5oZLQT3w3uSl0NehG0+0dCA== + /@types/d3-ease/2.0.0: + resolution: {integrity: sha512-6aZrTyX5LG+ptofVHf+gTsThLRY1nhLotJjgY4drYqk1OkJMu2UvuoZRlPw2fffjRHeYepue3/fxTufqKKmvsA==} dev: false - resolution: - integrity: sha512-6aZrTyX5LG+ptofVHf+gTsThLRY1nhLotJjgY4drYqk1OkJMu2UvuoZRlPw2fffjRHeYepue3/fxTufqKKmvsA== + /@types/d3-fetch/2.0.0: + resolution: {integrity: sha512-WnLepGtxepFfXRdPI8I5FTgNiHn9p4vMTTqaNCzJJfAswXx0rOY2jjeolzEU063em3iJmGZ+U79InnEeFOrCRw==} dependencies: '@types/d3-dsv': 2.0.1 dev: false - resolution: - integrity: sha512-WnLepGtxepFfXRdPI8I5FTgNiHn9p4vMTTqaNCzJJfAswXx0rOY2jjeolzEU063em3iJmGZ+U79InnEeFOrCRw== + /@types/d3-force/2.1.1: + resolution: {integrity: sha512-3r+CQv2K/uDTAVg0DGxsbBjV02vgOxb8RhPIv3gd6cp3pdPAZ7wEXpDjUZSoqycAQLSDOxG/AZ54Vx6YXZSbmQ==} dev: false - resolution: - integrity: sha512-3r+CQv2K/uDTAVg0DGxsbBjV02vgOxb8RhPIv3gd6cp3pdPAZ7wEXpDjUZSoqycAQLSDOxG/AZ54Vx6YXZSbmQ== + /@types/d3-format/2.0.0: + resolution: {integrity: sha512-uagdkftxnGkO4pZw5jEYOM5ZnZOEsh7z8j11Qxk85UkB2RzfUUxRl7R9VvvJZHwKn8l+x+rpS77Nusq7FkFmIg==} dev: false - resolution: - integrity: sha512-uagdkftxnGkO4pZw5jEYOM5ZnZOEsh7z8j11Qxk85UkB2RzfUUxRl7R9VvvJZHwKn8l+x+rpS77Nusq7FkFmIg== + /@types/d3-geo/2.0.0: + resolution: {integrity: sha512-DHHgYXW36lnAEQMYU2udKVOxxljHrn2EdOINeSC9jWCAXwOnGn7A19B8sNsHqgpu4F7O2bSD7//cqBXD3W0Deg==} dependencies: '@types/geojson': 7946.0.7 dev: false - resolution: - integrity: sha512-DHHgYXW36lnAEQMYU2udKVOxxljHrn2EdOINeSC9jWCAXwOnGn7A19B8sNsHqgpu4F7O2bSD7//cqBXD3W0Deg== + /@types/d3-hierarchy/2.0.0: + resolution: {integrity: sha512-YxdskUvwzqggpnSnDQj4KVkicgjpkgXn/g/9M9iGsiToLS3nG6Ytjo1FoYhYVAAElV/fJBGVL3cQ9Hb7tcv+lw==} dev: false - resolution: - integrity: sha512-YxdskUvwzqggpnSnDQj4KVkicgjpkgXn/g/9M9iGsiToLS3nG6Ytjo1FoYhYVAAElV/fJBGVL3cQ9Hb7tcv+lw== + /@types/d3-interpolate/2.0.0: + resolution: {integrity: sha512-Wt1v2zTlEN8dSx8hhx6MoOhWQgTkz0Ukj7owAEIOF2QtI0e219paFX9rf/SLOr/UExWb1TcUzatU8zWwFby6gg==} dependencies: '@types/d3-color': 2.0.1 dev: false - resolution: - integrity: sha512-Wt1v2zTlEN8dSx8hhx6MoOhWQgTkz0Ukj7owAEIOF2QtI0e219paFX9rf/SLOr/UExWb1TcUzatU8zWwFby6gg== + /@types/d3-path/1.0.9: + resolution: {integrity: sha512-NaIeSIBiFgSC6IGUBjZWcscUJEq7vpVu7KthHN8eieTV9d9MqkSOZLH4chq1PmcKy06PNe3axLeKmRIyxJ+PZQ==} dev: false - resolution: - integrity: sha512-NaIeSIBiFgSC6IGUBjZWcscUJEq7vpVu7KthHN8eieTV9d9MqkSOZLH4chq1PmcKy06PNe3axLeKmRIyxJ+PZQ== + /@types/d3-path/2.0.0: + resolution: {integrity: sha512-tXcR/9OtDdeCIsyl6eTNHC3XOAOdyc6ceF3QGBXOd9jTcK+ex/ecr00p9L9362e/op3UEPpxrToi1FHrtTSj7Q==} dev: false - resolution: - integrity: sha512-tXcR/9OtDdeCIsyl6eTNHC3XOAOdyc6ceF3QGBXOd9jTcK+ex/ecr00p9L9362e/op3UEPpxrToi1FHrtTSj7Q== + /@types/d3-polygon/2.0.0: + resolution: {integrity: sha512-fISnMd8ePED1G4aa4V974Jmt+ajHSgPoxMa2D0ULxMybpx0Vw4WEzhQEaMIrL3hM8HVRcKTx669I+dTy/4PhAw==} dev: false - resolution: - integrity: sha512-fISnMd8ePED1G4aa4V974Jmt+ajHSgPoxMa2D0ULxMybpx0Vw4WEzhQEaMIrL3hM8HVRcKTx669I+dTy/4PhAw== + /@types/d3-quadtree/2.0.0: + resolution: {integrity: sha512-YZuJuGBnijD0H+98xMJD4oZXgv/umPXy5deu3IimYTPGH3Kr8Th6iQUff0/6S80oNBD7KtOuIHwHUCymUiRoeQ==} dev: false - resolution: - integrity: sha512-YZuJuGBnijD0H+98xMJD4oZXgv/umPXy5deu3IimYTPGH3Kr8Th6iQUff0/6S80oNBD7KtOuIHwHUCymUiRoeQ== + /@types/d3-random/2.2.0: + resolution: {integrity: sha512-Hjfj9m68NmYZzushzEG7etPvKH/nj9b9s9+qtkNG3/dbRBjQZQg1XS6nRuHJcCASTjxXlyXZnKu2gDxyQIIu9A==} dev: false - resolution: - integrity: 
sha512-Hjfj9m68NmYZzushzEG7etPvKH/nj9b9s9+qtkNG3/dbRBjQZQg1XS6nRuHJcCASTjxXlyXZnKu2gDxyQIIu9A== + /@types/d3-scale-chromatic/2.0.0: + resolution: {integrity: sha512-Y62+2clOwZoKua84Ha0xU77w7lePiaBoTjXugT4l8Rd5LAk+Mn/ZDtrgs087a+B5uJ3jYUHHtKw5nuEzp0WBHw==} dev: false - resolution: - integrity: sha512-Y62+2clOwZoKua84Ha0xU77w7lePiaBoTjXugT4l8Rd5LAk+Mn/ZDtrgs087a+B5uJ3jYUHHtKw5nuEzp0WBHw== + /@types/d3-scale/3.2.2: + resolution: {integrity: sha512-qpQe8G02tzUwt9sdWX1h8A/W0Q1+N48wMnYXVOkrzeLUkCfvzJYV9Ee3aORCS4dN4ONRLFmMvaXdziQ29XGLjQ==} dependencies: '@types/d3-time': 2.0.0 dev: false - resolution: - integrity: sha512-qpQe8G02tzUwt9sdWX1h8A/W0Q1+N48wMnYXVOkrzeLUkCfvzJYV9Ee3aORCS4dN4ONRLFmMvaXdziQ29XGLjQ== + /@types/d3-selection/2.0.0: + resolution: {integrity: sha512-EF0lWZ4tg7oDFg4YQFlbOU3936e3a9UmoQ2IXlBy1+cv2c2Pv7knhKUzGlH5Hq2sF/KeDTH1amiRPey2rrLMQA==} dev: false - resolution: - integrity: sha512-EF0lWZ4tg7oDFg4YQFlbOU3936e3a9UmoQ2IXlBy1+cv2c2Pv7knhKUzGlH5Hq2sF/KeDTH1amiRPey2rrLMQA== + /@types/d3-shape/2.0.0: + resolution: {integrity: sha512-NLzD02m5PiD1KLEDjLN+MtqEcFYn4ZL9+Rqc9ZwARK1cpKZXd91zBETbe6wpBB6Ia0D0VZbpmbW3+BsGPGnCpA==} dependencies: '@types/d3-path': 1.0.9 dev: false - resolution: - integrity: sha512-NLzD02m5PiD1KLEDjLN+MtqEcFYn4ZL9+Rqc9ZwARK1cpKZXd91zBETbe6wpBB6Ia0D0VZbpmbW3+BsGPGnCpA== + /@types/d3-time-format/3.0.0: + resolution: {integrity: sha512-UpLg1mn/8PLyjr+J/JwdQJM/GzysMvv2CS8y+WYAL5K0+wbvXv/pPSLEfdNaprCZsGcXTxPsFMy8QtkYv9ueew==} dev: false - resolution: - integrity: sha512-UpLg1mn/8PLyjr+J/JwdQJM/GzysMvv2CS8y+WYAL5K0+wbvXv/pPSLEfdNaprCZsGcXTxPsFMy8QtkYv9ueew== + /@types/d3-time/2.0.0: + resolution: {integrity: sha512-Abz8bTzy8UWDeYs9pCa3D37i29EWDjNTjemdk0ei1ApYVNqulYlGUKip/jLOpogkPSsPz/GvZCYiC7MFlEk0iQ==} dev: false - resolution: - integrity: sha512-Abz8bTzy8UWDeYs9pCa3D37i29EWDjNTjemdk0ei1ApYVNqulYlGUKip/jLOpogkPSsPz/GvZCYiC7MFlEk0iQ== + /@types/d3-timer/2.0.0: + resolution: {integrity: sha512-l6stHr1VD1BWlW6u3pxrjLtJfpPZq9I3XmKIQtq7zHM/s6fwEtI1Yn6Sr5/jQTrUDCC5jkS6gWqlFGCDArDqNg==} dev: false - resolution: - integrity: sha512-l6stHr1VD1BWlW6u3pxrjLtJfpPZq9I3XmKIQtq7zHM/s6fwEtI1Yn6Sr5/jQTrUDCC5jkS6gWqlFGCDArDqNg== + /@types/d3-transition/2.0.0: + resolution: {integrity: sha512-UJDzI98utcZQUJt3uIit/Ho0/eBIANzrWJrTmi4+TaKIyWL2iCu7ShP0o4QajCskhyjOA7C8+4CE3b1YirTzEQ==} dependencies: '@types/d3-selection': 2.0.0 dev: false - resolution: - integrity: sha512-UJDzI98utcZQUJt3uIit/Ho0/eBIANzrWJrTmi4+TaKIyWL2iCu7ShP0o4QajCskhyjOA7C8+4CE3b1YirTzEQ== + /@types/d3-zoom/2.0.0: + resolution: {integrity: sha512-daL0PJm4yT0ISTGa7p2lHX0kvv9FO/IR1ooWbHR/7H4jpbaKiLux5FslyS/OvISPiJ5SXb4sOqYhO6fMB6hKRw==} dependencies: '@types/d3-interpolate': 2.0.0 '@types/d3-selection': 2.0.0 dev: false - resolution: - integrity: sha512-daL0PJm4yT0ISTGa7p2lHX0kvv9FO/IR1ooWbHR/7H4jpbaKiLux5FslyS/OvISPiJ5SXb4sOqYhO6fMB6hKRw== + /@types/d3/6.3.0: + resolution: {integrity: sha512-YILdGsjNTbvkWZKsBasB4cVDwNPnni7ILMJg9keMErQHyuII2yO2jyFdUy5E+7k/HTNP/AucrPddQuu27udbeA==} dependencies: '@types/d3-array': 2.9.0 '@types/d3-axis': 2.0.0 @@ -2461,219 +2888,298 @@ packages: '@types/d3-transition': 2.0.0 '@types/d3-zoom': 2.0.0 dev: false - resolution: - integrity: sha512-YILdGsjNTbvkWZKsBasB4cVDwNPnni7ILMJg9keMErQHyuII2yO2jyFdUy5E+7k/HTNP/AucrPddQuu27udbeA== + + /@types/debounce-promise/3.1.4: + resolution: {integrity: sha512-9SEVY3nsz+uMN2DwDocftB5TAgZe7D0cOzxxRhpotWs6T4QFqRaTXpXbOSzbk31/7iYcfCkJJPwWGzTxyuGhCg==} + dev: true + /@types/eslint/7.2.7: + resolution: {integrity: 
sha512-EHXbc1z2GoQRqHaAT7+grxlTJ3WE2YNeD6jlpPoRc83cCoThRY+NUWjCUZaYmk51OICkPXn2hhphcWcWXgNW0Q==} dependencies: '@types/estree': 0.0.47 '@types/json-schema': 7.0.7 dev: true - resolution: - integrity: sha512-EHXbc1z2GoQRqHaAT7+grxlTJ3WE2YNeD6jlpPoRc83cCoThRY+NUWjCUZaYmk51OICkPXn2hhphcWcWXgNW0Q== + /@types/estree/0.0.39: + resolution: {integrity: sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==} dev: true - resolution: - integrity: sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + /@types/estree/0.0.47: + resolution: {integrity: sha512-c5ciR06jK8u9BstrmJyO97m+klJrrhCf9u3rLu3DEAJBirxRqSCvDQoYKmxuYwQI5SZChAWu+tq9oVlGRuzPAg==} dev: true - resolution: - integrity: sha512-c5ciR06jK8u9BstrmJyO97m+klJrrhCf9u3rLu3DEAJBirxRqSCvDQoYKmxuYwQI5SZChAWu+tq9oVlGRuzPAg== + /@types/geojson/7946.0.7: + resolution: {integrity: sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ==} dev: false - resolution: - integrity: sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ== + /@types/glob/7.1.3: + resolution: {integrity: sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w==} dependencies: '@types/minimatch': 3.0.4 '@types/node': 12.20.7 dev: true - resolution: - integrity: sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w== + /@types/graceful-fs/4.1.5: + resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} dependencies: '@types/node': 12.20.7 dev: true - resolution: - integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + + /@types/hast/2.3.4: + resolution: {integrity: sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==} + dependencies: + '@types/unist': 2.0.6 + dev: true + /@types/history/4.7.8: + resolution: {integrity: sha512-S78QIYirQcUoo6UJZx9CSP0O2ix9IaeAXwQi26Rhr/+mg7qqPy8TzaxHSUut7eGjL8WmLccT7/MXf304WjqHcA==} dev: true - resolution: - integrity: sha512-S78QIYirQcUoo6UJZx9CSP0O2ix9IaeAXwQi26Rhr/+mg7qqPy8TzaxHSUut7eGjL8WmLccT7/MXf304WjqHcA== + /@types/hoist-non-react-statics/3.3.1: + resolution: {integrity: sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==} dependencies: '@types/react': 16.14.5 hoist-non-react-statics: 3.3.2 - resolution: - integrity: sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA== + /@types/html-minifier-terser/5.1.1: + resolution: {integrity: sha512-giAlZwstKbmvMk1OO7WXSj4OZ0keXAcl2TQq4LWHiiPH2ByaH7WeUzng+Qej8UPxxv+8lRTuouo0iaNDBuzIBA==} dev: true - resolution: - integrity: sha512-giAlZwstKbmvMk1OO7WXSj4OZ0keXAcl2TQq4LWHiiPH2ByaH7WeUzng+Qej8UPxxv+8lRTuouo0iaNDBuzIBA== + + /@types/http-proxy/1.17.9: + resolution: {integrity: sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw==} + dependencies: + '@types/node': 14.17.20 + dev: true + /@types/istanbul-lib-coverage/2.0.3: + resolution: {integrity: sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw==} dev: true - resolution: - integrity: sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw== + /@types/istanbul-lib-report/3.0.0: + resolution: {integrity: 
sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} dependencies: '@types/istanbul-lib-coverage': 2.0.3 dev: true - resolution: - integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + /@types/istanbul-reports/3.0.0: + resolution: {integrity: sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA==} dependencies: '@types/istanbul-lib-report': 3.0.0 dev: true - resolution: - integrity: sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA== + /@types/jest/26.0.22: + resolution: {integrity: sha512-eeWwWjlqxvBxc4oQdkueW5OF/gtfSceKk4OnOAGlUSwS/liBRtZppbJuz1YkgbrbfGOoeBHun9fOvXnjNwrSOw==} dependencies: jest-diff: 26.6.2 pretty-format: 26.6.2 dev: true - resolution: - integrity: sha512-eeWwWjlqxvBxc4oQdkueW5OF/gtfSceKk4OnOAGlUSwS/liBRtZppbJuz1YkgbrbfGOoeBHun9fOvXnjNwrSOw== + /@types/js-cookie/2.2.6: + resolution: {integrity: sha512-+oY0FDTO2GYKEV0YPvSshGq9t7YozVkgvXLty7zogQNuCxBhT9/3INX9Q7H1aRZ4SUDRXAKlJuA4EA5nTt7SNw==} dev: false - resolution: - integrity: sha512-+oY0FDTO2GYKEV0YPvSshGq9t7YozVkgvXLty7zogQNuCxBhT9/3INX9Q7H1aRZ4SUDRXAKlJuA4EA5nTt7SNw== + /@types/json-schema/7.0.7: + resolution: {integrity: sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA==} dev: true - resolution: - integrity: sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== + + /@types/json-schema/7.0.8: + resolution: {integrity: sha512-YSBPTLTVm2e2OoQIDYx8HaeWJ5tTToLH67kXR7zYNGupXMEHa2++G8k+DczX2cFVgalypqtyZIcU19AFcmOpmg==} + dev: true + /@types/json5/0.0.29: + resolution: {integrity: sha1-7ihweulOEdK4J7y+UnC86n8+ce4=} dev: true - resolution: - integrity: sha1-7ihweulOEdK4J7y+UnC86n8+ce4= + /@types/keyboardjs/2.5.0: + resolution: {integrity: sha512-tGU6Lz04lDNH+N3AZYIWVeBza2ZSaLlZuSkzi38zSFSuh6DgVqBdqgkX+OS+jg1vwlw5XzS5MASY44fr9C12Yg==} dev: true - resolution: - integrity: sha512-tGU6Lz04lDNH+N3AZYIWVeBza2ZSaLlZuSkzi38zSFSuh6DgVqBdqgkX+OS+jg1vwlw5XzS5MASY44fr9C12Yg== + /@types/less/3.0.2: + resolution: {integrity: sha512-62vfe65cMSzYaWmpmhqCMMNl0khen89w57mByPi1OseGfcV/LV03fO8YVrNj7rFQsRWNJo650WWyh6m7p8vZmA==} + dev: true + + /@types/lodash-es/4.17.4: + resolution: {integrity: sha512-BBz79DCJbD2CVYZH67MBeHZRX++HF+5p8Mo5MzjZi64Wac39S3diedJYHZtScbRVf4DjZyN6LzA0SB0zy+HSSQ==} + dependencies: + '@types/lodash': 4.14.168 dev: true - resolution: - integrity: sha512-62vfe65cMSzYaWmpmhqCMMNl0khen89w57mByPi1OseGfcV/LV03fO8YVrNj7rFQsRWNJo650WWyh6m7p8vZmA== + /@types/lodash/4.14.168: + resolution: {integrity: sha512-oVfRvqHV/V6D1yifJbVRU3TMp8OT6o6BG+U9MkwuJ3U8/CsDHvalRpsxBqivn71ztOFZBTfJMvETbqHiaNSj7Q==} + dev: true + + /@types/mathjax/0.0.36: + resolution: {integrity: sha512-TqDJc2GWuTqd/m+G/FbNkN+/TF2OCCHvcawmhIrUaZkdVquMdNZmNiNUkupNg9qctorXXkVLVSogZv1DhmgLmg==} + dev: true + + /@types/mdast/3.0.10: + resolution: {integrity: sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==} + dependencies: + '@types/unist': 2.0.6 dev: true - resolution: - integrity: sha512-oVfRvqHV/V6D1yifJbVRU3TMp8OT6o6BG+U9MkwuJ3U8/CsDHvalRpsxBqivn71ztOFZBTfJMvETbqHiaNSj7Q== + /@types/minimatch/3.0.4: + resolution: {integrity: sha512-1z8k4wzFnNjVK/tlxvrWuK5WMt6mydWWP7+zvH5eFep4oj+UkrfiJTRtjCeBXNpwaA/FYqqtb4/QS4ianFpIRA==} dev: true - resolution: - integrity: sha512-1z8k4wzFnNjVK/tlxvrWuK5WMt6mydWWP7+zvH5eFep4oj+UkrfiJTRtjCeBXNpwaA/FYqqtb4/QS4ianFpIRA== + 
/@types/node/12.20.7: + resolution: {integrity: sha512-gWL8VUkg8VRaCAUgG9WmhefMqHmMblxe2rVpMF86nZY/+ZysU+BkAp+3cz03AixWDSSz0ks5WX59yAhv/cDwFA==} dev: true - resolution: - integrity: sha512-gWL8VUkg8VRaCAUgG9WmhefMqHmMblxe2rVpMF86nZY/+ZysU+BkAp+3cz03AixWDSSz0ks5WX59yAhv/cDwFA== + + /@types/node/14.17.20: + resolution: {integrity: sha512-gI5Sl30tmhXsqkNvopFydP7ASc4c2cLfGNQrVKN3X90ADFWFsPEsotm/8JHSUJQKTHbwowAHtcJPeyVhtKv0TQ==} + dev: true + /@types/normalize-package-data/2.4.0: + resolution: {integrity: sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==} dev: true - resolution: - integrity: sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== + /@types/parse-json/4.0.0: + resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} + dev: true + + /@types/parse5/5.0.3: + resolution: {integrity: sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==} dev: true - resolution: - integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + /@types/prettier/2.2.3: + resolution: {integrity: sha512-PijRCG/K3s3w1We6ynUKdxEc5AcuuH3NBmMDP8uvKVp6X43UY7NQlTzczakXP3DJR0F4dfNQIGjU2cUeRYs2AA==} dev: true - resolution: - integrity: sha512-PijRCG/K3s3w1We6ynUKdxEc5AcuuH3NBmMDP8uvKVp6X43UY7NQlTzczakXP3DJR0F4dfNQIGjU2cUeRYs2AA== + /@types/prop-types/15.7.3: - resolution: - integrity: sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw== + resolution: {integrity: sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw==} + /@types/pubsub-js/1.8.2: + resolution: {integrity: sha512-cj3ZoAopr2ZmUYwRuXUiq48PlfNj5sBcUIkBnSJunfXlmf6y8o2kx4l70h1X1j0fR3IBorPrPM3B9SoyWwoqLg==} dev: true - resolution: - integrity: sha512-cj3ZoAopr2ZmUYwRuXUiq48PlfNj5sBcUIkBnSJunfXlmf6y8o2kx4l70h1X1j0fR3IBorPrPM3B9SoyWwoqLg== + /@types/q/1.5.4: + resolution: {integrity: sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug==} dev: true - resolution: - integrity: sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug== + + /@types/qs/6.9.7: + resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==} + dev: true + /@types/react-dom/16.9.12: + resolution: {integrity: sha512-i7NPZZpPte3jtVOoW+eLB7G/jsX5OM6GqQnH+lC0nq0rqwlK0x8WcMEvYDgFWqWhWMlTltTimzdMax6wYfZssA==} dependencies: '@types/react': 16.14.5 dev: true - resolution: - integrity: sha512-i7NPZZpPte3jtVOoW+eLB7G/jsX5OM6GqQnH+lC0nq0rqwlK0x8WcMEvYDgFWqWhWMlTltTimzdMax6wYfZssA== + /@types/react-redux/7.1.16: + resolution: {integrity: sha512-f/FKzIrZwZk7YEO9E1yoxIuDNRiDducxkFlkw/GNMGEnK9n4K8wJzlJBghpSuOVDgEUHoDkDF7Gi9lHNQR4siw==} dependencies: '@types/hoist-non-react-statics': 3.3.1 '@types/react': 16.14.5 hoist-non-react-statics: 3.3.2 redux: 4.0.5 dev: false - resolution: - integrity: sha512-f/FKzIrZwZk7YEO9E1yoxIuDNRiDducxkFlkw/GNMGEnK9n4K8wJzlJBghpSuOVDgEUHoDkDF7Gi9lHNQR4siw== + + /@types/react-router-config/5.0.2: + resolution: {integrity: sha512-WOSetDV3YPxbkVJAdv/bqExJjmcdCi/vpCJh3NfQOy1X15vHMSiMioXIcGekXDJJYhqGUMDo9e337mh508foAA==} + dependencies: + '@types/history': 4.7.8 + '@types/react': 16.14.5 + '@types/react-router': 5.1.13 + dev: true + /@types/react-router-dom/5.1.7: + resolution: {integrity: 
sha512-D5mHD6TbdV/DNHYsnwBTv+y73ei+mMjrkGrla86HthE4/PVvL1J94Bu3qABU+COXzpL23T1EZapVVpwHuBXiUg==} dependencies: '@types/history': 4.7.8 '@types/react': 16.14.5 '@types/react-router': 5.1.13 dev: true - resolution: - integrity: sha512-D5mHD6TbdV/DNHYsnwBTv+y73ei+mMjrkGrla86HthE4/PVvL1J94Bu3qABU+COXzpL23T1EZapVVpwHuBXiUg== + + /@types/react-router/5.1.12: + resolution: {integrity: sha512-0bhXQwHYfMeJlCh7mGhc0VJTRm0Gk+Z8T00aiP4702mDUuLs9SMhnd2DitpjWFjdOecx2UXtICK14H9iMnziGA==} + dependencies: + '@types/history': 4.7.8 + '@types/react': 16.14.5 + dev: true + /@types/react-router/5.1.13: + resolution: {integrity: sha512-ZIuaO9Yrln54X6elg8q2Ivp6iK6p4syPsefEYAhRDAoqNh48C8VYUmB9RkXjKSQAJSJV0mbIFCX7I4vZDcHrjg==} dependencies: '@types/history': 4.7.8 '@types/react': 16.14.5 dev: true - resolution: - integrity: sha512-ZIuaO9Yrln54X6elg8q2Ivp6iK6p4syPsefEYAhRDAoqNh48C8VYUmB9RkXjKSQAJSJV0mbIFCX7I4vZDcHrjg== + + /@types/react-test-renderer/17.0.1: + resolution: {integrity: sha512-3Fi2O6Zzq/f3QR9dRnlnHso9bMl7weKCviFmfF6B4LS1Uat6Hkm15k0ZAQuDz+UBq6B3+g+NM6IT2nr5QgPzCw==} + dependencies: + '@types/react': 16.14.5 + dev: true + /@types/react/16.14.5: + resolution: {integrity: sha512-YRRv9DNZhaVTVRh9Wmmit7Y0UFhEVqXqCSw3uazRWMxa2x85hWQZ5BN24i7GXZbaclaLXEcodEeIHsjBA8eAMw==} dependencies: '@types/prop-types': 15.7.3 '@types/scheduler': 0.16.1 csstype: 3.0.7 - resolution: - integrity: sha512-YRRv9DNZhaVTVRh9Wmmit7Y0UFhEVqXqCSw3uazRWMxa2x85hWQZ5BN24i7GXZbaclaLXEcodEeIHsjBA8eAMw== + /@types/resolve/0.0.8: + resolution: {integrity: sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==} dependencies: '@types/node': 12.20.7 dev: true - resolution: - integrity: sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ== + + /@types/sax/1.2.3: + resolution: {integrity: sha512-+QSw6Tqvs/KQpZX8DvIl3hZSjNFLW/OqE5nlyHXtTwODaJvioN2rOWpBNEWZp2HZUFhOh+VohmJku/WxEXU2XA==} + dependencies: + '@types/node': 14.17.20 + dev: true + /@types/scheduler/0.16.1: - resolution: - integrity: sha512-EaCxbanVeyxDRTQBkdLb3Bvl/HK7PBK6UJjsSixB0iHKoWxE5uu2Q/DgtpOhPIojN0Zl1whvOd7PoHs2P0s5eA== + resolution: {integrity: sha512-EaCxbanVeyxDRTQBkdLb3Bvl/HK7PBK6UJjsSixB0iHKoWxE5uu2Q/DgtpOhPIojN0Zl1whvOd7PoHs2P0s5eA==} + /@types/source-list-map/0.1.2: + resolution: {integrity: sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA==} dev: true - resolution: - integrity: sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA== + /@types/stack-utils/2.0.0: + resolution: {integrity: sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==} dev: true - resolution: - integrity: sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw== + /@types/styled-components/5.1.9: + resolution: {integrity: sha512-kbEG6YlwK8rucITpKEr6pA4Ho9KSQHUUOzZ9lY3va1mtcjvS3D0wDciFyHEiNHKLL/npZCKDQJqm0x44sPO9oA==} dependencies: '@types/hoist-non-react-statics': 3.3.1 '@types/react': 16.14.5 csstype: 3.0.7 dev: true - resolution: - integrity: sha512-kbEG6YlwK8rucITpKEr6pA4Ho9KSQHUUOzZ9lY3va1mtcjvS3D0wDciFyHEiNHKLL/npZCKDQJqm0x44sPO9oA== + /@types/tapable/1.0.7: + resolution: {integrity: sha512-0VBprVqfgFD7Ehb2vd8Lh9TG3jP98gvr8rgehQqzztZNI7o8zS8Ad4jyZneKELphpuE212D8J70LnSNQSyO6bQ==} dev: true - resolution: - integrity: sha512-0VBprVqfgFD7Ehb2vd8Lh9TG3jP98gvr8rgehQqzztZNI7o8zS8Ad4jyZneKELphpuE212D8J70LnSNQSyO6bQ== + /@types/testing-library__jest-dom/5.9.5: + 
resolution: {integrity: sha512-ggn3ws+yRbOHog9GxnXiEZ/35Mow6YtPZpd7Z5mKDeZS/o7zx3yAle0ov/wjhVB5QT4N2Dt+GNoGCdqkBGCajQ==} dependencies: '@types/jest': 26.0.22 dev: true - resolution: - integrity: sha512-ggn3ws+yRbOHog9GxnXiEZ/35Mow6YtPZpd7Z5mKDeZS/o7zx3yAle0ov/wjhVB5QT4N2Dt+GNoGCdqkBGCajQ== + /@types/uglify-js/3.13.0: + resolution: {integrity: sha512-EGkrJD5Uy+Pg0NUR8uA4bJ5WMfljyad0G+784vLCNUkD+QwOJXUbBYExXfVGf7YtyzdQp3L/XMYcliB987kL5Q==} dependencies: source-map: 0.6.1 dev: true - resolution: - integrity: sha512-EGkrJD5Uy+Pg0NUR8uA4bJ5WMfljyad0G+784vLCNUkD+QwOJXUbBYExXfVGf7YtyzdQp3L/XMYcliB987kL5Q== + + /@types/unist/2.0.6: + resolution: {integrity: sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==} + dev: true + /@types/webpack-sources/2.1.0: + resolution: {integrity: sha512-LXn/oYIpBeucgP1EIJbKQ2/4ZmpvRl+dlrFdX7+94SKRUV3Evy3FsfMZY318vGhkWUS5MPhtOM3w1/hCOAOXcg==} dependencies: '@types/node': 12.20.7 '@types/source-list-map': 0.1.2 source-map: 0.7.3 dev: true - resolution: - integrity: sha512-LXn/oYIpBeucgP1EIJbKQ2/4ZmpvRl+dlrFdX7+94SKRUV3Evy3FsfMZY318vGhkWUS5MPhtOM3w1/hCOAOXcg== + /@types/webpack/4.41.27: + resolution: {integrity: sha512-wK/oi5gcHi72VMTbOaQ70VcDxSQ1uX8S2tukBK9ARuGXrYM/+u4ou73roc7trXDNmCxCoerE8zruQqX/wuHszA==} dependencies: '@types/anymatch': 1.3.1 '@types/node': 12.20.7 @@ -2682,19 +3188,27 @@ packages: '@types/webpack-sources': 2.1.0 source-map: 0.6.1 dev: true - resolution: - integrity: sha512-wK/oi5gcHi72VMTbOaQ70VcDxSQ1uX8S2tukBK9ARuGXrYM/+u4ou73roc7trXDNmCxCoerE8zruQqX/wuHszA== + /@types/yargs-parser/20.2.0: + resolution: {integrity: sha512-37RSHht+gzzgYeobbG+KWryeAW8J33Nhr69cjTqSYymXVZEN9NbRYWoYlRtDhHKPVT1FyNKwaTPC1NynKZpzRA==} dev: true - resolution: - integrity: sha512-37RSHht+gzzgYeobbG+KWryeAW8J33Nhr69cjTqSYymXVZEN9NbRYWoYlRtDhHKPVT1FyNKwaTPC1NynKZpzRA== + /@types/yargs/15.0.13: + resolution: {integrity: sha512-kQ5JNTrbDv3Rp5X2n/iUu37IJBDU2gsZ5R/g1/KHOOEc5IKfUFjXT6DENPGduh08I/pamwtEq4oul7gUqKTQDQ==} dependencies: '@types/yargs-parser': 20.2.0 dev: true - resolution: - integrity: sha512-kQ5JNTrbDv3Rp5X2n/iUu37IJBDU2gsZ5R/g1/KHOOEc5IKfUFjXT6DENPGduh08I/pamwtEq4oul7gUqKTQDQ== + /@typescript-eslint/eslint-plugin/4.19.0_821acdc8bc493ad1aa2628c9b724d688: + resolution: {integrity: sha512-CRQNQ0mC2Pa7VLwKFbrGVTArfdVDdefS+gTw0oC98vSI98IX5A8EVH4BzJ2FOB0YlCmm8Im36Elad/Jgtvveaw==} + engines: {node: ^10.12.0 || >=12.0.0} + peerDependencies: + '@typescript-eslint/parser': ^4.0.0 + eslint: ^5.0.0 || ^6.0.0 || ^7.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true dependencies: '@typescript-eslint/experimental-utils': 4.19.0_eslint@7.23.0+typescript@4.2.3 '@typescript-eslint/parser': 4.19.0_eslint@7.23.0+typescript@4.2.3 @@ -2707,19 +3221,15 @@ packages: semver: 7.3.2 tsutils: 3.21.0_typescript@4.2.3 typescript: 4.2.3 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: ^10.12.0 || >=12.0.0 - peerDependencies: - '@typescript-eslint/parser': ^4.0.0 - eslint: ^5.0.0 || ^6.0.0 || ^7.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - resolution: - integrity: sha512-CRQNQ0mC2Pa7VLwKFbrGVTArfdVDdefS+gTw0oC98vSI98IX5A8EVH4BzJ2FOB0YlCmm8Im36Elad/Jgtvveaw== + /@typescript-eslint/experimental-utils/3.10.1_eslint@7.23.0+typescript@4.2.3: + resolution: {integrity: sha512-DewqIgscDzmAfd5nOGe4zm6Bl7PKtMG2Ad0KG8CUZAHlXfAKTF9Ol5PXhiMh39yRL2ChRH1cuuUGOcVyyrhQIw==} + engines: {node: ^10.12.0 || >=12.0.0} + peerDependencies: + eslint: '*' dependencies: '@types/json-schema': 
7.0.7 '@typescript-eslint/types': 3.10.1 @@ -2727,15 +3237,16 @@ packages: eslint: 7.23.0 eslint-scope: 5.1.1 eslint-utils: 2.1.0 + transitivePeerDependencies: + - supports-color + - typescript dev: true - engines: - node: ^10.12.0 || >=12.0.0 + + /@typescript-eslint/experimental-utils/4.19.0_eslint@7.23.0+typescript@4.2.3: + resolution: {integrity: sha512-9/23F1nnyzbHKuoTqFN1iXwN3bvOm/PRIXSBR3qFAYotK/0LveEOHr5JT1WZSzcD6BESl8kPOG3OoDRKO84bHA==} + engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: eslint: '*' - typescript: '*' - resolution: - integrity: sha512-DewqIgscDzmAfd5nOGe4zm6Bl7PKtMG2Ad0KG8CUZAHlXfAKTF9Ol5PXhiMh39yRL2ChRH1cuuUGOcVyyrhQIw== - /@typescript-eslint/experimental-utils/4.19.0_eslint@7.23.0+typescript@4.2.3: dependencies: '@types/json-schema': 7.0.7 '@typescript-eslint/scope-manager': 4.19.0 @@ -2744,15 +3255,20 @@ packages: eslint: 7.23.0 eslint-scope: 5.1.1 eslint-utils: 2.1.0 + transitivePeerDependencies: + - supports-color + - typescript dev: true - engines: - node: ^10.12.0 || >=12.0.0 + + /@typescript-eslint/parser/4.19.0_eslint@7.23.0+typescript@4.2.3: + resolution: {integrity: sha512-/uabZjo2ZZhm66rdAu21HA8nQebl3lAIDcybUoOxoI7VbZBYavLIwtOOmykKCJy+Xq6Vw6ugkiwn8Js7D6wieA==} + engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: - eslint: '*' + eslint: ^5.0.0 || ^6.0.0 || ^7.0.0 typescript: '*' - resolution: - integrity: sha512-9/23F1nnyzbHKuoTqFN1iXwN3bvOm/PRIXSBR3qFAYotK/0LveEOHr5JT1WZSzcD6BESl8kPOG3OoDRKO84bHA== - /@typescript-eslint/parser/4.19.0_eslint@7.23.0+typescript@4.2.3: + peerDependenciesMeta: + typescript: + optional: true dependencies: '@typescript-eslint/scope-manager': 4.19.0 '@typescript-eslint/types': 4.19.0 @@ -2760,39 +3276,36 @@ packages: debug: 4.3.1 eslint: 7.23.0 typescript: 4.2.3 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: ^10.12.0 || >=12.0.0 - peerDependencies: - eslint: ^5.0.0 || ^6.0.0 || ^7.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - resolution: - integrity: sha512-/uabZjo2ZZhm66rdAu21HA8nQebl3lAIDcybUoOxoI7VbZBYavLIwtOOmykKCJy+Xq6Vw6ugkiwn8Js7D6wieA== + /@typescript-eslint/scope-manager/4.19.0: + resolution: {integrity: sha512-GGy4Ba/hLXwJXygkXqMzduqOMc+Na6LrJTZXJWVhRrSuZeXmu8TAnniQVKgj8uTRKe4igO2ysYzH+Np879G75g==} + engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dependencies: '@typescript-eslint/types': 4.19.0 '@typescript-eslint/visitor-keys': 4.19.0 dev: true - engines: - node: ^8.10.0 || ^10.13.0 || >=11.10.1 - resolution: - integrity: sha512-GGy4Ba/hLXwJXygkXqMzduqOMc+Na6LrJTZXJWVhRrSuZeXmu8TAnniQVKgj8uTRKe4igO2ysYzH+Np879G75g== + /@typescript-eslint/types/3.10.1: + resolution: {integrity: sha512-+3+FCUJIahE9q0lDi1WleYzjCwJs5hIsbugIgnbB+dSCYUxl8L6PwmsyOPFZde2hc1DlTo/xnkOgiTLSyAbHiQ==} + engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dev: true - engines: - node: ^8.10.0 || ^10.13.0 || >=11.10.1 - resolution: - integrity: sha512-+3+FCUJIahE9q0lDi1WleYzjCwJs5hIsbugIgnbB+dSCYUxl8L6PwmsyOPFZde2hc1DlTo/xnkOgiTLSyAbHiQ== + /@typescript-eslint/types/4.19.0: + resolution: {integrity: sha512-A4iAlexVvd4IBsSTNxdvdepW0D4uR/fwxDrKUa+iEY9UWvGREu2ZyB8ylTENM1SH8F7bVC9ac9+si3LWNxcBuA==} + engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dev: true - engines: - node: ^8.10.0 || ^10.13.0 || >=11.10.1 - resolution: - integrity: sha512-A4iAlexVvd4IBsSTNxdvdepW0D4uR/fwxDrKUa+iEY9UWvGREu2ZyB8ylTENM1SH8F7bVC9ac9+si3LWNxcBuA== + /@typescript-eslint/typescript-estree/3.10.1_typescript@4.2.3: + resolution: {integrity: 
sha512-QbcXOuq6WYvnB3XPsZpIwztBoquEYLXh2MtwVU+kO8jgYCiv4G5xrSP/1wg4tkvrEE+esZVquIPX/dxPlePk1w==} + engines: {node: ^10.12.0 || >=12.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true dependencies: '@typescript-eslint/types': 3.10.1 '@typescript-eslint/visitor-keys': 3.10.1 @@ -2803,17 +3316,18 @@ packages: semver: 7.3.2 tsutils: 3.21.0_typescript@4.2.3 typescript: 4.2.3 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: ^10.12.0 || >=12.0.0 + + /@typescript-eslint/typescript-estree/4.19.0_typescript@4.2.3: + resolution: {integrity: sha512-3xqArJ/A62smaQYRv2ZFyTA+XxGGWmlDYrsfZG68zJeNbeqRScnhf81rUVa6QG4UgzHnXw5VnMT5cg75dQGDkA==} + engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: typescript: '*' peerDependenciesMeta: typescript: optional: true - resolution: - integrity: sha512-QbcXOuq6WYvnB3XPsZpIwztBoquEYLXh2MtwVU+kO8jgYCiv4G5xrSP/1wg4tkvrEE+esZVquIPX/dxPlePk1w== - /@typescript-eslint/typescript-estree/4.19.0_typescript@4.2.3: dependencies: '@typescript-eslint/types': 4.19.0 '@typescript-eslint/visitor-keys': 4.19.0 @@ -2823,99 +3337,414 @@ packages: semver: 7.3.2 tsutils: 3.21.0_typescript@4.2.3 typescript: 4.2.3 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: ^10.12.0 || >=12.0.0 - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - resolution: - integrity: sha512-3xqArJ/A62smaQYRv2ZFyTA+XxGGWmlDYrsfZG68zJeNbeqRScnhf81rUVa6QG4UgzHnXw5VnMT5cg75dQGDkA== + /@typescript-eslint/visitor-keys/3.10.1: + resolution: {integrity: sha512-9JgC82AaQeglebjZMgYR5wgmfUdUc+EitGUUMW8u2nDckaeimzW+VsoLV6FoimPv2id3VQzfjwBxEMVz08ameQ==} + engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dependencies: eslint-visitor-keys: 1.3.0 dev: true - engines: - node: ^8.10.0 || ^10.13.0 || >=11.10.1 - resolution: - integrity: sha512-9JgC82AaQeglebjZMgYR5wgmfUdUc+EitGUUMW8u2nDckaeimzW+VsoLV6FoimPv2id3VQzfjwBxEMVz08ameQ== + /@typescript-eslint/visitor-keys/4.19.0: + resolution: {integrity: sha512-aGPS6kz//j7XLSlgpzU2SeTqHPsmRYxFztj2vPuMMFJXZudpRSehE3WCV+BaxwZFvfAqMoSd86TEuM0PQ59E/A==} + engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dependencies: '@typescript-eslint/types': 4.19.0 eslint-visitor-keys: 2.0.0 dev: true - engines: - node: ^8.10.0 || ^10.13.0 || >=11.10.1 - resolution: - integrity: sha512-aGPS6kz//j7XLSlgpzU2SeTqHPsmRYxFztj2vPuMMFJXZudpRSehE3WCV+BaxwZFvfAqMoSd86TEuM0PQ59E/A== + + /@umijs/ast/3.5.20: + resolution: {integrity: sha512-bDKofiykO9xT6QWLeauv9B8XOd32SUlRDXVGcLEOhR8ZPPWR2LYgJaYZAnpxEbRmCrfVokiB69uZ2/qcvIvjmQ==} + dependencies: + '@umijs/utils': 3.5.20 + dev: true + + /@umijs/babel-plugin-auto-css-modules/3.5.20: + resolution: {integrity: sha512-mN/ueXm7KHCmrfK8nluPqx3JGNftNj/wWPUKpcDiheagVNz+PJ++aIFI9ikfqK8ukHVVBisWltEJwOrDM8QUdQ==} + dependencies: + '@umijs/utils': 3.5.20 + dev: true + + /@umijs/babel-plugin-import-to-await-require/3.5.20: + resolution: {integrity: sha512-cZma+jLAQ0FeHpezTYJLELSyKMMtrYNIjFeTLxDT6Pw5Z1Ei3cJHf8ERYV4kDBzu/rvuUFx1AC5UPGwYmqsVxw==} + dependencies: + '@umijs/utils': 3.5.20 + dev: true + + /@umijs/babel-plugin-lock-core-js-3/3.5.20: + resolution: {integrity: sha512-bbyg0QLSeNXVrnFZIx2TgOalDUMBPVIHtR6G7aeBmSsiUSFRwsCNprjc/NPEBwOFG10J4XmJFETzcgig3hKLoA==} + dependencies: + '@umijs/utils': 3.5.20 + core-js: 3.6.5 + dev: true + + /@umijs/babel-plugin-no-anonymous-default-export/3.5.20: + resolution: {integrity: sha512-ufM+mcDrRJMTWWqP/C73NLqeW7CrgxrXlSKnmJ+CCNTT1GPex5+5Ou2IM6HLqXukm+7W+xdDepMEWrYGmdGQRg==} + dependencies: + 
'@umijs/utils': 3.5.20 + dev: true + + /@umijs/babel-preset-umi/3.5.20: + resolution: {integrity: sha512-EBvLi2aVkIiKAGmdDXkyx/pW4OJXqxvnSz54Za9r+kVZMbG5kT50ieqMk9yciNa+1JzHGiY+XZl0f0YF9pStAg==} + dependencies: + '@babel/runtime': 7.12.5 + '@umijs/babel-plugin-auto-css-modules': 3.5.20 + '@umijs/babel-plugin-import-to-await-require': 3.5.20 + '@umijs/babel-plugin-lock-core-js-3': 3.5.20 + '@umijs/babel-plugin-no-anonymous-default-export': 3.5.20 + '@umijs/deps': 3.5.20 + dev: true + + /@umijs/bundler-utils/3.5.20_39566ec7cc5fe716a59f91f7330320ef: + resolution: {integrity: sha512-9tg8Dq3ufChaeLVE3RaMNrdtzru7ev/JX9lFUgUXnTladGp4mhPiBHv1bbx0X2I+ZTlRJAplQDKMAq4zkonUXw==} + dependencies: + '@umijs/babel-preset-umi': 3.5.20 + '@umijs/types': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/utils': 3.5.20 + transitivePeerDependencies: + - react + - react-dom + - react-router + dev: true + + /@umijs/bundler-webpack/3.5.20_39566ec7cc5fe716a59f91f7330320ef: + resolution: {integrity: sha512-eGcxaUKTuAXd46uu6d9/B0SwqgR06zaay3iD1AQCle8x3zKilVBT62L+H8IbM0mB/3XFvIXMOxOBGMHPVr+tCQ==} + hasBin: true + dependencies: + '@umijs/bundler-utils': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/deps': 3.5.20 + '@umijs/types': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/utils': 3.5.20 + jest-worker: 26.6.2 + node-libs-browser: 2.2.1 + normalize-url: 1.9.1 + postcss: 7.0.32 + postcss-flexbugs-fixes: 4.2.1 + postcss-loader: 3.0.0 + postcss-preset-env: 6.7.0 + postcss-safe-parser: 4.0.2 + terser: 5.6.0 + webpack-chain: 6.5.1 + transitivePeerDependencies: + - react + - react-dom + - react-router + dev: true + + /@umijs/core/3.5.20: + resolution: {integrity: sha512-XiNHL3cZ8tQAG3FFGkCcIyJGZADM/F53JMgshtKmWmpLWPmd623LUJP5Tz3ifad+4QhxkY3tOKBIoVbnCxwGkA==} + dependencies: + '@umijs/ast': 3.5.20 + '@umijs/babel-preset-umi': 3.5.20 + '@umijs/deps': 3.5.20 + '@umijs/utils': 3.5.20 + dev: true + + /@umijs/deps/3.5.20: + resolution: {integrity: sha512-75iqB0+ITFtxlLb945W2b6lVEgLWRFXaSQZD+wH6c4/WDiagOdYMWX9aiPs2JSzoM/yCtKpMaLeGbmVXsb7y4g==} + dependencies: + '@bloomberg/record-tuple-polyfill': 0.0.3 + chokidar: 3.5.1 + clipboardy: 2.3.0 + esbuild: 0.12.15 + jest-worker: 24.9.0 + prettier: 2.2.1 + dev: true + + /@umijs/plugin-analytics/0.2.2_umi@3.5.20: + resolution: {integrity: sha512-dVDzUfgIdEwdCC6a5IsMYpIPI+bEZjBEqIhAvw9dic6Vk77w9RxQxyRfW11dDmdXLAwWphp22NntQNt1ejZPtg==} + peerDependencies: + umi: 3.x + dependencies: + umi: 3.5.20_react-router@5.2.0 + dev: true + + /@umijs/preset-built-in/3.5.20_react-dom@16.14.0+react@16.14.0: + resolution: {integrity: sha512-4qrYPdEDi0ewZ1gYyfbg9bRmbPtvaNTs3WlA574VPf77ntsZDsMUNNUfJSd/acQTzzbEajte2u8uq8q0yvZ4mA==} + peerDependencies: + react: 16.x || 17.x + dependencies: + '@types/react-router-config': 5.0.2 + '@umijs/babel-preset-umi': 3.5.20 + '@umijs/bundler-webpack': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/deps': 3.5.20 + '@umijs/renderer-mpa': 3.5.20_react-dom@16.14.0+react@16.14.0 + '@umijs/renderer-react': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/runtime': 3.5.20_react@16.14.0 + '@umijs/server': 3.5.20 + '@umijs/types': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/utils': 3.5.20 + ansi-html: 0.0.7 + core-js: 3.6.5 + core-js-pure: 3.9.1 + error-stack-parser: 2.0.6 + es-module-lexer: 0.7.1 + es5-imcompatible-versions: 0.1.73 + history-with-query: 4.10.4 + html-entities: 2.3.2 + mime: 1.3.6 + react: 16.14.0 + react-refresh: 0.10.0 + react-router: 5.2.0_react@16.14.0 + react-router-config: 5.1.1_react-router@5.2.0+react@16.14.0 + react-router-dom: 
5.2.0_react@16.14.0 + regenerator-runtime: 0.13.5 + schema-utils: 3.1.0 + transitivePeerDependencies: + - react-dom + dev: true + + /@umijs/preset-dumi/1.1.30_b08c95616290592113c9128c4b0c3f8f: + resolution: {integrity: sha512-DzfSuSSDe/jH9w/CbebtjXurKHo14nSMWfQYv08O8IMuvNqp+Hey+h9nkFteiIdyk8LNWd/FdguKaSiAaXqkBQ==} + peerDependencies: + umi: 3.x + dependencies: + '@babel/core': 7.12.3 + '@babel/generator': 7.13.9 + '@babel/plugin-transform-modules-commonjs': 7.13.8_@babel+core@7.12.3 + '@babel/traverse': 7.13.13 + '@babel/types': 7.13.13 + '@mapbox/hast-util-to-jsx': 1.0.0 + '@umijs/babel-preset-umi': 3.5.20 + '@umijs/plugin-analytics': 0.2.2_umi@3.5.20 + '@umijs/runtime': 3.5.20_react@17.0.2 + '@umijs/types': 3.5.20_1a2589ed57a826879b76fd1635c5e26a + '@umijs/utils': 3.5.20 + copy-text-to-clipboard: 2.2.0 + deepmerge: 4.2.2 + dumi-assets-types: 1.0.0 + dumi-theme-default: 1.1.13_ac48d56268a7095d7c6000b1357273b0 + enhanced-resolve: 4.5.0 + github-slugger: 1.4.0 + hast-util-has-property: 1.0.4 + hast-util-is-element: 1.1.0 + hast-util-raw: 6.1.0 + hast-util-to-html: 7.1.3 + hast-util-to-string: 1.0.4 + hosted-git-info: 3.0.8 + ignore: 5.1.8 + js-yaml: 3.14.1 + lodash.throttle: 4.1.1 + lz-string: 1.4.4 + react-docgen-typescript-dumi-tmp: 1.22.1-0_typescript@4.2.3 + rehype-autolink-headings: 4.0.0 + rehype-mathjax: 3.1.0 + rehype-remove-comments: 4.0.2 + rehype-stringify: 8.0.0 + remark-frontmatter: 3.0.0 + remark-gfm: 1.0.0 + remark-math: 4.0.0 + remark-parse: 9.0.0 + remark-rehype: 8.1.0 + remark-stringify: 9.0.1 + sitemap: 6.4.0 + slash2: 2.0.0 + terser: 5.6.1 + umi: 3.5.20_react-router@5.2.0 + unified: 8.4.2 + unist-util-visit: 2.0.3 + unist-util-visit-parents: 3.1.1 + transitivePeerDependencies: + - bufferutil + - canvas + - react + - react-dom + - react-router + - supports-color + - typescript + - utf-8-validate + dev: true + + /@umijs/renderer-mpa/3.5.20_react-dom@16.14.0+react@16.14.0: + resolution: {integrity: sha512-lE1EA8kciz8YTpeRGPd2ZY+owH1lAuj6nkhvID9QRR8qTPRRunU0lZHMAW0C8NcnQ9kECbHblkPPbDG3NayZZA==} + peerDependencies: + react: 16.x || 17.x + react-dom: 16.x || 17.x + dependencies: + '@types/react': 16.14.5 + '@types/react-dom': 16.9.12 + '@umijs/runtime': 3.5.20_react@16.14.0 + react: 16.14.0 + react-dom: 16.14.0_react@16.14.0 + dev: true + + /@umijs/renderer-react/3.5.20_1a2589ed57a826879b76fd1635c5e26a: + resolution: {integrity: sha512-8ZEHxMmF0Rm5il9RjTbzn4DLd7FU8KkqAwRGcpNhMcsPxXkbieTfnQC+a0D7nvor0+vakygfCG7ZgXDkcO1W0Q==} + peerDependencies: + react: 16.x || 17.x + react-dom: 16.x || 17.x + dependencies: + '@types/react': 16.14.5 + '@types/react-dom': 16.9.12 + '@types/react-router-config': 5.0.2 + '@umijs/runtime': 3.5.20_react@17.0.2 + react: 17.0.2 + react-dom: 17.0.2_react@17.0.2 + react-router-config: 5.1.1_react-router@5.2.0+react@17.0.2 + transitivePeerDependencies: + - react-router + dev: true + + /@umijs/renderer-react/3.5.20_39566ec7cc5fe716a59f91f7330320ef: + resolution: {integrity: sha512-8ZEHxMmF0Rm5il9RjTbzn4DLd7FU8KkqAwRGcpNhMcsPxXkbieTfnQC+a0D7nvor0+vakygfCG7ZgXDkcO1W0Q==} + peerDependencies: + react: 16.x || 17.x + react-dom: 16.x || 17.x + dependencies: + '@types/react': 16.14.5 + '@types/react-dom': 16.9.12 + '@types/react-router-config': 5.0.2 + '@umijs/runtime': 3.5.20_react@16.14.0 + react: 16.14.0 + react-dom: 16.14.0_react@16.14.0 + react-router-config: 5.1.1_react-router@5.2.0+react@16.14.0 + transitivePeerDependencies: + - react-router + dev: true + + /@umijs/runtime/3.5.20_react@16.14.0: + resolution: {integrity: 
sha512-AADBzjzbydjMBpaA9nw1vNsaBA7YoUTV45tmUAf47Mn3sPlEknbUFOPryhZEsOFfkBvag7GysRNzgCJPxYiWIQ==} + peerDependencies: + react: 16.x || 17.x + dependencies: + '@types/react-router': 5.1.12 + '@types/react-router-dom': 5.1.7 + history-with-query: 4.10.4 + react: 16.14.0 + react-router: 5.2.0_react@16.14.0 + react-router-dom: 5.2.0_react@16.14.0 + use-subscription: 1.5.1_react@16.14.0 + dev: true + + /@umijs/runtime/3.5.20_react@17.0.2: + resolution: {integrity: sha512-AADBzjzbydjMBpaA9nw1vNsaBA7YoUTV45tmUAf47Mn3sPlEknbUFOPryhZEsOFfkBvag7GysRNzgCJPxYiWIQ==} + peerDependencies: + react: 16.x || 17.x + dependencies: + '@types/react-router': 5.1.12 + '@types/react-router-dom': 5.1.7 + history-with-query: 4.10.4 + react: 17.0.2 + react-router: 5.2.0_react@17.0.2 + react-router-dom: 5.2.0_react@17.0.2 + use-subscription: 1.5.1_react@17.0.2 + dev: true + + /@umijs/server/3.5.20: + resolution: {integrity: sha512-upIahEP5+Xb4e9GIjFWWF5boHRvINuVgtn21ySt9335JkT24wIRsInMgrQJ6WvqCxckSg0g0/hpxIJWCcIhIkw==} + dependencies: + '@umijs/deps': 3.5.20 + '@umijs/utils': 3.5.20 + dev: true + + /@umijs/types/3.5.20_1a2589ed57a826879b76fd1635c5e26a: + resolution: {integrity: sha512-g2Eesf6tLfKdS6lRNDG4YYG3cEYKYsD5FWcziM12fpAV9u8wEmxxhpc+GeSgtQ5s/FT7B7A1WoX8YmSJgupM/w==} + dependencies: + '@umijs/babel-preset-umi': 3.5.20 + '@umijs/core': 3.5.20 + '@umijs/deps': 3.5.20 + '@umijs/renderer-react': 3.5.20_1a2589ed57a826879b76fd1635c5e26a + '@umijs/server': 3.5.20 + '@umijs/utils': 3.5.20 + webpack-chain: 6.5.1 + transitivePeerDependencies: + - react + - react-dom + - react-router + dev: true + + /@umijs/types/3.5.20_39566ec7cc5fe716a59f91f7330320ef: + resolution: {integrity: sha512-g2Eesf6tLfKdS6lRNDG4YYG3cEYKYsD5FWcziM12fpAV9u8wEmxxhpc+GeSgtQ5s/FT7B7A1WoX8YmSJgupM/w==} + dependencies: + '@umijs/babel-preset-umi': 3.5.20 + '@umijs/core': 3.5.20 + '@umijs/deps': 3.5.20 + '@umijs/renderer-react': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/server': 3.5.20 + '@umijs/utils': 3.5.20 + webpack-chain: 6.5.1 + transitivePeerDependencies: + - react + - react-dom + - react-router + dev: true + + /@umijs/utils/3.5.20: + resolution: {integrity: sha512-Y0i27zZTCKoqdHHyTuebO/GOIY4gGLUwDFs1eoH+m4etPn+uRq0iax9KJOkelmax2K3YLsT4KbRwM1enlSsv3A==} + dependencies: + '@umijs/deps': 3.5.20 + dev: true + /@webassemblyjs/ast/1.9.0: + resolution: {integrity: sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA==} dependencies: '@webassemblyjs/helper-module-context': 1.9.0 '@webassemblyjs/helper-wasm-bytecode': 1.9.0 '@webassemblyjs/wast-parser': 1.9.0 dev: true - resolution: - integrity: sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA== + /@webassemblyjs/floating-point-hex-parser/1.9.0: + resolution: {integrity: sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA==} dev: true - resolution: - integrity: sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA== + /@webassemblyjs/helper-api-error/1.9.0: + resolution: {integrity: sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw==} dev: true - resolution: - integrity: sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw== + /@webassemblyjs/helper-buffer/1.9.0: + resolution: {integrity: sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA==} dev: true - resolution: - integrity: 
sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA== + /@webassemblyjs/helper-code-frame/1.9.0: + resolution: {integrity: sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA==} dependencies: '@webassemblyjs/wast-printer': 1.9.0 dev: true - resolution: - integrity: sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA== + /@webassemblyjs/helper-fsm/1.9.0: + resolution: {integrity: sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw==} dev: true - resolution: - integrity: sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw== + /@webassemblyjs/helper-module-context/1.9.0: + resolution: {integrity: sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g==} dependencies: '@webassemblyjs/ast': 1.9.0 dev: true - resolution: - integrity: sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g== + /@webassemblyjs/helper-wasm-bytecode/1.9.0: + resolution: {integrity: sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw==} dev: true - resolution: - integrity: sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw== + /@webassemblyjs/helper-wasm-section/1.9.0: + resolution: {integrity: sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/helper-buffer': 1.9.0 '@webassemblyjs/helper-wasm-bytecode': 1.9.0 '@webassemblyjs/wasm-gen': 1.9.0 dev: true - resolution: - integrity: sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw== + /@webassemblyjs/ieee754/1.9.0: + resolution: {integrity: sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg==} dependencies: '@xtuc/ieee754': 1.2.0 dev: true - resolution: - integrity: sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg== + /@webassemblyjs/leb128/1.9.0: + resolution: {integrity: sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw==} dependencies: '@xtuc/long': 4.2.2 dev: true - resolution: - integrity: sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw== + /@webassemblyjs/utf8/1.9.0: + resolution: {integrity: sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w==} dev: true - resolution: - integrity: sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w== + /@webassemblyjs/wasm-edit/1.9.0: + resolution: {integrity: sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/helper-buffer': 1.9.0 @@ -2926,9 +3755,9 @@ packages: '@webassemblyjs/wasm-parser': 1.9.0 '@webassemblyjs/wast-printer': 1.9.0 dev: true - resolution: - integrity: sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw== + /@webassemblyjs/wasm-gen/1.9.0: + resolution: {integrity: sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/helper-wasm-bytecode': 1.9.0 @@ -2936,18 +3765,18 @@ packages: 
'@webassemblyjs/leb128': 1.9.0 '@webassemblyjs/utf8': 1.9.0 dev: true - resolution: - integrity: sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA== + /@webassemblyjs/wasm-opt/1.9.0: + resolution: {integrity: sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/helper-buffer': 1.9.0 '@webassemblyjs/wasm-gen': 1.9.0 '@webassemblyjs/wasm-parser': 1.9.0 dev: true - resolution: - integrity: sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A== + /@webassemblyjs/wasm-parser/1.9.0: + resolution: {integrity: sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/helper-api-error': 1.9.0 @@ -2956,9 +3785,9 @@ packages: '@webassemblyjs/leb128': 1.9.0 '@webassemblyjs/utf8': 1.9.0 dev: true - resolution: - integrity: sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA== + /@webassemblyjs/wast-parser/1.9.0: + resolution: {integrity: sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/floating-point-hex-parser': 1.9.0 @@ -2967,369 +3796,295 @@ packages: '@webassemblyjs/helper-fsm': 1.9.0 '@xtuc/long': 4.2.2 dev: true - resolution: - integrity: sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw== + /@webassemblyjs/wast-printer/1.9.0: + resolution: {integrity: sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA==} dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/wast-parser': 1.9.0 '@xtuc/long': 4.2.2 dev: true - resolution: - integrity: sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA== + /@webpack-cli/configtest/1.0.2_webpack-cli@4.6.0+webpack@4.44.2: - dependencies: - webpack: 4.44.2_webpack-cli@4.6.0 - webpack-cli: 4.6.0_e3222a4926c3b7d4c1aa5becb19e445f - dev: true + resolution: {integrity: sha512-3OBzV2fBGZ5TBfdW50cha1lHDVf9vlvRXnjpVbJBa20pSZQaSkMJZiwA8V2vD9ogyeXn8nU5s5A6mHyf5jhMzA==} peerDependencies: webpack: 4.x.x || 5.x.x webpack-cli: 4.x.x - resolution: - integrity: sha512-3OBzV2fBGZ5TBfdW50cha1lHDVf9vlvRXnjpVbJBa20pSZQaSkMJZiwA8V2vD9ogyeXn8nU5s5A6mHyf5jhMzA== - /@webpack-cli/info/1.2.3_webpack-cli@4.6.0: dependencies: - envinfo: 7.7.4 + webpack: 4.44.2_webpack-cli@4.6.0 webpack-cli: 4.6.0_e3222a4926c3b7d4c1aa5becb19e445f dev: true + + /@webpack-cli/info/1.2.3_webpack-cli@4.6.0: + resolution: {integrity: sha512-lLek3/T7u40lTqzCGpC6CAbY6+vXhdhmwFRxZLMnRm6/sIF/7qMpT8MocXCRQfz0JAh63wpbXLMnsQ5162WS7Q==} peerDependencies: webpack-cli: 4.x.x - resolution: - integrity: sha512-lLek3/T7u40lTqzCGpC6CAbY6+vXhdhmwFRxZLMnRm6/sIF/7qMpT8MocXCRQfz0JAh63wpbXLMnsQ5162WS7Q== - /@webpack-cli/serve/1.3.1_6ea2aad37093f4611e49e6674f4decdc: dependencies: + envinfo: 7.7.4 webpack-cli: 4.6.0_e3222a4926c3b7d4c1aa5becb19e445f - webpack-dev-server: 3.11.0_webpack-cli@4.6.0+webpack@4.44.2 dev: true + + /@webpack-cli/serve/1.3.1_6ea2aad37093f4611e49e6674f4decdc: + resolution: {integrity: sha512-0qXvpeYO6vaNoRBI52/UsbcaBydJCggoBBnIo/ovQQdn6fug0BgwsjorV1hVS7fMqGVTZGcVxv8334gjmbj5hw==} peerDependencies: webpack-cli: 4.x.x webpack-dev-server: '*' peerDependenciesMeta: webpack-dev-server: optional: true - resolution: - integrity: 
sha512-0qXvpeYO6vaNoRBI52/UsbcaBydJCggoBBnIo/ovQQdn6fug0BgwsjorV1hVS7fMqGVTZGcVxv8334gjmbj5hw== - /@welldone-software/why-did-you-render/6.1.1_react@17.0.2: dependencies: - lodash: 4.17.21 - react: 17.0.2 - dev: false - peerDependencies: - react: ^16 || ^17 - resolution: - integrity: sha512-BMFp33T4MC27qvCWsI1SqwZCxIlxoQXsPQFdGLDsPSg7sgoWX4Gzj0+hlKVrWrCBiIxi7gP2JcS9IK6CZzk8mg== + webpack-cli: 4.6.0_e3222a4926c3b7d4c1aa5becb19e445f + webpack-dev-server: 3.11.0_webpack-cli@4.6.0+webpack@4.44.2 + dev: true + /@xobotyi/scrollbar-width/1.9.5: + resolution: {integrity: sha512-N8tkAACJx2ww8vFMneJmaAgmjAG1tnVBZJRLRcx061tmsLRZHSEZSLuGWnwPtunsSLvSqXQ2wfp7Mgqg1I+2dQ==} dev: false - resolution: - integrity: sha512-N8tkAACJx2ww8vFMneJmaAgmjAG1tnVBZJRLRcx061tmsLRZHSEZSLuGWnwPtunsSLvSqXQ2wfp7Mgqg1I+2dQ== + /@xtuc/ieee754/1.2.0: + resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==} dev: true - resolution: - integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + /@xtuc/long/4.2.2: + resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==} dev: true - resolution: - integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + /abab/2.0.5: + resolution: {integrity: sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==} dev: true - resolution: - integrity: sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== + /accepts/1.3.7: + resolution: {integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==} + engines: {node: '>= 0.6'} dependencies: mime-types: 2.1.29 negotiator: 0.6.2 dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== + /acorn-globals/6.0.0: + resolution: {integrity: sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==} dependencies: acorn: 7.4.1 acorn-walk: 7.2.0 dev: true - resolution: - integrity: sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + /acorn-jsx/5.3.1_acorn@7.4.1: + resolution: {integrity: sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 dependencies: acorn: 7.4.1 dev: true - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - resolution: - integrity: sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== + /acorn-walk/7.2.0: + resolution: {integrity: sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==} + engines: {node: '>=0.4.0'} dev: true - engines: - node: '>=0.4.0' - resolution: - integrity: sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + /acorn/6.4.2: - dev: true - engines: - node: '>=0.4.0' + resolution: {integrity: sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==} + engines: {node: '>=0.4.0'} hasBin: true - resolution: - integrity: sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== - /acorn/7.4.1: dev: true - engines: - node: '>=0.4.0' + + /acorn/7.4.1: + resolution: {integrity: 
sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} + engines: {node: '>=0.4.0'} hasBin: true - resolution: - integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - /acorn/8.1.0: dev: true - engines: - node: '>=0.4.0' + + /acorn/8.1.0: + resolution: {integrity: sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==} + engines: {node: '>=0.4.0'} hasBin: true - resolution: - integrity: sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA== + dev: true + /address/1.1.2: + resolution: {integrity: sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==} + engines: {node: '>= 0.12.0'} dev: false - engines: - node: '>= 0.12.0' - resolution: - integrity: sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== + /adjust-sourcemap-loader/3.0.0: + resolution: {integrity: sha512-YBrGyT2/uVQ/c6Rr+t6ZJXniY03YtHGMJQYal368burRGYKqhx9qGTWqcBU5s1CwYY9E/ri63RYyG1IacMZtqw==} + engines: {node: '>=8.9'} dependencies: loader-utils: 2.0.0 regex-parser: 2.2.11 dev: true - engines: - node: '>=8.9' - resolution: - integrity: sha512-YBrGyT2/uVQ/c6Rr+t6ZJXniY03YtHGMJQYal368burRGYKqhx9qGTWqcBU5s1CwYY9E/ri63RYyG1IacMZtqw== + /aggregate-error/3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} dependencies: clean-stack: 2.2.0 indent-string: 4.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + /ajv-errors/1.0.1_ajv@6.12.6: - dependencies: - ajv: 6.12.6 - dev: true + resolution: {integrity: sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==} peerDependencies: ajv: '>=5.0.0' - resolution: - integrity: sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== - /ajv-keywords/3.5.2_ajv@6.12.6: dependencies: ajv: 6.12.6 dev: true + + /ajv-keywords/3.5.2_ajv@6.12.6: + resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==} peerDependencies: ajv: ^6.9.1 - resolution: - integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + dependencies: + ajv: 6.12.6 + dev: true + /ajv/6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} dependencies: fast-deep-equal: 3.1.3 fast-json-stable-stringify: 2.1.0 json-schema-traverse: 0.4.1 uri-js: 4.4.1 dev: true - resolution: - integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + /ajv/8.0.1: + resolution: {integrity: sha512-46ZA4TalFcLLqX1dEU3dhdY38wAtDydJ4e7QQTVekLUTzXkb1LfqU6VOBXC/a9wiv4T094WURqJH6ZitF92Kqw==} dependencies: fast-deep-equal: 3.1.3 json-schema-traverse: 1.0.0 require-from-string: 2.0.2 uri-js: 4.4.1 dev: true - resolution: - integrity: sha512-46ZA4TalFcLLqX1dEU3dhdY38wAtDydJ4e7QQTVekLUTzXkb1LfqU6VOBXC/a9wiv4T094WURqJH6ZitF92Kqw== + /alphanum-sort/1.0.2: + resolution: {integrity: sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=} dev: true - resolution: - integrity: sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM= + /ansi-colors/3.2.4: + resolution: {integrity: 
[pnpm-lock.yaml: auto-generated lockfile hunk, collapsed for readability; extraction flattened its line structure beyond faithful reconstruction. The hunk covers the `packages:` records from the `ansi-*` block through `core-js-compat`. Almost all of the churn is mechanical: every record migrates from the older block layout, where keys were alphabetized and `resolution:`/`integrity:` and `engines:` were nested maps, to the inline flow-map layout (`resolution: {integrity: sha512-...}`, `engines: {node: '>=6'}`), with keys reordered as resolution, engines, deprecated, peerDependencies, dependencies, optionalDependencies, transitivePeerDependencies, then the dev/optional flags. The substantive dependency changes visible in this range:

- Removed: antd 4.14.1 together with its rc-* dependency graph, antd-dayjs-webpack-plugin 1.0.6, array-tree-filter 2.1.0, async-validator 3.5.1, camel-case 3.0.0, compute-scroll-into-view 1.0.17, cool-path 0.1.32.
- Added: anymatch 3.1.2, arch 2.2.0, arg 5.0.1, b-tween 0.3.3, b-validate 1.4.4, babel-plugin-import 1.13.6, bail 1.0.5, camelcase-css 2.0.1, ccount 1.1.0, chalk 4.1.2, the character-entities / character-entities-html4 / character-entities-legacy / character-reference-invalid family, chokidar 3.5.3, classnames 2.3.1, clipboardy 2.3.0, color 3.2.1, color-string 1.9.1, comma-separated-tokens 1.0.8, compute-scroll-into-view 1.0.11 and 1.0.20, copy-text-to-clipboard 2.2.0.
- Updated: chart.js 3.2.1 → 3.5.0 (now flagged dev-only); camel-case 4.1.2 picks up tslib 2.4.1 (was 2.1.0).
- Bookkeeping: axios 0.21.1 gains a `transitivePeerDependencies: [debug]` block, and the babel tool-chain records (babel-eslint, babel-jest, babel-plugin-istanbul, the babel-plugin-polyfill-* family, babel-preset-react-app) gain `transitivePeerDependencies: [supports-color]`; a few legacy sha1 integrity hashes are re-resolved as sha512 (e.g. color-name 1.1.3).]
/core-js-pure/3.9.1: - dev: true + resolution: {integrity: sha512-laz3Zx0avrw9a4QEIdmIblnVuJz8W51leY9iLThatCsFawWxC3sE4guASC78JbCin+DkwMpCdp1AVAuzL/GN7A==} requiresBuild: true - resolution: - integrity: sha512-laz3Zx0avrw9a4QEIdmIblnVuJz8W51leY9iLThatCsFawWxC3sE4guASC78JbCin+DkwMpCdp1AVAuzL/GN7A== + dev: true + /core-js/2.6.12: + resolution: {integrity: sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==} deprecated: core-js@<3 is no longer maintained and not recommended for usage due to the number of issues. Please, upgrade your dependencies to the actual version of core-js@3. requiresBuild: true - resolution: - integrity: sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== + dev: true + + /core-js/3.6.5: + resolution: {integrity: sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==} + requiresBuild: true + dev: true + /core-js/3.9.1: - dev: false + resolution: {integrity: sha512-gSjRvzkxQc1zjM/5paAmL4idJBFzuJoo+jDjF1tStYFMV2ERfD02HhahhCGXUyHxQRG4yFKVSdO6g62eoRMcDg==} requiresBuild: true - resolution: - integrity: sha512-gSjRvzkxQc1zjM/5paAmL4idJBFzuJoo+jDjF1tStYFMV2ERfD02HhahhCGXUyHxQRG4yFKVSdO6g62eoRMcDg== + dev: false + /core-util-is/1.0.2: + resolution: {integrity: sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=} dev: true - resolution: - integrity: sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + /cosmiconfig/5.2.1: + resolution: {integrity: sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==} + engines: {node: '>=4'} dependencies: import-fresh: 2.0.0 is-directory: 0.3.1 js-yaml: 3.14.1 parse-json: 4.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== + /cosmiconfig/6.0.0: + resolution: {integrity: sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==} + engines: {node: '>=8'} dependencies: '@types/parse-json': 4.0.0 import-fresh: 3.3.0 @@ -4664,11 +5447,10 @@ packages: path-type: 4.0.0 yaml: 1.10.2 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + /cosmiconfig/7.0.0: + resolution: {integrity: sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA==} + engines: {node: '>=10'} dependencies: '@types/parse-json': 4.0.0 import-fresh: 3.3.0 @@ -4676,18 +5458,16 @@ packages: path-type: 4.0.0 yaml: 1.10.2 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== + /create-ecdh/4.0.4: + resolution: {integrity: sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==} dependencies: bn.js: 4.12.0 elliptic: 6.5.4 dev: true - resolution: - integrity: sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== + /create-hash/1.2.0: + resolution: {integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==} dependencies: cipher-base: 1.0.4 inherits: 2.0.4 @@ -4695,9 +5475,9 @@ packages: ripemd160: 2.0.2 sha.js: 2.4.11 dev: true - resolution: - integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== + /create-hmac/1.1.7: + resolution: {integrity: 
sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==} dependencies: cipher-base: 1.0.4 create-hash: 1.2.0 @@ -4706,9 +5486,18 @@ packages: safe-buffer: 5.2.1 sha.js: 2.4.11 dev: true - resolution: - integrity: sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== + + /cross-env/7.0.3: + resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} + engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} + hasBin: true + dependencies: + cross-spawn: 7.0.3 + dev: true + /cross-spawn/6.0.5: + resolution: {integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==} + engines: {node: '>=4.8'} dependencies: nice-try: 1.0.5 path-key: 2.0.1 @@ -4716,20 +5505,17 @@ packages: shebang-command: 1.2.0 which: 1.3.1 dev: true - engines: - node: '>=4.8' - resolution: - integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + /cross-spawn/7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} dependencies: path-key: 3.1.1 shebang-command: 2.0.0 which: 2.0.2 - engines: - node: '>= 8' - resolution: - integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + /crypto-browserify/3.12.0: + resolution: {integrity: sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==} dependencies: browserify-cipher: 1.0.1 browserify-sign: 4.2.1 @@ -4743,60 +5529,58 @@ packages: randombytes: 2.1.0 randomfill: 1.0.4 dev: true - resolution: - integrity: sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== + /crypto-random-string/1.0.0: + resolution: {integrity: sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4= + /css-blank-pseudo/0.1.4: + resolution: {integrity: sha512-LHz35Hr83dnFeipc7oqFDmsjHdljj3TQtxGGiNWSOsTLIAubSm4TEz8qCaKFpk7idaQ1GfWscF4E6mgpBysA1w==} + engines: {node: '>=6.0.0'} + hasBin: true dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - hasBin: true - resolution: - integrity: sha512-LHz35Hr83dnFeipc7oqFDmsjHdljj3TQtxGGiNWSOsTLIAubSm4TEz8qCaKFpk7idaQ1GfWscF4E6mgpBysA1w== + /css-color-keywords/1.0.0: + resolution: {integrity: sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU=} + engines: {node: '>=4'} dev: false - engines: - node: '>=4' - resolution: - integrity: sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU= + /css-color-names/0.0.4: + resolution: {integrity: sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=} dev: true - resolution: - integrity: sha1-gIrcLnnPhHOAabZGyyDsJ762KeA= + /css-declaration-sorter/4.0.1: + resolution: {integrity: sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA==} + engines: {node: '>4'} dependencies: postcss: 7.0.35 timsort: 0.3.0 dev: true - engines: - node: '>4' - resolution: - integrity: sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA== + /css-has-pseudo/0.10.0: + resolution: {integrity: sha512-Z8hnfsZu4o/kt+AuFzeGpLVhFOGO9mluyHBaA2bA8aCGTwah5sT3WV/fTHH8UNZUytOIImuGPrl/prlb4oX4qQ==} + engines: {node: '>=6.0.0'} + hasBin: true dependencies: postcss: 7.0.35 postcss-selector-parser: 5.0.0 dev: true - engines: - node: '>=6.0.0' - hasBin: true - resolution: - 
integrity: sha512-Z8hnfsZu4o/kt+AuFzeGpLVhFOGO9mluyHBaA2bA8aCGTwah5sT3WV/fTHH8UNZUytOIImuGPrl/prlb4oX4qQ== + /css-in-js-utils/2.0.1: + resolution: {integrity: sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA==} dependencies: hyphenate-style-name: 1.0.4 isobject: 3.0.1 dev: false - resolution: - integrity: sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA== + /css-loader/4.3.0_webpack@4.44.2: + resolution: {integrity: sha512-rdezjCjScIrsL8BSYszgT4s476IcNKt6yX69t0pHjJVnPUTDpn4WfIpDQTN3wCJvUvfsz/mFjuGOekf3PY3NUg==} + engines: {node: '>= 10.13.0'} + peerDependencies: + webpack: ^4.27.0 || ^5.0.0 dependencies: camelcase: 6.2.0 cssesc: 3.0.0 @@ -4812,105 +5596,96 @@ packages: semver: 7.3.2 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>= 10.13.0' - peerDependencies: - webpack: ^4.27.0 || ^5.0.0 - resolution: - integrity: sha512-rdezjCjScIrsL8BSYszgT4s476IcNKt6yX69t0pHjJVnPUTDpn4WfIpDQTN3wCJvUvfsz/mFjuGOekf3PY3NUg== + /css-prefers-color-scheme/3.1.1: + resolution: {integrity: sha512-MTu6+tMs9S3EUqzmqLXEcgNRbNkkD/TGFvowpeoWJn5Vfq7FMgsmRQs9X5NXAURiOBmOxm/lLjsDNXDE6k9bhg==} + engines: {node: '>=6.0.0'} + hasBin: true dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - hasBin: true - resolution: - integrity: sha512-MTu6+tMs9S3EUqzmqLXEcgNRbNkkD/TGFvowpeoWJn5Vfq7FMgsmRQs9X5NXAURiOBmOxm/lLjsDNXDE6k9bhg== + /css-select-base-adapter/0.1.1: + resolution: {integrity: sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==} dev: true - resolution: - integrity: sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + /css-select/2.1.0: + resolution: {integrity: sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==} dependencies: boolbase: 1.0.0 css-what: 3.4.2 domutils: 1.7.0 nth-check: 1.0.2 dev: true - resolution: - integrity: sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + /css-to-react-native/3.0.0: + resolution: {integrity: sha512-Ro1yETZA813eoyUp2GDBhG2j+YggidUmzO1/v9eYBKR2EHVEniE2MI/NqpTQ954BMpTPZFsGNPm46qFB9dpaPQ==} dependencies: camelize: 1.0.0 css-color-keywords: 1.0.0 postcss-value-parser: 4.1.0 dev: false - resolution: - integrity: sha512-Ro1yETZA813eoyUp2GDBhG2j+YggidUmzO1/v9eYBKR2EHVEniE2MI/NqpTQ954BMpTPZFsGNPm46qFB9dpaPQ== + /css-tree/1.0.0-alpha.37: + resolution: {integrity: sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==} + engines: {node: '>=8.0.0'} dependencies: mdn-data: 2.0.4 source-map: 0.6.1 dev: true - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + /css-tree/1.1.2: + resolution: {integrity: sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ==} + engines: {node: '>=8.0.0'} dependencies: mdn-data: 2.0.14 source-map: 0.6.1 - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ== + /css-what/3.4.2: + resolution: {integrity: sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==} + engines: {node: '>= 6'} dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + 
/css.escape/1.5.1: + resolution: {integrity: sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s=} dev: true - resolution: - integrity: sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= + /css/2.2.4: + resolution: {integrity: sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw==} dependencies: inherits: 2.0.4 source-map: 0.6.1 source-map-resolve: 0.5.3 urix: 0.1.0 dev: true - resolution: - integrity: sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw== + /css/3.0.0: + resolution: {integrity: sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ==} dependencies: inherits: 2.0.4 source-map: 0.6.1 source-map-resolve: 0.6.0 dev: true - resolution: - integrity: sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== + /cssdb/4.4.0: + resolution: {integrity: sha512-LsTAR1JPEM9TpGhl/0p3nQecC2LJ0kD8X5YARu1hk/9I1gril5vDtMZyNxcEpxxDj34YNck/ucjuoUd66K03oQ==} dev: true - resolution: - integrity: sha512-LsTAR1JPEM9TpGhl/0p3nQecC2LJ0kD8X5YARu1hk/9I1gril5vDtMZyNxcEpxxDj34YNck/ucjuoUd66K03oQ== + /cssesc/2.0.0: - dev: true - engines: - node: '>=4' + resolution: {integrity: sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==} + engines: {node: '>=4'} hasBin: true - resolution: - integrity: sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg== - /cssesc/3.0.0: dev: true - engines: - node: '>=4' + + /cssesc/3.0.0: + resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} + engines: {node: '>=4'} hasBin: true - resolution: - integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + dev: true + /cssnano-preset-default/4.0.7: + resolution: {integrity: sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA==} + engines: {node: '>=6.9.0'} dependencies: css-declaration-sorter: 4.0.1 cssnano-util-raw-cache: 4.0.1 @@ -4943,119 +5718,116 @@ packages: postcss-svgo: 4.0.2 postcss-unique-selectors: 4.0.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA== + /cssnano-util-get-arguments/4.0.0: + resolution: {integrity: sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8=} + engines: {node: '>=6.9.0'} dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8= + /cssnano-util-get-match/4.0.0: + resolution: {integrity: sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0=} + engines: {node: '>=6.9.0'} dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0= + /cssnano-util-raw-cache/4.0.1: + resolution: {integrity: sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA== + /cssnano-util-same-parent/4.0.1: + resolution: {integrity: sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==} + engines: {node: '>=6.9.0'} dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q== + /cssnano/4.1.10: + resolution: 
{integrity: sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ==} + engines: {node: '>=6.9.0'} dependencies: cosmiconfig: 5.2.1 cssnano-preset-default: 4.0.7 is-resolvable: 1.1.0 postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ== + /csso/4.2.0: + resolution: {integrity: sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==} + engines: {node: '>=8.0.0'} dependencies: css-tree: 1.1.2 dev: true - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + /cssom/0.3.8: + resolution: {integrity: sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==} dev: true - resolution: - integrity: sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + /cssom/0.4.4: + resolution: {integrity: sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==} dev: true - resolution: - integrity: sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + /cssstyle/2.3.0: + resolution: {integrity: sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==} + engines: {node: '>=8'} dependencies: cssom: 0.3.8 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + /csstype/3.0.7: - resolution: - integrity: sha512-KxnUB0ZMlnUWCsx2Z8MUsr6qV6ja1w9ArPErJaJaF8a5SOWoHLIszeCTKGRGRgtLgYrs1E8CHkNSP1VZTTPc9g== + resolution: {integrity: sha512-KxnUB0ZMlnUWCsx2Z8MUsr6qV6ja1w9ArPErJaJaF8a5SOWoHLIszeCTKGRGRgtLgYrs1E8CHkNSP1VZTTPc9g==} + + /csstype/3.1.1: + resolution: {integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==} + dev: false + /cyclist/1.0.1: + resolution: {integrity: sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk=} dev: true - resolution: - integrity: sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= + /d/1.0.1: + resolution: {integrity: sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==} dependencies: es5-ext: 0.10.53 type: 1.2.0 dev: true - resolution: - integrity: sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + /d3-color/2.0.0: + resolution: {integrity: sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ==} dev: false - resolution: - integrity: sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ== + /d3-dispatch/2.0.0: + resolution: {integrity: sha512-S/m2VsXI7gAti2pBoLClFFTMOO1HTtT0j99AuXLoGFKO6deHDdnv6ZGTxSTTUTgO1zVcv82fCOtDjYK4EECmWA==} dev: false - resolution: - integrity: sha512-S/m2VsXI7gAti2pBoLClFFTMOO1HTtT0j99AuXLoGFKO6deHDdnv6ZGTxSTTUTgO1zVcv82fCOtDjYK4EECmWA== + /d3-drag/2.0.0: + resolution: {integrity: sha512-g9y9WbMnF5uqB9qKqwIIa/921RYWzlUDv9Jl1/yONQwxbOfszAWTCm8u7HOTgJgRDXiRZN56cHT9pd24dmXs8w==} dependencies: d3-dispatch: 2.0.0 d3-selection: 2.0.0 dev: false - resolution: - integrity: sha512-g9y9WbMnF5uqB9qKqwIIa/921RYWzlUDv9Jl1/yONQwxbOfszAWTCm8u7HOTgJgRDXiRZN56cHT9pd24dmXs8w== + /d3-ease/2.0.0: + resolution: {integrity: 
sha512-68/n9JWarxXkOWMshcT5IcjbB+agblQUaIsbnXmrzejn2O82n3p2A9R2zEB9HIEFWKFwPAEDDN8gR0VdSAyyAQ==} dev: false - resolution: - integrity: sha512-68/n9JWarxXkOWMshcT5IcjbB+agblQUaIsbnXmrzejn2O82n3p2A9R2zEB9HIEFWKFwPAEDDN8gR0VdSAyyAQ== + /d3-interpolate/2.0.1: + resolution: {integrity: sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ==} dependencies: d3-color: 2.0.0 dev: false - resolution: - integrity: sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ== + /d3-selection/2.0.0: + resolution: {integrity: sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA==} dev: false - resolution: - integrity: sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA== + /d3-timer/2.0.0: + resolution: {integrity: sha512-TO4VLh0/420Y/9dO3+f9abDEFYeCUr2WZRlxJvbp4HPTQcSylXNiL6yZa9FIUvV1yRiFufl1bszTCLDqv9PWNA==} dev: false - resolution: - integrity: sha512-TO4VLh0/420Y/9dO3+f9abDEFYeCUr2WZRlxJvbp4HPTQcSylXNiL6yZa9FIUvV1yRiFufl1bszTCLDqv9PWNA== + /d3-transition/2.0.0_d3-selection@2.0.0: + resolution: {integrity: sha512-42ltAGgJesfQE3u9LuuBHNbGrI/AJjNL2OAUdclE70UE6Vy239GCBEYD38uBPoLeNsOhFStGpPI0BAOV+HMxog==} + peerDependencies: + d3-selection: '2' dependencies: d3-color: 2.0.0 d3-dispatch: 2.0.0 @@ -5064,11 +5836,9 @@ packages: d3-selection: 2.0.0 d3-timer: 2.0.0 dev: false - peerDependencies: - d3-selection: '2' - resolution: - integrity: sha512-42ltAGgJesfQE3u9LuuBHNbGrI/AJjNL2OAUdclE70UE6Vy239GCBEYD38uBPoLeNsOhFStGpPI0BAOV+HMxog== + /d3-zoom/2.0.0: + resolution: {integrity: sha512-fFg7aoaEm9/jf+qfstak0IYpnesZLiMX6GZvXtUSdv8RH2o4E2qeelgdU09eKS6wGuiGMfcnMI0nTIqWzRHGpw==} dependencies: d3-dispatch: 2.0.0 d3-drag: 2.0.0 @@ -5076,112 +5846,119 @@ packages: d3-selection: 2.0.0 d3-transition: 2.0.0_d3-selection@2.0.0 dev: false - resolution: - integrity: sha512-fFg7aoaEm9/jf+qfstak0IYpnesZLiMX6GZvXtUSdv8RH2o4E2qeelgdU09eKS6wGuiGMfcnMI0nTIqWzRHGpw== + /damerau-levenshtein/1.0.6: + resolution: {integrity: sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug==} dev: true - resolution: - integrity: sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug== + /dashdash/1.14.1: + resolution: {integrity: sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=} + engines: {node: '>=0.10'} dependencies: assert-plus: 1.0.0 dev: true - engines: - node: '>=0.10' - resolution: - integrity: sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= + /data-urls/2.0.0: + resolution: {integrity: sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==} + engines: {node: '>=10'} dependencies: abab: 2.0.5 whatwg-mimetype: 2.3.0 whatwg-url: 8.5.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== - /date-fns/2.19.0: + + /dayjs/1.10.8: + resolution: {integrity: sha512-wbNwDfBHHur9UOzNUjeKUOJ0fCb0a52Wx0xInmQ7Y8FstyajiV1NmK1e00cxsr9YrE9r7yAChE0VvpuY5Rnlow==} dev: false - engines: - node: '>=0.11' - resolution: - integrity: sha512-X3bf2iTPgCAQp9wvjOQytnf5vO5rESYRXlPIVcgSbtT5OTScPcsf9eZU+B/YIkKAtYr5WeCii58BgATrNitlWg== - /dayjs/1.10.4: + + /dayjs/1.11.7: + resolution: {integrity: sha512-+Yw9U6YO5TQohxLcIkrXBeY73WP3ejHWVvx8XCk3gxvQDCTEmS48ZrSZCKciI7Bhl/uCMyxYtE9UqRILmFphkQ==} dev: false - resolution: - integrity: sha512-RI/Hh4kqRc1UKLOAf/T5zdMMX5DQIlDxwUe3wSyMMnEbGunnpENCdbUgM+dW7kXidZqCttBrmw7BhN4TMddkCw== 
+ + /debounce-promise/3.1.2: + resolution: {integrity: sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg==} + dev: false + /debug/2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} dependencies: ms: 2.0.0 - resolution: - integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + /debug/3.2.7: + resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} dependencies: ms: 2.1.3 dev: true - resolution: - integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + /debug/4.3.1: + resolution: {integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true dependencies: ms: 2.1.2 dev: true - engines: - node: '>=6.0' + + /debug/4.3.1_supports-color@5.5.0: + resolution: {integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==} + engines: {node: '>=6.0'} peerDependencies: supports-color: '*' peerDependenciesMeta: supports-color: optional: true - resolution: - integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - /debug/4.3.1_supports-color@5.5.0: dependencies: ms: 2.1.2 supports-color: 5.5.0 dev: false - engines: - node: '>=6.0' + + /debug/4.3.1_supports-color@6.1.0: + resolution: {integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==} + engines: {node: '>=6.0'} peerDependencies: supports-color: '*' peerDependenciesMeta: supports-color: optional: true - resolution: - integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - /debug/4.3.1_supports-color@6.1.0: dependencies: ms: 2.1.2 supports-color: 6.1.0 dev: true - engines: - node: '>=6.0' + + /debug/4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} peerDependencies: supports-color: '*' peerDependenciesMeta: supports-color: optional: true - resolution: - integrity: sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms: 2.1.2 + dev: true + /decamelize/1.2.0: + resolution: {integrity: sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + /decimal.js/10.2.1: + resolution: {integrity: sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw==} dev: true - resolution: - integrity: sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw== + /decode-uri-component/0.2.0: - engines: - node: '>=0.10' - resolution: - integrity: sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + resolution: {integrity: sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=} + engines: {node: '>=0.10'} + /dedent/0.7.0: + resolution: {integrity: sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=} dev: true - resolution: - integrity: sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= + /deep-equal/1.1.1: + resolution: {integrity: sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==} dependencies: 
is-arguments: 1.1.0 is-date-object: 1.0.2 @@ -5190,58 +5967,58 @@ packages: object-keys: 1.1.1 regexp.prototype.flags: 1.3.1 dev: true - resolution: - integrity: sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== + /deep-is/0.1.3: + resolution: {integrity: sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=} + dev: true + + /deepmerge/1.5.2: + resolution: {integrity: sha512-95k0GDqvBjZavkuvzx/YqVLv/6YYa17fz6ILMSf7neqQITCPbnfEnQvEgMPNjH4kgobe7+WIL0yJEHku+H3qtQ==} + engines: {node: '>=0.10.0'} dev: true - resolution: - integrity: sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + /deepmerge/4.2.2: + resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + /default-gateway/4.2.0: + resolution: {integrity: sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==} + engines: {node: '>=6'} dependencies: execa: 1.0.0 ip-regex: 2.1.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== + /define-properties/1.1.3: + resolution: {integrity: sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==} + engines: {node: '>= 0.4'} dependencies: object-keys: 1.1.1 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + /define-property/0.2.5: + resolution: {integrity: sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=} + engines: {node: '>=0.10.0'} dependencies: is-descriptor: 0.1.6 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= + /define-property/1.0.0: + resolution: {integrity: sha1-dp66rz9KY6rTr56NMEybvnm/sOY=} + engines: {node: '>=0.10.0'} dependencies: is-descriptor: 1.0.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-dp66rz9KY6rTr56NMEybvnm/sOY= + /define-property/2.0.2: + resolution: {integrity: sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==} + engines: {node: '>=0.10.0'} dependencies: is-descriptor: 1.0.2 isobject: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + /del/4.1.1: + resolution: {integrity: sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==} + engines: {node: '>=6'} dependencies: '@types/glob': 7.1.3 globby: 6.1.0 @@ -5251,232 +6028,264 @@ packages: pify: 4.0.1 rimraf: 2.7.1 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== + /delayed-stream/1.0.0: + resolution: {integrity: sha1-3zrhmayt+31ECqrgsp4icrJOxhk=} + engines: {node: '>=0.4.0'} dev: true - engines: - node: '>=0.4.0' - resolution: - integrity: sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + /depd/1.1.2: + resolution: {integrity: sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + /des.js/1.0.1: + resolution: {integrity: sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==} dependencies: 
inherits: 2.0.4 minimalistic-assert: 1.0.1 dev: true - resolution: - integrity: sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== + /destroy/1.0.4: + resolution: {integrity: sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=} dev: true - resolution: - integrity: sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= + /detect-newline/3.1.0: + resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + + /detect-node-es/1.1.0: + resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==} + dev: false + /detect-node/2.0.5: - resolution: - integrity: sha512-qi86tE6hRcFHy8jI1m2VG+LaPUR1LhqDa5G8tVjuUXmOrpuAgqsA1pN0+ldgr3aKUH+QLI9hCY/OcRYisERejw== + resolution: {integrity: sha512-qi86tE6hRcFHy8jI1m2VG+LaPUR1LhqDa5G8tVjuUXmOrpuAgqsA1pN0+ldgr3aKUH+QLI9hCY/OcRYisERejw==} + /detect-port-alt/1.1.6: + resolution: {integrity: sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==} + engines: {node: '>= 4.2.1'} + hasBin: true dependencies: address: 1.1.2 debug: 2.6.9 dev: false - engines: - node: '>= 4.2.1' - hasBin: true - resolution: - integrity: sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + /diff-sequences/26.6.2: + resolution: {integrity: sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q==} + engines: {node: '>= 10.14.2'} dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q== + /diffie-hellman/5.0.3: + resolution: {integrity: sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==} dependencies: bn.js: 4.12.0 miller-rabin: 4.0.1 randombytes: 2.1.0 dev: true - resolution: - integrity: sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== + /dir-glob/3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} dependencies: path-type: 4.0.0 - engines: - node: '>=8' - resolution: - integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + /dns-equal/1.0.0: + resolution: {integrity: sha1-s55/HabrCnW6nBcySzR1PEfgZU0=} dev: true - resolution: - integrity: sha1-s55/HabrCnW6nBcySzR1PEfgZU0= + /dns-packet/1.3.1: + resolution: {integrity: sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg==} dependencies: ip: 1.1.5 safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg== + /dns-txt/2.0.2: + resolution: {integrity: sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=} dependencies: buffer-indexof: 1.1.1 dev: true - resolution: - integrity: sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= + /doctrine/1.5.0: + resolution: {integrity: sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=} + engines: {node: '>=0.10.0'} dependencies: esutils: 2.0.3 isarray: 1.0.0 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-N53Ocw9hZvds76TmcHoVmwLFpvo= + /doctrine/2.1.0: + resolution: {integrity: 
sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} + engines: {node: '>=0.10.0'} dependencies: esutils: 2.0.3 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + /doctrine/3.0.0: + resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} + engines: {node: '>=6.0.0'} dependencies: esutils: 2.0.3 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + /dom-accessibility-api/0.5.4: + resolution: {integrity: sha512-TvrjBckDy2c6v6RLxPv5QXOnU+SmF9nBII5621Ve5fu6Z/BDrENurBEvlC1f44lKEUVqOpK4w9E5Idc5/EgkLQ==} dev: true - resolution: - integrity: sha512-TvrjBckDy2c6v6RLxPv5QXOnU+SmF9nBII5621Ve5fu6Z/BDrENurBEvlC1f44lKEUVqOpK4w9E5Idc5/EgkLQ== + /dom-align/1.12.0: - dev: false - resolution: - integrity: sha512-YkoezQuhp3SLFGdOlr5xkqZ640iXrnHAwVYcDg8ZKRUtO7mSzSC2BA5V0VuyAwPSJA4CLIc6EDDJh4bEsD2+zA== + resolution: {integrity: sha512-YkoezQuhp3SLFGdOlr5xkqZ640iXrnHAwVYcDg8ZKRUtO7mSzSC2BA5V0VuyAwPSJA4CLIc6EDDJh4bEsD2+zA==} + /dom-converter/0.2.0: + resolution: {integrity: sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==} dependencies: utila: 0.4.0 dev: true - resolution: - integrity: sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + + /dom-helpers/5.2.1: + resolution: {integrity: sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==} + dependencies: + '@babel/runtime': 7.20.13 + csstype: 3.1.1 + dev: false + /dom-serializer/0.2.2: + resolution: {integrity: sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==} dependencies: domelementtype: 2.1.0 entities: 2.2.0 dev: true - resolution: - integrity: sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + + /dom-walk/0.1.2: + resolution: {integrity: sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==} + dev: true + /domain-browser/1.2.0: + resolution: {integrity: sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==} + engines: {node: '>=0.4', npm: '>=1.2'} dev: true - engines: - node: '>=0.4' - npm: '>=1.2' - resolution: - integrity: sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== + /domelementtype/1.3.1: + resolution: {integrity: sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==} dev: true - resolution: - integrity: sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + /domelementtype/2.1.0: + resolution: {integrity: sha512-LsTgx/L5VpD+Q8lmsXSHW2WpA+eBlZ9HPf3erD1IoPF00/3JKHZ3BknUVA2QGDNu69ZNmyFmCWBSO45XjYKC5w==} dev: true - resolution: - integrity: sha512-LsTgx/L5VpD+Q8lmsXSHW2WpA+eBlZ9HPf3erD1IoPF00/3JKHZ3BknUVA2QGDNu69ZNmyFmCWBSO45XjYKC5w== + /domexception/2.0.1: + resolution: {integrity: sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==} + engines: {node: '>=8'} dependencies: webidl-conversions: 5.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: 
sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + /domhandler/2.4.2: + resolution: {integrity: sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==} dependencies: domelementtype: 1.3.1 dev: true - resolution: - integrity: sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== + /domutils/1.7.0: + resolution: {integrity: sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==} dependencies: dom-serializer: 0.2.2 domelementtype: 1.3.1 dev: true - resolution: - integrity: sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + /dot-case/3.0.4: + resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} dependencies: no-case: 3.0.4 - tslib: 2.1.0 - dev: true - resolution: - integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + tslib: 2.4.1 + /dot-prop/5.3.0: + resolution: {integrity: sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==} + engines: {node: '>=8'} dependencies: is-obj: 2.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== + /dotenv-expand/5.1.0: + resolution: {integrity: sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==} dev: true - resolution: - integrity: sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + /dotenv/8.2.0: + resolution: {integrity: sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==} + engines: {node: '>=8'} + dev: true + + /dumi-assets-types/1.0.0: + resolution: {integrity: sha512-7nhSeWM15vybbUAMPLZsdls2jKoHB2UU4P1RM6kLPucuS8eC/HSmufquFqTTYtX4oIDLHGtil/dVtMreNGwhdA==} + dev: true + + /dumi-theme-default/1.1.13_ac48d56268a7095d7c6000b1357273b0: + resolution: {integrity: sha512-vTjjzcfVko4EslgiEcEAECi/MLas35/oD/Sfs7Eehn10SbzX4DhtEQwwMZVzoc+nuStPmKXsczYAtiWndX6Aig==} + peerDependencies: + '@umijs/preset-dumi': 1.x + react: ^16.13.1 || ^17.0.0 + dependencies: + '@umijs/preset-dumi': 1.1.30_b08c95616290592113c9128c4b0c3f8f + prism-react-renderer: 1.2.1_react@17.0.2 + prismjs: 1.25.0 + rc-tabs: 11.7.3_react-dom@17.0.2+react@17.0.2 + react: 17.0.2 + transitivePeerDependencies: + - react-dom dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== + + /dumi/1.1.30_ab15ddca82409ccfb2f88ffa3dfddc1b: + resolution: {integrity: sha512-qJz4S/K/Ghkmwt8fejFeRYOhRy/98vOL555XuxbV1X03KFfWNWYJR9s2/CNt9vWvW2iunAnZOqp2QCRjgpPRQg==} + hasBin: true + dependencies: + '@umijs/preset-dumi': 1.1.30_b08c95616290592113c9128c4b0c3f8f + umi: 3.5.20_react-router@5.2.0 + transitivePeerDependencies: + - bufferutil + - canvas + - react + - react-dom + - react-router + - supports-color + - typescript + - utf-8-validate + dev: true + /duplexer/0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} dev: false - resolution: - integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + /duplexify/3.7.1: + resolution: {integrity: 
sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==} dependencies: end-of-stream: 1.4.4 inherits: 2.0.4 readable-stream: 2.3.7 stream-shift: 1.0.1 dev: true - resolution: - integrity: sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== + /ecc-jsbn/0.1.2: + resolution: {integrity: sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=} dependencies: jsbn: 0.1.1 safer-buffer: 2.1.2 dev: true - resolution: - integrity: sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= + /ee-first/1.1.1: + resolution: {integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=} dev: true - resolution: - integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + /ejs/2.7.4: - dev: true - engines: - node: '>=0.10.0' + resolution: {integrity: sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==} + engines: {node: '>=0.10.0'} requiresBuild: true - resolution: - integrity: sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA== + dev: true + /electron-to-chromium/1.3.701: - resolution: - integrity: sha512-Zd9ofdIMYHYhG1gvnejQDvC/kqSeXQvtXF0yRURGxgwGqDZm9F9Fm3dYFnm5gyuA7xpXfBlzVLN1sz0FjxpKfw== + resolution: {integrity: sha512-Zd9ofdIMYHYhG1gvnejQDvC/kqSeXQvtXF0yRURGxgwGqDZm9F9Fm3dYFnm5gyuA7xpXfBlzVLN1sz0FjxpKfw==} + /elliptic/6.5.4: + resolution: {integrity: sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==} dependencies: bn.js: 4.12.0 brorand: 1.1.0 @@ -5486,110 +6295,103 @@ packages: minimalistic-assert: 1.0.1 minimalistic-crypto-utils: 1.0.1 dev: true - resolution: - integrity: sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== + /emittery/0.7.2: + resolution: {integrity: sha512-A8OG5SR/ij3SsJdWDJdkkSYUjQdCUx6APQXem0SaEePBSRg4eymGYwBkKo1Y6DU+af/Jn2dBQqDBvjnr9Vi8nQ==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-A8OG5SR/ij3SsJdWDJdkkSYUjQdCUx6APQXem0SaEePBSRg4eymGYwBkKo1Y6DU+af/Jn2dBQqDBvjnr9Vi8nQ== + /emoji-regex/7.0.3: + resolution: {integrity: sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==} dev: true - resolution: - integrity: sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== + /emoji-regex/8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} dev: true - resolution: - integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + /emoji-regex/9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} dev: true - resolution: - integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + /emojis-list/2.1.0: + resolution: {integrity: sha1-TapNnbAPmBmIDHn6RXrlsJof04k=} + engines: {node: '>= 0.10'} dev: true - engines: - node: '>= 0.10' - resolution: - integrity: sha1-TapNnbAPmBmIDHn6RXrlsJof04k= + /emojis-list/3.0.0: - engines: - node: '>= 4' - resolution: - integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==} + engines: {node: '>= 4'} + /encodeurl/1.0.2: + resolution: {integrity: sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=} + engines: {node: '>= 0.8'} dev: true - engines: - 
node: '>= 0.8' - resolution: - integrity: sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + /end-of-stream/1.4.4: + resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} dependencies: once: 1.4.0 dev: true - resolution: - integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + /enhanced-resolve/4.5.0: + resolution: {integrity: sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg==} + engines: {node: '>=6.9.0'} dependencies: graceful-fs: 4.2.6 memory-fs: 0.5.0 tapable: 1.1.3 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg== + /enhanced-resolve/5.7.0: + resolution: {integrity: sha512-6njwt/NsZFUKhM6j9U8hzVyD4E4r0x7NQzhTCbcWOJ0IQjNSAoalWmb0AE51Wn+fwan5qVESWi7t2ToBxs9vrw==} + engines: {node: '>=10.13.0'} dependencies: graceful-fs: 4.2.6 tapable: 2.2.0 dev: true - engines: - node: '>=10.13.0' - resolution: - integrity: sha512-6njwt/NsZFUKhM6j9U8hzVyD4E4r0x7NQzhTCbcWOJ0IQjNSAoalWmb0AE51Wn+fwan5qVESWi7t2ToBxs9vrw== + /enquirer/2.3.6: + resolution: {integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==} + engines: {node: '>=8.6'} dependencies: ansi-colors: 4.1.1 dev: true - engines: - node: '>=8.6' - resolution: - integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + /entities/1.1.2: + resolution: {integrity: sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==} dev: true - resolution: - integrity: sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== + /entities/2.2.0: + resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} dev: true - resolution: - integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + /envinfo/7.7.4: - dev: true - engines: - node: '>=4' + resolution: {integrity: sha512-TQXTYFVVwwluWSFis6K2XKxgrD22jEv0FTuLCQI+OjH7rn93+iY0fSSFM5lrSxFY+H1+B0/cvvlamr3UsBivdQ==} + engines: {node: '>=4'} hasBin: true - resolution: - integrity: sha512-TQXTYFVVwwluWSFis6K2XKxgrD22jEv0FTuLCQI+OjH7rn93+iY0fSSFM5lrSxFY+H1+B0/cvvlamr3UsBivdQ== + dev: true + /errno/0.1.8: + resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==} + hasBin: true dependencies: prr: 1.0.1 dev: true - hasBin: true - resolution: - integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== + /error-ex/1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} dependencies: is-arrayish: 0.2.1 dev: true - resolution: - integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + /error-stack-parser/2.0.6: + resolution: {integrity: sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ==} dependencies: stackframe: 1.2.0 - resolution: - integrity: sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ== + /es-abstract/1.18.0: + resolution: {integrity: sha512-LJzK7MrQa8TS0ja2w3YNLzUgJCGPdPOV1yVvezjNnS89D+VR08+Szt2mz3YB2Dck/+w5tfIq/RoUAFqJJGM2yw==} + engines: {node: '>= 
0.4'} dependencies: call-bind: 1.0.2 es-to-primitive: 1.2.1 @@ -5608,103 +6410,100 @@ packages: string.prototype.trimstart: 1.0.4 unbox-primitive: 1.0.1 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-LJzK7MrQa8TS0ja2w3YNLzUgJCGPdPOV1yVvezjNnS89D+VR08+Szt2mz3YB2Dck/+w5tfIq/RoUAFqJJGM2yw== + + /es-module-lexer/0.7.1: + resolution: {integrity: sha512-MgtWFl5No+4S3TmhDmCz2ObFGm6lEpTnzbQi+Dd+pw4mlTIZTmM2iAs5gRlmx5zS9luzobCSBSI90JM/1/JgOw==} + dev: true + /es-to-primitive/1.2.1: + resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} + engines: {node: '>= 0.4'} dependencies: is-callable: 1.2.3 is-date-object: 1.0.2 is-symbol: 1.0.3 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + /es5-ext/0.10.53: + resolution: {integrity: sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==} dependencies: es6-iterator: 2.0.3 es6-symbol: 3.1.3 next-tick: 1.0.0 dev: true - resolution: - integrity: sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== + + /es5-imcompatible-versions/0.1.73: + resolution: {integrity: sha512-P0SgLrYl9iqlrt0h6n/iz5z5P1uuhnfHp9BA/tcLfqgVIWHNvY4Rm+jtSvnh1ADK4DJOYDwJvxlrHMRoLQMgmQ==} + dev: true + /es6-iterator/2.0.3: + resolution: {integrity: sha1-p96IkUGgWpSwhUQDstCg+/qY87c=} dependencies: d: 1.0.1 es5-ext: 0.10.53 es6-symbol: 3.1.3 dev: true - resolution: - integrity: sha1-p96IkUGgWpSwhUQDstCg+/qY87c= + /es6-symbol/3.1.3: + resolution: {integrity: sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==} dependencies: d: 1.0.1 ext: 1.4.0 dev: true - resolution: - integrity: sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + + /esbuild/0.12.15: + resolution: {integrity: sha512-72V4JNd2+48eOVCXx49xoSWHgC3/cCy96e7mbXKY+WOWghN00cCmlGnwVLRhRHorvv0dgCyuMYBZlM2xDM5OQw==} + hasBin: true + requiresBuild: true + dev: true + /escalade/3.1.1: - engines: - node: '>=6' - resolution: - integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} + engines: {node: '>=6'} + /escape-html/1.0.3: + resolution: {integrity: sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=} dev: true - resolution: - integrity: sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + /escape-string-regexp/1.0.5: - engines: - node: '>=0.8.0' - resolution: - integrity: sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + /escape-string-regexp/2.0.0: - engines: - node: '>=8' - resolution: - integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + + /escape-string-regexp/4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + dev: true + /escodegen/2.0.0: + resolution: {integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==} + engines: {node: 
'>=6.0'} + hasBin: true dependencies: esprima: 4.0.1 estraverse: 5.2.0 esutils: 2.0.3 optionator: 0.8.3 - dev: true - engines: - node: '>=6.0' - hasBin: true optionalDependencies: source-map: 0.6.1 - resolution: - integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== - /eslint-config-prettier/6.15.0_eslint@7.23.0: - dependencies: - eslint: 7.23.0 - get-stdin: 6.0.0 dev: true + + /eslint-config-prettier/6.15.0_eslint@7.23.0: + resolution: {integrity: sha512-a1+kOYLR8wMGustcgAjdydMsQ2A/2ipRPwRKUmfYaSxc9ZPcrku080Ctl6zrZzZNs/U82MjSv+qKREkoq3bJaw==} hasBin: true peerDependencies: eslint: '>=3.14.1' - resolution: - integrity: sha512-a1+kOYLR8wMGustcgAjdydMsQ2A/2ipRPwRKUmfYaSxc9ZPcrku080Ctl6zrZzZNs/U82MjSv+qKREkoq3bJaw== - /eslint-config-react-app/6.0.0_2fb64cc94b95fae32741d239fe65ddca: dependencies: - '@typescript-eslint/eslint-plugin': 4.19.0_821acdc8bc493ad1aa2628c9b724d688 - '@typescript-eslint/parser': 4.19.0_eslint@7.23.0+typescript@4.2.3 - babel-eslint: 10.1.0_eslint@7.23.0 - confusing-browser-globals: 1.0.10 eslint: 7.23.0 - eslint-plugin-flowtype: 5.4.0_eslint@7.23.0 - eslint-plugin-import: 2.22.1_eslint@7.23.0 - eslint-plugin-jest: 24.3.2_c42078cdfffa5b71bbb788f736a64691 - eslint-plugin-jsx-a11y: 6.4.1_eslint@7.23.0 - eslint-plugin-react: 7.23.1_eslint@7.23.0 - eslint-plugin-react-hooks: 4.2.0_eslint@7.23.0 - eslint-plugin-testing-library: 3.10.2_eslint@7.23.0+typescript@4.2.3 + get-stdin: 6.0.0 dev: true - engines: - node: ^10.12.0 || >=12.0.0 + + /eslint-config-react-app/6.0.0_2fb64cc94b95fae32741d239fe65ddca: + resolution: {integrity: sha512-bpoAAC+YRfzq0dsTk+6v9aHm/uqnDwayNAXleMypGl6CpxI9oXXscVHo4fk3eJPIn+rsbtNetB4r/ZIidFIE8A==} + engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: '@typescript-eslint/eslint-plugin': ^4.0.0 '@typescript-eslint/parser': ^4.0.0 @@ -5722,37 +6521,52 @@ packages: optional: true eslint-plugin-testing-library: optional: true - resolution: - integrity: sha512-bpoAAC+YRfzq0dsTk+6v9aHm/uqnDwayNAXleMypGl6CpxI9oXXscVHo4fk3eJPIn+rsbtNetB4r/ZIidFIE8A== + dependencies: + '@typescript-eslint/eslint-plugin': 4.19.0_821acdc8bc493ad1aa2628c9b724d688 + '@typescript-eslint/parser': 4.19.0_eslint@7.23.0+typescript@4.2.3 + babel-eslint: 10.1.0_eslint@7.23.0 + confusing-browser-globals: 1.0.10 + eslint: 7.23.0 + eslint-plugin-flowtype: 5.4.0_eslint@7.23.0 + eslint-plugin-import: 2.22.1_eslint@7.23.0 + eslint-plugin-jest: 24.3.2_c42078cdfffa5b71bbb788f736a64691 + eslint-plugin-jsx-a11y: 6.4.1_eslint@7.23.0 + eslint-plugin-react: 7.23.1_eslint@7.23.0 + eslint-plugin-react-hooks: 4.2.0_eslint@7.23.0 + eslint-plugin-testing-library: 3.10.2_eslint@7.23.0+typescript@4.2.3 + dev: true + /eslint-import-resolver-node/0.3.4: + resolution: {integrity: sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA==} dependencies: debug: 2.6.9 resolve: 1.18.1 dev: true - resolution: - integrity: sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA== + /eslint-module-utils/2.6.0: + resolution: {integrity: sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==} + engines: {node: '>=4'} dependencies: debug: 2.6.9 pkg-dir: 2.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA== + /eslint-plugin-flowtype/5.4.0_eslint@7.23.0: + resolution: {integrity: 
sha512-O0s0iTT5UxYuoOpHMLSIO2qZMyvrb9shhk1EM5INNGtJ2CffrfUmsnh6TVsnoT41fkXIEndP630WNovhoO87xQ==} + engines: {node: ^10.12.0 || >=12.0.0} + peerDependencies: + eslint: ^7.1.0 dependencies: eslint: 7.23.0 lodash: 4.17.21 string-natural-compare: 3.0.1 dev: true - engines: - node: ^10.12.0 || >=12.0.0 - peerDependencies: - eslint: ^7.1.0 - resolution: - integrity: sha512-O0s0iTT5UxYuoOpHMLSIO2qZMyvrb9shhk1EM5INNGtJ2CffrfUmsnh6TVsnoT41fkXIEndP630WNovhoO87xQ== + /eslint-plugin-import/2.22.1_eslint@7.23.0: + resolution: {integrity: sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw==} + engines: {node: '>=4'} + peerDependencies: + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 dependencies: array-includes: 3.1.3 array.prototype.flat: 1.2.4 @@ -5769,30 +6583,30 @@ packages: resolve: 1.18.1 tsconfig-paths: 3.9.0 dev: true - engines: - node: '>=4' - peerDependencies: - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 - resolution: - integrity: sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw== + /eslint-plugin-jest/24.3.2_c42078cdfffa5b71bbb788f736a64691: - dependencies: - '@typescript-eslint/eslint-plugin': 4.19.0_821acdc8bc493ad1aa2628c9b724d688 - '@typescript-eslint/experimental-utils': 4.19.0_eslint@7.23.0+typescript@4.2.3 - eslint: 7.23.0 - dev: true - engines: - node: '>=10' + resolution: {integrity: sha512-cicWDr+RvTAOKS3Q/k03+Z3odt3VCiWamNUHWd6QWbVQWcYJyYgUTu8x0mx9GfeDEimawU5kQC+nQ3MFxIM6bw==} + engines: {node: '>=10'} peerDependencies: '@typescript-eslint/eslint-plugin': '>= 4' eslint: '>=5' - typescript: '*' peerDependenciesMeta: '@typescript-eslint/eslint-plugin': optional: true - resolution: - integrity: sha512-cicWDr+RvTAOKS3Q/k03+Z3odt3VCiWamNUHWd6QWbVQWcYJyYgUTu8x0mx9GfeDEimawU5kQC+nQ3MFxIM6bw== + dependencies: + '@typescript-eslint/eslint-plugin': 4.19.0_821acdc8bc493ad1aa2628c9b724d688 + '@typescript-eslint/experimental-utils': 4.19.0_eslint@7.23.0+typescript@4.2.3 + eslint: 7.23.0 + transitivePeerDependencies: + - supports-color + - typescript + dev: true + /eslint-plugin-jsx-a11y/6.4.1_eslint@7.23.0: + resolution: {integrity: sha512-0rGPJBbwHoGNPU73/QCLP/vveMlM1b1Z9PponxO87jfr6tuH5ligXbDT6nHSSzBC8ovX2Z+BQu7Bk5D/Xgq9zg==} + engines: {node: '>=4.0'} + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 dependencies: '@babel/runtime': 7.13.10 aria-query: 4.2.2 @@ -5807,21 +6621,10 @@ packages: jsx-ast-utils: 3.2.0 language-tags: 1.0.5 dev: true - engines: - node: '>=4.0' - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 - resolution: - integrity: sha512-0rGPJBbwHoGNPU73/QCLP/vveMlM1b1Z9PponxO87jfr6tuH5ligXbDT6nHSSzBC8ovX2Z+BQu7Bk5D/Xgq9zg== + /eslint-plugin-prettier/3.3.1_9b658f06c6707d3d08f34c93bae76087: - dependencies: - eslint: 7.23.0 - eslint-config-prettier: 6.15.0_eslint@7.23.0 - prettier: 2.2.1 - prettier-linter-helpers: 1.0.0 - dev: true - engines: - node: '>=6.0.0' + resolution: {integrity: sha512-Rq3jkcFY8RYeQLgk2cCwuc0P7SEFwDravPhsJZOQ5N4YI4DSg50NyqJ/9gdZHzQlHf8MvafSesbNJCcP/FF6pQ==} + engines: {node: '>=6.0.0'} peerDependencies: eslint: '>=5.0.0' eslint-config-prettier: '*' @@ -5829,19 +6632,27 @@ packages: peerDependenciesMeta: eslint-config-prettier: optional: true - resolution: - integrity: sha512-Rq3jkcFY8RYeQLgk2cCwuc0P7SEFwDravPhsJZOQ5N4YI4DSg50NyqJ/9gdZHzQlHf8MvafSesbNJCcP/FF6pQ== - /eslint-plugin-react-hooks/4.2.0_eslint@7.23.0: dependencies: eslint: 7.23.0 + eslint-config-prettier: 6.15.0_eslint@7.23.0 + prettier: 2.2.1 + prettier-linter-helpers: 1.0.0 dev: true - engines: 
- node: '>=10' + + /eslint-plugin-react-hooks/4.2.0_eslint@7.23.0: + resolution: {integrity: sha512-623WEiZJqxR7VdxFCKLI6d6LLpwJkGPYKODnkH3D7WpOG5KM8yWueBd8TLsNAetEJNF5iJmolaAKO3F8yzyVBQ==} + engines: {node: '>=10'} peerDependencies: eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 - resolution: - integrity: sha512-623WEiZJqxR7VdxFCKLI6d6LLpwJkGPYKODnkH3D7WpOG5KM8yWueBd8TLsNAetEJNF5iJmolaAKO3F8yzyVBQ== + dependencies: + eslint: 7.23.0 + dev: true + /eslint-plugin-react/7.23.1_eslint@7.23.0: + resolution: {integrity: sha512-MvFGhZjI8Z4HusajmSw0ougGrq3Gs4vT/0WgwksZgf5RrLrRa2oYAw56okU4tZJl8+j7IYNuTM+2RnFEuTSdRQ==} + engines: {node: '>=4'} + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 dependencies: array-includes: 3.1.3 array.prototype.flatmap: 1.2.4 @@ -5857,64 +6668,59 @@ packages: resolve: 2.0.0-next.3 string.prototype.matchall: 4.0.4 dev: true - engines: - node: '>=4' - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 - resolution: - integrity: sha512-MvFGhZjI8Z4HusajmSw0ougGrq3Gs4vT/0WgwksZgf5RrLrRa2oYAw56okU4tZJl8+j7IYNuTM+2RnFEuTSdRQ== + /eslint-plugin-testing-library/3.10.2_eslint@7.23.0+typescript@4.2.3: + resolution: {integrity: sha512-WAmOCt7EbF1XM8XfbCKAEzAPnShkNSwcIsAD2jHdsMUT9mZJPjLCG7pMzbcC8kK366NOuGip8HKLDC+Xk4yIdA==} + engines: {node: ^10.12.0 || >=12.0.0, npm: '>=6'} + peerDependencies: + eslint: ^5 || ^6 || ^7 dependencies: '@typescript-eslint/experimental-utils': 3.10.1_eslint@7.23.0+typescript@4.2.3 eslint: 7.23.0 + transitivePeerDependencies: + - supports-color + - typescript dev: true - engines: - node: ^10.12.0 || >=12.0.0 - npm: '>=6' - peerDependencies: - eslint: ^5 || ^6 || ^7 - typescript: '*' - resolution: - integrity: sha512-WAmOCt7EbF1XM8XfbCKAEzAPnShkNSwcIsAD2jHdsMUT9mZJPjLCG7pMzbcC8kK366NOuGip8HKLDC+Xk4yIdA== + /eslint-scope/4.0.3: + resolution: {integrity: sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==} + engines: {node: '>=4.0.0'} dependencies: esrecurse: 4.3.0 estraverse: 4.3.0 dev: true - engines: - node: '>=4.0.0' - resolution: - integrity: sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== + /eslint-scope/5.1.1: + resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} + engines: {node: '>=8.0.0'} dependencies: esrecurse: 4.3.0 estraverse: 4.3.0 dev: true - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + /eslint-utils/2.1.0: + resolution: {integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==} + engines: {node: '>=6'} dependencies: eslint-visitor-keys: 1.3.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + /eslint-visitor-keys/1.3.0: + resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + /eslint-visitor-keys/2.0.0: + resolution: {integrity: sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: 
sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== + /eslint-webpack-plugin/2.5.3_eslint@7.23.0+webpack@4.44.2: + resolution: {integrity: sha512-LewNevZf9ghDCxCGT6QltNWVi8KIYWc4LKcin8K9Azh1hypG7YAmobUDIU67fAPa+eMjRnU4rjEkLbYI1w5/UA==} + engines: {node: '>= 10.13.0'} + peerDependencies: + eslint: ^7.0.0 + webpack: ^4.0.0 || ^5.0.0 dependencies: '@types/eslint': 7.2.7 arrify: 2.0.1 @@ -5924,14 +6730,11 @@ packages: schema-utils: 3.0.0 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>= 10.13.0' - peerDependencies: - eslint: ^7.0.0 - webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-LewNevZf9ghDCxCGT6QltNWVi8KIYWc4LKcin8K9Azh1hypG7YAmobUDIU67fAPa+eMjRnU4rjEkLbYI1w5/UA== + /eslint/7.23.0: + resolution: {integrity: sha512-kqvNVbdkjzpFy0XOszNwjkKzZ+6TcwCQ/h+ozlcIWwaimBBuhlQ4nN6kbiM2L+OjDcznkTJxzYfRFH92sx4a0Q==} + engines: {node: ^10.12.0 || >=12.0.0} + hasBin: true dependencies: '@babel/code-frame': 7.12.11 '@eslint/eslintrc': 0.4.0 @@ -5970,107 +6773,102 @@ packages: table: 6.0.8 text-table: 0.2.0 v8-compile-cache: 2.3.0 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: ^10.12.0 || >=12.0.0 - hasBin: true - resolution: - integrity: sha512-kqvNVbdkjzpFy0XOszNwjkKzZ+6TcwCQ/h+ozlcIWwaimBBuhlQ4nN6kbiM2L+OjDcznkTJxzYfRFH92sx4a0Q== + + /esm/3.2.25: + resolution: {integrity: sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==} + engines: {node: '>=6'} + dev: true + /espree/7.3.1: + resolution: {integrity: sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==} + engines: {node: ^10.12.0 || >=12.0.0} dependencies: acorn: 7.4.1 acorn-jsx: 5.3.1_acorn@7.4.1 eslint-visitor-keys: 1.3.0 dev: true - engines: - node: ^10.12.0 || >=12.0.0 - resolution: - integrity: sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + /esprima/4.0.1: - dev: true - engines: - node: '>=4' + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} hasBin: true - resolution: - integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + dev: true + /esquery/1.4.0: + resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==} + engines: {node: '>=0.10'} dependencies: estraverse: 5.2.0 dev: true - engines: - node: '>=0.10' - resolution: - integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + /esrecurse/4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} dependencies: estraverse: 5.2.0 dev: true - engines: - node: '>=4.0' - resolution: - integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + /estraverse/4.3.0: + resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} + engines: {node: '>=4.0'} dev: true - engines: - node: '>=4.0' - resolution: - integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + /estraverse/5.2.0: + resolution: {integrity: sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==} + engines: {node: '>=4.0'} dev: true - engines: - 
node: '>=4.0' - resolution: - integrity: sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== + /estree-walker/0.6.1: + resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==} dev: true - resolution: - integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w== + /estree-walker/1.0.1: + resolution: {integrity: sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==} dev: true - resolution: - integrity: sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + /esutils/2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + /etag/1.8.1: + resolution: {integrity: sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + /eventemitter3/4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} dev: true - resolution: - integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + /events/3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} dev: true - engines: - node: '>=0.8.x' - resolution: - integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + /eventsource/1.1.0: + resolution: {integrity: sha512-VSJjT5oCNrFvCS6igjzPAt5hBzQ2qPBFIbJ03zLI9SE0mxwZpMw6BfJrbFHm1a141AavMEB8JHmBhWAd66PfCg==} + engines: {node: '>=0.12.0'} dependencies: original: 1.0.2 dev: true - engines: - node: '>=0.12.0' - resolution: - integrity: sha512-VSJjT5oCNrFvCS6igjzPAt5hBzQ2qPBFIbJ03zLI9SE0mxwZpMw6BfJrbFHm1a141AavMEB8JHmBhWAd66PfCg== + /evp_bytestokey/1.0.3: + resolution: {integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==} dependencies: md5.js: 1.3.5 safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== + /exec-sh/0.3.6: + resolution: {integrity: sha512-nQn+hI3yp+oD0huYhKwvYI32+JFeq+XkNcD1GAo3Y/MjxsfVGmrrzrnzjWiNY6f+pUCP440fThsFh5gZrRAU/w==} dev: true - resolution: - integrity: sha512-nQn+hI3yp+oD0huYhKwvYI32+JFeq+XkNcD1GAo3Y/MjxsfVGmrrzrnzjWiNY6f+pUCP440fThsFh5gZrRAU/w== + /execa/1.0.0: + resolution: {integrity: sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==} + engines: {node: '>=6'} dependencies: cross-spawn: 6.0.5 get-stream: 4.1.0 @@ -6080,11 +6878,10 @@ packages: signal-exit: 3.0.3 strip-eof: 1.0.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== + /execa/4.1.0: + resolution: {integrity: sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==} + engines: {node: '>=10'} dependencies: cross-spawn: 7.0.3 get-stream: 5.2.0 @@ -6096,11 +6893,10 @@ packages: signal-exit: 3.0.3 strip-final-newline: 2.0.0 
dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== + /execa/5.0.0: + resolution: {integrity: sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ==} + engines: {node: '>=10'} dependencies: cross-spawn: 7.0.3 get-stream: 6.0.0 @@ -6112,17 +6908,15 @@ packages: signal-exit: 3.0.3 strip-final-newline: 2.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ== + /exit/0.1.2: + resolution: {integrity: sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=} + engines: {node: '>= 0.8.0'} dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= + /expand-brackets/2.1.4: + resolution: {integrity: sha1-t3c14xXOMPa27/D4OwQVGiJEliI=} + engines: {node: '>=0.10.0'} dependencies: debug: 2.6.9 define-property: 0.2.5 @@ -6131,11 +6925,10 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-t3c14xXOMPa27/D4OwQVGiJEliI= + /expect/26.6.2: + resolution: {integrity: sha512-9/hlOBkQl2l/PLHJx6JjoDF6xPKcJEsUlWKb23rKE7KzeDqUZKXKNMW27KIue5JMdBV9HgmoJPcc8HtO85t9IA==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 ansi-styles: 4.3.0 @@ -6144,11 +6937,10 @@ packages: jest-message-util: 26.6.2 jest-regex-util: 26.0.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-9/hlOBkQl2l/PLHJx6JjoDF6xPKcJEsUlWKb23rKE7KzeDqUZKXKNMW27KIue5JMdBV9HgmoJPcc8HtO85t9IA== + /express/4.17.1: + resolution: {integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==} + engines: {node: '>= 0.10.0'} dependencies: accepts: 1.3.7 array-flatten: 1.1.1 @@ -6181,36 +6973,33 @@ packages: utils-merge: 1.0.1 vary: 1.1.2 dev: true - engines: - node: '>= 0.10.0' - resolution: - integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== + /ext/1.4.0: + resolution: {integrity: sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==} dependencies: type: 2.5.0 dev: true - resolution: - integrity: sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A== + /extend-shallow/2.0.1: + resolution: {integrity: sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=} + engines: {node: '>=0.10.0'} dependencies: is-extendable: 0.1.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= + /extend-shallow/3.0.2: + resolution: {integrity: sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=} + engines: {node: '>=0.10.0'} dependencies: assign-symbols: 1.0.0 is-extendable: 1.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= + /extend/3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} dev: true - resolution: - integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + /extglob/2.0.4: + resolution: {integrity: sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==} + engines: {node: '>=0.10.0'} dependencies: array-unique: 0.3.2 define-property: 1.0.0 @@ -6220,24 +7009,22 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 - engines: - node: '>=0.10.0' - resolution: - integrity: 
sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + /extsprintf/1.3.0: + resolution: {integrity: sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=} + engines: {'0': node >=0.6.0} dev: true - engines: - '0': node >=0.6.0 - resolution: - integrity: sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + /fast-deep-equal/3.1.3: - resolution: - integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + /fast-diff/1.2.0: + resolution: {integrity: sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==} dev: true - resolution: - integrity: sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== + /fast-glob/3.2.5: + resolution: {integrity: sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg==} + engines: {node: '>=8'} dependencies: '@nodelib/fs.stat': 2.0.4 '@nodelib/fs.walk': 1.2.6 @@ -6245,118 +7032,124 @@ packages: merge2: 1.4.1 micromatch: 4.0.2 picomatch: 2.2.2 - engines: - node: '>=8' - resolution: - integrity: sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== + /fast-json-stable-stringify/2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} dev: true - resolution: - integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + /fast-levenshtein/2.0.6: + resolution: {integrity: sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=} dev: true - resolution: - integrity: sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + + /fast-memoize/2.5.2: + resolution: {integrity: sha512-Ue0LwpDYErFbmNnZSF0UH6eImUwDmogUO1jyE+JbN2gsQz/jICm1Ve7t9QT0rNSsfJt+Hs4/S3GnsDVjL4HVrw==} + dev: false + /fast-shallow-equal/1.0.0: + resolution: {integrity: sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw==} dev: false - resolution: - integrity: sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw== + /fastest-levenshtein/1.0.12: + resolution: {integrity: sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==} dev: true - resolution: - integrity: sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow== + /fastest-stable-stringify/2.0.2: + resolution: {integrity: sha512-bijHueCGd0LqqNK9b5oCMHc0MluJAx0cwqASgbWMvkO01lCYgIhacVRLcaDz3QnyYIRNJRDwMb41VuT6pHJ91Q==} dev: false - resolution: - integrity: sha512-bijHueCGd0LqqNK9b5oCMHc0MluJAx0cwqASgbWMvkO01lCYgIhacVRLcaDz3QnyYIRNJRDwMb41VuT6pHJ91Q== + /fastq/1.11.0: + resolution: {integrity: sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g==} dependencies: reusify: 1.0.4 - resolution: - integrity: sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g== + + /fault/1.0.4: + resolution: {integrity: sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==} + dependencies: + format: 0.2.2 + dev: true + /faye-websocket/0.10.0: + resolution: {integrity: sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=} + engines: {node: '>=0.4.0'} dependencies: websocket-driver: 0.6.5 dev: true - engines: - node: '>=0.4.0' - resolution: - integrity: sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= + 
/faye-websocket/0.11.3: + resolution: {integrity: sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==} + engines: {node: '>=0.8.0'} dependencies: websocket-driver: 0.7.4 dev: true - engines: - node: '>=0.8.0' - resolution: - integrity: sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== + /fb-watchman/2.0.1: + resolution: {integrity: sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==} dependencies: bser: 2.1.1 dev: true - resolution: - integrity: sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + /figgy-pudding/3.5.2: + resolution: {integrity: sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw==} dev: true - resolution: - integrity: sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== + /figures/3.2.0: + resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} + engines: {node: '>=8'} dependencies: escape-string-regexp: 1.0.5 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + /file-entry-cache/6.0.1: + resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} + engines: {node: ^10.12.0 || >=12.0.0} dependencies: flat-cache: 3.0.4 dev: true - engines: - node: ^10.12.0 || >=12.0.0 - resolution: - integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + /file-loader/6.1.1_webpack@4.44.2: + resolution: {integrity: sha512-Klt8C4BjWSXYQAfhpYYkG4qHNTna4toMHEbWrI5IuVoxbU6uiDKeKAP99R8mmbJi3lvewn/jQBOgU4+NS3tDQw==} + engines: {node: '>= 10.13.0'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 dependencies: loader-utils: 2.0.0 schema-utils: 3.0.0 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>= 10.13.0' - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-Klt8C4BjWSXYQAfhpYYkG4qHNTna4toMHEbWrI5IuVoxbU6uiDKeKAP99R8mmbJi3lvewn/jQBOgU4+NS3tDQw== + /file-uri-to-path/1.0.0: + resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} dev: true optional: true - resolution: - integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== + /filesize/6.1.0: + resolution: {integrity: sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg==} + engines: {node: '>= 0.4.0'} dev: false - engines: - node: '>= 0.4.0' - resolution: - integrity: sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg== + /fill-range/4.0.0: + resolution: {integrity: sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=} + engines: {node: '>=0.10.0'} dependencies: extend-shallow: 2.0.1 is-number: 3.0.0 repeat-string: 1.6.1 to-regex-range: 2.1.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= + /fill-range/7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} dependencies: to-regex-range: 5.0.1 - engines: - node: '>=8' - resolution: - integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + + 
/filter-obj/1.1.0: + resolution: {integrity: sha1-mzERErxsYSehbgFsbF1/GeCAXFs=} + engines: {node: '>=0.10.0'} + dev: true + /finalhandler/1.1.2: + resolution: {integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==} + engines: {node: '>= 0.8'} dependencies: debug: 2.6.9 encodeurl: 1.0.2 @@ -6366,97 +7159,107 @@ packages: statuses: 1.5.0 unpipe: 1.0.0 dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== + /find-cache-dir/2.1.0: + resolution: {integrity: sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==} + engines: {node: '>=6'} dependencies: commondir: 1.0.1 make-dir: 2.1.0 pkg-dir: 3.0.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== + /find-cache-dir/3.3.1: + resolution: {integrity: sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==} + engines: {node: '>=8'} dependencies: commondir: 1.0.1 make-dir: 3.1.0 pkg-dir: 4.2.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ== + /find-up/2.1.0: + resolution: {integrity: sha1-RdG35QbHF93UgndaK3eSCjwMV6c=} + engines: {node: '>=4'} dependencies: locate-path: 2.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + /find-up/3.0.0: + resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==} + engines: {node: '>=6'} dependencies: locate-path: 3.0.0 - engines: - node: '>=6' - resolution: - integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + /find-up/4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} dependencies: locate-path: 5.0.0 path-exists: 4.0.0 - engines: - node: '>=8' - resolution: - integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + /flat-cache/3.0.4: + resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==} + engines: {node: ^10.12.0 || >=12.0.0} dependencies: flatted: 3.1.1 rimraf: 3.0.2 dev: true - engines: - node: ^10.12.0 || >=12.0.0 - resolution: - integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + /flatted/3.1.1: + resolution: {integrity: sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA==} dev: true - resolution: - integrity: sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== + /flatten/1.0.3: + resolution: {integrity: sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg==} dev: true - resolution: - integrity: sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg== + /flush-write-stream/1.1.1: + resolution: {integrity: sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==} dependencies: inherits: 2.0.4 readable-stream: 2.3.7 dev: true - resolution: - integrity: 
sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== + + /focus-lock/0.11.5: + resolution: {integrity: sha512-1mTr6pl9HBpJ8CqY7hRc38MCrcuTZIeYAkBD1gBTzbx5/to+bRBaBYtJ68iDq7ryTzAAbKrG3dVKjkrWTaaEaw==} + engines: {node: '>=10'} + dependencies: + tslib: 2.5.0 + dev: false + /follow-redirects/1.13.3: - engines: - node: '>=4.0' + resolution: {integrity: sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + /follow-redirects/1.13.3_debug@4.3.1: + resolution: {integrity: sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==} + engines: {node: '>=4.0'} peerDependencies: debug: '*' peerDependenciesMeta: debug: optional: true - resolution: - integrity: sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA== + dependencies: + debug: 4.3.1_supports-color@6.1.0 + dev: true + /for-in/1.0.2: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= + resolution: {integrity: sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=} + engines: {node: '>=0.10.0'} + /forever-agent/0.6.1: + resolution: {integrity: sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=} dev: true - resolution: - integrity: sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= + /fork-ts-checker-webpack-plugin/4.1.6: + resolution: {integrity: sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw==} + engines: {node: '>=6.11.5', yarn: '>=1.0.0'} dependencies: '@babel/code-frame': 7.10.4 chalk: 2.4.2 @@ -6466,213 +7269,197 @@ packages: tapable: 1.1.3 worker-rpc: 0.1.1 dev: false - engines: - node: '>=6.11.5' - yarn: '>=1.0.0' - resolution: - integrity: sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw== + /form-data/2.3.3: + resolution: {integrity: sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==} + engines: {node: '>= 0.12'} dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.29 dev: true - engines: - node: '>= 0.12' - resolution: - integrity: sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + + /format/0.2.2: + resolution: {integrity: sha1-1hcBB+nv3E7TDJ3DkBbflCtctYs=} + engines: {node: '>=0.4.x'} + dev: true + /forwarded/0.1.2: + resolution: {integrity: sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= + /fragment-cache/0.2.1: + resolution: {integrity: sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=} + engines: {node: '>=0.10.0'} dependencies: map-cache: 0.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= + /fresh/0.5.2: + resolution: {integrity: sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + /from2/2.3.0: + resolution: {integrity: sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=} dependencies: inherits: 2.0.4 readable-stream: 2.3.7 dev: true - resolution: - integrity: sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= + /fs-extra/7.0.1: + resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} + engines: {node: '>=6 <7 || >=8'} dependencies: graceful-fs: 4.2.6 jsonfile: 4.0.0 universalify: 0.1.2 dev: 
true - engines: - node: '>=6 <7 || >=8' - resolution: - integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== + /fs-extra/8.1.0: + resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} + engines: {node: '>=6 <7 || >=8'} dependencies: graceful-fs: 4.2.6 jsonfile: 4.0.0 universalify: 0.1.2 dev: true - engines: - node: '>=6 <7 || >=8' - resolution: - integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== + /fs-extra/9.1.0: + resolution: {integrity: sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==} + engines: {node: '>=10'} dependencies: at-least-node: 1.0.0 graceful-fs: 4.2.6 jsonfile: 6.1.0 universalify: 2.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + /fs-minipass/2.1.0: + resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} + engines: {node: '>= 8'} dependencies: minipass: 3.1.3 dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== + /fs-write-stream-atomic/1.0.10: + resolution: {integrity: sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=} dependencies: graceful-fs: 4.2.6 iferr: 0.1.5 imurmurhash: 0.1.4 readable-stream: 2.3.7 dev: true - resolution: - integrity: sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= + /fs.realpath/1.0.0: - resolution: - integrity: sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + resolution: {integrity: sha1-FQStJSMVjKpA20onh8sBQRmU6k8=} + /fsevents/1.2.13: + resolution: {integrity: sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==} + engines: {node: '>= 4.0'} + os: [darwin] + deprecated: fsevents 1 will break on node v14+ and could be using insecure binaries. Upgrade to fsevents 2. + requiresBuild: true dependencies: bindings: 1.5.0 nan: 2.14.2 - deprecated: fsevents 1 will break on node v14+ and could be using insecure binaries. Upgrade to fsevents 2. 
dev: true - engines: - node: '>= 4.0' optional: true - os: - - darwin - requiresBuild: true - resolution: - integrity: sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== + /fsevents/2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true dev: true - engines: - node: ^8.16.0 || ^10.6.0 || >=11.0.0 optional: true - os: - - darwin - resolution: - integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + /function-bind/1.1.1: - dev: true - resolution: - integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + /functional-red-black-tree/1.0.1: + resolution: {integrity: sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=} dev: true - resolution: - integrity: sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + /gensync/1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + /get-caller-file/2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} dev: true - engines: - node: 6.* || 8.* || >= 10.* - resolution: - integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + /get-intrinsic/1.1.1: + resolution: {integrity: sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==} dependencies: function-bind: 1.1.1 has: 1.0.3 has-symbols: 1.0.2 - dev: true - resolution: - integrity: sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + /get-own-enumerable-property-symbols/3.0.2: + resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} dev: true - resolution: - integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + /get-package-type/0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} dev: true - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + /get-stdin/6.0.0: + resolution: {integrity: sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g== + /get-stream/4.1.0: + resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==} + engines: {node: '>=6'} dependencies: pump: 3.0.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + /get-stream/5.2.0: + resolution: {integrity: 
sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==} + engines: {node: '>=8'} dependencies: pump: 3.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + /get-stream/6.0.0: + resolution: {integrity: sha512-A1B3Bh1UmL0bidM/YX2NsCOTnGJePL9rO/M+Mw3m9f2gUpfokS0hi5Eah0WSUEWZdZhIZtMjkIYS7mDfOqNHbg==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-A1B3Bh1UmL0bidM/YX2NsCOTnGJePL9rO/M+Mw3m9f2gUpfokS0hi5Eah0WSUEWZdZhIZtMjkIYS7mDfOqNHbg== + /get-value/2.0.6: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= + resolution: {integrity: sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=} + engines: {node: '>=0.10.0'} + /getpass/0.1.7: + resolution: {integrity: sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=} dependencies: assert-plus: 1.0.0 dev: true - resolution: - integrity: sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= + + /github-slugger/1.4.0: + resolution: {integrity: sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==} + dev: true + /glob-parent/3.1.0: + resolution: {integrity: sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=} dependencies: is-glob: 3.1.0 path-dirname: 1.0.2 dev: true - resolution: - integrity: sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= + /glob-parent/5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} dependencies: is-glob: 4.0.1 - engines: - node: '>= 6' - resolution: - integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + /glob/7.1.6: + resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==} dependencies: fs.realpath: 1.0.0 inflight: 1.0.6 @@ -6680,48 +7467,51 @@ packages: minimatch: 3.0.4 once: 1.4.0 path-is-absolute: 1.0.1 - resolution: - integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + /global-modules/2.0.0: + resolution: {integrity: sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==} + engines: {node: '>=6'} dependencies: global-prefix: 3.0.0 dev: false - engines: - node: '>=6' - resolution: - integrity: sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + /global-prefix/3.0.0: + resolution: {integrity: sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==} + engines: {node: '>=6'} dependencies: ini: 1.3.8 kind-of: 6.0.3 which: 1.3.1 dev: false - engines: - node: '>=6' - resolution: - integrity: sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + + /global/4.4.0: + resolution: {integrity: sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==} + dependencies: + min-document: 2.19.0 + process: 0.11.10 + dev: true + /globals/11.12.0: - engines: - node: '>=4' - resolution: - integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + /globals/12.4.0: + resolution: {integrity: sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==} + 
engines: {node: '>=8'} dependencies: type-fest: 0.8.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== + /globals/13.7.0: + resolution: {integrity: sha512-Aipsz6ZKRxa/xQkZhNg0qIWXT6x6rD46f6x/PCnBomlttdIyAPak4YD9jTmKpZ72uROSMU87qJtcgpgHaVchiA==} + engines: {node: '>=8'} dependencies: type-fest: 0.20.2 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-Aipsz6ZKRxa/xQkZhNg0qIWXT6x6rD46f6x/PCnBomlttdIyAPak4YD9jTmKpZ72uROSMU87qJtcgpgHaVchiA== + /globby/11.0.1: + resolution: {integrity: sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ==} + engines: {node: '>=10'} dependencies: array-union: 2.1.0 dir-glob: 3.0.1 @@ -6730,11 +7520,10 @@ packages: merge2: 1.4.1 slash: 3.0.0 dev: false - engines: - node: '>=10' - resolution: - integrity: sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== + /globby/11.0.3: + resolution: {integrity: sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg==} + engines: {node: '>=10'} dependencies: array-union: 2.1.0 dir-glob: 3.0.1 @@ -6743,11 +7532,10 @@ packages: merge2: 1.4.1 slash: 3.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== + /globby/6.1.0: + resolution: {integrity: sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=} + engines: {node: '>=0.10.0'} dependencies: array-union: 1.0.2 glob: 7.1.6 @@ -6755,210 +7543,338 @@ packages: pify: 2.3.0 pinkie-promise: 2.0.1 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= + /graceful-fs/4.2.6: + resolution: {integrity: sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==} + dev: true + + /graceful-fs/4.2.9: + resolution: {integrity: sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==} dev: true - resolution: - integrity: sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== + optional: true + /growly/1.3.0: + resolution: {integrity: sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=} dev: true optional: true - resolution: - integrity: sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= + /gzip-size/5.1.1: + resolution: {integrity: sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==} + engines: {node: '>=6'} dependencies: duplexer: 0.1.2 pify: 4.0.1 dev: false - engines: - node: '>=6' - resolution: - integrity: sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA== + + /hamt_plus/1.0.2: + resolution: {integrity: sha1-4hwlKWjH4zsg9qGwlM2FeHomVgE=} + dev: false + /handle-thing/2.0.1: + resolution: {integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==} dev: true - resolution: - integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + /har-schema/2.0.0: + resolution: {integrity: sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= + /har-validator/5.1.5: + resolution: {integrity: sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==} + engines: {node: '>=6'} + deprecated: this library is no longer supported 
dependencies: ajv: 6.12.6 har-schema: 2.0.0 - deprecated: this library is no longer supported dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + /harmony-reflect/1.6.1: + resolution: {integrity: sha512-WJTeyp0JzGtHcuMsi7rw2VwtkvLa+JyfEKJCFyfcS0+CDkjQ5lHPu7zEhFZP+PDSRrEgXa5Ah0l1MbgbE41XjA==} dev: true - resolution: - integrity: sha512-WJTeyp0JzGtHcuMsi7rw2VwtkvLa+JyfEKJCFyfcS0+CDkjQ5lHPu7zEhFZP+PDSRrEgXa5Ah0l1MbgbE41XjA== + /has-bigints/1.0.1: + resolution: {integrity: sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==} dev: true - resolution: - integrity: sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== + /has-flag/3.0.0: - engines: - node: '>=4' - resolution: - integrity: sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + resolution: {integrity: sha1-tdRU3CGZriJWmfNGfloH87lVuv0=} + engines: {node: '>=4'} + /has-flag/4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + /has-symbols/1.0.2: - dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== + resolution: {integrity: sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==} + engines: {node: '>= 0.4'} + /has-value/0.3.1: + resolution: {integrity: sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=} + engines: {node: '>=0.10.0'} dependencies: get-value: 2.0.6 has-values: 0.1.4 isobject: 2.1.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= + /has-value/1.0.0: + resolution: {integrity: sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=} + engines: {node: '>=0.10.0'} dependencies: get-value: 2.0.6 has-values: 1.0.0 isobject: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= + /has-values/0.1.4: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-bWHeldkd/Km5oCCJrThL/49it3E= + resolution: {integrity: sha1-bWHeldkd/Km5oCCJrThL/49it3E=} + engines: {node: '>=0.10.0'} + /has-values/1.0.0: + resolution: {integrity: sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=} + engines: {node: '>=0.10.0'} dependencies: is-number: 3.0.0 kind-of: 4.0.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= + /has/1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} dependencies: function-bind: 1.1.1 - dev: true - engines: - node: '>= 0.4.0' - resolution: - integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + /hash-base/3.1.0: + resolution: {integrity: sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==} + engines: {node: '>=4'} dependencies: inherits: 2.0.4 readable-stream: 3.6.0 safe-buffer: 5.2.1 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== + /hash.js/1.1.7: + resolution: {integrity: 
sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==} dependencies: inherits: 2.0.4 minimalistic-assert: 1.0.1 dev: true - resolution: - integrity: sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - /he/1.2.0: + + /hast-to-hyperscript/9.0.1: + resolution: {integrity: sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==} + dependencies: + '@types/unist': 2.0.6 + comma-separated-tokens: 1.0.8 + property-information: 5.6.0 + space-separated-tokens: 1.1.5 + style-to-object: 0.3.0 + unist-util-is: 4.1.0 + web-namespaces: 1.1.4 + dev: true + + /hast-util-from-dom/3.0.0: + resolution: {integrity: sha512-4vQuGiD5Y/wlD7fZiY4mZML/6oh0GOnH38UNyeDFcSTE4AHF0zjKHZfbd+ekVwPvsZXRl8choc99INHUwSPJlg==} + dependencies: + hastscript: 6.0.0 + web-namespaces: 1.1.4 + dev: true + + /hast-util-from-parse5/6.0.1: + resolution: {integrity: sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==} + dependencies: + '@types/parse5': 5.0.3 + hastscript: 6.0.0 + property-information: 5.6.0 + vfile: 4.2.1 + vfile-location: 3.2.0 + web-namespaces: 1.1.4 + dev: true + + /hast-util-has-property/1.0.4: + resolution: {integrity: sha512-ghHup2voGfgFoHMGnaLHOjbYFACKrRh9KFttdCzMCbFoBMJXiNi2+XTrPP8+q6cDJM/RSqlCfVWrjp1H201rZg==} + dev: true + + /hast-util-is-conditional-comment/1.0.4: + resolution: {integrity: sha512-rtULxWWknVeSuU/vsJ9tHo+M3ExyaOrZcWvLxqY2nUfCHbDcq60EJzSJC5zNm6ZlbxbJ8l7Ej8C1Kzsi5PJS1A==} + dev: true + + /hast-util-is-element/1.1.0: + resolution: {integrity: sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ==} + dev: true + + /hast-util-parse-selector/2.2.5: + resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==} + dev: true + + /hast-util-raw/6.1.0: + resolution: {integrity: sha512-5FoZLDHBpka20OlZZ4I/+RBw5piVQ8iI1doEvffQhx5CbCyTtP8UCq8Tw6NmTAMtXgsQxmhW7Ly8OdFre5/YMQ==} + dependencies: + '@types/hast': 2.3.4 + hast-util-from-parse5: 6.0.1 + hast-util-to-parse5: 6.0.0 + html-void-elements: 1.0.5 + parse5: 6.0.1 + unist-util-position: 3.1.0 + unist-util-visit: 2.0.3 + vfile: 4.2.1 + web-namespaces: 1.1.4 + xtend: 4.0.2 + zwitch: 1.0.5 + dev: true + + /hast-util-to-html/7.1.3: + resolution: {integrity: sha512-yk2+1p3EJTEE9ZEUkgHsUSVhIpCsL/bvT8E5GzmWc+N1Po5gBw+0F8bo7dpxXR0nu0bQVxVZGX2lBGF21CmeDw==} + dependencies: + ccount: 1.1.0 + comma-separated-tokens: 1.0.8 + hast-util-is-element: 1.1.0 + hast-util-whitespace: 1.0.4 + html-void-elements: 1.0.5 + property-information: 5.6.0 + space-separated-tokens: 1.1.5 + stringify-entities: 3.1.0 + unist-util-is: 4.1.0 + xtend: 4.0.2 + dev: true + + /hast-util-to-parse5/6.0.0: + resolution: {integrity: sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==} + dependencies: + hast-to-hyperscript: 9.0.1 + property-information: 5.6.0 + web-namespaces: 1.1.4 + xtend: 4.0.2 + zwitch: 1.0.5 + dev: true + + /hast-util-to-string/1.0.4: + resolution: {integrity: sha512-eK0MxRX47AV2eZ+Lyr18DCpQgodvaS3fAQO2+b9Two9F5HEoRPhiUMNzoXArMJfZi2yieFzUBMRl3HNJ3Jus3w==} + dev: true + + /hast-util-to-text/2.0.1: + resolution: {integrity: sha512-8nsgCARfs6VkwH2jJU9b8LNTuR4700na+0h3PqCaEk4MAnMDeu5P0tP8mjk9LLNGxIeQRLbiDbZVw6rku+pYsQ==} + dependencies: + hast-util-is-element: 1.1.0 + repeat-string: 1.6.1 + unist-util-find-after: 3.0.0 + dev: true + + /hast-util-whitespace/1.0.4: + 
resolution: {integrity: sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A==} + dev: true + + /hastscript/6.0.0: + resolution: {integrity: sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==} + dependencies: + '@types/hast': 2.3.4 + comma-separated-tokens: 1.0.8 + hast-util-parse-selector: 2.2.5 + property-information: 5.6.0 + space-separated-tokens: 1.1.5 dev: true + + /he/1.2.0: + resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==} hasBin: true - resolution: - integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + dev: true + /hex-color-regex/1.1.0: + resolution: {integrity: sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==} dev: true - resolution: - integrity: sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== - /history/4.10.1: + + /history-with-query/4.10.4: + resolution: {integrity: sha512-JnskQK8X+PbRFHSdDAExhoJyhLnlLZL+UuHQuQhys+Se9/ukRDRBWU4JVTjsiIfbv1fcEmR3oqKW56OYmk5M5w==} dependencies: - '@babel/runtime': 7.13.10 + '@babel/runtime': 7.14.8 loose-envify: 1.4.0 + query-string: 6.14.1 resolve-pathname: 3.0.0 tiny-invariant: 1.1.0 tiny-warning: 1.0.3 value-equal: 1.0.1 - dev: false - resolution: - integrity: sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew== + dev: true + + /history/4.10.1: + resolution: {integrity: sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==} + dependencies: + '@babel/runtime': 7.13.10 + loose-envify: 1.4.0 + resolve-pathname: 3.0.0 + tiny-invariant: 1.1.0 + tiny-warning: 1.0.3 + value-equal: 1.0.1 + /hmac-drbg/1.0.1: + resolution: {integrity: sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=} dependencies: hash.js: 1.1.7 minimalistic-assert: 1.0.1 minimalistic-crypto-utils: 1.0.1 dev: true - resolution: - integrity: sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= + /hoist-non-react-statics/3.3.2: + resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} dependencies: react-is: 16.13.1 - resolution: - integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + /hoopy/0.1.4: + resolution: {integrity: sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ==} + engines: {node: '>= 6.0.0'} dev: true - engines: - node: '>= 6.0.0' - resolution: - integrity: sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== + /hosted-git-info/2.8.8: + resolution: {integrity: sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==} + dev: true + + /hosted-git-info/3.0.8: + resolution: {integrity: sha512-aXpmwoOhRBrw6X3j0h5RloK4x1OzsxMPyxqIHyNfSe2pypkVTZFpEiRoSipPEPlMrh0HW/XsjkJ5WgnCirpNUw==} + engines: {node: '>=10'} + dependencies: + lru-cache: 6.0.0 dev: true - resolution: - integrity: sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg== + /hpack.js/2.1.6: + resolution: {integrity: sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI=} dependencies: inherits: 2.0.4 obuf: 1.1.2 readable-stream: 2.3.7 wbuf: 1.7.3 dev: true - resolution: - integrity: sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= + /hsl-regex/1.0.0: + resolution: {integrity: sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=} dev: 
true - resolution: - integrity: sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4= + /hsla-regex/1.0.0: + resolution: {integrity: sha1-wc56MWjIxmFAM6S194d/OyJfnDg=} dev: true - resolution: - integrity: sha1-wc56MWjIxmFAM6S194d/OyJfnDg= + /html-comment-regex/1.1.2: + resolution: {integrity: sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==} dev: true - resolution: - integrity: sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ== + /html-encoding-sniffer/2.0.1: + resolution: {integrity: sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==} + engines: {node: '>=10'} dependencies: whatwg-encoding: 1.0.5 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + /html-entities/1.4.0: + resolution: {integrity: sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA==} + dev: true + + /html-entities/2.3.2: + resolution: {integrity: sha512-c3Ab/url5ksaT0WyleslpBEthOzWhrjQbg75y7XUsfSzi3Dgzt0l8w5e7DylRn15MTlMMD58dTfzddNS2kcAjQ==} dev: true - resolution: - integrity: sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA== + /html-escaper/2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} dev: true - resolution: - integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + /html-minifier-terser/5.1.1: + resolution: {integrity: sha512-ZPr5MNObqnV/T9akshPKbVgyOqLmy+Bxo7juKCfTfnjNniTAMdy4hz21YQqoofMBJD2kdREaqPPdThoR78Tgxg==} + engines: {node: '>=6'} + hasBin: true dependencies: camel-case: 4.1.2 clean-css: 4.2.3 @@ -6968,18 +7884,22 @@ packages: relateurl: 0.2.7 terser: 4.8.0 dev: true - engines: - node: '>=6' - hasBin: true - resolution: - integrity: sha512-ZPr5MNObqnV/T9akshPKbVgyOqLmy+Bxo7juKCfTfnjNniTAMdy4hz21YQqoofMBJD2kdREaqPPdThoR78Tgxg== + /html-parse-stringify2/2.0.1: + resolution: {integrity: sha1-3FZwtyksoVi3vJFsmmc1rIhyg0o=} dependencies: void-elements: 2.0.1 dev: false - resolution: - integrity: sha1-3FZwtyksoVi3vJFsmmc1rIhyg0o= + + /html-void-elements/1.0.5: + resolution: {integrity: sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==} + dev: true + /html-webpack-plugin/4.5.0_webpack@4.44.2: + resolution: {integrity: sha512-MouoXEYSjTzCrjIxWwg8gxL5fE2X2WZJLmBYXlaJhQUH5K/b5OrqmV7T4dB7iu0xkmJ6JlUuV6fFVtnqbPopZw==} + engines: {node: '>=6.9'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 dependencies: '@types/html-minifier-terser': 5.1.1 '@types/tapable': 1.0.7 @@ -6992,13 +7912,9 @@ packages: util.promisify: 1.0.0 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>=6.9' - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-MouoXEYSjTzCrjIxWwg8gxL5fE2X2WZJLmBYXlaJhQUH5K/b5OrqmV7T4dB7iu0xkmJ6JlUuV6fFVtnqbPopZw== + /htmlparser2/3.10.1: + resolution: {integrity: sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==} dependencies: domelementtype: 1.3.1 domhandler: 2.4.2 @@ -7007,24 +7923,24 @@ packages: inherits: 2.0.4 readable-stream: 3.6.0 dev: true - resolution: - integrity: sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== + /http-deceiver/1.2.7: + resolution: {integrity: sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc=} 
dev: true - resolution: - integrity: sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= + /http-errors/1.6.3: + resolution: {integrity: sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=} + engines: {node: '>= 0.6'} dependencies: depd: 1.1.2 inherits: 2.0.3 setprototypeof: 1.1.0 statuses: 1.5.0 dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= + /http-errors/1.7.2: + resolution: {integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==} + engines: {node: '>= 0.6'} dependencies: depd: 1.1.2 inherits: 2.0.3 @@ -7032,11 +7948,10 @@ packages: statuses: 1.5.0 toidentifier: 1.0.0 dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== + /http-errors/1.7.3: + resolution: {integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==} + engines: {node: '>= 0.6'} dependencies: depd: 1.1.2 inherits: 2.0.4 @@ -7044,379 +7959,385 @@ packages: statuses: 1.5.0 toidentifier: 1.0.0 dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== + /http-parser-js/0.5.3: + resolution: {integrity: sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg==} dev: true - resolution: - integrity: sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg== + /http-proxy-middleware/0.19.1_debug@4.3.1: + resolution: {integrity: sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==} + engines: {node: '>=4.0.0'} dependencies: http-proxy: 1.18.1_debug@4.3.1 is-glob: 4.0.1 lodash: 4.17.21 micromatch: 3.1.10 + transitivePeerDependencies: + - debug dev: true - engines: - node: '>=4.0.0' + + /http-proxy-middleware/2.0.6: + resolution: {integrity: sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==} + engines: {node: '>=12.0.0'} peerDependencies: - debug: '*' - resolution: - integrity: sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== - /http-proxy/1.18.1_debug@4.3.1: + '@types/express': ^4.17.13 + peerDependenciesMeta: + '@types/express': + optional: true + dependencies: + '@types/http-proxy': 1.17.9 + http-proxy: 1.18.1 + is-glob: 4.0.1 + is-plain-obj: 3.0.0 + micromatch: 4.0.2 + transitivePeerDependencies: + - debug + dev: true + + /http-proxy/1.18.1: + resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==} + engines: {node: '>=8.0.0'} dependencies: eventemitter3: 4.0.7 follow-redirects: 1.13.3 requires-port: 1.0.0 + transitivePeerDependencies: + - debug dev: true - engines: - node: '>=8.0.0' - peerDependencies: - debug: '*' - resolution: - integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + + /http-proxy/1.18.1_debug@4.3.1: + resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==} + engines: {node: '>=8.0.0'} + dependencies: + eventemitter3: 4.0.7 + follow-redirects: 1.13.3_debug@4.3.1 + requires-port: 1.0.0 + transitivePeerDependencies: + - debug + dev: true + /http-signature/1.2.0: + resolution: {integrity: sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=} + engines: {node: '>=0.8', npm: '>=1.3.7'} dependencies: assert-plus: 1.0.0 jsprim: 
1.4.1 sshpk: 1.16.1 dev: true - engines: - node: '>=0.8' - npm: '>=1.3.7' - resolution: - integrity: sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= + /https-browserify/1.0.0: + resolution: {integrity: sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=} dev: true - resolution: - integrity: sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= + /human-signals/1.1.1: + resolution: {integrity: sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==} + engines: {node: '>=8.12.0'} dev: true - engines: - node: '>=8.12.0' - resolution: - integrity: sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== + /human-signals/2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} dev: true - engines: - node: '>=10.17.0' - resolution: - integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + /hyphenate-style-name/1.0.4: + resolution: {integrity: sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==} dev: false - resolution: - integrity: sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ== + /i18next/19.9.2: + resolution: {integrity: sha512-0i6cuo6ER6usEOtKajUUDj92zlG+KArFia0857xxiEHAQcUwh/RtOQocui1LPJwunSYT574Pk64aNva1kwtxZg==} dependencies: '@babel/runtime': 7.13.10 dev: false - resolution: - integrity: sha512-0i6cuo6ER6usEOtKajUUDj92zlG+KArFia0857xxiEHAQcUwh/RtOQocui1LPJwunSYT574Pk64aNva1kwtxZg== + /iconv-lite/0.4.24: + resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} + engines: {node: '>=0.10.0'} dependencies: safer-buffer: 2.1.2 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + /icss-utils/4.1.1: + resolution: {integrity: sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==} + engines: {node: '>= 6'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== + /identity-obj-proxy/3.0.0: + resolution: {integrity: sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ=} + engines: {node: '>=4'} dependencies: harmony-reflect: 1.6.1 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ= + /ieee754/1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} dev: true - resolution: - integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + /iferr/0.1.5: + resolution: {integrity: sha1-xg7taebY/bazEEofy8ocGS3FtQE=} dev: true - resolution: - integrity: sha1-xg7taebY/bazEEofy8ocGS3FtQE= + /ignore/4.0.6: + resolution: {integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==} + engines: {node: '>= 4'} dev: true - engines: - node: '>= 4' - resolution: - integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + /ignore/5.1.8: - engines: - node: '>= 4' - resolution: - integrity: sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== + resolution: {integrity: 
sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==} + engines: {node: '>= 4'} + /image-size/0.5.5: - dev: true - engines: - node: '>=0.10.0' + resolution: {integrity: sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w=} + engines: {node: '>=0.10.0'} hasBin: true + requiresBuild: true + dev: true optional: true - resolution: - integrity: sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= - /immer/6.0.9: - dev: false - resolution: - integrity: sha512-SyCYnAuiRf67Lvk0VkwFvwtDoEiCMjeamnHvRfnVDyc7re1/rQrNxuL+jJ7lA3WvdC4uznrvbmm+clJ9+XXatg== + /immer/8.0.1: + resolution: {integrity: sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA==} dev: false - resolution: - integrity: sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA== + /import-cwd/2.1.0: + resolution: {integrity: sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk=} + engines: {node: '>=4'} dependencies: import-from: 2.1.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= + /import-fresh/2.0.0: + resolution: {integrity: sha1-2BNVwVYS04bGH53dOSLUMEgipUY=} + engines: {node: '>=4'} dependencies: caller-path: 2.0.0 resolve-from: 3.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-2BNVwVYS04bGH53dOSLUMEgipUY= + /import-fresh/3.3.0: + resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} dependencies: parent-module: 1.0.1 resolve-from: 4.0.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + /import-from/2.1.0: + resolution: {integrity: sha1-M1238qev/VOqpHHUuAId7ja387E=} + engines: {node: '>=4'} dependencies: resolve-from: 3.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-M1238qev/VOqpHHUuAId7ja387E= + /import-local/2.0.0: + resolution: {integrity: sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==} + engines: {node: '>=6'} + hasBin: true dependencies: pkg-dir: 3.0.0 resolve-cwd: 2.0.0 dev: true - engines: - node: '>=6' - hasBin: true - resolution: - integrity: sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== + /import-local/3.0.2: + resolution: {integrity: sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==} + engines: {node: '>=8'} + hasBin: true dependencies: pkg-dir: 4.2.0 resolve-cwd: 3.0.0 dev: true - engines: - node: '>=8' - hasBin: true - resolution: - integrity: sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA== + /imurmurhash/0.1.4: + resolution: {integrity: sha1-khi5srkoojixPcT7a21XbyMUU+o=} + engines: {node: '>=0.8.19'} dev: true - engines: - node: '>=0.8.19' - resolution: - integrity: sha1-khi5srkoojixPcT7a21XbyMUU+o= + /indent-string/4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + /indexes-of/1.0.1: + resolution: {integrity: sha1-8w9xbI4r00bHtn0985FVZqfAVgc=} dev: true - resolution: - integrity: sha1-8w9xbI4r00bHtn0985FVZqfAVgc= + /infer-owner/1.0.4: + resolution: {integrity: 
sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==} dev: true - resolution: - integrity: sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== + /inflight/1.0.6: + resolution: {integrity: sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=} dependencies: once: 1.4.0 wrappy: 1.0.2 - resolution: - integrity: sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + /inherits/2.0.1: + resolution: {integrity: sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=} dev: true - resolution: - integrity: sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= + /inherits/2.0.3: + resolution: {integrity: sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=} dev: true - resolution: - integrity: sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + /inherits/2.0.4: - resolution: - integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + /ini/1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} dev: false - resolution: - integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + + /inline-style-parser/0.1.1: + resolution: {integrity: sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==} + dev: true + /inline-style-prefixer/6.0.0: + resolution: {integrity: sha512-XTHvRUS4ZJNzC1GixJRmOlWSS45fSt+DJoyQC9ytj0WxQfcgofQtDtyKKYxHUqEsWCs+LIWftPF1ie7+i012Fg==} dependencies: css-in-js-utils: 2.0.1 dev: false - resolution: - integrity: sha512-XTHvRUS4ZJNzC1GixJRmOlWSS45fSt+DJoyQC9ytj0WxQfcgofQtDtyKKYxHUqEsWCs+LIWftPF1ie7+i012Fg== + /internal-ip/4.3.0: + resolution: {integrity: sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==} + engines: {node: '>=6'} dependencies: default-gateway: 4.2.0 ipaddr.js: 1.9.1 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== + /internal-slot/1.0.3: + resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} + engines: {node: '>= 0.4'} dependencies: get-intrinsic: 1.1.1 has: 1.0.3 side-channel: 1.0.4 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + /interpret/2.2.0: + resolution: {integrity: sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==} + engines: {node: '>= 0.10'} dev: true - engines: - node: '>= 0.10' - resolution: - integrity: sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== + /ip-port-regex/2.0.0: + resolution: {integrity: sha1-5a+/arAXmGQ/eTatVCCsIUJzEH4=} + engines: {node: '>=4'} dependencies: ip-regex: 1.0.3 dev: false - engines: - node: '>=4' - resolution: - integrity: sha1-5a+/arAXmGQ/eTatVCCsIUJzEH4= + /ip-regex/1.0.3: + resolution: {integrity: sha1-3FiQdvZZ9BnCIgOaMzFvHHOH7/0=} + engines: {node: '>=0.10.0'} dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-3FiQdvZZ9BnCIgOaMzFvHHOH7/0= + /ip-regex/2.1.0: + resolution: {integrity: sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= + /ip/1.1.5: + resolution: {integrity: 
sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=} dev: true - resolution: - integrity: sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= + /ipaddr.js/1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} dev: true - engines: - node: '>= 0.10' - resolution: - integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + /is-absolute-url/2.1.0: + resolution: {integrity: sha1-UFMN+4T8yap9vnhS6Do3uTufKqY=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-UFMN+4T8yap9vnhS6Do3uTufKqY= + /is-absolute-url/3.0.3: + resolution: {integrity: sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== + /is-accessor-descriptor/0.1.6: + resolution: {integrity: sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=} + engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= + /is-accessor-descriptor/1.0.0: + resolution: {integrity: sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==} + engines: {node: '>=0.10.0'} dependencies: kind-of: 6.0.3 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + + /is-alphabetical/1.0.4: + resolution: {integrity: sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==} + dev: true + + /is-alphanumerical/1.0.4: + resolution: {integrity: sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==} + dependencies: + is-alphabetical: 1.0.4 + is-decimal: 1.0.4 + dev: true + /is-arguments/1.1.0: + resolution: {integrity: sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg== + /is-arrayish/0.2.1: + resolution: {integrity: sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=} dev: true - resolution: - integrity: sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + /is-arrayish/0.3.2: - dev: true - resolution: - integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} + /is-bigint/1.0.1: + resolution: {integrity: sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg==} dev: true - resolution: - integrity: sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg== + /is-binary-path/1.0.1: + resolution: {integrity: sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=} + engines: {node: '>=0.10.0'} dependencies: binary-extensions: 1.13.1 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= + /is-binary-path/2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} dependencies: binary-extensions: 2.2.0 dev: true - 
engines: - node: '>=8' - optional: true - resolution: - integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + /is-boolean-object/1.1.0: + resolution: {integrity: sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA== + /is-buffer/1.1.6: - resolution: - integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + resolution: {integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==} + + /is-buffer/2.0.5: + resolution: {integrity: sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==} + engines: {node: '>=4'} + dev: true + /is-callable/1.2.3: + resolution: {integrity: sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==} + engines: {node: '>= 0.4'} dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ== + /is-ci/2.0.0: + resolution: {integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==} + hasBin: true dependencies: ci-info: 2.0.0 dev: true - hasBin: true - resolution: - integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + /is-color-stop/1.1.0: + resolution: {integrity: sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=} dependencies: css-color-names: 0.0.4 hex-color-regex: 1.1.0 @@ -7425,367 +8346,347 @@ packages: rgb-regex: 1.0.1 rgba-regex: 1.0.0 dev: true - resolution: - integrity: sha1-z/9HGu5N1cnhWFmPvhKWe1za00U= + /is-core-module/2.2.0: + resolution: {integrity: sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==} dependencies: has: 1.0.3 dev: true - resolution: - integrity: sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== + /is-data-descriptor/0.1.4: + resolution: {integrity: sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=} + engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= + /is-data-descriptor/1.0.0: + resolution: {integrity: sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==} + engines: {node: '>=0.10.0'} dependencies: kind-of: 6.0.3 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + /is-date-object/1.0.2: + resolution: {integrity: sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==} + engines: {node: '>= 0.4'} + dev: true + + /is-decimal/1.0.4: + resolution: {integrity: sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==} dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== + /is-descriptor/0.1.6: + resolution: {integrity: sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==} + engines: {node: '>=0.10.0'} dependencies: is-accessor-descriptor: 
0.1.6 is-data-descriptor: 0.1.4 kind-of: 5.1.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + /is-descriptor/1.0.2: + resolution: {integrity: sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==} + engines: {node: '>=0.10.0'} dependencies: is-accessor-descriptor: 1.0.0 is-data-descriptor: 1.0.0 kind-of: 6.0.3 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + /is-directory/0.3.1: + resolution: {integrity: sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= + /is-docker/2.1.1: - engines: - node: '>=8' + resolution: {integrity: sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw==} + engines: {node: '>=8'} hasBin: true - resolution: - integrity: sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== + /is-extendable/0.1.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + resolution: {integrity: sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=} + engines: {node: '>=0.10.0'} + /is-extendable/1.0.1: + resolution: {integrity: sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==} + engines: {node: '>=0.10.0'} dependencies: is-plain-object: 2.0.4 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + /is-extglob/2.1.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + resolution: {integrity: sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=} + engines: {node: '>=0.10.0'} + /is-fullwidth-code-point/2.0.0: + resolution: {integrity: sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + /is-fullwidth-code-point/3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + /is-generator-fn/2.1.0: + resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + /is-glob/3.1.0: + resolution: {integrity: sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=} + engines: {node: '>=0.10.0'} dependencies: is-extglob: 2.1.1 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= + /is-glob/4.0.1: + resolution: {integrity: sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==} + engines: {node: '>=0.10.0'} dependencies: is-extglob: 2.1.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + + /is-hexadecimal/1.0.4: + resolution: {integrity: 
sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==} + dev: true + /is-module/1.0.0: + resolution: {integrity: sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=} dev: true - resolution: - integrity: sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= + /is-negative-zero/2.0.1: + resolution: {integrity: sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==} + engines: {node: '>= 0.4'} dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== + /is-number-object/1.0.4: + resolution: {integrity: sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==} + engines: {node: '>= 0.4'} dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw== + /is-number/3.0.0: + resolution: {integrity: sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=} + engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= + /is-number/7.0.0: - engines: - node: '>=0.12.0' - resolution: - integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + /is-obj/1.0.1: + resolution: {integrity: sha1-PkcprB9f3gJc19g6iW2rn09n2w8=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-PkcprB9f3gJc19g6iW2rn09n2w8= + /is-obj/2.0.0: + resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== + /is-path-cwd/2.2.0: + resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== + /is-path-in-cwd/2.1.0: + resolution: {integrity: sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==} + engines: {node: '>=6'} dependencies: is-path-inside: 2.1.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== + /is-path-inside/2.1.0: + resolution: {integrity: sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==} + engines: {node: '>=6'} dependencies: path-is-inside: 1.0.2 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== + /is-plain-obj/1.1.0: + resolution: {integrity: sha1-caUMhCnfync8kqOQpKA7OfzVHT4=} + engines: {node: '>=0.10.0'} + dev: true + + /is-plain-obj/2.1.0: + resolution: {integrity: sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==} + engines: {node: '>=8'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-caUMhCnfync8kqOQpKA7OfzVHT4= + + /is-plain-obj/3.0.0: + resolution: {integrity: 
sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==} + engines: {node: '>=10'} + dev: true + /is-plain-object/2.0.4: + resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} + engines: {node: '>=0.10.0'} dependencies: isobject: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + /is-potential-custom-element-name/1.0.0: + resolution: {integrity: sha1-DFLlS8yjkbssSUsh6GJtczbG45c=} dev: true - resolution: - integrity: sha1-DFLlS8yjkbssSUsh6GJtczbG45c= + /is-regex/1.1.2: + resolution: {integrity: sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 has-symbols: 1.0.2 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg== + /is-regexp/1.0.0: + resolution: {integrity: sha1-/S2INUXEa6xaYz57mgnof6LLUGk=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-/S2INUXEa6xaYz57mgnof6LLUGk= + /is-resolvable/1.1.0: + resolution: {integrity: sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==} dev: true - resolution: - integrity: sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== + /is-root/2.1.0: + resolution: {integrity: sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==} + engines: {node: '>=6'} dev: false - engines: - node: '>=6' - resolution: - integrity: sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + /is-stream/1.1.0: + resolution: {integrity: sha1-EtSj3U5o4Lec6428hBc66A2RykQ=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + /is-stream/2.0.0: + resolution: {integrity: sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== + /is-string/1.0.5: + resolution: {integrity: sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==} + engines: {node: '>= 0.4'} dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ== + /is-svg/3.0.0: + resolution: {integrity: sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ==} + engines: {node: '>=4'} dependencies: html-comment-regex: 1.1.2 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ== + /is-symbol/1.0.3: + resolution: {integrity: sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==} + engines: {node: '>= 0.4'} dependencies: has-symbols: 1.0.2 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== + /is-typedarray/1.0.0: + resolution: {integrity: sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=} 
dev: true - resolution: - integrity: sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + /is-unicode-supported/0.1.0: + resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + /is-what/3.14.1: + resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==} dev: true - resolution: - integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA== + /is-windows/1.0.2: - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} + engines: {node: '>=0.10.0'} + /is-wsl/1.1.0: + resolution: {integrity: sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= + /is-wsl/2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} dependencies: is-docker: 2.1.1 - engines: - node: '>=8' - resolution: - integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + /isarray/0.0.1: - dev: false - resolution: - integrity: sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + resolution: {integrity: sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=} + /isarray/1.0.0: - resolution: - integrity: sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + resolution: {integrity: sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=} + /isexe/2.0.0: - resolution: - integrity: sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + resolution: {integrity: sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=} + /isobject/2.1.0: + resolution: {integrity: sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=} + engines: {node: '>=0.10.0'} dependencies: isarray: 1.0.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= + /isobject/3.0.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-TkMekrEalzFjaqH5yNHMvP2reN8= + resolution: {integrity: sha1-TkMekrEalzFjaqH5yNHMvP2reN8=} + engines: {node: '>=0.10.0'} + /isstream/0.1.2: + resolution: {integrity: sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=} dev: true - resolution: - integrity: sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= + /istanbul-lib-coverage/3.0.0: + resolution: {integrity: sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== + /istanbul-lib-instrument/4.0.3: + resolution: {integrity: sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==} + engines: {node: '>=8'} dependencies: '@babel/core': 7.12.3 '@istanbuljs/schema': 0.1.3 istanbul-lib-coverage: 3.0.0 semver: 6.3.0 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== + /istanbul-lib-report/3.0.0: + resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} 
+ engines: {node: '>=8'} dependencies: istanbul-lib-coverage: 3.0.0 make-dir: 3.1.0 supports-color: 7.2.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + /istanbul-lib-source-maps/4.0.0: + resolution: {integrity: sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==} + engines: {node: '>=8'} dependencies: debug: 4.3.1 istanbul-lib-coverage: 3.0.0 source-map: 0.6.1 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg== + /istanbul-reports/3.0.2: + resolution: {integrity: sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==} + engines: {node: '>=8'} dependencies: html-escaper: 2.0.2 istanbul-lib-report: 3.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + + /javascript-stringify/2.1.0: + resolution: {integrity: sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==} + dev: true + /jest-changed-files/26.6.2: + resolution: {integrity: sha512-fDS7szLcY9sCtIip8Fjry9oGf3I2ht/QT21bAHm5Dmf0mD4X3ReNUf17y+bO6fR8WgbIZTlbyG1ak/53cbRzKQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 execa: 4.1.0 throat: 5.0.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-fDS7szLcY9sCtIip8Fjry9oGf3I2ht/QT21bAHm5Dmf0mD4X3ReNUf17y+bO6fR8WgbIZTlbyG1ak/53cbRzKQ== + /jest-circus/26.6.0: + resolution: {integrity: sha512-L2/Y9szN6FJPWFK8kzWXwfp+FOR7xq0cUL4lIsdbIdwz3Vh6P1nrpcqOleSzr28zOtSHQNV9Z7Tl+KkuK7t5Ng==} + engines: {node: '>= 10.14.2'} dependencies: '@babel/traverse': 7.13.13 '@jest/environment': 26.6.2 @@ -7808,12 +8709,18 @@ packages: pretty-format: 26.6.2 stack-utils: 2.0.3 throat: 5.0.0 - dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-L2/Y9szN6FJPWFK8kzWXwfp+FOR7xq0cUL4lIsdbIdwz3Vh6P1nrpcqOleSzr28zOtSHQNV9Z7Tl+KkuK7t5Ng== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /jest-cli/26.6.3: + resolution: {integrity: sha512-GF9noBSa9t08pSyl3CY4frMrqp+aQXFGFkf5hEPbh/pIUFYWMK6ZLTfbmadxJVcJrdRoChlWQsA2VkJcDFK8hg==} + engines: {node: '>= 10.14.2'} + hasBin: true dependencies: '@jest/core': 26.6.3 '@jest/test-result': 26.6.2 @@ -7828,13 +8735,22 @@ packages: jest-validate: 26.6.2 prompts: 2.4.0 yargs: 15.4.1 - dev: true - engines: - node: '>= 10.14.2' - hasBin: true - resolution: - integrity: sha512-GF9noBSa9t08pSyl3CY4frMrqp+aQXFGFkf5hEPbh/pIUFYWMK6ZLTfbmadxJVcJrdRoChlWQsA2VkJcDFK8hg== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /jest-config/26.6.3: + resolution: {integrity: sha512-t5qdIj/bCj2j7NFVHb2nFB4aUdfucDn3JRKgrZnplb8nieAirAzRSHP8uDEd+qV6ygzg9Pz4YG7UTJf94LPSyg==} + engines: {node: '>= 10.14.2'} + peerDependencies: + ts-node: '>=9.0.0' + peerDependenciesMeta: + ts-node: + optional: true dependencies: '@babel/core': 7.12.3 '@jest/test-sequencer': 26.6.3 @@ -7854,36 +8770,33 @@ packages: jest-validate: 26.6.2 micromatch: 4.0.2 pretty-format: 26.6.2 + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - utf-8-validate dev: true - engines: - node: '>= 10.14.2' - peerDependencies: - 
ts-node: '>=9.0.0' - peerDependenciesMeta: - ts-node: - optional: true - resolution: - integrity: sha512-t5qdIj/bCj2j7NFVHb2nFB4aUdfucDn3JRKgrZnplb8nieAirAzRSHP8uDEd+qV6ygzg9Pz4YG7UTJf94LPSyg== + /jest-diff/26.6.2: + resolution: {integrity: sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA==} + engines: {node: '>= 10.14.2'} dependencies: chalk: 4.1.0 diff-sequences: 26.6.2 jest-get-type: 26.3.0 pretty-format: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA== + /jest-docblock/26.0.0: + resolution: {integrity: sha512-RDZ4Iz3QbtRWycd8bUEPxQsTlYazfYn/h5R65Fc6gOfwozFhoImx+affzky/FFBuqISPTqjXomoIGJVKBWoo0w==} + engines: {node: '>= 10.14.2'} dependencies: detect-newline: 3.1.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-RDZ4Iz3QbtRWycd8bUEPxQsTlYazfYn/h5R65Fc6gOfwozFhoImx+affzky/FFBuqISPTqjXomoIGJVKBWoo0w== + /jest-each/26.6.2: + resolution: {integrity: sha512-Mer/f0KaATbjl8MCJ+0GEpNdqmnVmDYqCTJYTvoo7rqmRiDllmp2AYN+06F93nXcY3ur9ShIjS+CO/uD+BbH4A==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 chalk: 4.1.0 @@ -7891,11 +8804,10 @@ packages: jest-util: 26.6.2 pretty-format: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-Mer/f0KaATbjl8MCJ+0GEpNdqmnVmDYqCTJYTvoo7rqmRiDllmp2AYN+06F93nXcY3ur9ShIjS+CO/uD+BbH4A== + /jest-environment-jsdom/26.6.2: + resolution: {integrity: sha512-jgPqCruTlt3Kwqg5/WVFyHIOJHsiAvhcp2qiR2QQstuG9yWox5+iHpU3ZrcBxW14T4fe5Z68jAfLRh7joCSP2Q==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/environment': 26.6.2 '@jest/fake-timers': 26.6.2 @@ -7904,12 +8816,15 @@ packages: jest-mock: 26.6.2 jest-util: 26.6.2 jsdom: 16.5.2 + transitivePeerDependencies: + - bufferutil + - canvas + - utf-8-validate dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-jgPqCruTlt3Kwqg5/WVFyHIOJHsiAvhcp2qiR2QQstuG9yWox5+iHpU3ZrcBxW14T4fe5Z68jAfLRh7joCSP2Q== + /jest-environment-node/26.6.2: + resolution: {integrity: sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/environment': 26.6.2 '@jest/fake-timers': 26.6.2 @@ -7918,17 +8833,15 @@ packages: jest-mock: 26.6.2 jest-util: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag== + /jest-get-type/26.3.0: + resolution: {integrity: sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig==} + engines: {node: '>= 10.14.2'} dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig== + /jest-haste-map/26.6.2: + resolution: {integrity: sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 '@types/graceful-fs': 4.1.5 @@ -7943,14 +8856,13 @@ packages: micromatch: 4.0.2 sane: 4.1.0 walker: 1.0.7 - dev: true - engines: - node: '>= 10.14.2' optionalDependencies: fsevents: 2.3.2 - resolution: - integrity: sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w== + dev: true + /jest-jasmine2/26.6.3: + resolution: {integrity: 
sha512-kPKUrQtc8aYwBV7CqBg5pu+tmYXlvFlSFYn18ev4gPFtrRzB15N2gW/Roew3187q2w2eHuu0MU9TJz6w0/nPEg==} + engines: {node: '>= 10.14.2'} dependencies: '@babel/traverse': 7.13.13 '@jest/environment': 26.6.2 @@ -7970,32 +8882,35 @@ packages: jest-util: 26.6.2 pretty-format: 26.6.2 throat: 5.0.0 - dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-kPKUrQtc8aYwBV7CqBg5pu+tmYXlvFlSFYn18ev4gPFtrRzB15N2gW/Roew3187q2w2eHuu0MU9TJz6w0/nPEg== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /jest-leak-detector/26.6.2: + resolution: {integrity: sha512-i4xlXpsVSMeKvg2cEKdfhh0H39qlJlP5Ex1yQxwF9ubahboQYMgTtz5oML35AVA3B4Eu+YsmwaiKVev9KCvLxg==} + engines: {node: '>= 10.14.2'} dependencies: jest-get-type: 26.3.0 pretty-format: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-i4xlXpsVSMeKvg2cEKdfhh0H39qlJlP5Ex1yQxwF9ubahboQYMgTtz5oML35AVA3B4Eu+YsmwaiKVev9KCvLxg== + /jest-matcher-utils/26.6.2: + resolution: {integrity: sha512-llnc8vQgYcNqDrqRDXWwMr9i7rS5XFiCwvh6DTP7Jqa2mqpcCBBlpCbn+trkG0KNhPu/h8rzyBkriOtBstvWhw==} + engines: {node: '>= 10.14.2'} dependencies: chalk: 4.1.0 jest-diff: 26.6.2 jest-get-type: 26.3.0 pretty-format: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-llnc8vQgYcNqDrqRDXWwMr9i7rS5XFiCwvh6DTP7Jqa2mqpcCBBlpCbn+trkG0KNhPu/h8rzyBkriOtBstvWhw== + /jest-message-util/26.6.2: + resolution: {integrity: sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA==} + engines: {node: '>= 10.14.2'} dependencies: '@babel/code-frame': 7.12.13 '@jest/types': 26.6.2 @@ -8007,62 +8922,56 @@ packages: slash: 3.0.0 stack-utils: 2.0.3 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA== + /jest-mock/26.6.2: + resolution: {integrity: sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 '@types/node': 12.20.7 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew== + /jest-pnp-resolver/1.2.2_jest-resolve@26.6.0: - dependencies: - jest-resolve: 26.6.0 - dev: true - engines: - node: '>=6' + resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} + engines: {node: '>=6'} peerDependencies: jest-resolve: '*' peerDependenciesMeta: jest-resolve: optional: true - resolution: - integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - /jest-pnp-resolver/1.2.2_jest-resolve@26.6.2: dependencies: - jest-resolve: 26.6.2 + jest-resolve: 26.6.0 dev: true - engines: - node: '>=6' + + /jest-pnp-resolver/1.2.2_jest-resolve@26.6.2: + resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} + engines: {node: '>=6'} peerDependencies: jest-resolve: '*' peerDependenciesMeta: jest-resolve: optional: true - resolution: - integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + dependencies: + jest-resolve: 26.6.2 + dev: true + /jest-regex-util/26.0.0: + resolution: {integrity: sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A==} + engines: {node: '>= 
10.14.2'} dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A== + /jest-resolve-dependencies/26.6.3: + resolution: {integrity: sha512-pVwUjJkxbhe4RY8QEWzN3vns2kqyuldKpxlxJlzEYfKSvY6/bMvxoFrYYzUO1Gx28yKWN37qyV7rIoIp2h8fTg==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 jest-regex-util: 26.0.0 jest-snapshot: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-pVwUjJkxbhe4RY8QEWzN3vns2kqyuldKpxlxJlzEYfKSvY6/bMvxoFrYYzUO1Gx28yKWN37qyV7rIoIp2h8fTg== + /jest-resolve/26.6.0: + resolution: {integrity: sha512-tRAz2bwraHufNp+CCmAD8ciyCpXCs1NQxB5EJAmtCFy6BN81loFEGWKzYu26Y62lAJJe4X4jg36Kf+NsQyiStQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 chalk: 4.1.0 @@ -8073,11 +8982,10 @@ packages: resolve: 1.18.1 slash: 3.0.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-tRAz2bwraHufNp+CCmAD8ciyCpXCs1NQxB5EJAmtCFy6BN81loFEGWKzYu26Y62lAJJe4X4jg36Kf+NsQyiStQ== + /jest-resolve/26.6.2: + resolution: {integrity: sha512-sOxsZOq25mT1wRsfHcbtkInS+Ek7Q8jCHUB0ZUTP0tc/c41QHriU/NunqMfCUWsL4H3MHpvQD4QR9kSYhS7UvQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 chalk: 4.1.0 @@ -8088,11 +8996,10 @@ packages: resolve: 1.18.1 slash: 3.0.0 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-sOxsZOq25mT1wRsfHcbtkInS+Ek7Q8jCHUB0ZUTP0tc/c41QHriU/NunqMfCUWsL4H3MHpvQD4QR9kSYhS7UvQ== + /jest-runner/26.6.3: + resolution: {integrity: sha512-atgKpRHnaA2OvByG/HpGA4g6CSPS/1LK0jK3gATJAoptC1ojltpmVlYC3TYgdmGp+GLuhzpH30Gvs36szSL2JQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/console': 26.6.2 '@jest/environment': 26.6.2 @@ -8114,12 +9021,18 @@ packages: jest-worker: 26.6.2 source-map-support: 0.5.19 throat: 5.0.0 - dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-atgKpRHnaA2OvByG/HpGA4g6CSPS/1LK0jK3gATJAoptC1ojltpmVlYC3TYgdmGp+GLuhzpH30Gvs36szSL2JQ== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /jest-runtime/26.6.3: + resolution: {integrity: sha512-lrzyR3N8sacTAMeonbqpnSka1dHNux2uk0qqDXVkMv2c/A3wYnvQ4EXuI013Y6+gSKSCxdaczvf4HF0mVXHRdw==} + engines: {node: '>= 10.14.2'} + hasBin: true dependencies: '@jest/console': 26.6.2 '@jest/environment': 26.6.2 @@ -8148,22 +9061,25 @@ packages: slash: 3.0.0 strip-bom: 4.0.0 yargs: 15.4.1 - dev: true - engines: - node: '>= 10.14.2' - hasBin: true - resolution: - integrity: sha512-lrzyR3N8sacTAMeonbqpnSka1dHNux2uk0qqDXVkMv2c/A3wYnvQ4EXuI013Y6+gSKSCxdaczvf4HF0mVXHRdw== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /jest-serializer/26.6.2: + resolution: {integrity: sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g==} + engines: {node: '>= 10.14.2'} dependencies: '@types/node': 12.20.7 graceful-fs: 4.2.6 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g== + /jest-snapshot/26.6.2: + resolution: {integrity: sha512-OLhxz05EzUtsAmOMzuupt1lHYXCNib0ECyuZ/PZOx9TrZcC8vL0x+DUG3TL+GLX3yHG45e6YGjIm0XwDc3q3og==} + engines: {node: '>= 10.14.2'} dependencies: '@babel/types': 7.13.13 '@jest/types': 26.6.2 @@ -8182,11 +9098,20 @@ packages: pretty-format: 26.6.2 semver: 7.3.2 dev: true - engines: - node: '>= 10.14.2' - 
resolution: - integrity: sha512-OLhxz05EzUtsAmOMzuupt1lHYXCNib0ECyuZ/PZOx9TrZcC8vL0x+DUG3TL+GLX3yHG45e6YGjIm0XwDc3q3og== + + /jest-styled-components/7.0.5_styled-components@5.2.1: + resolution: {integrity: sha512-ZR/r3IKNkgaaVIOThn0Qis4sNQtA352qHjhbxSHeLS3FDIvHSUSJoI2b3kzk+bHHQ1VOeV630usERtnyhyZh4A==} + engines: {node: '>= 12'} + peerDependencies: + styled-components: '>= 5' + dependencies: + css: 3.0.0 + styled-components: 5.2.1_react-dom@17.0.2+react@17.0.2 + dev: true + /jest-util/26.6.2: + resolution: {integrity: sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 '@types/node': 12.20.7 @@ -8195,11 +9120,10 @@ packages: is-ci: 2.0.0 micromatch: 4.0.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q== + /jest-validate/26.6.2: + resolution: {integrity: sha512-NEYZ9Aeyj0i5rQqbq+tpIOom0YS1u2MVu6+euBsvpgIme+FOfRmoC4R5p0JiAUpaFvFy24xgrpMknarR/93XjQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/types': 26.6.2 camelcase: 6.2.0 @@ -8208,11 +9132,12 @@ packages: leven: 3.1.0 pretty-format: 26.6.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-NEYZ9Aeyj0i5rQqbq+tpIOom0YS1u2MVu6+euBsvpgIme+FOfRmoC4R5p0JiAUpaFvFy24xgrpMknarR/93XjQ== + /jest-watch-typeahead/0.6.1_jest@26.6.0: + resolution: {integrity: sha512-ITVnHhj3Jd/QkqQcTqZfRgjfyRhDFM/auzgVo2RKvSwi18YMvh0WvXDJFoFED6c7jd/5jxtu4kSOb9PTu2cPVg==} + engines: {node: '>=10'} + peerDependencies: + jest: ^26.0.0 dependencies: ansi-escapes: 4.3.2 chalk: 4.1.0 @@ -8223,13 +9148,10 @@ packages: string-length: 4.0.2 strip-ansi: 6.0.0 dev: true - engines: - node: '>=10' - peerDependencies: - jest: ^26.0.0 - resolution: - integrity: sha512-ITVnHhj3Jd/QkqQcTqZfRgjfyRhDFM/auzgVo2RKvSwi18YMvh0WvXDJFoFED6c7jd/5jxtu4kSOb9PTu2cPVg== + /jest-watcher/26.6.2: + resolution: {integrity: sha512-WKJob0P/Em2csiVthsI68p6aGKTIcsfjH9Gsx1f0A3Italz43e3ho0geSAVsmj09RWOELP1AZ/DXyJgOgDKxXQ==} + engines: {node: '>= 10.14.2'} dependencies: '@jest/test-result': 26.6.2 '@jest/types': 26.6.2 @@ -8239,64 +9161,71 @@ packages: jest-util: 26.6.2 string-length: 4.0.2 dev: true - engines: - node: '>= 10.14.2' - resolution: - integrity: sha512-WKJob0P/Em2csiVthsI68p6aGKTIcsfjH9Gsx1f0A3Italz43e3ho0geSAVsmj09RWOELP1AZ/DXyJgOgDKxXQ== + /jest-worker/24.9.0: + resolution: {integrity: sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw==} + engines: {node: '>= 6'} dependencies: merge-stream: 2.0.0 supports-color: 6.1.0 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw== + /jest-worker/26.6.2: + resolution: {integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ==} + engines: {node: '>= 10.13.0'} dependencies: '@types/node': 12.20.7 merge-stream: 2.0.0 supports-color: 7.2.0 dev: true - engines: - node: '>= 10.13.0' - resolution: - integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + /jest/26.6.0: + resolution: {integrity: sha512-jxTmrvuecVISvKFFhOkjsWRZV7sFqdSUAd1ajOKY+/QE/aLBVstsJ/dX8GczLzwiT6ZEwwmZqtCUHLHHQVzcfA==} + engines: {node: '>= 10.14.2'} + hasBin: true dependencies: '@jest/core': 26.6.3 import-local: 3.0.2 jest-cli: 26.6.3 - dev: true - engines: - node: '>= 10.14.2' - hasBin: 
true - resolution: - integrity: sha512-jxTmrvuecVISvKFFhOkjsWRZV7sFqdSUAd1ajOKY+/QE/aLBVstsJ/dX8GczLzwiT6ZEwwmZqtCUHLHHQVzcfA== + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /js-cookie/2.2.1: + resolution: {integrity: sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==} dev: false - resolution: - integrity: sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ== + /js-sha3/0.8.0: + resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} dev: false - resolution: - integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== + /js-tokens/4.0.0: - resolution: - integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + /js-yaml/3.14.1: + resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + hasBin: true dependencies: argparse: 1.0.10 esprima: 4.0.1 dev: true - hasBin: true - resolution: - integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + /jsbn/0.1.1: + resolution: {integrity: sha1-peZUwuWi3rXyAdls77yoDA7y9RM=} dev: true - resolution: - integrity: sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + /jsdom/16.5.2: + resolution: {integrity: sha512-JxNtPt9C1ut85boCbJmffaQ06NBnzkQY/MWO3YxPW8IWS38A26z+B1oBvA9LwKrytewdfymnhi4UNH3/RAgZrg==} + engines: {node: '>=10'} + peerDependencies: + canvas: ^2.5.0 + peerDependenciesMeta: + canvas: + optional: true dependencies: abab: 2.0.5 acorn: 8.1.0 @@ -8324,175 +9253,176 @@ packages: whatwg-url: 8.5.0 ws: 7.4.4 xml-name-validator: 3.0.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate dev: true - engines: - node: '>=10' - peerDependencies: - canvas: ^2.5.0 - peerDependenciesMeta: - canvas: - optional: true - resolution: - integrity: sha512-JxNtPt9C1ut85boCbJmffaQ06NBnzkQY/MWO3YxPW8IWS38A26z+B1oBvA9LwKrytewdfymnhi4UNH3/RAgZrg== + /jsesc/0.5.0: - dev: true + resolution: {integrity: sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=} hasBin: true - resolution: - integrity: sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= + dev: true + /jsesc/2.5.2: - engines: - node: '>=4' + resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} + engines: {node: '>=4'} hasBin: true - resolution: - integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + /json-parse-better-errors/1.0.2: + resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} dev: true - resolution: - integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + /json-parse-even-better-errors/2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} dev: true - resolution: - integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + /json-schema-traverse/0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} dev: true - resolution: - integrity: 
sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + /json-schema-traverse/1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} dev: true - resolution: - integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + /json-schema/0.2.3: + resolution: {integrity: sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=} dev: true - resolution: - integrity: sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= + /json-stable-stringify-without-jsonify/1.0.1: + resolution: {integrity: sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=} dev: true - resolution: - integrity: sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + /json-stringify-safe/5.0.1: + resolution: {integrity: sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=} dev: true - resolution: - integrity: sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= - /json2mq/0.2.0: - dependencies: - string-convert: 0.2.1 - dev: false - resolution: - integrity: sha1-tje9O6nqvhIsg+lyBIOusQ0skEo= + /json3/3.3.3: + resolution: {integrity: sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA==} dev: true - resolution: - integrity: sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== + /json5/1.0.1: + resolution: {integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==} + hasBin: true dependencies: minimist: 1.2.5 dev: true - hasBin: true - resolution: - integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + /json5/2.2.0: + resolution: {integrity: sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==} + engines: {node: '>=6'} + hasBin: true dependencies: minimist: 1.2.5 - engines: - node: '>=6' + + /json5/2.2.1: + resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} + engines: {node: '>=6'} hasBin: true - resolution: - integrity: sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA== - /jsonfile/4.0.0: dev: true + + /jsonfile/4.0.0: + resolution: {integrity: sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=} optionalDependencies: - graceful-fs: 4.2.6 - resolution: - integrity: sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + graceful-fs: 4.2.9 + dev: true + /jsonfile/6.1.0: + resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} dependencies: universalify: 2.0.0 - dev: true optionalDependencies: - graceful-fs: 4.2.6 - resolution: - integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + graceful-fs: 4.2.9 + dev: true + /jsprim/1.4.1: + resolution: {integrity: sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=} + engines: {'0': node >=0.6.0} dependencies: assert-plus: 1.0.0 extsprintf: 1.3.0 json-schema: 0.2.3 verror: 1.10.0 dev: true - engines: - '0': node >=0.6.0 - resolution: - integrity: sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= + /jsx-ast-utils/3.2.0: + resolution: {integrity: sha512-EIsmt3O3ljsU6sot/J4E1zDRxfBNrhjyf/OKjlydwgEimQuznlM4Wv7U+ueONJMyEn1WRE0K8dhi3dVAXYT24Q==} + engines: {node: '>=4.0'} dependencies: array-includes: 3.1.3 object.assign: 4.1.2 dev: true - engines: - node: '>=4.0' - resolution: - integrity: sha512-EIsmt3O3ljsU6sot/J4E1zDRxfBNrhjyf/OKjlydwgEimQuznlM4Wv7U+ueONJMyEn1WRE0K8dhi3dVAXYT24Q== + + /katex/0.12.0: + resolution: {integrity: 
sha512-y+8btoc/CK70XqcHqjxiGWBOeIL8upbS0peTPXTvgrh21n1RiWWcIpSWM+4uXq+IAgNh9YYQWdc7LVDPDAEEAg==} + hasBin: true + dependencies: + commander: 2.20.3 + dev: true + + /kebab-case/1.0.1: + resolution: {integrity: sha512-txPHx6nVLhv8PHGXIlAk0nYoh894SpAqGPXNvbg2hh8spvHXIah3+vT87DLoa59nKgC6scD3u3xAuRIgiMqbfQ==} + dev: true + /keyboardjs/2.6.4: + resolution: {integrity: sha512-xDiNwiwH3KUqap++RFJiLAXzbvRB5Yw08xliuceOgLhM1o7g1puKKR9vWy6wp9H/Bi4VP0+SQMpiWXMWWmR6rA==} dev: false - resolution: - integrity: sha512-xDiNwiwH3KUqap++RFJiLAXzbvRB5Yw08xliuceOgLhM1o7g1puKKR9vWy6wp9H/Bi4VP0+SQMpiWXMWWmR6rA== + /killable/1.0.1: + resolution: {integrity: sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==} dev: true - resolution: - integrity: sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== + /kind-of/3.2.2: + resolution: {integrity: sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=} + engines: {node: '>=0.10.0'} dependencies: is-buffer: 1.1.6 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= + /kind-of/4.0.0: + resolution: {integrity: sha1-IIE989cSkosgc3hpGkUGb65y3Vc=} + engines: {node: '>=0.10.0'} dependencies: is-buffer: 1.1.6 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-IIE989cSkosgc3hpGkUGb65y3Vc= + /kind-of/5.1.0: - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + resolution: {integrity: sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==} + engines: {node: '>=0.10.0'} + /kind-of/6.0.3: - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} + engines: {node: '>=0.10.0'} + /kleur/3.0.3: - engines: - node: '>=6' - resolution: - integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} + engines: {node: '>=6'} + /klona/2.0.4: + resolution: {integrity: sha512-ZRbnvdg/NxqzC7L9Uyqzf4psi1OM4Cuc+sJAkQPjO6XkQIJTNbfK2Rsmbw8fx1p2mkZdp2FZYo2+LwXYY/uwIA==} + engines: {node: '>= 8'} dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-ZRbnvdg/NxqzC7L9Uyqzf4psi1OM4Cuc+sJAkQPjO6XkQIJTNbfK2Rsmbw8fx1p2mkZdp2FZYo2+LwXYY/uwIA== + /language-subtag-registry/0.3.21: + resolution: {integrity: sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg==} dev: true - resolution: - integrity: sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg== + /language-tags/1.0.5: + resolution: {integrity: sha1-0yHbxNowuovzAk4ED6XBRmH5GTo=} dependencies: language-subtag-registry: 0.3.21 dev: true - resolution: - integrity: sha1-0yHbxNowuovzAk4ED6XBRmH5GTo= + /last-call-webpack-plugin/3.0.0: + resolution: {integrity: sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w==} dependencies: lodash: 4.17.21 webpack-sources: 1.4.3 dev: true - resolution: - integrity: sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w== + /less-loader/7.3.0_less@3.13.1+webpack@4.44.2: + resolution: {integrity: 
sha512-Mi8915g7NMaLlgi77mgTTQvK022xKRQBIVDSyfl3ErTuBhmZBQab0mjeJjNNqGbdR+qrfTleKXqbGI4uEFavxg==} + engines: {node: '>= 10.13.0'} + peerDependencies: + less: ^3.5.0 || ^4.0.0 + webpack: ^4.0.0 || ^5.0.0 dependencies: klona: 2.0.4 less: 3.13.1 @@ -8500,68 +9430,59 @@ packages: schema-utils: 3.0.0 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>= 10.13.0' - peerDependencies: - less: ^3.5.0 || ^4.0.0 - webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-Mi8915g7NMaLlgi77mgTTQvK022xKRQBIVDSyfl3ErTuBhmZBQab0mjeJjNNqGbdR+qrfTleKXqbGI4uEFavxg== + /less-vars-to-js/1.3.0: + resolution: {integrity: sha512-xeiLLn/IMCGtdyCkYQnW8UuzoW2oYMCKg9boZRaGI58fLz5r90bNJDlqGzmVt/1Uqk75/DxIVtQSNCMkE5fRZQ==} + engines: {node: '>=8'} dependencies: strip-json-comments: 2.0.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-xeiLLn/IMCGtdyCkYQnW8UuzoW2oYMCKg9boZRaGI58fLz5r90bNJDlqGzmVt/1Uqk75/DxIVtQSNCMkE5fRZQ== + /less/3.13.1: + resolution: {integrity: sha512-SwA1aQXGUvp+P5XdZslUOhhLnClSLIjWvJhmd+Vgib5BFIr9lMNlQwmwUNOjXThF/A0x+MCYYPeWEfeWiLRnTw==} + engines: {node: '>=6'} + hasBin: true dependencies: copy-anything: 2.0.3 tslib: 1.14.1 - dev: true - engines: - node: '>=6' - hasBin: true optionalDependencies: errno: 0.1.8 - graceful-fs: 4.2.6 + graceful-fs: 4.2.9 image-size: 0.5.5 make-dir: 2.1.0 mime: 1.6.0 - native-request: 1.0.8 + native-request: 1.1.0 source-map: 0.6.1 - resolution: - integrity: sha512-SwA1aQXGUvp+P5XdZslUOhhLnClSLIjWvJhmd+Vgib5BFIr9lMNlQwmwUNOjXThF/A0x+MCYYPeWEfeWiLRnTw== + dev: true + /leven/3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + /levn/0.3.0: + resolution: {integrity: sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=} + engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.1.2 type-check: 0.3.2 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + /levn/0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.2.1 type-check: 0.4.0 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + /lines-and-columns/1.1.6: + resolution: {integrity: sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=} dev: true - resolution: - integrity: sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= + /lint-staged/10.5.4: + resolution: {integrity: sha512-EechC3DdFic/TdOPgj/RB3FicqE6932LTHCUm0Y2fsD9KGlLB+RwJl2q1IYBIvEsKzDOgn0D4gll+YxG5RsrKg==} + hasBin: true dependencies: chalk: 4.1.0 cli-truncate: 2.1.0 @@ -8578,11 +9499,15 @@ packages: please-upgrade-node: 3.2.0 string-argv: 0.3.1 stringify-object: 3.3.0 + transitivePeerDependencies: + - supports-color dev: true - hasBin: true - resolution: - integrity: sha512-EechC3DdFic/TdOPgj/RB3FicqE6932LTHCUm0Y2fsD9KGlLB+RwJl2q1IYBIvEsKzDOgn0D4gll+YxG5RsrKg== + /listr2/3.4.3_enquirer@2.3.6: + resolution: {integrity: sha512-wZmkzNiuinOfwrGqAwTCcPw6aKQGTAMGXwG5xeU1WpDjJNeBA35jGBeWxR3OF+R6Yl5Y3dRG+3vE8t6PDcSNHA==} + engines: {node: '>=10.0.0'} + peerDependencies: + enquirer: '>= 2.3.0 < 3' dependencies: chalk: 4.1.0 cli-truncate: 2.1.0 @@ -8595,300 +9520,494 @@ packages: through: 2.3.8 wrap-ansi: 7.0.0 dev: true - engines: - node: 
'>=10.0.0' - peerDependencies: - enquirer: '>= 2.3.0 < 3' - resolution: - integrity: sha512-wZmkzNiuinOfwrGqAwTCcPw6aKQGTAMGXwG5xeU1WpDjJNeBA35jGBeWxR3OF+R6Yl5Y3dRG+3vE8t6PDcSNHA== + /load-json-file/2.0.0: + resolution: {integrity: sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=} + engines: {node: '>=4'} dependencies: graceful-fs: 4.2.6 parse-json: 2.2.0 pify: 2.3.0 strip-bom: 3.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg= + /loader-runner/2.4.0: + resolution: {integrity: sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==} + engines: {node: '>=4.3.0 <5.0.0 || >=5.10'} dev: true - engines: - node: '>=4.3.0 <5.0.0 || >=5.10' - resolution: - integrity: sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== + /loader-utils/1.2.3: + resolution: {integrity: sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA==} + engines: {node: '>=4.0.0'} dependencies: big.js: 5.2.2 emojis-list: 2.1.0 json5: 1.0.1 dev: true - engines: - node: '>=4.0.0' - resolution: - integrity: sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== + /loader-utils/1.4.0: + resolution: {integrity: sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==} + engines: {node: '>=4.0.0'} dependencies: big.js: 5.2.2 emojis-list: 3.0.0 json5: 1.0.1 dev: true - engines: - node: '>=4.0.0' - resolution: - integrity: sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== + /loader-utils/2.0.0: + resolution: {integrity: sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==} + engines: {node: '>=8.9.0'} dependencies: big.js: 5.2.2 emojis-list: 3.0.0 json5: 2.2.0 - engines: - node: '>=8.9.0' - resolution: - integrity: sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== + + /loader-utils/2.0.4: + resolution: {integrity: sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==} + engines: {node: '>=8.9.0'} + dependencies: + big.js: 5.2.2 + emojis-list: 3.0.0 + json5: 2.2.1 + dev: true + /locate-path/2.0.0: + resolution: {integrity: sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=} + engines: {node: '>=4'} dependencies: p-locate: 2.0.0 path-exists: 3.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + /locate-path/3.0.0: + resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} + engines: {node: '>=6'} dependencies: p-locate: 3.0.0 path-exists: 3.0.0 - engines: - node: '>=6' - resolution: - integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + /locate-path/5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} dependencies: p-locate: 4.1.0 - engines: - node: '>=8' - resolution: - integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + /lodash-es/4.17.21: + resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} dev: false - resolution: - integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== + 
/lodash._reinterpolate/3.0.0: + resolution: {integrity: sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=} dev: true - resolution: - integrity: sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= + /lodash.clonedeep/4.5.0: + resolution: {integrity: sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=} dev: true - resolution: - integrity: sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= + /lodash.debounce/4.0.8: + resolution: {integrity: sha1-gteb/zCmfEAF/9XiUVMArZyk168=} dev: true - resolution: - integrity: sha1-gteb/zCmfEAF/9XiUVMArZyk168= + /lodash.flatten/4.4.0: + resolution: {integrity: sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=} dev: true - resolution: - integrity: sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= + /lodash.memoize/4.1.2: + resolution: {integrity: sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=} dev: true - resolution: - integrity: sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + /lodash.template/4.5.0: + resolution: {integrity: sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==} dependencies: lodash._reinterpolate: 3.0.0 lodash.templatesettings: 4.2.0 dev: true - resolution: - integrity: sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== + /lodash.templatesettings/4.2.0: + resolution: {integrity: sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==} dependencies: lodash._reinterpolate: 3.0.0 dev: true - resolution: - integrity: sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== + + /lodash.throttle/4.1.1: + resolution: {integrity: sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=} + dev: true + /lodash.truncate/4.4.2: + resolution: {integrity: sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=} dev: true - resolution: - integrity: sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM= + /lodash.uniq/4.5.0: + resolution: {integrity: sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=} dev: true - resolution: - integrity: sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= + /lodash/4.17.21: - resolution: - integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + /log-symbols/4.1.0: + resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} + engines: {node: '>=10'} dependencies: chalk: 4.1.0 is-unicode-supported: 0.1.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + /log-update/4.0.0: + resolution: {integrity: sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==} + engines: {node: '>=10'} dependencies: ansi-escapes: 4.3.2 cli-cursor: 3.1.0 slice-ansi: 4.0.0 wrap-ansi: 6.2.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg== + /loglevel/1.7.1: + resolution: {integrity: sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==} + engines: {node: '>= 0.6.0'} + dev: true + + /longest-streak/2.0.4: + resolution: {integrity: sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==} dev: true - engines: - node: '>= 0.6.0' - resolution: - integrity: sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw== + /loose-envify/1.4.0: + resolution: {integrity: 
sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true dependencies: js-tokens: 4.0.0 - hasBin: true - resolution: - integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - /lower-case/1.1.4: - dev: false - resolution: - integrity: sha1-miyr0bno4K6ZOkv31YdcOcQujqw= + /lower-case/2.0.2: + resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} dependencies: - tslib: 2.1.0 - dev: true - resolution: - integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + tslib: 2.4.1 + /lru-cache/5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} dependencies: yallist: 3.1.1 dev: true - resolution: - integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + /lru-cache/6.0.0: + resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} + engines: {node: '>=10'} dependencies: yallist: 4.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + /lz-string/1.4.4: - dev: true + resolution: {integrity: sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY=} hasBin: true - resolution: - integrity: sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY= + dev: true + /magic-string/0.25.7: + resolution: {integrity: sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==} dependencies: sourcemap-codec: 1.4.8 dev: true - resolution: - integrity: sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA== + /make-dir/2.1.0: + resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} + engines: {node: '>=6'} dependencies: pify: 4.0.1 semver: 5.7.1 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== + /make-dir/3.1.0: + resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} + engines: {node: '>=8'} dependencies: semver: 6.3.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + /makeerror/1.0.11: + resolution: {integrity: sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw=} dependencies: tmpl: 1.0.4 dev: true - resolution: - integrity: sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw= + /map-cache/0.2.2: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= + resolution: {integrity: sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=} + engines: {node: '>=0.10.0'} + /map-visit/1.0.0: + resolution: {integrity: sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=} + engines: {node: '>=0.10.0'} dependencies: object-visit: 1.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= + + /markdown-table/2.0.0: + resolution: {integrity: sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==} + dependencies: + repeat-string: 1.6.1 + dev: true + /match-sorter/6.3.0: + resolution: {integrity: sha512-efYOf/wUpNb8FgNY+cOD2EIJI1S5I7YPKsw0LBp7wqPh5pmMS6i/wr3ZWwfwrAw1NvqTA2KUReVRWDX84lUcOQ==} dependencies: 
'@babel/runtime': 7.13.10 remove-accents: 0.4.2 dev: false - resolution: - integrity: sha512-efYOf/wUpNb8FgNY+cOD2EIJI1S5I7YPKsw0LBp7wqPh5pmMS6i/wr3ZWwfwrAw1NvqTA2KUReVRWDX84lUcOQ== + + /mathjax-full/3.2.0: + resolution: {integrity: sha512-D2EBNvUG+mJyhn+M1C858k0f2Fc4KxXvbEX2WCMXroV10212JwfYqaBJ336ECBSz5X9L5LRoamxb7AJtg3KaJA==} + dependencies: + esm: 3.2.25 + mhchemparser: 4.1.1 + mj-context-menu: 0.6.1 + speech-rule-engine: 3.3.3 + dev: true + /md5.js/1.3.5: + resolution: {integrity: sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==} dependencies: hash-base: 3.1.0 inherits: 2.0.4 safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== + + /mdast-util-definitions/4.0.0: + resolution: {integrity: sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==} + dependencies: + unist-util-visit: 2.0.3 + dev: true + + /mdast-util-find-and-replace/1.1.1: + resolution: {integrity: sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==} + dependencies: + escape-string-regexp: 4.0.0 + unist-util-is: 4.1.0 + unist-util-visit-parents: 3.1.1 + dev: true + + /mdast-util-from-markdown/0.8.5: + resolution: {integrity: sha512-2hkTXtYYnr+NubD/g6KGBS/0mFmBcifAsI0yIWRiRo0PjVs6SSOSOdtzbp6kSGnShDN6G5aWZpKQ2lWRy27mWQ==} + dependencies: + '@types/mdast': 3.0.10 + mdast-util-to-string: 2.0.0 + micromark: 2.11.4 + parse-entities: 2.0.0 + unist-util-stringify-position: 2.0.3 + transitivePeerDependencies: + - supports-color + dev: true + + /mdast-util-frontmatter/0.2.0: + resolution: {integrity: sha512-FHKL4w4S5fdt1KjJCwB0178WJ0evnyyQr5kXTM3wrOVpytD0hrkvd+AOOjU9Td8onOejCkmZ+HQRT3CZ3coHHQ==} + dependencies: + micromark-extension-frontmatter: 0.2.2 + dev: true + + /mdast-util-gfm-autolink-literal/0.1.3: + resolution: {integrity: sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==} + dependencies: + ccount: 1.1.0 + mdast-util-find-and-replace: 1.1.1 + micromark: 2.11.4 + transitivePeerDependencies: + - supports-color + dev: true + + /mdast-util-gfm-strikethrough/0.2.3: + resolution: {integrity: sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==} + dependencies: + mdast-util-to-markdown: 0.6.5 + dev: true + + /mdast-util-gfm-table/0.1.6: + resolution: {integrity: sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==} + dependencies: + markdown-table: 2.0.0 + mdast-util-to-markdown: 0.6.5 + dev: true + + /mdast-util-gfm-task-list-item/0.1.6: + resolution: {integrity: sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==} + dependencies: + mdast-util-to-markdown: 0.6.5 + dev: true + + /mdast-util-gfm/0.1.2: + resolution: {integrity: sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==} + dependencies: + mdast-util-gfm-autolink-literal: 0.1.3 + mdast-util-gfm-strikethrough: 0.2.3 + mdast-util-gfm-table: 0.1.6 + mdast-util-gfm-task-list-item: 0.1.6 + mdast-util-to-markdown: 0.6.5 + transitivePeerDependencies: + - supports-color + dev: true + + /mdast-util-math/0.1.2: + resolution: {integrity: sha512-fogAitds+wH+QRas78Yr1TwmQGN4cW/G2WRw5ePuNoJbBSPJCxIOCE8MTzHgWHVSpgkRaPQTgfzXRE1CrwWSlg==} + dependencies: + longest-streak: 2.0.4 + mdast-util-to-markdown: 0.6.5 + repeat-string: 1.6.1 + dev: true + + 
/mdast-util-to-hast/10.2.0: + resolution: {integrity: sha512-JoPBfJ3gBnHZ18icCwHR50orC9kNH81tiR1gs01D8Q5YpV6adHNO9nKNuFBCJQ941/32PT1a63UF/DitmS3amQ==} + dependencies: + '@types/mdast': 3.0.10 + '@types/unist': 2.0.6 + mdast-util-definitions: 4.0.0 + mdurl: 1.0.1 + unist-builder: 2.0.3 + unist-util-generated: 1.1.6 + unist-util-position: 3.1.0 + unist-util-visit: 2.0.3 + dev: true + + /mdast-util-to-markdown/0.6.5: + resolution: {integrity: sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==} + dependencies: + '@types/unist': 2.0.6 + longest-streak: 2.0.4 + mdast-util-to-string: 2.0.0 + parse-entities: 2.0.0 + repeat-string: 1.6.1 + zwitch: 1.0.5 + dev: true + + /mdast-util-to-string/2.0.0: + resolution: {integrity: sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==} + dev: true + /mdn-data/2.0.14: - resolution: - integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + resolution: {integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==} + /mdn-data/2.0.4: + resolution: {integrity: sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==} dev: true - resolution: - integrity: sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + + /mdurl/1.0.1: + resolution: {integrity: sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=} + dev: true + /media-typer/0.3.0: + resolution: {integrity: sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= + /memory-fs/0.4.1: + resolution: {integrity: sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=} dependencies: errno: 0.1.8 readable-stream: 2.3.7 dev: true - resolution: - integrity: sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= + /memory-fs/0.5.0: + resolution: {integrity: sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==} + engines: {node: '>=4.3.0 <5.0.0 || >=5.10'} dependencies: errno: 0.1.8 readable-stream: 2.3.7 dev: true - engines: - node: '>=4.3.0 <5.0.0 || >=5.10' - resolution: - integrity: sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== + /merge-descriptors/1.0.1: + resolution: {integrity: sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=} dev: true - resolution: - integrity: sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= + /merge-stream/2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} dev: true - resolution: - integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + /merge2/1.4.1: - engines: - node: '>= 8' - resolution: - integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + /methods/1.1.2: + resolution: {integrity: sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=} + engines: {node: '>= 0.6'} + dev: true + + /mhchemparser/4.1.1: + resolution: {integrity: sha512-R75CUN6O6e1t8bgailrF1qPq+HhVeFTM3XQ0uzI+mXTybmphy3b6h4NbLOYhemViQ3lUs+6CKRkC3Ws1TlYREA==} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + /microevent.ts/0.1.1: + resolution: {integrity: 
sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g==} dev: false - resolution: - integrity: sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g== + + /micromark-extension-frontmatter/0.2.2: + resolution: {integrity: sha512-q6nPLFCMTLtfsctAuS0Xh4vaolxSFUWUWR6PZSrXXiRy+SANGllpcqdXFv2z07l0Xz/6Hl40hK0ffNCJPH2n1A==} + dependencies: + fault: 1.0.4 + dev: true + + /micromark-extension-gfm-autolink-literal/0.5.7: + resolution: {integrity: sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==} + dependencies: + micromark: 2.11.4 + transitivePeerDependencies: + - supports-color + dev: true + + /micromark-extension-gfm-strikethrough/0.6.5: + resolution: {integrity: sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==} + dependencies: + micromark: 2.11.4 + transitivePeerDependencies: + - supports-color + dev: true + + /micromark-extension-gfm-table/0.4.3: + resolution: {integrity: sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==} + dependencies: + micromark: 2.11.4 + transitivePeerDependencies: + - supports-color + dev: true + + /micromark-extension-gfm-tagfilter/0.3.0: + resolution: {integrity: sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==} + dev: true + + /micromark-extension-gfm-task-list-item/0.3.3: + resolution: {integrity: sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==} + dependencies: + micromark: 2.11.4 + transitivePeerDependencies: + - supports-color + dev: true + + /micromark-extension-gfm/0.3.3: + resolution: {integrity: sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==} + dependencies: + micromark: 2.11.4 + micromark-extension-gfm-autolink-literal: 0.5.7 + micromark-extension-gfm-strikethrough: 0.6.5 + micromark-extension-gfm-table: 0.4.3 + micromark-extension-gfm-tagfilter: 0.3.0 + micromark-extension-gfm-task-list-item: 0.3.3 + transitivePeerDependencies: + - supports-color + dev: true + + /micromark-extension-math/0.1.2: + resolution: {integrity: sha512-ZJXsT2eVPM8VTmcw0CPSDeyonOn9SziGK3Z+nkf9Vb6xMPeU+4JMEnO6vzDL10562Favw8Vste74f54rxJ/i6Q==} + dependencies: + katex: 0.12.0 + micromark: 2.11.4 + transitivePeerDependencies: + - supports-color + dev: true + + /micromark/2.11.4: + resolution: {integrity: sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==} + dependencies: + debug: 4.3.1 + parse-entities: 2.0.0 + transitivePeerDependencies: + - supports-color + dev: true + /micromatch/3.1.10: + resolution: {integrity: sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==} + engines: {node: '>=0.10.0'} dependencies: arr-diff: 4.0.0 array-unique: 0.3.2 @@ -8903,83 +10022,107 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + /micromatch/4.0.2: + resolution: {integrity: sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==} + engines: {node: '>=8'} dependencies: braces: 3.0.2 picomatch: 2.2.2 - engines: - node: '>=8' - resolution: - integrity: sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q== + + 
/micromatch/4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + dev: true + /microseconds/0.2.0: + resolution: {integrity: sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA==} dev: false - resolution: - integrity: sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA== + /miller-rabin/4.0.1: + resolution: {integrity: sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==} + hasBin: true dependencies: bn.js: 4.12.0 brorand: 1.1.0 dev: true - hasBin: true - resolution: - integrity: sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== + /mime-db/1.46.0: + resolution: {integrity: sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ== + /mime-types/2.1.29: + resolution: {integrity: sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ==} + engines: {node: '>= 0.6'} dependencies: mime-db: 1.46.0 dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ== - /mime/1.6.0: + + /mime/1.3.6: + resolution: {integrity: sha1-WR2E02U6awtKO5343lqoEI5y5eA=} + hasBin: true dev: true - engines: - node: '>=4' + + /mime/1.6.0: + resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} + engines: {node: '>=4'} hasBin: true - resolution: - integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - /mime/2.5.2: dev: true - engines: - node: '>=4.0.0' + + /mime/2.5.2: + resolution: {integrity: sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==} + engines: {node: '>=4.0.0'} hasBin: true - resolution: - integrity: sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg== + dev: true + /mimic-fn/2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + dev: true + + /min-document/2.19.0: + resolution: {integrity: sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=} + dependencies: + dom-walk: 0.1.2 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + /min-indent/1.0.1: + resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== - /mini-create-react-context/0.4.1_prop-types@15.7.2+react@17.0.2: + + /mini-create-react-context/0.4.1_prop-types@15.7.2+react@16.14.0: + resolution: {integrity: sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==} + peerDependencies: + prop-types: ^15.0.0 + react: ^0.14.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 dependencies: '@babel/runtime': 7.13.10 
prop-types: 15.7.2 - react: 17.0.2 + react: 16.14.0 tiny-warning: 1.0.3 - dev: false + dev: true + + /mini-create-react-context/0.4.1_prop-types@15.7.2+react@17.0.2: + resolution: {integrity: sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==} peerDependencies: prop-types: ^15.0.0 react: ^0.14.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - resolution: - integrity: sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ== + dependencies: + '@babel/runtime': 7.13.10 + prop-types: 15.7.2 + react: 17.0.2 + tiny-warning: 1.0.3 + /mini-css-extract-plugin/0.11.3_webpack@4.44.2: + resolution: {integrity: sha512-n9BA8LonkOkW1/zn+IbLPQmovsL0wMb9yx75fMJQZf2X1Zoec9yTZtyMePcyu19wPkmFbzZZA6fLTotpFhQsOA==} + engines: {node: '>= 6.9.0'} + peerDependencies: + webpack: ^4.4.0 || ^5.0.0 dependencies: loader-utils: 1.4.0 normalize-url: 1.9.1 @@ -8987,82 +10130,73 @@ packages: webpack: 4.44.2_webpack-cli@4.6.0 webpack-sources: 1.4.3 dev: true - engines: - node: '>= 6.9.0' - peerDependencies: - webpack: ^4.4.0 || ^5.0.0 - resolution: - integrity: sha512-n9BA8LonkOkW1/zn+IbLPQmovsL0wMb9yx75fMJQZf2X1Zoec9yTZtyMePcyu19wPkmFbzZZA6fLTotpFhQsOA== + /mini-store/3.0.6_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-YzffKHbYsMQGUWQRKdsearR79QsMzzJcDDmZKlJBqt5JNkqpyJHYlK6gP61O36X+sLf76sO9G6mhKBe83gIZIQ==} + peerDependencies: + react: '>=16.9.0' + react-dom: '>=16.9.0' dependencies: hoist-non-react-statics: 3.3.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 shallowequal: 1.1.0 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-YzffKHbYsMQGUWQRKdsearR79QsMzzJcDDmZKlJBqt5JNkqpyJHYlK6gP61O36X+sLf76sO9G6mhKBe83gIZIQ== + /minimalistic-assert/1.0.1: + resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==} dev: true - resolution: - integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + /minimalistic-crypto-utils/1.0.1: + resolution: {integrity: sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=} dev: true - resolution: - integrity: sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= + /minimatch/3.0.4: + resolution: {integrity: sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==} dependencies: brace-expansion: 1.1.11 - resolution: - integrity: sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + /minimist/1.2.5: - resolution: - integrity: sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + resolution: {integrity: sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==} + /minipass-collect/1.0.2: + resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==} + engines: {node: '>= 8'} dependencies: minipass: 3.1.3 dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== + /minipass-flush/1.0.5: + resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} + engines: {node: '>= 8'} dependencies: minipass: 3.1.3 dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== + /minipass-pipeline/1.2.4: + resolution: 
{integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} + engines: {node: '>=8'} dependencies: minipass: 3.1.3 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== + /minipass/3.1.3: + resolution: {integrity: sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==} + engines: {node: '>=8'} dependencies: yallist: 4.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg== + /minizlib/2.1.2: + resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} + engines: {node: '>= 8'} dependencies: minipass: 3.1.3 yallist: 4.0.0 dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== + /mississippi/3.0.0: + resolution: {integrity: sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA==} + engines: {node: '>=4.0.0'} dependencies: concat-stream: 1.6.2 duplexify: 3.7.1 @@ -9075,37 +10209,52 @@ packages: stream-each: 1.2.3 through2: 2.0.5 dev: true - engines: - node: '>=4.0.0' - resolution: - integrity: sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== + /mixin-deep/1.3.2: + resolution: {integrity: sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==} + engines: {node: '>=0.10.0'} dependencies: for-in: 1.0.2 is-extendable: 1.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + + /mj-context-menu/0.6.1: + resolution: {integrity: sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA==} + dev: true + /mkdirp/0.5.5: + resolution: {integrity: sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==} + hasBin: true dependencies: minimist: 1.2.5 dev: true - hasBin: true - resolution: - integrity: sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== + /mkdirp/1.0.4: - dev: true - engines: - node: '>=10' + resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} + engines: {node: '>=10'} hasBin: true - resolution: - integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + dev: true + /moment/2.29.1: - dev: false - resolution: - integrity: sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== - /move-concurrently/1.0.1: + resolution: {integrity: sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==} + dev: true + + /monaco-editor-webpack-plugin/7.0.1_d758ab496f5c143a3f97c41b30684737: + resolution: {integrity: sha512-M8qIqizltrPlIbrb73cZdTWfU9sIsUVFvAZkL3KGjAHmVWEJ0hZKa/uad14JuOckc0GwnCaoGHvMoYtJjVyCzw==} + peerDependencies: + monaco-editor: '>= 0.31.0' + webpack: ^4.5.0 || 5.x + dependencies: + loader-utils: 2.0.4 + monaco-editor: 0.34.1 + webpack: 4.44.2_webpack-cli@4.6.0 + dev: true + + /monaco-editor/0.34.1: + resolution: {integrity: 
sha512-FKc80TyiMaruhJKKPz5SpJPIjL+dflGvz4CpuThaPMc94AyN7SeC9HQ8hrvaxX7EyHdJcUY5i4D0gNyJj1vSZQ==} + dev: false + + /move-concurrently/1.0.1: + resolution: {integrity: sha1-viwAX9oy4LKa8fBdfEszIUxwH5I=} dependencies: aproba: 1.2.0 copy-concurrently: 1.0.5 @@ -9114,40 +10263,47 @@ packages: rimraf: 2.7.1 run-queue: 1.0.3 dev: true - resolution: - integrity: sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= + + /mpld3/0.5.2: + resolution: {integrity: sha512-9Asjh2evbVnbDn3x7ubVEZJ06v9Gl+DDKixLmaTwBu4Zy5M6vj7A9jv3ZVYoM8pMfEmT+VD5ot/m5DJItx29vg==} + dev: false + /ms/2.0.0: - resolution: - integrity: sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + resolution: {integrity: sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=} + /ms/2.1.1: + resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==} dev: true - resolution: - integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + /ms/2.1.2: - resolution: - integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + /ms/2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} dev: true - resolution: - integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + /multicast-dns-service-types/1.1.0: + resolution: {integrity: sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=} dev: true - resolution: - integrity: sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= + /multicast-dns/6.2.3: + resolution: {integrity: sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==} + hasBin: true dependencies: dns-packet: 1.3.1 thunky: 1.1.0 dev: true - hasBin: true - resolution: - integrity: sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== + /nan/2.14.2: + resolution: {integrity: sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==} dev: true optional: true - resolution: - integrity: sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ== + /nano-css/5.3.1_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-ENPIyNzANQRyYVvb62ajDd7PAyIgS2LIUnT9ewih4yrXSZX4hKoUwssy8WjUH++kEOA5wUTMgNnV7ko5n34kUA==} + peerDependencies: + react: '*' + react-dom: '*' dependencies: css-tree: 1.1.2 csstype: 3.0.7 @@ -9160,25 +10316,22 @@ packages: stacktrace-js: 2.0.2 stylis: 4.0.9 dev: false - peerDependencies: - react: '*' - react-dom: '*' - resolution: - integrity: sha512-ENPIyNzANQRyYVvb62ajDd7PAyIgS2LIUnT9ewih4yrXSZX4hKoUwssy8WjUH++kEOA5wUTMgNnV7ko5n34kUA== + /nano-time/1.0.0: + resolution: {integrity: sha1-sFVPaa2J4i0JB/ehKwmTpdlhN+8=} dependencies: big-integer: 1.6.48 dev: false - resolution: - integrity: sha1-sFVPaa2J4i0JB/ehKwmTpdlhN+8= + /nanoid/3.1.22: - dev: true - engines: - node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1 + resolution: {integrity: sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - resolution: - integrity: sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ== + dev: true + /nanomatch/1.2.13: + resolution: {integrity: 
sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==} + engines: {node: '>=0.10.0'} dependencies: arr-diff: 4.0.0 array-unique: 0.3.2 @@ -9191,67 +10344,57 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - /native-request/1.0.8: + + /native-request/1.1.0: + resolution: {integrity: sha512-uZ5rQaeRn15XmpgE0xoPL8YWqcX90VtCFglYwAgkvKM5e8fog+vePLAhHxuuv/gRkrQxIeh5U3q9sMNUrENqWw==} + requiresBuild: true dev: true optional: true - resolution: - integrity: sha512-vU2JojJVelUGp6jRcLwToPoWGxSx23z/0iX+I77J3Ht17rf2INGjrhOoQnjVo60nQd8wVsgzKkPfRXBiVdD2ag== + /native-url/0.2.6: + resolution: {integrity: sha512-k4bDC87WtgrdD362gZz6zoiXQrl40kYlBmpfmSjwRO1VU0V5ccwJTlxuE72F6m3V0vc1xOf6n3UCP9QyerRqmA==} dependencies: querystring: 0.2.1 dev: true - resolution: - integrity: sha512-k4bDC87WtgrdD362gZz6zoiXQrl40kYlBmpfmSjwRO1VU0V5ccwJTlxuE72F6m3V0vc1xOf6n3UCP9QyerRqmA== + /natural-compare/1.4.0: + resolution: {integrity: sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=} dev: true - resolution: - integrity: sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + /negotiator/0.6.2: + resolution: {integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== + /neo-async/2.6.2: + resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} dev: true - resolution: - integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + /next-tick/1.0.0: + resolution: {integrity: sha1-yobR/ogoFpsBICCOPchCS524NCw=} dev: true - resolution: - integrity: sha1-yobR/ogoFpsBICCOPchCS524NCw= + /nice-try/1.0.5: + resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} dev: true - resolution: - integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - /no-case/2.3.2: - dependencies: - lower-case: 1.1.4 - dev: false - resolution: - integrity: sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== + /no-case/3.0.4: + resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} dependencies: lower-case: 2.0.2 - tslib: 2.1.0 - dev: true - resolution: - integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + tslib: 2.4.1 + /node-forge/0.10.0: + resolution: {integrity: sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==} + engines: {node: '>= 6.0.0'} dev: true - engines: - node: '>= 6.0.0' - resolution: - integrity: sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== + /node-int64/0.4.0: + resolution: {integrity: sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=} dev: true - resolution: - integrity: sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= + /node-libs-browser/2.2.1: + resolution: {integrity: sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==} dependencies: assert: 1.5.0 browserify-zlib: 0.2.0 @@ -9277,15 +10420,15 @@ packages: util: 0.11.1 vm-browserify: 1.1.2 
dev: true - resolution: - integrity: sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== + /node-modules-regexp/1.0.0: + resolution: {integrity: sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= + /node-notifier/8.0.2: + resolution: {integrity: sha512-oJP/9NAdd9+x2Q+rfphB2RJCHjod70RcRLjosiPMMu5gjIfwVnOUGq2nbTjTUbmy0DJ/tFIVT30+Qe3nzl4TJg==} + requiresBuild: true dependencies: growly: 1.3.0 is-wsl: 2.2.0 @@ -9295,251 +10438,236 @@ packages: which: 2.0.2 dev: true optional: true - resolution: - integrity: sha512-oJP/9NAdd9+x2Q+rfphB2RJCHjod70RcRLjosiPMMu5gjIfwVnOUGq2nbTjTUbmy0DJ/tFIVT30+Qe3nzl4TJg== + /node-releases/1.1.71: - resolution: - integrity: sha512-zR6HoT6LrLCRBwukmrVbHv0EpEQjksO6GmFcZQQuCAy139BEsoVKPYnf3jongYW83fAa1torLGYwxxky/p28sg== + resolution: {integrity: sha512-zR6HoT6LrLCRBwukmrVbHv0EpEQjksO6GmFcZQQuCAy139BEsoVKPYnf3jongYW83fAa1torLGYwxxky/p28sg==} + /normalize-package-data/2.5.0: + resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} dependencies: hosted-git-info: 2.8.8 resolve: 1.18.1 semver: 5.7.1 validate-npm-package-license: 3.0.4 dev: true - resolution: - integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + /normalize-path/2.1.1: + resolution: {integrity: sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=} + engines: {node: '>=0.10.0'} dependencies: remove-trailing-separator: 1.1.0 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= + /normalize-path/3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + /normalize-range/0.1.2: + resolution: {integrity: sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= + /normalize-url/1.9.1: + resolution: {integrity: sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=} + engines: {node: '>=4'} dependencies: object-assign: 4.1.1 prepend-http: 1.0.4 query-string: 4.3.4 sort-keys: 1.1.2 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-LMDWazHqIwNkWENuNiDYWVTGbDw= + /normalize-url/3.3.0: + resolution: {integrity: sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== + /npm-run-path/2.0.2: + resolution: {integrity: sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=} + engines: {node: '>=4'} dependencies: path-key: 2.0.1 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= + /npm-run-path/4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} dependencies: path-key: 3.1.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + /nth-check/1.0.2: + resolution: {integrity: 
sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==} dependencies: boolbase: 1.0.0 dev: true - resolution: - integrity: sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + /num2fraction/1.2.2: + resolution: {integrity: sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=} dev: true - resolution: - integrity: sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= + + /number-precision/1.6.0: + resolution: {integrity: sha512-05OLPgbgmnixJw+VvEh18yNPUo3iyp4BEWJcrLu4X9W05KmMifN7Mu5exYvQXqxxeNWhvIF+j3Rij+HmddM/hQ==} + dev: false + /nwsapi/2.2.0: + resolution: {integrity: sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==} dev: true - resolution: - integrity: sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== + /oauth-sign/0.9.0: + resolution: {integrity: sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==} dev: true - resolution: - integrity: sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + /object-assign/4.1.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + /object-copy/0.1.0: + resolution: {integrity: sha1-fn2Fi3gb18mRpBupde04EnVOmYw=} + engines: {node: '>=0.10.0'} dependencies: copy-descriptor: 0.1.1 define-property: 0.2.5 kind-of: 3.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-fn2Fi3gb18mRpBupde04EnVOmYw= + + /object-inspect/1.11.0: + resolution: {integrity: sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==} + /object-inspect/1.9.0: + resolution: {integrity: sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==} dev: true - resolution: - integrity: sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw== + /object-is/1.1.5: + resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + /object-keys/1.1.1: + resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} + engines: {node: '>= 0.4'} dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + /object-visit/1.0.1: + resolution: {integrity: sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=} + engines: {node: '>=0.10.0'} dependencies: isobject: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= + /object.assign/4.1.2: + resolution: {integrity: sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 has-symbols: 1.0.2 object-keys: 1.1.1 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + /object.entries/1.1.3: + resolution: {integrity: 
sha512-ym7h7OZebNS96hn5IJeyUmaWhaSM4SVtAPPfNLQEI2MYWCO2egsITb9nab2+i/Pwibx+R0mtn+ltKJXRSeTMGg==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 es-abstract: 1.18.0 has: 1.0.3 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-ym7h7OZebNS96hn5IJeyUmaWhaSM4SVtAPPfNLQEI2MYWCO2egsITb9nab2+i/Pwibx+R0mtn+ltKJXRSeTMGg== + /object.fromentries/2.0.4: + resolution: {integrity: sha512-EsFBshs5RUUpQEY1D4q/m59kMfz4YJvxuNCJcv/jWwOJr34EaVnG11ZrZa0UHB3wnzV1wx8m58T4hQL8IuNXlQ==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 es-abstract: 1.18.0 has: 1.0.3 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-EsFBshs5RUUpQEY1D4q/m59kMfz4YJvxuNCJcv/jWwOJr34EaVnG11ZrZa0UHB3wnzV1wx8m58T4hQL8IuNXlQ== + /object.getownpropertydescriptors/2.1.2: + resolution: {integrity: sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==} + engines: {node: '>= 0.8'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 es-abstract: 1.18.0 dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ== + /object.pick/1.3.0: + resolution: {integrity: sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=} + engines: {node: '>=0.10.0'} dependencies: isobject: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= + /object.values/1.1.3: + resolution: {integrity: sha512-nkF6PfDB9alkOUxpf1HNm/QlkeW3SReqL5WXeBLpEJJnlPSvRaDQpW3gQTksTN3fgJX4hL42RzKyOin6ff3tyw==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 es-abstract: 1.18.0 has: 1.0.3 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-nkF6PfDB9alkOUxpf1HNm/QlkeW3SReqL5WXeBLpEJJnlPSvRaDQpW3gQTksTN3fgJX4hL42RzKyOin6ff3tyw== + /obuf/1.1.2: + resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} dev: true - resolution: - integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + /on-finished/2.3.0: + resolution: {integrity: sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=} + engines: {node: '>= 0.8'} dependencies: ee-first: 1.1.1 dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= + /on-headers/1.0.2: + resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==} + engines: {node: '>= 0.8'} dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + /once/1.4.0: + resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=} dependencies: wrappy: 1.0.2 - resolution: - integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + /onetime/5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} dependencies: mimic-fn: 2.1.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + /open/7.4.2: + resolution: {integrity: sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==} + engines: {node: '>=8'} dependencies: is-docker: 2.1.1 is-wsl: 2.2.0 dev: false - engines: - node: '>=8' - resolution: - integrity: 
sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q== + /opn/5.5.0: + resolution: {integrity: sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==} + engines: {node: '>=4'} dependencies: is-wsl: 1.1.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== + /optimize-css-assets-webpack-plugin/5.0.4_webpack@4.44.2: + resolution: {integrity: sha512-wqd6FdI2a5/FdoiCNNkEvLeA//lHHfG24Ln2Xm2qqdIk4aOlsR18jwpyOihqQ8849W3qu2DX8fOYxpvTMj+93A==} + peerDependencies: + webpack: ^4.0.0 dependencies: cssnano: 4.1.10 last-call-webpack-plugin: 3.0.0 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - peerDependencies: - webpack: ^4.0.0 - resolution: - integrity: sha512-wqd6FdI2a5/FdoiCNNkEvLeA//lHHfG24Ln2Xm2qqdIk4aOlsR18jwpyOihqQ8849W3qu2DX8fOYxpvTMj+93A== + /optionator/0.8.3: + resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==} + engines: {node: '>= 0.8.0'} dependencies: deep-is: 0.1.3 fast-levenshtein: 2.0.6 @@ -9548,11 +10676,10 @@ packages: type-check: 0.3.2 word-wrap: 1.2.3 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + /optionator/0.9.1: + resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} + engines: {node: '>= 0.8.0'} dependencies: deep-is: 0.1.3 fast-levenshtein: 2.0.6 @@ -9561,138 +10688,121 @@ packages: type-check: 0.4.0 word-wrap: 1.2.3 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + /original/1.0.2: + resolution: {integrity: sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg==} dependencies: url-parse: 1.5.1 dev: true - resolution: - integrity: sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== + /os-browserify/0.3.0: + resolution: {integrity: sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=} dev: true - resolution: - integrity: sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= + /p-each-series/2.2.0: + resolution: {integrity: sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA== + /p-finally/1.0.0: + resolution: {integrity: sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= + /p-limit/1.3.0: + resolution: {integrity: sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==} + engines: {node: '>=4'} dependencies: p-try: 1.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + /p-limit/2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} dependencies: p-try: 2.2.0 - engines: - node: '>=6' - resolution: - integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + 
/p-limit/3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} dependencies: yocto-queue: 0.1.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + /p-locate/2.0.0: + resolution: {integrity: sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=} + engines: {node: '>=4'} dependencies: p-limit: 1.3.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + /p-locate/3.0.0: + resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==} + engines: {node: '>=6'} dependencies: p-limit: 2.3.0 - engines: - node: '>=6' - resolution: - integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + /p-locate/4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} dependencies: p-limit: 2.3.0 - engines: - node: '>=8' - resolution: - integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + /p-map/2.1.0: + resolution: {integrity: sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== + /p-map/4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} dependencies: aggregate-error: 3.1.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + /p-retry/3.0.1: + resolution: {integrity: sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==} + engines: {node: '>=6'} dependencies: retry: 0.12.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== + /p-try/1.0.0: + resolution: {integrity: sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + /p-try/2.2.0: - engines: - node: '>=6' - resolution: - integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + /pako/1.0.11: + resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==} dev: true - resolution: - integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== + /parallel-transform/1.2.0: + resolution: {integrity: sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg==} dependencies: cyclist: 1.0.1 inherits: 2.0.4 readable-stream: 2.3.7 dev: true - resolution: - integrity: sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== + /param-case/3.0.4: + resolution: {integrity: 
sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} dependencies: dot-case: 3.0.4 - tslib: 2.1.0 - dev: true - resolution: - integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + tslib: 2.4.1 + /parent-module/1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} dependencies: callsites: 3.1.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + /parse-asn1/5.1.6: + resolution: {integrity: sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==} dependencies: asn1.js: 5.4.1 browserify-aes: 1.2.0 @@ -9700,131 +10810,122 @@ packages: pbkdf2: 3.1.1 safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== + + /parse-entities/2.0.0: + resolution: {integrity: sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==} + dependencies: + character-entities: 1.2.4 + character-entities-legacy: 1.1.4 + character-reference-invalid: 1.1.4 + is-alphanumerical: 1.0.4 + is-decimal: 1.0.4 + is-hexadecimal: 1.0.4 + dev: true + /parse-json/2.2.0: + resolution: {integrity: sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=} + engines: {node: '>=0.10.0'} dependencies: error-ex: 1.3.2 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= + /parse-json/4.0.0: + resolution: {integrity: sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=} + engines: {node: '>=4'} dependencies: error-ex: 1.3.2 json-parse-better-errors: 1.0.2 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= + /parse-json/5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} dependencies: '@babel/code-frame': 7.12.13 error-ex: 1.3.2 json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.1.6 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + /parse5/6.0.1: + resolution: {integrity: sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==} dev: true - resolution: - integrity: sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + /parseurl/1.3.3: + resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - /pascal-case/2.0.1: - dependencies: - camel-case: 3.0.0 - upper-case-first: 1.1.2 - dev: false - resolution: - integrity: sha1-LVeNNFX2YNpl7KGO+VtODekSdh4= + /pascal-case/3.1.2: + resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} dependencies: no-case: 3.0.4 - tslib: 2.1.0 - dev: true - resolution: - integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + tslib: 2.4.1 + /pascalcase/0.1.1: - engines: - node: '>=0.10.0' - resolution: - integrity: 
sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= + resolution: {integrity: sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=} + engines: {node: '>=0.10.0'} + /path-browserify/0.0.1: + resolution: {integrity: sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==} dev: true - resolution: - integrity: sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== + /path-dirname/1.0.2: + resolution: {integrity: sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=} dev: true - resolution: - integrity: sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= + /path-exists/3.0.0: - engines: - node: '>=4' - resolution: - integrity: sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + resolution: {integrity: sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=} + engines: {node: '>=4'} + /path-exists/4.0.0: - engines: - node: '>=8' - resolution: - integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + /path-is-absolute/1.0.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + resolution: {integrity: sha1-F0uSaHNVNP+8es5r9TpanhtcX18=} + engines: {node: '>=0.10.0'} + /path-is-inside/1.0.2: + resolution: {integrity: sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=} dev: true - resolution: - integrity: sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= + /path-key/2.0.1: + resolution: {integrity: sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= + /path-key/3.1.1: - engines: - node: '>=8' - resolution: - integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + /path-parse/1.0.6: + resolution: {integrity: sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==} dev: true - resolution: - integrity: sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + /path-to-regexp/0.1.7: + resolution: {integrity: sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=} dev: true - resolution: - integrity: sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= + /path-to-regexp/1.8.0: + resolution: {integrity: sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==} dependencies: isarray: 0.0.1 - dev: false - resolution: - integrity: sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + /path-type/2.0.0: + resolution: {integrity: sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=} + engines: {node: '>=4'} dependencies: pify: 2.3.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM= + /path-type/4.0.0: - engines: - node: '>=8' - resolution: - integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + /pbkdf2/3.1.1: + resolution: {integrity: sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg==} + engines: {node: '>=0.12'} dependencies: create-hash: 1.2.0 create-hmac: 1.1.7 @@ -9832,188 +10933,173 @@ packages: safe-buffer: 5.2.1 sha.js: 2.4.11 dev: true - 
engines: - node: '>=0.12' - resolution: - integrity: sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg== + /performance-now/2.1.0: - resolution: - integrity: sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + resolution: {integrity: sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=} + /picomatch/2.2.2: - engines: - node: '>=8.6' - resolution: - integrity: sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== + resolution: {integrity: sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==} + engines: {node: '>=8.6'} + + /picomatch/2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + dev: true + /pify/2.3.0: + resolution: {integrity: sha1-7RQaasBDqEnqWISY59yosVMw6Qw=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-7RQaasBDqEnqWISY59yosVMw6Qw= + /pify/4.0.1: - engines: - node: '>=6' - resolution: - integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} + engines: {node: '>=6'} + /pinkie-promise/2.0.1: + resolution: {integrity: sha1-ITXW36ejWMBprJsXh3YogihFD/o=} + engines: {node: '>=0.10.0'} dependencies: pinkie: 2.0.4 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-ITXW36ejWMBprJsXh3YogihFD/o= + /pinkie/2.0.4: + resolution: {integrity: sha1-clVrgM+g1IqXToDnckjoDtT3+HA=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-clVrgM+g1IqXToDnckjoDtT3+HA= + /pirates/4.0.1: + resolution: {integrity: sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==} + engines: {node: '>= 6'} dependencies: node-modules-regexp: 1.0.0 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== + /pkg-dir/2.0.0: + resolution: {integrity: sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=} + engines: {node: '>=4'} dependencies: find-up: 2.1.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= + /pkg-dir/3.0.0: + resolution: {integrity: sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==} + engines: {node: '>=6'} dependencies: find-up: 3.0.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== + /pkg-dir/4.2.0: + resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} + engines: {node: '>=8'} dependencies: find-up: 4.1.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + /pkg-up/3.1.0: + resolution: {integrity: sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==} + engines: {node: '>=8'} dependencies: find-up: 3.0.0 dev: false - engines: - node: '>=8' - resolution: - integrity: sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + /please-upgrade-node/3.2.0: + resolution: {integrity: 
sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==} dependencies: semver-compare: 1.0.0 dev: true - resolution: - integrity: sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg== + /pnp-webpack-plugin/1.6.4_typescript@4.2.3: + resolution: {integrity: sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==} + engines: {node: '>=6'} dependencies: ts-pnp: 1.2.0_typescript@4.2.3 + transitivePeerDependencies: + - typescript dev: true - engines: - node: '>=6' - peerDependencies: - typescript: '*' - resolution: - integrity: sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg== + /portfinder/1.0.28: + resolution: {integrity: sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA==} + engines: {node: '>= 0.12.0'} dependencies: async: 2.6.3 debug: 3.2.7 mkdirp: 0.5.5 dev: true - engines: - node: '>= 0.12.0' - resolution: - integrity: sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA== + /posix-character-classes/0.1.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= + resolution: {integrity: sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=} + engines: {node: '>=0.10.0'} + /postcss-attribute-case-insensitive/4.0.2: + resolution: {integrity: sha512-clkFxk/9pcdb4Vkn0hAHq3YnxBQ2p0CGD1dy24jN+reBck+EWxMbxSUqN4Yj7t0w8csl87K6p0gxBe1utkJsYA==} dependencies: postcss: 7.0.35 postcss-selector-parser: 6.0.4 dev: true - resolution: - integrity: sha512-clkFxk/9pcdb4Vkn0hAHq3YnxBQ2p0CGD1dy24jN+reBck+EWxMbxSUqN4Yj7t0w8csl87K6p0gxBe1utkJsYA== + /postcss-browser-comments/3.0.0_browserslist@4.16.3: + resolution: {integrity: sha512-qfVjLfq7HFd2e0HW4s1dvU8X080OZdG46fFbIBFjW7US7YPDcWfRvdElvwMJr2LI6hMmD+7LnH2HcmXTs+uOig==} + engines: {node: '>=8.0.0'} + peerDependencies: + browserslist: ^4 dependencies: browserslist: 4.16.3 postcss: 7.0.35 dev: true - engines: - node: '>=8.0.0' - peerDependencies: - browserslist: ^4 - resolution: - integrity: sha512-qfVjLfq7HFd2e0HW4s1dvU8X080OZdG46fFbIBFjW7US7YPDcWfRvdElvwMJr2LI6hMmD+7LnH2HcmXTs+uOig== + /postcss-calc/7.0.5: + resolution: {integrity: sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg==} dependencies: postcss: 7.0.35 postcss-selector-parser: 6.0.4 postcss-value-parser: 4.1.0 dev: true - resolution: - integrity: sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg== + /postcss-color-functional-notation/2.0.1: + resolution: {integrity: sha512-ZBARCypjEDofW4P6IdPVTLhDNXPRn8T2s1zHbZidW6rPaaZvcnCS2soYFIQJrMZSxiePJ2XIYTlcb2ztr/eT2g==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-ZBARCypjEDofW4P6IdPVTLhDNXPRn8T2s1zHbZidW6rPaaZvcnCS2soYFIQJrMZSxiePJ2XIYTlcb2ztr/eT2g== + /postcss-color-gray/5.0.0: + resolution: {integrity: sha512-q6BuRnAGKM/ZRpfDascZlIZPjvwsRye7UDNalqVz3s7GDxMtqPY6+Q871liNxsonUw8oC61OG+PSaysYpl1bnw==} + engines: {node: '>=6.0.0'} dependencies: '@csstools/convert-colors': 1.4.0 postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-q6BuRnAGKM/ZRpfDascZlIZPjvwsRye7UDNalqVz3s7GDxMtqPY6+Q871liNxsonUw8oC61OG+PSaysYpl1bnw== + /postcss-color-hex-alpha/5.0.3: + resolution: {integrity: 
sha512-PF4GDel8q3kkreVXKLAGNpHKilXsZ6xuu+mOQMHWHLPNyjiUBOr75sp5ZKJfmv1MCus5/DWUGcK9hm6qHEnXYw==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-PF4GDel8q3kkreVXKLAGNpHKilXsZ6xuu+mOQMHWHLPNyjiUBOr75sp5ZKJfmv1MCus5/DWUGcK9hm6qHEnXYw== + /postcss-color-mod-function/3.0.3: + resolution: {integrity: sha512-YP4VG+xufxaVtzV6ZmhEtc+/aTXH3d0JLpnYfxqTvwZPbJhWqp8bSY3nfNzNRFLgB4XSaBA82OE4VjOOKpCdVQ==} + engines: {node: '>=6.0.0'} dependencies: '@csstools/convert-colors': 1.4.0 postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-YP4VG+xufxaVtzV6ZmhEtc+/aTXH3d0JLpnYfxqTvwZPbJhWqp8bSY3nfNzNRFLgB4XSaBA82OE4VjOOKpCdVQ== + /postcss-color-rebeccapurple/4.0.1: + resolution: {integrity: sha512-aAe3OhkS6qJXBbqzvZth2Au4V3KieR5sRQ4ptb2b2O8wgvB3SJBsdG+jsn2BZbbwekDG8nTfcCNKcSfe/lEy8g==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-aAe3OhkS6qJXBbqzvZth2Au4V3KieR5sRQ4ptb2b2O8wgvB3SJBsdG+jsn2BZbbwekDG8nTfcCNKcSfe/lEy8g== + /postcss-colormin/4.0.3: + resolution: {integrity: sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw==} + engines: {node: '>=6.9.0'} dependencies: browserslist: 4.16.3 color: 3.1.3 @@ -10021,214 +11107,199 @@ packages: postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw== + /postcss-convert-values/4.0.1: + resolution: {integrity: sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ== + /postcss-custom-media/7.0.8: + resolution: {integrity: sha512-c9s5iX0Ge15o00HKbuRuTqNndsJUbaXdiNsksnVH8H4gdc+zbLzr/UasOwNG6CTDpLFekVY4672eWdiiWu2GUg==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-c9s5iX0Ge15o00HKbuRuTqNndsJUbaXdiNsksnVH8H4gdc+zbLzr/UasOwNG6CTDpLFekVY4672eWdiiWu2GUg== + /postcss-custom-properties/8.0.11: + resolution: {integrity: sha512-nm+o0eLdYqdnJ5abAJeXp4CEU1c1k+eB2yMCvhgzsds/e0umabFrN6HoTy/8Q4K5ilxERdl/JD1LO5ANoYBeMA==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-nm+o0eLdYqdnJ5abAJeXp4CEU1c1k+eB2yMCvhgzsds/e0umabFrN6HoTy/8Q4K5ilxERdl/JD1LO5ANoYBeMA== + /postcss-custom-selectors/5.1.2: + resolution: {integrity: sha512-DSGDhqinCqXqlS4R7KGxL1OSycd1lydugJ1ky4iRXPHdBRiozyMHrdu0H3o7qNOCiZwySZTUI5MV0T8QhCLu+w==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-selector-parser: 5.0.0 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-DSGDhqinCqXqlS4R7KGxL1OSycd1lydugJ1ky4iRXPHdBRiozyMHrdu0H3o7qNOCiZwySZTUI5MV0T8QhCLu+w== + /postcss-dir-pseudo-class/5.0.0: + resolution: {integrity: sha512-3pm4oq8HYWMZePJY+5ANriPs3P07q+LW6FAdTlkFH2XqDdP4HeeJYMOzn0HYLhRSjBO3fhiqSwwU9xEULSrPgw==} + engines: {node: '>=4.0.0'} dependencies: postcss: 7.0.35 postcss-selector-parser: 5.0.0 dev: true - engines: - node: 
'>=4.0.0' - resolution: - integrity: sha512-3pm4oq8HYWMZePJY+5ANriPs3P07q+LW6FAdTlkFH2XqDdP4HeeJYMOzn0HYLhRSjBO3fhiqSwwU9xEULSrPgw== + /postcss-discard-comments/4.0.2: + resolution: {integrity: sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg== + /postcss-discard-duplicates/4.0.2: + resolution: {integrity: sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ== + /postcss-discard-empty/4.0.1: + resolution: {integrity: sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w== + /postcss-discard-overridden/4.0.1: + resolution: {integrity: sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg== + /postcss-double-position-gradients/1.0.0: + resolution: {integrity: sha512-G+nV8EnQq25fOI8CH/B6krEohGWnF5+3A6H/+JEpOncu5dCnkS1QQ6+ct3Jkaepw1NGVqqOZH6lqrm244mCftA==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-G+nV8EnQq25fOI8CH/B6krEohGWnF5+3A6H/+JEpOncu5dCnkS1QQ6+ct3Jkaepw1NGVqqOZH6lqrm244mCftA== + /postcss-env-function/2.0.2: + resolution: {integrity: sha512-rwac4BuZlITeUbiBq60h/xbLzXY43qOsIErngWa4l7Mt+RaSkT7QBjXVGTcBHupykkblHMDrBFh30zchYPaOUw==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-rwac4BuZlITeUbiBq60h/xbLzXY43qOsIErngWa4l7Mt+RaSkT7QBjXVGTcBHupykkblHMDrBFh30zchYPaOUw== + /postcss-flexbugs-fixes/4.2.1: + resolution: {integrity: sha512-9SiofaZ9CWpQWxOwRh1b/r85KD5y7GgvsNt1056k6OYLvWUun0czCvogfJgylC22uJTwW1KzY3Gz65NZRlvoiQ==} dependencies: postcss: 7.0.35 dev: true - resolution: - integrity: sha512-9SiofaZ9CWpQWxOwRh1b/r85KD5y7GgvsNt1056k6OYLvWUun0czCvogfJgylC22uJTwW1KzY3Gz65NZRlvoiQ== + /postcss-focus-visible/4.0.0: + resolution: {integrity: sha512-Z5CkWBw0+idJHSV6+Bgf2peDOFf/x4o+vX/pwcNYrWpXFrSfTkQ3JQ1ojrq9yS+upnAlNRHeg8uEwFTgorjI8g==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-Z5CkWBw0+idJHSV6+Bgf2peDOFf/x4o+vX/pwcNYrWpXFrSfTkQ3JQ1ojrq9yS+upnAlNRHeg8uEwFTgorjI8g== + /postcss-focus-within/3.0.0: + resolution: {integrity: sha512-W0APui8jQeBKbCGZudW37EeMCjDeVxKgiYfIIEo8Bdh5SpB9sxds/Iq8SEuzS0Q4YFOlG7EPFulbbxujpkrV2w==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-W0APui8jQeBKbCGZudW37EeMCjDeVxKgiYfIIEo8Bdh5SpB9sxds/Iq8SEuzS0Q4YFOlG7EPFulbbxujpkrV2w== + 
/postcss-font-variant/4.0.1: + resolution: {integrity: sha512-I3ADQSTNtLTTd8uxZhtSOrTCQ9G4qUVKPjHiDk0bV75QSxXjVWiJVJ2VLdspGUi9fbW9BcjKJoRvxAH1pckqmA==} dependencies: postcss: 7.0.35 dev: true - resolution: - integrity: sha512-I3ADQSTNtLTTd8uxZhtSOrTCQ9G4qUVKPjHiDk0bV75QSxXjVWiJVJ2VLdspGUi9fbW9BcjKJoRvxAH1pckqmA== + /postcss-gap-properties/2.0.0: + resolution: {integrity: sha512-QZSqDaMgXCHuHTEzMsS2KfVDOq7ZFiknSpkrPJY6jmxbugUPTuSzs/vuE5I3zv0WAS+3vhrlqhijiprnuQfzmg==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-QZSqDaMgXCHuHTEzMsS2KfVDOq7ZFiknSpkrPJY6jmxbugUPTuSzs/vuE5I3zv0WAS+3vhrlqhijiprnuQfzmg== + /postcss-image-set-function/3.0.1: + resolution: {integrity: sha512-oPTcFFip5LZy8Y/whto91L9xdRHCWEMs3e1MdJxhgt4jy2WYXfhkng59fH5qLXSCPN8k4n94p1Czrfe5IOkKUw==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-oPTcFFip5LZy8Y/whto91L9xdRHCWEMs3e1MdJxhgt4jy2WYXfhkng59fH5qLXSCPN8k4n94p1Czrfe5IOkKUw== + /postcss-initial/3.0.2: + resolution: {integrity: sha512-ugA2wKonC0xeNHgirR4D3VWHs2JcU08WAi1KFLVcnb7IN89phID6Qtg2RIctWbnvp1TM2BOmDtX8GGLCKdR8YA==} dependencies: lodash.template: 4.5.0 postcss: 7.0.35 dev: true - resolution: - integrity: sha512-ugA2wKonC0xeNHgirR4D3VWHs2JcU08WAi1KFLVcnb7IN89phID6Qtg2RIctWbnvp1TM2BOmDtX8GGLCKdR8YA== + + /postcss-js/2.0.3: + resolution: {integrity: sha512-zS59pAk3deu6dVHyrGqmC3oDXBdNdajk4k1RyxeVXCrcEDBUBHoIhE4QTsmhxgzXxsaqFDAkUZfmMa5f/N/79w==} + dependencies: + camelcase-css: 2.0.1 + postcss: 7.0.35 + dev: true + /postcss-lab-function/2.0.1: + resolution: {integrity: sha512-whLy1IeZKY+3fYdqQFuDBf8Auw+qFuVnChWjmxm/UhHWqNHZx+B99EwxTvGYmUBqe3Fjxs4L1BoZTJmPu6usVg==} + engines: {node: '>=6.0.0'} dependencies: '@csstools/convert-colors': 1.4.0 postcss: 7.0.35 postcss-values-parser: 2.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-whLy1IeZKY+3fYdqQFuDBf8Auw+qFuVnChWjmxm/UhHWqNHZx+B99EwxTvGYmUBqe3Fjxs4L1BoZTJmPu6usVg== + /postcss-load-config/2.1.2: + resolution: {integrity: sha512-/rDeGV6vMUo3mwJZmeHfEDvwnTKKqQ0S7OHUi/kJvvtx3aWtyWG2/0ZWnzCt2keEclwN6Tf0DST2v9kITdOKYw==} + engines: {node: '>= 4'} dependencies: cosmiconfig: 5.2.1 import-cwd: 2.1.0 dev: true - engines: - node: '>= 4' - resolution: - integrity: sha512-/rDeGV6vMUo3mwJZmeHfEDvwnTKKqQ0S7OHUi/kJvvtx3aWtyWG2/0ZWnzCt2keEclwN6Tf0DST2v9kITdOKYw== + /postcss-loader/3.0.0: + resolution: {integrity: sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA==} + engines: {node: '>= 6'} dependencies: loader-utils: 1.4.0 postcss: 7.0.35 postcss-load-config: 2.1.2 schema-utils: 1.0.0 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== + /postcss-logical/3.0.0: + resolution: {integrity: sha512-1SUKdJc2vuMOmeItqGuNaC+N8MzBWFWEkAnRnLpFYj1tGGa7NqyVBujfRtgNa2gXR+6RkGUiB2O5Vmh7E2RmiA==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-1SUKdJc2vuMOmeItqGuNaC+N8MzBWFWEkAnRnLpFYj1tGGa7NqyVBujfRtgNa2gXR+6RkGUiB2O5Vmh7E2RmiA== + /postcss-media-minmax/4.0.0: + resolution: {integrity: sha512-fo9moya6qyxsjbFAYl97qKO9gyre3qvbMnkOZeZwlsW6XYFsvs2DMGDlchVLfAd8LHPZDxivu/+qW2SMQeTHBw==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - 
integrity: sha512-fo9moya6qyxsjbFAYl97qKO9gyre3qvbMnkOZeZwlsW6XYFsvs2DMGDlchVLfAd8LHPZDxivu/+qW2SMQeTHBw== + /postcss-merge-longhand/4.0.11: + resolution: {integrity: sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw==} + engines: {node: '>=6.9.0'} dependencies: css-color-names: 0.0.4 postcss: 7.0.35 postcss-value-parser: 3.3.1 stylehacks: 4.0.3 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw== + /postcss-merge-rules/4.0.3: + resolution: {integrity: sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ==} + engines: {node: '>=6.9.0'} dependencies: browserslist: 4.16.3 caniuse-api: 3.0.0 @@ -10237,31 +11308,28 @@ packages: postcss-selector-parser: 3.1.2 vendors: 1.0.4 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ== + /postcss-minify-font-values/4.0.2: + resolution: {integrity: sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg== + /postcss-minify-gradients/4.0.2: + resolution: {integrity: sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-arguments: 4.0.0 is-color-stop: 1.1.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q== + /postcss-minify-params/4.0.2: + resolution: {integrity: sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg==} + engines: {node: '>=6.9.0'} dependencies: alphanum-sort: 1.0.2 browserslist: 4.16.3 @@ -10270,155 +11338,140 @@ packages: postcss-value-parser: 3.3.1 uniqs: 2.0.0 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg== + /postcss-minify-selectors/4.0.2: + resolution: {integrity: sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g==} + engines: {node: '>=6.9.0'} dependencies: alphanum-sort: 1.0.2 has: 1.0.3 postcss: 7.0.35 postcss-selector-parser: 3.1.2 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g== + /postcss-modules-extract-imports/2.0.0: + resolution: {integrity: sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==} + engines: {node: '>= 6'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== + /postcss-modules-local-by-default/3.0.3: + resolution: {integrity: sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw==} + engines: {node: '>= 6'} dependencies: icss-utils: 4.1.1 postcss: 7.0.35 postcss-selector-parser: 6.0.4 postcss-value-parser: 4.1.0 dev: true - engines: - node: '>= 6' - 
resolution: - integrity: sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw== + /postcss-modules-scope/2.2.0: + resolution: {integrity: sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==} + engines: {node: '>= 6'} dependencies: postcss: 7.0.35 postcss-selector-parser: 6.0.4 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ== + /postcss-modules-values/3.0.0: + resolution: {integrity: sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==} dependencies: icss-utils: 4.1.1 postcss: 7.0.35 dev: true - resolution: - integrity: sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg== + /postcss-nesting/7.0.1: + resolution: {integrity: sha512-FrorPb0H3nuVq0Sff7W2rnc3SmIcruVC6YwpcS+k687VxyxO33iE1amna7wHuRVzM8vfiYofXSBHNAZ3QhLvYg==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-FrorPb0H3nuVq0Sff7W2rnc3SmIcruVC6YwpcS+k687VxyxO33iE1amna7wHuRVzM8vfiYofXSBHNAZ3QhLvYg== + /postcss-normalize-charset/4.0.1: + resolution: {integrity: sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g== + /postcss-normalize-display-values/4.0.2: + resolution: {integrity: sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-match: 4.0.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ== + /postcss-normalize-positions/4.0.2: + resolution: {integrity: sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-arguments: 4.0.0 has: 1.0.3 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA== + /postcss-normalize-repeat-style/4.0.2: + resolution: {integrity: sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-arguments: 4.0.0 cssnano-util-get-match: 4.0.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q== + /postcss-normalize-string/4.0.2: + resolution: {integrity: sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA==} + engines: {node: '>=6.9.0'} dependencies: has: 1.0.3 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA== + /postcss-normalize-timing-functions/4.0.2: + resolution: {integrity: 
sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-match: 4.0.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A== + /postcss-normalize-unicode/4.0.1: + resolution: {integrity: sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg==} + engines: {node: '>=6.9.0'} dependencies: browserslist: 4.16.3 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg== + /postcss-normalize-url/4.0.1: + resolution: {integrity: sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA==} + engines: {node: '>=6.9.0'} dependencies: is-absolute-url: 2.1.0 normalize-url: 3.3.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA== + /postcss-normalize-whitespace/4.0.2: + resolution: {integrity: sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA==} + engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA== + /postcss-normalize/8.0.1: + resolution: {integrity: sha512-rt9JMS/m9FHIRroDDBGSMsyW1c0fkvOJPy62ggxSHUldJO7B195TqFMqIf+lY5ezpDcYOV4j86aUp3/XbxzCCQ==} + engines: {node: '>=8.0.0'} dependencies: '@csstools/normalize.css': 10.1.0 browserslist: 4.16.3 @@ -10426,44 +11479,40 @@ packages: postcss-browser-comments: 3.0.0_browserslist@4.16.3 sanitize.css: 10.0.0 dev: true - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-rt9JMS/m9FHIRroDDBGSMsyW1c0fkvOJPy62ggxSHUldJO7B195TqFMqIf+lY5ezpDcYOV4j86aUp3/XbxzCCQ== + /postcss-ordered-values/4.1.2: + resolution: {integrity: sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-arguments: 4.0.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw== + /postcss-overflow-shorthand/2.0.0: + resolution: {integrity: sha512-aK0fHc9CBNx8jbzMYhshZcEv8LtYnBIRYQD5i7w/K/wS9c2+0NSR6B3OVMu5y0hBHYLcMGjfU+dmWYNKH0I85g==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-aK0fHc9CBNx8jbzMYhshZcEv8LtYnBIRYQD5i7w/K/wS9c2+0NSR6B3OVMu5y0hBHYLcMGjfU+dmWYNKH0I85g== + /postcss-page-break/2.0.0: + resolution: {integrity: sha512-tkpTSrLpfLfD9HvgOlJuigLuk39wVTbbd8RKcy8/ugV2bNBUW3xU+AIqyxhDrQr1VUj1RmyJrBn1YWrqUm9zAQ==} dependencies: postcss: 7.0.35 dev: true - resolution: - integrity: sha512-tkpTSrLpfLfD9HvgOlJuigLuk39wVTbbd8RKcy8/ugV2bNBUW3xU+AIqyxhDrQr1VUj1RmyJrBn1YWrqUm9zAQ== + /postcss-place/4.0.1: + resolution: {integrity: sha512-Zb6byCSLkgRKLODj/5mQugyuj9bvAAw9LqJJjgwz5cYryGeXfFZfSXoP1UfveccFmeq0b/2xxwcTEVScnqGxBg==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-values-parser: 2.0.1 
dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-Zb6byCSLkgRKLODj/5mQugyuj9bvAAw9LqJJjgwz5cYryGeXfFZfSXoP1UfveccFmeq0b/2xxwcTEVScnqGxBg== + /postcss-preset-env/6.7.0: + resolution: {integrity: sha512-eU4/K5xzSFwUFJ8hTdTQzo2RBLbDVt83QZrAvI07TULOkmyQlnYlpwep+2yIK+K+0KlZO4BvFcleOCCcUtwchg==} + engines: {node: '>=6.0.0'} dependencies: autoprefixer: 9.8.6 browserslist: 4.16.3 @@ -10503,284 +11552,300 @@ packages: postcss-selector-matches: 4.0.0 postcss-selector-not: 4.0.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-eU4/K5xzSFwUFJ8hTdTQzo2RBLbDVt83QZrAvI07TULOkmyQlnYlpwep+2yIK+K+0KlZO4BvFcleOCCcUtwchg== + /postcss-pseudo-class-any-link/6.0.0: + resolution: {integrity: sha512-lgXW9sYJdLqtmw23otOzrtbDXofUdfYzNm4PIpNE322/swES3VU9XlXHeJS46zT2onFO7V1QFdD4Q9LiZj8mew==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 postcss-selector-parser: 5.0.0 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-lgXW9sYJdLqtmw23otOzrtbDXofUdfYzNm4PIpNE322/swES3VU9XlXHeJS46zT2onFO7V1QFdD4Q9LiZj8mew== + /postcss-reduce-initial/4.0.3: + resolution: {integrity: sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA==} + engines: {node: '>=6.9.0'} dependencies: browserslist: 4.16.3 caniuse-api: 3.0.0 has: 1.0.3 postcss: 7.0.35 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA== + /postcss-reduce-transforms/4.0.2: + resolution: {integrity: sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg==} + engines: {node: '>=6.9.0'} dependencies: cssnano-util-get-match: 4.0.0 has: 1.0.3 postcss: 7.0.35 postcss-value-parser: 3.3.1 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg== + /postcss-replace-overflow-wrap/3.0.0: + resolution: {integrity: sha512-2T5hcEHArDT6X9+9dVSPQdo7QHzG4XKclFT8rU5TzJPDN7RIRTbO9c4drUISOVemLj03aezStHCR2AIcr8XLpw==} + dependencies: + postcss: 7.0.35 + dev: true + + /postcss-safe-parser/4.0.2: + resolution: {integrity: sha512-Uw6ekxSWNLCPesSv/cmqf2bY/77z11O7jZGPax3ycZMFU/oi2DMH9i89AdHc1tRwFg/arFoEwX0IS3LCUxJh1g==} + engines: {node: '>=6.0.0'} dependencies: postcss: 7.0.35 dev: true - resolution: - integrity: sha512-2T5hcEHArDT6X9+9dVSPQdo7QHzG4XKclFT8rU5TzJPDN7RIRTbO9c4drUISOVemLj03aezStHCR2AIcr8XLpw== + /postcss-safe-parser/5.0.2: + resolution: {integrity: sha512-jDUfCPJbKOABhwpUKcqCVbbXiloe/QXMcbJ6Iipf3sDIihEzTqRCeMBfRaOHxhBuTYqtASrI1KJWxzztZU4qUQ==} + engines: {node: '>=10.0'} dependencies: postcss: 8.2.8 dev: true - engines: - node: '>=10.0' - resolution: - integrity: sha512-jDUfCPJbKOABhwpUKcqCVbbXiloe/QXMcbJ6Iipf3sDIihEzTqRCeMBfRaOHxhBuTYqtASrI1KJWxzztZU4qUQ== + /postcss-selector-matches/4.0.0: + resolution: {integrity: sha512-LgsHwQR/EsRYSqlwdGzeaPKVT0Ml7LAT6E75T8W8xLJY62CE4S/l03BWIt3jT8Taq22kXP08s2SfTSzaraoPww==} dependencies: balanced-match: 1.0.0 postcss: 7.0.35 dev: true - resolution: - integrity: sha512-LgsHwQR/EsRYSqlwdGzeaPKVT0Ml7LAT6E75T8W8xLJY62CE4S/l03BWIt3jT8Taq22kXP08s2SfTSzaraoPww== + /postcss-selector-not/4.0.1: + resolution: {integrity: sha512-YolvBgInEK5/79C+bdFMyzqTg6pkYqDbzZIST/PDMqa/o3qtXenD05apBG2jLgT0/BQ77d4U2UK12jWpilqMAQ==} dependencies: balanced-match: 1.0.0 postcss: 7.0.35 dev: true - resolution: - integrity: 
sha512-YolvBgInEK5/79C+bdFMyzqTg6pkYqDbzZIST/PDMqa/o3qtXenD05apBG2jLgT0/BQ77d4U2UK12jWpilqMAQ== + /postcss-selector-parser/3.1.2: + resolution: {integrity: sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==} + engines: {node: '>=8'} dependencies: dot-prop: 5.3.0 indexes-of: 1.0.1 uniq: 1.0.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA== + /postcss-selector-parser/5.0.0: + resolution: {integrity: sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==} + engines: {node: '>=4'} dependencies: cssesc: 2.0.0 indexes-of: 1.0.1 uniq: 1.0.1 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ== + /postcss-selector-parser/6.0.4: + resolution: {integrity: sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw==} + engines: {node: '>=4'} dependencies: cssesc: 3.0.0 indexes-of: 1.0.1 uniq: 1.0.1 util-deprecate: 1.0.2 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw== + /postcss-svgo/4.0.2: + resolution: {integrity: sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw==} + engines: {node: '>=6.9.0'} dependencies: is-svg: 3.0.0 postcss: 7.0.35 postcss-value-parser: 3.3.1 svgo: 1.3.2 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw== + /postcss-unique-selectors/4.0.1: + resolution: {integrity: sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg==} + engines: {node: '>=6.9.0'} dependencies: alphanum-sort: 1.0.2 postcss: 7.0.35 uniqs: 2.0.0 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg== + /postcss-value-parser/3.3.1: + resolution: {integrity: sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==} dev: true - resolution: - integrity: sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== + /postcss-value-parser/4.1.0: - resolution: - integrity: sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ== + resolution: {integrity: sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==} + /postcss-values-parser/2.0.1: + resolution: {integrity: sha512-2tLuBsA6P4rYTNKCXYG/71C7j1pU6pK503suYOmn4xYrQIzW+opD+7FAFNuGSdZC/3Qfy334QbeMu7MEb8gOxg==} + engines: {node: '>=6.14.4'} dependencies: flatten: 1.0.3 indexes-of: 1.0.1 uniq: 1.0.1 dev: true - engines: - node: '>=6.14.4' - resolution: - integrity: sha512-2tLuBsA6P4rYTNKCXYG/71C7j1pU6pK503suYOmn4xYrQIzW+opD+7FAFNuGSdZC/3Qfy334QbeMu7MEb8gOxg== + /postcss/7.0.21: + resolution: {integrity: sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ==} + engines: {node: '>=6.0.0'} + dependencies: + chalk: 2.4.2 + source-map: 0.6.1 + supports-color: 6.1.0 + dev: true + + /postcss/7.0.32: + resolution: {integrity: sha512-03eXong5NLnNCD05xscnGKGDZ98CyzoqPSMjOe6SuoQY7Z2hIj0Ld1g/O/UQRuOle2aRtiIRDg9tDcTGAkLfKw==} + engines: {node: '>=6.0.0'} 
dependencies: chalk: 2.4.2 source-map: 0.6.1 supports-color: 6.1.0 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ== + /postcss/7.0.35: + resolution: {integrity: sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg==} + engines: {node: '>=6.0.0'} dependencies: chalk: 2.4.2 source-map: 0.6.1 supports-color: 6.1.0 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg== + /postcss/8.2.8: + resolution: {integrity: sha512-1F0Xb2T21xET7oQV9eKuctbM9S7BC0fetoHCc4H13z0PT6haiRLP4T0ZY4XWh7iLP0usgqykT6p9B2RtOf4FPw==} + engines: {node: ^10 || ^12 || >=14} dependencies: colorette: 1.2.2 nanoid: 3.1.22 source-map: 0.6.1 dev: true - engines: - node: ^10 || ^12 || >=14 - resolution: - integrity: sha512-1F0Xb2T21xET7oQV9eKuctbM9S7BC0fetoHCc4H13z0PT6haiRLP4T0ZY4XWh7iLP0usgqykT6p9B2RtOf4FPw== + /prelude-ls/1.1.2: + resolution: {integrity: sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=} + engines: {node: '>= 0.8.0'} dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + /prelude-ls/1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + /prepend-http/1.0.4: + resolution: {integrity: sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= + /prettier-linter-helpers/1.0.0: + resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} + engines: {node: '>=6.0.0'} dependencies: fast-diff: 1.2.0 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== + /prettier/2.2.1: - dev: true - engines: - node: '>=10.13.0' + resolution: {integrity: sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==} + engines: {node: '>=10.13.0'} hasBin: true - resolution: - integrity: sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q== + dev: true + /pretty-bytes/5.6.0: + resolution: {integrity: sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== + /pretty-error/2.1.2: + resolution: {integrity: sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw==} dependencies: lodash: 4.17.21 renderkid: 2.0.5 dev: true - resolution: - integrity: sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw== + /pretty-format/26.6.2: + resolution: {integrity: sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==} + engines: {node: '>= 10'} dependencies: '@jest/types': 26.6.2 ansi-regex: 5.0.0 ansi-styles: 4.3.0 react-is: 17.0.2 dev: true - engines: - node: '>= 10' - resolution: - integrity: 
sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg== + + /prism-react-renderer/1.2.1_react@17.0.2: + resolution: {integrity: sha512-w23ch4f75V1Tnz8DajsYKvY5lF7H1+WvzvLUcF0paFxkTHSp42RS0H5CttdN2Q8RR3DRGZ9v5xD/h3n8C8kGmg==} + peerDependencies: + react: '>=0.14.9' + dependencies: + react: 17.0.2 + dev: true + + /prismjs/1.25.0: + resolution: {integrity: sha512-WCjJHl1KEWbnkQom1+SzftbtXMKQoezOCYs5rECqMN+jP+apI7ftoflyqigqzopSO3hMhTEb0mFClA8lkolgEg==} + dev: true + /process-nextick-args/2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} dev: true - resolution: - integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + /process/0.11.10: + resolution: {integrity: sha1-czIwDoQBYb2j5podHZGn1LwW8YI=} + engines: {node: '>= 0.6.0'} dev: true - engines: - node: '>= 0.6.0' - resolution: - integrity: sha1-czIwDoQBYb2j5podHZGn1LwW8YI= + /progress/2.0.3: + resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==} + engines: {node: '>=0.4.0'} dev: true - engines: - node: '>=0.4.0' - resolution: - integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + /promise-inflight/1.0.1: + resolution: {integrity: sha1-mEcocL8igTL8vdhoEputEsPAKeM=} dev: true - resolution: - integrity: sha1-mEcocL8igTL8vdhoEputEsPAKeM= + /promise/8.1.0: + resolution: {integrity: sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==} dependencies: asap: 2.0.6 dev: false - resolution: - integrity: sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q== + /prompts/2.4.0: + resolution: {integrity: sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ==} + engines: {node: '>= 6'} dependencies: kleur: 3.0.3 sisteransi: 1.0.5 - engines: - node: '>= 6' - resolution: - integrity: sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== + /prop-types/15.7.2: + resolution: {integrity: sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==} + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + + /prop-types/15.8.1: + resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 react-is: 16.13.1 - resolution: - integrity: sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== + dev: false + + /property-information/5.6.0: + resolution: {integrity: sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==} + dependencies: + xtend: 4.0.2 + dev: true + /proxy-addr/2.0.6: + resolution: {integrity: sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==} + engines: {node: '>= 0.10'} dependencies: forwarded: 0.1.2 ipaddr.js: 1.9.1 dev: true - engines: - node: '>= 0.10' - resolution: - integrity: sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw== + /prr/1.0.1: + resolution: {integrity: sha1-0/wRS6BplaRexok/SEzrHXj19HY=} dev: true - resolution: - integrity: sha1-0/wRS6BplaRexok/SEzrHXj19HY= + /psl/1.8.0: + resolution: {integrity: 
sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==} dev: true - resolution: - integrity: sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + /public-encrypt/4.0.3: + resolution: {integrity: sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==} dependencies: bn.js: 4.12.0 browserify-rsa: 4.1.0 @@ -10789,293 +11854,186 @@ packages: randombytes: 2.1.0 safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== + /pubsub-js/1.9.3: + resolution: {integrity: sha512-FhYYlPNOywTh7zN38u5AlG67emA47w6JZd7YgdQU1w8gQbZhhIGxVM0AQosdaINHb2ALb+fhfnVyBJAt4D4IzA==} dev: false - resolution: - integrity: sha512-FhYYlPNOywTh7zN38u5AlG67emA47w6JZd7YgdQU1w8gQbZhhIGxVM0AQosdaINHb2ALb+fhfnVyBJAt4D4IzA== + /pump/2.0.1: + resolution: {integrity: sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==} dependencies: end-of-stream: 1.4.4 once: 1.4.0 dev: true - resolution: - integrity: sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== + /pump/3.0.0: + resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} dependencies: end-of-stream: 1.4.4 once: 1.4.0 dev: true - resolution: - integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + /pumpify/1.5.1: + resolution: {integrity: sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==} dependencies: duplexify: 3.7.1 inherits: 2.0.4 pump: 2.0.1 dev: true - resolution: - integrity: sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== + /punycode/1.3.2: + resolution: {integrity: sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=} dev: true - resolution: - integrity: sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= + /punycode/1.4.1: + resolution: {integrity: sha1-wNWmOycYgArY4esPpSachN1BhF4=} dev: true - resolution: - integrity: sha1-wNWmOycYgArY4esPpSachN1BhF4= + /punycode/2.1.1: + resolution: {integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + /q/1.5.1: + resolution: {integrity: sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=} + engines: {node: '>=0.6.0', teleport: '>=0.2.0'} dev: true - engines: - node: '>=0.6.0' - teleport: '>=0.2.0' - resolution: - integrity: sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= + + /qs/6.10.1: + resolution: {integrity: sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg==} + engines: {node: '>=0.6'} + dependencies: + side-channel: 1.0.4 + dev: false + /qs/6.5.2: + resolution: {integrity: sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==} + engines: {node: '>=0.6'} dev: true - engines: - node: '>=0.6' - resolution: - integrity: sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== + /qs/6.7.0: + resolution: {integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==} + engines: {node: '>=0.6'} dev: true - engines: - node: '>=0.6' - resolution: - integrity: 
sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== + /query-string/4.3.4: + resolution: {integrity: sha1-u7aTucqRXCMlFbIosaArYJBD2+s=} + engines: {node: '>=0.10.0'} dependencies: object-assign: 4.1.1 strict-uri-encode: 1.1.0 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-u7aTucqRXCMlFbIosaArYJBD2+s= + + /query-string/6.14.1: + resolution: {integrity: sha512-XDxAeVmpfu1/6IjyT/gXHOl+S0vQ9owggJ30hhWKdHAsNPOcasn5o9BW0eejZqL2e4vMjhAxoW3jVHcD6mbcYw==} + engines: {node: '>=6'} + dependencies: + decode-uri-component: 0.2.0 + filter-obj: 1.1.0 + split-on-first: 1.1.0 + strict-uri-encode: 2.0.0 + dev: true + /querystring-es3/0.2.1: + resolution: {integrity: sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=} + engines: {node: '>=0.4.x'} dev: true - engines: - node: '>=0.4.x' - resolution: - integrity: sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= + /querystring/0.2.0: + resolution: {integrity: sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=} + engines: {node: '>=0.4.x'} + deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. dev: true - engines: - node: '>=0.4.x' - resolution: - integrity: sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= + /querystring/0.2.1: + resolution: {integrity: sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==} + engines: {node: '>=0.4.x'} dev: true - engines: - node: '>=0.4.x' - resolution: - integrity: sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg== + /querystringify/2.2.0: + resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} dev: true - resolution: - integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + /queue-microtask/1.2.3: - resolution: - integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + /raf/3.4.1: + resolution: {integrity: sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==} dependencies: performance-now: 2.1.0 dev: false - resolution: - integrity: sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + /randombytes/2.1.0: + resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} dependencies: safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + /randomfill/1.0.4: + resolution: {integrity: sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==} dependencies: randombytes: 2.1.0 safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== + /range-parser/1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + /raw-body/2.4.0: + resolution: {integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==} + engines: {node: '>= 0.8'} 
dependencies: bytes: 3.1.0 http-errors: 1.7.2 iconv-lite: 0.4.24 unpipe: 1.0.0 dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== - /rc-align/4.0.9_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - dom-align: 1.12.0 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - resize-observer-polyfill: 1.5.1 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-myAM2R4qoB6LqBul0leaqY8gFaiECDJ3MtQDmzDo9xM9NRT/04TvWOYd2YHU9zvGzqk9QXF6S9/MifzSKDZeMw== - /rc-cascader/1.4.2_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - array-tree-filter: 2.1.0 - rc-trigger: 5.2.3_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - warning: 4.0.3 - dev: false + + /raw-loader/4.0.2_webpack@4.44.2: + resolution: {integrity: sha512-ZnScIV3ag9A4wPX/ZayxL/jZH+euYb6FcUinPcgiQW0+UBtEv0O6Q3lGd3cqJ+GHH+rksEv3Pj99oxJ3u3VIKA==} + engines: {node: '>= 10.13.0'} peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-JVuLGrSi+3G8DZyPvlKlGVWJjhoi9NTz6REHIgRspa5WnznRkKGm2ejb0jJtz0m2IL8Q9BG4ZA2sXuqAu71ltQ== - /rc-checkbox/2.3.2_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-afVi1FYiGv1U0JlpNH/UaEXdh6WUJjcWokj/nUN2TgG80bfG+MDdbfHKlLcNNba94mbjy2/SXJ1HDgrOkXGAjg== - /rc-collapse/3.1.0_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - shallowequal: 1.1.0 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-EwpNPJcLe7b+5JfyaxM9ZNnkCgqArt3QQO0Cr5p5plwz/C9h8liAmjYY5I4+hl9lAjBqb7ZwLu94+z+rt5g1WQ== - /rc-dialog/8.5.2_react-dom@17.0.2+react@17.0.2: + webpack: ^4.0.0 || ^5.0.0 dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false + loader-utils: 2.0.0 + schema-utils: 3.1.0 + webpack: 4.44.2_webpack-cli@4.6.0 + dev: true + + /rc-align/4.0.9_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-myAM2R4qoB6LqBul0leaqY8gFaiECDJ3MtQDmzDo9xM9NRT/04TvWOYd2YHU9zvGzqk9QXF6S9/MifzSKDZeMw==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-3n4taFcjqhTE9uNuzjB+nPDeqgRBTEGBfe46mb1e7r88DgDo0lL4NnxY/PZ6PJKd2tsCt+RrgF/+YeTvJ/Thsw== - /rc-drawer/4.3.1_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 + dom-align: 1.12.0 rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-GMfFy4maqxS9faYXEhQ+0cA1xtkddEQzraf6SAdzWbn444DrrLogwYPk1NXSpdXjLCLxgxOj9MYtyYG42JsfXg== + resize-observer-polyfill: 1.5.1 + /rc-dropdown/3.2.0_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-trigger: 
5.2.3_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false + resolution: {integrity: sha512-j1HSw+/QqlhxyTEF6BArVZnTmezw2LnSmRk6I9W7BCqNCKaRwleRmMMs1PHbuaG8dKHVqP6e21RQ7vPBLVnnNw==} peerDependencies: react: '*' react-dom: '*' - resolution: - integrity: sha512-j1HSw+/QqlhxyTEF6BArVZnTmezw2LnSmRk6I9W7BCqNCKaRwleRmMMs1PHbuaG8dKHVqP6e21RQ7vPBLVnnNw== - /rc-field-form/1.20.0_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - async-validator: 3.5.1 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' - peerDependencies: - react: '>= 16.9.0' - react-dom: '>= 16.9.0' - resolution: - integrity: sha512-jkzsIfXR7ywEYdeAtktt1aLff88wxIPDLpq7KShHNl4wlsWrCE+TzkXBfjvVzYOVZt5GGrD8YDqNO/q6eaR/eA== - /rc-image/5.2.4_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-dialog: 8.5.2_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-kWOjhZC1OoGKfvWqtDoO9r8WUNswBwnjcstI6rf7HMudz0usmbGvewcWqsOhyaBRJL9+I4eeG+xiAoxV1xi75Q== - /rc-input-number/7.0.3_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-y0nVqVANWyxQbm/vdhz1p5E1V5Y6Yd2+3MGKntSzCxrYgw0F7/COXkbRdcTECnXwiDv8ZrbYQ1pTP3u43PqE4Q== - /rc-mentions/1.5.3_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 - rc-menu: 8.10.6_react-dom@17.0.2+react@17.0.2 - rc-textarea: 0.3.4_react-dom@17.0.2+react@17.0.2 rc-trigger: 5.2.3_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false + dev: true + + /rc-menu/8.10.6_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-RVkd8XChwSmVOdNULbqLNnABthRZWnhqct1Q74onEXTClsXvsLADMhlIJtw/umglVSECM+14TJdIli9rl2Bzlw==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-NG/KB8YiKBCJPHHvr/QapAb4f9YzLJn7kDHtmI1K6t7ZMM5YgrjIxNNhoRKKP9zJvb9PdPts69Hbg4ZMvLVIFQ== - /rc-menu/8.10.6_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 @@ -11087,369 +12045,153 @@ packages: react-dom: 17.0.2_react@17.0.2 resize-observer-polyfill: 1.5.1 shallowequal: 1.1.0 - dev: false + + /rc-motion/2.4.1_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-TWLvymfMu8SngPx5MDH8dQ0D2RYbluNTfam4hY/dNNx9RQ3WtGuZ/GXHi2ymLMzH+UNd6EEFYkOuR5JTTtm8Xg==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-RVkd8XChwSmVOdNULbqLNnABthRZWnhqct1Q74onEXTClsXvsLADMhlIJtw/umglVSECM+14TJdIli9rl2Bzlw== - /rc-motion/2.4.1_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false + + /rc-resize-observer/1.0.0_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-RgKGukg1mlzyGdvzF7o/LGFC8AeoMH9aGzXTUdp6m+OApvmRdUuOscq/Y2O45cJA+rXt1ApWlpFoOIioXL3AGg==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: 
sha512-TWLvymfMu8SngPx5MDH8dQ0D2RYbluNTfam4hY/dNNx9RQ3WtGuZ/GXHi2ymLMzH+UNd6EEFYkOuR5JTTtm8Xg== - /rc-notification/4.5.5_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 - rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' + resize-observer-polyfill: 1.5.1 + dev: true + + /rc-tabs/11.7.3_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-5nd2NVss9TprPRV9r8N05SjQyAE7zDrLejxFLcbJ+BdLxSwnGnk3ws/Iq0smqKZUnPQC0XEvnpF3+zlllUUT2w==} + engines: {node: '>=8.x'} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-YIfhTSw+h5GsSdgMnuMx24wqiPlg3FeamuOlkh9RkyHx+SeZVAKzQ0juy2NGvPEF2hDWi5xTqxUqLdo0L2AmGg== - /rc-overflow/1.0.2_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 + rc-dropdown: 3.2.0_react-dom@17.0.2+react@17.0.2 + rc-menu: 8.10.6_react-dom@17.0.2+react@17.0.2 rc-resize-observer: 1.0.0_react-dom@17.0.2+react@17.0.2 rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false + dev: true + + /rc-trigger/5.2.3_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-6Fokao07HUbqKIDkDRFEM0AGZvsvK0Fbp8A/KFgl1ngaqfO1nY037cISCG1Jm5fxImVsXp9awdkP7Vu5cxjjog==} + engines: {node: '>=8.x'} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-GXj4DAyNxm4f57LvXLwhJaZoJHzSge2l2lQq64MZP7NJAfLpQqOLD+v9JMV9ONTvDPZe8kdzR+UMmkAn7qlzFA== - /rc-pagination/3.1.6_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 classnames: 2.2.6 + rc-align: 4.0.9_react-dom@17.0.2+react@17.0.2 + rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 + rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false + + /rc-upload/4.3.1_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-W8Iyv0LRyEnFEzpv90ET/i1XG2jlPzPxKkkOVtDfgh9c3f4lZV770vgpUfiyQza+iLtQLVco3qIvgue8aDiOsQ==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-Pb2zJEt8uxXzYCWx/2qwsYZ3vSS9Eqdw0cJBli6C58/iYhmvutSBqrBJh51Z5UzYc5ZcW5CMeP5LbbKE1J3rpw== - /rc-picker/2.5.10_2235c505ed33ea6efd93d3050f896208: dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - date-fns: 2.19.0 - dayjs: 1.10.4 - moment: 2.29.1 - rc-trigger: 5.2.3_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 + '@babel/runtime': 7.14.8 + classnames: 2.3.1 + rc-util: 5.13.2_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - shallowequal: 1.1.0 dev: false - engines: - node: '>=8.x' + + /rc-util/5.13.2_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-eYc71XXGlp96RMzg01Mhq/T3BL6OOVTDSS0urFEuvpi+e7slhJRhaHGCKy2hqJm18m9ff7VoRoptplKu60dYog==} peerDependencies: - dayjs: ^1.8.30 react: '>=16.9.0' react-dom: '>=16.9.0' - resolution: - integrity: sha512-d2or2jql9SSY8CaRPybpbKkXBq3bZ6g88UKyWQZBLTCrc92Xm87RfRC/P3UEQo/CLmia3jVF7IXVi1HmNe2DZA== - /rc-progress/3.1.3_react-dom@17.0.2+react@17.0.2: dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 + '@babel/runtime': 7.14.8 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 + react-is: 16.13.1 + shallowequal: 1.1.0 dev: false + + /rc-util/5.9.8_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-typLSHYGf5irvGLYQshs0Ra3aze086h0FhzsAkyirMunYZ7b3Te8gKa5PVaanoHaZa9sS6qx98BxgysoRP+6Tw==} peerDependencies: react: '>=16.9.0' react-dom: 
'>=16.9.0' - resolution: - integrity: sha512-Jl4fzbBExHYMoC6HBPzel0a9VmhcSXx24LVt/mdhDM90MuzoMCJjXZAlhA0V0CJi+SKjMhfBoIQ6Lla1nD4QNw== - /rc-rate/2.9.1_react-dom@17.0.2+react@17.0.2: dependencies: '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' + react-is: 16.13.1 + shallowequal: 1.1.0 + + /re-resizable/6.9.0_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-3cUDG81ylyqI0Pdgle/RHwwRYq0ORZzsUaySOCO8IbEtNyaRtrIHYm/jMQ5pjcNiKCxR3vsSymIQZHwJq4gg2Q==} peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-MmIU7FT8W4LYRRHJD1sgG366qKtSaKb67D0/vVvJYR0lrCuRrCiVQ5qhfT5ghVO4wuVIORGpZs7ZKaYu+KMUzA== - /rc-resize-observer/1.0.0_react-dom@17.0.2+react@17.0.2: + react: ^16.13.1 || ^17.0.0 + react-dom: ^16.13.1 || ^17.0.0 dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 + fast-memoize: 2.5.2 react: 17.0.2 react-dom: 17.0.2_react@17.0.2 - resize-observer-polyfill: 1.5.1 dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-RgKGukg1mlzyGdvzF7o/LGFC8AeoMH9aGzXTUdp6m+OApvmRdUuOscq/Y2O45cJA+rXt1ApWlpFoOIioXL3AGg== - /rc-select/12.1.7_react-dom@17.0.2+react@17.0.2: + + /react-app-polyfill/2.0.0: + resolution: {integrity: sha512-0sF4ny9v/B7s6aoehwze9vJNWcmCemAUYBVasscVr92+UYiEqDXOxfKjXN685mDaMRNF3WdhHQs76oTODMocFA==} + engines: {node: '>=10'} dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 - rc-overflow: 1.0.2_react-dom@17.0.2+react@17.0.2 - rc-trigger: 5.2.3_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - rc-virtual-list: 3.2.6_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 + core-js: 3.9.1 + object-assign: 4.1.1 + promise: 8.1.0 + raf: 3.4.1 + regenerator-runtime: 0.13.7 + whatwg-fetch: 3.6.2 dev: false - engines: - node: '>=8.x' + + /react-attr-converter/0.3.1: + resolution: {integrity: sha512-dSxo2Mn6Zx4HajeCeQNLefwEO4kNtV/0E682R1+ZTyFRPqxDa5zYb5qM/ocqw9Bxr/kFQO0IUiqdV7wdHw+Cdg==} + dev: true + + /react-chartjs-2/3.0.4_chart.js@3.5.0+react@17.0.2: + resolution: {integrity: sha512-pcbFNpkPMTkGXXJ7k7hnukbRD0ZV01qB6JQY1ontITc2IYvhGlK6BBDy28VeydYs1Dl/c5ZpRgRVEtT5GUnxcQ==} peerDependencies: - react: '*' - react-dom: '*' - resolution: - integrity: sha512-sLZlfp+U7Typ+jPM5gTi8I4/oJalRw8kyhxZZ9Q4mEfO2p+otd1Chmzhh+wPraBY3IwE0RZM2/x1Leg/kQKk/w== - /rc-slider/9.7.2_react-dom@17.0.2+react@17.0.2: + chart.js: ^3.1.0 + react: ^16.8.0 || ^17.0.0 dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-tooltip: 5.1.0_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 + chart.js: 3.5.0 + lodash: 4.17.21 react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - shallowequal: 1.1.0 dev: false - engines: - node: '>=8.x' + + /react-clientside-effect/1.2.6_react@17.0.2: + resolution: {integrity: sha512-XGGGRQAKY+q25Lz9a/4EPqom7WRjz3z9R2k4jhVKA/puQFH/5Nt27vFZYql4m4NVNdUvX8PS3O7r/Zzm7cjUlg==} peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-mVaLRpDo6otasBs6yVnG02ykI3K6hIrLTNfT5eyaqduFv95UODI9PDS6fWuVVehVpdS4ENgOSwsTjrPVun+k9g== - /rc-steps/4.1.3_react-dom@17.0.2+react@17.0.2: + react: ^15.3.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 + 
'@babel/runtime': 7.20.13 react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 dev: false - engines: - node: '>=8.x' - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-GXrMfWQOhN3sVze3JnzNboHpQdNHcdFubOETUHyDpa/U3HEKBZC3xJ8XK4paBgF4OJ3bdUVLC+uBPc6dCxvDYA== - /rc-switch/3.2.2_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-+gUJClsZZzvAHGy1vZfnwySxj+MjLlGRyXKXScrtCTcmiYNPzxDFOxdQ/3pK1Kt/0POvwJ/6ALOR8gwdXGhs+A== - /rc-table/7.13.3_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-resize-observer: 1.0.0_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - shallowequal: 1.1.0 - dev: false - engines: - node: '>=8.x' - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-oP4fknjvKCZAaiDnvj+yzBaWcg+JYjkASbeWonU1BbrLcomkpKvMUgPODNEzg0QdXA9OGW0PO86h4goDSW06Kg== - /rc-tabs/11.7.3_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-dropdown: 3.2.0_react-dom@17.0.2+react@17.0.2 - rc-menu: 8.10.6_react-dom@17.0.2+react@17.0.2 - rc-resize-observer: 1.0.0_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-5nd2NVss9TprPRV9r8N05SjQyAE7zDrLejxFLcbJ+BdLxSwnGnk3ws/Iq0smqKZUnPQC0XEvnpF3+zlllUUT2w== - /rc-textarea/0.3.4_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-resize-observer: 1.0.0_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-ILUYx831ZukQPv3m7R4RGRtVVWmL1LV4ME03L22mvT56US0DGCJJaRTHs4vmpcSjFHItph5OTmhodY4BOwy81A== - /rc-tooltip/5.1.0_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - rc-trigger: 5.2.3_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-pFqD1JZwNIpbdcefB7k5xREoHAWM/k3yQwYF0iminbmDXERgq4rvBfUwIvlCqqZSM7HDr9hYeYr6ZsVNaKtvCQ== - /rc-tree-select/4.3.1_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-select: 12.1.7_react-dom@17.0.2+react@17.0.2 - rc-tree: 4.1.5_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '*' - react-dom: '*' - resolution: - integrity: sha512-OeV8u5kBEJ8MbatP04Rh8T3boOHGjdGBTEm1a0bubBbB2GNNhlMOr4ZxezkHYtXf02JdBS/WyydmI/RMjXgtJA== - /rc-tree/4.1.5_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - rc-virtual-list: 3.2.6_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' - peerDependencies: - 
react: '*' - react-dom: '*' - resolution: - integrity: sha512-q2vjcmnBDylGZ9/ZW4F9oZMKMJdbFWC7um+DAQhZG1nqyg1iwoowbBggUDUaUOEryJP+08bpliEAYnzJXbI5xQ== - /rc-trigger/5.2.3_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-align: 4.0.9_react-dom@17.0.2+react@17.0.2 - rc-motion: 2.4.1_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-6Fokao07HUbqKIDkDRFEM0AGZvsvK0Fbp8A/KFgl1ngaqfO1nY037cISCG1Jm5fxImVsXp9awdkP7Vu5cxjjog== - /rc-upload/4.2.0_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - classnames: 2.2.6 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-BXtvBs1PnwLjaUzBBU5z4yb9NMSaxc6mUIoPmS9LUAzaTz12L3TLrwu+8dnopYUiyLmYFS3LEO7aUfEWBqJfSA== - /rc-util/5.9.8_react-dom@17.0.2+react@17.0.2: - dependencies: - '@babel/runtime': 7.13.10 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-is: 16.13.1 - shallowequal: 1.1.0 - dev: false - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - resolution: - integrity: sha512-typLSHYGf5irvGLYQshs0Ra3aze086h0FhzsAkyirMunYZ7b3Te8gKa5PVaanoHaZa9sS6qx98BxgysoRP+6Tw== - /rc-virtual-list/3.2.6_react-dom@17.0.2+react@17.0.2: - dependencies: - classnames: 2.2.6 - rc-resize-observer: 1.0.0_react-dom@17.0.2+react@17.0.2 - rc-util: 5.9.8_react-dom@17.0.2+react@17.0.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - engines: - node: '>=8.x' - peerDependencies: - react: '*' - react-dom: '*' - resolution: - integrity: sha512-8FiQLDzm3c/tMX0d62SQtKDhLH7zFlSI6pWBAPt+TUntEqd3Lz9zFAmpvTu8gkvUom/HCsDSZs4wfV4wDPWC0Q== - /react-app-polyfill/2.0.0: - dependencies: - core-js: 3.9.1 - object-assign: 4.1.1 - promise: 8.1.0 - raf: 3.4.1 - regenerator-runtime: 0.13.7 - whatwg-fetch: 3.6.2 - dev: false - engines: - node: '>=10' - resolution: - integrity: sha512-0sF4ny9v/B7s6aoehwze9vJNWcmCemAUYBVasscVr92+UYiEqDXOxfKjXN685mDaMRNF3WdhHQs76oTODMocFA== - /react-chartjs-2/3.0.3_chart.js@3.2.1+react@17.0.2: - dependencies: - chart.js: 3.2.1 - lodash: 4.17.21 - react: 17.0.2 - dev: false - peerDependencies: - chart.js: ^3.1.0 - react: ^16.8.0 || ^17.0.0 - resolution: - integrity: sha512-jOFZKwZ8sMLkddewZ/tToxuu4pYimAvvY5I6uK+hCpSFT16Pvo2bdHhUoZ0X87zu9I+dx2I+JCqaLN6XhmrbDg== + /react-dev-utils/11.0.4: + resolution: {integrity: sha512-dx0LvIGHcOPtKbeiSUM4jqpBl3TcY7CDjZdfOIcKeznE7BWr9dg0iPG90G5yfVQ+p/rGNMXdbfStvzQZEVEi4A==} + engines: {node: '>=10'} dependencies: '@babel/code-frame': 7.10.4 address: 1.1.2 @@ -11476,47 +12218,64 @@ packages: strip-ansi: 6.0.0 text-table: 0.2.0 dev: false - engines: - node: '>=10' - resolution: - integrity: sha512-dx0LvIGHcOPtKbeiSUM4jqpBl3TcY7CDjZdfOIcKeznE7BWr9dg0iPG90G5yfVQ+p/rGNMXdbfStvzQZEVEi4A== - /react-dom/17.0.2_react@17.0.2: + + /react-docgen-typescript-dumi-tmp/1.22.1-0_typescript@4.2.3: + resolution: {integrity: sha512-wjuAm1yj+ZZucovow2VF0MXkH2SGZ+squZxfNdnam3oyUbHy/xZaU1ZabCn7rY+13ZFx0/NLda+ZuBgF3g8vBA==} + peerDependencies: + typescript: '>= 3.x' + dependencies: + typescript: 4.2.3 + dev: true + + /react-dom/16.14.0_react@16.14.0: + resolution: {integrity: sha512-1gCeQXDLoIqMgqD3IO2Ah9bnf0w9kzhwN5q4FGnHZ67hBm9yePzB5JJAIQCc8x3pFnNlwFq4RidZggNAAkzWWw==} + peerDependencies: + 
react: ^16.14.0 dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 - react: 17.0.2 - scheduler: 0.20.2 - dev: false + prop-types: 15.7.2 + react: 16.14.0 + scheduler: 0.19.1 + dev: true + + /react-dom/17.0.2_react@17.0.2: + resolution: {integrity: sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==} peerDependencies: react: 17.0.2 - resolution: - integrity: sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== - /react-drag-listview/0.1.8: dependencies: - babel-runtime: 6.26.0 - prop-types: 15.7.2 + loose-envify: 1.4.0 + object-assign: 4.1.1 + react: 17.0.2 + scheduler: 0.20.2 dev: false - resolution: - integrity: sha512-ZJnjFEz89RPZ1DzI8f6LngmtsmJbLry/pMz2tEqABxHA+d8cUFRmVPS1DxZdoz/htc+uri9fCdv4dqIiPz0xIA== + /react-draggable/4.4.3: + resolution: {integrity: sha512-jV4TE59MBuWm7gb6Ns3Q1mxX8Azffb7oTtDtBgFkxRvhDp38YAARmRplrj0+XGkhOJB5XziArX+4HUUABtyZ0w==} dependencies: classnames: 2.2.6 prop-types: 15.7.2 dev: false - resolution: - integrity: sha512-jV4TE59MBuWm7gb6Ns3Q1mxX8Azffb7oTtDtBgFkxRvhDp38YAARmRplrj0+XGkhOJB5XziArX+4HUUABtyZ0w== - /react-error-overlay/6.0.9: - dev: false - resolution: - integrity: sha512-nQTTcUu+ATDbrSD1BZHr5kgSD4oF8OFjxun8uAaL8RwPBacGBNPf/yAuVVdx17N8XNzRDMrZ9XcKZHCjPW+9ew== - /react-eva/1.1.14: + + /react-error-boundary/3.1.3_react@17.0.2: + resolution: {integrity: sha512-A+F9HHy9fvt9t8SNDlonq01prnU8AmkjvGKV4kk8seB9kU3xMEO8J/PQlLVmoOIDODl5U2kufSBs4vrWIqhsAA==} + engines: {node: '>=10', npm: '>=6'} + peerDependencies: + react: '>=16.13.1' dependencies: - rxjs: 6.6.7 - rxjs-compat: 6.6.7 + '@babel/runtime': 7.14.8 + react: 17.0.2 + dev: true + + /react-error-overlay/6.0.9: + resolution: {integrity: sha512-nQTTcUu+ATDbrSD1BZHr5kgSD4oF8OFjxun8uAaL8RwPBacGBNPf/yAuVVdx17N8XNzRDMrZ9XcKZHCjPW+9ew==} dev: false - resolution: - integrity: sha512-/5EeqL425sgDX0yhtNIoJh0kdjt1i/FFwh0OcnEZri83FIGNzaAXmNm7lbnSd2wvedgJ1/qnYPcOz5HuF2UvBA== + /react-flow-renderer/9.4.0_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-jv3w2RnoK5CC4tSKD48x9HWoaZZFjQbGAtu5lIeYtmgBKRaOFrA2t+zxgdenSALAXd2k9J+L5hXlY18FvtBmmA==} + peerDependencies: + react: "16 ||\_17" + react-dom: "16 ||\_17" dependencies: '@babel/runtime': 7.13.10 '@types/d3': 6.3.0 @@ -11530,32 +12289,59 @@ packages: react-draggable: 4.4.3 react-redux: 7.2.3_8436876974e3dcafae98d64b636de192 redux: 4.0.5 + transitivePeerDependencies: + - react-native dev: false + + /react-focus-lock/2.9.3_5170878e5e8a60dfb58a26e1cbcc99ef: + resolution: {integrity: sha512-cGNkz9p5Fpqio6hBHlkKxzRYrBYtcPosFOL6Q3N/LSbHjwP/PTBqHpvbgaOYoE7rWfzw8qXPKTB3Tk/VPgw4NQ==} peerDependencies: - react: "16 ||\_17" - react-dom: "16 ||\_17" - resolution: - integrity: sha512-jv3w2RnoK5CC4tSKD48x9HWoaZZFjQbGAtu5lIeYtmgBKRaOFrA2t+zxgdenSALAXd2k9J+L5hXlY18FvtBmmA== + '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.13 + '@types/react': 16.14.5 + focus-lock: 0.11.5 + prop-types: 15.8.1 + react: 17.0.2 + react-clientside-effect: 1.2.6_react@17.0.2 + use-callback-ref: 1.3.0_5170878e5e8a60dfb58a26e1cbcc99ef + use-sidecar: 1.1.2_5170878e5e8a60dfb58a26e1cbcc99ef + dev: false + /react-i18next/11.8.12_i18next@19.9.2+react@17.0.2: + resolution: {integrity: sha512-M2PSVP9MzT/7yofXfCOF5gAVotinrM4BXWiguk8uFSznJsfFzTjrp3K9CBWcXitpoCBVZGZJ2AnbaWGSNkJqfw==} + peerDependencies: + i18next: '>= 19.0.0' + react: '>= 16.8.0' dependencies: '@babel/runtime': 
7.13.10 html-parse-stringify2: 2.0.1 i18next: 19.9.2 react: 17.0.2 dev: false - peerDependencies: - i18next: '>= 19.0.0' - react: '>= 16.8.0' - resolution: - integrity: sha512-M2PSVP9MzT/7yofXfCOF5gAVotinrM4BXWiguk8uFSznJsfFzTjrp3K9CBWcXitpoCBVZGZJ2AnbaWGSNkJqfw== + /react-is/16.13.1: - resolution: - integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} + /react-is/17.0.2: + resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} dev: true - resolution: - integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + /react-query/3.13.0_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-CzBvgjMh8jNJMSPhXCE92DBIFbE31j8PA2k7ipR1F8DlcNAEsZwLsUzh1cTtzpDaS2+r6sntgmM6qKnCD6E5zQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 + react-dom: '*' + react-native: '*' + peerDependenciesMeta: + react-dom: + optional: true + react-native: + optional: true dependencies: '@babel/runtime': 7.13.10 broadcast-channel: 3.5.3 @@ -11563,18 +12349,19 @@ packages: react: 17.0.2 react-dom: 17.0.2_react@17.0.2 dev: false + + /react-redux/7.2.3_8436876974e3dcafae98d64b636de192: + resolution: {integrity: sha512-ZhAmQ1lrK+Pyi0ZXNMUZuYxYAZd59wFuVDGUt536kSGdD0ya9Q7BfsE95E3TsFLE3kOSFp5m6G5qbatE+Ic1+w==} peerDependencies: - react: ^16.8.0 || ^17.0.0 + react: ^16.8.3 || ^17 react-dom: '*' react-native: '*' + redux: ^2.0.0 || ^3.0.0 || ^4.0.0-0 peerDependenciesMeta: react-dom: optional: true react-native: optional: true - resolution: - integrity: sha512-CzBvgjMh8jNJMSPhXCE92DBIFbE31j8PA2k7ipR1F8DlcNAEsZwLsUzh1cTtzpDaS2+r6sntgmM6qKnCD6E5zQ== - /react-redux/7.2.3_8436876974e3dcafae98d64b636de192: dependencies: '@babel/runtime': 7.13.10 '@types/react-redux': 7.1.16 @@ -11586,25 +12373,58 @@ packages: react-is: 16.13.1 redux: 4.0.5 dev: false - peerDependencies: - react: ^16.8.3 || ^17 - react-dom: '*' - react-native: '*' - redux: ^2.0.0 || ^3.0.0 || ^4.0.0-0 - peerDependenciesMeta: - react-dom: - optional: true - react-native: - optional: true - resolution: - integrity: sha512-ZhAmQ1lrK+Pyi0ZXNMUZuYxYAZd59wFuVDGUt536kSGdD0ya9Q7BfsE95E3TsFLE3kOSFp5m6G5qbatE+Ic1+w== + + /react-refresh/0.10.0: + resolution: {integrity: sha512-PgidR3wST3dDYKr6b4pJoqQFpPGNKDSCDx4cZoshjXipw3LzO7mG1My2pwEzz2JVkF+inx3xRpDeQLFQGH/hsQ==} + engines: {node: '>=0.10.0'} + dev: true + /react-refresh/0.8.3: + resolution: {integrity: sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg==} + engines: {node: '>=0.10.0'} dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg== + + /react-router-config/5.1.1_react-router@5.2.0+react@16.14.0: + resolution: {integrity: sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==} + peerDependencies: + react: '>=15' + react-router: '>=5' + dependencies: + '@babel/runtime': 7.14.8 + react: 16.14.0 + react-router: 5.2.0_react@17.0.2 + dev: true + + /react-router-config/5.1.1_react-router@5.2.0+react@17.0.2: + resolution: {integrity: sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==} + peerDependencies: + react: '>=15' + react-router: '>=5' + dependencies: + '@babel/runtime': 7.14.8 + react: 17.0.2 
+ react-router: 5.2.0_react@17.0.2 + dev: true + + /react-router-dom/5.2.0_react@16.14.0: + resolution: {integrity: sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==} + peerDependencies: + react: '>=15' + dependencies: + '@babel/runtime': 7.13.10 + history: 4.10.1 + loose-envify: 1.4.0 + prop-types: 15.7.2 + react: 16.14.0 + react-router: 5.2.0_react@16.14.0 + tiny-invariant: 1.1.0 + tiny-warning: 1.0.3 + dev: true + /react-router-dom/5.2.0_react@17.0.2: + resolution: {integrity: sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==} + peerDependencies: + react: '>=15' dependencies: '@babel/runtime': 7.13.10 history: 4.10.1 @@ -11614,12 +12434,29 @@ packages: react-router: 5.2.0_react@17.0.2 tiny-invariant: 1.1.0 tiny-warning: 1.0.3 - dev: false + + /react-router/5.2.0_react@16.14.0: + resolution: {integrity: sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==} peerDependencies: react: '>=15' - resolution: - integrity: sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA== + dependencies: + '@babel/runtime': 7.13.10 + history: 4.10.1 + hoist-non-react-statics: 3.3.2 + loose-envify: 1.4.0 + mini-create-react-context: 0.4.1_prop-types@15.7.2+react@16.14.0 + path-to-regexp: 1.8.0 + prop-types: 15.7.2 + react: 16.14.0 + react-is: 16.13.1 + tiny-invariant: 1.1.0 + tiny-warning: 1.0.3 + dev: true + /react-router/5.2.0_react@17.0.2: + resolution: {integrity: sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==} + peerDependencies: + react: '>=15' dependencies: '@babel/runtime': 7.13.10 history: 4.10.1 @@ -11632,29 +12469,36 @@ packages: react-is: 16.13.1 tiny-invariant: 1.1.0 tiny-warning: 1.0.3 - dev: false + + /react-transition-group/4.4.5_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==} peerDependencies: - react: '>=15' - resolution: - integrity: sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw== - /react-stikky/0.1.30: + react: '>=16.6.0' + react-dom: '>=16.6.0' dependencies: - classnames: 2.2.6 - window-scroll: 1.0.0 + '@babel/runtime': 7.20.13 + dom-helpers: 5.2.1 + loose-envify: 1.4.0 + prop-types: 15.8.1 + react: 17.0.2 + react-dom: 17.0.2_react@17.0.2 dev: false - resolution: - integrity: sha512-qzt3MF/t3VoXLPQl4AY6+0jIdIIA9LdZgxwjniYWLWgPxPHGzbkxHOSlYAs+JrEm//9mxFCwS2i2kEUKrS3ewA== + /react-universal-interface/0.6.2_react@17.0.2+tslib@2.1.0: + resolution: {integrity: sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw==} + peerDependencies: + react: '*' + tslib: '*' dependencies: react: 17.0.2 tslib: 2.1.0 dev: false - peerDependencies: - react: '*' - tslib: '*' - resolution: - integrity: sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw== + /react-use/15.3.8_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-GeGcrmGuUvZrY5wER3Lnph9DSYhZt5nEjped4eKDq8BRGr2CnLf9bDQWG9RFc7oCPphnscUUdOovzq0E5F2c6Q==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 + react-dom: ^16.8.0 || ^17.0.0 dependencies: '@types/js-cookie': 2.2.6 '@xobotyi/scrollbar-width': 1.9.5 @@ -11673,61 +12517,62 @@ packages: ts-easing: 0.2.0 tslib: 2.1.0 dev: false - peerDependencies: - react: ^16.8.0 || ^17.0.0 - react-dom: ^16.8.0 || ^17.0.0 - resolution: - integrity: 
sha512-GeGcrmGuUvZrY5wER3Lnph9DSYhZt5nEjped4eKDq8BRGr2CnLf9bDQWG9RFc7oCPphnscUUdOovzq0E5F2c6Q== + + /react/16.14.0: + resolution: {integrity: sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g==} + engines: {node: '>=0.10.0'} + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + prop-types: 15.7.2 + dev: true + /react/17.0.2: + resolution: {integrity: sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==} + engines: {node: '>=0.10.0'} dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== + /read-pkg-up/2.0.0: + resolution: {integrity: sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=} + engines: {node: '>=4'} dependencies: find-up: 2.1.0 read-pkg: 2.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4= + /read-pkg-up/7.0.1: + resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} + engines: {node: '>=8'} dependencies: find-up: 4.1.0 read-pkg: 5.2.0 type-fest: 0.8.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== + /read-pkg/2.0.0: + resolution: {integrity: sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=} + engines: {node: '>=4'} dependencies: load-json-file: 2.0.0 normalize-package-data: 2.5.0 path-type: 2.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg= + /read-pkg/5.2.0: + resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} + engines: {node: '>=8'} dependencies: '@types/normalize-package-data': 2.4.0 normalize-package-data: 2.5.0 parse-json: 5.2.0 type-fest: 0.6.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== + /readable-stream/2.3.7: + resolution: {integrity: sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==} dependencies: core-util-is: 1.0.2 inherits: 2.0.4 @@ -11737,50 +12582,49 @@ packages: string_decoder: 1.1.1 util-deprecate: 1.0.2 dev: true - resolution: - integrity: sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + /readable-stream/3.6.0: + resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==} + engines: {node: '>= 6'} dependencies: inherits: 2.0.4 string_decoder: 1.3.0 util-deprecate: 1.0.2 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + /readdirp/2.2.1: + resolution: {integrity: sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==} + engines: {node: '>=0.10'} dependencies: graceful-fs: 4.2.6 micromatch: 3.1.10 readable-stream: 2.3.7 dev: true - engines: - node: '>=0.10' - resolution: - integrity: sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== + /readdirp/3.5.0: + resolution: {integrity: sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==} + engines: {node: '>=8.10.0'} dependencies: picomatch: 2.2.2 dev: 
true - engines: - node: '>=8.10.0' + + /readdirp/3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + dependencies: + picomatch: 2.3.1 + dev: true optional: true - resolution: - integrity: sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== + /rechoir/0.7.0: + resolution: {integrity: sha512-ADsDEH2bvbjltXEP+hTIAmeFekTFK0V2BTxMkok6qILyAJEXV0AFfoWcAq4yfll5VdIMd/RVXq0lR+wQi5ZU3Q==} + engines: {node: '>= 0.10'} dependencies: resolve: 1.18.1 dev: true - engines: - node: '>= 0.10' - resolution: - integrity: sha512-ADsDEH2bvbjltXEP+hTIAmeFekTFK0V2BTxMkok6qILyAJEXV0AFfoWcAq4yfll5VdIMd/RVXq0lR+wQi5ZU3Q== - /recoil/0.1.3_react-dom@17.0.2+react@17.0.2: - dependencies: - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false + + /recoil/0.3.1_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-KNA3DRqgxX4rRC8E7fc6uIw7BACmMPuraIYy+ejhE8tsw7w32CetMm8w7AMZa34wzanKKkev3vl3H7Z4s0QSiA==} peerDependencies: react: '>=16.13.1' react-dom: '*' @@ -11790,84 +12634,96 @@ packages: optional: true react-native: optional: true - resolution: - integrity: sha512-/Rm7wN7jqCjhtFK1TgtK0V115SUXNu6d4QYvwxWNLydib0QChSmpB6U8CaHoRPS0MFWtAIsD/IFjpbfk/OYm7Q== + dependencies: + hamt_plus: 1.0.2 + react: 17.0.2 + react-dom: 17.0.2_react@17.0.2 + dev: false + /recursive-readdir/2.2.2: + resolution: {integrity: sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==} + engines: {node: '>=0.10.0'} dependencies: minimatch: 3.0.4 dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + /redent/3.0.0: + resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} + engines: {node: '>=8'} dependencies: indent-string: 4.0.0 strip-indent: 3.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + /redux/4.0.5: + resolution: {integrity: sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w==} dependencies: loose-envify: 1.4.0 symbol-observable: 1.2.0 dev: false - resolution: - integrity: sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w== + /regenerate-unicode-properties/8.2.0: + resolution: {integrity: sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA==} + engines: {node: '>=4'} dependencies: regenerate: 1.4.2 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA== + /regenerate/1.4.2: + resolution: {integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==} dev: true - resolution: - integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + /regenerator-runtime/0.11.1: - resolution: - integrity: sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== + resolution: {integrity: sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==} + dev: true + + /regenerator-runtime/0.13.11: + resolution: {integrity: 
sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} + dev: false + + /regenerator-runtime/0.13.5: + resolution: {integrity: sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==} + dev: true + /regenerator-runtime/0.13.7: - resolution: - integrity: sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== + resolution: {integrity: sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==} + + /regenerator-runtime/0.13.9: + resolution: {integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==} + /regenerator-transform/0.14.5: + resolution: {integrity: sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==} dependencies: '@babel/runtime': 7.13.10 dev: true - resolution: - integrity: sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw== + /regex-not/1.0.2: + resolution: {integrity: sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==} + engines: {node: '>=0.10.0'} dependencies: extend-shallow: 3.0.2 safe-regex: 1.1.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + /regex-parser/2.2.11: + resolution: {integrity: sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q==} dev: true - resolution: - integrity: sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== + /regexp.prototype.flags/1.3.1: + resolution: {integrity: sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA==} + engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 dev: true - engines: - node: '>= 0.4' - resolution: - integrity: sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA== + /regexpp/3.1.0: + resolution: {integrity: sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== + /regexpu-core/4.7.1: + resolution: {integrity: sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ==} + engines: {node: '>=4'} dependencies: regenerate: 1.4.2 regenerate-unicode-properties: 8.2.0 @@ -11876,36 +12732,115 @@ packages: unicode-match-property-ecmascript: 1.0.4 unicode-match-property-value-ecmascript: 1.2.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ== + /regjsgen/0.5.2: + resolution: {integrity: sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==} dev: true - resolution: - integrity: sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A== + /regjsparser/0.6.9: + resolution: {integrity: sha512-ZqbNRz1SNjLAiYuwY0zoXW8Ne675IX5q+YHioAGbCw4X96Mjl2+dcX9B2ciaeyYjViDAfvIjFpQjJgLttTEERQ==} + hasBin: true dependencies: jsesc: 0.5.0 dev: true - hasBin: true - resolution: - integrity: 
sha512-ZqbNRz1SNjLAiYuwY0zoXW8Ne675IX5q+YHioAGbCw4X96Mjl2+dcX9B2ciaeyYjViDAfvIjFpQjJgLttTEERQ== + + /rehype-autolink-headings/4.0.0: + resolution: {integrity: sha512-2lglJ+4S3A4RCz+zlKVWj1wHvwO4bjunAoEOgMfjphT59EVXwdMiJzrL/A2fuAX/33k/LhkGW6BEK1Cl1I5WQw==} + dependencies: + extend: 3.0.2 + hast-util-has-property: 1.0.4 + hast-util-is-element: 1.1.0 + unist-util-visit: 2.0.3 + dev: true + + /rehype-mathjax/3.1.0: + resolution: {integrity: sha512-Pmz92Y56lBFmDjFc9nIdrKu1xzKSBYevcwKiKiG7b5JJg74q1E62nRSbPEm37vXaXn7Bn25iRsWcP39bJKkMxg==} + dependencies: + '@types/mathjax': 0.0.36 + hast-util-from-dom: 3.0.0 + hast-util-to-text: 2.0.1 + jsdom: 16.5.2 + mathjax-full: 3.2.0 + unist-util-visit: 2.0.3 + transitivePeerDependencies: + - bufferutil + - canvas + - utf-8-validate + dev: true + + /rehype-remove-comments/4.0.2: + resolution: {integrity: sha512-E2FNohTuIs7QzUnEQs3SdYdCScsTgUN7yPeDNWi+gsvx+pbLzIAyp27TWz3Gm64jpdLi7/6HxyRHxdd1NVQ37A==} + dependencies: + hast-util-is-conditional-comment: 1.0.4 + unist-util-filter: 2.0.3 + dev: true + + /rehype-stringify/8.0.0: + resolution: {integrity: sha512-VkIs18G0pj2xklyllrPSvdShAV36Ff3yE5PUO9u36f6+2qJFnn22Z5gKwBOwgXviux4UC7K+/j13AnZfPICi/g==} + dependencies: + hast-util-to-html: 7.1.3 + dev: true + /relateurl/0.2.7: + resolution: {integrity: sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=} + engines: {node: '>= 0.10'} dev: true - engines: - node: '>= 0.10' - resolution: - integrity: sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= + + /remark-frontmatter/3.0.0: + resolution: {integrity: sha512-mSuDd3svCHs+2PyO29h7iijIZx4plX0fheacJcAoYAASfgzgVIcXGYSq9GFyYocFLftQs8IOmmkgtOovs6d4oA==} + dependencies: + mdast-util-frontmatter: 0.2.0 + micromark-extension-frontmatter: 0.2.2 + dev: true + + /remark-gfm/1.0.0: + resolution: {integrity: sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==} + dependencies: + mdast-util-gfm: 0.1.2 + micromark-extension-gfm: 0.3.3 + transitivePeerDependencies: + - supports-color + dev: true + + /remark-math/4.0.0: + resolution: {integrity: sha512-lH7SoQenXtQrvL0bm+mjZbvOk//YWNuyR+MxV18Qyv8rgFmMEGNuB0TSCQDkoDaiJ40FCnG8lxErc/zhcedYbw==} + dependencies: + mdast-util-math: 0.1.2 + micromark-extension-math: 0.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /remark-parse/9.0.0: + resolution: {integrity: sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==} + dependencies: + mdast-util-from-markdown: 0.8.5 + transitivePeerDependencies: + - supports-color + dev: true + + /remark-rehype/8.1.0: + resolution: {integrity: sha512-EbCu9kHgAxKmW1yEYjx3QafMyGY3q8noUbNUI5xyKbaFP89wbhDrKxyIQNukNYthzjNHZu6J7hwFg7hRm1svYA==} + dependencies: + mdast-util-to-hast: 10.2.0 + dev: true + + /remark-stringify/9.0.1: + resolution: {integrity: sha512-mWmNg3ZtESvZS8fv5PTvaPckdL4iNlCHTt8/e/8oN08nArHRHjNZMKzA/YW3+p7/lYqIw4nx1XsjCBo/AxNChg==} + dependencies: + mdast-util-to-markdown: 0.6.5 + dev: true + /remove-accents/0.4.2: + resolution: {integrity: sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U=} dev: false - resolution: - integrity: sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U= + /remove-trailing-separator/1.1.0: + resolution: {integrity: sha1-wkvOKig62tW8P1jg1IJJuSN52O8=} dev: true - resolution: - integrity: sha1-wkvOKig62tW8P1jg1IJJuSN52O8= + /renderkid/2.0.5: + resolution: {integrity: sha512-ccqoLg+HLOHq1vdfYNm4TBeaCDIi1FLt3wGojTDSvdewUv65oTmI3cnT2E4hRjl1gzKZIPK+KZrXzlUYKnR+vQ==} dependencies: css-select: 2.1.0 dom-converter: 0.2.0 @@ -11913,44 +12848,42 @@ packages: lodash: 4.17.21 strip-ansi: 3.0.1 dev: 
true - resolution: - integrity: sha512-ccqoLg+HLOHq1vdfYNm4TBeaCDIi1FLt3wGojTDSvdewUv65oTmI3cnT2E4hRjl1gzKZIPK+KZrXzlUYKnR+vQ== + /repeat-element/1.1.3: - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== + resolution: {integrity: sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==} + engines: {node: '>=0.10.0'} + /repeat-string/1.6.1: - engines: - node: '>=0.10' - resolution: - integrity: sha1-jcrkcOHIirwtYA//Sndihtp15jc= + resolution: {integrity: sha1-jcrkcOHIirwtYA//Sndihtp15jc=} + engines: {node: '>=0.10'} + /request-promise-core/1.1.4_request@2.88.2: + resolution: {integrity: sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw==} + engines: {node: '>=0.10.0'} + peerDependencies: + request: ^2.34 dependencies: lodash: 4.17.21 request: 2.88.2 dev: true - engines: - node: '>=0.10.0' + + /request-promise-native/1.0.9_request@2.88.2: + resolution: {integrity: sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g==} + engines: {node: '>=0.12.0'} + deprecated: request-promise-native has been deprecated because it extends the now deprecated request package, see https://github.com/request/request/issues/3142 peerDependencies: request: ^2.34 - resolution: - integrity: sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw== - /request-promise-native/1.0.9_request@2.88.2: dependencies: request: 2.88.2 request-promise-core: 1.1.4_request@2.88.2 stealthy-require: 1.1.1 tough-cookie: 2.5.0 - deprecated: request-promise-native has been deprecated because it extends the now deprecated request package, see https://github.com/request/request/issues/3142 dev: true - engines: - node: '>=0.12.0' - peerDependencies: - request: ^2.34 - resolution: - integrity: sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g== + /request/2.88.2: + resolution: {integrity: sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==} + engines: {node: '>= 6'} + deprecated: request has been deprecated, see https://github.com/request/request/issues/3142 dependencies: aws-sign2: 0.7.0 aws4: 1.11.0 @@ -11972,75 +12905,64 @@ packages: tough-cookie: 2.5.0 tunnel-agent: 0.6.0 uuid: 3.4.0 - deprecated: request has been deprecated, see https://github.com/request/request/issues/3142 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + /require-directory/2.1.1: + resolution: {integrity: sha1-jGStX9MNqxyXbiNE/+f3kqam30I=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + /require-from-string/2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + /require-main-filename/2.0.0: + resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} dev: true - resolution: - integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + 
/requires-port/1.0.0: + resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} dev: true - resolution: - integrity: sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= + /resize-observer-polyfill/1.5.1: - dev: false - resolution: - integrity: sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== + resolution: {integrity: sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==} + /resolve-cwd/2.0.0: + resolution: {integrity: sha1-AKn3OHVW4nA46uIyyqNypqWbZlo=} + engines: {node: '>=4'} dependencies: resolve-from: 3.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= + /resolve-cwd/3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} dependencies: resolve-from: 5.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + /resolve-from/3.0.0: + resolution: {integrity: sha1-six699nWiBvItuZTM17rywoYh0g=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-six699nWiBvItuZTM17rywoYh0g= + /resolve-from/4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + /resolve-from/5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + /resolve-pathname/3.0.0: - dev: false - resolution: - integrity: sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== + resolution: {integrity: sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==} + /resolve-url-loader/3.1.2: + resolution: {integrity: sha512-QEb4A76c8Mi7I3xNKXlRKQSlLBwjUV/ULFMP+G7n3/7tJZ8MG5wsZ3ucxP1Jz8Vevn6fnJsxDx9cIls+utGzPQ==} + engines: {node: '>=6.0.0'} dependencies: adjust-sourcemap-loader: 3.0.0 camelcase: 5.3.1 @@ -12053,107 +12975,102 @@ packages: rework-visit: 1.0.0 source-map: 0.6.1 dev: true - engines: - node: '>=6.0.0' - resolution: - integrity: sha512-QEb4A76c8Mi7I3xNKXlRKQSlLBwjUV/ULFMP+G7n3/7tJZ8MG5wsZ3ucxP1Jz8Vevn6fnJsxDx9cIls+utGzPQ== + /resolve-url/0.2.1: + resolution: {integrity: sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=} deprecated: https://github.com/lydell/resolve-url#deprecated - resolution: - integrity: sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= + /resolve/1.18.1: + resolution: {integrity: sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA==} dependencies: is-core-module: 2.2.0 path-parse: 1.0.6 dev: true - resolution: - integrity: sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA== + /resolve/2.0.0-next.3: + resolution: {integrity: sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q==} dependencies: is-core-module: 2.2.0 path-parse: 1.0.6 dev: true - resolution: - integrity: 
sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== + /restore-cursor/3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} dependencies: onetime: 5.1.2 signal-exit: 3.0.3 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== + /ret/0.1.15: - engines: - node: '>=0.12' - resolution: - integrity: sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + resolution: {integrity: sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==} + engines: {node: '>=0.12'} + /retry/0.12.0: + resolution: {integrity: sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=} + engines: {node: '>= 4'} dev: true - engines: - node: '>= 4' - resolution: - integrity: sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= + /reusify/1.0.4: - engines: - iojs: '>=1.0.0' - node: '>=0.10.0' - resolution: - integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + /rework-visit/1.0.0: + resolution: {integrity: sha1-mUWygD8hni96ygCtuLyfZA+ELJo=} dev: true - resolution: - integrity: sha1-mUWygD8hni96ygCtuLyfZA+ELJo= + /rework/1.0.1: + resolution: {integrity: sha1-MIBqhBNCtUUQqkEQhQzUhTQUSqc=} dependencies: convert-source-map: 0.3.5 css: 2.2.4 dev: true - resolution: - integrity: sha1-MIBqhBNCtUUQqkEQhQzUhTQUSqc= + /rgb-regex/1.0.1: + resolution: {integrity: sha1-wODWiC3w4jviVKR16O3UGRX+rrE=} dev: true - resolution: - integrity: sha1-wODWiC3w4jviVKR16O3UGRX+rrE= + /rgba-regex/1.0.0: + resolution: {integrity: sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=} dev: true - resolution: - integrity: sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= + /rimraf/2.7.1: + resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} + hasBin: true dependencies: glob: 7.1.6 dev: true - hasBin: true - resolution: - integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + /rimraf/3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + hasBin: true dependencies: glob: 7.1.6 - hasBin: true - resolution: - integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + /ripemd160/2.0.2: + resolution: {integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==} dependencies: hash-base: 3.1.0 inherits: 2.0.4 dev: true - resolution: - integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== + /rollup-plugin-babel/4.4.0_@babel+core@7.12.3+rollup@1.32.1: + resolution: {integrity: sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw==} + deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-babel. + peerDependencies: + '@babel/core': 7 || ^7.0.0-rc.2 + rollup: '>=0.60.0 <3' dependencies: '@babel/core': 7.12.3 '@babel/helper-module-imports': 7.13.12 rollup: 1.32.1 rollup-pluginutils: 2.8.2 - deprecated: This package has been deprecated and is no longer maintained. 
Please use @rollup/plugin-babel. dev: true - peerDependencies: - '@babel/core': 7 || ^7.0.0-rc.2 - rollup: '>=0.60.0 <3' - resolution: - integrity: sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw== + /rollup-plugin-terser/5.3.1_rollup@1.32.1: + resolution: {integrity: sha512-1pkwkervMJQGFYvM9nscrUoncPwiKR/K+bHdjv6PFgRo3cgPHoRT83y2Aa3GvINj4539S15t/tpFPb775TDs6w==} + peerDependencies: + rollup: '>=0.66.0 <3' dependencies: '@babel/code-frame': 7.12.13 jest-worker: 24.9.0 @@ -12162,77 +13079,72 @@ packages: serialize-javascript: 4.0.0 terser: 4.8.0 dev: true - peerDependencies: - rollup: '>=0.66.0 <3' - resolution: - integrity: sha512-1pkwkervMJQGFYvM9nscrUoncPwiKR/K+bHdjv6PFgRo3cgPHoRT83y2Aa3GvINj4539S15t/tpFPb775TDs6w== + /rollup-pluginutils/2.8.2: + resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==} dependencies: estree-walker: 0.6.1 dev: true - resolution: - integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ== + /rollup/1.32.1: + resolution: {integrity: sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==} + hasBin: true dependencies: '@types/estree': 0.0.47 '@types/node': 12.20.7 acorn: 7.4.1 dev: true - hasBin: true - resolution: - integrity: sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A== + /rsvp/4.8.5: + resolution: {integrity: sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==} + engines: {node: 6.* || >= 7.*} dev: true - engines: - node: 6.* || >= 7.* - resolution: - integrity: sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== + /rtl-css-js/1.14.0: + resolution: {integrity: sha512-Dl5xDTeN3e7scU1cWX8c9b6/Nqz3u/HgR4gePc1kWXYiQWVQbKCEyK6+Hxve9LbcJ5EieHy1J9nJCN3grTtGwg==} dependencies: '@babel/runtime': 7.13.10 dev: false - resolution: - integrity: sha512-Dl5xDTeN3e7scU1cWX8c9b6/Nqz3u/HgR4gePc1kWXYiQWVQbKCEyK6+Hxve9LbcJ5EieHy1J9nJCN3grTtGwg== + /run-parallel/1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} dependencies: queue-microtask: 1.2.3 - resolution: - integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + /run-queue/1.0.3: + resolution: {integrity: sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec=} dependencies: aproba: 1.2.0 dev: true - resolution: - integrity: sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= - /rxjs-compat/6.6.7: - dev: false - resolution: - integrity: sha512-szN4fK+TqBPOFBcBcsR0g2cmTTUF/vaFEOZNuSdfU8/pGFnNmmn2u8SystYXG1QMrjOPBc6XTKHMVfENDf6hHw== + /rxjs/6.6.7: + resolution: {integrity: sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==} + engines: {npm: '>=2.0.0'} dependencies: tslib: 1.14.1 - engines: - npm: '>=2.0.0' - resolution: - integrity: sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== + dev: true + /safe-buffer/5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} dev: true - resolution: - integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + /safe-buffer/5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} 
dev: true - resolution: - integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + /safe-regex/1.1.0: + resolution: {integrity: sha1-QKNmnzsHfR6UPURinhV91IAjvy4=} dependencies: ret: 0.1.15 - resolution: - integrity: sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + /safer-buffer/2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} dev: true - resolution: - integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + /sane/4.1.0: + resolution: {integrity: sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA==} + engines: {node: 6.* || 8.* || >= 10.*} + hasBin: true dependencies: '@cnakazawa/watch': 1.0.4 anymatch: 2.0.0 @@ -12244,26 +13156,14 @@ packages: minimist: 1.2.5 walker: 1.0.7 dev: true - engines: - node: 6.* || 8.* || >= 10.* - hasBin: true - resolution: - integrity: sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA== + /sanitize.css/10.0.0: + resolution: {integrity: sha512-vTxrZz4dX5W86M6oVWVdOVe72ZiPs41Oi7Z6Km4W5Turyz28mrXSJhhEBZoRtzJWIv3833WKVwLSDWWkEfupMg==} dev: true - resolution: - integrity: sha512-vTxrZz4dX5W86M6oVWVdOVe72ZiPs41Oi7Z6Km4W5Turyz28mrXSJhhEBZoRtzJWIv3833WKVwLSDWWkEfupMg== + /sass-loader/8.0.2_webpack@4.44.2: - dependencies: - clone-deep: 4.0.1 - loader-utils: 1.4.0 - neo-async: 2.6.2 - schema-utils: 2.7.1 - semver: 6.3.0 - webpack: 4.44.2_webpack-cli@4.6.0 - dev: true - engines: - node: '>= 8.9.0' + resolution: {integrity: sha512-7o4dbSK8/Ol2KflEmSco4jTjQoV988bM82P9CZdmo9hR3RLnvNc0ufMNdMrB0caq38JQ/FgF4/7RcbcfKzxoFQ==} + engines: {node: '>= 8.9.0'} peerDependencies: fibers: '>= 3.1.0' node-sass: ^4.0.0 @@ -12276,112 +13176,124 @@ packages: optional: true sass: optional: true - resolution: - integrity: sha512-7o4dbSK8/Ol2KflEmSco4jTjQoV988bM82P9CZdmo9hR3RLnvNc0ufMNdMrB0caq38JQ/FgF4/7RcbcfKzxoFQ== + dependencies: + clone-deep: 4.0.1 + loader-utils: 1.4.0 + neo-async: 2.6.2 + schema-utils: 2.7.1 + semver: 6.3.0 + webpack: 4.44.2_webpack-cli@4.6.0 + dev: true + /sax/1.2.4: + resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} dev: true - resolution: - integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + /saxes/5.0.1: + resolution: {integrity: sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==} + engines: {node: '>=10'} dependencies: xmlchars: 2.2.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + /scheduler/0.19.1: + resolution: {integrity: sha512-n/zwRWRYSUj0/3g/otKDRPMh6qv2SYMWNq85IEa8iZyAv8od9zDYpGSnpBEjNgcMNq6Scbu5KfIPxNF72R/2EA==} dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 - dev: false - resolution: - integrity: sha512-n/zwRWRYSUj0/3g/otKDRPMh6qv2SYMWNq85IEa8iZyAv8od9zDYpGSnpBEjNgcMNq6Scbu5KfIPxNF72R/2EA== + dev: true + /scheduler/0.20.2: + resolution: {integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==} dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 dev: false - resolution: - integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== + /schema-utils/1.0.0: + resolution: {integrity: 
sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==} + engines: {node: '>= 4'} dependencies: ajv: 6.12.6 ajv-errors: 1.0.1_ajv@6.12.6 ajv-keywords: 3.5.2_ajv@6.12.6 dev: true - engines: - node: '>= 4' - resolution: - integrity: sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== + /schema-utils/2.7.1: + resolution: {integrity: sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==} + engines: {node: '>= 8.9.0'} dependencies: '@types/json-schema': 7.0.7 ajv: 6.12.6 ajv-keywords: 3.5.2_ajv@6.12.6 dev: true - engines: - node: '>= 8.9.0' - resolution: - integrity: sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + /schema-utils/3.0.0: + resolution: {integrity: sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA==} + engines: {node: '>= 10.13.0'} dependencies: '@types/json-schema': 7.0.7 ajv: 6.12.6 ajv-keywords: 3.5.2_ajv@6.12.6 dev: true - engines: - node: '>= 10.13.0' - resolution: - integrity: sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA== + + /schema-utils/3.1.0: + resolution: {integrity: sha512-tTEaeYkyIhEZ9uWgAjDerWov3T9MgX8dhhy2r0IGeeX4W8ngtGl1++dUve/RUqzuaASSh7shwCDJjEzthxki8w==} + engines: {node: '>= 10.13.0'} + dependencies: + '@types/json-schema': 7.0.8 + ajv: 6.12.6 + ajv-keywords: 3.5.2_ajv@6.12.6 + dev: true + /screenfull/5.1.0: + resolution: {integrity: sha512-dYaNuOdzr+kc6J6CFcBrzkLCfyGcMg+gWkJ8us93IQ7y1cevhQAugFsaCdMHb6lw8KV3xPzSxzH7zM1dQap9mA==} + engines: {node: '>=0.10.0'} dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-dYaNuOdzr+kc6J6CFcBrzkLCfyGcMg+gWkJ8us93IQ7y1cevhQAugFsaCdMHb6lw8KV3xPzSxzH7zM1dQap9mA== - /scroll-into-view-if-needed/2.2.28: + + /scroll-into-view-if-needed/2.2.20: + resolution: {integrity: sha512-P9kYMrhi9f6dvWwTGpO5I3HgjSU/8Mts7xL3lkoH5xlewK7O9Obdc5WmMCzppln7bCVGNmf3qfoZXrpCeyNJXw==} dependencies: - compute-scroll-into-view: 1.0.17 + compute-scroll-into-view: 1.0.11 dev: false - resolution: - integrity: sha512-8LuxJSuFVc92+0AdNv4QOxRL4Abeo1DgLnGNkn1XlaujPH/3cCFz3QI60r2VNu4obJJROzgnIUw5TKQkZvZI1w== + /select-hose/2.0.0: + resolution: {integrity: sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=} dev: true - resolution: - integrity: sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= + /selfsigned/1.10.8: + resolution: {integrity: sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w==} dependencies: node-forge: 0.10.0 dev: true - resolution: - integrity: sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w== + /semver-compare/1.0.0: + resolution: {integrity: sha1-De4hahyUGrN+nvsXiPavxf9VN/w=} dev: true - resolution: - integrity: sha1-De4hahyUGrN+nvsXiPavxf9VN/w= + /semver/5.7.1: + resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} hasBin: true - resolution: - integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + /semver/6.3.0: - dev: true + resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} hasBin: true - resolution: - integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - /semver/7.0.0: dev: true + + /semver/7.0.0: + resolution: {integrity: 
sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==} hasBin: true - resolution: - integrity: sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== - /semver/7.3.2: dev: true - engines: - node: '>=10' + + /semver/7.3.2: + resolution: {integrity: sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==} + engines: {node: '>=10'} hasBin: true - resolution: - integrity: sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ== + dev: true + /send/0.17.1: + resolution: {integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==} + engines: {node: '>= 0.8.0'} dependencies: debug: 2.6.9 depd: 1.1.2 @@ -12397,23 +13309,22 @@ packages: range-parser: 1.2.1 statuses: 1.5.0 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== + /serialize-javascript/4.0.0: + resolution: {integrity: sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==} dependencies: randombytes: 2.1.0 dev: true - resolution: - integrity: sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + /serialize-javascript/5.0.1: + resolution: {integrity: sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==} dependencies: randombytes: 2.1.0 dev: true - resolution: - integrity: sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== + /serve-index/1.9.1: + resolution: {integrity: sha1-03aNabHn2C5c4FD/9bRTvqEqkjk=} + engines: {node: '>= 0.8.0'} dependencies: accepts: 1.3.7 batch: 0.6.1 @@ -12423,171 +13334,170 @@ packages: mime-types: 2.1.29 parseurl: 1.3.3 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= + /serve-static/1.14.1: + resolution: {integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==} + engines: {node: '>= 0.8.0'} dependencies: encodeurl: 1.0.2 escape-html: 1.0.3 parseurl: 1.3.3 send: 0.17.1 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== + /set-blocking/2.0.0: + resolution: {integrity: sha1-BF+XgtARrppoA93TgrJDkrPYkPc=} dev: true - resolution: - integrity: sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + /set-harmonic-interval/1.0.1: + resolution: {integrity: sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g==} + engines: {node: '>=6.9'} dev: false - engines: - node: '>=6.9' - resolution: - integrity: sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g== + /set-value/2.0.1: + resolution: {integrity: sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==} + engines: {node: '>=0.10.0'} dependencies: extend-shallow: 2.0.1 is-extendable: 0.1.1 is-plain-object: 2.0.4 split-string: 3.1.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + /setimmediate/1.0.5: + resolution: {integrity: sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=} dev: true - resolution: - integrity: sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= + /setprototypeof/1.1.0: + resolution: 
{integrity: sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==} dev: true - resolution: - integrity: sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + /setprototypeof/1.1.1: + resolution: {integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==} dev: true - resolution: - integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== + /sha.js/2.4.11: + resolution: {integrity: sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==} + hasBin: true dependencies: inherits: 2.0.4 safe-buffer: 5.2.1 dev: true - hasBin: true - resolution: - integrity: sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== + /shallow-clone/3.0.1: + resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==} + engines: {node: '>=8'} dependencies: kind-of: 6.0.3 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + /shallowequal/1.1.0: - dev: false - resolution: - integrity: sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== + resolution: {integrity: sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==} + /shebang-command/1.2.0: + resolution: {integrity: sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=} + engines: {node: '>=0.10.0'} dependencies: shebang-regex: 1.0.0 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= + /shebang-command/2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} dependencies: shebang-regex: 3.0.0 - engines: - node: '>=8' - resolution: - integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + /shebang-regex/1.0.0: + resolution: {integrity: sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= + /shebang-regex/3.0.0: - engines: - node: '>=8' - resolution: - integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + /shell-quote/1.7.2: + resolution: {integrity: sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==} dev: false - resolution: - integrity: sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== + /shellwords/0.1.1: + resolution: {integrity: sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==} dev: true optional: true - resolution: - integrity: sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== + /side-channel/1.0.4: + resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} dependencies: call-bind: 1.0.2 get-intrinsic: 1.1.1 - object-inspect: 1.9.0 - dev: true - resolution: - integrity: 
sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + object-inspect: 1.11.0 + /signal-exit/3.0.3: + resolution: {integrity: sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==} dev: true - resolution: - integrity: sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== + /simple-swizzle/0.2.2: + resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} dependencies: is-arrayish: 0.3.2 - dev: true - resolution: - integrity: sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo= + /sisteransi/1.0.5: - resolution: - integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + + /sitemap/6.4.0: + resolution: {integrity: sha512-DoPKNc2/apQZTUnfiOONWctwq7s6dZVspxAZe2VPMNtoqNq7HgXRvlRnbIpKjf+8+piQdWncwcy+YhhTGY5USQ==} + engines: {node: '>=10.3.0', npm: '>=5.6.0'} + hasBin: true + dependencies: + '@types/node': 14.17.20 + '@types/sax': 1.2.3 + arg: 5.0.1 + sax: 1.2.4 + dev: true + /slash/3.0.0: - engines: - node: '>=8' - resolution: - integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + /slash2/2.0.0: + resolution: {integrity: sha512-7ElvBydJPi3MHU/KEOblFSbO/skl4Z69jKkFCpYIYVOMSIZsKi4gYU43HGeZPmjxCXrHekoDAAewphPQNnsqtA==} + engines: {node: '>=6'} + dev: true + /slice-ansi/3.0.0: + resolution: {integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==} + engines: {node: '>=8'} dependencies: ansi-styles: 4.3.0 astral-regex: 2.0.0 is-fullwidth-code-point: 3.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== + /slice-ansi/4.0.0: + resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} + engines: {node: '>=10'} dependencies: ansi-styles: 4.3.0 astral-regex: 2.0.0 is-fullwidth-code-point: 3.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + /snapdragon-node/2.1.1: + resolution: {integrity: sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==} + engines: {node: '>=0.10.0'} dependencies: define-property: 1.0.0 isobject: 3.0.1 snapdragon-util: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + /snapdragon-util/3.0.1: + resolution: {integrity: sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==} + engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + /snapdragon/0.8.2: + resolution: {integrity: sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==} + engines: {node: '>=0.10.0'} dependencies: base: 0.11.2 debug: 2.6.9 @@ -12597,11 +13507,9 @@ 
packages: source-map: 0.5.7 source-map-resolve: 0.5.3 use: 3.1.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + /sockjs-client/1.4.0: + resolution: {integrity: sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g==} dependencies: debug: 3.2.7 eventsource: 1.1.0 @@ -12610,102 +13518,101 @@ packages: json3: 3.3.3 url-parse: 1.5.1 dev: true - resolution: - integrity: sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g== + /sockjs/0.3.20: + resolution: {integrity: sha512-SpmVOVpdq0DJc0qArhF3E5xsxvaiqGNb73XfgBpK1y3UD5gs8DSo8aCTsuT5pX8rssdc2NDIzANwP9eCAiSdTA==} dependencies: faye-websocket: 0.10.0 uuid: 3.4.0 websocket-driver: 0.6.5 dev: true - resolution: - integrity: sha512-SpmVOVpdq0DJc0qArhF3E5xsxvaiqGNb73XfgBpK1y3UD5gs8DSo8aCTsuT5pX8rssdc2NDIzANwP9eCAiSdTA== + /sort-keys/1.1.2: + resolution: {integrity: sha1-RBttTTRnmPG05J6JIK37oOVD+a0=} + engines: {node: '>=0.10.0'} dependencies: is-plain-obj: 1.1.0 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-RBttTTRnmPG05J6JIK37oOVD+a0= + /source-list-map/2.0.1: + resolution: {integrity: sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==} dev: true - resolution: - integrity: sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + /source-map-resolve/0.5.3: + resolution: {integrity: sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==} dependencies: atob: 2.1.2 decode-uri-component: 0.2.0 resolve-url: 0.2.1 source-map-url: 0.4.1 urix: 0.1.0 - resolution: - integrity: sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== + /source-map-resolve/0.6.0: + resolution: {integrity: sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w==} dependencies: atob: 2.1.2 decode-uri-component: 0.2.0 dev: true - resolution: - integrity: sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== + /source-map-support/0.5.19: + resolution: {integrity: sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==} dependencies: buffer-from: 1.1.1 source-map: 0.6.1 dev: true - resolution: - integrity: sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== + /source-map-url/0.4.1: - resolution: - integrity: sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== + resolution: {integrity: sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==} + /source-map/0.5.6: + resolution: {integrity: sha1-dc449SvwczxafwwRjYEzSiu19BI=} + engines: {node: '>=0.10.0'} dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-dc449SvwczxafwwRjYEzSiu19BI= + /source-map/0.5.7: - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + resolution: {integrity: sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=} + engines: {node: '>=0.10.0'} + /source-map/0.6.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: 
'>=0.10.0'} + /source-map/0.7.3: + resolution: {integrity: sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==} + engines: {node: '>= 8'} dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== + /sourcemap-codec/1.4.8: - resolution: - integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} + + /space-separated-tokens/1.1.5: + resolution: {integrity: sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==} + dev: true + /spdx-correct/3.1.1: + resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==} dependencies: spdx-expression-parse: 3.0.1 spdx-license-ids: 3.0.7 dev: true - resolution: - integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + /spdx-exceptions/2.3.0: + resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==} dev: true - resolution: - integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + /spdx-expression-parse/3.0.1: + resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} dependencies: spdx-exceptions: 2.3.0 spdx-license-ids: 3.0.7 dev: true - resolution: - integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + /spdx-license-ids/3.0.7: + resolution: {integrity: sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==} dev: true - resolution: - integrity: sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ== + /spdy-transport/3.0.0_supports-color@6.1.0: + resolution: {integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==} dependencies: debug: 4.3.1_supports-color@6.1.0 detect-node: 2.0.5 @@ -12713,37 +13620,51 @@ packages: obuf: 1.1.2 readable-stream: 3.6.0 wbuf: 1.7.3 + transitivePeerDependencies: + - supports-color dev: true - peerDependencies: - supports-color: '*' - resolution: - integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + /spdy/4.0.2_supports-color@6.1.0: + resolution: {integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==} + engines: {node: '>=6.0.0'} dependencies: debug: 4.3.1_supports-color@6.1.0 handle-thing: 2.0.1 http-deceiver: 1.2.7 select-hose: 2.0.0 spdy-transport: 3.0.0_supports-color@6.1.0 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=6.0.0' - peerDependencies: - supports-color: '*' - resolution: - integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + + /speech-rule-engine/3.3.3: + resolution: {integrity: sha512-0exWw+0XauLjat+f/aFeo5T8SiDsO1JtwpY3qgJE4cWt+yL/Stl0WP4VNDWdh7lzGkubUD9lWP4J1ASnORXfyQ==} + hasBin: true + dependencies: + commander: 7.2.0 + wicked-good-xpath: 1.3.0 + xmldom-sre: 0.1.31 + dev: true + + /split-on-first/1.1.0: + resolution: {integrity: 
sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==} + engines: {node: '>=6'} + dev: true + /split-string/3.1.0: + resolution: {integrity: sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==} + engines: {node: '>=0.10.0'} dependencies: extend-shallow: 3.0.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + /sprintf-js/1.0.3: + resolution: {integrity: sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=} dev: true - resolution: - integrity: sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + /sshpk/1.16.1: + resolution: {integrity: sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==} + engines: {node: '>=0.10.0'} + hasBin: true dependencies: asn1: 0.2.4 assert-plus: 1.0.0 @@ -12755,104 +13676,96 @@ packages: safer-buffer: 2.1.2 tweetnacl: 0.14.5 dev: true - engines: - node: '>=0.10.0' - hasBin: true - resolution: - integrity: sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== + /ssri/6.0.1: + resolution: {integrity: sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==} dependencies: figgy-pudding: 3.5.2 dev: true - resolution: - integrity: sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== + /ssri/8.0.1: + resolution: {integrity: sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==} + engines: {node: '>= 8'} dependencies: minipass: 3.1.3 dev: true - engines: - node: '>= 8' - resolution: - integrity: sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== + /stable/0.1.8: + resolution: {integrity: sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==} dev: true - resolution: - integrity: sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + /stack-generator/2.0.5: + resolution: {integrity: sha512-/t1ebrbHkrLrDuNMdeAcsvynWgoH/i4o8EGGfX7dEYDoTXOYVAkEpFdtshlvabzc6JlJ8Kf9YdFEoz7JkzGN9Q==} dependencies: stackframe: 1.2.0 dev: false - resolution: - integrity: sha512-/t1ebrbHkrLrDuNMdeAcsvynWgoH/i4o8EGGfX7dEYDoTXOYVAkEpFdtshlvabzc6JlJ8Kf9YdFEoz7JkzGN9Q== + /stack-utils/2.0.3: + resolution: {integrity: sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw==} + engines: {node: '>=10'} dependencies: escape-string-regexp: 2.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw== + /stackframe/1.2.0: - resolution: - integrity: sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA== + resolution: {integrity: sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==} + /stacktrace-gps/3.0.4: + resolution: {integrity: sha512-qIr8x41yZVSldqdqe6jciXEaSCKw1U8XTXpjDuy0ki/apyTn/r3w9hDAAQOhZdxvsC93H+WwwEu5cq5VemzYeg==} dependencies: source-map: 0.5.6 stackframe: 1.2.0 dev: false - resolution: - integrity: sha512-qIr8x41yZVSldqdqe6jciXEaSCKw1U8XTXpjDuy0ki/apyTn/r3w9hDAAQOhZdxvsC93H+WwwEu5cq5VemzYeg== + /stacktrace-js/2.0.2: + resolution: {integrity: sha512-Je5vBeY4S1r/RnLydLl0TBTi3F2qdfWmYsGvtfZgEI+SCprPppaIhQf5nGcal4gI4cGpCV/duLcAzT1np6sQqg==} dependencies: error-stack-parser: 2.0.6 
stack-generator: 2.0.5 stacktrace-gps: 3.0.4 dev: false - resolution: - integrity: sha512-Je5vBeY4S1r/RnLydLl0TBTi3F2qdfWmYsGvtfZgEI+SCprPppaIhQf5nGcal4gI4cGpCV/duLcAzT1np6sQqg== + /state-local/1.0.7: + resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==} dev: false - resolution: - integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w== + /static-extend/0.1.2: + resolution: {integrity: sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=} + engines: {node: '>=0.10.0'} dependencies: define-property: 0.2.5 object-copy: 0.1.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= + /statuses/1.5.0: + resolution: {integrity: sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=} + engines: {node: '>= 0.6'} dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + /stealthy-require/1.1.1: + resolution: {integrity: sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= + /store2/2.12.0: + resolution: {integrity: sha512-7t+/wpKLanLzSnQPX8WAcuLCCeuSHoWdQuh9SB3xD0kNOM38DNf+0Oa+wmvxmYueRzkmh6IcdKFtvTa+ecgPDw==} dev: false - resolution: - integrity: sha512-7t+/wpKLanLzSnQPX8WAcuLCCeuSHoWdQuh9SB3xD0kNOM38DNf+0Oa+wmvxmYueRzkmh6IcdKFtvTa+ecgPDw== + /stream-browserify/2.0.2: + resolution: {integrity: sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==} dependencies: inherits: 2.0.4 readable-stream: 2.3.7 dev: true - resolution: - integrity: sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== + /stream-each/1.2.3: + resolution: {integrity: sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw==} dependencies: end-of-stream: 1.4.4 stream-shift: 1.0.1 dev: true - resolution: - integrity: sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== + /stream-http/2.8.3: + resolution: {integrity: sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==} dependencies: builtin-status-codes: 3.0.0 inherits: 2.0.4 @@ -12860,62 +13773,58 @@ packages: to-arraybuffer: 1.0.1 xtend: 4.0.2 dev: true - resolution: - integrity: sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== + /stream-shift/1.0.1: + resolution: {integrity: sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==} dev: true - resolution: - integrity: sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== + /strict-uri-encode/1.1.0: + resolution: {integrity: sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=} + engines: {node: '>=0.10.0'} + dev: true + + /strict-uri-encode/2.0.0: + resolution: {integrity: sha1-ucczDHBChi9rFC3CdLvMWGbONUY=} + engines: {node: '>=4'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= + /string-argv/0.3.1: + resolution: {integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==} + engines: {node: '>=0.6.19'} dev: true - engines: - node: '>=0.6.19' - resolution: - integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg== - /string-convert/0.2.1: - dev: false - resolution: - integrity: 
sha1-aYLMMEn7tM2F+LJFaLnZvznu/5c= + /string-length/4.0.2: + resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} + engines: {node: '>=10'} dependencies: char-regex: 1.0.2 strip-ansi: 6.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + /string-natural-compare/3.0.1: + resolution: {integrity: sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw==} dev: true - resolution: - integrity: sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== + /string-width/3.1.0: + resolution: {integrity: sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==} + engines: {node: '>=6'} dependencies: emoji-regex: 7.0.3 is-fullwidth-code-point: 2.0.0 strip-ansi: 5.2.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== + /string-width/4.2.2: + resolution: {integrity: sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==} + engines: {node: '>=8'} dependencies: emoji-regex: 8.0.0 is-fullwidth-code-point: 3.0.0 strip-ansi: 6.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== + /string.prototype.matchall/4.0.4: + resolution: {integrity: sha512-pknFIWVachNcyqRfaQSeu/FUfpvJTe4uskUSZ9Wc1RijsPuzbZ8TyYT8WCNnntCjUEqQ3vUHMAfVj2+wLAisPQ==} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 @@ -12925,133 +13834,139 @@ packages: regexp.prototype.flags: 1.3.1 side-channel: 1.0.4 dev: true - resolution: - integrity: sha512-pknFIWVachNcyqRfaQSeu/FUfpvJTe4uskUSZ9Wc1RijsPuzbZ8TyYT8WCNnntCjUEqQ3vUHMAfVj2+wLAisPQ== + /string.prototype.trimend/1.0.4: + resolution: {integrity: sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 dev: true - resolution: - integrity: sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== + /string.prototype.trimstart/1.0.4: + resolution: {integrity: sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 dev: true - resolution: - integrity: sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== + /string_decoder/1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} dependencies: safe-buffer: 5.1.2 dev: true - resolution: - integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + /string_decoder/1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} dependencies: safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + + /stringify-entities/3.1.0: + resolution: {integrity: sha512-3FP+jGMmMV/ffZs86MoghGqAoqXAdxLrJP4GUdrDN1aIScYih5tuIO3eF4To5AJZ79KDZ8Fpdy7QJnK8SsL1Vg==} + dependencies: + character-entities-html4: 1.1.4 + character-entities-legacy: 1.1.4 + xtend: 4.0.2 + dev: true + 
/stringify-object/3.3.0: + resolution: {integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==} + engines: {node: '>=4'} dependencies: get-own-enumerable-property-symbols: 3.0.2 is-obj: 1.0.1 is-regexp: 1.0.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + /strip-ansi/3.0.1: + resolution: {integrity: sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=} + engines: {node: '>=0.10.0'} dependencies: ansi-regex: 2.1.1 dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + /strip-ansi/5.2.0: + resolution: {integrity: sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==} + engines: {node: '>=6'} dependencies: ansi-regex: 4.1.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + /strip-ansi/6.0.0: + resolution: {integrity: sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==} + engines: {node: '>=8'} dependencies: ansi-regex: 5.0.0 - engines: - node: '>=8' - resolution: - integrity: sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== + /strip-bom/3.0.0: + resolution: {integrity: sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + /strip-bom/4.0.0: + resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + /strip-comments/1.0.2: + resolution: {integrity: sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw==} + engines: {node: '>=4'} dependencies: babel-extract-comments: 1.0.0 babel-plugin-transform-object-rest-spread: 6.26.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw== + /strip-eof/1.0.0: + resolution: {integrity: sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= + /strip-final-newline/2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + /strip-indent/3.0.0: + resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} + engines: {node: '>=8'} dependencies: min-indent: 1.0.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + /strip-json-comments/2.0.1: + resolution: {integrity: sha1-PFMZQukIwml8DsNEhYwobHygpgo=} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-PFMZQukIwml8DsNEhYwobHygpgo= + /strip-json-comments/3.1.1: + resolution: {integrity: 
sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + /style-loader/1.3.0_webpack@4.44.2: + resolution: {integrity: sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==} + engines: {node: '>= 8.9.0'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 dependencies: loader-utils: 2.0.0 schema-utils: 2.7.1 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>= 8.9.0' - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q== + + /style-to-object/0.3.0: + resolution: {integrity: sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==} + dependencies: + inline-style-parser: 0.1.1 + dev: true + /styled-components/5.2.1_react-dom@17.0.2+react@17.0.2: + resolution: {integrity: sha512-sBdgLWrCFTKtmZm/9x7jkIabjFNVzCUeKfoQsM6R3saImkUnjx0QYdLwJHBjY9ifEcmjDamJDVfknWm1yxZPxQ==} + engines: {node: '>=10'} + peerDependencies: + react: '>= 16.8.0' + react-dom: '>= 16.8.0' + react-is: '>= 16.8.0' dependencies: '@babel/helper-module-imports': 7.13.12 '@babel/traverse': 7.13.13_supports-color@5.5.0 @@ -13066,65 +13981,56 @@ packages: shallowequal: 1.1.0 supports-color: 5.5.0 dev: false - engines: - node: '>=10' - peerDependencies: - react: '>= 16.8.0' - react-dom: '>= 16.8.0' - react-is: '>= 16.8.0' - resolution: - integrity: sha512-sBdgLWrCFTKtmZm/9x7jkIabjFNVzCUeKfoQsM6R3saImkUnjx0QYdLwJHBjY9ifEcmjDamJDVfknWm1yxZPxQ== + /stylehacks/4.0.3: + resolution: {integrity: sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g==} + engines: {node: '>=6.9.0'} dependencies: browserslist: 4.16.3 postcss: 7.0.35 postcss-selector-parser: 3.1.2 dev: true - engines: - node: '>=6.9.0' - resolution: - integrity: sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g== + /stylis/4.0.9: + resolution: {integrity: sha512-ci7pEFNVW3YJiWEzqPOMsAjY6kgraZ3ZgBfQ5HYbNtLJEsQ0G46ejWZpfSSCp/FaSiCSGGhzL9O2lN+2cB6ong==} dev: false - resolution: - integrity: sha512-ci7pEFNVW3YJiWEzqPOMsAjY6kgraZ3ZgBfQ5HYbNtLJEsQ0G46ejWZpfSSCp/FaSiCSGGhzL9O2lN+2cB6ong== + /supports-color/5.5.0: + resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} + engines: {node: '>=4'} dependencies: has-flag: 3.0.0 - engines: - node: '>=4' - resolution: - integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + /supports-color/6.1.0: + resolution: {integrity: sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==} + engines: {node: '>=6'} dependencies: has-flag: 3.0.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== + /supports-color/7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} dependencies: has-flag: 4.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + /supports-hyperlinks/2.1.0: + 
resolution: {integrity: sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA==} + engines: {node: '>=8'} dependencies: has-flag: 4.0.0 supports-color: 7.2.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA== + /svg-parser/2.0.4: + resolution: {integrity: sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==} dev: true - resolution: - integrity: sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + /svgo/1.3.2: + resolution: {integrity: sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==} + engines: {node: '>=4.0.0'} + hasBin: true dependencies: chalk: 2.4.2 coa: 2.0.2 @@ -13140,22 +14046,19 @@ packages: unquote: 1.1.1 util.promisify: 1.0.1 dev: true - engines: - node: '>=4.0.0' - hasBin: true - resolution: - integrity: sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + /symbol-observable/1.2.0: + resolution: {integrity: sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==} + engines: {node: '>=0.10.0'} dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== + /symbol-tree/3.2.4: + resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} dev: true - resolution: - integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + /table/6.0.8: + resolution: {integrity: sha512-OBAdezyozae8IvjHGXBDHByVkLCcsmffXUSj8LXkNb0SluRd4ug3GFCjk6JynZONIPhOkyr0Nnvbq1rlIspXyQ==} + engines: {node: '>=10.0.0'} dependencies: ajv: 8.0.1 is-boolean-object: 1.1.0 @@ -13167,22 +14070,19 @@ packages: slice-ansi: 4.0.0 string-width: 4.2.2 dev: true - engines: - node: '>=10.0.0' - resolution: - integrity: sha512-OBAdezyozae8IvjHGXBDHByVkLCcsmffXUSj8LXkNb0SluRd4ug3GFCjk6JynZONIPhOkyr0Nnvbq1rlIspXyQ== + /tapable/1.1.3: - engines: - node: '>=6' - resolution: - integrity: sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + resolution: {integrity: sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==} + engines: {node: '>=6'} + /tapable/2.2.0: + resolution: {integrity: sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw== + /tar/6.1.0: + resolution: {integrity: sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA==} + engines: {node: '>= 10'} dependencies: chownr: 2.0.0 fs-minipass: 2.1.0 @@ -13191,36 +14091,34 @@ packages: mkdirp: 1.0.4 yallist: 4.0.0 dev: true - engines: - node: '>= 10' - resolution: - integrity: sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA== + /temp-dir/1.0.0: + resolution: {integrity: sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0=} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0= + /tempy/0.3.0: + resolution: {integrity: 
sha512-WrH/pui8YCwmeiAoxV+lpRH9HpRtgBhSR2ViBPgpGb/wnYDzp21R4MN45fsCGvLROvY67o3byhJRYRONJyImVQ==} + engines: {node: '>=8'} dependencies: temp-dir: 1.0.0 type-fest: 0.3.1 unique-string: 1.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-WrH/pui8YCwmeiAoxV+lpRH9HpRtgBhSR2ViBPgpGb/wnYDzp21R4MN45fsCGvLROvY67o3byhJRYRONJyImVQ== + /terminal-link/2.1.1: + resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==} + engines: {node: '>=8'} dependencies: ansi-escapes: 4.3.2 supports-hyperlinks: 2.1.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + /terser-webpack-plugin/1.4.5_webpack@4.44.2: + resolution: {integrity: sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==} + engines: {node: '>= 6.9.0'} + peerDependencies: + webpack: ^4.0.0 dependencies: cacache: 12.0.4 find-cache-dir: 2.1.0 @@ -13233,13 +14131,12 @@ packages: webpack-sources: 1.4.3 worker-farm: 1.7.0 dev: true - engines: - node: '>= 6.9.0' - peerDependencies: - webpack: ^4.0.0 - resolution: - integrity: sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw== + /terser-webpack-plugin/4.2.3_webpack@4.44.2: + resolution: {integrity: sha512-jTgXh40RnvOrLQNgIkwEKnQ8rmHjHK4u+6UBEi+W+FPmvb+uo+chJXntKe7/3lW5mNysgSWD60KyesnhW8D6MQ==} + engines: {node: '>= 10.13.0'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 dependencies: cacache: 15.0.6 find-cache-dir: 3.3.1 @@ -13252,467 +14149,527 @@ packages: webpack: 4.44.2_webpack-cli@4.6.0 webpack-sources: 1.4.3 dev: true - engines: - node: '>= 10.13.0' - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-jTgXh40RnvOrLQNgIkwEKnQ8rmHjHK4u+6UBEi+W+FPmvb+uo+chJXntKe7/3lW5mNysgSWD60KyesnhW8D6MQ== + /terser/4.8.0: + resolution: {integrity: sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==} + engines: {node: '>=6.0.0'} + hasBin: true dependencies: commander: 2.20.3 source-map: 0.6.1 source-map-support: 0.5.19 dev: true - engines: - node: '>=6.0.0' + + /terser/5.6.0: + resolution: {integrity: sha512-vyqLMoqadC1uR0vywqOZzriDYzgEkNJFK4q9GeyOBHIbiECHiWLKcWfbQWAUaPfxkjDhapSlZB9f7fkMrvkVjA==} + engines: {node: '>=10'} hasBin: true - resolution: - integrity: sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw== - /terser/5.6.1: dependencies: commander: 2.20.3 source-map: 0.7.3 source-map-support: 0.5.19 dev: true - engines: - node: '>=10' + + /terser/5.6.1: + resolution: {integrity: sha512-yv9YLFQQ+3ZqgWCUk+pvNJwgUTdlIxUk1WTN+RnaFJe2L7ipG2csPT0ra2XRm7Cs8cxN7QXmK1rFzEwYEQkzXw==} + engines: {node: '>=10'} hasBin: true - resolution: - integrity: sha512-yv9YLFQQ+3ZqgWCUk+pvNJwgUTdlIxUk1WTN+RnaFJe2L7ipG2csPT0ra2XRm7Cs8cxN7QXmK1rFzEwYEQkzXw== + dependencies: + commander: 2.20.3 + source-map: 0.7.3 + source-map-support: 0.5.19 + dev: true + /test-exclude/6.0.0: + resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} + engines: {node: '>=8'} dependencies: '@istanbuljs/schema': 0.1.3 glob: 7.1.6 minimatch: 3.0.4 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + /text-table/0.2.0: - resolution: - integrity: sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + resolution: 
{integrity: sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=} + /throat/5.0.0: + resolution: {integrity: sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==} dev: true - resolution: - integrity: sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA== + /throttle-debounce/2.3.0: + resolution: {integrity: sha512-H7oLPV0P7+jgvrk+6mwwwBDmxTaxnu9HMXmloNLXwnNO0ZxZ31Orah2n8lU1eMPvsaowP2CX+USCgyovXfdOFQ==} + engines: {node: '>=8'} dev: false - engines: - node: '>=8' - resolution: - integrity: sha512-H7oLPV0P7+jgvrk+6mwwwBDmxTaxnu9HMXmloNLXwnNO0ZxZ31Orah2n8lU1eMPvsaowP2CX+USCgyovXfdOFQ== + /through/2.3.8: + resolution: {integrity: sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=} dev: true - resolution: - integrity: sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + /through2/2.0.5: + resolution: {integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==} dependencies: readable-stream: 2.3.7 xtend: 4.0.2 dev: true - resolution: - integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + /thunky/1.1.0: + resolution: {integrity: sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==} dev: true - resolution: - integrity: sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + /timers-browserify/2.0.12: + resolution: {integrity: sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==} + engines: {node: '>=0.6.0'} dependencies: setimmediate: 1.0.5 dev: true - engines: - node: '>=0.6.0' - resolution: - integrity: sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ== + /timsort/0.3.0: + resolution: {integrity: sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=} dev: true - resolution: - integrity: sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= + /tiny-invariant/1.1.0: - dev: false - resolution: - integrity: sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw== + resolution: {integrity: sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==} + /tiny-warning/1.0.3: - dev: false - resolution: - integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== + resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==} + /tmpl/1.0.4: + resolution: {integrity: sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=} dev: true - resolution: - integrity: sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + /to-arraybuffer/1.0.1: + resolution: {integrity: sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=} dev: true - resolution: - integrity: sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= + /to-fast-properties/2.0.0: - engines: - node: '>=4' - resolution: - integrity: sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + /to-object-path/0.3.0: + resolution: {integrity: sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=} + engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= + /to-regex-range/2.1.1: + resolution: {integrity: sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=} + engines: {node: '>=0.10.0'} dependencies: is-number: 3.0.0 repeat-string: 1.6.1 - engines: - node: '>=0.10.0' - resolution: - integrity: 
sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= + /to-regex-range/5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} dependencies: is-number: 7.0.0 - engines: - node: '>=8.0' - resolution: - integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + /to-regex/3.0.2: + resolution: {integrity: sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==} + engines: {node: '>=0.10.0'} dependencies: define-property: 2.0.2 extend-shallow: 3.0.2 regex-not: 1.0.2 safe-regex: 1.1.0 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + /toggle-selection/1.0.6: + resolution: {integrity: sha1-bkWxJj8gF/oKzH2J14sVuL932jI=} dev: false - resolution: - integrity: sha1-bkWxJj8gF/oKzH2J14sVuL932jI= + /toidentifier/1.0.0: + resolution: {integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==} + engines: {node: '>=0.6'} dev: true - engines: - node: '>=0.6' - resolution: - integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== + /tough-cookie/2.5.0: + resolution: {integrity: sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==} + engines: {node: '>=0.8'} dependencies: psl: 1.8.0 punycode: 2.1.1 dev: true - engines: - node: '>=0.8' - resolution: - integrity: sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + /tough-cookie/4.0.0: + resolution: {integrity: sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==} + engines: {node: '>=6'} dependencies: psl: 1.8.0 punycode: 2.1.1 universalify: 0.1.2 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== + /tr46/2.0.2: + resolution: {integrity: sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg==} + engines: {node: '>=8'} dependencies: punycode: 2.1.1 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg== + + /trough/1.0.5: + resolution: {integrity: sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==} + dev: true + /tryer/1.0.1: + resolution: {integrity: sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==} dev: true - resolution: - integrity: sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== + /ts-easing/0.2.0: + resolution: {integrity: sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==} dev: false - resolution: - integrity: sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== + /ts-pnp/1.2.0_typescript@4.2.3: - dependencies: - typescript: 4.2.3 - dev: true - engines: - node: '>=6' + resolution: {integrity: sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==} + engines: {node: '>=6'} peerDependencies: typescript: '*' peerDependenciesMeta: typescript: optional: true - resolution: - integrity: 
sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw== + dependencies: + typescript: 4.2.3 + dev: true + /tsconfig-paths-webpack-plugin/3.5.1: + resolution: {integrity: sha512-n5CMlUUj+N5pjBhBACLq4jdr9cPTitySCjIosoQm0zwK99gmrcTGAfY9CwxRFT9+9OleNWXPRUcxsKP4AYExxQ==} dependencies: chalk: 4.1.0 enhanced-resolve: 5.7.0 tsconfig-paths: 3.9.0 dev: true - resolution: - integrity: sha512-n5CMlUUj+N5pjBhBACLq4jdr9cPTitySCjIosoQm0zwK99gmrcTGAfY9CwxRFT9+9OleNWXPRUcxsKP4AYExxQ== + /tsconfig-paths/3.9.0: + resolution: {integrity: sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==} dependencies: '@types/json5': 0.0.29 json5: 1.0.1 minimist: 1.2.5 strip-bom: 3.0.0 dev: true - resolution: - integrity: sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw== + /tslib/1.14.1: - resolution: - integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + dev: true + /tslib/2.1.0: - resolution: - integrity: sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== + resolution: {integrity: sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==} + dev: false + + /tslib/2.4.1: + resolution: {integrity: sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==} + + /tslib/2.5.0: + resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==} + dev: false + /tsutils/3.21.0_typescript@4.2.3: + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' dependencies: tslib: 1.14.1 typescript: 4.2.3 dev: true - engines: - node: '>= 6' - peerDependencies: - typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' - resolution: - integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + /tty-browserify/0.0.0: + resolution: {integrity: sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=} dev: true - resolution: - integrity: sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= + /tunnel-agent/0.6.0: + resolution: {integrity: sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=} dependencies: safe-buffer: 5.2.1 dev: true - resolution: - integrity: sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + /tweetnacl/0.14.5: + resolution: {integrity: sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=} dev: true - resolution: - integrity: sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + /type-check/0.3.2: + resolution: {integrity: sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=} + engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.1.2 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + /type-check/0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.2.1 dev: true - engines: - node: '>= 0.8.0' - resolution: - integrity: 
sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + /type-detect/4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + /type-fest/0.20.2: + resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + /type-fest/0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + /type-fest/0.3.1: + resolution: {integrity: sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==} + engines: {node: '>=6'} dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== + /type-fest/0.6.0: + resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== + /type-fest/0.8.1: + resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} + engines: {node: '>=8'} dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== + /type-is/1.6.18: + resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} + engines: {node: '>= 0.6'} dependencies: media-typer: 0.3.0 mime-types: 2.1.29 dev: true - engines: - node: '>= 0.6' - resolution: - integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + /type/1.2.0: + resolution: {integrity: sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==} dev: true - resolution: - integrity: sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + /type/2.5.0: + resolution: {integrity: sha512-180WMDQaIMm3+7hGXWf12GtdniDEy7nYcyFMKJn/eZz/6tSLXrUN9V0wKSbMjej0I1WHWbpREDEKHtqPQa9NNw==} dev: true - resolution: - integrity: sha512-180WMDQaIMm3+7hGXWf12GtdniDEy7nYcyFMKJn/eZz/6tSLXrUN9V0wKSbMjej0I1WHWbpREDEKHtqPQa9NNw== + /typedarray-to-buffer/3.1.5: + resolution: {integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==} dependencies: is-typedarray: 1.0.0 dev: true - resolution: - integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + /typedarray/0.0.6: + resolution: {integrity: sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=} dev: true - resolution: - integrity: sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= + /typescript/4.2.3: + resolution: {integrity: 
sha512-qOcYwxaByStAWrBf4x0fibwZvMRG+r4cQoTjbPtUlrWjBHbmCAww1i448U0GJ+3cNNEtebDteo/cHOR3xJ4wEw==} + engines: {node: '>=4.2.0'} + hasBin: true dev: true - engines: - node: '>=4.2.0' + + /umi/3.5.20_react-router@5.2.0: + resolution: {integrity: sha512-rliZTS2LoudsIelaSipZrPUEjPOi2HDlj1VCNXt63YFxeqSXQkijKmM1+hSVEDDRwPLq4L+RZhuVnCasZA9Nng==} hasBin: true - resolution: - integrity: sha512-qOcYwxaByStAWrBf4x0fibwZvMRG+r4cQoTjbPtUlrWjBHbmCAww1i448U0GJ+3cNNEtebDteo/cHOR3xJ4wEw== + dependencies: + '@umijs/bundler-webpack': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/core': 3.5.20 + '@umijs/deps': 3.5.20 + '@umijs/preset-built-in': 3.5.20_react-dom@16.14.0+react@16.14.0 + '@umijs/runtime': 3.5.20_react@16.14.0 + '@umijs/types': 3.5.20_39566ec7cc5fe716a59f91f7330320ef + '@umijs/utils': 3.5.20 + react: 16.14.0 + react-dom: 16.14.0_react@16.14.0 + v8-compile-cache: 2.3.0 + transitivePeerDependencies: + - react-router + dev: true + /unbox-primitive/1.0.1: + resolution: {integrity: sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==} dependencies: function-bind: 1.1.1 has-bigints: 1.0.1 has-symbols: 1.0.2 which-boxed-primitive: 1.0.2 dev: true - resolution: - integrity: sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== + /unicode-canonical-property-names-ecmascript/1.0.4: + resolution: {integrity: sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== + /unicode-match-property-ecmascript/1.0.4: + resolution: {integrity: sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==} + engines: {node: '>=4'} dependencies: unicode-canonical-property-names-ecmascript: 1.0.4 unicode-property-aliases-ecmascript: 1.1.0 dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== + /unicode-match-property-value-ecmascript/1.2.0: + resolution: {integrity: sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ== + /unicode-property-aliases-ecmascript/1.1.0: + resolution: {integrity: sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg== + + /unified/8.4.2: + resolution: {integrity: sha512-JCrmN13jI4+h9UAyKEoGcDZV+i1E7BLFuG7OsaDvTXI5P0qhHX+vZO/kOhz9jn8HGENDKbwSeB0nVOg4gVStGA==} + dependencies: + bail: 1.0.5 + extend: 3.0.2 + is-plain-obj: 2.1.0 + trough: 1.0.5 + vfile: 4.2.1 + dev: true + /union-value/1.0.1: + resolution: {integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==} + engines: {node: '>=0.10.0'} dependencies: arr-union: 3.1.0 get-value: 2.0.6 is-extendable: 0.1.1 set-value: 2.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + /uniq/1.0.1: + resolution: {integrity: 
sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=} + dev: true + + /uniqs/2.0.0: + resolution: {integrity: sha1-/+3ks2slKQaW5uFl1KWe25mOawI=} + dev: true + + /unique-filename/1.1.1: + resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==} + dependencies: + unique-slug: 2.0.2 + dev: true + + /unique-slug/2.0.2: + resolution: {integrity: sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==} + dependencies: + imurmurhash: 0.1.4 + dev: true + + /unique-string/1.0.0: + resolution: {integrity: sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=} + engines: {node: '>=4'} + dependencies: + crypto-random-string: 1.0.0 + dev: true + + /unist-builder/2.0.3: + resolution: {integrity: sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==} + dev: true + + /unist-util-filter/2.0.3: + resolution: {integrity: sha512-8k6Jl/KLFqIRTHydJlHh6+uFgqYHq66pV75pZgr1JwfyFSjbWb12yfb0yitW/0TbHXjr9U4G9BQpOvMANB+ExA==} + dependencies: + unist-util-is: 4.1.0 + dev: true + + /unist-util-find-after/3.0.0: + resolution: {integrity: sha512-ojlBqfsBftYXExNu3+hHLfJQ/X1jYY/9vdm4yZWjIbf0VuWF6CRufci1ZyoD/wV2TYMKxXUoNuoqwy+CkgzAiQ==} + dependencies: + unist-util-is: 4.1.0 dev: true - resolution: - integrity: sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= - /uniqs/2.0.0: + + /unist-util-generated/1.1.6: + resolution: {integrity: sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==} dev: true - resolution: - integrity: sha1-/+3ks2slKQaW5uFl1KWe25mOawI= - /unique-filename/1.1.1: + + /unist-util-is/4.1.0: + resolution: {integrity: sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==} + dev: true + + /unist-util-position/3.1.0: + resolution: {integrity: sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==} + dev: true + + /unist-util-stringify-position/2.0.3: + resolution: {integrity: sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==} dependencies: - unique-slug: 2.0.2 + '@types/unist': 2.0.6 dev: true - resolution: - integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== - /unique-slug/2.0.2: + + /unist-util-visit-parents/3.1.1: + resolution: {integrity: sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==} dependencies: - imurmurhash: 0.1.4 + '@types/unist': 2.0.6 + unist-util-is: 4.1.0 dev: true - resolution: - integrity: sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== - /unique-string/1.0.0: + + /unist-util-visit/2.0.3: + resolution: {integrity: sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==} dependencies: - crypto-random-string: 1.0.0 + '@types/unist': 2.0.6 + unist-util-is: 4.1.0 + unist-util-visit-parents: 3.1.1 dev: true - engines: - node: '>=4' - resolution: - integrity: sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo= + /universalify/0.1.2: + resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} + engines: {node: '>= 4.0.0'} dev: true - engines: - node: '>= 4.0.0' - resolution: - integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + /universalify/2.0.0: + resolution: {integrity: 
sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} + engines: {node: '>= 10.0.0'} dev: true - engines: - node: '>= 10.0.0' - resolution: - integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + /unload/2.2.0: + resolution: {integrity: sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA==} dependencies: '@babel/runtime': 7.13.10 detect-node: 2.0.5 dev: false - resolution: - integrity: sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA== + /unpipe/1.0.0: + resolution: {integrity: sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=} + engines: {node: '>= 0.8'} dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + /unquote/1.1.1: + resolution: {integrity: sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=} dev: true - resolution: - integrity: sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= + /unset-value/1.0.0: + resolution: {integrity: sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=} + engines: {node: '>=0.10.0'} dependencies: has-value: 0.3.1 isobject: 3.0.1 - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= + /upath/1.2.0: + resolution: {integrity: sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==} + engines: {node: '>=4'} dev: true - engines: - node: '>=4' - resolution: - integrity: sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - /upper-case-first/1.1.2: + + /upper-case/2.0.2: + resolution: {integrity: sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==} dependencies: - upper-case: 1.1.3 + tslib: 2.4.1 dev: false - resolution: - integrity: sha1-XXm+3P8UQZUY/S7bCgUHybaFkRU= - /upper-case/1.1.3: - dev: false - resolution: - integrity: sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= + /uri-js/4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} dependencies: punycode: 2.1.1 dev: true - resolution: - integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + /urix/0.1.0: + resolution: {integrity: sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=} deprecated: Please see https://github.com/lydell/urix#deprecated - resolution: - integrity: sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + /url-loader/4.1.1_file-loader@6.1.1+webpack@4.44.2: + resolution: {integrity: sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==} + engines: {node: '>= 10.13.0'} + peerDependencies: + file-loader: '*' + webpack: ^4.0.0 || ^5.0.0 + peerDependenciesMeta: + file-loader: + optional: true dependencies: file-loader: 6.1.1_webpack@4.44.2 loader-utils: 2.0.0 @@ -13720,211 +14677,285 @@ packages: schema-utils: 3.0.0 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>= 10.13.0' - peerDependencies: - file-loader: '*' - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - file-loader: - optional: true - resolution: - integrity: sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA== + /url-parse/1.5.1: + resolution: {integrity: sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q==} dependencies: querystringify: 2.2.0 requires-port: 1.0.0 dev: true - resolution: - integrity: 
sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q== + /url/0.11.0: + resolution: {integrity: sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=} dependencies: punycode: 1.3.2 querystring: 0.2.0 dev: true - resolution: - integrity: sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= + + /use-callback-ref/1.3.0_5170878e5e8a60dfb58a26e1cbcc99ef: + resolution: {integrity: sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 16.14.5 + react: 17.0.2 + tslib: 2.5.0 + dev: false + + /use-sidecar/1.1.2_5170878e5e8a60dfb58a26e1cbcc99ef: + resolution: {integrity: sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': ^16.9.0 || ^17.0.0 || ^18.0.0 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 16.14.5 + detect-node-es: 1.1.0 + react: 17.0.2 + tslib: 2.5.0 + dev: false + + /use-subscription/1.5.1_react@16.14.0: + resolution: {integrity: sha512-Xv2a1P/yReAjAbhylMfFplFKj9GssgTwN7RlcTxBujFQcloStWNDQdc4g4NRWH9xS4i/FDk04vQBptAXoF3VcA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 + dependencies: + object-assign: 4.1.1 + react: 16.14.0 + dev: true + + /use-subscription/1.5.1_react@17.0.2: + resolution: {integrity: sha512-Xv2a1P/yReAjAbhylMfFplFKj9GssgTwN7RlcTxBujFQcloStWNDQdc4g4NRWH9xS4i/FDk04vQBptAXoF3VcA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 + dependencies: + object-assign: 4.1.1 + react: 17.0.2 + dev: true + /use/3.1.1: - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + resolution: {integrity: sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==} + engines: {node: '>=0.10.0'} + /util-deprecate/1.0.2: + resolution: {integrity: sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=} dev: true - resolution: - integrity: sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + /util.promisify/1.0.0: + resolution: {integrity: sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==} dependencies: define-properties: 1.1.3 object.getownpropertydescriptors: 2.1.2 dev: true - resolution: - integrity: sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== + /util.promisify/1.0.1: + resolution: {integrity: sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA==} dependencies: define-properties: 1.1.3 es-abstract: 1.18.0 has-symbols: 1.0.2 object.getownpropertydescriptors: 2.1.2 dev: true - resolution: - integrity: sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + /util/0.10.3: + resolution: {integrity: sha1-evsa/lCAUkZInj23/g7TeTNqwPk=} dependencies: inherits: 2.0.1 dev: true - resolution: - integrity: sha1-evsa/lCAUkZInj23/g7TeTNqwPk= + /util/0.11.1: + resolution: {integrity: sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==} dependencies: inherits: 2.0.3 dev: true - resolution: - integrity: sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== + /utila/0.4.0: + resolution: 
{integrity: sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=} dev: true - resolution: - integrity: sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= + /utility-types/3.10.0: + resolution: {integrity: sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==} + engines: {node: '>= 4'} dev: false - engines: - node: '>= 4' - resolution: - integrity: sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg== + /utils-merge/1.0.1: + resolution: {integrity: sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=} + engines: {node: '>= 0.4.0'} dev: true - engines: - node: '>= 0.4.0' - resolution: - integrity: sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + /uuid/3.4.0: - dev: true + resolution: {integrity: sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==} hasBin: true - resolution: - integrity: sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - /uuid/8.3.2: dev: true + + /uuid/8.3.2: + resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true + dev: true optional: true - resolution: - integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + /v8-compile-cache/2.3.0: + resolution: {integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==} dev: true - resolution: - integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + /v8-to-istanbul/7.1.0: + resolution: {integrity: sha512-uXUVqNUCLa0AH1vuVxzi+MI4RfxEOKt9pBgKwHbgH7st8Kv2P1m+jvWNnektzBh5QShF3ODgKmUFCf38LnVz1g==} + engines: {node: '>=10.10.0'} dependencies: '@types/istanbul-lib-coverage': 2.0.3 convert-source-map: 1.7.0 source-map: 0.7.3 dev: true - engines: - node: '>=10.10.0' - resolution: - integrity: sha512-uXUVqNUCLa0AH1vuVxzi+MI4RfxEOKt9pBgKwHbgH7st8Kv2P1m+jvWNnektzBh5QShF3ODgKmUFCf38LnVz1g== + /validate-npm-package-license/3.0.4: + resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} dependencies: spdx-correct: 3.1.1 spdx-expression-parse: 3.0.1 dev: true - resolution: - integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + /value-equal/1.0.1: - dev: false - resolution: - integrity: sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== + resolution: {integrity: sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==} + /vary/1.1.2: + resolution: {integrity: sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=} + engines: {node: '>= 0.8'} dev: true - engines: - node: '>= 0.8' - resolution: - integrity: sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + /vendors/1.0.4: + resolution: {integrity: sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==} dev: true - resolution: - integrity: sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w== + /verror/1.10.0: + resolution: {integrity: sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=} + engines: {'0': node >=0.6.0} dependencies: assert-plus: 1.0.0 core-util-is: 1.0.2 extsprintf: 1.3.0 dev: true - engines: - '0': node >=0.6.0 - resolution: - integrity: sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + + /vfile-location/3.2.0: + resolution: {integrity: sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==} + 
dev: true + + /vfile-message/2.0.4: + resolution: {integrity: sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==} + dependencies: + '@types/unist': 2.0.6 + unist-util-stringify-position: 2.0.3 + dev: true + + /vfile/4.2.1: + resolution: {integrity: sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==} + dependencies: + '@types/unist': 2.0.6 + is-buffer: 2.0.5 + unist-util-stringify-position: 2.0.3 + vfile-message: 2.0.4 + dev: true + /vm-browserify/1.1.2: + resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==} dev: true - resolution: - integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== + /void-elements/2.0.1: + resolution: {integrity: sha1-wGavtYK7HLQSjWDqkjkulNXp2+w=} + engines: {node: '>=0.10.0'} dev: false - engines: - node: '>=0.10.0' - resolution: - integrity: sha1-wGavtYK7HLQSjWDqkjkulNXp2+w= + /w3c-hr-time/1.0.2: + resolution: {integrity: sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==} dependencies: browser-process-hrtime: 1.0.0 dev: true - resolution: - integrity: sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + /w3c-xmlserializer/2.0.0: + resolution: {integrity: sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==} + engines: {node: '>=10'} dependencies: xml-name-validator: 3.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + /walker/1.0.7: + resolution: {integrity: sha1-L3+bj9ENZ3JisYqITijRlhjgKPs=} dependencies: makeerror: 1.0.11 dev: true - resolution: - integrity: sha1-L3+bj9ENZ3JisYqITijRlhjgKPs= - /warning/4.0.3: - dependencies: - loose-envify: 1.4.0 - dev: false - resolution: - integrity: sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w== + /watchpack-chokidar2/2.0.1: + resolution: {integrity: sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==} + requiresBuild: true dependencies: chokidar: 2.1.8 dev: true optional: true - resolution: - integrity: sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww== + /watchpack/1.7.5: + resolution: {integrity: sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==} dependencies: graceful-fs: 4.2.6 neo-async: 2.6.2 - dev: true optionalDependencies: - chokidar: 3.5.1 + chokidar: 3.5.3 watchpack-chokidar2: 2.0.1 - resolution: - integrity: sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ== + dev: true + /wbuf/1.7.3: + resolution: {integrity: sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==} dependencies: minimalistic-assert: 1.0.1 dev: true - resolution: - integrity: sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + + /web-namespaces/1.1.4: + resolution: {integrity: sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==} + dev: true + /webidl-conversions/5.0.0: + resolution: {integrity: sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==} + engines: {node: '>=8'} dev: true - engines: - node: 
'>=8' - resolution: - integrity: sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + /webidl-conversions/6.1.0: + resolution: {integrity: sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==} + engines: {node: '>=10.4'} + dev: true + + /webpack-chain/6.5.1: + resolution: {integrity: sha512-7doO/SRtLu8q5WM0s7vPKPWX580qhi0/yBHkOxNkv50f6qB76Zy9o2wRTrrPULqYTvQlVHuvbA8v+G5ayuUDsA==} + engines: {node: '>=8'} + dependencies: + deepmerge: 1.5.2 + javascript-stringify: 2.1.0 dev: true - engines: - node: '>=10.4' - resolution: - integrity: sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + /webpack-cli/4.6.0_e3222a4926c3b7d4c1aa5becb19e445f: + resolution: {integrity: sha512-9YV+qTcGMjQFiY7Nb1kmnupvb1x40lfpj8pwdO/bom+sQiP4OBMKjHq29YQrlDWDPZO9r/qWaRRywKaRDKqBTA==} + engines: {node: '>=10.13.0'} + hasBin: true + peerDependencies: + '@webpack-cli/generators': '*' + '@webpack-cli/migrate': '*' + webpack: 4.x.x || 5.x.x + webpack-bundle-analyzer: '*' + webpack-dev-server: '*' + peerDependenciesMeta: + '@webpack-cli/generators': + optional: true + '@webpack-cli/migrate': + optional: true + webpack-bundle-analyzer: + optional: true + webpack-dev-server: + optional: true dependencies: '@discoveryjs/json-ext': 0.5.2 '@webpack-cli/configtest': 1.0.2_webpack-cli@4.6.0+webpack@4.44.2 @@ -13943,27 +14974,12 @@ packages: webpack-dev-server: 3.11.0_webpack-cli@4.6.0+webpack@4.44.2 webpack-merge: 5.7.3 dev: true - engines: - node: '>=10.13.0' - hasBin: true - peerDependencies: - '@webpack-cli/generators': '*' - '@webpack-cli/migrate': '*' - webpack: 4.x.x || 5.x.x - webpack-bundle-analyzer: '*' - webpack-dev-server: '*' - peerDependenciesMeta: - '@webpack-cli/generators': - optional: true - '@webpack-cli/migrate': - optional: true - webpack-bundle-analyzer: - optional: true - webpack-dev-server: - optional: true - resolution: - integrity: sha512-9YV+qTcGMjQFiY7Nb1kmnupvb1x40lfpj8pwdO/bom+sQiP4OBMKjHq29YQrlDWDPZO9r/qWaRRywKaRDKqBTA== + /webpack-dev-middleware/3.7.3_webpack@4.44.2: + resolution: {integrity: sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==} + engines: {node: '>= 6'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 dependencies: memory-fs: 0.4.1 mime: 2.5.2 @@ -13972,13 +14988,17 @@ packages: webpack: 4.44.2_webpack-cli@4.6.0 webpack-log: 2.0.0 dev: true - engines: - node: '>= 6' + + /webpack-dev-server/3.11.0_webpack-cli@4.6.0+webpack@4.44.2: + resolution: {integrity: sha512-PUxZ+oSTxogFQgkTtFndEtJIPNmml7ExwufBZ9L2/Xyyd5PnOL5UreWe5ZT7IU25DSdykL9p1MLQzmLh2ljSeg==} + engines: {node: '>= 6.11.5'} + hasBin: true peerDependencies: webpack: ^4.0.0 || ^5.0.0 - resolution: - integrity: sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ== - /webpack-dev-server/3.11.0_webpack-cli@4.6.0+webpack@4.44.2: + webpack-cli: '*' + peerDependenciesMeta: + webpack-cli: + optional: true dependencies: ansi-html: 0.0.7 bonjour: 3.5.0 @@ -14016,27 +15036,20 @@ packages: ws: 6.2.1 yargs: 13.3.2 dev: true - engines: - node: '>= 6.11.5' - hasBin: true - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - webpack-cli: '*' - peerDependenciesMeta: - webpack-cli: - optional: true - resolution: - integrity: sha512-PUxZ+oSTxogFQgkTtFndEtJIPNmml7ExwufBZ9L2/Xyyd5PnOL5UreWe5ZT7IU25DSdykL9p1MLQzmLh2ljSeg== + /webpack-log/2.0.0: + resolution: {integrity: 
sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==} + engines: {node: '>= 6'} dependencies: ansi-colors: 3.2.4 uuid: 3.4.0 dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== + /webpack-manifest-plugin/2.2.0_webpack@4.44.2: + resolution: {integrity: sha512-9S6YyKKKh/Oz/eryM1RyLVDVmy3NSPV0JXMRhZ18fJsq+AwGxUY34X54VNwkzYcEmEkDwNxuEOboCZEebJXBAQ==} + engines: {node: '>=6.11.5'} + peerDependencies: + webpack: 2 || 3 || 4 dependencies: fs-extra: 7.0.1 lodash: 4.17.21 @@ -14044,29 +15057,34 @@ packages: tapable: 1.1.3 webpack: 4.44.2_webpack-cli@4.6.0 dev: true - engines: - node: '>=6.11.5' - peerDependencies: - webpack: 2 || 3 || 4 - resolution: - integrity: sha512-9S6YyKKKh/Oz/eryM1RyLVDVmy3NSPV0JXMRhZ18fJsq+AwGxUY34X54VNwkzYcEmEkDwNxuEOboCZEebJXBAQ== + /webpack-merge/5.7.3: + resolution: {integrity: sha512-6/JUQv0ELQ1igjGDzHkXbVDRxkfA57Zw7PfiupdLFJYrgFqY5ZP8xxbpp2lU3EPwYx89ht5Z/aDkD40hFCm5AA==} + engines: {node: '>=10.0.0'} dependencies: clone-deep: 4.0.1 wildcard: 2.0.0 dev: true - engines: - node: '>=10.0.0' - resolution: - integrity: sha512-6/JUQv0ELQ1igjGDzHkXbVDRxkfA57Zw7PfiupdLFJYrgFqY5ZP8xxbpp2lU3EPwYx89ht5Z/aDkD40hFCm5AA== + /webpack-sources/1.4.3: + resolution: {integrity: sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==} dependencies: source-list-map: 2.0.1 source-map: 0.6.1 dev: true - resolution: - integrity: sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + /webpack/4.44.2_webpack-cli@4.6.0: + resolution: {integrity: sha512-6KJVGlCxYdISyurpQ0IPTklv+DULv05rs2hseIXer6D7KrUicRDLFb4IUM1S6LUAKypPM/nSiVSuv8jHu1m3/Q==} + engines: {node: '>=6.11.5'} + hasBin: true + peerDependencies: + webpack-cli: '*' + webpack-command: '*' + peerDependenciesMeta: + webpack-cli: + optional: true + webpack-command: + optional: true dependencies: '@webassemblyjs/ast': 1.9.0 '@webassemblyjs/helper-module-context': 1.9.0 @@ -14093,68 +15111,53 @@ packages: webpack-cli: 4.6.0_e3222a4926c3b7d4c1aa5becb19e445f webpack-sources: 1.4.3 dev: true - engines: - node: '>=6.11.5' - hasBin: true - peerDependencies: - webpack-cli: '*' - webpack-command: '*' - peerDependenciesMeta: - webpack-cli: - optional: true - webpack-command: - optional: true - resolution: - integrity: sha512-6KJVGlCxYdISyurpQ0IPTklv+DULv05rs2hseIXer6D7KrUicRDLFb4IUM1S6LUAKypPM/nSiVSuv8jHu1m3/Q== + /websocket-driver/0.6.5: + resolution: {integrity: sha1-XLJVbOuF9Dc8bYI4qmkchFThOjY=} + engines: {node: '>=0.6.0'} dependencies: websocket-extensions: 0.1.4 dev: true - engines: - node: '>=0.6.0' - resolution: - integrity: sha1-XLJVbOuF9Dc8bYI4qmkchFThOjY= + /websocket-driver/0.7.4: + resolution: {integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==} + engines: {node: '>=0.8.0'} dependencies: http-parser-js: 0.5.3 safe-buffer: 5.2.1 websocket-extensions: 0.1.4 dev: true - engines: - node: '>=0.8.0' - resolution: - integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + /websocket-extensions/0.1.4: + resolution: {integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==} + engines: {node: '>=0.8.0'} dev: true - engines: - node: '>=0.8.0' - resolution: - integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + 
/whatwg-encoding/1.0.5: + resolution: {integrity: sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==} dependencies: iconv-lite: 0.4.24 dev: true - resolution: - integrity: sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + /whatwg-fetch/3.6.2: + resolution: {integrity: sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==} dev: false - resolution: - integrity: sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + /whatwg-mimetype/2.3.0: + resolution: {integrity: sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==} dev: true - resolution: - integrity: sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + /whatwg-url/8.5.0: + resolution: {integrity: sha512-fy+R77xWv0AiqfLl4nuGUlQ3/6b5uNfQ4WAbGQVMYshCTCCPK9psC1nWh3XHuxGVCtlcDDQPQW1csmmIQo+fwg==} + engines: {node: '>=10'} dependencies: lodash: 4.17.21 tr46: 2.0.2 webidl-conversions: 6.1.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-fy+R77xWv0AiqfLl4nuGUlQ3/6b5uNfQ4WAbGQVMYshCTCCPK9psC1nWh3XHuxGVCtlcDDQPQW1csmmIQo+fwg== + /which-boxed-primitive/1.0.2: + resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} dependencies: is-bigint: 1.0.1 is-boolean-object: 1.1.0 @@ -14162,53 +15165,52 @@ packages: is-string: 1.0.5 is-symbol: 1.0.3 dev: true - resolution: - integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + /which-module/2.0.0: + resolution: {integrity: sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=} dev: true - resolution: - integrity: sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + /which/1.3.1: + resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} + hasBin: true dependencies: isexe: 2.0.0 - hasBin: true - resolution: - integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + /which/2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true dependencies: isexe: 2.0.0 - engines: - node: '>= 8' - hasBin: true - resolution: - integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + + /wicked-good-xpath/1.3.0: + resolution: {integrity: sha1-gbDpXoZQ5JyUsiKY//hoa1VTz2w=} + dev: true + /wildcard/2.0.0: + resolution: {integrity: sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==} dev: true - resolution: - integrity: sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== - /window-scroll/1.0.0: - dev: false - resolution: - integrity: sha1-bAxIxiCPkGHtkOEPZYkTqZGxiYc= + /word-wrap/1.2.3: + resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + engines: {node: '>=0.10.0'} dev: true - engines: - node: '>=0.10.0' - resolution: - integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + /workbox-background-sync/5.1.4: + resolution: {integrity: sha512-AH6x5pYq4vwQvfRDWH+vfOePfPIYQ00nCEB7dJRU1e0n9+9HMRyvI63FlDvtFT2AvXVRsXvUt7DNMEToyJLpSA==} dependencies: workbox-core: 5.1.4 dev: true - 
resolution: - integrity: sha512-AH6x5pYq4vwQvfRDWH+vfOePfPIYQ00nCEB7dJRU1e0n9+9HMRyvI63FlDvtFT2AvXVRsXvUt7DNMEToyJLpSA== + /workbox-broadcast-update/5.1.4: + resolution: {integrity: sha512-HTyTWkqXvHRuqY73XrwvXPud/FN6x3ROzkfFPsRjtw/kGZuZkPzfeH531qdUGfhtwjmtO/ZzXcWErqVzJNdXaA==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-HTyTWkqXvHRuqY73XrwvXPud/FN6x3ROzkfFPsRjtw/kGZuZkPzfeH531qdUGfhtwjmtO/ZzXcWErqVzJNdXaA== + /workbox-build/5.1.4: + resolution: {integrity: sha512-xUcZn6SYU8usjOlfLb9Y2/f86Gdo+fy1fXgH8tJHjxgpo53VVsqRX0lUDw8/JuyzNmXuo8vXX14pXX2oIm9Bow==} + engines: {node: '>=8.0.0'} dependencies: '@babel/core': 7.12.3 '@babel/preset-env': 7.13.12_@babel+core@7.12.3 @@ -14246,79 +15248,82 @@ packages: workbox-streams: 5.1.4 workbox-sw: 5.1.4 workbox-window: 5.1.4 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=8.0.0' - resolution: - integrity: sha512-xUcZn6SYU8usjOlfLb9Y2/f86Gdo+fy1fXgH8tJHjxgpo53VVsqRX0lUDw8/JuyzNmXuo8vXX14pXX2oIm9Bow== + /workbox-cacheable-response/5.1.4: + resolution: {integrity: sha512-0bfvMZs0Of1S5cdswfQK0BXt6ulU5kVD4lwer2CeI+03czHprXR3V4Y8lPTooamn7eHP8Iywi5QjyAMjw0qauA==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-0bfvMZs0Of1S5cdswfQK0BXt6ulU5kVD4lwer2CeI+03czHprXR3V4Y8lPTooamn7eHP8Iywi5QjyAMjw0qauA== + /workbox-core/5.1.4: + resolution: {integrity: sha512-+4iRQan/1D8I81nR2L5vcbaaFskZC2CL17TLbvWVzQ4qiF/ytOGF6XeV54pVxAvKUtkLANhk8TyIUMtiMw2oDg==} dev: true - resolution: - integrity: sha512-+4iRQan/1D8I81nR2L5vcbaaFskZC2CL17TLbvWVzQ4qiF/ytOGF6XeV54pVxAvKUtkLANhk8TyIUMtiMw2oDg== + /workbox-expiration/5.1.4: + resolution: {integrity: sha512-oDO/5iC65h2Eq7jctAv858W2+CeRW5e0jZBMNRXpzp0ZPvuT6GblUiHnAsC5W5lANs1QS9atVOm4ifrBiYY7AQ==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-oDO/5iC65h2Eq7jctAv858W2+CeRW5e0jZBMNRXpzp0ZPvuT6GblUiHnAsC5W5lANs1QS9atVOm4ifrBiYY7AQ== + /workbox-google-analytics/5.1.4: + resolution: {integrity: sha512-0IFhKoEVrreHpKgcOoddV+oIaVXBFKXUzJVBI+nb0bxmcwYuZMdteBTp8AEDJacENtc9xbR0wa9RDCnYsCDLjA==} dependencies: workbox-background-sync: 5.1.4 workbox-core: 5.1.4 workbox-routing: 5.1.4 workbox-strategies: 5.1.4 dev: true - resolution: - integrity: sha512-0IFhKoEVrreHpKgcOoddV+oIaVXBFKXUzJVBI+nb0bxmcwYuZMdteBTp8AEDJacENtc9xbR0wa9RDCnYsCDLjA== + /workbox-navigation-preload/5.1.4: + resolution: {integrity: sha512-Wf03osvK0wTflAfKXba//QmWC5BIaIZARU03JIhAEO2wSB2BDROWI8Q/zmianf54kdV7e1eLaIEZhth4K4MyfQ==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-Wf03osvK0wTflAfKXba//QmWC5BIaIZARU03JIhAEO2wSB2BDROWI8Q/zmianf54kdV7e1eLaIEZhth4K4MyfQ== + /workbox-precaching/5.1.4: + resolution: {integrity: sha512-gCIFrBXmVQLFwvAzuGLCmkUYGVhBb7D1k/IL7pUJUO5xacjLcFUaLnnsoVepBGAiKw34HU1y/YuqvTKim9qAZA==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-gCIFrBXmVQLFwvAzuGLCmkUYGVhBb7D1k/IL7pUJUO5xacjLcFUaLnnsoVepBGAiKw34HU1y/YuqvTKim9qAZA== + /workbox-range-requests/5.1.4: + resolution: {integrity: sha512-1HSujLjgTeoxHrMR2muDW2dKdxqCGMc1KbeyGcmjZZAizJTFwu7CWLDmLv6O1ceWYrhfuLFJO+umYMddk2XMhw==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-1HSujLjgTeoxHrMR2muDW2dKdxqCGMc1KbeyGcmjZZAizJTFwu7CWLDmLv6O1ceWYrhfuLFJO+umYMddk2XMhw== + /workbox-routing/5.1.4: + resolution: {integrity: sha512-8ljknRfqE1vEQtnMtzfksL+UXO822jJlHTIR7+BtJuxQ17+WPZfsHqvk1ynR/v0EHik4x2+826Hkwpgh4GKDCw==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: 
sha512-8ljknRfqE1vEQtnMtzfksL+UXO822jJlHTIR7+BtJuxQ17+WPZfsHqvk1ynR/v0EHik4x2+826Hkwpgh4GKDCw== + /workbox-strategies/5.1.4: + resolution: {integrity: sha512-VVS57LpaJTdjW3RgZvPwX0NlhNmscR7OQ9bP+N/34cYMDzXLyA6kqWffP6QKXSkca1OFo/v6v7hW7zrrguo6EA==} dependencies: workbox-core: 5.1.4 workbox-routing: 5.1.4 dev: true - resolution: - integrity: sha512-VVS57LpaJTdjW3RgZvPwX0NlhNmscR7OQ9bP+N/34cYMDzXLyA6kqWffP6QKXSkca1OFo/v6v7hW7zrrguo6EA== + /workbox-streams/5.1.4: + resolution: {integrity: sha512-xU8yuF1hI/XcVhJUAfbQLa1guQUhdLMPQJkdT0kn6HP5CwiPOGiXnSFq80rAG4b1kJUChQQIGPrq439FQUNVrw==} dependencies: workbox-core: 5.1.4 workbox-routing: 5.1.4 dev: true - resolution: - integrity: sha512-xU8yuF1hI/XcVhJUAfbQLa1guQUhdLMPQJkdT0kn6HP5CwiPOGiXnSFq80rAG4b1kJUChQQIGPrq439FQUNVrw== + /workbox-sw/5.1.4: + resolution: {integrity: sha512-9xKnKw95aXwSNc8kk8gki4HU0g0W6KXu+xks7wFuC7h0sembFnTrKtckqZxbSod41TDaGh+gWUA5IRXrL0ECRA==} dev: true - resolution: - integrity: sha512-9xKnKw95aXwSNc8kk8gki4HU0g0W6KXu+xks7wFuC7h0sembFnTrKtckqZxbSod41TDaGh+gWUA5IRXrL0ECRA== + /workbox-webpack-plugin/5.1.4_webpack@4.44.2: + resolution: {integrity: sha512-PZafF4HpugZndqISi3rZ4ZK4A4DxO8rAqt2FwRptgsDx7NF8TVKP86/huHquUsRjMGQllsNdn4FNl8CD/UvKmQ==} + engines: {node: '>=8.0.0'} + peerDependencies: + webpack: ^4.0.0 dependencies: '@babel/runtime': 7.13.10 fast-json-stable-stringify: 2.1.0 @@ -14327,83 +15332,76 @@ packages: webpack: 4.44.2_webpack-cli@4.6.0 webpack-sources: 1.4.3 workbox-build: 5.1.4 + transitivePeerDependencies: + - supports-color dev: true - engines: - node: '>=8.0.0' - peerDependencies: - webpack: ^4.0.0 - resolution: - integrity: sha512-PZafF4HpugZndqISi3rZ4ZK4A4DxO8rAqt2FwRptgsDx7NF8TVKP86/huHquUsRjMGQllsNdn4FNl8CD/UvKmQ== + /workbox-window/5.1.4: + resolution: {integrity: sha512-vXQtgTeMCUq/4pBWMfQX8Ee7N2wVC4Q7XYFqLnfbXJ2hqew/cU1uMTD2KqGEgEpE4/30luxIxgE+LkIa8glBYw==} dependencies: workbox-core: 5.1.4 dev: true - resolution: - integrity: sha512-vXQtgTeMCUq/4pBWMfQX8Ee7N2wVC4Q7XYFqLnfbXJ2hqew/cU1uMTD2KqGEgEpE4/30luxIxgE+LkIa8glBYw== + /worker-farm/1.7.0: + resolution: {integrity: sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==} dependencies: errno: 0.1.8 dev: true - resolution: - integrity: sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== + /worker-rpc/0.1.1: + resolution: {integrity: sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg==} dependencies: microevent.ts: 0.1.1 dev: false - resolution: - integrity: sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg== + /wrap-ansi/5.1.0: + resolution: {integrity: sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==} + engines: {node: '>=6'} dependencies: ansi-styles: 3.2.1 string-width: 3.1.0 strip-ansi: 5.2.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== + /wrap-ansi/6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} dependencies: ansi-styles: 4.3.0 string-width: 4.2.2 strip-ansi: 6.0.0 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== + /wrap-ansi/7.0.0: + resolution: {integrity: 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} dependencies: ansi-styles: 4.3.0 string-width: 4.2.2 strip-ansi: 6.0.0 dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + /wrappy/1.0.2: - resolution: - integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=} + /write-file-atomic/3.0.3: + resolution: {integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==} dependencies: imurmurhash: 0.1.4 is-typedarray: 1.0.0 signal-exit: 3.0.3 typedarray-to-buffer: 3.1.5 dev: true - resolution: - integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + /ws/6.2.1: + resolution: {integrity: sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==} dependencies: async-limiter: 1.0.1 dev: true - resolution: - integrity: sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== + /ws/7.4.4: - dev: true - engines: - node: '>=8.3.0' + resolution: {integrity: sha512-Qm8k8ojNQIMx7S+Zp8u/uHOx7Qazv3Yv4q68MiWWWOJhiwG5W3x7iqmRtJo8xxrciZUY4vRxUTJCKuRnF28ZZw==} + engines: {node: '>=8.3.0'} peerDependencies: bufferutil: ^4.0.1 utf-8-validate: ^5.0.2 @@ -14412,57 +15410,67 @@ packages: optional: true utf-8-validate: optional: true - resolution: - integrity: sha512-Qm8k8ojNQIMx7S+Zp8u/uHOx7Qazv3Yv4q68MiWWWOJhiwG5W3x7iqmRtJo8xxrciZUY4vRxUTJCKuRnF28ZZw== + dev: true + + /xhr-mock/2.5.1: + resolution: {integrity: sha512-UKOjItqjFgPUwQGPmRAzNBn8eTfIhcGjBVGvKYAWxUQPQsXNGD6KEckGTiHwyaAUp9C9igQlnN1Mp79KWCg7CQ==} + dependencies: + global: 4.4.0 + url: 0.11.0 + dev: true + /xml-name-validator/3.0.0: + resolution: {integrity: sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==} dev: true - resolution: - integrity: sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + /xmlchars/2.2.0: + resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} + dev: true + + /xmldom-sre/0.1.31: + resolution: {integrity: sha512-f9s+fUkX04BxQf+7mMWAp5zk61pciie+fFLC9hX9UVvCeJQfNHRHXpeo5MPcR0EUf57PYLdt+ZO4f3Ipk2oZUw==} + engines: {node: '>=0.1'} dev: true - resolution: - integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + /xtend/4.0.2: + resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} + engines: {node: '>=0.4'} dev: true - engines: - node: '>=0.4' - resolution: - integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + /y18n/4.0.1: + resolution: {integrity: sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==} dev: true - resolution: - integrity: sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== + /yallist/3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} dev: true - resolution: - integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + /yallist/4.0.0: + resolution: {integrity: 
sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} dev: true - resolution: - integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + /yaml/1.10.2: + resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} + engines: {node: '>= 6'} dev: true - engines: - node: '>= 6' - resolution: - integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + /yargs-parser/13.1.2: + resolution: {integrity: sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==} dependencies: camelcase: 5.3.1 decamelize: 1.2.0 dev: true - resolution: - integrity: sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== + /yargs-parser/18.1.3: + resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} + engines: {node: '>=6'} dependencies: camelcase: 5.3.1 decamelize: 1.2.0 dev: true - engines: - node: '>=6' - resolution: - integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + /yargs/13.3.2: + resolution: {integrity: sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==} dependencies: cliui: 5.0.0 find-up: 3.0.0 @@ -14475,9 +15483,10 @@ packages: y18n: 4.0.1 yargs-parser: 13.1.2 dev: true - resolution: - integrity: sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== + /yargs/15.4.1: + resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==} + engines: {node: '>=8'} dependencies: cliui: 6.0.0 decamelize: 1.2.0 @@ -14491,127 +15500,12 @@ packages: y18n: 4.0.1 yargs-parser: 18.1.3 dev: true - engines: - node: '>=8' - resolution: - integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + /yocto-queue/0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + dev: true + + /zwitch/1.0.5: + resolution: {integrity: sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==} dev: true - engines: - node: '>=10' - resolution: - integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== -specifiers: - '@ant-design/icons': ^4.6.2 - '@babel/core': 7.12.3 - '@formily/antd': ^1.3.8 - '@formily/antd-components': ^1.3.8 - '@monaco-editor/react': ^4.0.11 - '@pmmmwh/react-refresh-webpack-plugin': 0.4.2 - '@svgr/webpack': 5.4.0 - '@testing-library/jest-dom': ^5.11.5 - '@testing-library/react': ^11.1.1 - '@testing-library/user-event': ^12.2.0 - '@types/classnames': ^2.2.11 - '@types/jest': ^26.0.15 - '@types/keyboardjs': ^2.5.0 - '@types/less': ^3.0.1 - '@types/lodash': ^4.14.164 - '@types/node': ^12.19.3 - '@types/pubsub-js': ^1.8.1 - '@types/react': ^16.9.55 - '@types/react-dom': ^16.9.9 - '@types/react-router-dom': ^5.1.6 - '@types/styled-components': ^5.1.4 - '@typescript-eslint/eslint-plugin': ^4.5.0 - '@typescript-eslint/parser': ^4.5.0 - '@welldone-software/why-did-you-render': ^6.1.1 - antd: ^4.14.0 - antd-dayjs-webpack-plugin: ^1.0.6 - axios: ^0.21.0 - babel-eslint: ^10.1.0 - babel-jest: ^26.6.0 - babel-loader: 8.1.0 - babel-plugin-named-asset-import: 
^0.3.7 - babel-preset-react-app: ^10.0.0 - bfj: ^7.0.2 - camelcase: ^6.1.0 - case-sensitive-paths-webpack-plugin: 2.3.0 - chart.js: ^3.2.1 - classnames: ^2.2.6 - css-loader: 4.3.0 - dayjs: ^1.9.7 - dotenv: 8.2.0 - dotenv-expand: 5.1.0 - eslint: ^7.11.0 - eslint-config-prettier: ^6.15.0 - eslint-config-react-app: ^6.0.0 - eslint-plugin-flowtype: ^5.2.0 - eslint-plugin-import: ^2.22.1 - eslint-plugin-jest: ^24.1.0 - eslint-plugin-jsx-a11y: ^6.3.1 - eslint-plugin-prettier: ^3.1.4 - eslint-plugin-react: ^7.21.5 - eslint-plugin-react-hooks: ^4.2.0 - eslint-plugin-testing-library: ^3.9.2 - eslint-webpack-plugin: ^2.1.0 - file-loader: 6.1.1 - fs-extra: ^9.0.1 - html-webpack-plugin: 4.5.0 - i18next: ^19.8.3 - identity-obj-proxy: 3.0.0 - ip-port-regex: ^2.0.0 - jest: 26.6.0 - jest-circus: 26.6.0 - jest-resolve: 26.6.0 - jest-watch-typeahead: 0.6.1 - keyboardjs: ^2.6.4 - less: ^3.12.2 - less-loader: ^7.0.2 - less-vars-to-js: ^1.3.0 - lint-staged: ^10.5.1 - lodash: ^4.17.21 - lodash-es: ^4.17.15 - mini-css-extract-plugin: 0.11.3 - optimize-css-assets-webpack-plugin: 5.0.4 - pnp-webpack-plugin: 1.6.4 - postcss-flexbugs-fixes: 4.2.1 - postcss-loader: 3.0.0 - postcss-normalize: 8.0.1 - postcss-preset-env: 6.7.0 - postcss-safe-parser: 5.0.2 - prettier: ^2.1.2 - pubsub-js: ^1.9.2 - rc-menu: ^8.10.6 - react: ^17.0.1 - react-app-polyfill: ^2.0.0 - react-chartjs-2: ^3.0.3 - react-dev-utils: ^11.0.0 - react-dom: ^17.0.1 - react-flow-renderer: ^9.1.1 - react-i18next: ^11.7.3 - react-query: ^3.9.8 - react-refresh: ^0.8.3 - react-router: ^5.2.0 - react-router-dom: ^5.2.0 - react-use: ^15.3.4 - recoil: ^0.1.2 - resolve: 1.18.1 - resolve-url-loader: ^3.1.2 - sass-loader: 8.0.2 - semver: 7.3.2 - store2: ^2.12.0 - strip-json-comments: ^3.1.1 - style-loader: 1.3.0 - styled-components: ^5.2.1 - terser-webpack-plugin: 4.2.3 - ts-pnp: 1.2.0 - tsconfig-paths-webpack-plugin: ^3.3.0 - typescript: ^4.0.5 - url-loader: 4.1.1 - utility-types: ^3.10.0 - webpack: 4.44.2 - webpack-cli: ^4.5.0 - webpack-dev-server: 3.11.0 - webpack-manifest-plugin: 2.2.0 - workbox-webpack-plugin: 5.1.4 diff --git a/web_console_v2/client/public/fed-favicon.ico b/web_console_v2/client/public/fed-favicon.ico deleted file mode 100644 index 4d5f19db9..000000000 Binary files a/web_console_v2/client/public/fed-favicon.ico and /dev/null differ diff --git a/web_console_v2/client/public/icon/apple-touch-icon.png b/web_console_v2/client/public/icon/apple-touch-icon.png new file mode 100644 index 000000000..df108e59e Binary files /dev/null and b/web_console_v2/client/public/icon/apple-touch-icon.png differ diff --git a/web_console_v2/client/public/icon/fed-favicon.ico b/web_console_v2/client/public/icon/fed-favicon.ico new file mode 100644 index 000000000..4e114d66c Binary files /dev/null and b/web_console_v2/client/public/icon/fed-favicon.ico differ diff --git a/web_console_v2/client/public/icon/icon-sso-bytedance.svg b/web_console_v2/client/public/icon/icon-sso-bytedance.svg new file mode 100644 index 000000000..6ba6e6f3e --- /dev/null +++ b/web_console_v2/client/public/icon/icon-sso-bytedance.svg @@ -0,0 +1,4 @@ + + + + diff --git a/web_console_v2/client/public/index.html b/web_console_v2/client/public/index.html index 555c38039..22875b6fa 100644 --- a/web_console_v2/client/public/index.html +++ b/web_console_v2/client/public/index.html @@ -2,11 +2,11 @@ - + - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/logo-bioland-colorful.svg b/web_console_v2/client/src/assets/icons/logo-bioland-colorful.svg new file mode 100644 
index 000000000..fefc7961e --- /dev/null +++ b/web_console_v2/client/src/assets/icons/logo-bioland-colorful.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/logo-bioland.png b/web_console_v2/client/src/assets/icons/logo-bioland.png new file mode 100644 index 000000000..b2e7d6cd2 Binary files /dev/null and b/web_console_v2/client/src/assets/icons/logo-bioland.png differ diff --git a/web_console_v2/client/src/assets/icons/logo-bioland.svg b/web_console_v2/client/src/assets/icons/logo-bioland.svg new file mode 100644 index 000000000..ced772ee4 --- /dev/null +++ b/web_console_v2/client/src/assets/icons/logo-bioland.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/python.svg b/web_console_v2/client/src/assets/icons/python.svg index fd3d0576e..923f41cc0 100644 --- a/web_console_v2/client/src/assets/icons/python.svg +++ b/web_console_v2/client/src/assets/icons/python.svg @@ -1,20 +1 @@ - - - python - - - - - - - - - - - - - - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/successful-status-icon.svg b/web_console_v2/client/src/assets/icons/successful-status-icon.svg new file mode 100644 index 000000000..3df10d67b --- /dev/null +++ b/web_console_v2/client/src/assets/icons/successful-status-icon.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/workflow-completed.svg b/web_console_v2/client/src/assets/icons/workflow-completed.svg index 57af20b03..b99eee707 100644 --- a/web_console_v2/client/src/assets/icons/workflow-completed.svg +++ b/web_console_v2/client/src/assets/icons/workflow-completed.svg @@ -1,5 +1 @@ - - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/workflow-error.svg b/web_console_v2/client/src/assets/icons/workflow-error.svg index 9c3655077..fef8632d7 100644 --- a/web_console_v2/client/src/assets/icons/workflow-error.svg +++ b/web_console_v2/client/src/assets/icons/workflow-error.svg @@ -1,4 +1 @@ - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/workflow-pending.svg b/web_console_v2/client/src/assets/icons/workflow-pending.svg index 04405989e..9dbe51838 100644 --- a/web_console_v2/client/src/assets/icons/workflow-pending.svg +++ b/web_console_v2/client/src/assets/icons/workflow-pending.svg @@ -1,5 +1 @@ - - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/icons/workflow-warning.svg b/web_console_v2/client/src/assets/icons/workflow-warning.svg index 1c3464064..5b0b39a86 100644 --- a/web_console_v2/client/src/assets/icons/workflow-warning.svg +++ b/web_console_v2/client/src/assets/icons/workflow-warning.svg @@ -1,4 +1 @@ - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/avatar.jpg b/web_console_v2/client/src/assets/images/avatar.jpg index f19714c77..c15a6db8f 100644 Binary files a/web_console_v2/client/src/assets/images/avatar.jpg and b/web_console_v2/client/src/assets/images/avatar.jpg differ diff --git a/web_console_v2/client/src/assets/images/close-icon.svg b/web_console_v2/client/src/assets/images/close-icon.svg index 6738ab5f0..43ef05109 100644 --- a/web_console_v2/client/src/assets/images/close-icon.svg +++ b/web_console_v2/client/src/assets/images/close-icon.svg @@ -1,3 +1 @@ - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/dataset-publish-bg.png 
b/web_console_v2/client/src/assets/images/dataset-publish-bg.png new file mode 100644 index 000000000..c1c19282c Binary files /dev/null and b/web_console_v2/client/src/assets/images/dataset-publish-bg.png differ diff --git a/web_console_v2/client/src/assets/images/empty-data.svg b/web_console_v2/client/src/assets/images/empty-data.svg new file mode 100644 index 000000000..19fb2077e --- /dev/null +++ b/web_console_v2/client/src/assets/images/empty-data.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/empty.png b/web_console_v2/client/src/assets/images/empty.png new file mode 100644 index 000000000..da635b462 Binary files /dev/null and b/web_console_v2/client/src/assets/images/empty.png differ diff --git a/web_console_v2/client/src/assets/images/empty.svg b/web_console_v2/client/src/assets/images/empty.svg index 72ebd1882..120eae83e 100644 --- a/web_console_v2/client/src/assets/images/empty.svg +++ b/web_console_v2/client/src/assets/images/empty.svg @@ -1 +1 @@ -empty \ No newline at end of file + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/file-green.svg b/web_console_v2/client/src/assets/images/file-green.svg new file mode 100644 index 000000000..c0ef562d5 --- /dev/null +++ b/web_console_v2/client/src/assets/images/file-green.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/file.svg b/web_console_v2/client/src/assets/images/file.svg index 2f5c45502..668fa2c37 100644 --- a/web_console_v2/client/src/assets/images/file.svg +++ b/web_console_v2/client/src/assets/images/file.svg @@ -1,9 +1 @@ - - - - - - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/get-metrics.svg b/web_console_v2/client/src/assets/images/get-metrics.svg index 8d30bb7b6..4669fb4d2 100644 --- a/web_console_v2/client/src/assets/images/get-metrics.svg +++ b/web_console_v2/client/src/assets/images/get-metrics.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/hacker-codes.jpg b/web_console_v2/client/src/assets/images/hacker-codes.jpg index 3eb986760..ab3b490eb 100644 Binary files a/web_console_v2/client/src/assets/images/hacker-codes.jpg and b/web_console_v2/client/src/assets/images/hacker-codes.jpg differ diff --git a/web_console_v2/client/src/assets/images/login-illustration.png b/web_console_v2/client/src/assets/images/login-illustration.png index cdcf31465..bc2371acd 100644 Binary files a/web_console_v2/client/src/assets/images/login-illustration.png and b/web_console_v2/client/src/assets/images/login-illustration.png differ diff --git a/web_console_v2/client/src/assets/images/logo-black.png b/web_console_v2/client/src/assets/images/logo-black.png new file mode 100644 index 000000000..0504bdbc1 Binary files /dev/null and b/web_console_v2/client/src/assets/images/logo-black.png differ diff --git a/web_console_v2/client/src/assets/images/logo-colorful.svg b/web_console_v2/client/src/assets/images/logo-colorful.svg deleted file mode 100644 index f90316f4b..000000000 --- a/web_console_v2/client/src/assets/images/logo-colorful.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/web_console_v2/client/src/assets/images/logo-sso-cas-default.jpeg b/web_console_v2/client/src/assets/images/logo-sso-cas-default.jpeg new file mode 100644 index 000000000..ac6a03261 Binary files /dev/null and 
b/web_console_v2/client/src/assets/images/logo-sso-cas-default.jpeg differ diff --git a/web_console_v2/client/src/assets/images/logo-white.png b/web_console_v2/client/src/assets/images/logo-white.png new file mode 100644 index 000000000..5b34e3c5e Binary files /dev/null and b/web_console_v2/client/src/assets/images/logo-white.png differ diff --git a/web_console_v2/client/src/assets/images/logo-white.svg b/web_console_v2/client/src/assets/images/logo-white.svg deleted file mode 100644 index 32422be82..000000000 --- a/web_console_v2/client/src/assets/images/logo-white.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/web_console_v2/client/src/assets/images/logo.svg b/web_console_v2/client/src/assets/images/logo.svg deleted file mode 100644 index 70f2868fc..000000000 --- a/web_console_v2/client/src/assets/images/logo.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/web_console_v2/client/src/assets/images/no-result.svg b/web_console_v2/client/src/assets/images/no-result.svg index 5cda352cc..505bc07a7 100644 --- a/web_console_v2/client/src/assets/images/no-result.svg +++ b/web_console_v2/client/src/assets/images/no-result.svg @@ -1,14 +1 @@ - - - - - - - - - - - - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/project-action.svg b/web_console_v2/client/src/assets/images/project-action.svg index 3f0b39025..346d6df33 100644 --- a/web_console_v2/client/src/assets/images/project-action.svg +++ b/web_console_v2/client/src/assets/images/project-action.svg @@ -1,3 +1 @@ - - - + \ No newline at end of file diff --git a/web_console_v2/client/src/assets/images/project-list-bg.png b/web_console_v2/client/src/assets/images/project-list-bg.png new file mode 100644 index 000000000..4d3030cb6 Binary files /dev/null and b/web_console_v2/client/src/assets/images/project-list-bg.png differ diff --git a/web_console_v2/client/src/assets/images/settings.svg b/web_console_v2/client/src/assets/images/settings.svg index 7a4b98819..2ad0f6164 100644 --- a/web_console_v2/client/src/assets/images/settings.svg +++ b/web_console_v2/client/src/assets/images/settings.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/web_console_v2/client/src/components/AddTypeSelect/index.tsx b/web_console_v2/client/src/components/AddTypeSelect/index.tsx new file mode 100644 index 000000000..4998bdacc --- /dev/null +++ b/web_console_v2/client/src/components/AddTypeSelect/index.tsx @@ -0,0 +1,228 @@ +/* istanbul ignore file */ + +import React, { FC, useState } from 'react'; +import styled from 'styled-components'; +import { giveWeakRandomKey, transformRegexSpecChar } from 'shared/helpers'; + +import { Select, Button, Input, Message, SelectProps } from '@arco-design/web-react'; +import { IconPlus } from '@arco-design/web-react/icon'; +import { Check, Close } from 'components/IconPark'; + +export interface OptionItem { + label: string; + value: any; + isCreating?: boolean; + id?: string; +} + +export interface Props extends SelectProps { + value?: any[]; + onChange?: (val: any) => void; + onInputConfirm?: (val: any) => void; + optionList: OptionItem[]; + addTypeText?: string; + typeInputPlaceholader?: string; +} + +const Footer = styled.div` + display: flex; + justify-content: center; + align-items: center; + padding-top: 4px; + border-top: 1px solid var(--lineColor); +`; + +const LabelStrong = styled.span` + font-size: 14px; + color: var(--textColorStrong); + white-space: normal; + word-break: break-all; 
+`; +const LabelIndex = styled.span` + display: inline-block; + width: 30px; + font-size: 14px; + color: var(--textColorSecondary); +`; + +const ItemCotainer = styled.div` + display: flex; + width: 100%; + justify-content: space-between; +`; + +const Left = styled.div` + display: flex; + flex: 1; + align-items: center; +`; +const ButtonGroup = styled.div` + flex: 0 0 60px; + display: flex; + justify-content: space-around; + align-items: center; +`; + +const StyledInput = styled(Input)` + flex: 1; + background-color: transparent; + &:hover { + background-color: #fff; + border: 1px solid #2761f6; + } +`; + +const AddTypeSelect: FC = ({ + value, + onChange = () => {}, + onInputConfirm = () => {}, + optionList, + addTypeText = '新增类型', + typeInputPlaceholader = '请输入算法类型', + ...props +}) => { + const [tempList, setTempList] = useState([]); + + return ( + + ); + + function onCreateClick() { + setTempList((prevState) => + prevState.concat([ + { + label: '', + value: '', + isCreating: true, + id: giveWeakRandomKey(), + }, + ]), + ); + } +}; + +export default AddTypeSelect; diff --git a/web_console_v2/client/src/components/AlgorithmDrawer/AlgorithmInfo.tsx b/web_console_v2/client/src/components/AlgorithmDrawer/AlgorithmInfo.tsx new file mode 100644 index 000000000..c48595b2e --- /dev/null +++ b/web_console_v2/client/src/components/AlgorithmDrawer/AlgorithmInfo.tsx @@ -0,0 +1,149 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; +import styled from 'styled-components'; + +import { CONSTANTS } from 'shared/constants'; + +import { Table, Collapse } from '@arco-design/web-react'; +import CodeEditor from 'components/CodePreview'; +import { IconCaretRight } from '@arco-design/web-react/icon'; + +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { AlgorithmProject, Algorithm } from 'typings/algorithm'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantId } from 'hooks'; + +const SectionTitle = styled.p<{ hasMargin?: boolean }>` + margin-top: ${(props) => (props.hasMargin ? '20px' : '0')}; + font-size: 12px; + font-weight: bold; + color: var(--color-text-1); +`; +const RequiredAsterisk = styled.span` + display: inline-block; + margin-left: 0.2em; + font-size: 1.8em; + color: rgb(var(--red-6)); + line-height: 0.5em; + vertical-align: bottom; +`; +const StyledCollapse = styled(Collapse)` + .arco-collapse-item-header { + position: relative; + border-bottom: none; + padding-left: 0; + padding-bottom: 0; + } + + .arco-collapse-item .arco-collapse-item-icon-hover { + left: 3em; + right: unset; + // note: 和下面的自定义 expandIcon 相对应 + transform: translateY(-65%); + } + + .arco-collapse-item-content-box { + padding: 0; + background: transparent; + } + + .arco-collapse-item .arco-collapse-item-icon-hover-right > .arco-collapse-item-header-icon-down { + transform: rotate(90deg); + } +`; + +const CollapseItem = Collapse.Item; +const tables: ColumnProps[] = [ + { + dataIndex: 'name', + title: '名称', + render(val: string) { + return val || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + dataIndex: 'value', + title: '默认值', + render(val: string) { + return val || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + dataIndex: 'required', + title: '是否必填', + render(required: boolean) { + return ( + + {required ? '是' : '否'} + {required ? 
: null} + + ); + }, + }, + { + dataIndex: 'comment', + title: '提示语', + render(val: string) { + return val || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, +]; + +type Props = { + type: 'algorithm_project' | 'algorithm' | 'pending_algorithm'; + detail?: AlgorithmProject | Algorithm; + isParticipant?: boolean; +}; + +const AlgorithmInfo: FC = ({ isParticipant, type, detail }) => { + const participantId = useGetCurrentProjectParticipantId(); + const projectId = useGetCurrentProjectId(); + let Editor: React.FC; + + switch (type) { + case 'algorithm': + Editor = isParticipant ? CodeEditor.PeerAlgorithm : CodeEditor.Algorithm; + break; + case 'pending_algorithm': + Editor = CodeEditor.PendingAlgorithm; + break; + case 'algorithm_project': + default: + Editor = CodeEditor.AlgorithmProject; + break; + } + + if (!detail) { + return null; + } + + return ( + <> + + 超参数} + expandIcon={} + name="table" + > + record.name} + /> + + + 算法代码 + + + ); +}; + +export default AlgorithmInfo; diff --git a/web_console_v2/client/src/components/AlgorithmDrawer/index.tsx b/web_console_v2/client/src/components/AlgorithmDrawer/index.tsx new file mode 100644 index 000000000..e11510ac0 --- /dev/null +++ b/web_console_v2/client/src/components/AlgorithmDrawer/index.tsx @@ -0,0 +1,164 @@ +/* istanbul ignore file */ + +import React, { FC, useState, useMemo } from 'react'; +import { useQuery } from 'react-query'; + +import { fetchPeerAlgorithmProjectById, fetchProjectDetail } from 'services/algorithm'; +import { CONSTANTS } from 'shared/constants'; + +import { Drawer, Spin, Button } from '@arco-design/web-react'; +import AlgorithmInfo from './AlgorithmInfo'; + +import { Algorithm, AlgorithmParameter } from 'typings/algorithm'; +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; +import { useGetCurrentProjectId } from 'hooks'; + +type Props = DrawerProps & { + algorithmProjectId: ID; + algorithmId?: ID; + algorithmProjectUuid?: ID; + algorithmUuid?: ID; + participantId?: ID; + parameterVariables?: AlgorithmParameter[]; + isAppendParameterVariables?: boolean; +}; +type ButtonProps = Omit & { + text?: string; +}; + +const AlgorithmDrawer: FC & { + Button: FC; +} = ({ + algorithmProjectId, + algorithmId, + algorithmProjectUuid, + algorithmUuid, + participantId, + parameterVariables, + isAppendParameterVariables = false, + ...resetProps +}) => { + const projectId = useGetCurrentProjectId(); + const algorithmProjectDetailQuery = useQuery( + ['getAlgorithmProjectDetailInAlgorithmDrawer', algorithmProjectId, algorithmId], + () => fetchProjectDetail(algorithmProjectId), + { + // 对侧算法algorithmId为null + //TODO:后端修改接口,对侧算法algorithm改为0,删除algorithmId为null的兼容逻辑 + enabled: + (Boolean(algorithmProjectId) || algorithmProjectId === 0) && + algorithmId !== null && + algorithmId !== 0, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const peerAlgorithmProjectDetailQuery = useQuery( + [ + 'getFetchPeerAlgorithmProjectById', + projectId, + participantId, + algorithmProjectUuid, + algorithmId, + ], + () => fetchPeerAlgorithmProjectById(projectId, participantId, algorithmProjectUuid), + { + // 对侧算法algorithmId为null + //TODO:后端修改接口,对侧算法algorithm改为0,删除algorithmId为null的兼容逻辑 + enabled: + (algorithmId === null || algorithmId === 0) && + Boolean(algorithmProjectUuid) && + (Boolean(projectId) || projectId === 0) && + (Boolean(participantId) || participantId === 0), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const algorithmProjectDetail = useMemo(() => { + return algorithmId === null || algorithmId === 0 + ? 
peerAlgorithmProjectDetailQuery.data?.data + : algorithmProjectDetailQuery.data?.data; + }, [algorithmId, peerAlgorithmProjectDetailQuery, algorithmProjectDetailQuery]); + + const previewAlgorithm = useMemo(() => { + if (!algorithmProjectDetail) { + return undefined; + } + + const currentAlgorithm = algorithmProjectDetail.algorithms?.find( + //旧模型训练可能没有algorithmUuid + (item) => item.id === algorithmId || item.uuid === algorithmUuid, + ); + + if (!currentAlgorithm) return undefined; + + if (parameterVariables && parameterVariables.length > 0) { + let finalVariables = parameterVariables; + if (isAppendParameterVariables) { + finalVariables = (currentAlgorithm?.parameter?.variables ?? []).concat(parameterVariables); + } + + return { + ...currentAlgorithm, + parameter: { + ...currentAlgorithm.parameter, + variables: finalVariables, + }, + }; + } + + return currentAlgorithm; + }, [ + algorithmProjectDetail, + algorithmId, + algorithmUuid, + parameterVariables, + isAppendParameterVariables, + ]); + + return ( + + + + + + ); +}; + +export function _Button({ text = '查看', children, ...restProps }: ButtonProps) { + const [visible, setVisible] = useState(false); + return ( + <> + {children ? ( + setVisible(true)}>{children} + ) : ( + + )} + setVisible(false)} /> + + ); +} + +AlgorithmDrawer.Button = _Button; + +export default AlgorithmDrawer; diff --git a/web_console_v2/client/src/components/AlgorithmSelect/index.module.less b/web_console_v2/client/src/components/AlgorithmSelect/index.module.less new file mode 100644 index 000000000..8f9ee5c64 --- /dev/null +++ b/web_console_v2/client/src/components/AlgorithmSelect/index.module.less @@ -0,0 +1,16 @@ +li:has(.second_option_container){ + height: 54px; + line-height: 24px; + +} +.second_option_container{ + > span{ + font-weight: 500; + } +} +.second_option_content{ + color: var(--color-text-2); +} +.text_content{ + font-size: 12px; +} diff --git a/web_console_v2/client/src/components/AlgorithmSelect/index.tsx b/web_console_v2/client/src/components/AlgorithmSelect/index.tsx new file mode 100644 index 000000000..2d453f1b0 --- /dev/null +++ b/web_console_v2/client/src/components/AlgorithmSelect/index.tsx @@ -0,0 +1,371 @@ +import React, { useMemo } from 'react'; +import { Cascader, Select, Grid, Input, Space, Divider } from '@arco-design/web-react'; +import { + AlgorithmParameter, + AlgorithmVersionStatus, + EnumAlgorithmProjectSource, + EnumAlgorithmProjectType, +} from 'typings/algorithm'; +import { useQuery } from 'react-query'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useGetCurrentProjectParticipantName, +} from 'hooks'; +import { + fetchProjectList, + fetchPeerAlgorithmProjectList, + fetchPeerAlgorithmList, + fetchAlgorithmList, + fetchProjectDetail, +} from 'services/algorithm'; + +import styles from './index.module.less'; + +const { Row, Col } = Grid; + +const ALGORITHM_TYPE_LABEL_MAPPER: Record = { + NN_HORIZONTAL: '横向联邦-NN模型', + NN_VERTICAL: '纵向联邦-NN模型', + TRUSTED_COMPUTING: '可信计算', + UNSPECIFIED: '自定义模型', +}; + +interface Props { + value?: AlgorithmSelectValue; + leftDisabled?: boolean; + rightDisabled?: boolean; + isParticipant?: boolean; + algorithmType?: EnumAlgorithmProjectType[]; + algorithmOwnerType: string; + showHyperParameters?: boolean; + filterReleasedAlgo?: boolean; + onChange?: (value: AlgorithmSelectValue) => void; + onAlgorithmOwnerChange?: (algorithmOwnerType: string) => void; +} +export type AlgorithmSelectValue = { + algorithmProjectId?: ID; + algorithmId?: ID; + algorithmProjectUuid?: ID; + 
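/* Illustrative usage of the AlgorithmDrawer.Button wrapper exported above:
 * it renders a "查看" trigger and mounts the drawer on demand. The ids are
 * hypothetical. */
import React from 'react';
import AlgorithmDrawer from 'components/AlgorithmDrawer';

function AlgorithmColumnCell() {
  return <AlgorithmDrawer.Button text="查看" algorithmProjectId={1} algorithmId={2} />;
}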
algorithmUuid?: ID; + config?: AlgorithmParameter[]; + path?: string; + participantId?: ID; +}; + +function AlgorithmSelect({ + value, + leftDisabled = false, + rightDisabled = false, + isParticipant = false, + algorithmType = [], + algorithmOwnerType, + showHyperParameters = true, + filterReleasedAlgo = false, + onChange: onChangeFromProps, + onAlgorithmOwnerChange, +}: Props) { + const projectId = useGetCurrentProjectId(); + const participantName = useGetCurrentProjectParticipantName(); + const participantList = useGetCurrentProjectParticipantList(); + + const leftValue = useMemo(() => { + //支持选择合作伙伴后algorithmProjectId不唯一 + return [algorithmOwnerType, value?.algorithmProjectUuid as string]; + }, [value, algorithmOwnerType]); + const algorithmProjectListQuery = useQuery( + ['fetchAllAlgorithmProjectList', ...algorithmType, projectId, value?.algorithmProjectUuid], + () => + fetchProjectList(projectId, { + type: algorithmType, + }), + { + enabled: Boolean(projectId), + retry: 2, + refetchOnWindowFocus: false, + onSuccess(res) { + const data = res.data; + const algorithmProjectId = data.find((item) => item.uuid === value?.algorithmProjectUuid) + ?.id; + if (algorithmProjectId && value?.algorithmProjectId === undefined) { + onChangeFromProps?.({ ...value, algorithmProjectId: algorithmProjectId }); + } + }, + }, + ); + const preAlgorithmProjectListQuery = useQuery( + ['fetchPreAlgorithmProjectListQuery', algorithmType, value?.algorithmProjectUuid], + () => + fetchProjectList(0, { + type: algorithmType, + sources: EnumAlgorithmProjectSource.PRESET, + }), + { + retry: 2, + refetchOnWindowFocus: false, + onSuccess(res) { + const data = res.data; + const algorithmProjectId = data.find((item) => item.uuid === value?.algorithmProjectUuid) + ?.id; + if (algorithmProjectId && value?.algorithmProjectId === undefined) { + onChangeFromProps?.({ ...value, algorithmProjectId: algorithmProjectId }); + } + }, + }, + ); + const peerAlgorithmProjectListQuery = useQuery( + ['fetchPeerAlgorithmProjectListQuery', projectId, algorithmType], + () => + fetchPeerAlgorithmProjectList(projectId, 0, { + filter: `(type:${JSON.stringify(algorithmType)})`, + }), + { + enabled: Boolean(projectId) || projectId === 0, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const algorithmProjectDetailQuery = useQuery( + ['fetchAlgorithmProjectDetail', value], + () => fetchProjectDetail(value?.algorithmProjectId!), + { + enabled: + leftDisabled && + value?.algorithmProjectUuid === undefined && + value?.algorithmProjectId !== undefined, + // 旧的工作区编辑时服务端不返回algorithmProjectUuid + onSuccess(res) { + const algorithmProjectDetail = res.data; + onChangeFromProps?.({ + ...value, + algorithmProjectUuid: algorithmProjectDetail?.uuid, + }); + }, + }, + ); + + const algorithmListQuery = useQuery( + ['getAlgorithmListDetail', value?.algorithmProjectId], + () => fetchAlgorithmList(0, { algo_project_id: value?.algorithmProjectId! 
}), + { + enabled: + !isParticipant && + leftValue?.[0] === 'self' && + (Boolean(value?.algorithmProjectId) || value?.algorithmProjectId === 0), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const peerAlgorithmListQuery = useQuery( + ['getPeerAlgorithmList', projectId, value?.participantId, value?.algorithmProjectUuid], + () => + fetchPeerAlgorithmList(projectId, value?.participantId, { + algorithm_project_uuid: value?.algorithmProjectUuid!, + }), + { + enabled: + !isParticipant && + leftValue?.[0] === 'peer' && + Boolean(value?.algorithmProjectUuid) && + Boolean(value?.participantId), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const algorithmProjectList = useMemo(() => { + return [ + ...(algorithmProjectListQuery?.data?.data || []), + ...(preAlgorithmProjectListQuery.data?.data || []), + ]; + }, [algorithmProjectListQuery, preAlgorithmProjectListQuery]); + const peerAlgorithmProjectList = useMemo(() => { + return peerAlgorithmProjectListQuery.data?.data || []; + }, [peerAlgorithmProjectListQuery]); + + const configValueList = useMemo(() => { + return value?.config || []; + }, [value?.config]); + + const algorithmProjectDetail = useMemo(() => { + return leftValue?.[0] === 'self' + ? algorithmListQuery.data?.data + : peerAlgorithmListQuery.data?.data; + }, [leftValue, algorithmListQuery, peerAlgorithmListQuery]); + + const rightValue = useMemo(() => { + if (value?.algorithmId !== null && value?.algorithmId !== 0 && algorithmOwnerType === 'self') { + return algorithmProjectDetail?.find((item) => item.id === value?.algorithmId)?.uuid; + } + return value?.algorithmUuid; + }, [value, algorithmProjectDetail, algorithmOwnerType]); + + const leftOptions = useMemo(() => { + return [ + { + value: 'self', + label: '我方算法', + disabled: algorithmProjectList.length === 0, + children: algorithmProjectList.map((item) => ({ + ...item, + value: item.uuid, + label: item.name, + participantName: '我方', + })), + }, + { + value: 'peer', + label: '合作伙伴算法', + disabled: peerAlgorithmProjectList.length === 0, + children: peerAlgorithmProjectList.map((item) => ({ + ...item, + value: item.uuid, + label: item.name, + participantName: + participantList.find((participant) => participant.id === item.participant_id)?.name || + participantName, + })), + }, + ]; + }, [algorithmProjectList, peerAlgorithmProjectList, participantName, participantList]); + + const rightOptions = useMemo(() => { + if (filterReleasedAlgo) { + const releasedAlgo = algorithmProjectDetail?.filter( + (item) => item.status === AlgorithmVersionStatus.PUBLISHED, + ); + return ( + releasedAlgo?.map((item) => ({ + label: `V${item.version}`, + value: item.uuid as ID, + extra: item, + })) || [] + ); + } + + return ( + algorithmProjectDetail?.map((item) => ({ + label: `V${item.version}`, + value: item.uuid as ID, + extra: item, + })) || [] + ); + }, [algorithmProjectDetail, filterReleasedAlgo]); + + return ( + <> + {!isParticipant && ( + + + { + onAlgorithmOwnerChange?.(value?.[0] as string); + onChange({ algorithmProjectUuid: value?.[1] as ID, algorithmUuid: undefined }); + }} + disabled={leftDisabled} + renderOption={(option, level) => { + if (level === 0) { + return {option.label}; + } + return ( +
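/* Concrete values flowing through the pickers above, for reference (the
 * uuid is hypothetical): */
const exampleAlgorithmType = ['NN_VERTICAL'];
const exampleFilter = `(type:${JSON.stringify(exampleAlgorithmType)})`;
// exampleFilter === '(type:["NN_VERTICAL"])'
// The cascader value pairs the algorithm owner with a project uuid:
const exampleLeftValue = ['peer', '4f2a-3c01-uuid'];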
+ {option.name} + } + > + {option.participantName} + {ALGORITHM_TYPE_LABEL_MAPPER?.[option.type as string]} + +
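/* Each version surfaces in the right-hand Select as a `V${version}` label
 * carrying the full algorithm record in `extra`; a concrete shape of one
 * such option (values hypothetical): */
const exampleRightOption = {
  label: 'V3',
  value: 'algo-uuid-3',
  extra: { id: 7, version: 3, path: '/data/algo', parameter: { variables: [] } },
};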
+ ); + }} + /> + +
+ onConfigValueChange(value, 'name', index)} + disabled={true} + /> + + + onConfigValueChange(value, 'value', index)} + /> + + + ))} + + + )} + {showHyperParameters && isParticipant && configValueList.length <= 0 && ( + 对侧无算法超参数,无需配置 + )} + + ); + + function onConfigValueChange(val: string, key: string, index: number) { + const newConfigValueList = [...configValueList]; + newConfigValueList[index] = { ...newConfigValueList[index], [key]: val }; + onChangeFromProps?.({ + ...value, + config: newConfigValueList, + }); + } + + function onChange(val: { algorithmProjectUuid?: ID; algorithmUuid?: ID }) { + const rightItem = rightOptions.find((item) => item.value === val.algorithmUuid); + + const config = rightItem?.extra?.parameter?.variables ?? []; + const path = rightItem?.extra?.path ?? ''; + const algorithmId = rightItem?.extra?.id; + const algorithmProjectId = [...algorithmProjectList, ...peerAlgorithmProjectList].find( + (item) => item.uuid === val.algorithmProjectUuid, + )?.id; + const participantId = + peerAlgorithmProjectList.find((item) => item.uuid === val.algorithmProjectUuid) + ?.participant_id ?? 0; + + onChangeFromProps?.({ + ...val, + config, + path, + algorithmId, + algorithmProjectId, + participantId, + }); + } +} +export default AlgorithmSelect; diff --git a/web_console_v2/client/src/components/AlgorithmType/index.tsx b/web_console_v2/client/src/components/AlgorithmType/index.tsx new file mode 100644 index 000000000..930f8898c --- /dev/null +++ b/web_console_v2/client/src/components/AlgorithmType/index.tsx @@ -0,0 +1,86 @@ +import React, { FC, useMemo } from 'react'; +import styled from 'styled-components'; +import { Tag, TagProps } from '@arco-design/web-react'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; + +type Props = { + style?: React.CSSProperties; + type: EnumAlgorithmProjectType; + tagProps?: Partial; +}; + +const Container = styled.div` + display: inline-block; +`; +const StyledModalTag = styled(Tag)` + margin-right: 4px; + font-size: 12px; + vertical-align: top; +`; + +const AlgorithmType: FC = ({ style = {}, type, tagProps = {} }) => { + const [tagName, modelName] = useMemo(() => { + if (type === EnumAlgorithmProjectType.UNSPECIFIED) { + return ['自定义算法', '']; + } + + const [modelType, federalType] = type.split('_'); + let tagName = ''; + let modelName = ''; + + if (federalType) { + switch (federalType) { + case 'VERTICAL': + tagName = '纵向联邦'; + break; + case 'HORIZONTAL': + tagName = '横向联邦'; + break; + case 'COMPUTING': + tagName = '可信计算'; + break; + default: + tagName = '-'; + break; + } + } + + if (modelType) { + switch (modelType) { + case 'NN': + modelName = 'NN模型'; + break; + case 'TREE': + modelName = '树模型'; + break; + case 'TRUSTED': + modelName = ''; + break; + } + } + + return [tagName, modelName]; + }, [type]); + + const mergedTagStyle = useMemo(() => { + return { + fontWeight: 'normal', + ...(tagProps.style ?? {}), + }; + }, [tagProps.style]); + + return ( + + {tagName ? ( + + {tagName} + {modelName ? 
`-${modelName}` : ''} + + ) : ( + '' + )} + + ); +}; + +export default AlgorithmType; diff --git a/web_console_v2/client/src/components/BackButton/index.tsx b/web_console_v2/client/src/components/BackButton/index.tsx index 7a04963b0..a62bae414 100644 --- a/web_console_v2/client/src/components/BackButton/index.tsx +++ b/web_console_v2/client/src/components/BackButton/index.tsx @@ -1,17 +1,36 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; -import { Left } from 'components/IconPark'; -import GridRow from 'components/_base/GridRow'; import styled from 'styled-components'; +import GridRow from 'components/_base/GridRow'; +import { Left } from 'components/IconPark'; +import Modal, { CUSTOM_CLASS_NAME } from 'components/Modal'; + const Container = styled.div` cursor: pointer; `; type Props = { + /** the className prop of confirm modal */ + modalClassName?: string; + /** Alias onOK of Modal's props when isShowConfirmModal = true */ onClick?: (evt: React.MouseEvent) => void; + /** Show confirm modal after click children */ + isShowConfirmModal?: boolean; + /** Modal title when isShowConfirmModal = true */ + title?: string; + /** Modal content when isShowConfirmModal = true */ + content?: string; }; - -const BackButton: FC = ({ onClick, children }) => { +const BackButton: FC = ({ + onClick, + isShowConfirmModal = false, + title = '确认要退出?', + content = '退出后,当前所填写的信息将被清空。', + children, + modalClassName = CUSTOM_CLASS_NAME, +}) => { return ( @@ -22,7 +41,18 @@ const BackButton: FC = ({ onClick, children }) => { ); function onEleClick(evt: React.MouseEvent) { - onClick && onClick(evt); + if (isShowConfirmModal) { + Modal.confirm({ + className: modalClassName, + title: title, + content: content, + onOk() { + onClick?.(evt); + }, + }); + } else { + onClick?.(evt); + } } }; diff --git a/web_console_v2/client/src/components/BlockchainStorageTable/index.tsx b/web_console_v2/client/src/components/BlockchainStorageTable/index.tsx new file mode 100644 index 000000000..d0e56b5ef --- /dev/null +++ b/web_console_v2/client/src/components/BlockchainStorageTable/index.tsx @@ -0,0 +1,171 @@ +import React, { useMemo } from 'react'; +import GridRow from '../_base/GridRow'; +import { Statistic, Table } from '@arco-design/web-react'; +import { useTranslation } from 'react-i18next'; +import styled from 'styled-components'; +import { useQuery } from 'react-query'; +import { fetchDatasetLedger } from 'services/dataset'; +import { TABLE_COL_WIDTH, TIME_INTERVAL } from 'shared/constants'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { + DatasetTransactionItem, + DatasetTransactionStatus, + TransactionExtraData, +} from 'typings/dataset'; +import { formatTimestamp } from 'shared/date'; +import { useTablePaginationWithUrlState, useUrlState } from 'hooks'; +import { PaginationProps } from '@arco-design/web-react/es/Pagination/pagination'; +import { SorterResult } from '@arco-design/web-react/es/Table/interface'; +import { get } from 'lodash-es'; +import StateIndicator from '../StateIndicator'; +import { getTransactionStatus } from 'shared/dataset'; + +type IBlockchainStorageTable = { + datasetId: ID; +}; + +const StyledStatistic = styled(Statistic)` + margin: 12px 0; + .arco-statistic-value { + font-family: 'PingFang SC'; + font-style: normal; + font-size: 16px; + line-height: 20px; + .arco-statistic-value-prefix { + font-weight: 400; + } + } +`; + +export default function BlockchainStorageTable(prop: IBlockchainStorageTable) { + const { datasetId } = prop; + const { t } = 
useTranslation(); + const { paginationProps } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState({ + timestamp_sort: '', + }); + const query = useQuery(['fetch_dataset_ledger', datasetId], () => fetchDatasetLedger(datasetId), { + retry: 2, + refetchOnWindowFocus: false, + refetchInterval: TIME_INTERVAL.FLAG, + }); + const totalValue = useMemo(() => { + return get(query, 'data.data.total_value') || 0; + }, [query]); + const list = useMemo(() => { + return get(query, 'data.data.transactions') || []; + }, [query]); + const columns = useMemo[]>(() => { + return [ + { + title: t('dataset.col_ledger_hash'), + dataIndex: 'trans_hash', + key: 'trans_hash', + width: TABLE_COL_WIDTH.NAME, + ellipsis: true, + }, + { + title: t('dataset.col_ledger_block'), + dataIndex: 'block_number', + key: 'block_number', + width: TABLE_COL_WIDTH.NORMAL, + }, + { + title: t('dataset.col_ledger_trade_block_id'), + dataIndex: 'trans_index', + key: 'trans_index', + width: TABLE_COL_WIDTH.NORMAL, + }, + { + title: t('dataset.col_ledger_chain_time'), + dataIndex: 'timestamp', + key: 'timestamp', + width: TABLE_COL_WIDTH.TIME, + sorter(a: DatasetTransactionItem, b: DatasetTransactionItem) { + return a.timestamp - b.timestamp; + }, + defaultSortOrder: urlState?.timestamp_sort, + render: (date: number) =>
{formatTimestamp(date)}
, + }, + { + title: t('dataset.col_ledger_sender'), + dataIndex: 'sender_name', + key: 'sender_name', + width: TABLE_COL_WIDTH.NORMAL, + }, + { + title: t('dataset.col_ledger_receiver'), + dataIndex: 'receiver_name', + key: 'receiver_name', + width: TABLE_COL_WIDTH.OPERATION, + }, + { + title: t('dataset.col_ledger_trade_fee'), + dataIndex: 'value', + key: 'value', + width: TABLE_COL_WIDTH.NORMAL, + }, + { + title: t('dataset.col_ledger_trade_status'), + dataIndex: 'status', + key: 'status', + width: TABLE_COL_WIDTH.NORMAL, + render: (val: DatasetTransactionStatus) => { + return ; + }, + }, + { + title: t('dataset.col_ledger_trade_info'), + dataIndex: 'extra_data', + key: 'extra_data', + width: TABLE_COL_WIDTH.BIG_WIDTH, + ellipsis: true, + render: (val: TransactionExtraData) => { + return val?.transaction_info; + }, + }, + ]; + }, [t, urlState]); + const handleOnChange = ( + pagination: PaginationProps, + sorter: SorterResult, + filters: Partial>, + extra: { + currentData: DatasetTransactionItem[]; + action: 'paginate' | 'sort' | 'filter'; + }, + ) => { + switch (extra.action) { + case 'sort': + setUrlState((prevState) => ({ + ...prevState, + [`${sorter.field}_sort`]: sorter.direction, + })); + break; + default: + break; + } + }; + return ( + <> + + + +
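/* The onChange handler below persists sorting into the url query under a
 * `<field>_sort` key, so sorting the timestamp column yields
 * `?timestamp_sort=descend`. A minimal sketch of that key derivation: */
const exampleSorter = { field: 'timestamp', direction: 'descend' as const };
const exampleUrlKey = `${exampleSorter.field}_sort`; // 'timestamp_sort'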
+ + ); +} diff --git a/web_console_v2/client/src/components/BreadcrumbLink/Slash.tsx b/web_console_v2/client/src/components/BreadcrumbLink/Slash.tsx index 6a138887c..a48cc8697 100644 --- a/web_console_v2/client/src/components/BreadcrumbLink/Slash.tsx +++ b/web_console_v2/client/src/components/BreadcrumbLink/Slash.tsx @@ -1,3 +1,5 @@ +/* istanbul ignore file */ + import React, { ReactElement } from 'react'; import styled from 'styled-components'; @@ -8,7 +10,7 @@ const Slash = styled.div` transform: matrix(0.87, 0.5, 0.5, -0.87, 0, 0); border-radius: 0.5px; margin: 7px 2px 0; - background-color: var(--gray4); + background-color: rgb(var(--gray-4)); `; function BreadcrumbSlash(): ReactElement { diff --git a/web_console_v2/client/src/components/BreadcrumbLink/index.tsx b/web_console_v2/client/src/components/BreadcrumbLink/index.tsx index 36dff226f..4487603ec 100644 --- a/web_console_v2/client/src/components/BreadcrumbLink/index.tsx +++ b/web_console_v2/client/src/components/BreadcrumbLink/index.tsx @@ -1,6 +1,8 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; import styled from 'styled-components'; -import { Breadcrumb } from 'antd'; +import { Breadcrumb } from '@arco-design/web-react'; import Slash from './Slash'; import { Link } from 'react-router-dom'; import { useTranslation } from 'react-i18next'; @@ -11,7 +13,9 @@ const Container = styled(Breadcrumb)` type Props = { paths: { + /** Display label or i18n key */ label: string; + /** link */ to?: string; }[]; }; diff --git a/web_console_v2/client/src/components/ButtonWithModalConfirm/index.tsx b/web_console_v2/client/src/components/ButtonWithModalConfirm/index.tsx new file mode 100644 index 000000000..263b83b12 --- /dev/null +++ b/web_console_v2/client/src/components/ButtonWithModalConfirm/index.tsx @@ -0,0 +1,51 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; +import i18n from 'i18n'; + +import { Button } from '@arco-design/web-react'; +import Modal from 'components/Modal'; + +import { ButtonProps } from '@arco-design/web-react/es/Button'; + +export interface Props extends ButtonProps { + /** Alias onOK of Modal's props when isShowConfirmModal = true */ + onClick?: () => void; + /** Show confirm modal after click children */ + isShowConfirmModal?: boolean; + /** Modal title when isShowConfirmModal = true */ + title?: string; + /** Modal content when isShowConfirmModal = true */ + content?: string; +} + +const ButtonWithModalConfirm: FC = ({ + isShowConfirmModal = false, + title = i18n.t('msg_quit_modal_title'), + content = i18n.t('msg_quit_modal_content'), + children, + onClick, + ...resetProps +}) => { + return ( + + ); + + function _onClick() { + if (isShowConfirmModal) { + Modal.confirm({ + title: title, + content: content, + onOk() { + onClick?.(); + }, + }); + } else { + onClick?.(); + } + } +}; + +export default ButtonWithModalConfirm; diff --git a/web_console_v2/client/src/components/ButtonWithPopconfirm/index.tsx b/web_console_v2/client/src/components/ButtonWithPopconfirm/index.tsx new file mode 100644 index 000000000..c89d7b89c --- /dev/null +++ b/web_console_v2/client/src/components/ButtonWithPopconfirm/index.tsx @@ -0,0 +1,51 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; +import i18n from 'i18n'; + +import { Popconfirm, Button } from '@arco-design/web-react'; +import { PopconfirmProps } from '@arco-design/web-react/es/Popconfirm'; +import { ButtonProps } from '@arco-design/web-react/es/Button'; + +export interface Props { + onCancel?: () => void; + onConfirm?: () => 
void; + /** Popconfirm title */ + title?: React.ReactNode; + /** Button title */ + buttonText?: React.ReactNode; + /** Popconfirm ok button title */ + okText?: string; + /** Popconfirm cancel button title */ + cancelText?: string; + /** Arco button props */ + buttonProps?: ButtonProps; + /** Arco popconfirmutton props */ + popconfirmProps?: PopconfirmProps; +} + +const ButtonWithPopconfirm: FC = ({ + title = i18n.t('msg_quit_warning'), + buttonText = i18n.t('cancel'), + okText = i18n.t('submit'), + cancelText = i18n.t('cancel'), + onCancel, + onConfirm, + buttonProps, + popconfirmProps, +}) => { + return ( + + + + ); +}; + +export default ButtonWithPopconfirm; diff --git a/web_console_v2/client/src/components/CheckboxWithPopconfirm/index.tsx b/web_console_v2/client/src/components/CheckboxWithPopconfirm/index.tsx new file mode 100644 index 000000000..8070b5664 --- /dev/null +++ b/web_console_v2/client/src/components/CheckboxWithPopconfirm/index.tsx @@ -0,0 +1,74 @@ +/* istanbul ignore file */ + +import React, { FC, useState } from 'react'; + +import { Checkbox, Popconfirm } from '@arco-design/web-react'; + +import { CheckboxProps } from '@arco-design/web-react/es/Checkbox'; + +type Props = { + /** Popconfirm title */ + title?: string; + /** Checkbox text */ + text?: string; + /** Popconfirm cancel button title */ + cancelText?: string; + /** Popconfirm ok button title */ + okText?: string; + /** Checkbox disabled */ + disabled?: boolean; + /** Checkbox value */ + value?: boolean; + onChange?: (val: boolean) => void; +} & CheckboxProps; +const CheckboxWithPopconfirm: FC = ({ + value, + onChange, + title, + text, + disabled, + cancelText = '取消', + okText = '确认', + ...props +}) => { + const [isShowCheckboxPopconfirm, setIsShowCheckboxPopconfirm] = useState(false); + + return ( + <> + + + {text} + + + + ); + function onCheckboxClick(e: any) { + if (disabled) { + return; + } + + if (value) { + onChange && onChange(false); + } else { + setIsShowCheckboxPopconfirm(true); + } + } + + function onCheckboxPopconfirmConfirm() { + setIsShowCheckboxPopconfirm(false); + onChange && onChange(true); + } + function onCheckboxPopconfirmCancel() { + setIsShowCheckboxPopconfirm(false); + onChange && onChange(false); + } +}; + +export default CheckboxWithPopconfirm; diff --git a/web_console_v2/client/src/components/CheckboxWithTooltip/index.tsx b/web_console_v2/client/src/components/CheckboxWithTooltip/index.tsx new file mode 100644 index 000000000..b2fff16b7 --- /dev/null +++ b/web_console_v2/client/src/components/CheckboxWithTooltip/index.tsx @@ -0,0 +1,35 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; +import { Checkbox, Tooltip } from '@arco-design/web-react'; + +import { CheckboxProps } from '@arco-design/web-react/es/Checkbox'; + +type Props = { + /** Tooltip title */ + tip?: string; + /** Checkbox text */ + text?: string; + /** Checkbox value */ + value?: boolean; + onChange?: (val: boolean) => void; +} & CheckboxProps; +const CheckboxWithTooltip: FC = ({ value, onChange, tip, text, ...props }) => { + return ( + <> + + { + onChange?.(checked); + }} + checked={value} + {...props} + > + {text} + + + + ); +}; + +export default CheckboxWithTooltip; diff --git a/web_console_v2/client/src/components/ClickToCopy/index.tsx b/web_console_v2/client/src/components/ClickToCopy/index.tsx index 1acb9f049..197d91498 100644 --- a/web_console_v2/client/src/components/ClickToCopy/index.tsx +++ b/web_console_v2/client/src/components/ClickToCopy/index.tsx @@ -1,28 +1,43 @@ +/* istanbul ignore file */ + 
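/* Illustrative usage of the ButtonWithPopconfirm component above — a cancel
 * button guarded by a confirmation popover; the handler is hypothetical. */
import React from 'react';
import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm';

function CancelAction() {
  return (
    <ButtonWithPopconfirm
      title="确认要退出?"
      buttonText="取消"
      onConfirm={() => window.history.back()}
    />
  );
}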
import React, { FC } from 'react'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { copyToClipboard } from 'shared/helpers'; -import { message } from 'antd'; +import i18n from 'i18n'; -const Container = styled.div` - cursor: pointer; -`; +import { newCopyToClipboard, to } from 'shared/helpers'; +import { Message } from '@arco-design/web-react'; type Props = { + /** Text that will be copied */ text: string; + /** Tip that it will show when copied success */ + successTip?: string; + /** Tip that it will show when copied fail */ + failTip?: string; }; -const ClickToCopy: FC = ({ children, text }) => { - const { t } = useTranslation(); - - return {children}; - - function onClick() { - const isOK = copyToClipboard(text); +const ClickToCopy: FC = ({ + children, + text, + successTip = i18n.t('app.copy_success'), + failTip = i18n.t('app.copy_fail'), +}) => { + return ( +
+ {children} +
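/* Illustrative usage of ClickToCopy (text and tips hypothetical): any
 * children become a click-to-copy trigger with success/fail messages. */
import React from 'react';
import ClickToCopy from 'components/ClickToCopy';

function TokenCell() {
  return (
    <ClickToCopy text="a1b2c3d4" successTip="复制成功" failTip="复制失败">
      <span>a1b2c3d4</span>
    </ClickToCopy>
  );
}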
+ ); - if (isOK) { - message.success(t('app.copy_success')); + async function onClick() { + const [, error] = await to(newCopyToClipboard(text)); + if (error) { + return Message.error(failTip!); } + return Message.success(successTip!); } }; diff --git a/web_console_v2/client/src/components/CodeEditor/__mocks__/index.tsx b/web_console_v2/client/src/components/CodeEditor/__mocks__/index.tsx new file mode 100644 index 000000000..9d8b86297 --- /dev/null +++ b/web_console_v2/client/src/components/CodeEditor/__mocks__/index.tsx @@ -0,0 +1,109 @@ +import React, { FC, useEffect, useRef, useState } from 'react'; +import { noop } from 'lodash-es'; + +import { EditorProps, Monaco } from '@monaco-editor/react'; +import type * as monaco from 'monaco-editor/esm/vs/editor/editor.api'; + +export const VS_DARK_COLOR = '#1e1e1e'; + +export enum Action { + Save = 'code_editor_action_save', +} + +export type CodeEditorProps = Omit & { + /** Code text */ + value?: string; + onChange?: (value: string) => void; + language?: 'json' | 'python' | 'javascript' | 'java' | 'go'; + isReadOnly?: boolean; + /** Get editor/monaco instance on mount */ + getInstance?: (editor: monaco.editor.IStandaloneCodeEditor, monaco: Monaco) => void; +}; + +const CodeEditor: FC = ({ value, onChange, getInstance, isReadOnly }) => { + const keyToModelMap = useRef<{ + [key: string]: { + code: string; + dispose: () => void; + }; + }>({}); + const $input = useRef(null); + + const [innerValue, setInnerValue] = useState(value); + + const isControlled = typeof value === 'string'; + + useEffect(() => { + if (isControlled) { + setInnerValue(value); + } + }, [value, isControlled]); + + useEffect(() => { + getInstance?.( + { + saveViewState: noop as any, + restoreViewState: noop as any, + setModel: (model: any) => { + const code = model.code; + if (isControlled) { + setInnerValue(code); + } else { + $input.current!.value = code; + } + }, + } as any, + { + Uri: { + file: (key: string) => key, + }, + editor: { + getModel: (key: string) => keyToModelMap.current[key] ?? null, + createModel: (code: string, language: string, uri: string) => { + if (isControlled) { + setInnerValue(code); + } else { + $input.current!.value = code; + } + + const model = { + code, + dispose: () => { + delete keyToModelMap.current[uri]; + if (Object.keys(keyToModelMap.current).length === 0) { + if (isControlled) { + setInnerValue(''); + } else { + $input.current!.value = ''; + } + } + }, + }; + keyToModelMap.current[uri] = model; + return model; + }, + }, + } as any, + ); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + return ( + { + const { value, fileKey } = event.target; + + const model = keyToModelMap.current[fileKey]; + if (model) { + model.code = value; + } + onChange?.(value); + }} + value={isControlled ? 
innerValue : undefined} + data-testid="input-code-editor" + disabled={isReadOnly} + /> + ); +}; +export default CodeEditor; diff --git a/web_console_v2/client/src/components/CodeEditor/index.tsx b/web_console_v2/client/src/components/CodeEditor/index.tsx index f10e6322a..157c03345 100644 --- a/web_console_v2/client/src/components/CodeEditor/index.tsx +++ b/web_console_v2/client/src/components/CodeEditor/index.tsx @@ -1,50 +1,122 @@ -import React, { FC } from 'react'; -import Editor, { EditorProps } from '@monaco-editor/react'; +/* istanbul ignore file */ -export type CodeEditorProps = Omit & { +import React, { FC, useRef } from 'react'; +import Editor, { EditorProps, loader, Monaco } from '@monaco-editor/react'; +import { Message } from '@arco-design/web-react'; +import styled from 'styled-components'; + +import { formatJSONValue } from 'shared/helpers'; +import type * as monaco from 'monaco-editor/esm/vs/editor/editor.api'; +import PubSub from 'pubsub-js'; + +export type CodeEditorProps = Omit & { + /** Code text */ value?: string; onChange?: (value: string) => void; - language: 'json' | 'python'; + language?: 'json' | 'python' | 'javascript' | 'java' | 'go' | 'shell'; + isReadOnly?: boolean; + /** Get editor/monaco instance on mount */ + getInstance?: (editor: monaco.editor.IStandaloneCodeEditor, monaco: Monaco) => void; }; -const formatJsonValue = (str: string) => { - try { - const value = JSON.stringify(JSON.parse(str)); - return value; - } catch (error) { - return str; +async function monacoInit() { + // If current env isn't dumi, use local monaco file. otherwise, monaco files are being downloaded from CDN. + // https://github.com/suren-atoyan/monaco-react#loader-config + if (!process.env.IS_DUMI_ENV) { + const monaco = await import('monaco-editor/esm/vs/editor/editor.api.js'); + loader.config({ monaco }); } -}; + const monacoInstance = await loader.init(); + monacoInstance.editor.defineTheme('grey', { + base: 'vs', + inherit: true, + rules: [], + colors: { + 'editor.background': '#F6F7FB', + }, + }); +} + +monacoInit(); export const VS_DARK_COLOR = '#1e1e1e'; -const CodeEditor: FC = ({ value, onChange, language, ...props }) => { +export enum Action { + Save = 'code_editor_action_save', +} + +const StyledCodeEditor = styled(Editor)<{ + isReadOnly?: boolean; +}>` + .monaco-editor .cursors-layer > .cursor { + ${(props) => props.options?.readOnly && `display: none !important`}; + } + .monaco-editor .overflowingContentWidgets { + ${(props) => props.options?.readOnly && `display: none !important`}; + } +`; +const CodeEditor: FC = ({ + value, + onChange, + language, + isReadOnly = false, + getInstance, + ...props +}) => { + const editorRef = useRef(null); return ( - ); function onCodeChange(val?: string) { if (language === 'json') { - onChange && onChange(formatJsonValue(val || '')); + onChange && onChange(formatJSONValue(val || '')); return; } onChange && onChange(val || ''); } + + function onEditorDidMount(editor: any, monaco: any) { + if (!isReadOnly) { + editorRef.current = editor; + + const KM = monaco.KeyMod; + const KC = monaco.KeyCode; + + editorRef.current?.addCommand(KM.CtrlCmd | KC.KEY_S, () => { + PubSub.publish(Action.Save); + Message.success('已保存'); + }); + } + if (getInstance) { + getInstance(editor, monaco); + } + } }; export default CodeEditor; diff --git a/web_console_v2/client/src/components/CodeEditorDrawer/index.tsx b/web_console_v2/client/src/components/CodeEditorDrawer/index.tsx new file mode 100644 index 000000000..b329e3e3e --- /dev/null +++ 
b/web_console_v2/client/src/components/CodeEditorDrawer/index.tsx @@ -0,0 +1,122 @@ +/* istanbul ignore file */ + +import React, { useState } from 'react'; +import ReactDOM from 'react-dom'; + +import { Drawer, DrawerProps, Button } from '@arco-design/web-react'; +import CodeEditor, { CodeEditorProps } from 'components/CodeEditor'; + +export type CodeEditorDrawerProps = { + visible: boolean; + value?: string; + onChange?: (val: string) => void; + isReadOnly?: boolean; + language?: string; + theme?: string; + drawerWidth?: string | number; + container?: HTMLElement; + onClose?: (params?: any) => void; + codeEditorProps?: CodeEditorProps; +} & Pick; + +export type ButtonProps = Omit & { + text?: string; + children?: React.ReactNode; +}; +export type ShowProps = Omit; + +function CodeEditorDrawer({ + title, + value, + visible, + container, + language = 'json', + isReadOnly = true, + theme = 'grey', + drawerWidth = '50%', + codeEditorProps, + afterClose, + onClose, + onChange, +}: CodeEditorDrawerProps) { + return ( + container || window.document.body} + > + + + ); +} + +function _Button({ text = '查看', children, ...restProps }: ButtonProps) { + const [visible, setVisible] = useState(false); + return ( + <> + + setVisible(false)} /> + + ); +} + +function show(props: ShowProps) { + const key = `__code_editor_drawer_${Date.now()}__`; + const container = window.document.createElement('div'); + container.style.zIndex = '1000'; + window.document.body.appendChild(container); + + _hide(props); + _show(props); + + function _renderComp(props: CodeEditorDrawerProps) { + ReactDOM.render( + React.createElement(CodeEditorDrawer, { + ...props, + key, + container, + onClose() { + props.onClose?.(); + _hide(props); + }, + }), + container, + ); + } + + function _hide(props: ShowProps) { + _renderComp({ + ...props, + visible: false, + afterClose() { + window.document.body.removeChild(container); + }, + }); + } + + function _show(props: ShowProps) { + _renderComp({ + ...props, + visible: true, + }); + } +} + +CodeEditorDrawer.show = show; +CodeEditorDrawer.Button = _Button; + +export default CodeEditorDrawer; diff --git a/web_console_v2/client/src/components/CodeEditorFormButton/index.tsx b/web_console_v2/client/src/components/CodeEditorFormButton/index.tsx new file mode 100644 index 000000000..8f07a9611 --- /dev/null +++ b/web_console_v2/client/src/components/CodeEditorFormButton/index.tsx @@ -0,0 +1,69 @@ +/* istanbul ignore file */ + +import React, { FC, useState } from 'react'; +import styled from 'styled-components'; + +import { Button } from '@arco-design/web-react'; +import { IconCodeSquare } from '@arco-design/web-react/icon'; +import { FileData } from 'components/FileExplorer'; +import CodeEditorModal, { Props as CodeEditorModalProps } from 'components/CodeEditorModal'; + +export type Props = { + value?: FileData; + onChange?: (val?: FileData) => any; + disabled?: boolean; + buttonText?: string; + buttonType?: 'default' | 'primary' | 'secondary' | 'dashed' | 'text' | 'outline'; + buttonIcon?: React.ReactNode; + buttonStyle?: React.CSSProperties; +} & Partial; + +const Container = styled.div``; + +const CodeEditorFormButton: FC = ({ + value, + onChange, + disabled, + buttonStyle = {}, + buttonText = '打开代码编辑器', + buttonType = 'default', + buttonIcon = , + title = '代码编辑器', + ...resetProps +}) => { + const [isShowCodeEditorModal, setIsShowCodeEditorModal] = useState(false); + + return ( + + + + + ); + + function onButtonClick() { + setIsShowCodeEditorModal(true); + } + function onCloseButtonClick() { + 
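/* Illustrative call to the imperative show() API exported above — it opens
 * a read-only drawer without mounting the component manually (values
 * hypothetical): */
import CodeEditorDrawer from 'components/CodeEditorDrawer';

CodeEditorDrawer.show({
  title: 'config.json',
  value: JSON.stringify({ a: 1 }, null, 2),
  language: 'json',
});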
setIsShowCodeEditorModal(false); + } + function onSave(fileData: FileData) { + onChange && onChange(fileData); + } +}; + +export default CodeEditorFormButton; diff --git a/web_console_v2/client/src/components/CodeEditorModal/Tab.tsx b/web_console_v2/client/src/components/CodeEditorModal/Tab.tsx new file mode 100644 index 000000000..13e1fb807 --- /dev/null +++ b/web_console_v2/client/src/components/CodeEditorModal/Tab.tsx @@ -0,0 +1,104 @@ +import React, { FC } from 'react'; +import styled from 'styled-components'; +import { Tooltip } from '@arco-design/web-react'; + +import { MixinEllipsis } from 'styles/mixins'; + +import { Close } from 'components/IconPark'; + +const IconContainer = styled.div` + display: inline-block; + margin-right: 8px; + overflow: hidden; +`; +const Name = styled.div` + ${MixinEllipsis(80, '%')} + display: inline-block; + font-size: 12px; + font-weight: 400; + color: var(--font-color); +`; +const StyledClose = styled(Close)` + position: absolute; + right: 8px; + top: 50%; + transform: translateY(-50%); +`; + +const Container = styled.div` + --bg-color: #f2f3f8; + --bg-color-active: #fff; + --icon-color: #86909c; + --icon-color-active: #468dff; + --font-color: #1d2129; + --border-color: #e5e8ee; + --border-color-active: #1678ff; + + position: relative; + display: inline-block; + height: 36px; + line-height: 36px; + padding: 0 32px 0 12px; + + min-width: 80px; + max-width: 200px; + background-color: var(--bg-color); + border-right: 1px solid var(--border-color); + border-bottom: 1px solid transparent; + cursor: pointer; + + &[data-is-active='true'] { + background-color: var(--bg-color-active); + border-bottom: 1px solid var(--border-color-active); + + ${IconContainer} { + color: var(--icon-color-active); + } + ${Name} { + font-weight: 500; + } + } +`; + +type Props = { + theme?: string; + isActive?: boolean; + icon?: React.ReactNode; + fileName?: string; + fullPathFileName?: string; + onClick?: () => void; + onClose?: () => void; +}; + +const Tab: FC = ({ + theme, + isActive = false, + icon, + fileName = '', + fullPathFileName = '', + onClick, + onClose, +}) => { + return ( + + + {icon && {icon}} + {fileName || ''} + { + event.stopPropagation(); + onClose && onClose(); + }} + /> + + + ); +}; + +export default Tab; diff --git a/web_console_v2/client/src/components/CodeEditorModal/index.integration.test.tsx b/web_console_v2/client/src/components/CodeEditorModal/index.integration.test.tsx new file mode 100644 index 000000000..3c759d0b4 --- /dev/null +++ b/web_console_v2/client/src/components/CodeEditorModal/index.integration.test.tsx @@ -0,0 +1,1031 @@ +import React from 'react'; +import { render, fireEvent, waitFor, RenderResult, within } from '@testing-library/react'; +import mock from 'xhr-mock'; + +import { waitForLoadingEnd } from 'shared/testUtils'; +import * as api from 'services/algorithm'; +import * as helpers from 'shared/helpers'; +import { getFileInfoByFilePath } from 'shared/file'; + +import { BaseCodeEditor } from './index'; +import { FileData } from 'components/FileExplorer'; + +import { FileTreeNode, FileContent } from 'typings/algorithm'; + +jest.mock('components/CodeEditor'); +jest.mock('services/algorithm'); + +const mockApi = api as jest.Mocked; + +const testTreeList: FileTreeNode[] = [ + { + filename: 'owner.py', + path: 'owner.py', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + { + filename: 't1.yaml', + path: 't1.yaml', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + { + filename: 't2.config', 
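/* Illustrative usage of the Tab presentational component above (file names
 * hypothetical): */
import React from 'react';
import Tab from 'components/CodeEditorModal/Tab';

function OpenFileTab() {
  return (
    <Tab
      isActive
      fileName="main.py"
      fullPathFileName="leader/main.py"
      onClick={() => {}}
      onClose={() => {}}
    />
  );
}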
+ path: 't2.config', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + { + filename: 't3.json', + path: 't3.json', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + { + filename: 'leader', + path: 'leader', + size: 96, + mtime: 1637141275, + is_directory: true, + files: [ + { + filename: 'main.py', + path: 'leader/main.py', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + + { + filename: 'test', + path: 'leader/test', + size: 96, + mtime: 1637141275, + is_directory: true, + files: [ + { + filename: 't1.js', + path: 'leader/test/t1.js', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + ], + }, + ], + }, +]; + +const testFiledata: FileData = { + 'owner.py': '# coding: utf-8', + 'leader/main.py': 'I am leader/main.py', + 'leader/test/t1.js': 'var a = 1;', + 't1.yaml': `# Get Started with Codebase CI`, + 't2.config': '# coding: utf-8', + 't3.json': '{ "a":1 }', +}; + +const testFileKey = 'owner.py'; +const testFileContent = '# coding: utf-8'; + +const asyncProps = { + id: 3, + isAsyncMode: true, + getFileTreeList: () => api.fetchAlgorithmProjectFileTreeList(1).then((res) => res.data), + getFile: (filePath: string) => + api + .fetchAlgorithmProjectFileContentDetail(1, { + path: filePath, + }) + .then((res) => res.data.content), +}; + +function _getFileName(fileKey: string) { + const { fileName } = getFileInfoByFilePath(fileKey); + return fileName; +} + +describe('', () => { + let wrapper: RenderResult; + let $createRootFileBtn: HTMLElement; + let $createRootFolderBtn: HTMLElement; + let $editBtn: HTMLElement; + let $input: HTMLInputElement; + let $codeEditorInput: HTMLInputElement; + let $tabList: HTMLElement; + let $treeContainer: HTMLElement; + let $moreActionBtn: HTMLElement; + let $deleteBtn: HTMLElement; + let $fileInput: HTMLElement; + + let giveWeakRandomKeySpy: jest.SpyInstance; + const tempRandomKey = 'tempFile'; + let tempNodeKey = ''; + + async function _createFile(fileKey: string, isAsyncMode = false, isAPISuccess = true) { + return _createNode(fileKey, true, isAsyncMode, isAPISuccess); + } + async function _createFolder(folderKey: string, isAsyncMode = false, isAPISuccess = true) { + return _createNode(folderKey, false, isAsyncMode, isAPISuccess); + } + async function _createNode( + nodeKey: string, + isFile = true, + isAsyncMode = false, + isAPISuccess = true, + ) { + let $createFileBtn: HTMLElement | null; + let $createFolderBtn: HTMLElement | null; + + const { parentPath, fileName } = getFileInfoByFilePath(nodeKey); + + // Get existed button of creating through parent nodes + if (isFile) { + $createFileBtn = parentPath + ? wrapper.getByTestId(`btn-create-file-${parentPath}`) + : $createRootFileBtn; + $createFolderBtn = parentPath + ? wrapper.getByTestId(`btn-create-folder-${parentPath}`) + : $createRootFolderBtn; + } else { + $createFileBtn = wrapper.queryByTestId(`btn-create-file-${parentPath}`); + $createFolderBtn = wrapper.queryByTestId(`btn-create-folder-${parentPath}`); + + while (!$createFileBtn || !$createFolderBtn) { + const { parentPath: innerParentPath } = getFileInfoByFilePath(parentPath); + + if (!innerParentPath) { + $createFileBtn = $createRootFileBtn; + $createFolderBtn = $createRootFolderBtn; + } else { + $createFileBtn = wrapper.queryByTestId(`btn-create-file-${innerParentPath}`); + $createFolderBtn = wrapper.queryByTestId(`btn-create-folder-${innerParentPath}`); + } + } + } + + // Click create button to enter focus mode + fireEvent.click(isFile ? 
$createFileBtn! : $createFolderBtn!); + + await waitFor(() => { + $input = wrapper.queryByTestId( + `input-${parentPath ? `${parentPath}/` : ''}${tempNodeKey}`, + ) as HTMLInputElement; + expect($input).toBeInTheDocument(); + }); + + // Only 1 input dom should be rendered + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + // Default value is empty string + expect($input.value).toBe(''); + + // Trigger blur event to save the value + fireEvent.blur($input, { target: { value: fileName } }); + + expect($input.value).toBe(fileName); + + if (isAsyncMode) { + await waitForLoadingEnd(wrapper); + } + + await waitFor(() => { + expect( + wrapper.queryByTestId(`input-${parentPath ? `${parentPath}/` : ''}${tempNodeKey}`), + ).not.toBeInTheDocument(); + if (!isAsyncMode || isAPISuccess) { + expect(wrapper.queryByTestId(nodeKey)).toBeInTheDocument(); + } + }); + + if (isFile && (!isAsyncMode || isAPISuccess)) { + expect(wrapper.container.querySelectorAll('.arco-tree-node-selected').length).toBe(1); + } + } + async function _selectNode(nodeKey: string, isAsyncMode = false) { + const $fileNode = wrapper.getByTestId(nodeKey); + fireEvent.click(within($fileNode).getByText(_getFileName(nodeKey))); + + try { + if (isAsyncMode) { + await waitForLoadingEnd(wrapper); + } + } catch (error) { + // If there is no loading, do nothing + } + + expect(wrapper.container.querySelectorAll('.arco-tree-node-selected').length).toBe(1); + } + async function _renameNode( + nodeKey: string, + newNodeName: string, + isAsyncMode = false, + isAPISuccess = true, + ) { + $editBtn = wrapper.getByTestId(`btn-edit-${nodeKey}`); + $input = wrapper.queryByTestId(`input-${nodeKey}`) as HTMLInputElement; + + const { fileName } = getFileInfoByFilePath(nodeKey); + + // Input should not be rendered + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(0); + expect($input).not.toBeInTheDocument(); + + // Click edit button to enter focus mode + fireEvent.click($editBtn); + + await waitFor(() => { + $input = wrapper.queryByTestId(`input-${nodeKey}`) as HTMLInputElement; + expect($input).toBeInTheDocument(); + }); + + // Only 1 input dom should be rendered + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + // Default value is file name + expect($input.value).toBe(fileName); + + // Rename fileName to newNodeName + fireEvent.change($input, { target: { value: newNodeName } }); + expect($input.value).toBe(newNodeName); + + // Trigger blur event + fireEvent.blur($input, { target: { value: newNodeName } }); + + if (isAsyncMode) { + await waitForLoadingEnd(wrapper); + } + + await waitFor(() => { + expect(wrapper.queryByTestId(`input-${nodeKey}`)).not.toBeInTheDocument(); + }); + + if (!isAsyncMode || isAPISuccess) { + await waitFor(() => { + expect(within($treeContainer).queryByText(newNodeName)).toBeInTheDocument(); + }); + expect(within($treeContainer).queryByText(fileName)).not.toBeInTheDocument(); + } else { + await waitFor(() => { + expect(within($treeContainer).queryByText(fileName)).toBeInTheDocument(); + }); + expect(within($treeContainer).queryByText(newNodeName)).not.toBeInTheDocument(); + } + } + async function _deleteNode(fileKey: string, isAsyncMode = false, isAPISuccess = true) { + const $fileNode = wrapper.getByTestId(fileKey); + + $moreActionBtn = within($fileNode).getByTestId(`btn-more-actions`); + + if (!wrapper.queryByTestId(`btn-more-acitons-delete-${fileKey}`)) { + // Click more action button to show delete button + fireEvent.click($moreActionBtn); 
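/* These helpers lean on getFileInfoByFilePath from shared/file; its assumed
 * contract, inferred from usage in this test (illustrative only): */
import { getFileInfoByFilePath } from 'shared/file';

const { parentPath, fileName } = getFileInfoByFilePath('leader/test/t1.js');
// parentPath === 'leader/test', fileName === 't1.js'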
+ } + + // Wait for delete button to be visible + await waitFor(() => + expect(wrapper.getByTestId(`btn-more-acitons-delete-${fileKey}`)).toBeVisible(), + ); + + // Find delete button + $deleteBtn = wrapper.getByTestId(`btn-more-acitons-delete-${fileKey}`)!; + + // Click delete button + fireEvent.click($deleteBtn); + + if (isAsyncMode) { + await waitForLoadingEnd(wrapper); + } + + if (!isAsyncMode || isAPISuccess) { + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-${fileKey}`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(fileKey)).not.toBeInTheDocument(); + }); + } else { + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-${fileKey}`)).toBeInTheDocument(); + expect(wrapper.queryByTestId(fileKey)).toBeInTheDocument(); + }); + } + } + async function _uploadFile( + fileKey: string, + fileContent: string, + isAsyncMode = false, + isAPISuccess = true, + ) { + const { fileName } = getFileInfoByFilePath(fileKey); + const mockFile = new File([fileContent], fileName, { type: 'text/plain' }); + + fireEvent.change($fileInput, { target: { files: [mockFile] } }); + + try { + if (isAsyncMode) { + await waitForLoadingEnd(wrapper); + } + } catch (error) { + // If there is no loading, do nothing + } + + if (!isAsyncMode || isAPISuccess) { + await waitFor(() => { + expect(wrapper.queryByTestId(fileKey)).toBeInTheDocument(); + }); + } else { + await waitFor(() => { + expect(wrapper.queryByTestId(fileKey)).not.toBeInTheDocument(); + }); + } + } + + async function _closeTab(fileKey: string) { + fireEvent.click(wrapper.getByTestId(`tab-btn-close-${fileKey}`)); + + await _waitForTabHidden(fileKey); + } + + async function _editCodeEditorValue(fileKey: string, newValue: string) { + fireEvent.change($codeEditorInput, { target: { value: newValue, fileKey }, fileKey }); + await waitFor(() => { + expect($codeEditorInput.value).toBe(newValue); + }); + } + async function _waitForTabShow(fileKey: string, isActive: boolean = true) { + await waitFor(() => wrapper.getByTestId(`tab-${fileKey}`)); + expect(wrapper.getByTestId(`tab-${fileKey}`)).toHaveAttribute( + 'data-is-active', + isActive ? 
'true' : 'false', + ); + } + async function _waitForTabHidden(fileKey: string) { + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-${fileKey}`)).not.toBeInTheDocument(); + }); + } + describe('isAsyncMode = false', () => { + beforeEach(async () => { + giveWeakRandomKeySpy = jest.spyOn(helpers, 'giveWeakRandomKey').mockImplementation(() => { + return tempRandomKey; + }); + tempNodeKey = `${tempRandomKey}`; + + wrapper = render( + , + ); + expect(wrapper.getByText('I am title')).toBeInTheDocument(); + + $createRootFileBtn = wrapper.getByTestId('btn-create-file-on-root'); + $createRootFolderBtn = wrapper.getByTestId('btn-create-folder-on-root'); + $tabList = wrapper.getByTestId('tab-list'); + $fileInput = wrapper.container.querySelector('input[type="file"]')!; + + expect($createRootFileBtn).toBeInTheDocument(); + expect($createRootFolderBtn).toBeInTheDocument(); + + await waitFor(() => wrapper.getAllByText(testFileKey)[0]); // file node + $treeContainer = wrapper.container.querySelector('.arco-tree')!; + + const nodeList = wrapper.container.querySelectorAll('.arco-tree .arco-tree-node'); + expect(nodeList.length).toBe(8); + + await waitFor(() => wrapper.getByTestId(`tab-${testFileKey}`)); + + expect(wrapper.getByTestId(`tab-${testFileKey}`)).toHaveAttribute('data-is-active', 'true'); + + $codeEditorInput = wrapper.getByTestId('input-code-editor') as HTMLInputElement; + expect($codeEditorInput.value).toBe(testFileContent); + }); + + afterEach(() => { + giveWeakRandomKeySpy.mockRestore(); + }); + + it(` + 1. create new file(newFile.js) on root + 2. edit the new file(newFile.js) content + 3. select the other file(t1.yaml) to trigger tab change, and save the value that belong to the new file + 4. edit the other file(t1.yaml) content + 5. select new file(newFile.js) again to trigger tab change, and save the value that belong to the other file + 6. select first file(owner.py) again to trigger tab change, and save the value that belong to the first file + `, async () => { + await _createFile('newFile.js'); + await _waitForTabShow('newFile.js'); + await _editCodeEditorValue('newFile.js', 'I am newFile.js'); + await _selectNode('t1.yaml'); + await _waitForTabShow('t1.yaml'); + expect($codeEditorInput.value).toBe('# Get Started with Codebase CI'); + await _editCodeEditorValue('t1.yaml', 'I am t1.yaml'); + await _selectNode('newFile.js'); + await _waitForTabShow('newFile.js'); + expect($codeEditorInput.value).toBe('I am newFile.js'); + await _selectNode(testFileKey); + await _waitForTabShow(testFileKey); + expect($codeEditorInput.value).toBe(testFileContent); + }); + + it(` + 1. select file(t1.yaml) + 2. select file(t2.config) + 3. select file(t3.json) + 4. select file(t2.config) + 5. close file tab(t3.json) + 6. close file tab(t2.config) + 6. close file tab(t1.yaml) + 7. 
close file tab(owner.py) + `, async () => { + expect($tabList.children.length).toBe(1); + await _selectNode('t1.yaml'); + await _waitForTabShow('t1.yaml'); + expect($codeEditorInput.value).toBe('# Get Started with Codebase CI'); + expect($tabList.children.length).toBe(2); + await _selectNode('t2.config'); + await _waitForTabShow('t2.config'); + expect($codeEditorInput.value).toBe('# coding: utf-8'); + expect($tabList.children.length).toBe(3); + await _selectNode('t3.json'); + await _waitForTabShow('t3.json'); + expect($codeEditorInput.value).toBe('{ "a":1 }'); + expect($tabList.children.length).toBe(4); + await _selectNode('t2.config'); + await _waitForTabShow('t2.config'); + expect($tabList.children.length).toBe(4); + expect($codeEditorInput.value).toBe('# coding: utf-8'); + await _closeTab('t3.json'); + expect($tabList.children.length).toBe(3); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-t2.config`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe('# coding: utf-8'); + await _closeTab('t2.config'); + expect($tabList.children.length).toBe(2); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-owner.py`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe(testFileContent); + await _closeTab('t1.yaml'); + expect($tabList.children.length).toBe(1); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-owner.py`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe(testFileContent); + expect($codeEditorInput).not.toBeDisabled(); + await _closeTab(testFileKey); + expect($tabList.children.length).toBe(0); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(0); + expect($codeEditorInput.value).toBe(''); + expect($codeEditorInput).toBeDisabled(); + }); + + describe('Handle folder', () => { + beforeEach(async () => { + await _selectNode('leader/main.py'); + await _waitForTabShow('leader/main.py'); + // There are 3 nodes(1 folder + 2 file) in this folder before create + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(3); + + await _createFile('leader/newFile.js'); + await _waitForTabShow('leader/newFile.js'); + await _editCodeEditorValue('leader/newFile.js', 'I am leader/newFile.js'); + // There are 4 nodes(1 folder + 3 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(4); + + await _createFolder('leader/newFolder'); + // There are 5 nodes(2 folder + 3 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(5); + + await _selectNode('leader/newFolder'); + await _createFile('leader/newFolder/newFile2.js'); + await _waitForTabShow('leader/newFolder/newFile2.js'); + await _editCodeEditorValue( + 'leader/newFolder/newFile2.js', + 'I am leader/newFolder/newFile2.js', + ); + // There are 6 nodes(2 folder + 4 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + + // There are 4 tab(owner.py, leader/main.py, leader/newFile.js, leader/newFolder/newFile2.js) + expect($tabList.children.length).toBe(4); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect($codeEditorInput.value).toBe('I am leader/newFolder/newFile2.js'); + }); + + it(` + 1. select file(leader/main.py) + 2. create new file(leader/newFile.js) + 3. 
create new folder(leader/newFolder) + 4. select folder(leader/newFolder) + 5. create new file(leader/newFolder/newFile2.js) + 6. select leader/newFile.js + 7. select leader/main.py + 8. select owner.py + `, async () => { + await _selectNode('leader/newFile.js'); + await _waitForTabShow('leader/newFile.js'); + expect($codeEditorInput.value).toBe('I am leader/newFile.js'); + expect($tabList.children.length).toBe(4); + + await _selectNode('leader/main.py'); + await _waitForTabShow('leader/main.py'); + expect($codeEditorInput.value).toBe('I am leader/main.py'); + expect($tabList.children.length).toBe(4); + + await _selectNode(testFileKey); + await _waitForTabShow(testFileKey); + expect($codeEditorInput.value).toBe(testFileContent); + expect($tabList.children.length).toBe(4); + }); + + it(` + 1. select file(leader/main.py) + 2. create new file(leader/newFile.js) + 3. create new folder(leader/newFolder) + 4. select folder(leader/newFolder) + 5. create new file(leader/newFolder/newFile2.js) + 6. rename file(leader/newFile.js) => file(leader/renameNewFile.js) + 7. rename file(leader/newFolder/newFile2.js) => file(leader/newFolder/renameNewFile2.js) + 8. select file(leader/newFolder/renameNewFile2.js) + 9. rename folder(leader/newFolder) => folder(leader/renameNewFolder) + `, async () => { + // There are 6 nodes(2 folder + 4 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + expect($tabList.children.length).toBe(4); + + await _renameNode('leader/newFile.js', 'renameNewFile.js'); + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-leader/newFile.js`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`leader/renameNewFile.js`)).toBeInTheDocument(); + }); + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + expect($tabList.children.length).toBe(3); + + await _renameNode('leader/newFolder/newFile2.js', 'renameNewFile2.js'); + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-leader/newFolder/newFile2.js`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`leader/newFolder/renameNewFile2.js`)).toBeInTheDocument(); + }); + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + expect($tabList.children.length).toBe(2); + + await _selectNode('leader/newFolder/renameNewFile2.js'); + await _waitForTabShow('leader/newFolder/renameNewFile2.js'); + expect($tabList.children.length).toBe(3); + expect($codeEditorInput.value).toBe('I am leader/newFolder/newFile2.js'); + + await _renameNode('leader/newFolder', 'renameNewFolder'); + expect($tabList.children.length).toBe(2); + + await waitFor(() => { + expect(wrapper.queryByTestId(`leader/newFolder`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`leader/renameNewFolder`)).toBeInTheDocument(); + }); + await _selectNode('leader/renameNewFolder'); // select folder to expand folder + + await waitFor(() => + expect( + wrapper.queryByTestId(`leader/renameNewFolder/renameNewFile2.js`), + ).toBeInTheDocument(), + ); + // There are 6 nodes(2 folder + 4 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + }); + }); + }); + + describe('isAsyncMode = true', () => { + beforeEach(async () => { + mockApi.fetchAlgorithmProjectFileTreeList.mockResolvedValue({ + data: testTreeList, + }); + mockApi.fetchAlgorithmProjectFileContentDetail.mockImplementation((id, { path }) => { + const { parentPath, fileName } = getFileInfoByFilePath(path); + return Promise.resolve({ + data: { + path: parentPath, + filename: fileName, + 
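+            // (editor's note) the mocked detail API echoes a deterministic body,
+            // `I am <path>`, derived from the requested path, so the async-mode
+            // tests below can assert on editor content by path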
content: `I am ${path}`,
+          } as FileContent,
+        });
+      });
+      mockApi.createOrUpdateAlgorithmProjectFileContent.mockImplementation(
+        (id, { path, filename, is_directory, file }) =>
+          Promise.resolve({
+            data: {
+              path,
+              filename,
+              content: file,
+            } as FileContent,
+          }),
+      );
+      mockApi.renameAlgorithmProjectFileContent.mockImplementation((id, { path, dest }) =>
+        Promise.resolve(null),
+      );
+      mockApi.deleteAlgorithmProjectFileContent.mockImplementation((id, { path }) =>
+        Promise.resolve(null),
+      );
+
+      giveWeakRandomKeySpy = jest.spyOn(helpers, 'giveWeakRandomKey').mockImplementation(() => {
+        return tempRandomKey;
+      });
+      tempNodeKey = `${tempRandomKey}`;
+
+      wrapper = render(
+        ,
+      );
+      expect(wrapper.getByText('I am title')).toBeInTheDocument();
+
+      $createRootFileBtn = wrapper.getByTestId('btn-create-file-on-root');
+      $createRootFolderBtn = wrapper.getByTestId('btn-create-folder-on-root');
+      $tabList = wrapper.getByTestId('tab-list');
+      $fileInput = wrapper.container.querySelector('input[type="file"]')!;
+
+      expect($createRootFileBtn).toBeInTheDocument();
+      expect($createRootFolderBtn).toBeInTheDocument();
+
+      await waitForLoadingEnd(wrapper);
+
+      await waitFor(() => wrapper.getAllByText(testFileKey)[0]); // file node
+      $treeContainer = wrapper.container.querySelector('.arco-tree')!;
+
+      const nodeList = wrapper.container.querySelectorAll('.arco-tree .arco-tree-node');
+      expect(nodeList.length).toBe(8);
+
+      await waitFor(() => wrapper.getByTestId(`tab-${testFileKey}`));
+
+      expect(wrapper.getByTestId(`tab-${testFileKey}`)).toHaveAttribute('data-is-active', 'true');
+
+      $codeEditorInput = wrapper.getByTestId('input-code-editor') as HTMLInputElement;
+      expect($codeEditorInput.value).toBe(`I am ${testFileKey}`);
+    });
+
+    afterEach(() => {
+      giveWeakRandomKeySpy.mockRestore();
+    });
+
+    it(`
+      1. create new file(newFile.js) on root
+      2. edit the new file(newFile.js) content
+      3. select the other file(t1.yaml) to trigger tab change, and save the value that belongs to the new file
+      4. edit the other file(t1.yaml) content
+      5. select new file(newFile.js) again to trigger tab change, and save the value that belongs to the other file
+      6. select first file(owner.py) again to trigger tab change, and save the value that belongs to the first file
+    `, async () => {
+      await _createFile('newFile.js', true);
+      await _waitForTabShow('newFile.js');
+      await _editCodeEditorValue('newFile.js', 'Edit: I am newFile.js');
+      await _selectNode('t1.yaml', true);
+      await _waitForTabShow('t1.yaml');
+      expect($codeEditorInput.value).toBe('I am t1.yaml');
+      await _editCodeEditorValue('t1.yaml', 'Edit: I am t1.yaml');
+      await _selectNode('newFile.js', true);
+      await _waitForTabShow('newFile.js');
+      expect($codeEditorInput.value).toBe('Edit: I am newFile.js');
+      await _selectNode(testFileKey, true);
+      await _waitForTabShow(testFileKey);
+      expect($codeEditorInput.value).toBe(`I am ${testFileKey}`);
+    });
+
+    it(`
+      1. select file(t1.yaml)
+      2. select file(t2.config)
+      3. select file(t3.json)
+      4. select file(t2.config)
+      5. close file tab(t3.json)
+      6. close file tab(t2.config)
+      7. close file tab(t1.yaml)
+      8. 
close file tab(owner.py) + `, async () => { + expect($tabList.children.length).toBe(1); + await _selectNode('t1.yaml', true); + await _waitForTabShow('t1.yaml'); + expect($codeEditorInput.value).toBe('I am t1.yaml'); + expect($tabList.children.length).toBe(2); + await _selectNode('t2.config', true); + await _waitForTabShow('t2.config'); + expect($codeEditorInput.value).toBe('I am t2.config'); + expect($tabList.children.length).toBe(3); + await _selectNode('t3.json', true); + await _waitForTabShow('t3.json'); + expect($codeEditorInput.value).toBe('I am t3.json'); + expect($tabList.children.length).toBe(4); + await _selectNode('t2.config', true); + await _waitForTabShow('t2.config'); + expect($tabList.children.length).toBe(4); + expect($codeEditorInput.value).toBe('I am t2.config'); + await _closeTab('t3.json'); + expect($tabList.children.length).toBe(3); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-t2.config`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe('I am t2.config'); + await _closeTab('t2.config'); + expect($tabList.children.length).toBe(2); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-owner.py`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe('I am owner.py'); + await _closeTab('t1.yaml'); + expect($tabList.children.length).toBe(1); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-owner.py`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe('I am owner.py'); + expect($codeEditorInput).not.toBeDisabled(); + await _closeTab(testFileKey); + expect($tabList.children.length).toBe(0); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(0); + expect($codeEditorInput.value).toBe(''); + expect($codeEditorInput).toBeDisabled(); + }); + + it(` + 1. upload file(uploadFile.js) + 2. select file(uploadFile.js) + 3. 
close file tab(uploadFile.js) + `, async () => { + expect($tabList.children.length).toBe(1); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 8, + ); + + mock.setup(); + mock.post('/api/v2/algorithm_projects/3/files', { + status: 200, + reason: 'ok', + body: JSON.stringify({ + data: { + path: '', + filename: 'uploadFile.js', + }, + }), + }); + + await _uploadFile('uploadFile.js', 'I am uploadFile.js', true, true); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 9, + ); + expect($tabList.children.length).toBe(1); + await _selectNode('uploadFile.js'); + await _waitForTabShow('uploadFile.js'); + expect($tabList.children.length).toBe(2); + expect($codeEditorInput.value).toBe('I am uploadFile.js'); + await _closeTab('uploadFile.js'); + expect($tabList.children.length).toBe(1); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect(wrapper.getByTestId(`tab-owner.py`)).toHaveAttribute('data-is-active', 'true'); + expect($codeEditorInput.value).toBe('I am owner.py'); + + mock.teardown(); + }); + + describe('Handle folder', () => { + beforeEach(async () => { + await _selectNode('leader/main.py', true); + await _waitForTabShow('leader/main.py'); + // There are 3 nodes(1 folder + 2 file) in this folder before create + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(3); + + await _createFile('leader/newFile.js', true); + await _waitForTabShow('leader/newFile.js'); + await _editCodeEditorValue('leader/newFile.js', 'I am leader/newFile.js'); + // There are 4 nodes(1 folder + 3 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(4); + + await _createFolder('leader/newFolder', true); + // There are 5 nodes(2 folder + 3 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(5); + + await _selectNode('leader/newFolder'); + await _createFile('leader/newFolder/newFile2.js', true); + await _waitForTabShow('leader/newFolder/newFile2.js'); + await _editCodeEditorValue( + 'leader/newFolder/newFile2.js', + 'I am leader/newFolder/newFile2.js', + ); + // There are 6 nodes(2 folder + 4 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + + // There are 4 tab(owner.py, leader/main.py, leader/newFile.js, leader/newFolder/newFile2.js) + expect($tabList.children.length).toBe(4); + expect($tabList.querySelectorAll('div[data-is-active="true"]').length).toBe(1); + expect($codeEditorInput.value).toBe('I am leader/newFolder/newFile2.js'); + }); + + it(` + 1. select file(leader/main.py) + 2. create new file(leader/newFile.js) + 3. create new folder(leader/newFolder) + 4. select folder(leader/newFolder) + 5. create new file(leader/newFolder/newFile2.js) + 6. select leader/newFile.js + 7. select leader/main.py + 8. select owner.py + `, async () => { + await _selectNode('leader/newFile.js', true); + await _waitForTabShow('leader/newFile.js'); + expect($codeEditorInput.value).toBe('I am leader/newFile.js'); + expect($tabList.children.length).toBe(4); + + await _selectNode('leader/main.py', true); + await _waitForTabShow('leader/main.py'); + expect($codeEditorInput.value).toBe('I am leader/main.py'); + expect($tabList.children.length).toBe(4); + + await _selectNode(testFileKey, true); + await _waitForTabShow(testFileKey); + expect($codeEditorInput.value).toBe(`I am ${testFileKey}`); + expect($tabList.children.length).toBe(4); + }); + + it(` + 1. select file(leader/main.py) + 2. 
create new file(leader/newFile.js) + 3. create new folder(leader/newFolder) + 4. select folder(leader/newFolder) + 5. create new file(leader/newFolder/newFile2.js) + 6. rename file(leader/newFile.js) => file(leader/renameNewFile.js) + 7. rename file(leader/newFolder/newFile2.js) => file(leader/newFolder/renameNewFile2.js) + 8. select file(leader/newFolder/renameNewFile2.js) + 9. rename folder(leader/newFolder) => folder(leader/renameNewFolder) + `, async () => { + // There are 6 nodes(2 folder + 4 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + expect($tabList.children.length).toBe(4); + + await _renameNode('leader/newFile.js', 'renameNewFile.js'); + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-leader/newFile.js`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`leader/renameNewFile.js`)).toBeInTheDocument(); + }); + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + expect($tabList.children.length).toBe(3); + + await _renameNode('leader/newFolder/newFile2.js', 'renameNewFile2.js'); + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-leader/newFolder/newFile2.js`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`leader/newFolder/renameNewFile2.js`)).toBeInTheDocument(); + }); + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + expect($tabList.children.length).toBe(2); + + await _selectNode('leader/newFolder/renameNewFile2.js', true); + await _waitForTabShow('leader/newFolder/renameNewFile2.js'); + expect($tabList.children.length).toBe(3); + expect($codeEditorInput.value).toBe('I am leader/newFolder/newFile2.js'); + + await _renameNode('leader/newFolder', 'renameNewFolder'); + expect($tabList.children.length).toBe(2); + + await waitFor(() => { + expect(wrapper.queryByTestId(`leader/newFolder`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`leader/renameNewFolder`)).toBeInTheDocument(); + }); + await _selectNode('leader/renameNewFolder'); // select folder to expand folder + + await waitFor(() => + expect( + wrapper.queryByTestId(`leader/renameNewFolder/renameNewFile2.js`), + ).toBeInTheDocument(), + ); + // There are 6 nodes(2 folder + 4 file) in this folder + expect(wrapper.getAllByTestId(new RegExp(`^leader/`)).length).toBe(6); + }); + }); + + describe('Handle API response error', () => { + it(` + 1. create new file(newFile.js) on root but API response error + 2. create new file(newFile.js) on root + 3. rename file(newFile.js) => file(renameNewFile.js) but API response error + 4. rename file(newFile.js) => file(renameNewFile.js) + 5. select file(renameNewFile.js) + 6. delete file(renameNewFile.js) but API response error + 7. delete file(renameNewFile.js) + 8. 
upload file(uploadFile.js) on root but API response error + `, async () => { + mockApi.createOrUpdateAlgorithmProjectFileContent.mockImplementationOnce( + (id, { path, filename, is_directory, file }) => + Promise.reject({ + data: null, + }), + ); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 8, + ); + await _createFile('newFile.js', true, false); + expect(wrapper.queryByTestId(`tab-newFile.js`)).not.toBeInTheDocument(); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 8, + ); + await _createFile('newFile.js', true, true); + await _waitForTabShow('newFile.js'); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 9, + ); + await _editCodeEditorValue('newFile.js', 'Edit: I am newFile.js'); + + mockApi.renameAlgorithmProjectFileContent.mockImplementationOnce((id, { path, dest }) => + Promise.reject({ + data: null, + }), + ); + await _renameNode('newFile.js', 'renameNewFile.js', true, false); + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-newFile.js`)).toBeInTheDocument(); + expect(wrapper.queryByTestId(`renameNewFile.js`)).not.toBeInTheDocument(); + }); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 9, + ); + expect($tabList.children.length).toBe(2); + + await _renameNode('newFile.js', 'renameNewFile.js', true, true); + await waitFor(() => { + expect(wrapper.queryByTestId(`tab-newFile.js`)).not.toBeInTheDocument(); + expect(wrapper.queryByTestId(`renameNewFile.js`)).toBeInTheDocument(); + }); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 9, + ); + expect($tabList.children.length).toBe(1); + + await _selectNode('renameNewFile.js'); + await _waitForTabShow('renameNewFile.js'); + expect($codeEditorInput.value).toBe('Edit: I am newFile.js'); + expect($tabList.children.length).toBe(2); + + mockApi.deleteAlgorithmProjectFileContent.mockImplementationOnce((id, { path }) => + Promise.reject({ + data: null, + }), + ); + mock.setup(); + mock.post('/api/v2/algorithm_projects/3/files', { + status: 400, + reason: 'Bad request', + body: '', + }); + + await _uploadFile('uploadFile.js', 'I am uploadFile.js', true, false); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe( + 9, + ); + expect($tabList.children.length).toBe(2); + + // TODO: mock upload API success + + mock.teardown(); + }); + }); + }); + + it('should only show one input when focus mode = true', async () => { + wrapper = render( + , + ); + + await waitFor(() => wrapper.getAllByText(testFileKey)[0]); // file node + + $createRootFileBtn = wrapper.getByTestId('btn-create-file-on-root'); + $createRootFolderBtn = wrapper.getByTestId('btn-create-folder-on-root'); + + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe(8); + // Only 0 input dom should be rendered + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(0); + fireEvent.click($createRootFileBtn); + + // Only 1 input dom should be rendered + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe(9); + fireEvent.click($createRootFileBtn); + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe(9); + fireEvent.click($createRootFileBtn); + 
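+    // Editor's sketch (hypothetical helper, not part of the original test):
+    // while an inline-create input is open, further clicks on the create
+    // buttons should be idempotent; extracting the query makes that
+    // invariant explicit.
+    const countInlineCreateInputs = () =>
+      wrapper.container.querySelectorAll('.arco-tree input').length;
+    expect(countInlineCreateInputs()).toBe(1);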
expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe(9); + fireEvent.click($createRootFolderBtn); + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe(9); + fireEvent.click($createRootFolderBtn); + expect(wrapper.container.querySelectorAll('.arco-tree input').length).toBe(1); + expect(wrapper.container.querySelectorAll('.arco-tree .arco-tree-node').length).toBe(9); + }); +}); diff --git a/web_console_v2/client/src/components/CodeEditorModal/index.module.less b/web_console_v2/client/src/components/CodeEditorModal/index.module.less new file mode 100644 index 000000000..c25155e30 --- /dev/null +++ b/web_console_v2/client/src/components/CodeEditorModal/index.module.less @@ -0,0 +1,12 @@ +.code_editor_model_wrapper{ + :global{ + .arco-modal-content{ + padding: 0px; + } + } +} + +.code_editor_upload{ + height: 18px; + margin-right: 8px; +} diff --git a/web_console_v2/client/src/components/CodeEditorModal/index.tsx b/web_console_v2/client/src/components/CodeEditorModal/index.tsx new file mode 100644 index 000000000..4bf154b54 --- /dev/null +++ b/web_console_v2/client/src/components/CodeEditorModal/index.tsx @@ -0,0 +1,1069 @@ +import React, { FC, useState, useMemo, useRef, useReducer } from 'react'; +import styled from 'styled-components'; +import { useTranslation } from 'react-i18next'; +import classNames from 'classnames'; + +import { + createOrUpdateAlgorithmProjectFileContent, + renameAlgorithmProjectFileContent, + deleteAlgorithmProjectFileContent, +} from 'services/algorithm'; + +import { Button, Message, Tooltip, Modal, Upload } from '@arco-design/web-react'; +import { IconCodeSquare } from '@arco-design/web-react/icon'; +import { Resizable } from 're-resizable'; +import FileExplorer, { + FileDataNode, + FileExplorerExposedRef, + Key, + FileData, + fileExtToIconMap, +} from 'components/FileExplorer'; +import { ArrowUpFill, FolderAddFill, FileAddFill, Close, MenuFold } from 'components/IconPark'; +import CodeEditor, { Action } from 'components/CodeEditor'; +import StateIndicator from 'components/StateIndicator'; +import Tab from './Tab'; + +import { useSubscribe } from 'hooks'; +import { ModalProps } from '@arco-design/web-react/es/Modal/interface'; +import { Monaco } from '@monaco-editor/react'; +import type * as monaco from 'monaco-editor/esm/vs/editor/editor.api'; +import { UploadItem } from '@arco-design/web-react/es/Upload'; +import { FileContent } from 'typings/algorithm'; + +import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; +import { transformRegexSpecChar, formatLanguage, getJWTHeaders } from 'shared/helpers'; +import { buildRelativePath, getFileInfoByFilePath, readAsTextFromFile } from 'shared/file'; +import { MixinCommonTransition, MixinSquare, MixinFlexAlignCenter } from 'styles/mixins'; +import { getAlgorithmProjectProps, getAlgorithmProps } from 'components/shared'; +import { CONSTANTS } from 'shared/constants'; +import styles from './index.module.less'; + +function MixinHeader() { + return ` + display: flex; + justify-content: space-between; + align-items: center; + padding: 0 16px; + border-bottom: 1px solid var(--border-color); + `; +} + +const Layout = styled.main` + --bg: #fff; + --font-color: #1d2129; + --border-color: #e5e8ef; + --action-button-color: #86909c; + --action-button-color-hover: #4e5969; + --tab-header-bg: #f2f3f8; + + 
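+  /* (editor's note) --left-width is only the initial tree width; the resize
+     handler clamps it between MIN_FILE_TREE_WIDTH (220) and
+     MAX_FILE_TREE_WIDTH (600), defined below */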
--left-width: 272px; + + display: grid; + grid-template-areas: 'head head' 'left right'; + grid-template-rows: 46px calc(100vh - 46px); + grid-template-columns: var(--left-width) calc(max(500px, 100vw) - var(--left-width)); + + height: 100vh; + width: 100vw; + min-width: 500px; + min-height: 500px; + background-color: var(--bg); + overflow: hidden; +`; + +const Header = styled.div` + ${MixinHeader()}; + grid-area: head; +`; +const FileExplorerHeader = styled.div` + ${MixinHeader()}; + height: 36px; +`; +const TabHeader = styled.div` + height: 36px; + width: 100%; + border-bottom: 1px solid var(--border-color); + background-color: var(--tab-header-bg); + overflow-x: auto; + white-space: nowrap; +`; + +const Left = styled(Resizable)` + position: relative; + height: 100%; + grid-area: left; + display: flex; + flex-direction: column; + border-right: 1px solid var(--border-color); + z-index: 1; // overlay right layout + background-color: var(--bg); + padding-bottom: 40px; + + .resize-bar-wrapper { + &.resizing > div, + > div:hover { + ${MixinCommonTransition('background-color')} + padding:0 3px; + background-clip: content-box; + background-color: var(--primaryColor); + } + } +`; + +const Right = styled.div` + position: relative; + height: 100%; + display: flex; + grid-area: right; + flex-direction: column; + background-color: var(--bg); +`; + +const Title = styled.span` + font-size: 14px; + font-weight: 500; + color: var(--font-color); +`; + +const ActionContainer = styled.span` + > button:not(:last-child) { + margin-right: 8px; + } + > span:not(:last-child) { + margin-right: 8px; + } + .arco-upload-trigger { + display: inline; + } + .anticon { + color: var(--action-button-color); + cursor: pointer; + font-size: 18px; + &:not(:last-child) { + margin-right: 8px; + } + &:hover { + color: var(--action-button-color-hover); + } + &.anticon-close { + font-size: 14px; + } + } +`; + +const FoldButton = styled.div` + ${MixinSquare(24)} + ${MixinFlexAlignCenter()} + + position:absolute; + right: 8px; + bottom: 8px; + display: inline-flex; + background-color: rgb(var(--gray-1)); + color: rgb(var(--gray-6)); + border-radius: 2px; + cursor: pointer; + + &:hover { + background-color: rgb(var(--gray-2)); + } +`; + +export const MIN_FILE_TREE_WIDTH = 220; +export const MAX_FILE_TREE_WIDTH = 600; + +const resizeableEnable = { + right: true, +}; + +export type BaseCodeEditorProps = { + /** Algorithm project id / Algorithm id */ + id?: ID; + /** Modal display title */ + title?: string; + isReadOnly?: boolean; + initialFileData?: FileData; + isAsyncMode?: boolean; + /** On reset button click, only work in sync mode */ + onReset?: () => void; + /** On save button click, it will send latest fileData in sync mode */ + onSave?: (fileData: FileData) => void; + /** On close button click */ + onClose?: () => void; + getFileTreeList?: () => Promise; + getFile?: (filePath: string) => Promise; + onFileDataChange?: (fileData: FileData) => void; + /** Call this fn when they are some file are changed including create/delete/rename/change file content */ + onContentChange?: () => void; +}; + +export type AsyncBaseCodeEditorProps = Omit & + Required>; + +export interface FileTab { + node: FileDataNode; + model: monaco.editor.ITextModel | null; +} + +export type Props = Omit & BaseCodeEditorProps; +export type AsyncProps = Omit & Required>; +export type AlgorithmProjectFormButtonProps = Omit & { + /** Container width */ + width?: number | string; + /** Container height */ + height?: number | string; +}; + +export const 
BaseCodeEditor: FC & {
+  AlgorithmProject: FC;
+  Algorithm: FC;
+} = ({
+  id,
+  title = '',
+  initialFileData = {},
+  isReadOnly = false,
+  isAsyncMode = false,
+  getFileTreeList,
+  getFile,
+  onFileDataChange: onFileDataChangeFromProps,
+  onContentChange,
+  ...restProps
+}) => {
+  const { t } = useTranslation();
+
+  const [selectedNode, setSelectedNode] = useState();
+  const [editingNode, setEditingNode] = useState();
+  const [fileData, setFileData] = useState(initialFileData ?? {});
+  const [fileTabList, setFileTabList] = useState([]);
+  const [leftWidth, setLeftWidth] = useState(272);
+  const [isLoading, setIsLoading] = useState(false);
+
+  const fileExplorerRef = useRef(null);
+  const tempCodeRef = useRef('');
+  const codeEditorInstance = useRef();
+  const monacoInstance = useRef();
+  const pendingCreateOrFocusTabQueue = useRef([]);
+  const tempLeftWidth = useRef(null);
+  const [isFocusMode, setIsFocusMode] = useState(false);
+
+  const fileKeyToViewState = useRef<{
+    [key: string]: monaco.editor.ICodeEditorViewState | null;
+  }>({});
+
+  const [, forceUpdate] = useReducer((x) => x + 1, 0);
+
+  const selectedKeys = useMemo(() => {
+    return selectedNode ? [selectedNode.key] : [];
+  }, [selectedNode]);
+
+  // Subscribe to the CodeEditor save action (cmd/ctrl + s) to save the current file
+  useSubscribe(Action.Save, () => saveEditorCode(), [fileData, editingNode]);
+
+  return (
+    
+ {title} + + {!isReadOnly && ( + <> + + {!isAsyncMode && ( + + )} + + )} +
+ { + // If trigger onResize first time, save prev left width + if (!tempLeftWidth.current) { + tempLeftWidth.current = leftWidth; + } + let nextLeftWidth = tempLeftWidth.current + d.width; + + if (nextLeftWidth <= MIN_FILE_TREE_WIDTH) { + nextLeftWidth = MIN_FILE_TREE_WIDTH; + } else if (nextLeftWidth >= MAX_FILE_TREE_WIDTH) { + nextLeftWidth = MAX_FILE_TREE_WIDTH; + } + setLeftWidth(nextLeftWidth); + }} + onResizeStop={(e, direction, ref, d) => { + // Reset tempLeftWidth + tempLeftWidth.current = null; + // Only to refresh handleWrapperClass + forceUpdate(); + }} + > + + 文件列表 + {!isReadOnly && ( + + { + return isAsyncMode; + }} + data={(file) => { + const { name } = file; + + const folderKey = selectedNode + ? selectedNode.isFolder + ? selectedNode.key + : selectedNode.parentKey + : ''; + + return { + path: folderKey, + filename: name, + }; + }} + > + + + + + + + + + + + + )} + + + + + + + + + + {fileTabList.map((item) => { + return ( + { + // prevent select same tab + if (item.node.key === selectedNode?.key) { + return; + } + focusTab(item); + }} + onClose={() => { + onDeleteFinish([item.node.key], item.node.key, true); + }} + /> + ); + })} + + + +
+ ); + + async function createOrUpdateNodeInBackEnd( + filePath: string, + isFolder: boolean, + code: string = '', + ) { + if (!isAsyncMode || !id) return; + + const { parentPath, fileName } = getFileInfoByFilePath(filePath); + setIsLoading(true); + try { + const result = await createOrUpdateAlgorithmProjectFileContent(id, { + path: parentPath, + filename: fileName, + is_directory: isFolder, + file: code, + }); + setIsLoading(false); + return result; + } catch (error) { + Message.error(error.message); + setIsLoading(false); + return Promise.reject(error); + } + } + async function renameNodeInBackEnd(oldPath: string, newPath: string) { + if (!isAsyncMode || !id) return; + + setIsLoading(true); + try { + const result = await renameAlgorithmProjectFileContent(id, { + path: oldPath, + dest: newPath, + }); + setIsLoading(false); + return result; + } catch (error) { + Message.error(error.message); + setIsLoading(false); + return Promise.reject(error); + } + } + async function deleteNodeInBackEnd(filePath: string) { + if (!isAsyncMode || !id) return; + + setIsLoading(true); + try { + const result = await deleteAlgorithmProjectFileContent(id, { path: filePath }); + setIsLoading(false); + return result; + } catch (error) { + Message.error(error.message); + setIsLoading(false); + return Promise.reject(error); + } + } + + function resetState() { + tempCodeRef.current = ''; + fileKeyToViewState.current = {}; + // dispose model + fileTabList.forEach((item) => { + item.model?.dispose(); + }); + setFileTabList([]); + setSelectedNode(undefined); + setEditingNode(undefined); + } + + function getCodeEditorInstance(editor: monaco.editor.IStandaloneCodeEditor, monaco: Monaco) { + codeEditorInstance.current = editor; + monacoInstance.current = monaco; + + // dequeue pending queue + if (pendingCreateOrFocusTabQueue.current.length > 0) { + pendingCreateOrFocusTabQueue.current.forEach((node) => createOrFocusTab(node)); + + // reset pending queue + pendingCreateOrFocusTabQueue.current = []; + } + } + + function createOrFocusTab(node: FileDataNode) { + if (!monacoInstance.current || !codeEditorInstance.current) { + // enqueue pending queue + pendingCreateOrFocusTabQueue.current.push(node); + return; + } + const monaco = monacoInstance.current; + const tempUri = monaco.Uri.file(String(node.key)); + let model = monaco.editor.getModel(tempUri); + // if model not exist create, otherwise replace value (for editor resets). + if (model === null) { + model = monaco.editor.createModel( + node.code ?? '', + formatLanguage(node.fileExt ?? '') ?? 
undefined, + tempUri, + ); + + // add new tab + setFileTabList((prevState) => [ + ...prevState, + { + node, + model, + }, + ]); + } + + // focus editor + // codeEditorInstance.current?.focus(); + // save editor view state + if (editingNode) { + const tempState = codeEditorInstance.current.saveViewState(); + fileKeyToViewState.current[String(editingNode?.key)] = tempState; + } + fileKeyToViewState.current[node.key] = null; + + // replace editor model + codeEditorInstance.current?.setModel(model); + + if (fileKeyToViewState.current[node.key]) { + codeEditorInstance.current?.restoreViewState(fileKeyToViewState.current[node.key]!); + } + } + + async function saveEditorCode(node?: FileDataNode) { + // save prev tempCodeRef + if (editingNode) { + const tempCode = tempCodeRef.current; + try { + if (isAsyncMode) { + // Save code in Back-end + await createOrUpdateNodeInBackEnd( + String(editingNode.key), + Boolean(editingNode.isFolder), + tempCode, + ); + } + + setFileData((prevState) => { + return { + ...prevState, + [editingNode.key]: tempCode, + }; + }); + onFileDataChangeFromProps?.({ ...fileData, [editingNode.key]: tempCode }); + onContentChange?.(); + } catch (error) { + // Do nothing + } + } + if (node) { + tempCodeRef.current = fileData[String(node.key)] || node.code || ''; + } + } + + function focusTab(tab: FileTab, isSaveCode = true) { + if (!monacoInstance.current || !codeEditorInstance.current) { + return; + } + + if (isSaveCode) { + saveEditorCode(tab.node); + } else { + tempCodeRef.current = fileData[String(tab.node.key)] || tab.node.code || ''; + } + + // focus editor + // codeEditorInstance.current?.focus(); + const tempState = codeEditorInstance.current.saveViewState(); + fileKeyToViewState.current[String(editingNode?.key)] = tempState; + + setSelectedNode(tab.node); + setEditingNode(tab.node); + codeEditorInstance.current?.setModel(tab.model); + + if (fileKeyToViewState.current[tab.node.key]) { + codeEditorInstance.current?.restoreViewState(fileKeyToViewState.current[tab.node.key]!); + } + } + + function onDeleteFinish(deleteKeys: Key[], firstDeleteKey: Key, isTabClick = false) { + const newFileTabList: FileTab[] = []; + fileTabList.forEach((item) => { + if (deleteKeys.includes(item.node.key)) { + // dispose model + item.model?.dispose(); + + // clear fileKeyToViewState + delete fileKeyToViewState.current[item.node.key]; + } else { + newFileTabList.push(item); + } + }); + + // delete active node + if (selectedNode && deleteKeys.includes(selectedNode.key)) { + setSelectedNode(undefined); + } + if (editingNode && deleteKeys.includes(editingNode.key)) { + setEditingNode(undefined); + // default select first tab + if (newFileTabList.length > 0) { + // must clear node state, otherwise saveEditorCode will create extra same node + focusTab(newFileTabList[0], false); + } + } + + setFileTabList(newFileTabList); + + if (newFileTabList.length === 0) { + resetState(); + } + if (!isTabClick) { + onContentChange?.(); + } + } + async function onRenameFinish(node: FileDataNode, oldKey: Key, newKey: Key) { + onContentChange?.(); + + if (!node.isFolder) { + // rename file node + if (selectedNode?.key === oldKey) { + // must clear node state, otherwise saveEditorCode will create extra same node + setSelectedNode(undefined); + setEditingNode(undefined); + } + + const oldTabIndex = fileTabList.findIndex((item) => item.node.key === oldKey); + + if (oldTabIndex === -1) { + return; + } + const { model: oldModel } = fileTabList[oldTabIndex]; + + // rename fileKeyToViewState + const oldViewState = 
fileKeyToViewState.current[String(oldKey)]; + fileKeyToViewState.current[String(newKey)] = oldViewState; + delete fileKeyToViewState.current[String(oldKey)]; + + // dispose oldModel + oldModel?.dispose(); + + // TODO: Find a good way to replace oldTab to newTab, no just only delete oldTab + setFileTabList([...fileTabList.slice(0, oldTabIndex), ...fileTabList.slice(oldTabIndex + 1)]); + } else { + // rename folder node + const allOldFileKey: Key[] = []; + + const regx = new RegExp(`^${transformRegexSpecChar(String(oldKey))}`); // prefix originKey + // find all file node key under this folder + Object.keys(fileData).forEach((key) => { + if (!!key.match(regx)) { + allOldFileKey.push(key); + } + }); + + if (allOldFileKey.includes(String(selectedNode?.key))) { + // must clear node state, otherwise saveEditorCode will create extra same node + setSelectedNode(undefined); + setEditingNode(undefined); + } + + // rename fileKeyToViewState + const filteredFileTabList = fileTabList.filter((item) => { + const { node: oldNode, model: oldModel } = item; + if (allOldFileKey.includes(oldNode.key)) { + const oldViewState = fileKeyToViewState.current[String(oldNode.key)]; + const tnewKey = String(oldNode.key).replace(regx, String(newKey)); + + fileKeyToViewState.current[String(tnewKey)] = oldViewState; + delete fileKeyToViewState.current[String(oldNode.key)]; + + // dispose oldModel + oldModel?.dispose(); + + // delete this tab + return false; + } + // reserve this tab + return true; + }); + setFileTabList(filteredFileTabList); + } + } + function onCreateFinish(path: Key, isFolder: boolean) { + onContentChange?.(); + } + function onClickRename(node: FileDataNode) { + saveEditorCode(); + } + function onFocusModeChange(focusMode: boolean) { + setIsFocusMode(focusMode); + } + + function onCodeChange(val?: string) { + // store temp code + tempCodeRef.current = val ?? 
'';
+  }
+  function onFileDataChange(fileData: FileData) {
+    setFileData(fileData);
+    onFileDataChangeFromProps?.(fileData);
+  }
+  function onFileNodeSelect(filePath: Key, fileContent: string, node: FileDataNode) {
+    // Prevent selecting the same node twice
+    if (node.key === selectedNode?.key) {
+      return;
+    }
+    setSelectedNode(node);
+    saveEditorCode(node);
+    setEditingNode(node);
+    createOrFocusTab(node);
+  }
+  function onSelect(
+    selectedKeys: Key[],
+    info: {
+      selected: boolean;
+      selectedNodes: FileDataNode[];
+      node: FileDataNode;
+      e: Event;
+    },
+  ) {
+    // folder or file
+    if (info.selected && info.selectedNodes && info.selectedNodes[0]) {
+      const currentNode = info.selectedNodes[0];
+      // Prevent selecting the same node twice
+      if (currentNode.key === selectedNode?.key) {
+        return;
+      }
+
+      setSelectedNode(currentNode);
+    }
+  }
+
+  async function onUploadChange(fileList: UploadItem[], info: UploadItem) {
+    const { status, response, name } = info;
+    switch (status) {
+      case 'done':
+        const resData: Omit = (response as any).data;
+        const fileKey = buildRelativePath(resData);
+
+        // Set FileExplorer's inner filePathToIsReadMap, so the file content API is
+        // fetched when this uploaded file is selected
+        fileExplorerRef.current?.setFilePathToIsReadMap({
+          [fileKey]: false,
+        });
+
+        // Display node in file explorer
+        setFileData((prevState) => {
+          return {
+            ...prevState,
+            [fileKey]: '',
+          };
+        });
+        // Key by fileKey (path + filename), mirroring setFileData above
+        onFileDataChangeFromProps?.({
+          ...fileData,
+          [fileKey]: '',
+        });
+        onContentChange?.();
+        setIsLoading(false);
+        break;
+      case 'error':
+        Message.error(t('upload.msg_upload_fail', { fileName: info.name }));
+        setIsLoading(false);
+        break;
+      case 'uploading':
+        setIsLoading(true);
+        break;
+      // When beforeUpload returns false, status will be undefined.
+      // In this case, isAsyncMode = false, so read the file content locally
+      case undefined:
+        const code = await readAsTextFromFile(info.originFile as any);
+
+        const folderKey = selectedNode
+          ? selectedNode.isFolder
+            ? selectedNode.key
+            : selectedNode.parentKey
+          : '';
+
+        const path = `${folderKey ? 
`${folderKey}/` : ''}${name}`; + setFileData((prevState) => { + return { + ...prevState, + [path]: code, + }; + }); + onFileDataChangeFromProps?.({ ...fileData, [path]: code }); + onContentChange?.(); + + break; + default: + break; + } + } + function onAddFolderOnRoot() { + if (isLoading || isFocusMode) return; + fileExplorerRef.current?.createFileOrFolder(undefined, false); + } + function onAddFileOnRoot() { + if (isLoading || isFocusMode) return; + + fileExplorerRef.current?.createFileOrFolder(undefined, true); + } + function onFoldClick() { + setLeftWidth(MIN_FILE_TREE_WIDTH); + } + + async function onSave() { + if (isAsyncMode) { + await saveEditorCode(); + Message.success(t('algorithm_management.form_code_changed')); + } else { + let finalFileData = fileData; + if (editingNode) { + const tempCode = tempCodeRef.current; + finalFileData = { + ...fileData, + [editingNode.key]: tempCode, + }; + setFileData(finalFileData); + onFileDataChangeFromProps?.(finalFileData); + } + Message.success(t('algorithm_management.form_code_changed')); + restProps.onSave?.(finalFileData); + } + } + function onReset() { + if (isAsyncMode) return; + setFileData(initialFileData); + onFileDataChangeFromProps?.(initialFileData); + + resetState(); + + restProps.onReset?.(); + } + async function onClose() { + if (restProps.onClose) { + // Clear all code editor model info in memory + // If no clear, it will effect create + resetState(); + restProps.onClose(); + } + } + + async function beforeCreate(node: FileDataNode, key: Key, isFolder: boolean) { + try { + if (isAsyncMode) { + // Create new file/folder when API response success + const result = await createOrUpdateNodeInBackEnd(String(key), isFolder); + const fileKey = result && result.data ? buildRelativePath(result.data) : key; + return String(fileKey); + } + } catch (error) { + return false; // return false to prevent create file/folder + } + return true; + } + async function beforeRename(node: FileDataNode, oldKey: Key, newKey: Key, isFolder: boolean) { + try { + if (isAsyncMode) { + // Save code in Back-end if this node is editingNode + if (editingNode?.key === node.key) { + // Save code in Back-end + await createOrUpdateNodeInBackEnd( + String(editingNode.key), + Boolean(editingNode.isFolder), + tempCodeRef.current || '', + ); + } + // Rename node in Back-end + await renameNodeInBackEnd(String(oldKey), String(newKey)); + } + } catch (error) { + return false; // return false to prevent rename file/folder + } + return true; + } + async function beforeDelete(key: Key, isFolder: boolean) { + try { + if (isAsyncMode) { + await deleteNodeInBackEnd(String(key)); + } + } catch (error) { + return false; // return false to prevent delete file/folder + } + return true; + } +}; + +const _BaseCodeEditorWithAlgorithmProjectAPI: FC = ({ + id, + ...restProps +}) => { + return ; +}; + +const _BaseCodeEditorWithAlgorithmAPI: FC = ({ id, ...restProps }) => { + return ; +}; + +export const CodeEditorModal: FC & { + AlgorithmProject: FC; + Algorithm: FC; + AlgorithmProjectFormButton: FC; +} = ({ + id, + isAsyncMode = false, + visible = false, + title = CONSTANTS.EMPTY_PLACEHOLDER, + initialFileData, + isReadOnly = false, + onReset = () => {}, + onSave = () => {}, + onClose = () => {}, + getFileTreeList, + getFile, + onFileDataChange, + onContentChange, + ...resetProps +}) => { + return ( + + + + ); +}; + +const _CodeEditorModalWithAlgorithmProjectAPI: FC = ({ id, ...restProps }) => { + return ; +}; + +const _CodeEditorModalWithAlgorithmAPI: FC = ({ id, ...restProps }) => { + 
return ; +}; + +const FormButtonContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + padding: 24px 0; + border: 1px dashed var(--lineColor); + border-radius: 2px; + background: rgb(var(--gray-2)); + cursor: pointer; + ${MixinCommonTransition('border-color')} + + &:hover { + border-color: var(--primaryColor); + } +`; +const FormButtonTitle = styled.div` + color: var(--textColorStrong); + font-weight: 500; + font-size: 16px; + margin: 14px 0 4px; +`; +const FormButtonTip = styled.span` + display: inline-block; + color: var(--textColorSecondary); + font-size: 12px; +`; +export const AlgorithmProjectFormButton: FC = ({ + id, + width = '100%', + height = 140, + ...restProps +}) => { + const [isShowCodeEditor, setIsShowCodeEditor] = useState(false); + const [isEdited, setIsEdited] = useState(false); + + return ( + <> + { + setIsShowCodeEditor(true); + }} + > + + 代码编辑器 +
+ + {isEdited ? '更改内容已在后台保存' : '点击进入代码编辑器'} +
+
+ { + setIsShowCodeEditor(false); + }} + onContentChange={() => { + if (!isEdited) { + setIsEdited(true); + } + }} + {...getAlgorithmProjectProps({ id: id! })} + {...restProps} + /> + + ); +}; + +BaseCodeEditor.AlgorithmProject = _BaseCodeEditorWithAlgorithmProjectAPI; +BaseCodeEditor.Algorithm = _BaseCodeEditorWithAlgorithmAPI; +CodeEditorModal.AlgorithmProject = _CodeEditorModalWithAlgorithmProjectAPI; +CodeEditorModal.Algorithm = _CodeEditorModalWithAlgorithmAPI; +CodeEditorModal.AlgorithmProjectFormButton = AlgorithmProjectFormButton; + +export default CodeEditorModal; diff --git a/web_console_v2/client/src/components/CodePreview/index.tsx b/web_console_v2/client/src/components/CodePreview/index.tsx new file mode 100644 index 000000000..35cf76634 --- /dev/null +++ b/web_console_v2/client/src/components/CodePreview/index.tsx @@ -0,0 +1,184 @@ +/* istanbul ignore file */ + +import React, { FC, useState } from 'react'; +import styled from 'styled-components'; +import classNames from 'classnames'; + +import { MixinCommonTransition } from 'styles/mixins'; +import { formatLanguage } from 'shared/helpers'; + +import { Resizable } from 're-resizable'; +import CodeEditor from 'components/CodeEditor'; +import FileExplorer, { FileDataNode } from 'components/FileExplorer'; +import { + getAlgorithmProjectProps, + getAlgorithmProps, + getPeerAlgorithmProps, + getPendingAlgorithmProps, +} from 'components/shared'; + +const Container = styled.div` + display: flex; + flex: 1; + border: 1px solid var(--lineColor); + + .resize-bar-wrapper { + &.resizing > div, + > div:hover { + ${MixinCommonTransition('background-color')} + padding:0 3px; + background-clip: content-box; + background-color: var(--primaryColor); + } + } +`; + +const StyledResizable = styled(Resizable)` + position: relative; + padding: 12px 0 0 0; + overflow: hidden; + border-right: 1px solid var(--lineColor); + &::after { + position: absolute; + right: 0px; + content: ''; + width: 1px; + background-color: var(--lineColor); + } +`; + +const Right = styled.div` + flex: 1; + // when resizing, max-width + overflow will trigger automaticLayout + overflow: hidden; +`; + +export type Props = { + /** Algorithm project id / Algorithm id */ + id?: ID; + isAsyncMode?: boolean; + getFileTreeList?: () => Promise; + getFile?: (filePath: string) => Promise; + /** Container height */ + height?: number | string; + /** Default display value. 
Only works in sync mode; it has no effect in async mode */
+  fileData?: { [filePath: string]: string };
+  isLoading?: boolean;
+};
+
+export type AsyncProps = Omit & Required>;
+
+type Key = string | number;
+
+export const MIN_FILE_TREE_WIDTH = 270;
+export const MAX_FILE_TREE_WIDTH = 600;
+
+const resizeableDefaultSize = {
+  width: MIN_FILE_TREE_WIDTH,
+  height: 'auto',
+};
+const resizeableEnable = {
+  right: true,
+};
+
+const CodePreview: FC & {
+  Algorithm: FC;
+  AlgorithmProject: FC;
+  PendingAlgorithm: FC;
+  PeerAlgorithm: FC;
+} = ({ id, fileData, height = 480, isLoading, isAsyncMode = false, getFileTreeList, getFile }) => {
+  const [currentCode, setCurrentCode] = useState('');
+  const [currentLanguage, setCurrentLanguage] = useState<
+    'json' | 'python' | 'javascript' | 'java' | 'go'
+  >('python');
+
+  const [isResizing, setIsResizing] = useState(false);
+
+  return (
+    
+      {
+        if (isResizing) return;
+        setIsResizing(true);
+      }}
+      onResizeStop={(e, direction, ref, d) => {
+        setIsResizing(false);
+      }}
+    >
+      
+      
+      
+      
+      
+      
+  );
+
+  function onFileNodeSelect(filePath: Key, fileContent: string, node: FileDataNode) {
+    setCurrentCode(fileContent ?? '');
+    setCurrentLanguage(formatLanguage(node.fileExt ?? '') as any);
+  }
+};
+
+const _WithAlgorithmProjectAPI: FC = ({ id, ...restProps }) => {
+  return ;
+};
+
+const _WithAlgorithmAPI: FC = ({ id, ...restProps }) => {
+  return ;
+};
+
+const _WithPendingAlgorithmAPI: FC = ({
+  projId,
+  id,
+  ...restProps
+}) => {
+  return ;
+};
+
+const _WithPeerAlgorithmAPI: FC = ({
+  id,
+  participantId,
+  projId,
+  uuid,
+  ...restProps
+}) => {
+  return (
+    
+  );
+};
+
+CodePreview.AlgorithmProject = _WithAlgorithmProjectAPI;
+CodePreview.Algorithm = _WithAlgorithmAPI;
+CodePreview.PendingAlgorithm = _WithPendingAlgorithmAPI;
+CodePreview.PeerAlgorithm = _WithPeerAlgorithmAPI;
+
+export default CodePreview;
diff --git a/web_console_v2/client/src/components/CodePreviewCollapse/index.tsx b/web_console_v2/client/src/components/CodePreviewCollapse/index.tsx
new file mode 100644
index 000000000..2966d7944
--- /dev/null
+++ b/web_console_v2/client/src/components/CodePreviewCollapse/index.tsx
@@ -0,0 +1,153 @@
+/* istanbul ignore file */
+
+import React, { FC, useState } from 'react';
+import styled from 'styled-components';
+import { Collapse } from '@arco-design/web-react';
+
+import { formatLanguage } from 'shared/helpers';
+
+import { Resizable } from 're-resizable';
+import CodeEditor from 'components/CodeEditor';
+import FileExplorer, { FileDataNode } from 'components/FileExplorer';
+
+const Content = styled.div`
+  display: flex;
+  flex: 1;
+  height: 443px;
+`;
+
+const StyledCollapse = styled(Collapse)`
+  // when resizing, max-width + overflow will trigger automaticLayout
+  // Neither calc(100%) nor calc(100% - 0px) works here, so use calc(100% - 1px)
+  max-width: calc(100% - 1px);
+
+  .ant-collapse-content > .ant-collapse-content-box {
+    padding: 0;
+  }
+  .ant-collapse-header {
+    background-color: #fff;
+  }
+`;
+
+const StyledResizable = styled(Resizable)`
+  position: relative;
+  padding: 12px 0;
+  border-right: 1px solid var(--lineColor);
+  &::after {
+    position: absolute;
+    right: 0px;
+    content: '';
+    width: 1px;
+    background-color: var(--lineColor);
+  }
+`;
+
+const Right = styled.div`
+  flex: 1;
+  // when resizing, max-width + overflow will trigger automaticLayout
+  overflow: hidden;
+`;
+
+const Title = styled.span`
+  display: inline-block;
+  margin-right: 12px;
+  color: #1d252f;
+  font-size: 13px;
+  font-weight: 500;
+`;
+const Label = styled.span`
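+  /* (editor's note) pill-style tag rendered beside the collapse title */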
+ display: inline-block; + padding: 0 6px; + border-radius: 2px; + background: #e8f4ff; + font-size: 12px; + color: var(--primaryColor); +`; + +type Props = { + title?: string; + label?: string; + style?: React.CSSProperties; + fileData: { [filePath: string]: string }; + isLoading?: boolean; +}; + +type Key = string | number; + +export const MIN_FILE_TREE_WIDTH = 270; +export const MAX_FILE_TREE_WIDTH = 600; + +const resizeableDefaultSize = { + width: MIN_FILE_TREE_WIDTH, + height: 'auto', +}; +const resizeableEnable = { + right: true, +}; + +const CodePreviewCollapse: FC = ({ title, label, fileData, style, isLoading }) => { + const [currentCode, setCurrentCode] = useState(''); + const [currentLanguage, setCurrentLanguage] = useState< + 'json' | 'python' | 'javascript' | 'java' | 'go' + >('python'); + + return ( + + + {title} + {label && } + + } + name="1" + > + + + + + + + + + + + ); + + function onSelect( + selectedKeys: Key[], + info: { + selected: boolean; + node: FileDataNode; + selectedNodes: FileDataNode[]; + e: Event; + }, + ) { + // folder or file + if ( + info.selected && + info.selectedNodes && + info.selectedNodes[0] && + !info.selectedNodes[0].isFolder + ) { + setCurrentCode(info.selectedNodes[0]?.code ?? ''); + setCurrentLanguage(formatLanguage(info.selectedNodes[0].fileExt ?? '') as any); + } + } +}; + +export default CodePreviewCollapse; diff --git a/web_console_v2/client/src/components/ConfigForm/index.less b/web_console_v2/client/src/components/ConfigForm/index.less new file mode 100644 index 000000000..7a683d952 --- /dev/null +++ b/web_console_v2/client/src/components/ConfigForm/index.less @@ -0,0 +1,42 @@ +.config-form{ + position: relative; + .config-form-extra{ + position: absolute; + top: 0; + right: 0; + } +} +.config-form-collapse{ + width: 100%; + overflow: initial; + .arco-collapse-item-header { + padding-left: 12px; + padding-right: 0; + border-width: 0; + &-title { + font-weight: 400 !important; + font-size: 12px; + } + .arco-btn-size-mini{ + padding: 0; + } + .arco-icon-hover{ + left: -3px + } + .arco-collapse-item-icon-hover{ + left: -3px + } + } + .arco-collapse-item-content { + background-color: transparent; + } + .arco-collapse-item-content-box { + padding: 0; + } +} + +.config-form-variable-label{ + font-size: 12px; + padding: 0 10px; + color: var(--color-text-2); +} diff --git a/web_console_v2/client/src/components/ConfigForm/index.tsx b/web_console_v2/client/src/components/ConfigForm/index.tsx new file mode 100644 index 000000000..3f8c1a25f --- /dev/null +++ b/web_console_v2/client/src/components/ConfigForm/index.tsx @@ -0,0 +1,306 @@ +/* istanbul ignore file */ +import React, { + useEffect, + useMemo, + useImperativeHandle, + ForwardRefRenderFunction, + forwardRef, + ReactNode, +} from 'react'; +import i18n from 'i18n'; + +import { Grid, Form, Input, Collapse, InputNumber, Select, Switch } from '@arco-design/web-react'; +import { IconQuestionCircle } from '@arco-design/web-react/icon'; +import ModelCodesEditorButton from 'components/ModelCodesEditorButton'; +import YAMLTemplateEditorButton from 'components/YAMLTemplateEditorButton'; +import EnvsInputForm from 'views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm'; +import { AlgorithmSelect } from 'components/DoubleSelect'; +import { CpuInput, MemInput } from 'components/InputGroup/NumberTextInput'; +import TitleWithIcon from 'components/TitleWithIcon'; + +import { FormProps, FormItemProps, FormInstance } from '@arco-design/web-react/es/Form'; +import { 
VariableComponent } from 'typings/variable'; +import { NO_CATEGORY } from 'views/Datasets/shared'; +import './index.less'; + +const { Row, Col } = Grid; + +export type ItemProps = { + componentType?: `${VariableComponent}`; + componentProps?: object; + tip?: string; + render?: (props: object) => React.ReactNode; + tag?: string; +} & FormItemProps; + +export type ExposedRef = { + formInstance: FormInstance; +}; + +type Props = { + value?: { [key: string]: any }; + onChange?: (val: any) => void; + /** Extra form props */ + formProps?: FormProps; + /** Extra for action*/ + configFormExtra?: ReactNode; + /** Form item list */ + formItemList?: ItemProps[]; + /** Collapse form item list */ + collapseFormItemList?: ItemProps[]; + /** Collapse title */ + collapseTitle?: string; + /** Collapse title extra node */ + collapseTitleExtra?: ReactNode; + /** Is default open collapse */ + isDefaultOpenCollapse?: boolean; + /** How many cols in one row */ + cols?: 1 | 2 | 3 | 4 | 6 | 8 | 12 | 24; + /** Reset initialValues when changing formItemList or collapseFormItemList */ + isResetOnFormItemListChange?: boolean; + /** variable will be grouped by this field */ + groupBy?: string; + /** Is group tag Hidden */ + hiddenGroupTag?: boolean; + /** Is Collapse Hidden*/ + hiddenCollapse?: boolean; + /** filter */ + filter?: (item: ItemProps) => boolean; +}; + +const emptyList: ItemProps[] = []; +interface ItemMapper { + [tagKey: string]: { + list: ItemProps[]; + rows: number; + }; +} + +const ConfigForm: ForwardRefRenderFunction = ( + { + value, + onChange, + formProps, + configFormExtra, + formItemList = emptyList, + collapseFormItemList = emptyList, + collapseTitle = i18n.t('model_center.title_advanced_config'), + collapseTitleExtra, + isDefaultOpenCollapse = false, + isResetOnFormItemListChange = false, + cols = 2, + groupBy = '', + hiddenGroupTag = true, + hiddenCollapse = false, + filter, + }, + parentRef, +) => { + const isControlled = typeof value === 'object' && value !== null; + const [form] = Form.useForm(); + + const initialFormValue = useMemo(() => { + const list = [...formItemList, ...collapseFormItemList]; + + return list.reduce((acc, cur) => { + const { field, initialValue } = cur; + + if (field) { + acc[field] = initialValue; + } + + return acc; + }, {} as any); + }, [formItemList, collapseFormItemList]); + + const span = useMemo(() => { + return Math.floor(24 / cols); + }, [cols]); + + const getGroupedItemMapper = ( + itemList: ItemProps[], + groupBy: string, + cols: number, + filter?: (item: ItemProps) => boolean, + ) => { + return itemList?.reduce((acc: ItemMapper, cur: any) => { + // Executive filter + if (filter && !filter(cur)) { + return acc; + } + const tag = cur[groupBy] || NO_CATEGORY; + if (!acc[tag]) { + acc[tag] = { + list: [], + rows: 0, + }; + } + acc[tag]?.list?.push({ ...cur }); + acc[tag].rows = Math.ceil(acc[tag]?.list?.length / cols); + return acc; + }, {} as ItemMapper); + }; + + const groupedFormItemMapper = useMemo(() => { + return getGroupedItemMapper(formItemList, groupBy, cols, filter); + }, [formItemList, groupBy, cols, filter]); + const groupedCollapseFormItemMapper = useMemo(() => { + return getGroupedItemMapper(collapseFormItemList, groupBy, cols, filter); + }, [collapseFormItemList, groupBy, cols, filter]); + + useEffect(() => { + if (isControlled) { + form.setFieldsValue({ ...value }); + } + }, [value, isControlled, form]); + + useEffect(() => { + if (isResetOnFormItemListChange) { + onChange?.(initialFormValue); + } + // eslint-disable-next-line 
react-hooks/exhaustive-deps
+  }, [initialFormValue, isResetOnFormItemListChange]);
+
+  useImperativeHandle(parentRef, () => {
+    return {
+      formInstance: form,
+    };
+  });
+
+  const renderFormItemList = (
+    groupedFormItemMapper: ItemMapper,
+    cols: number,
+    span: number,
+    hiddenGroupTag: boolean,
+  ) => {
+    return Object.keys(groupedFormItemMapper).reduce((acc: any, cur: string) => {
+      const { list = [] as ItemProps[], rows } = groupedFormItemMapper[cur];
+      !hiddenGroupTag &&
+        !!groupBy &&
+        acc.push(
+          <div className="config-form-variable-label" key={`tag-${cur}`}>
+            {cur}
+          </div>,
+        );
+      for (let i = 0; i < rows; i++) {
+        acc.push(
+          <Row key={`${cur}-${i}`}>
+            {list.slice(i * cols, (i + 1) * cols).map((item: ItemProps, index: number) => {
+              const { componentType, componentProps, render, label, tip, ...restProps } = item;
+              return (
+                <Col span={span} key={`${cur}-${i}-${index}`}>
+                  <Form.Item
+                    {...restProps}
+                    label={
+                      tip ? (
+                        <TitleWithIcon title={label} tip={tip} isShowIcon={true} icon={IconQuestionCircle} />
+                      ) : (
+                        label
+                      )
+                    }
+                  >
+                    {renderFormItemContent(item)}
+                  </Form.Item>
+                </Col>
+              );
+            })}
+          </Row>,
+        );
+      }
+      return acc;
+    }, [] as any);
+  };
+
+  return (
+    <Form
+      className="config-form"
+      form={form}
+      initialValues={initialFormValue}
+      onValuesChange={(_, values) => {
+        onChange?.(values);
+      }}
+      scrollToFirstError
+      {...formProps}
+    >
+      <div className="config-form-extra">{configFormExtra}</div>
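// A minimal standalone sketch of the grouping and row math used by
// getGroupedItemMapper / renderFormItemList above: items are bucketed by `tag`,
// and each bucket renders ceil(n / cols) rows of `cols` items on a 24-unit grid
// (span = floor(24 / cols)). The SketchItem shape and the sample data are
// hypothetical stand-ins, not the component's real types.
type SketchItem = { field: string; tag?: string };

function groupIntoRows(items: SketchItem[], cols: number) {
  const span = Math.floor(24 / cols); // e.g. cols = 2 -> each column spans 12 grid units
  const buckets = new Map<string, SketchItem[]>();
  for (const item of items) {
    const tag = item.tag ?? 'NO_CATEGORY';
    buckets.set(tag, [...(buckets.get(tag) ?? []), item]);
  }
  // Row i of a bucket holds items [i * cols, (i + 1) * cols), exactly as in
  // renderFormItemList's list.slice(i * cols, (i + 1) * cols).
  return Array.from(buckets, ([tag, list]) => ({
    tag,
    span,
    rows: Array.from({ length: Math.ceil(list.length / cols) }, (_, i) =>
      list.slice(i * cols, (i + 1) * cols),
    ),
  }));
}

// Five items with cols = 2 -> one bucket with rows of 2, 2 and 1 items.
groupIntoRows([{ field: 'a' }, { field: 'b' }, { field: 'c' }, { field: 'd' }, { field: 'e' }], 2);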
+      {renderFormItemList(groupedFormItemMapper, cols, span, hiddenGroupTag)}
+      {!hiddenCollapse && (
+        <Collapse className="config-form-collapse" defaultActiveKey={isDefaultOpenCollapse ? ['1'] : []}>
+          <Collapse.Item header={collapseTitle} extra={collapseTitleExtra} name="1">
+            {renderFormItemList(groupedCollapseFormItemMapper, cols, span, hiddenGroupTag)}
+          </Collapse.Item>
+        </Collapse>
+      )}
+    </Form>
+  );
+};
+
+function renderFormItemContent(props: ItemProps) {
+  const { componentType, componentProps = {}, render } = props;
+
+  if (render) {
+    return render(componentProps);
+  }
+
+  const Component = getRenderComponent(componentType) || Input;
+
+  return <Component {...componentProps} />;
+}
+
+export function getRenderComponent(componentType?: VariableComponent | `${VariableComponent}`) {
+  let Component: React.Component | React.FC = Input;
+  switch (componentType) {
+    case VariableComponent.Input:
+      Component = Input;
+      break;
+    case VariableComponent.TextArea:
+      Component = Input.TextArea;
+      break;
+    case VariableComponent.NumberPicker:
+      Component = InputNumber;
+      break;
+    case VariableComponent.Select:
+      Component = Select;
+      break;
+    case VariableComponent.Switch:
+      Component = Switch;
+      break;
+    case VariableComponent.Code:
+      Component = ModelCodesEditorButton;
+      break;
+    case VariableComponent.JSON:
+      Component = YAMLTemplateEditorButton;
+      break;
+    case VariableComponent.EnvsInput:
+      Component = EnvsInputForm;
+      break;
+    case VariableComponent.AlgorithmSelect:
+      Component = AlgorithmSelect;
+      break;
+    case VariableComponent.CPU:
+      Component = CpuInput;
+      break;
+    case VariableComponent.MEM:
+      Component = MemInput;
+      break;
+    default:
+      Component = Input;
+      break;
+  }
+  return Component;
+}
+
+export default forwardRef(ConfigForm);
diff --git a/web_console_v2/client/src/components/ConfusionMatrix/index.tsx b/web_console_v2/client/src/components/ConfusionMatrix/index.tsx
new file mode 100644
index 000000000..9424bff86
--- /dev/null
+++ b/web_console_v2/client/src/components/ConfusionMatrix/index.tsx
@@ -0,0 +1,279 @@
+/* istanbul ignore file */
+
+import React, { FC, useMemo, useState } from 'react';
+import styled from 'styled-components';
+import i18n from 'i18n';
+import { Switch } from '@arco-design/web-react';
+import NoResult from 'components/NoResult';
+import TitleWithIcon from 'components/TitleWithIcon';
+import { QuestionCircle } from 'components/IconPark';
+import { useModelMetriesResult } from 'hooks/modelCenter';
+
+const Card = styled.div<{ height?: number }>`
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  position: relative;
+  ${(props) => props.height && `height: ${props.height}px`};
+  border: 1px solid var(--lineColor);
+  border-radius: 2px;
+`;
+const Content = styled.div`
+  position: relative;
+  width: 160px;
+  display: flex;
+  flex-direction: row;
+  flex-wrap: wrap;
+  margin: 0 auto;
+`;
+
+const Item = styled.div`
+  display: flex;
+  justify-content: center;
+  align-items: center;
+  flex: 0 0 80px;
+  width: 80px;
+  height: 50px;
+  font-size: 12px;
+  &:nth-of-type(1) {
+    background-color: #468dff;
+    color: #fff;
+  }
+  &:nth-of-type(2) {
+    background-color: #f6f9fe;
+    color: var(--textColor);
+  }
+  &:nth-of-type(3) {
+    background-color: #d6e4fd;
+    color: var(--textColor);
+  }
+  &:nth-of-type(4) {
+    background-color: #7da9f8;
+    color: #fff;
+  }
+`;
+
+const Label = styled.span`
+  font-size: 12px;
+  color: var(--textColorStrong);
+`;
+const TopTitle = styled(Label)`
+  display: block;
+  width: 100%;
+  margin-bottom: 10px;
+  text-align: center;
+`;
+const LeftTitle = styled(Label)`
+  position: absolute;
+  top: 65px;
+  left: -45px;
+  transform: rotate(-90deg);
+`;
+const RightTopTitle = styled(Label)`
+  position: absolute;
+  top: 45px;
+  right: -30px;
+`;
+const RightBottomTitle = styled(Label)`
+  position: absolute;
+  top: 95px;
+  right: -30px;
+`;
+const BottomLeftTitle = styled(Label)`
+  position: absolute;
+  bottom: 15px;
+  left: 30px;
+`;
+const BottomRightTitle = styled(Label)`
+  position: absolute;
+  bottom: 15px;
+  right: 30px;
+`;
+
+const Bar = styled.div`
+  position: relative;
+  width: 160px;
+  height: 8px;
+  margin-top: 30px;
+  background: linear-gradient(90deg, #fff 0%, #468dff 100%);
+  visibility: hidden;
+  &::before {
+    position: absolute;
+    left: -30px;
+    top: -5px;
+    display: inline-block;
+    content: '0';
+    font-size: 12px;
+    color: var(--textColorStrong);
+  }
+  &::after {
+    position: absolute;
+    top: -5px;
+    right: -30px;
+    display: inline-block;
+    content: '1';
+    font-size: 12px;
+    color: var(--textColorStrong);
+  }
+`;
+
+const CenterLayout = styled.div`
+  margin: 0 auto;
+`;
+
+const Title = styled(TitleWithIcon)`
+  position: absolute;
+  left: 16px;
+  top: 12px;
+  color: var(--textColor);
+  font-size: 12px;
+`;
+
+const PercentContainer = styled.div`
+  position: absolute;
+  top: -3px;
+  right: -80px;
+
+  button {
+    margin-left: 10px;
+  }
+`;
+
+export type Props = {
+  valueList: any[];
+  percentValueList?: any[];
+  height?: number;
+  title?: string;
+  tip?: string;
+  isEmpty?: boolean;
+  formatPercentValueList?: (valueList: number[]) => string[];
+};
+
+export type ModelEvaluationVariantProps = {
+  id: ID;
+  participantId?: ID;
+};
+
+export const defaultFormatPercentValueList = (valueList: number[]) => {
+  const total = valueList.reduce((acc: number, cur: number) => acc + cur, 0);
+  return valueList.map((num) => ((num / total) * 100).toFixed(2) + '%');
+};
+
+type VariantComponents = {
+  ModelEvaluationVariant: React.FC<ModelEvaluationVariantProps>;
+};
+
+export const ConfusionMatrix: FC<Props> & VariantComponents = ({
+  valueList,
+  percentValueList: percentValueListFromProps,
+  height = 260,
+  title = 'Confusion matrix',
+  tip = '',
+  isEmpty = false,
+  formatPercentValueList = defaultFormatPercentValueList,
+}) => {
+  const [isShowPercent, setIsShowPercent] = useState(false);
+
+  const percentValueList = useMemo(() => {
+    if (percentValueListFromProps) {
+      return percentValueListFromProps;
+    }
+
+    if (!valueList) {
+      return [];
+    }
+
+    return formatPercentValueList(valueList);
+  }, [valueList, percentValueListFromProps, formatPercentValueList]);
+
+  const displayValueList = isShowPercent ? percentValueList : valueList;
+
+  // tp = true positive: label 1, predicted 1
+  // tn = true negative: label 0, predicted 0
+  // fp = false positive: predicted 1, label 0
+  // fn = false negative: predicted 0, label 1
+  return (
+    <Card height={height}>
+      <Title title={title} isShowIcon={Boolean(tip)} isBlock={false} tip={tip} icon={QuestionCircle} />
+      {isEmpty ? (
+        <CenterLayout style={{ margin: '0 auto' }}>
+          <NoResult.NoData />
+        </CenterLayout>
+      ) : (
+        <Content>
+          <TopTitle>Actual class</TopTitle>
+          <LeftTitle>Predicted</LeftTitle>
+          <RightTopTitle>0</RightTopTitle>
+          <RightBottomTitle>1</RightBottomTitle>
+          <BottomLeftTitle>0</BottomLeftTitle>
+          <BottomRightTitle>1</BottomRightTitle>
+          <Item>{displayValueList?.[3] ?? ''}</Item>
+          <Item>{displayValueList?.[1] ?? ''}</Item>
+          <Item>{displayValueList?.[2] ?? ''}</Item>
+          <Item>{displayValueList?.[0] ??
''}</Item> + <PercentContainer> + <TitleWithIcon + title={i18n.t('model_center.title_confusion_matrix_normalization')} + isShowIcon={true} + isBlock={false} + tip={i18n.t('model_center.tip_confusion_matrix_normalization')} + icon={QuestionCircle} + /> + <Switch checked={isShowPercent} onChange={onSwitchChange} /> + </PercentContainer> + <Bar /> + </Content> + )} + </Card> + ); + + function onSwitchChange(val: boolean) { + setIsShowPercent(val); + } +}; + +export const ModelEvaluationVariant: React.FC<ModelEvaluationVariantProps> = ({ + id, + participantId, +}) => { + const { data } = useModelMetriesResult(id, participantId); + + const valueList = useMemo(() => { + if (!data) { + return []; + } + const { confusion_matrix = {} } = data; + return [confusion_matrix.tp, confusion_matrix.fn, confusion_matrix.fp, confusion_matrix.tn].map( + (item) => { + switch (typeof item) { + case 'string': + return parseInt(item); + case 'number': + return item; + case 'undefined': + default: + return 0; + } + }, + ); + }, [data]); + + return ( + <ConfusionMatrix + valueList={valueList} + isEmpty={valueList.length === 0 || valueList.every((v) => v === 0)} + /> + ); +}; + +ConfusionMatrix.ModelEvaluationVariant = ModelEvaluationVariant; + +export default ConfusionMatrix; diff --git a/web_console_v2/client/src/components/CountTime/index.test.tsx b/web_console_v2/client/src/components/CountTime/index.test.tsx new file mode 100644 index 000000000..f0a06b772 --- /dev/null +++ b/web_console_v2/client/src/components/CountTime/index.test.tsx @@ -0,0 +1,299 @@ +import React from 'react'; +import { render, act } from '@testing-library/react'; +import CountTime from './index'; + +describe('<CountTime />', () => { + beforeEach(() => { + jest.useFakeTimers(); + }); + describe('count down', () => { + it('should render correctly and trigger onCountDownFinish one time when countdown to zero', () => { + const onCountDownFinish = jest.fn(); + const wrapper = render( + <CountTime time={10} isCountDown={true} onCountDownFinish={onCountDownFinish} />, + ); + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + + expect(wrapper.queryByText('00:00:09')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('00:00:08')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(999); + }); + expect(wrapper.queryByText('00:00:08')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(1); + }); + expect(wrapper.queryByText('00:00:07')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(7000); + }); + expect(wrapper.queryByText('00:00:00')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(1); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('00:00:00')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(1); + }); + + it('should only render second', () => { + const onCountDownFinish = jest.fn(); + const wrapper = render( + <CountTime + time={70} + isCountDown={true} + onCountDownFinish={onCountDownFinish} + isOnlyShowSecond={true} + />, + ); + expect(wrapper.queryByText('70')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() 
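// A worked example of the percent normalization and cell ordering above:
// valueList arrives as [tp, fn, fp, tn] and the grid renders indices 3, 1, 2, 0,
// so rows are the predicted class and columns the actual class. The function is
// copied verbatim from this file; the sample counts are made up.
const sketchFormatPercent = (valueList: number[]) => {
  const total = valueList.reduce((acc: number, cur: number) => acc + cur, 0);
  return valueList.map((num) => ((num / total) * 100).toFixed(2) + '%');
};

const sketchValueList = [50, 10, 5, 35]; // [tp, fn, fp, tn] for 100 predictions
sketchFormatPercent(sketchValueList);
// -> ['50.00%', '10.00%', '5.00%', '35.00%']
// Rendered as: top row (predicted 0)    -> tn (index 3), fn (index 1)
//              bottom row (predicted 1) -> fp (index 2), tp (index 0)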
=> { + jest.advanceTimersByTime(1000); + }); + + expect(wrapper.queryByText('69')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('68')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(999); + }); + expect(wrapper.queryByText('68')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(1); + }); + expect(wrapper.queryByText('67')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(7000); + }); + expect(wrapper.queryByText('60')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(0); + + act(() => { + jest.advanceTimersByTime(60000); + }); + expect(wrapper.queryByText('0')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(1); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('0')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(1); + }); + }); + + describe('count up', () => { + it('should render correctly', () => { + const wrapper = render(<CountTime time={10} isCountDown={false} />); + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('00:00:11')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('00:00:12')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(999); + }); + expect(wrapper.queryByText('00:00:12')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(1); + }); + expect(wrapper.queryByText('00:00:13')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(7000); + }); + + expect(wrapper.queryByText('00:00:20')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(41000); + }); + + expect(wrapper.queryByText('00:01:01')).toBeInTheDocument(); + }); + + it('should only render second', () => { + const wrapper = render(<CountTime time={70} isCountDown={false} isOnlyShowSecond={true} />); + expect(wrapper.queryByText('70')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + + expect(wrapper.queryByText('71')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(1000); + }); + expect(wrapper.queryByText('72')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(999); + }); + expect(wrapper.queryByText('72')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(1); + }); + expect(wrapper.queryByText('73')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(7000); + }); + expect(wrapper.queryByText('80')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(60000); + }); + expect(wrapper.queryByText('140')).toBeInTheDocument(); + }); + }); + + it('should render static time', () => { + const wrapper = render(<CountTime time={10} isCountDown={true} isStatic={true} />); + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(2000); + }); + + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + }); + + describe('change isStatic prop', () => { + it('should reset time on change isStatic prop', () => { + const wrapper = render( + <CountTime time={10} isCountDown={true} isStatic={false} isResetOnChange={true} />, + ); + 
expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(2000); + }); + + expect(wrapper.queryByText('00:00:08')).toBeInTheDocument(); + + wrapper.rerender( + <CountTime time={10} isCountDown={true} isStatic={true} isResetOnChange={true} />, + ); + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(2000); + }); + + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + }); + + it('should not reset time on change prop', () => { + const wrapper = render( + <CountTime time={10} isCountDown={true} isStatic={false} isResetOnChange={false} />, + ); + expect(wrapper.queryByText('00:00:10')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(2000); + }); + + expect(wrapper.queryByText('00:00:08')).toBeInTheDocument(); + + wrapper.rerender( + <CountTime time={10} isCountDown={true} isStatic={true} isResetOnChange={false} />, + ); + expect(wrapper.queryByText('00:00:08')).toBeInTheDocument(); + + act(() => { + jest.advanceTimersByTime(2000); + }); + + expect(wrapper.queryByText('00:00:08')).toBeInTheDocument(); + }); + }); + + it('should support render props mode', () => { + const onCountDownFinish = jest.fn(); + + const wrapper = render( + <CountTime + time={10} + isCountDown={true} + isRenderPropsMode={true} + onCountDownFinish={onCountDownFinish} + > + {(formattedTime: string, noFormattedTime: number) => { + return ( + <> + <div>formattedTime: {formattedTime}</div> + <div>noFormattedTime: {noFormattedTime}</div> + </> + ); + }} + </CountTime>, + ); + + expect(wrapper.queryByText('formattedTime: 00:00:10')).toBeInTheDocument(); + expect(wrapper.queryByText('noFormattedTime: 10')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(5000); + }); + + expect(wrapper.queryByText('formattedTime: 00:00:05')).toBeInTheDocument(); + expect(wrapper.queryByText('noFormattedTime: 5')).toBeInTheDocument(); + expect(onCountDownFinish).not.toHaveBeenCalled(); + + act(() => { + jest.advanceTimersByTime(5000); + }); + + expect(wrapper.queryByText('formattedTime: 00:00:00')).toBeInTheDocument(); + expect(wrapper.queryByText('noFormattedTime: 0')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(1); + + act(() => { + jest.advanceTimersByTime(5000); + }); + + expect(wrapper.queryByText('formattedTime: 00:00:00')).toBeInTheDocument(); + expect(wrapper.queryByText('noFormattedTime: 0')).toBeInTheDocument(); + expect(onCountDownFinish).toHaveBeenCalledTimes(1); + }); +}); diff --git a/web_console_v2/client/src/components/CountTime/index.tsx b/web_console_v2/client/src/components/CountTime/index.tsx index 0407f8ff5..b50c8ac7f 100644 --- a/web_console_v2/client/src/components/CountTime/index.tsx +++ b/web_console_v2/client/src/components/CountTime/index.tsx @@ -1,23 +1,77 @@ -import { noop } from 'lodash'; -import React, { FC, useState, useEffect } from 'react'; -import { fomatTimeCount } from 'shared/date'; +import { noop } from 'lodash-es'; +import React, { FC, useState, useEffect, useCallback } from 'react'; +import { formatTimeCount } from 'shared/date'; + +export function formatSecound(input: number): string { + return input.toString(); +} type Props = { - time: number; // Accurate to seconds - isStatic: boolean; + /** Accurate to seconds */ + time: number; + /** Stop count timer */ + isStatic?: boolean; + /** Enable render props mode */ + isRenderPropsMode?: boolean; + /** Reset time when changing isStatic value 
from false to true */ + isResetOnChange?: boolean; + /** Is count down, otherwise, count up */ + isCountDown?: boolean; + /** Only show second */ + isOnlyShowSecond?: boolean; + /** When time less than or equal 0, call this function one time */ + onCountDownFinish?: () => void; }; -const CountTime: FC<Props> = ({ time, isStatic }) => { - let [formatted, setFormatted] = useState(fomatTimeCount(time)); +const CountTime: FC<Props> = ({ + time, + isStatic = false, + isRenderPropsMode = false, + isResetOnChange = false, + isCountDown = false, + isOnlyShowSecond = false, + onCountDownFinish, + children, +}) => { + const formatFn = useCallback( + (inputTime: number) => { + return isOnlyShowSecond ? formatSecound(inputTime) : formatTimeCount(inputTime); + }, + [isOnlyShowSecond], + ); + + const [formatted, setFormatted] = useState(formatFn(time)); + const [noFormattedTime, setNoFormattedTime] = useState(time); useEffect(() => { - if (isStatic) return noop; + if (isStatic) { + if (isResetOnChange) { + setFormatted(formatFn(time)); + setNoFormattedTime(time); + } + return noop; + } - const timer = setInterval(() => { - setFormatted(fomatTimeCount(time++)); + if (isCountDown && Number(noFormattedTime) <= 0) { + return noop; + } + const timer = setTimeout(() => { + const tempTime = isCountDown ? noFormattedTime - 1 : noFormattedTime + 1; + setFormatted(formatFn(tempTime)); + setNoFormattedTime(tempTime); }, 1000); - return () => clearInterval(timer); - }, [time, isStatic]); + return () => clearTimeout(timer); + }, [time, isStatic, isResetOnChange, isCountDown, formatFn, noFormattedTime]); + + useEffect(() => { + if (isCountDown && onCountDownFinish && Number(noFormattedTime) <= 0) { + onCountDownFinish(); + } + }, [noFormattedTime, isCountDown, onCountDownFinish]); + + if (isRenderPropsMode && typeof children === 'function') { + return children(formatted, noFormattedTime); + } return <span>{formatted}</span>; }; diff --git a/web_console_v2/client/src/components/CronTimePicker/index.module.less b/web_console_v2/client/src/components/CronTimePicker/index.module.less new file mode 100644 index 000000000..6baa287f8 --- /dev/null +++ b/web_console_v2/client/src/components/CronTimePicker/index.module.less @@ -0,0 +1,4 @@ +.time_picker_container{ + display: grid; + grid-template-columns: 1fr 10px 1fr 10px 1fr; +} diff --git a/web_console_v2/client/src/components/CronTimePicker/index.tsx b/web_console_v2/client/src/components/CronTimePicker/index.tsx new file mode 100644 index 000000000..bdd6ab942 --- /dev/null +++ b/web_console_v2/client/src/components/CronTimePicker/index.tsx @@ -0,0 +1,153 @@ +import React, { FC, useState } from 'react'; +import { TimePicker, Select } from '@arco-design/web-react'; +import dayjs, { Dayjs } from 'dayjs'; +import objectSupport from 'dayjs/plugin/objectSupport'; + +import styles from './index.module.less'; + +dayjs.extend(objectSupport); + +const { Option } = Select; + +export interface PickerValue { + method: string; + weekday?: number; + time: Dayjs | null; +} + +type Props = { + value?: PickerValue; + onChange?: (value: PickerValue) => void; +}; + +const CronTimePicker: FC<Props> = ({ value, onChange }) => { + const [method, setMethod] = useState(value?.method!); + const [weekday, setWeekday] = useState(value?.weekday || 0); + const [time, setTime] = useState<Dayjs | null>(value?.time || null); + + return ( + <div className={styles.time_picker_container}> + <Select + onChange={(val) => { + setMethod(val); + onChange && onChange({ method: val, weekday, time }); + }} + 
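// The CountTime tests above pin down what formatTimeCount from shared/date must
// produce: a zero-padded HH:mm:ss string built from a plain seconds count. A
// minimal re-implementation, shown only to make the expected output concrete;
// the real helper lives in shared/date and may be implemented differently.
function sketchFormatTimeCount(totalSeconds: number): string {
  const pad = (n: number) => String(n).padStart(2, '0');
  const hours = Math.floor(totalSeconds / 3600);
  const minutes = Math.floor((totalSeconds % 3600) / 60);
  const seconds = totalSeconds % 60;
  return `${pad(hours)}:${pad(minutes)}:${pad(seconds)}`;
}

sketchFormatTimeCount(10); // '00:00:10'
sketchFormatTimeCount(61); // '00:01:01', matching the count-up test above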
value={method} + > + <Option value="hour">每时</Option> + <Option value="day">每天</Option> + <Option value="week">每周</Option> + </Select> + <div /> + {method === 'week' && ( + <> + <Select + onChange={(val) => { + setWeekday(val); + onChange && onChange({ method, weekday: val, time }); + }} + value={weekday} + > + <Option value={0}>星期天</Option> + <Option value={1}>星期一</Option> + <Option value={2}>星期二</Option> + <Option value={3}>星期三</Option> + <Option value={4}>星期四</Option> + <Option value={5}>星期五</Option> + <Option value={6}>星期六</Option> + </Select> + <div /> + </> + )} + + <TimePicker + value={time as any} + onChange={(_, val: any) => { + setTime(val); + onChange && onChange({ method, weekday: val, time: val }); + }} + format={method === 'hour' ? 'mm 分' : 'HH 时 : mm 分'} + showNowBtn={false} + placeholder={method === 'hour' ? '- 分' : '- 时 - 分'} + /> + </div> + ); +}; + +/** + * PickerValue in local format -> Cron in UTC format + * @param value + * @returns + */ +export function toCron(value: PickerValue) { + const { method, weekday, time } = formatWithUtc(value, true); + let cron = 'null'; + if (time) { + if (method === 'week') { + cron = `${time.minute()} ${time.hour()} * * ${weekday}`; + } else if (method === 'day') { + cron = `${time.minute()} ${time.hour()} * * *`; + } else if (method === 'hour') { + cron = `${time.minute()} * * * *`; + } + } + return cron; +} + +/** + * Cron in UTC format -> PickerValue in local format + * @param cron + * @returns + */ +export function parseCron(cron: string) { + const parsed: PickerValue = { + method: 'day', + time: null, + }; + if (cron && cron !== 'null') { + const cronArray = cron.split(' '); + const cronLen = cronArray.length; + if (cronArray[cronLen - 1] !== '*') { + // This means that the time is based on the day of the week + parsed.weekday = Number(cronArray[cronLen - 1]); + parsed.method = 'week'; + } + if (cronLen === 5) { + if (cronArray[1] === '*') { + parsed.method = 'hour'; + parsed.time = dayjs().set({ + minute: Number(cronArray[0]), + second: 0, + }); + } else { + parsed.time = dayjs().set({ + hour: Number(cronArray[1]), + minute: Number(cronArray[0]), + }); + } + } + } + return formatWithUtc(parsed, false); +} + +export function formatWithUtc({ method, weekday, time }: PickerValue, isToUtc: boolean) { + if (time) { + let offsetHour = dayjs().utcOffset() / 60; + !isToUtc && (offsetHour = 0 - offsetHour); + const newHour = time.hour() - offsetHour; + if (method === 'week' && weekday !== undefined) { + let utcWeekday = weekday; + if (newHour < 0) { + utcWeekday -= 1; + } + if (newHour > 23) { + utcWeekday += 1; + } + weekday = (utcWeekday + 7) % 7; + } + time = dayjs().set({ second: time.second(), minute: time.minute(), hour: (newHour + 24) % 24 }); + } + return { method, weekday, time }; +} + +export default CronTimePicker; diff --git a/web_console_v2/client/src/components/DataPreview/PictureDataTable/PictureList.tsx b/web_console_v2/client/src/components/DataPreview/PictureDataTable/PictureList.tsx new file mode 100644 index 000000000..944c01cac --- /dev/null +++ b/web_console_v2/client/src/components/DataPreview/PictureDataTable/PictureList.tsx @@ -0,0 +1,92 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; +import styled from 'styled-components'; +import GridRow from 'components/_base/GridRow'; +import { ImageDetail } from 'typings/dataset'; + +const Container = styled.div` + --cols: 6; + + display: grid; + grid-template-columns: repeat(var(--cols), 1fr); + align-items: start; + justify-content: space-between; + 
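// The weekday handling in CronTimePicker's formatWithUtc above is the subtle
// part: converting a local time to UTC can cross midnight, which must also
// shift the cron day-of-week field. The pure function below restates that
// arithmetic for the weekly case only; it is a distilled sketch, not the
// component's API.
function localWeeklyToUtcCron(weekday: number, hour: number, minute: number, offsetHour: number) {
  const newHour = hour - offsetHour;
  let utcWeekday = weekday;
  if (newHour < 0) utcWeekday -= 1; // crossed midnight backwards
  if (newHour > 23) utcWeekday += 1; // crossed midnight forwards
  return `${minute} ${(newHour + 24) % 24} * * ${(utcWeekday + 7) % 7}`;
}

// Monday 07:30 in UTC+8 (dayjs().utcOffset() === 480) becomes Sunday 23:30 UTC:
localWeeklyToUtcCron(1, 7, 30, 8); // '30 23 * * 0'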
grid-gap: 15px; + width: 100%; + padding: 15px 20px; + min-width: 550px; + + @media screen and (max-width: 1600px) { + --cols: 5; + } + + @media screen and (max-width: 1500px) { + --cols: 4; + } +`; + +const CardContainer = styled.div` + text-align: center; +`; +const Name = styled.div` + color: var(--textColorStrongSecondary); +`; + +const Size = styled.div` + color: var(--textColorSecondary); +`; +const PictureContainer = styled(GridRow)` + min-width: 88px; + min-height: 88px; + background-color: #f6f7fb; + border-radius: 4px; + cursor: pointer; +`; +const StyledImg = styled.img` + height: 66px; + width: 66px; + border-radius: 5px; + /* Crop the picture */ + object-fit: cover; +`; + +const PictureCard: FC<{ data: ImageDetail; onClick?: (data: ImageDetail) => void }> = ({ + data, + onClick, +}) => { + return ( + <CardContainer> + <PictureContainer + justify="center" + align="center" + onClick={() => { + onClick?.(data); + }} + > + <StyledImg + alt={data?.annotation?.caption || '照片显示错误'} + src={data?.uri} + title={data?.annotation?.caption} + /> + </PictureContainer> + <Name>{data.name}</Name> + <Size>{`${data.width} × ${data.height}`}</Size> + </CardContainer> + ); +}; + +const PictureList: FC<{ data: ImageDetail[]; onClick?: (data: ImageDetail) => void }> = ({ + data, + onClick, +}) => { + return ( + <Container> + {data.map((item, index) => ( + <PictureCard data={item} key={`pic-${item.file_name}`} onClick={onClick} /> + ))} + </Container> + ); +}; + +export default PictureList; diff --git a/web_console_v2/client/src/components/DataPreview/PictureDataTable/index.tsx b/web_console_v2/client/src/components/DataPreview/PictureDataTable/index.tsx new file mode 100644 index 000000000..223289581 --- /dev/null +++ b/web_console_v2/client/src/components/DataPreview/PictureDataTable/index.tsx @@ -0,0 +1,326 @@ +/* istanbul ignore file */ + +import React, { FC, useEffect, useMemo, useState } from 'react'; +import styled from 'styled-components'; +import { useTranslation } from 'react-i18next'; +import { isEmpty, isNil } from 'lodash-es'; + +import { transformRegexSpecChar } from 'shared/helpers'; +import { CONSTANTS } from 'shared/constants'; + +import { Input, Message, Select, Spin, Tooltip } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; + +import NoResult from 'components/NoResult'; +import PictureList from './PictureList'; + +import { ImageDetail, PreviewData } from 'typings/dataset'; +import { MixinEllipsis } from 'styles/mixins'; + +const { Option } = Select; + +const Container = styled.div` + display: grid; + grid-template-columns: 180px 0.6fr 0.4fr; + border: 1px solid var(--lineColor); + width: 100%; +`; + +const ColumnContainer = styled.div` + display: grid; + grid-template-rows: 36px 1fr; + height: 100%; +`; + +const Header = styled.div` + display: flex; + justify-content: space-between; + align-items: center; + padding: 8px 20px; + border-bottom: 1px solid var(--lineColor); + border-right: 1px solid var(--lineColor); + color: #000000; + font-size: 12px; +`; + +const Body = styled.div` + height: 557px; + overflow-y: auto; + border-right: 1px solid var(--lineColor); +`; + +const StyledGridRow = styled(GridRow)` + padding: 6px 20px; + cursor: pointer; + + .itemNum { + background-color: #f6f7fb; + border-radius: 32px; + padding: 0 6px; + } + + &:hover, + &[data-active='true'] { + background-color: #f6f7fb; + + .itemValue { + color: var(--primaryColor); + } + + .itemNum { + background-color: #ffffff; + } + } +`; + +const StyledFileName = styled.div` + 
${MixinEllipsis()}; + width: 90%; + color: var(--textColorStrong); +`; + +const StyledSize = styled.span` + background-color: #f6f7fb; + border-radius: 8px; + padding: 0 4px; + color: var(--textColorStrongSecondary); +`; + +type Props = { + data?: PreviewData; + loading?: boolean; + isError?: boolean; + noResultText?: string; +}; +const PictureDataPreviewTable: FC<Props> = ({ data, loading, isError, noResultText }) => { + const { t } = useTranslation(); + /** active label */ + const [active, setActive] = useState(''); + const [labelOrder, setLabelOrder] = useState('ascending'); + const [filterText, setFilterText] = useState(''); + const [pictureOnFocus, setPictureOnFocus] = useState<ImageDetail>(); + + const formatData = useMemo(() => { + if (!data) { + return undefined; + } + + const map: { [key: string]: ImageDetail[] } = {}; + + data?.images?.forEach((item) => { + const labelName = item?.annotation?.label ?? t('no_label'); + if (map[labelName] === undefined) { + map[labelName] = []; + } + map[labelName].push({ + ...item, + uri: `/api/v2/image?name=${item.path}`, + }); + }); + + return { ...data, formatImages: map }; + }, [data, t]); + + const labelList = useMemo(() => { + const list: Array<{ count: number; value: string }> = []; + const map: { [key: string]: number } = {}; + + data?.images?.forEach((item) => { + const labelName = item?.annotation?.label ?? t('no_label'); + + if (map[labelName] === undefined) { + map[labelName] = 0; + } + map[labelName]++; + }); + + Object.keys(map).forEach((key) => { + const value = map[key]; + list.push({ + value: key, + count: value, + }); + }); + + return list; + }, [data, t]); + + useEffect(() => { + if (labelList.length && !active) { + setActive(labelList[0].value); + } + }, [active, labelList]); + + const filterTagList = useMemo(() => { + const regx = new RegExp(`^.*${transformRegexSpecChar(filterText)}.*$`); + return labelList.filter((item: any) => { + return regx.test(item.value); + }); + }, [filterText, labelList]); + + const tagShowList = useMemo(() => { + if (filterTagList) { + const list = [...filterTagList]; + switch (labelOrder) { + case 'ascending': + return list.sort((a: any, b: any) => { + return a.count - b.count; + }); + + case 'descending': + return list.sort((a: any, b: any) => { + return b.count - a.count; + }); + case 'beginA': + return list.sort((a: any, b: any) => { + return a.value.localeCompare(b.value); + }); + case 'endA': + return list.sort((a: any, b: any) => { + return b.value.localeCompare(a.value); + }); + default: + Message.error('未知选项'); + return list; + } + } + return []; + }, [filterTagList, labelOrder]); + + const imageList = useMemo(() => { + if (active && formatData && formatData.formatImages) { + const list = [...formatData.formatImages[active]]; + + return list; + } + return []; + }, [active, formatData]); + + useEffect(() => { + if (imageList && !pictureOnFocus) { + setPictureOnFocus(imageList[0]); + } + }, [imageList, pictureOnFocus]); + + if (isError) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + + if (loading) { + return ( + <GridRow style={{ height: '100%' }} justify="center"> + <Spin loading={true} /> + </GridRow> + ); + } + + if (isNil(data)) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + + if (noResultText) { + return <NoResult text={noResultText} />; + } + + if (!labelList || isEmpty(labelList)) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + return ( + <Container> + <ColumnContainer> + <Header style={{ paddingLeft: '12px' }}> + <Select + 
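// The four sort modes of the label list above reduce to two comparator
// families: numeric by count, and locale-aware by label text. A standalone
// sketch of the same dispatch, with made-up labels:
type LabelStat = { value: string; count: number };

function sortLabels(list: LabelStat[], order: 'ascending' | 'descending' | 'beginA' | 'endA') {
  const copy = [...list]; // Array.prototype.sort mutates, so copy first, as the memo above does
  switch (order) {
    case 'ascending':
      return copy.sort((a, b) => a.count - b.count);
    case 'descending':
      return copy.sort((a, b) => b.count - a.count);
    case 'beginA':
      return copy.sort((a, b) => a.value.localeCompare(b.value));
    case 'endA':
      return copy.sort((a, b) => b.value.localeCompare(a.value));
  }
}

sortLabels([{ value: 'cat', count: 3 }, { value: 'dog', count: 1 }], 'descending');
// -> [{ value: 'cat', count: 3 }, { value: 'dog', count: 1 }]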
defaultValue="ascending" + bordered={false} + size="small" + style={{ color: '#000000', fontSize: 12 }} + onChange={(value) => { + setLabelOrder(value); + }} + value={labelOrder} + dropdownMenuStyle={{ width: 'auto' }} + > + <Option value="ascending">数据量升序</Option> + <Option value="descending">数据量降序</Option> + <Option value="beginA">按字母 A-Z</Option> + <Option value="endA">按字母 Z-A</Option> + </Select> + </Header> + <Body> + <div style={{ padding: '6px 13px' }}> + <Input.Search + placeholder="搜索..." + allowClear + size="small" + onChange={(value) => { + setFilterText(value); + }} + /> + </div> + {tagShowList.map((item, index) => { + if (!active && !index) { + setActive(item.value); + } + return ( + <StyledGridRow + justify="space-between" + data-active={active === item.value} + onClick={() => { + setActive(item.value); + setPictureOnFocus(undefined); + }} + key={`tag-${index}`} + > + <span className="itemValue">{item.value}</span> + <span className="itemNum">{item.count}</span> + </StyledGridRow> + ); + })} + </Body> + </ColumnContainer> + <ColumnContainer> + <Header> + <span style={{ color: 'var(--textColorStrongSecondary)' }}> + 以下展示该标签下的20张样例 + </span> + </Header> + <Body> + <PictureList + data={imageList} + onClick={(item) => { + setPictureOnFocus(item); + }} + /> + </Body> + </ColumnContainer> + <ColumnContainer> + <Header style={{ borderRight: 0 }}> + <GridRow gap={10}> + <Tooltip content={pictureOnFocus?.name ?? ''}> + <StyledFileName>{pictureOnFocus?.name ?? CONSTANTS.EMPTY_PLACEHOLDER}</StyledFileName> + </Tooltip> + <StyledSize>{`${pictureOnFocus?.width || 0} × ${ + pictureOnFocus?.height || 0 + } pixels`}</StyledSize> + </GridRow> + <span + style={{ color: 'var(--textColorStrongSecondary)' }} + >{`${pictureOnFocus?.file_name?.split('.').pop()}`}</span> + </Header> + <Body style={{ borderRight: 0 }}> + <div style={{ height: '100%', minHeight: '100%', margin: '0 10%' }}> + <GridRow justify="center" align="center" style={{ height: '100%' }}> + <img + alt={pictureOnFocus?.annotation?.caption || '照片显示错误'} + src={pictureOnFocus?.uri} + title={pictureOnFocus?.annotation?.caption} + /> + </GridRow> + </div> + </Body> + </ColumnContainer> + </Container> + ); +}; + +export default PictureDataPreviewTable; diff --git a/web_console_v2/client/src/components/DataPreview/StructDataTable/FeatureInfoDrawer.module.less b/web_console_v2/client/src/components/DataPreview/StructDataTable/FeatureInfoDrawer.module.less new file mode 100644 index 000000000..13292ed32 --- /dev/null +++ b/web_console_v2/client/src/components/DataPreview/StructDataTable/FeatureInfoDrawer.module.less @@ -0,0 +1,33 @@ +.drawer_container{ + :global{ + .arco-drawer-content{ + padding-top: 0; + padding-bottom: 200px; + } + } +} +.drawer_header{ + position: sticky; + z-index: 2; + top: 0; + margin: 0 -24px 0; + padding: 10px 16px 10px 24px; + background-color: white; + border-bottom: 1px solid var(--lineColor); +} +.feature_key{ + position: relative; + margin-bottom: 0; + margin-right: 10px; +} +.info_table{ + margin-top: 30px; + border: 1px solid var(--lineColor); + border-radius: 4px; +} +.chart_container{ + padding: 20px 16px; + margin-top: 20px; + border: 1px solid var(--lineColor); + border-radius: 4px; +} diff --git a/web_console_v2/client/src/components/DataPreview/StructDataTable/FeatureInfoDrawer.tsx b/web_console_v2/client/src/components/DataPreview/StructDataTable/FeatureInfoDrawer.tsx new file mode 100644 index 000000000..ed41b7b82 --- /dev/null +++ 
b/web_console_v2/client/src/components/DataPreview/StructDataTable/FeatureInfoDrawer.tsx @@ -0,0 +1,190 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import { Drawer, DrawerProps, Grid, Button, Table } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import { CaretDown, CaretUp, Close } from 'components/IconPark'; +import { Bar } from 'react-chartjs-2'; +import { floor } from 'lodash-es'; +import { CONSTANTS } from 'shared/constants'; + +import styles from './FeatureInfoDrawer.module.less'; + +const { Row } = Grid; + +type HistDataset = { + data: number[]; + label?: string; + backgroundColor?: string; +}; + +export const formatChartData = (labels: number[], datasets: HistDataset[]) => ({ + labels: labels + .map((v) => floor(v, 1)) + .reduce((acc, curr, index, arr) => { + if (arr[index + 1]) { + acc.push(`[${curr}, ${arr[index + 1]}]`); + } + return acc; + }, [] as string[]), + + datasets: datasets.map((data) => { + return Object.assign({ label: '数据集', backgroundColor: '#468DFF' }, data); + }), +}); + +interface Props extends DrawerProps { + data?: any[]; + histData?: ReturnType<typeof formatChartData>; + compareWithBase?: boolean; + loading?: boolean; + featureKey?: string; + toggleVisible: (val: boolean) => void; + onClose?: () => void; +} + +const barChartOptions: Chart.ChartOptions = { + scales: { + xAxes: [ + { + ticks: { + beginAtZero: false, + fontSize: 8, + }, + }, + ], + }, +}; + +export const METRIC_KEY_TRANSLATE_MAP: { [key: string]: string } = { + count: '样本数', + mean: '平均值', + stddev: '标准差', + min: '最小值', + max: '最大值', + missing_count: '缺失数', + missing_rate: '缺失率', +}; + +export const FEATURE_DRAWER_ID = 'feature_drawer'; + +const FeatureInfoDrawer: FC<Props> = ({ + featureKey, + data, + histData, + loading, + onClose, + toggleVisible, + compareWithBase, + ...props +}) => { + const columns = useMemo(() => { + return !compareWithBase + ? [ + { + title: '参数', + dataIndex: 'key', + width: '100px', + }, + { + title: '求交数据集', + dataIndex: 'value', + }, + ] + : [ + { + title: '参数', + dataIndex: 'key', + width: '100px', + }, + { + title: '原始数据集', + dataIndex: 'baseValue', + }, + + { + title: '求交数据集', + dataIndex: 'value', + }, + { + title: '对比', + dataIndex: 'diff', + render: (val: any, record: { baseValue: number; diff: number; isPercent: boolean }) => { + let isShowUpIcon = false; + + // If isPercent = true, diff is string, like '99%','-88%' + if (record.isPercent) { + const strDiff = String(record.diff); + isShowUpIcon = + strDiff.length > 0 ? strDiff[0] !== CONSTANTS.EMPTY_PLACEHOLDER : false; + } else { + isShowUpIcon = val >= 0; + } + + return ( + <GridRow gap={5}> + {isShowUpIcon ? ( + <CaretUp style={{ color: 'var(--successColor)' }} /> + ) : ( + <CaretDown style={{ color: 'var(--errorColor)' }} /> + )} + {record.isPercent + ? 
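// formatChartData above turns N + 1 histogram bin edges into N interval labels
// by flooring each edge to one decimal and pairing it with its successor. With
// hypothetical edges:
import { floor } from 'lodash-es';

const sketchEdges = [0, 0.25, 0.5, 0.75, 1];
const sketchLabels = sketchEdges
  .map((v) => floor(v, 1)) // [0, 0.2, 0.5, 0.7, 1]
  .reduce((acc, curr, index, arr) => {
    if (arr[index + 1]) {
      acc.push(`[${curr}, ${arr[index + 1]}]`);
    }
    return acc;
  }, [] as string[]);

// sketchLabels -> ['[0, 0.2]', '[0.2, 0.5]', '[0.5, 0.7]', '[0.7, 1]']
// Note the truthiness check `if (arr[index + 1])` would also drop a pair whose
// right edge is exactly 0; the upstream histogram presumably never produces one.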
record.diff + : floor((record.diff / (record.baseValue || 1)) * 100, 2) + '%'} + </GridRow> + ); + }, + }, + ]; + }, [compareWithBase]); + return ( + <Drawer + maskStyle={{ backdropFilter: 'blur(3px)' }} + width="520px" + onCancel={closeDrawer} + headerStyle={{ display: 'none' }} + footer={null} + focusLock={true} + maskClosable={true} + {...props} + > + <div id={FEATURE_DRAWER_ID} style={{ height: '100%' }}> + <Row + id={FEATURE_DRAWER_ID} + className={styles.drawer_header} + align="center" + justify="space-between" + > + <Row align="center"> + <h3 className={styles.feature_key}>{featureKey}</h3> + </Row> + <GridRow gap="10"> + <Button size="small" icon={<Close />} onClick={closeDrawer} /> + </GridRow> + </Row> + + <Table + className={`${styles.info_table} custom-table`} + rowKey={'key'} + size="small" + columns={columns} + pagination={false} + data={data ?? []} + loading={loading} + /> + {histData && ( + <div className={styles.chart_container}> + <Bar data={histData} options={barChartOptions} /> + </div> + )} + </div> + </Drawer> + ); + + function closeDrawer() { + toggleVisible && toggleVisible(false); + onClose && onClose(); + } +}; + +export default FeatureInfoDrawer; diff --git a/web_console_v2/client/src/components/DataPreview/StructDataTable/hooks.tsx b/web_console_v2/client/src/components/DataPreview/StructDataTable/hooks.tsx new file mode 100644 index 000000000..bbf409e2f --- /dev/null +++ b/web_console_v2/client/src/components/DataPreview/StructDataTable/hooks.tsx @@ -0,0 +1,31 @@ +/* istanbul ignore file */ + +import { useEffect } from 'react'; +import { STRUCT_DATA_TABLE_ID } from '.'; +import { FEATURE_DRAWER_ID } from './FeatureInfoDrawer'; + +export function useFeatureDrawerClickOutside(params: { + setActiveFeatKey: (key?: string) => void; + toggleDrawerVisible: (val: boolean) => void; + allowlistElementIds?: string[]; +}) { + useEffect(() => { + document.addEventListener('click', handler); + + function handler(evt: MouseEvent) { + const target = evt.target as HTMLElement; + if ( + !document.getElementById(STRUCT_DATA_TABLE_ID)?.contains(target) && + !document.getElementById(FEATURE_DRAWER_ID)?.contains(target) + ) { + params.setActiveFeatKey(undefined); + params.toggleDrawerVisible(false); + } + } + + return () => { + document.removeEventListener('click', handler); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [params.setActiveFeatKey, params.toggleDrawerVisible]); +} diff --git a/web_console_v2/client/src/components/DataPreview/StructDataTable/index.tsx b/web_console_v2/client/src/components/DataPreview/StructDataTable/index.tsx new file mode 100644 index 000000000..631889669 --- /dev/null +++ b/web_console_v2/client/src/components/DataPreview/StructDataTable/index.tsx @@ -0,0 +1,419 @@ +/* istanbul ignore file */ + +import React, { FC, memo, useRef, useMemo } from 'react'; +import styled from 'styled-components'; +import { useTranslation } from 'react-i18next'; +import { isEmpty, isNil, floor } from 'lodash-es'; + +import { Table, Spin, Checkbox } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import NoResult from 'components/NoResult'; + +import { PreviewData, ValueType } from 'typings/dataset'; +import { TABLE_COL_WIDTH } from 'shared/constants'; + +const Container = styled.div` + ${(props: any) => + props.activeIndex === 1 + ? 
'' + : `.arco-table-th:nth-child(${props.activeIndex}), .arco-table-td:nth-child(${props.activeIndex}) { + --activeBackground: rgba(22, 100, 255, 0.06); + --summaryBackground: transparent; + }`} +`; + +const PreviewTable = styled(Table)` + border: 1px solid var(--lineColor); + border-radius: 4px; + width: auto; + + .arco-table-th-item, + .arco-table-td { + padding: 0; + } + + .arco-table-col-fixed-left:first-of-type { + text-align: center; + background: var(--color-fill-2); + } + + .arco-table-cell { + word-break: break-word; + } +`; +const SummaryCol = styled.div` + padding: 8px; + font-size: 12px; + line-height: 22px; + white-space: nowrap; + background-color: var(--summaryBackground, #ffffff) !important; +`; +const SummaryLabelCol = styled(SummaryCol)` + margin-left: -40px; + padding-left: 30px; +`; +const SummaryColCell = styled.div``; +const DataTypeCell = styled(SummaryColCell)` + color: var(--textColorSecondary); +`; +const ClickableHeader = styled.label` + display: flex; + padding: 8px; + font-size: 12px; + text-decoration: underline; + white-space: nowrap; + cursor: pointer; + border-right: 1px solid var(--lineColor); + border-bottom: 2px solid var(--lineColor); + background-color: #ffffff; + + &[data-is-avtive='true'] { + background-color: var(--activeBackground) !important; + box-shadow: 0 2.5px 0 0 var(--primaryColor) inset; + } + + &:hover { + color: var(--primaryColor); + } +`; + +const TableCell = styled.label` + display: flex; + padding: 12px 8px; + font-size: 12px; + &[data-is-avtive='true'] { + background-color: var(--activeBackground); + } +`; + +const SummaryTd = styled.td` + background-color: #fafafa; + text-align: center; +`; +const CheckboxConatiner = styled.div` + margin-left: 5px; + transform: scale(0.875); +`; + +export const STRUCT_DATA_TABLE_ID = 'struct_data_table'; + +/** + * Format origin value to display value + */ +export function formatOriginValue(originValue: string | number, type?: ValueType) { + if (type === 'string') { + return originValue; + } + /** + * origin value / display value + * missing value => "null" + * string "null" => "null" + * string "nan" => "NaN" + */ + let displayValue: string | number; + if (originValue == null || originValue === 'null') { + displayValue = 'null'; + } else if (originValue === 'nan') { + displayValue = 'NaN'; + } else { + displayValue = parseFloat(Number(originValue).toFixed(3)); + } + return displayValue; +} + +const FeatureMetric: FC<{ + type?: string; + missingCount?: string | number; + baseMissingCount?: string | number; + count?: string | number; + baseCount?: string | number; +}> = memo(({ type, missingCount, baseMissingCount, count, baseCount }) => { + let tempMissingCount = 0; + let tempAllCount = 0; + let missingRate = 'N/A'; + + let tempBaseMissingCount = 0; + let tempBaseAllCount = 0; + let baseMissingRate = 'N/A'; + + if (missingCount !== 'N/A') { + tempMissingCount = Number(missingCount) || 0; + tempAllCount = tempMissingCount + (Number(count) || 0); + missingRate = floor((tempMissingCount / tempAllCount) * 100, 2) + '%'; + } + if (baseMissingCount !== 'N/A') { + tempBaseMissingCount = Number(baseMissingCount) || 0; + tempBaseAllCount = tempBaseMissingCount + (Number(baseCount) || 0); + baseMissingRate = floor((tempBaseMissingCount / tempBaseAllCount) * 100, 2) + '%'; + } + + return ( + <SummaryCol> + <DataTypeCell>{type}</DataTypeCell> + {baseMissingCount && <SummaryColCell>{baseMissingRate}</SummaryColCell>} + <SummaryColCell>{missingRate}</SummaryColCell> + </SummaryCol> + ); +}); + +const CustomRow: 
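// The display rules of formatOriginValue above, exercised with representative
// inputs. The function body is copied from this file; ValueType is narrowed to
// a stand-in so the snippet is self-contained.
type SketchValueType = 'string' | 'int64' | 'float64';

function sketchFormatOriginValue(originValue: string | number, type?: SketchValueType) {
  if (type === 'string') {
    return originValue;
  }
  let displayValue: string | number;
  if (originValue == null || originValue === 'null') {
    displayValue = 'null'; // missing values render as the literal string 'null'
  } else if (originValue === 'nan') {
    displayValue = 'NaN';
  } else {
    displayValue = parseFloat(Number(originValue).toFixed(3)); // round to 3 decimals, drop trailing zeros
  }
  return displayValue;
}

sketchFormatOriginValue('3.14159'); // 3.142
sketchFormatOriginValue('nan'); // 'NaN'
sketchFormatOriginValue('null'); // 'null'
sketchFormatOriginValue('abc', 'string'); // 'abc'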
React.FC<{ index: number; maxCount: number; featuresKeysCount: number }> = ( + props, +) => { + const { index, maxCount, featuresKeysCount, children, ...restProps } = props; + + // Render summary row + if (index >= maxCount) { + return ( + <tr {...restProps}> + <SummaryTd colSpan={featuresKeysCount + 1}> + <span>以上为取 {maxCount.toLocaleString('en')} 条样本数据</span> + </SummaryTd> + </tr> + ); + } + + return <tr children={children} {...restProps} />; +}; + +type Props = { + data?: PreviewData; + loading?: boolean; + compareWithBase?: boolean; + baseData?: PreviewData; + datasetName?: string; + activeKey?: string; + checkable?: boolean; + checkedKeys?: string[]; + noResultText?: string; + isError?: boolean; + onCheckedChange?: (keys: string[]) => void; + onActiveFeatChange?: (k: string) => void; +}; + +const StructDataPreviewTable: FC<Props> = ({ + datasetName = '', + loading, + data, + baseData, + activeKey, + onActiveFeatChange, + compareWithBase, + checkable, + checkedKeys = [], + onCheckedChange, + noResultText, + isError, +}) => { + const dom = useRef<HTMLDivElement>(); + const { t } = useTranslation(); + + const [featuresKeys, featuresTypes] = useMemo(() => { + const featuresKeys: string[] = []; + const featuresTypes: ValueType[] = []; + + data?.dtypes?.forEach((item) => { + featuresKeys.push(item.key); + featuresTypes.push(item.value); + }); + + return [featuresKeys, featuresTypes]; + }, [data]); + + if (isError) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + + if (loading) { + return ( + <GridRow style={{ height: '100%' }} justify="center"> + <Spin loading={true} /> + </GridRow> + ); + } + if (isNil(data)) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + + const previewData = data; + + const metrics = previewData.metrics ?? {}; + const count = previewData.count ?? 0; + const sampleCount = previewData.sample?.length ?? 
0; + + if (noResultText) { + return <NoResult text={noResultText} />; + } + + if (!previewData.sample || isEmpty(previewData.sample)) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + + if (!previewData.metrics || isEmpty(previewData.metrics)) { + return <NoResult text={t('dataset.tip_state_error')} />; + } + + const list = previewData.sample.map((item) => { + return item.reduce((ret, curr, index) => { + ret[featuresKeys[index] as any] = formatOriginValue(curr, featuresTypes[index]); + return ret; + }, {} as { [key: string]: string | number }); + }); + + const isShowSummaryRow = list.length > 0 && list.length < count; + + // Add summary row + if (isShowSummaryRow) { + list.push({ + id: '__summary__', + }); + } + + const activeKeyIndex = featuresKeys.indexOf(activeKey!); + const headerCellStyle = { + padding: 0, + }; + const columns: any[] = [ + { + fixed: 'left', + children: [ + { + title: ( + <SummaryLabelCol className="head-col"> + <DataTypeCell>类型</DataTypeCell> + {compareWithBase && <SummaryColCell>原始数据集缺失率%</SummaryColCell>} + <SummaryColCell>{compareWithBase && datasetName}缺失率%</SummaryColCell> + </SummaryLabelCol> + ), + fixed: 'left', + width: TABLE_COL_WIDTH.THIN, + dataIndex: 'order', + key: 'order', + headerCellStyle: { + ...headerCellStyle, + backgroundColor: '#fff', + }, + render: (_: any, record: any, index: number) => { + return <span>{index + 1}</span>; + }, + }, + ], + headerCellStyle: { + ...headerCellStyle, + backgroundColor: '#fff', + backgroundImage: `url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAwAAAAMCAYAAABWdVznAAAANklEQVR4AZXKBQ3AQBTA0Lke8ybimz2sgjZp8A2m6Y9T4eWLrHBbYBaYBWaBWWAWmAVmgVlgLvftw6nDSDtTAAAAAElFTkSuQmCC')`, + backgroundSize: '8px 8px', + backgroundPosition: 'right bottom', + backgroundRepeat: 'no-repeat', + }, + }, + ...featuresKeys.map((featKey, index) => ({ + key: featKey, + headerCellStyle, + title: ( + <ClickableHeader + data-is-avtive={activeKey === featKey} + onClick={() => onFeatColHeaderClick(featKey)} + > + {featKey} + {checkable && ( + <CheckboxConatiner> + <Checkbox + checked={checkedKeys.includes(featKey)} + onChange={(checked) => onFeatChecked(featKey, checked)} + /> + </CheckboxConatiner> + )} + </ClickableHeader> + ), + children: [ + { + width: Math.max(featKey.length, 7) * 10 + (checkable ? 20 : 0), + headerCellStyle, + title: ( + <FeatureMetric + type={featuresTypes[index]} + missingCount={metrics[featKey]?.missing_count ?? 0} + baseMissingCount={ + compareWithBase + ? (baseData?.metrics && baseData.metrics[featKey]?.missing_count) ?? 'N/A' + : undefined + } + baseCount={ + compareWithBase + ? (baseData?.metrics && baseData.metrics[featKey]?.count) ?? 'N/A' + : undefined + } + count={metrics[featKey]?.count ?? 0} + /> + ), + key: featKey, + dataIndex: featKey, + render: (value: any, record: any, index: number) => { + return renderColumn(value, index, activeKey === featKey); + }, + }, + ], + })), + ]; + + return ( + <Container + id={STRUCT_DATA_TABLE_ID} + ref={(dom as unknown) as any} + {...{ activeIndex: activeKeyIndex + 2 }} + > + <PreviewTable + data={list} + columns={columns} + size="small" + pagination={false} + scroll={{ + x: 'max-content', + y: window.innerHeight - 490, + }} + components={{ + body: { + row: CustomRow, + }, + }} + onRow={(record, index) => { + return { + index, + // Set 999999, in order to don't show summary row + maxCount: isShowSummaryRow ? 
sampleCount : 999999, + featuresKeysCount: featuresKeys.length, + } as any; + }} + /> + </Container> + ); + + function renderColumn(value: string, index: number, isActive: boolean) { + const obj = { + children: <TableCell data-is-avtive={isActive}>{value}</TableCell>, + props: {} as any, + }; + // Last column of each row consist the summary row on the bottom + if (isShowSummaryRow && index === sampleCount) { + obj.props.colSpan = 0; + } + return obj; + } + + function onFeatColHeaderClick(featKey: string) { + onActiveFeatChange?.(featKey); + } + function onFeatChecked(key: string, checked: boolean) { + if (!checkable) return; + + const nextCheckedKeys = [...checkedKeys]; + if (checked) { + nextCheckedKeys.push(key); + } else { + nextCheckedKeys.splice(nextCheckedKeys.indexOf(key), 1); + } + onCheckedChange?.(nextCheckedKeys); + } +}; + +export default StructDataPreviewTable; diff --git a/web_console_v2/client/src/components/DataSourceSelect/index.module.less b/web_console_v2/client/src/components/DataSourceSelect/index.module.less new file mode 100644 index 000000000..f65c9a84b --- /dev/null +++ b/web_console_v2/client/src/components/DataSourceSelect/index.module.less @@ -0,0 +1,9 @@ +.data_source_select{ + width: 100%; +} +.data_source_select_option_text{ + height: 20px; + line-height: 20px; + color: #86909C; + font-size: 12px; +} diff --git a/web_console_v2/client/src/components/DataSourceSelect/index.tsx b/web_console_v2/client/src/components/DataSourceSelect/index.tsx new file mode 100644 index 000000000..290d2ba6f --- /dev/null +++ b/web_console_v2/client/src/components/DataSourceSelect/index.tsx @@ -0,0 +1,121 @@ +/* istanbul ignore file */ +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useTranslation } from 'react-i18next'; +import { fetchDataSourceList } from 'services/dataset'; +import { Select, Grid, Tag, Space } from '@arco-design/web-react'; +import { useGetCurrentProjectId } from 'hooks'; + +import { SelectProps } from '@arco-design/web-react/es/Select'; +import { OptionInfo } from '@arco-design/web-react/es/Select/interface'; +import { DataSource, DatasetType } from 'typings/dataset'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import styled from './index.module.less'; + +const Row = Grid.Row; +const Col = Grid.Col; + +type Props = { + /** extra API query params */ + queryParams?: object; + valueKey?: 'id' | 'uuid'; +} & SelectProps; + +function renderOption(data: DataSource) { + const isStream = data.dataset_type === DatasetType.STREAMING; + let dataDescText = ''; + switch (data.dataset_format) { + case 'TABULAR': + dataDescText = `结构化数据${data.store_format ? '/' + data.store_format : ''}`; + break; + case 'NONE_STRUCTURED': + dataDescText = '非结构化数据'; + break; + case 'IMAGE': + dataDescText = '图片'; + break; + default: + dataDescText = '未知'; + break; + } + return ( + <div> + <Row> + <Col span={18}> + <span>{data.name}</span> + </Col> + <Col span={6}>{isStream ? 
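// The summary row in StructDataPreviewTable above is assembled from three
// cooperating pieces: a synthetic `__summary__` record is appended to the data,
// CustomRow replaces that row with a single <td> spanning every column, and
// renderColumn sets colSpan = 0 on the row's real cells so they are not painted
// twice. A reduced sketch of the colSpan bookkeeping with plain data (no Arco
// table):
type SketchCell = { content: string; colSpan: number };

function buildRenderedRow(values: string[], isSummaryRow: boolean, summaryText: string): SketchCell[] {
  if (isSummaryRow) {
    // One cell spans every column; the remaining cells collapse to colSpan 0.
    return [
      { content: summaryText, colSpan: values.length },
      ...values.slice(1).map(() => ({ content: '', colSpan: 0 })),
    ];
  }
  return values.map((content) => ({ content, colSpan: 1 }));
}

buildRenderedRow(['a', 'b', 'c'], true, 'showing 3 of 100 samples');
// -> [{ content: 'showing 3 of 100 samples', colSpan: 3 },
//     { content: '', colSpan: 0 }, { content: '', colSpan: 0 }]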
<Tag color="blue">增量</Tag> : <></>}</Col> + </Row> + <div className={styled.data_source_select_option_text}>{dataDescText}</div> + </div> + ); +} + +export const DataSourceSelect: FC<Props> = ({ + value, + valueKey = 'id', + onChange, + queryParams, + ...props +}) => { + const { t } = useTranslation(); + const projectId = useGetCurrentProjectId(); + const [isShowTip, setShowTip] = useState(false); + const query = useQuery( + ['fetchDataSourceList', projectId], + () => fetchDataSourceList({ projectId: projectId, ...queryParams }), + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const optionList = useMemo(() => { + if (!query.data) { + return []; + } + + return query.data.data.map((item) => ({ + label: renderOption(item), + value: item[valueKey], + extra: item, + })); + }, [query.data, valueKey]); + + const isControlled = typeof value !== 'undefined'; + const valueProps = isControlled ? { value } : {}; + + return ( + <Space direction="vertical" className={styled.data_source_select}> + <Select + placeholder={t('placeholder_select')} + onChange={onSelectChange} + loading={query.isFetching} + showSearch + allowClear + filterOption={(inputValue, option) => { + return option.props.extra.name.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0; + }} + options={optionList} + {...valueProps} + {...props} + /> + {isShowTip && ( + <TitleWithIcon + title="增量数据将检查目录结构,并批量导入。导入后该数据集只能用于求交任务" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + )} + </Space> + ); + + function onSelectChange(id: string, options: OptionInfo | OptionInfo[]) { + setShowTip(options && (options as any).extra.dataset_type === DatasetType.STREAMING); + onChange?.(id, options); + } +}; + +export default DataSourceSelect; diff --git a/web_console_v2/client/src/components/DatasetExportModal/index.module.less b/web_console_v2/client/src/components/DatasetExportModal/index.module.less new file mode 100644 index 000000000..3dd37d7f2 --- /dev/null +++ b/web_console_v2/client/src/components/DatasetExportModal/index.module.less @@ -0,0 +1,4 @@ +.footer_grid_row{ + padding-top: 15px; + border-top: 1px solid var(--backgroundColorGray); +} diff --git a/web_console_v2/client/src/components/DatasetExportModal/index.tsx b/web_console_v2/client/src/components/DatasetExportModal/index.tsx new file mode 100644 index 000000000..2c3474f1d --- /dev/null +++ b/web_console_v2/client/src/components/DatasetExportModal/index.tsx @@ -0,0 +1,84 @@ +import React, { FC, useState } from 'react'; +import { to } from 'shared/helpers'; +import { exportDataset } from 'services/dataset'; +import { Modal, Form, Input, Button, Message } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; +import { ExportDataset } from 'typings/dataset'; +import { removeFalsy } from 'shared/object'; +import styled from './index.module.less'; +export interface Props { + visible: boolean; + id?: ID; + batchId?: ID; + onSuccess?: (datasetId: ID, datasetJobId: ID) => void; + onFail?: () => void; + onCancel?: () => void; +} + +interface FormData { + export_path: string; +} + +const ExportModal: FC<Props> = ({ id, batchId, visible, onSuccess, onFail, onCancel }) => { + const [isLoading, setIsLoading] = useState(false); + + const [formInstance] = Form.useForm<any>(); + + return ( + <Modal + title="导出数据集" + visible={visible} + maskClosable={false} + afterClose={afterClose} + onCancel={onCancel} + footer={null} + > + <Form layout="vertical" form={formInstance} 
onSubmit={onSubmit}> + <Form.Item field="export_path" label="导出路径" rules={[{ required: true }]}> + <Input /> + </Form.Item> + + <Form.Item wrapperCol={{ span: 23 }} style={{ marginBottom: 0 }}> + <GridRow className={styled.footer_grid_row} justify="end" gap="12"> + <ButtonWithPopconfirm buttonText="取消" onConfirm={onCancel} /> + <Button type="primary" htmlType="submit" loading={isLoading}> + 确认 + </Button> + </GridRow> + </Form.Item> + </Form> + </Modal> + ); + + async function onSubmit(values: FormData) { + setIsLoading(true); + const [data, err] = await to( + exportDataset( + id!, + removeFalsy({ + batch_id: batchId, + export_path: values.export_path, + }), + ), + ); + if (err) { + setIsLoading(false); + onFail?.(); + Message.error(err.message || '导出失败'); + return; + } + + Message.success('导出成功'); + setIsLoading(false); + const { dataset_job_id, export_dataset_id } = data?.data || ({} as ExportDataset); + onSuccess?.(export_dataset_id, dataset_job_id); + } + + function afterClose() { + // Clear all fields + formInstance.resetFields(); + } +}; + +export default ExportModal; diff --git a/web_console_v2/client/src/components/DatasetJobsType/index.tsx b/web_console_v2/client/src/components/DatasetJobsType/index.tsx new file mode 100644 index 000000000..9de38cbff --- /dev/null +++ b/web_console_v2/client/src/components/DatasetJobsType/index.tsx @@ -0,0 +1,65 @@ +import React, { FC, useMemo } from 'react'; +import styled from 'styled-components'; +import { Tag, TagProps } from '@arco-design/web-react'; +import { useTranslation } from 'react-i18next'; +import { DataJobBackEndType } from 'typings/dataset'; + +type Props = { + style?: React.CSSProperties; + type: DataJobBackEndType; + tagProps?: Partial<TagProps>; +}; + +const Container = styled.div` + display: inline-block; +`; +const StyledModalTag = styled(Tag)` + margin-right: 4px; + font-size: 12px; + vertical-align: top; +`; + +const DatasetJobsType: FC<Props> = ({ style = {}, type, tagProps = {} }) => { + const { t } = useTranslation(); + const [taskType, tagColor]: string[] = useMemo(() => { + if (!type) { + return ['jobType error: type empty']; + } + switch (type) { + case DataJobBackEndType.DATA_JOIN: + case DataJobBackEndType.RSA_PSI_DATA_JOIN: + case DataJobBackEndType.OT_PSI_DATA_JOIN: + case DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN: + case DataJobBackEndType.LIGHT_CLIENT_OT_PSI_DATA_JOIN: + case DataJobBackEndType.HASH_DATA_JOIN: + return [t('dataset.label_data_job_type_create'), 'arcoblue']; + case DataJobBackEndType.DATA_ALIGNMENT: + return [t('dataset.label_data_job_type_alignment'), 'arcoblue']; + case DataJobBackEndType.IMPORT_SOURCE: + return [t('dataset.label_data_job_type_import')]; + case DataJobBackEndType.EXPORT: + return [t('dataset.label_data_job_type_export')]; + case DataJobBackEndType.ANALYZER: + return ['数据探查']; + default: + return ['jobType error: unknown type']; + } + }, [t, type]); + + const mergedTagStyle = useMemo<React.CSSProperties>(() => { + return { + fontWeight: 'normal', + ...(tagProps.style ?? 
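`onSubmit` above leans on the `to()` helper from 'shared/helpers' to flatten a promise into a `[data, err]` tuple, so the failure branch can stop the spinner and keep the modal open without a try/catch. That helper's source is not in this diff; a minimal sketch of the assumed contract:

```ts
// Assumed shape of `to()` from 'shared/helpers' (not shown in this diff):
// resolve to [data, null] on success and [undefined, err] on failure.
export async function to<T, E = Error>(promise: Promise<T>): Promise<[T, E | null]> {
  try {
    const data = await promise;
    return [data, null];
  } catch (err) {
    return [(undefined as unknown) as T, err as E];
  }
}

// Usage mirroring ExportModal.onSubmit:
//   const [data, err] = await to(exportDataset(id, payload));
//   if (err) { /* show error message, keep the modal open */ }
```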
{}), + }; + }, [tagProps.style]); + + return ( + <Container style={style}> + <StyledModalTag {...{ ...tagProps, color: tagColor }} style={mergedTagStyle}> + {taskType} + </StyledModalTag> + </Container> + ); +}; + +export default DatasetJobsType; diff --git a/web_console_v2/client/src/components/DatasetPublishAndRevokeModal/index.tsx b/web_console_v2/client/src/components/DatasetPublishAndRevokeModal/index.tsx new file mode 100644 index 000000000..264713b03 --- /dev/null +++ b/web_console_v2/client/src/components/DatasetPublishAndRevokeModal/index.tsx @@ -0,0 +1,211 @@ +import React, { FC, useMemo } from 'react'; +import styled from 'styled-components'; +import { useTranslation } from 'react-i18next'; + +import { postPublishDataset, unpublishDataset } from 'services/dataset'; + +import { Modal, Form, Message, InputNumber, Space } from '@arco-design/web-react'; +import datasetPublishBg from 'assets/images/dataset-publish-bg.png'; +import { Dataset } from 'typings/dataset'; +import creditsIcon from 'assets/icons/credits-icon.svg'; +import i18n from 'i18n'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { CREDITS_LIMITS } from 'views/Datasets/shared'; +import { useGetAppFlagValue } from 'hooks'; +import { FlagKey } from 'typings/flag'; + +const ContainerModal = styled(Modal)<{ $isPublish: boolean }>` + ${(prop) => + prop.$isPublish + ? ` + .arco-modal-content{ + padding: 0; + } + ` + : ''} + .arco-modal-header { + border-bottom: 0; + } + .arco-modal-footer { + border-top: 0; + text-align: center; + } +`; + +const StyledPublishContent = styled.div` + height: 88px; + display: flex; + flex-direction: column; + justify-content: space-around; + align-items: center; + background-image: url(${datasetPublishBg}); + background-size: cover; +`; + +const StyledForm = styled(Form)` + width: 140px; + margin: 16px auto 0; +` as typeof Form; + +const StyledInputNumber = styled(InputNumber)` + width: 140px; +`; + +const StyledCreditIcon = styled.img` + display: inline-block; +`; + +const StyledSpace = styled(Space)` + width: 100%; + height: 32px; + background: #e8f4ff; + justify-content: center; +`; + +const StyleTitleSpace = styled(Space)` + width: 100%; + justify-content: center; +`; + +enum OPERATION_TYPE { + PUBLISH = 'publish', + REVOKE = 'revoke', +} + +export interface Props { + visible: boolean; + dataset?: Dataset; + onSuccess?: () => void; + onFail?: () => void; + onCancel?: () => void; +} + +interface FormData { + value: number; +} + +const DatasetPublishAndRevokeModal: FC<Props> = ({ + dataset, + visible, + onSuccess, + onFail, + onCancel, +}) => { + const bcs_support_enabled = useGetAppFlagValue(FlagKey.BCS_SUPPORT_ENABLED); + const { t } = useTranslation(); + const formData: FormData = { + value: !dataset?.value ? 100 : dataset?.value, + }; + const [formInstance] = Form.useForm<FormData>(); + const willUnpublished = useMemo(() => { + if (!dataset?.is_published) { + return false; + } + return dataset.is_published === true; + }, [dataset]); + + return ( + <ContainerModal + $isPublish={!willUnpublished} + closable={false} + visible={visible} + onConfirm={handleOnConfirm} + onCancel={handleOnCancel} + maskClosable={false} + okText={t(willUnpublished ? OPERATION_TYPE.REVOKE : OPERATION_TYPE.PUBLISH)} + cancelText={t('cancel')} + okButtonProps={{ + status: willUnpublished ? 'danger' : 'default', + }} + title={willUnpublished ? renderTitle() : null} + > + {willUnpublished ? 
renderRevoke() : renderPublish()} + </ContainerModal> + ); + + async function handleOnConfirm() { + if (!dataset) { + return; + } + try { + if (willUnpublished) { + await unpublishDataset(dataset.id); + } else { + const value = formInstance.getFieldValue('value'); + await postPublishDataset(dataset.id, { + value, + }); + } + Message.success(t(willUnpublished ? 'message_revoke_success' : 'message_publish_success')); + formInstance.resetFields('value'); + onSuccess?.(); + } catch (e) { + Message.error(t(willUnpublished ? 'message_revoke_failed' : 'message_publish_failed')); + formInstance.resetFields('value'); + onFail?.(); + } + } + + function handleOnCancel() { + formInstance.resetFields('value'); + onCancel?.(); + } + + function renderPublish() { + return ( + <> + <StyledPublishContent> + <span> + {t(`dataset.msg_publish_confirm`, { + name: dataset?.name, + })} + </span> + <span>{t('dataset.tips_publish')}</span> + </StyledPublishContent> + {/*temporary solution for no metadata*/} + {!!bcs_support_enabled && renderFirstPublishTips(dataset?.value || 0)} + {!!bcs_support_enabled && ( + <StyledForm initialValues={formData} layout="vertical" form={formInstance}> + <Form.Item field="value" label={t('dataset.label_use_price')}> + <StyledInputNumber + min={CREDITS_LIMITS.MIN} + max={CREDITS_LIMITS.MAX} + suffix={t('dataset.label_publish_credits')} + step={1} + /> + </Form.Item> + </StyledForm> + )} + </> + ); + } + + function renderRevoke() { + return <StyleTitleSpace>{t(`dataset.msg_unpublish_tip`)}</StyleTitleSpace>; + } + + function renderTitle() { + return ( + <StyleTitleSpace> + <IconInfoCircle style={{ color: '#FA9600' }} /> + {t(`dataset.msg_unpublish_confirm`, { + name: dataset?.name, + })} + </StyleTitleSpace> + ); + } + + function renderFirstPublishTips(price: number = 0) { + if (price > 0) { + return null; + } + return ( + <StyledSpace align="center"> + <StyledCreditIcon src={creditsIcon} /> + {i18n.t('dataset.tips_first_publish')} + </StyledSpace> + ); + } +}; + +export default DatasetPublishAndRevokeModal; diff --git a/web_console_v2/client/src/components/DatasetSelect/index.tsx b/web_console_v2/client/src/components/DatasetSelect/index.tsx index 4417148d2..02a005c0d 100644 --- a/web_console_v2/client/src/components/DatasetSelect/index.tsx +++ b/web_console_v2/client/src/components/DatasetSelect/index.tsx @@ -1,56 +1,375 @@ -import React, { FC } from 'react'; +/* istanbul ignore file */ + +import React, { FC, useEffect, useRef, useState } from 'react'; import styled from 'styled-components'; -import { Select } from 'antd'; +import { Select, Grid, Tag, Statistic } from '@arco-design/web-react'; import { useQuery } from 'react-query'; import { DATASET_LIST_QUERY_KEY } from 'views/Datasets/DatasetList'; -import { fetchDatasetList } from 'services/dataset'; +import { fetchDatasetList, fetchParticipantDatasetList } from 'services/dataset'; import { useTranslation } from 'react-i18next'; import { Link } from 'react-router-dom'; +import { + Dataset, + DatasetDataType, + DatasetDataTypeText, + DatasetKindBackEndType, + DatasetStateFront, + ParticipantDataset, + DatasetType__archived, +} from 'typings/dataset'; +import { useRecoilValue } from 'recoil'; +import { projectState } from 'stores/project'; +import { SelectProps } from '@arco-design/web-react/es/Select'; +import { PageMeta } from 'typings/app'; +import { debounce } from 'lodash-es'; +import { FILTER_OPERATOR_MAPPER, filterExpressionGenerator } from 'views/Datasets/shared'; +import { humanFileSize } from 'shared/file'; +import { 
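`DatasetPublishAndRevokeModal` derives its entire mode (publish vs. revoke, title, button status) from `dataset.is_published`, so a caller only controls visibility and reacts to the outcome. A usage sketch (assumed consumer code, not part of this diff):

```tsx
import React, { useState } from 'react';
import DatasetPublishAndRevokeModal from 'components/DatasetPublishAndRevokeModal';
import { Dataset } from 'typings/dataset';

const Demo: React.FC<{ dataset: Dataset }> = ({ dataset }) => {
  const [visible, setVisible] = useState(false);

  return (
    <>
      <button onClick={() => setVisible(true)}>
        {dataset.is_published ? '撤销发布' : '发布'}
      </button>
      <DatasetPublishAndRevokeModal
        visible={visible}
        dataset={dataset}
        onSuccess={() => setVisible(false)} // a real caller would also refetch the list
        onFail={() => setVisible(false)}
        onCancel={() => setVisible(false)}
      />
    </>
  );
};

export default Demo;
```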
FilterOp } from 'typings/filter'; + +const Row = Grid.Row; +const Col = Grid.Col; const EmptyPlaceholder = styled.div` line-height: 32px; `; -type Props = { - value?: string; - onChange?: (val: string) => void; +const StyledOptionContainer = styled.div``; +interface ILazyLoad { + enable: boolean; + page_size?: number; +} + +interface IFilterOption { + dataset_type?: DatasetType__archived; + cron_interval?: Array<'DAYS' | 'HOURS'>; + dataset_format?: DatasetDataType[]; + dataset_kind?: DatasetKindBackEndType[]; + participant_id?: ID; +} +interface Props extends Omit<SelectProps, 'value'> { + /** Accept Dataset */ + value?: Dataset | ParticipantDataset; + /** Accept Dataset */ + onChange?: (val?: Dataset | ParticipantDataset) => void; + /** Is participant dataset */ + isParticipant?: boolean; + /** extra API query params */ + queryParams?: object; + /** open pagination and fetch list with page params */ + lazyLoad?: ILazyLoad; + /** placeholder change */ + placeholder?: string; + /** */ + filterOptions?: IFilterOption; + /** is create button visible */ + isCreateVisible?: boolean; +} + +const FILTER_OPERATOR_MAPPER_List = { + ...FILTER_OPERATOR_MAPPER, + dataset_kind: FilterOp.IN, }; -const DatasetSelect: FC<Props> = ({ value, onChange, ...props }) => { - const { t } = useTranslation(); - const query = useQuery([DATASET_LIST_QUERY_KEY, ''], () => fetchDatasetList(), { - retry: 2, +export function renderOption(data: ParticipantDataset | Dataset, isParticipant: boolean) { + let formatText = DatasetDataTypeText.STRUCT; + const format: DatasetDataType = isParticipant + ? (data as ParticipantDataset).format + : (data as Dataset).dataset_format; + + if (format === DatasetDataType.STRUCT) { + formatText = DatasetDataTypeText.STRUCT; + } + if (format === DatasetDataType.PICTURE) { + formatText = DatasetDataTypeText.PICTURE; + } + if (format === DatasetDataType.NONE_STRUCTURED) { + formatText = DatasetDataTypeText.NONE_STRUCTURED; + } + + let tagText = ''; + if (data.dataset_kind === DatasetKindBackEndType.RAW) { + tagText = '原始'; + } + if (data.dataset_kind === DatasetKindBackEndType.PROCESSED) { + tagText = '结果'; + } + + return ( + <StyledOptionContainer> + <Row> + <Col span={18}> + <span>{data.name}</span> + </Col> + <Col span={6}>{tagText ? 
<Tag>{tagText}</Tag> : <></>}</Col> + </Row> + <Row> + <Col span={6}>{formatText}</Col> + <Col span={2}> + <span> | </span> + </Col> + <Col span={6}>{humanFileSize(data.file_size)}</Col> + <Col span={2}> + <span> | </span> + </Col> + {Object.prototype.hasOwnProperty.call(data, 'num_example') && ( + <Col span={6}> + 样本量 + { + <Statistic + groupSeparator={true} + styleValue={{ fontSize: '14px', fontWeight: 400 }} + value={(data as Dataset).num_example!} + /> + } + </Col> + )} + </Row> + </StyledOptionContainer> + ); +} + +const DatasetSelect: FC<Props> = ({ + value, + onChange, + queryParams, + isParticipant = false, + placeholder, + lazyLoad = { + enable: false, + page_size: 10, + }, + filterOptions = {}, + isCreateVisible = true, + ...props +}) => { + const { t } = useTranslation(); + const selectedProject = useRecoilValue(projectState); + const [pageInfo, setPageInfo] = useState({ + page: 1, + totalPages: 0, + keyword: '', }); + const [options, setOptions] = useState([] as Array<ParticipantDataset | Dataset>); + const [selectDataset, setSelectDataset] = useState<ParticipantDataset | Dataset>(); + const refCanTriggerLoadMore = useRef(true); + const query = useQuery<{ + data: Array<Dataset | ParticipantDataset>; + page_meta?: PageMeta; + }>( + [ + DATASET_LIST_QUERY_KEY, + selectedProject.current?.id, + isParticipant, + lazyLoad?.enable ? pageInfo.page : null, + lazyLoad?.enable ? pageInfo.keyword : null, + filterOptions?.dataset_type, + filterOptions?.dataset_kind, + filterOptions?.cron_interval, + queryParams, + ], + () => { + const pageParams = lazyLoad?.enable + ? { + page: pageInfo.page, + page_size: lazyLoad.page_size, + } + : {}; + const filter = filterExpressionGenerator( + { + project_id: selectedProject.current?.id, + name: pageInfo.keyword, + is_published: isParticipant ? undefined : true, + dataset_type: filterOptions?.dataset_type, + dataset_format: filterOptions?.dataset_format, + dataset_kind: filterOptions?.dataset_kind, + }, + FILTER_OPERATOR_MAPPER_List, + ); + if (isParticipant) { + return fetchParticipantDatasetList(selectedProject.current?.id!, { + ...queryParams, + ...pageParams, + cron_interval: filterOptions?.cron_interval, + }); + } + return fetchDatasetList({ + filter, + ...queryParams, + ...pageParams, + state_frontend: [DatasetStateFront.SUCCEEDED], + cron_interval: filterOptions?.cron_interval, + }); + }, + { + enabled: Boolean(selectedProject.current), + retry: 2, + refetchOnWindowFocus: false, + onSuccess: (res) => { + setPageInfo((pre) => { + const { page_meta } = res; + return { + ...pre, + page: page_meta?.current_page || pre.page, + totalPages: page_meta?.total_pages || pre.totalPages, + }; + }); + setOptions((pre) => { + const { data } = res; + let addOption = (data ?? 
[]) as Dataset[]; + // 由于目前合作伙伴接口不支持过滤, 需要前端过滤 + if (isParticipant) { + addOption = addOption.filter((item: any) => { + let isShow = true; + if (filterOptions && filterOptions.dataset_type) { + isShow = Boolean(filterOptions.dataset_type === item.dataset_type); + } + if (filterOptions && isShow && filterOptions.dataset_format) { + isShow = filterOptions.dataset_format?.includes(item.format); + } + if (isShow && filterOptions?.participant_id) { + isShow = filterOptions.participant_id === item.participant_id; + } + if (isShow && filterOptions?.dataset_kind) { + isShow = filterOptions?.dataset_kind.includes(item.dataset_kind); + } + return isShow; + }); + } + return pre.concat(addOption); + }); + }, + }, + ); + + useEffect(() => { + setOptions([]); + setPageInfo((pre) => ({ + ...pre, + page: 1, + })); + }, [filterOptions.dataset_type, filterOptions.cron_interval]); + // Empty only if there is no keyword, and the 1st page is requested, and there is no data + const isEmpty = + options.length === 0 && !query.isFetching && !pageInfo.keyword && pageInfo.page === 1; - const isEmpty = !query.isFetching && query.data?.data.length === 0; + const popupScrollHandler = (element: any) => { + if (!lazyLoad?.enable || pageInfo.page >= pageInfo.totalPages) { + return; + } + const { scrollTop, scrollHeight, clientHeight } = element; + const scrollBottom = scrollHeight - (scrollTop + clientHeight); + if (scrollBottom < 10) { + if (!query.isFetching && refCanTriggerLoadMore.current) { + setPageInfo((pre) => ({ + ...pre, + page: pre.page + 1, + })); + refCanTriggerLoadMore.current = false; + } + } else { + refCanTriggerLoadMore.current = true; + } + }; + + const debouncedFetchUser = debounce((inputValue: string) => { + if (!lazyLoad?.enable) { + return; + } + setPageInfo({ + keyword: inputValue, + page: 1, + totalPages: 0, + }); + setOptions([]); + }, 500); return ( <> {isEmpty ? ( <EmptyPlaceholder> - {t('dataset.no_result')} <Link to="/datasets/create">{t('app.go_create')}</Link> + {t('dataset.no_result')}{' '} + {!isParticipant && isCreateVisible && ( + <Link to="/datasets/raw/create">{t('app.go_create')}</Link> + )} </EmptyPlaceholder> ) : ( <Select - value={value || undefined} - placeholder={t('workflow.placeholder_dataset')} + onSearch={debouncedFetchUser} + onPopupScroll={popupScrollHandler} + value={isParticipant ? (value as ParticipantDataset)?.uuid : (value as Dataset)?.id} + placeholder={placeholder || t('placeholder_select')} onChange={onSelectChange} + showSearch + allowClear + filterOption={(inputValue, option) => { + return option.props.extra.name.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0; + }} + loading={query.isFetching} {...props} > - {query.data?.data.map((item) => { - return ( - <Select.Option key={item.id} value={item.path}> - {item.name} - </Select.Option> - ); - })} + {options.map((item) => ( + <Select.Option + key={isParticipant ? (item as ParticipantDataset).uuid : (item as Dataset).id} + value={isParticipant ? (item as ParticipantDataset).uuid : (item as Dataset).id} + extra={item} + > + {renderOption(item, isParticipant)} + </Select.Option> + ))} + {Boolean(selectDataset && selectDataset?.name) && ( + <Select.Option + key={ + isParticipant + ? (selectDataset as ParticipantDataset).uuid + : (selectDataset as Dataset).id + } + value={ + isParticipant + ? 
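The `popupScrollHandler` above gates lazy loading on two conditions: the dropdown is scrolled to within 10px of its bottom, and a ref flag that re-arms only after the user scrolls back up, so each arrival at the bottom requests exactly one page. The same logic, extracted as a hypothetical reusable hook (not part of this PR):

```tsx
import { useRef } from 'react';

export function useLoadMoreOnScroll(loadMore: () => void, isFetching: boolean) {
  const canTrigger = useRef(true);

  return function onPopupScroll(element: HTMLElement) {
    const { scrollTop, scrollHeight, clientHeight } = element;
    const scrollBottom = scrollHeight - (scrollTop + clientHeight);
    if (scrollBottom < 10) {
      // Near the bottom: request the next page once per arrival.
      if (!isFetching && canTrigger.current) {
        loadMore();
        canTrigger.current = false;
      }
    } else {
      // Scrolled away from the bottom: re-arm the trigger.
      canTrigger.current = true;
    }
  };
}
```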
(selectDataset as ParticipantDataset).uuid + : (selectDataset as Dataset).id + } + extra={selectDataset} + > + {renderOption(selectDataset!, isParticipant)} + </Select.Option> + )} </Select> )} </> ); - function onSelectChange(val: string) { - onChange && onChange(val); + function onSelectChange(id: string) { + const target = options.find((item) => { + if (isParticipant) { + return (item as ParticipantDataset).uuid === id; + } + return (item as Dataset).id === id; + }); + setSelectDataset(target); + onChange?.(target); + } +}; + +type PathProps = { + /** Accept dataset path */ + value?: string; + /** Accept dataset path */ + onChange?: (val?: string) => void; +}; + +export const DatasetPathSelect: FC<PathProps> = ({ value, onChange, ...props }) => { + const query = useQuery([DATASET_LIST_QUERY_KEY], () => fetchDatasetList(), { + retry: 2, + }); + + const dataset = query.data?.data.find((item) => item.path === value); + + return ( + <DatasetSelect value={dataset} onChange={onDatasetChange} {...props} isParticipant={false} /> + ); + + function onDatasetChange(item?: Dataset | ParticipantDataset) { + onChange?.((item as Dataset)?.path); } }; diff --git a/web_console_v2/client/src/components/DoubleSelect/index.less b/web_console_v2/client/src/components/DoubleSelect/index.less new file mode 100644 index 000000000..b3c54ea13 --- /dev/null +++ b/web_console_v2/client/src/components/DoubleSelect/index.less @@ -0,0 +1,13 @@ +.algorithm-container{ + position: relative; + .algorithm-select{ + width: 100%; + } + .delete-icon{ + position: absolute; + top: 7px; + right: -30px; + font-size: 18px; + cursor: pointer; + } +} diff --git a/web_console_v2/client/src/components/DoubleSelect/index.tsx b/web_console_v2/client/src/components/DoubleSelect/index.tsx new file mode 100644 index 000000000..b70a2ed83 --- /dev/null +++ b/web_console_v2/client/src/components/DoubleSelect/index.tsx @@ -0,0 +1,780 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo, useState, useRef, useEffect } from 'react'; + +import { useInfiniteQuery, useQuery } from 'react-query'; +import { useTranslation } from 'react-i18next'; + +import { + fetchModelSetList, + fetchModelJobList, + fetchModelJobGroupList, + fetchModelJobList_new, +} from 'services/modelCenter'; +import { fetchProjectList, fetchProjectDetail } from 'services/algorithm'; + +import { Input, Grid, Select } from '@arco-design/web-react'; +import { Delete } from 'components/IconPark'; +import { formatListWithExtra } from 'shared/modelCenter'; +import { + AlgorithmParameter, + AlgorithmProject, + EnumAlgorithmProjectSource, + EnumAlgorithmProjectType, +} from 'typings/algorithm'; +import { useGetCurrentProjectId, usePrevious } from 'hooks'; +import { WorkflowState } from 'typings/workflow'; +import { ModelJobStatus } from 'typings/modelCenter'; + +import './index.less'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { + FILTER_MODEL_JOB_OPERATOR_MAPPER, + FILTER_MODEL_TRAIN_OPERATOR_MAPPER, +} from 'views/ModelCenter/shared'; +import { debounce } from 'lodash-es'; + +const { Row, Col } = Grid; + +export type OptionItem = { + /** Display label */ + label: string | number; + /** Form value */ + value: any; + disabled?: boolean; + /** Extra data */ + extra?: any; +}; + +type Props = { + /** + * Form value + * + * { [leftField]: any,[rightField]: any } + */ + value?: { [key: string]: any }; + onChange?: (val: any) => void; + onLeftSelectChange?: (val: any) => void; + onRightSelectChange?: (val: any) => void; + onDeleteClick?: () => void; 
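`DatasetPathSelect` above is a thin adapter: forms that persist only a dataset `path` string still get the object-valued `DatasetSelect`, with the stored path looked up against the fetched list to re-hydrate the `Dataset`. A usage sketch (assumed consumer code):

```tsx
import React, { useState } from 'react';
import { DatasetPathSelect } from 'components/DatasetSelect';

const Demo: React.FC = () => {
  // Only the path string is kept in state; the adapter resolves it back
  // to a Dataset object for the underlying select.
  const [path, setPath] = useState<string>();

  return <DatasetPathSelect value={path} onChange={setPath} />;
};

export default Demo;
```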
+ leftOnPopupScroll?: (element: any) => void; + rightOnPopupScroll?: (element: any) => void; + onLeftSearch?: (val: string) => void; + onRightSearch?: (val: string) => void; + /** Reset right selector when left selector change */ + isClearRightValueAfterLeftSelectChange?: boolean; + /** Reset both side's value when left side options changed */ + isClearBothAfterLeftOptionsChange?: boolean; + /** Left selector datasource */ + leftOptionList?: OptionItem[]; + /** Right selector datasource */ + rightOptionList?: OptionItem[]; + /** Left selector value field */ + leftField?: string; + /** Right selector value field */ + rightField?: string; + /** Left selector label */ + leftLabel?: string; + /** Right selector label */ + rightLabel?: string; + className?: any; + /** Is show delete icon */ + isShowDelete?: boolean; + disabled?: boolean; + leftDisabled?: boolean; + rightDisabled?: boolean; + /** Container style */ + containerStyle?: React.CSSProperties; + /** Indicate left side data fetching */ + leftLoading?: boolean; + /** Indicate right side data fetching */ + rightLoading?: boolean; +}; + +export type ModelSelectProps = Props & { + /** Cahce map, it's helpful to calc disabled per item */ + modelIdToIsSelectedMap?: { + [key: number]: boolean; + }; + /** Disable linkage */ + isDisabledLinkage?: boolean; +}; +export type ModelGroupSelectProps = Props & { + /** The algorithm type of the job groups */ + type: 'NN_VERTICAL' | 'NN_HORIZONTAL'; + onLeftOptionsEmpty?: () => void; +}; + +export type AlgorithmSelectValue = { + algorithmProjectId?: ID; + algorithmId?: ID; + algorithmUuid?: ID; + config?: AlgorithmParameter[]; + path?: string; +}; +export type AlgorithmSelectProps = Omit<Props, 'leftField' | 'rightField'> & { + value?: AlgorithmSelectValue; + onChange?: (val: AlgorithmSelectValue) => void; + algorithmProjectTypeList?: EnumAlgorithmProjectType[]; + disableFirstOnChange?: boolean; + isParticipant?: boolean; + disableHyperparameters?: boolean; +}; +const DoubleSelect: FC<Props> & { + ModelSelect: FC<ModelSelectProps>; + AlgorithmSelect: FC<AlgorithmSelectProps>; + ModelJobGroupSelect: FC<ModelGroupSelectProps>; +} = ({ + value, + onChange, + onLeftSelectChange: onLeftSelectChangeFromProps, + onRightSelectChange: onRightSelectChangeFromProps, + onDeleteClick, + leftField = 'leftValue', + rightField = 'rightValue', + leftLabel = '', + rightLabel = '', + leftOptionList = [], + rightOptionList = [], + isClearRightValueAfterLeftSelectChange = false, + isClearBothAfterLeftOptionsChange = false, + className, + isShowDelete = false, + disabled = false, + leftDisabled = false, + rightDisabled = false, + containerStyle, + leftLoading = false, + rightLoading = false, + leftOnPopupScroll, + rightOnPopupScroll, + onLeftSearch, + onRightSearch, +}) => { + const isControlled = typeof value === 'object'; + const { t } = useTranslation(); + const [innerLeftValue, setInnerLeftValue] = useState(); + const [innerRightValue, setInnerRightValue] = useState(); + + const leftValue = useMemo(() => { + if (!isControlled) { + return innerLeftValue; + } + + if (value?.[leftField] || value?.[leftField] === 0) { + return value[leftField]; + } + + return undefined; + }, [value, leftField, isControlled, innerLeftValue]); + + const rightValue = useMemo(() => { + if (!isControlled) { + return innerRightValue; + } + + if (value?.[rightField] || value?.[rightField] === 0) { + return value[rightField]; + } + + return undefined; + }, [value, rightField, isControlled, innerRightValue]); + + // clear both selectors' value when 
left side options changed + const prevLeftOptionList = usePrevious(leftOptionList); + useEffect(() => { + // if previous option list is empty, indicating that options have just been initialized, hence ignore it. + if ( + !isClearBothAfterLeftOptionsChange || + !prevLeftOptionList || + prevLeftOptionList.length === 0 || + leftOptionList.length === 0 + ) { + return; + } + + if (!isControlled) { + setInnerLeftValue(undefined); + setInnerRightValue(undefined); + } + onLeftSelectChangeFromProps?.(undefined); + onChange?.({ + [leftField]: undefined, + [rightField]: undefined, + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [prevLeftOptionList, leftOptionList]); + + return ( + <div className={`${className} algorithm-container`} style={containerStyle}> + {(leftLabel || rightLabel) && ( + <Row gutter={12}> + <Col span={12}>{leftLabel}</Col> + <Col span={12}>{rightLabel}</Col> + </Row> + )} + + <Row gutter={12}> + <Col span={12}> + <Select + className="algorithm-select" + value={leftValue} + placeholder={t('model_center.placeholder_select')} + onChange={onLeftSelectChange} + allowClear + disabled={disabled || leftDisabled} + showSearch + loading={leftLoading} + onPopupScroll={leftOnPopupScroll} + filterOption={(input, option) => { + if (option.props.children) { + return option.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0; + } else if (option?.props?.label) { + return ( + (option.props.label as string).toLowerCase().indexOf(input.toLowerCase()) >= 0 + ); + } + return true; + }} + onSearch={onLeftSearch} + > + {leftOptionList.map((item) => { + return ( + <Select.Option + key={item.value} + value={item.value} + disabled={item.disabled || disabled || leftDisabled} + extra={item.extra} + > + {item.label} + </Select.Option> + ); + })} + </Select> + </Col> + <Col span={12}> + <Select + className="algorithm-select" + value={rightValue} + placeholder={t('model_center.placeholder_select')} + onChange={onRightSelectChange} + onPopupScroll={rightOnPopupScroll} + allowClear + disabled={disabled || rightDisabled} + showSearch + loading={rightLoading} + filterOption={(input, option) => { + if (option?.props?.children) { + return option.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0; + } else if (option?.props?.label) { + return ( + (option.props.label as string).toLowerCase().indexOf(input.toLowerCase()) >= 0 + ); + } + return true; + }} + onSearch={onRightSearch} + > + {rightOptionList.map((item) => { + return ( + <Select.Option + key={item.value} + value={item.value} + disabled={item.disabled || disabled || rightDisabled} + extra={item.extra} + > + {item.label} + </Select.Option> + ); + })} + </Select> + </Col> + </Row> + {isShowDelete && <Delete className="delete-icon" onClick={onDeleteClick} />} + </div> + ); + + function onLeftSelectChange(val: any) { + if (!isControlled) { + setInnerLeftValue(val); + if (isClearRightValueAfterLeftSelectChange) { + setInnerRightValue(undefined); + } + } + onLeftSelectChangeFromProps?.(val); + onChange?.({ + ...value, + [leftField]: val, + [rightField]: isClearRightValueAfterLeftSelectChange + ? undefined + : isControlled + ? value?.[rightField] + : innerRightValue, + }); + } + function onRightSelectChange(val: any) { + if (!isControlled) { + setInnerRightValue(val); + } + onRightSelectChangeFromProps?.(val); + onChange?.({ + ...value, + [leftField]: isControlled ? 
value?.[leftField] : innerLeftValue, + [rightField]: val, + }); + } +}; + +export const ModelSelect: FC<ModelSelectProps> = ({ + leftField = 'model_set_id', + rightField = 'model_id', + modelIdToIsSelectedMap = {}, + value, + isDisabledLinkage = false, + ...props +}) => { + const projectId = useGetCurrentProjectId(); + + const listQuery = useQuery(['double-select-fetch-model-set-list'], () => fetchModelSetList(), { + retry: 2, + refetchOnWindowFocus: false, + }); + + // TODO: filter by group_id + const modelListQuery = useQuery( + ['double-select-fetch-model-jobs'], + () => fetchModelJobList({ types: ['TREE_TRAINING', 'NN_TRAINING', 'TRAINING'] }), + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const leftOptionList = useMemo(() => { + if (!listQuery.data) { + return []; + } + + let list = listQuery.data.data || []; + + list = formatListWithExtra(list, true); + + // filter by project_id and isCompareReport + list = list.filter((item) => { + return !item.isCompareReport && (!projectId || String(item.project_id) === String(projectId)); + }); + + // desc sort + list.sort((a, b) => b.created_at - a.created_at); + + return list.map((item) => ({ + label: item.name, + value: item.id, + })); + }, [listQuery.data, projectId]); + + const rightOptionList = useMemo(() => { + if (isDisabledLinkage) { + return modelListQuery.data?.data.map((item) => ({ + label: item.name, + value: item.id, + disabled: modelIdToIsSelectedMap[item.id as any], + })); + } + const leftValue = value?.[leftField]; + + if (!modelListQuery.data || !leftValue) { + return []; + } + + return modelListQuery.data?.data + .filter((item) => { + return item.group_id === leftValue && item.state === WorkflowState.COMPLETED; + }) + .map((item) => ({ + label: item.name, + value: item.id, + disabled: modelIdToIsSelectedMap[item.id as any], + })); + }, [modelListQuery.data, modelIdToIsSelectedMap, value, leftField, isDisabledLinkage]); + + return ( + <DoubleSelect + value={value} + leftField={leftField} + rightField={rightField} + leftOptionList={leftOptionList} + rightOptionList={rightOptionList} + isClearRightValueAfterLeftSelectChange={true} + {...props} + /> + ); +}; + +export const ModelJobGroupSelect: FC<ModelGroupSelectProps> = ({ + type, + onChange, + onLeftOptionsEmpty, + ...props +}) => { + const projectId = useGetCurrentProjectId(); + const [selectedGroup, setSelectedGroup] = useState<number>(); + const [leftKeyWord, setLeftKeyWord] = useState<string>(); + const [rightKeyWord, setRightKeyWord] = useState<string>(); + + const { + data: pageModelJobGroupList, + fetchNextPage: fetchModelGroupNextPage, + isFetchingNextPage: isFetchingModelGroupNextPage, + hasNextPage: hasModelGroupNextPage, + } = useInfiniteQuery( + ['fetchModelJboGroupList', projectId, type, leftKeyWord], + ({ pageParam = 1 }) => + fetchModelJobGroupList(projectId!, { + page: pageParam, + pageSize: 10, + filter: filterExpressionGenerator( + { + configured: true, + algorithm_type: [type], + name: leftKeyWord, + }, + FILTER_MODEL_TRAIN_OPERATOR_MAPPER, + ), + }), + { + enabled: Boolean(projectId && type), + keepPreviousData: true, + getNextPageParam: (lastPage) => (lastPage.page_meta?.current_page ?? 
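`DoubleSelect` and its variants exchange one object keyed by `leftField`/`rightField`; for `ModelSelect` that is `{ model_set_id, model_id }`, and picking a new model set clears the model because `isClearRightValueAfterLeftSelectChange` is enabled. A usage sketch (`Demo` is hypothetical; `ID` is assumed to be the project's global id type):

```tsx
import React, { useState } from 'react';
import DoubleSelect from 'components/DoubleSelect';

const Demo: React.FC = () => {
  // One controlled object for both dropdowns, keyed by leftField/rightField.
  const [value, setValue] = useState<{ model_set_id?: ID; model_id?: ID }>({});

  return (
    <DoubleSelect.ModelSelect
      value={value}
      leftLabel="模型集"
      rightLabel="模型"
      onChange={setValue}
    />
  );
};

export default Demo;
```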
0) + 1, + }, + ); + + const { + data: pageModelJobList, + fetchNextPage: fetchModelJobListNextPage, + isFetchingNextPage: isFetchingModelJobListNextPage, + hasNextPage: hasModelJobListNextPage, + } = useInfiniteQuery( + ['fetchModelJobList', selectedGroup, projectId, rightKeyWord], + ({ pageParam = 1 }) => + fetchModelJobList_new(projectId!, { + group_id: selectedGroup?.toString()!, + page: pageParam, + page_size: 10, + filter: filterExpressionGenerator( + { status: [ModelJobStatus.SUCCEEDED], name: rightKeyWord }, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + ), + }), + { + enabled: Boolean(projectId && selectedGroup), + keepPreviousData: true, + getNextPageParam: (lastPage) => (lastPage.page_meta?.current_page ?? 0) + 1, + }, + ); + + const leftOptions = useMemo(() => { + const resultOptions: { label: string; value: ID }[] = []; + pageModelJobGroupList?.pages.forEach((group) => { + resultOptions.push(...group.data.map((item) => ({ label: item.name, value: item.id }))); + }); + return resultOptions; + }, [pageModelJobGroupList]); + + const rightOptions = useMemo(() => { + const resultOptions: { label: string; value: ID }[] = []; + pageModelJobList?.pages.forEach((group) => { + resultOptions.push(...group.data.map((item) => ({ label: item.name, value: item.id }))); + }); + return resultOptions; + }, [pageModelJobList]); + + const leftPopupScrollHandler = (element: any) => { + const { scrollTop, scrollHeight, clientHeight } = element; + const scrollBottom = scrollHeight - (scrollTop + clientHeight); + const pagesNumber = pageModelJobGroupList?.pages.length || 0; + const { current_page, total_pages } = pageModelJobGroupList?.pages?.[pagesNumber - 1] + .page_meta || { current_page: 0, total_pages: 0 }; + if (scrollBottom < 10 && !isFetchingModelGroupNextPage && current_page < total_pages) { + hasModelGroupNextPage && fetchModelGroupNextPage(); + } + }; + const rightPopupScrollHandler = (element: any) => { + const { scrollTop, scrollHeight, clientHeight } = element; + const scrollBottom = scrollHeight - (scrollTop + clientHeight); + const pagesNumber = pageModelJobList?.pages.length || 0; + const { current_page, total_pages } = pageModelJobList?.pages?.[pagesNumber - 1] + .page_meta || { current_page: 0, total_pages: 0 }; + if (scrollBottom < 10 && !isFetchingModelJobListNextPage && current_page < total_pages) { + hasModelJobListNextPage && fetchModelJobListNextPage(); + } + }; + + useEffect(() => { + if ( + props.leftField && + props.value?.[props.leftField] != null && + (!leftOptions || leftOptions.length === 0) + ) { + onLeftOptionsEmpty?.(); + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [leftOptions]); + + useEffect(() => { + setSelectedGroup(props.leftField ? props.value?.[props.leftField] : undefined); + }, [props.value, props.leftField]); + + return ( + <DoubleSelect + onChange={onChange} + leftLoading={isFetchingModelGroupNextPage} + rightLoading={isFetchingModelJobListNextPage} + leftOptionList={leftOptions ?? []} + rightOptionList={rightOptions ??
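The left and right popup-scroll handlers above share all of their bottom-detection and page-meta bookkeeping and differ only in which infinite query they read, which makes them easy to let drift apart. A hypothetical shared factory (a refactor sketch, not part of this PR):

```tsx
import { InfiniteData } from 'react-query';
import { PageMeta } from 'typings/app';

type Page<T> = { data: T[]; page_meta?: PageMeta };

// Build a popup-scroll handler bound to one useInfiniteQuery result, so the
// left/right branches cannot accidentally read each other's pagination state.
function makePopupScrollHandler<T>(
  getData: () => InfiniteData<Page<T>> | undefined,
  isFetchingNextPage: () => boolean,
  fetchNextPage: () => void,
) {
  return (element: HTMLElement) => {
    const { scrollTop, scrollHeight, clientHeight } = element;
    const scrollBottom = scrollHeight - (scrollTop + clientHeight);
    const pages = getData()?.pages ?? [];
    const meta = pages[pages.length - 1]?.page_meta;
    const currentPage = meta?.current_page ?? 0;
    const totalPages = meta?.total_pages ?? 0;
    if (scrollBottom < 10 && !isFetchingNextPage() && currentPage < totalPages) {
      fetchNextPage();
    }
  };
}

// e.g. const leftPopupScrollHandler = makePopupScrollHandler(
//   () => pageModelJobGroupList, () => isFetchingModelGroupNextPage, fetchModelGroupNextPage);
```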
[]} + onLeftSelectChange={setSelectedGroup} + isClearBothAfterLeftOptionsChange={false} + leftOnPopupScroll={leftPopupScrollHandler} + rightOnPopupScroll={rightPopupScrollHandler} + onLeftSearch={debounce((value: string) => { + setLeftKeyWord(value); + }, 300)} + onRightSearch={debounce((value: string) => { + setRightKeyWord(value); + }, 300)} + {...props} + /> + ); +}; + +export const AlgorithmSelect: FC<AlgorithmSelectProps> = ({ + value, + onChange: onChangeFromProps, + algorithmProjectTypeList, + disableFirstOnChange = false, + containerStyle, + isParticipant = false, + disableHyperparameters = false, + ...props +}) => { + const isControlled = typeof value === 'object'; + + const [innerLeftValue, setInnerLeftValue] = useState<ID>(); + const [innerConfigValueList, setInnerConfigValueList] = useState<AlgorithmParameter[]>([]); + const isAlreadyCallOnChange = useRef(false); + const projectId = useGetCurrentProjectId(); + + const { t } = useTranslation(); + + const leftValue = useMemo(() => { + if (value?.algorithmProjectId || value?.algorithmProjectId === 0) { + return value.algorithmProjectId; + } + + return undefined; + }, [value]); + + const rightValue = useMemo(() => { + if (value?.algorithmId || value?.algorithmId === 0) { + return value?.algorithmId; + } + + return undefined; + }, [value]); + + const configValueList = useMemo(() => { + if (value?.config) { + return value.config; + } + + return innerConfigValueList; + }, [value, innerConfigValueList]); + + const algorithmProjectQuery = useQuery( + ['getAllAlgorithmProjectList', projectId, ...(algorithmProjectTypeList ?? [])], + async () => { + let data: AlgorithmProject[] = []; + try { + if (projectId) { + const resp = await fetchProjectList(projectId ?? 0, { + type: algorithmProjectTypeList ? algorithmProjectTypeList : undefined, + }); + data = data.concat(resp.data || []); + } + + // preset algorithm + const resp = await fetchProjectList(0, { + type: algorithmProjectTypeList ? algorithmProjectTypeList : undefined, + sources: EnumAlgorithmProjectSource.PRESET, + }); + data = data.concat(resp.data || []); + } catch (error) {} + + return { data }; + }, + + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const algorithmProjectDetailQuery = useQuery( + ['getAlgorithmProjectDetail', leftValue, innerLeftValue], + () => fetchProjectDetail(isControlled ? leftValue! : innerLeftValue!), + { + enabled: + (isControlled + ? leftValue !== null && leftValue !== undefined + : innerLeftValue !== null && innerLeftValue !== undefined) && !isParticipant, + retry: 2, + refetchOnWindowFocus: false, + onSuccess(res) { + if ( + isAlreadyCallOnChange.current || + (!leftValue && leftValue !== 0) || + (!rightValue && rightValue !== 0) || + disableFirstOnChange + ) { + return; + } + + const rightItem = (res.data?.algorithms ?? []).find((item) => item.id === rightValue); + + // Because it is necessary to get extra info in edit mode, so call onChangeFromProps manually + onChangeFromProps?.({ + algorithmProjectId: leftValue, + algorithmId: rightValue, + config: value?.config ?? rightItem?.parameter?.variables ?? [], + path: value?.path ?? rightItem?.path ?? '', + algorithmUuid: rightItem?.uuid ?? 
'', + }); + }, + }, + ); + + const leftOptionList = useMemo(() => { + if (!algorithmProjectQuery.data) { + return []; + } + + const list = algorithmProjectQuery.data.data || []; + + return list.map((item) => ({ + label: item.name, + value: item.id, + extra: item, + })); + }, [algorithmProjectQuery.data]); + + const rightOptionList = useMemo(() => { + if ( + !algorithmProjectDetailQuery.data || + (isControlled && !leftValue && leftValue !== 0) || + (!isControlled && !innerLeftValue && innerLeftValue !== 0) + ) { + return []; + } + + return (algorithmProjectDetailQuery.data?.data?.algorithms ?? []).map((item) => ({ + label: `V${item.version}`, + value: item.id, + extra: item, + })); + }, [algorithmProjectDetailQuery.data, isControlled, leftValue, innerLeftValue]); + + return ( + <div style={containerStyle}> + {!isParticipant && ( + <DoubleSelect + value={value} + leftField="algorithmProjectId" + rightField="algorithmId" + leftOptionList={leftOptionList} + rightOptionList={rightOptionList} + isClearRightValueAfterLeftSelectChange={true} + onChange={onChange} + containerStyle={containerStyle} + {...props} + /> + )} + {configValueList.length > 0 && ( + <> + {!isParticipant && ( + <Row gutter={[12, 12]}> + <Col span={12}>{t('hyper_parameters')}</Col> + </Row> + )} + <Row gutter={[12, 12]}> + {configValueList.map((item, index) => ( + <React.Fragment key={`${item.name}_${index}`}> + <Col span={12}> + <Input + defaultValue={item.name} + onChange={(_, event) => onConfigValueChange(event.target.value, 'name', index)} + disabled={true} + /> + </Col> + <Col span={12}> + <Input + defaultValue={item.value} + onChange={(_, event) => onConfigValueChange(event.target.value, 'value', index)} + disabled={disableHyperparameters} + /> + </Col> + </React.Fragment> + ))} + </Row> + </> + )} + {isParticipant && configValueList.length <= 0 && ( + <Input disabled placeholder="对侧无算法超参数,无需配置" /> + )} + </div> + ); + + function onChange(val: { algorithmProjectId: ID; algorithmId: ID }) { + isAlreadyCallOnChange.current = true; + const rightItem = rightOptionList.find((item) => item.value === val.algorithmId); + + const config = rightItem?.extra?.parameter?.variables ?? []; + const path = rightItem?.extra?.path ?? 
[]; + const algorithmUuid = rightItem?.extra?.uuid; + + if (!isControlled) { + if (leftValue !== val.algorithmProjectId) { + setInnerLeftValue(val.algorithmProjectId); + } + + setInnerConfigValueList(config); + } + + onChangeFromProps?.({ + ...val, + config, + path, + algorithmUuid, + }); + } + + function onConfigValueChange(val: string, key: string, index: number) { + const newConfigValueList = [...configValueList]; + newConfigValueList[index] = { ...newConfigValueList[index], [key]: val }; + + if (!isControlled) { + setInnerConfigValueList(newConfigValueList); + } + + onChangeFromProps?.({ + ...value, + config: newConfigValueList, + }); + } +}; + +DoubleSelect.ModelSelect = ModelSelect; +DoubleSelect.ModelJobGroupSelect = ModelJobGroupSelect; +DoubleSelect.AlgorithmSelect = AlgorithmSelect; + +export default DoubleSelect; diff --git a/web_console_v2/client/src/components/DrawerConfirm/index.test.tsx b/web_console_v2/client/src/components/DrawerConfirm/index.test.tsx new file mode 100644 index 000000000..610655b63 --- /dev/null +++ b/web_console_v2/client/src/components/DrawerConfirm/index.test.tsx @@ -0,0 +1,84 @@ +import React from 'react'; +import { act, screen, fireEvent, waitFor } from '@testing-library/react'; +import confirm from './index'; + +describe('<DrawerConfirm />', () => { + it('should render in specific container', () => { + const container = document.createElement('div'); + + act(() => { + confirm({ + container, + renderContent: () => <div>content</div>, + }); + }); + + const drawer = document.querySelector('.arco-drawer'); + expect(drawer).toBeInTheDocument(); + }); + + it('should called onOk and onClose', () => { + let onOk = jest.fn(() => Promise.resolve('')); + let onClose = jest.fn(); + const okText = 'okText' + Date.now(); + const cancelText = 'cancelText' + Date.now(); + + act(() => { + confirm({ + onOk, + onClose, + okText, + cancelText, + renderContent: () => <div>content</div>, + }); + }); + + const okBtn = screen.getByText(okText); + + fireEvent.click(okBtn); + expect(onOk).toBeCalledTimes(1); + expect(onClose).toBeCalledTimes(0); + + onOk = jest.fn(() => Promise.resolve('')); + onClose = jest.fn(); + act(() => { + confirm({ + onOk, + onClose, + okText, + cancelText, + renderContent: () => <div>content</div>, + }); + }); + + // 当前页面有两个 drawer + const [, cancelBtn] = screen.getAllByText(cancelText); + + fireEvent.click(cancelBtn); + + // Click <ButtonWithPopconfirm/>'s submit button to close drawer + fireEvent.click(screen.getAllByText('submit')[0]); + + expect(onOk).toBeCalledTimes(0); + expect(onClose).toBeCalledTimes(1); + }); + + it('should remove container after close', () => { + const container = document.createElement('div'); + const cancelText = 'cancelText' + Date.now(); + + act(() => { + confirm({ + container, + cancelText, + renderContent: () => <div>content</div>, + }); + }); + + const cancelBtn = screen.getByText(cancelText); + fireEvent.click(cancelBtn); + waitFor(() => { + expect(container.querySelector('.arco-drawer')).toBeNull(); + }); + }); +}); diff --git a/web_console_v2/client/src/components/DrawerConfirm/index.tsx b/web_console_v2/client/src/components/DrawerConfirm/index.tsx new file mode 100644 index 000000000..d60e6fc60 --- /dev/null +++ b/web_console_v2/client/src/components/DrawerConfirm/index.tsx @@ -0,0 +1,136 @@ +// 给 Drawer 组件添加类似于 Modal.confirm 一样的功能 + +import React, { FC, useState } from 'react'; +import ReactDOM from 'react-dom'; +import styled from 'styled-components'; + +import { Drawer, DrawerProps, Button, Space } from 
'@arco-design/web-react'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; + +type TSetParams = (params: any) => void; + +export type TProps = { + visible: boolean; + container?: HTMLElement; + renderContent: (setOkParams: TSetParams, setCloseParams: TSetParams) => React.ReactNode; + okText?: string; + cancelText?: string; + onOk?: (params: any) => Promise<any>; + onClose?: (params?: any) => void; +} & Pick<DrawerProps, 'title' | 'afterClose'>; + +const StyledContainer = styled.div` + font-size: 12px; + color: rgb(var(--gray-8)); +`; +const StyledButtonSpace = styled(Space)` + margin-top: 28px; +`; + +const ConfirmDrawer: FC<TProps> = ({ + title, + okText, + cancelText, + visible, + container, + renderContent, + afterClose, + onOk, + onClose, +}) => { + const [confirming, setConfirming] = useState<boolean>(false); + const [okParams, setOkParams] = useState<any>({}); + const [closeParams, setCloseParams] = useState<any>({}); + + return ( + <Drawer + width={400} + visible={visible} + maskClosable={false} + title={title} + closable={true} + onCancel={onClose} + unmountOnExit={true} + afterClose={afterClose} + getPopupContainer={() => container || window.document.body} + > + <StyledContainer>{renderContent(setOkParams, setCloseParams)}</StyledContainer> + <StyledButtonSpace> + <Button loading={confirming} onClick={onConfirmWrap} type="primary"> + {okText} + </Button> + <ButtonWithPopconfirm + buttonProps={{ + disabled: confirming, + }} + buttonText={cancelText} + onConfirm={() => { + onClose?.(closeParams); + }} + /> + </StyledButtonSpace> + </Drawer> + ); + + async function onConfirmWrap() { + if (typeof onOk === 'function') { + setConfirming(true); + await onOk(okParams); + setConfirming(false); + } + onClose?.(); + } +}; + +type TConfirmProps = Omit<TProps, 'visible'>; +function confirm(props: TConfirmProps) { + const key = `__scale_drawer_${Date.now()}__`; + const container = window.document.createElement('div'); + container.style.zIndex = '1000'; + window.document.body.appendChild(container); + + hide(props); // 先渲染组件 + show(props); // 再显示 + + function renderComp(props: TProps) { + ReactDOM.render( + React.createElement(ConfirmDrawer, { + ...props, + key, + container, + async onOk(params) { + if (props.onOk) { + await props.onOk(params); + hide(props); + } + }, + onClose() { + if (props.onClose) { + props.onClose(); + } + hide(props); + }, + }), + container, + ); + } + + function hide(props: TConfirmProps) { + renderComp({ + ...props, + visible: false, + afterClose() { + window.document.body.removeChild(container); + }, + }); + } + + function show(props: TConfirmProps) { + renderComp({ + ...props, + visible: true, + }); + } +} + +export default confirm; diff --git a/web_console_v2/client/src/components/ErrorBoundary/index.tsx b/web_console_v2/client/src/components/ErrorBoundary/index.tsx new file mode 100644 index 000000000..0ca86f0e4 --- /dev/null +++ b/web_console_v2/client/src/components/ErrorBoundary/index.tsx @@ -0,0 +1,50 @@ +import * as React from 'react'; +import { Alert } from '@arco-design/web-react'; + +interface ErrorBoundaryProps { + title?: React.ReactNode; + description?: React.ReactNode; + children?: React.ReactNode; +} + +export default class ErrorBoundary extends React.Component< + ErrorBoundaryProps, + { + error?: Error | null; + info: { + componentStack?: string; + }; + } +> { + state = { + error: undefined, + info: { + componentStack: '', + }, + }; + + componentDidCatch(error: Error | null, info: object) { + this.setState({ error, info }); + } + + 
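`confirm()` above gives the Drawer a `Modal.confirm`-style imperative API: it mounts `ConfirmDrawer` into a detached container, renders it hidden first (so `afterClose` can remove the container), then re-renders it visible; `renderContent` receives the `setOkParams`/`setCloseParams` setters and the stored params are forwarded to `onOk`/`onClose`. A usage sketch (strings and the awaited call are hypothetical):

```tsx
import React from 'react';
import confirm from 'components/DrawerConfirm';

function openConfirmDrawer() {
  confirm({
    title: '确认操作',
    okText: '确认',
    cancelText: '取消',
    renderContent: (setOkParams) => (
      <div>
        <p>请填写原因后提交</p>
        <input onChange={(e) => setOkParams({ reason: e.target.value })} />
      </div>
    ),
    async onOk(params) {
      // `params` is whatever setOkParams last stored, e.g. { reason: '...' };
      // the drawer stays open (confirm button loading) until this resolves.
      await Promise.resolve(params); // replace with a real API call
    },
  });
}

export default openConfirmDrawer;
```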
render() { + const { title, description, children } = this.props; + const { + error, + info: { componentStack }, + } = this.state; + const errorTitle = typeof title === 'undefined' ? (error || '').toString() : title; + const errorDescription = typeof description === 'undefined' ? componentStack : description; + if (error) { + return ( + <Alert + type="error" + style={{ overflow: 'auto' }} + title={errorTitle} + content={<pre>{errorDescription}</pre>} + /> + ); + } + return children; + } +} diff --git a/web_console_v2/client/src/components/FeatureImportance/index.tsx b/web_console_v2/client/src/components/FeatureImportance/index.tsx new file mode 100644 index 000000000..c005a6ac4 --- /dev/null +++ b/web_console_v2/client/src/components/FeatureImportance/index.tsx @@ -0,0 +1,165 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import styled from 'styled-components'; + +import HorizontalBarChart from 'components/HorizontalBarChart'; +import NoResult from 'components/NoResult'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { QuestionCircle } from 'components/IconPark'; +import { useModelMetriesResult } from 'hooks/modelCenter'; + +const Card = styled.div<{ height?: number }>` + display: flex; + align-items: center; + justify-content: center; + position: relative; + ${(props) => props.height && `height: ${props.height}px`}; + border: 1px solid var(--lineColor); + border-radius: 2px; + padding: 30px 0; +`; +const Title = styled(TitleWithIcon)` + position: absolute; + left: 16px; + top: 12px; + color: var(--textColor); + font-size: 12px; +`; +const Content = styled.div` + position: relative; + width: 100%; + height: 100%; + display: flex; + flex-direction: row; + flex-wrap: wrap; + margin: 0 auto; +`; + +type TXTickFormatter = (val: number) => string | number; + +function defaultXTickFormatter(val: any) { + return val; +} + +const getBarOption = (xTickFormatter?: TXTickFormatter) => { + return { + maintainAspectRatio: false, + indexAxis: 'y', + // Elements options apply to all of the options unless overridden in a dataset + // In this case, we are setting the border of each horizontal bar to be 2px wide + elements: { + bar: { + borderWidth: 0, + }, + }, + responsive: true, + plugins: { + legend: { + display: false, + }, + title: { + display: false, + }, + }, + scales: { + y: { + grid: { + color: 'transparent', + tickColor: '#cecece', + }, + }, + x: { + grid: { + drawBorder: false, + }, + min: 0, + ticks: { + callback: + typeof xTickFormatter === 'function' + ? xTickFormatter + : function (value: any) { + return value; + }, + }, + }, + }, + }; +}; + +type Item = { + label: string; + value: any; +}; + +export type Props = { + valueList: Item[]; + height?: number; + title?: string; + tip?: string; + xTipFormatter?: TXTickFormatter; +}; + +export type ModelEvaluationVariantProps = { + id: ID; + participantId?: ID; + tip?: string; +}; + +type VariantComponent = { + ModelEvaluationVariant: FC<ModelEvaluationVariantProps>; +}; + +export const FeatureImportance: FC<Props> & VariantComponent = ({ + valueList = [], + height = 260, + title = 'Feature importance(Top 15)', + tip = 'Feature importance', + xTipFormatter = defaultXTickFormatter, +}) => { + return ( + <Card height={height}> + <Title + title={title || ''} + isShowIcon={Boolean(tip)} + isLeftIcon={false} + isBlock={false} + tip={tip} + icon={QuestionCircle} + /> + {valueList.length > 0 ? 
( + <Content> + <HorizontalBarChart valueList={valueList} options={getBarOption(xTipFormatter)} /> + </Content> + ) : ( + <NoResult.NoData /> + )} + </Card> + ); +}; + +const ModelValuationVariant: FC<ModelEvaluationVariantProps> = ({ id, participantId, tip }) => { + const { data } = useModelMetriesResult(id, participantId); + + const valueList = useMemo(() => { + if (!data) { + return []; + } + + const list = []; + const { feature_importance = {} } = data; + + for (const k in feature_importance) { + list.push({ + label: k, + value: feature_importance[k], + }); + } + return list.sort((a, b) => b.value - a.value); + }, [data]); + + return <FeatureImportance valueList={valueList} xTipFormatter={(val: any) => val} tip={tip} />; +}; + +FeatureImportance.ModelEvaluationVariant = ModelValuationVariant; +export default FeatureImportance; diff --git a/web_console_v2/client/src/components/FeatureSelect/index.tsx b/web_console_v2/client/src/components/FeatureSelect/index.tsx new file mode 100644 index 000000000..f39818a12 --- /dev/null +++ b/web_console_v2/client/src/components/FeatureSelect/index.tsx @@ -0,0 +1,330 @@ +/* istanbul ignore file */ + +import { Button, Input, Select, Tooltip, Message, Alert } from '@arco-design/web-react'; +import ErrorBoundary from 'components/ErrorBoundary'; +import StructDataPreviewTable from 'components/DataPreview/StructDataTable'; +import { Delete } from 'components/IconPark'; +import NoResult from 'components/NoResult'; +import GridRow from 'components/_base/GridRow'; +import React, { FC } from 'react'; +import { useQuery } from 'react-query'; +import { useRecoilState } from 'recoil'; +import { fetchDatasetPreviewData } from 'services/dataset'; +import { datasetState } from 'stores/dataset'; +import styled from 'styled-components'; +import { MixinCommonTransition } from 'styles/mixins'; + +const Container = styled.div` + position: relative; + left: 250px; + width: calc(100vw - 2 * var(--contentOuterPadding)); + min-height: 400px; + display: flex; + transform: translateX(-50%); + border-top: 1px solid var(--lineColor); +`; +const NoResultContainer = styled.div` + flex-shrink: 0; + width: 100%; + margin: 20px 0; +`; +const TableConatienr = styled.div` + width: 50%; + padding: 24px; + padding-right: 0; + padding-bottom: 0; + border-right: 1px solid var(--lineColor); + overflow: hidden; +`; +const SelectedConatienr = styled.div` + display: flex; + flex-wrap: wrap; + align-content: flex-start; + width: 50%; + padding: 24px; + + > div + div { + margin-left: 20px; + } +`; +const Heading = styled.h4` + margin-bottom: 20px; + line-height: 20px; + font-size: 13px; + font-weight: 500; +`; +const SelectedFeatCol = styled.div` + flex: 1; + min-width: 150px; + max-width: 250px; +`; +const MissingCountCol = styled.div` + flex: 2; + max-width: 400px; +`; +const FeatureDisplay = styled.div` + ${MixinCommonTransition()} + display: flex; + justify-content: space-between; + align-items: center; + padding: 5px 20px; + margin-bottom: 20px; + padding-right: 10px; + line-height: 22px; + border-radius: 2px; + background-color: var(--backgroundColor); + cursor: pointer; + + &:hover { + color: var(--primaryColor); + background-color: white; + box-shadow: 0 0 0 1px var(--primaryColor) inset; + } +`; +const FeatureInput = styled(Input)` + margin-bottom: 20px; +`; +const ButtonGroup = styled(GridRow)` + width: 100%; + flex: 0 0 auto; +`; +const InvalidAlert = styled(Alert)` + width: 100%; + margin-bottom: 10px; +`; + +const CUSTOM_VALUE_TYPE = '_'; +const VALUE_TYPE_OPTIONS = [ + { + value: 
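`FeatureImportance` does no sorting of its own; `ModelEvaluationVariant` above flattens the `feature_importance` map and sorts it descending before rendering. A direct-usage sketch with static data (labels, values, and tooltip copy are made up):

```tsx
import React from 'react';
import FeatureImportance from 'components/FeatureImportance';

const Demo: React.FC = () => (
  <FeatureImportance
    title="Feature importance(Top 15)"
    tip="示例:树模型输出的特征重要性" // hypothetical tooltip copy
    // Pass the list already ordered descending by value.
    valueList={[
      { label: 'age', value: 0.42 },
      { label: 'income', value: 0.31 },
      { label: 'height', value: 0.27 },
    ]}
    xTipFormatter={(v) => v.toFixed(2)}
  />
);

export default Demo;
```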
CUSTOM_VALUE_TYPE, + label: '指定值', + disabled: false, + }, + { + value: 'max', + label: '最大值', + disabled: true, + }, + { + value: 'min', + label: '最小值', + disabled: true, + }, + { + value: 'mean', + label: '中位数', + disabled: true, + }, + { + value: 'avg', + label: '平均值', + disabled: true, + }, +] as const; + +type Props = { + value?: string; + onChange?: (val: string) => void; +}; +type ParsedFeatures = { key: string; value: string }[]; + +let inputTimer: TimeoutID = undefined; + +const FeatureSelect: FC<Props> = ({ value, onChange }) => { + const checkedFeats = _parseValue(value); + + const [dataset] = useRecoilState(datasetState); + const datasetId = dataset.current?.id; + + const previewDataQuery = useQuery( + ['fetchStructPreviewData', datasetId], + () => fetchDatasetPreviewData(datasetId!), + { + refetchOnWindowFocus: false, + retry: 0, + enabled: Boolean(datasetId), + }, + ); + + if (!datasetId) { + return ( + <Container style={{ flexDirection: 'column' }}> + <NoResult text="数据集ID不存在,请检查" /> + <GridRow justify="center"> + <Button onClick={onPreviousClick}>上一步</Button> + </GridRow> + </Container> + ); + } + + const checkedKeys = checkedFeats.map((item) => item.key); + const nothingChecked = checkedKeys.length === 0; + + return ( + <ErrorBoundary> + <Container> + {/* Preview table */} + <TableConatienr> + <Heading>选择特征</Heading> + {/* For hiding preview table's border-right */} + <div style={{ marginRight: -2 }}> + <StructDataPreviewTable + data={previewDataQuery.data?.data} + loading={previewDataQuery.isFetching} + checkable + checkedKeys={checkedKeys} + onCheckedChange={onCheckedChange} + /> + </div> + </TableConatienr> + {/* Selected */} + <SelectedConatienr> + {!_validate(checkedFeats) && <InvalidAlert content="请检查特征缺失默认填充情况" banner />} + <SelectedFeatCol> + <Heading>已选 {checkedFeats.length} 项特征</Heading> + {checkedFeats.map((item) => { + return ( + <FeatureDisplay key={item.key}> + {item.key} + <Tooltip content="取消选择该特征"> + <Delete onClick={() => onFeatDeselect(item.key)} /> + </Tooltip> + </FeatureDisplay> + ); + })} + </SelectedFeatCol> + <MissingCountCol> + <Heading>缺失值填充</Heading> + {checkedFeats.map((item) => { + const isCustom = _isCustomType(item.value); + return ( + <FeatureInput + type="number" + addBefore={ + <Select + style={{ minWidth: 90 }} + defaultValue={isCustom ?
CUSTOM_VALUE_TYPE : item.value} + onChange={(type) => onValueTypeChange({ type, key: item.key })} + > + {VALUE_TYPE_OPTIONS.map((item) => ( + <Select.Option key={item.value} disabled={item.disabled} value={item.value}> + {item.label} + </Select.Option> + ))} + </Select> + } + defaultValue={item.value} + disabled={!isCustom} + placeholder="请输入" + onChange={(value: string, evt) => + onFeatValueChange({ value: evt.target.value, key: item.key }) + } + key={item.key} + /> + ); + })} + </MissingCountCol> + + {/* Nothing selected */} + {nothingChecked && ( + <NoResultContainer> + <NoResult width="200px" text="没有已选的特征" /> + </NoResultContainer> + )} + + <ButtonGroup justify="center" gap={12}> + <Button + style={{ width: '156px' }} + disabled={nothingChecked} + onClick={onSubmit} + type="primary" + > + 下一步 + </Button> + <Button onClick={onPreviousClick}>上一步</Button> + </ButtonGroup> + </SelectedConatienr> + </Container> + </ErrorBoundary> + ); + + function onSubmit(evt: Event) { + if (checkedFeats.length === 0) { + Message.error('请选择至少一个特征'); + evt.preventDefault(); + return; + } + if (!_validate(checkedFeats)) { + Message.error('请检查特征缺失默认填充情况'); + evt.preventDefault(); + return; + } + } + function onFeatDeselect(featKey: string) { + const nextCheckedKeys = [...checkedKeys]; + + nextCheckedKeys.splice(nextCheckedKeys.indexOf(featKey), 1); + onCheckedChange(nextCheckedKeys); + } + function onValueTypeChange(payload: { type: string; key: string }) { + if (payload.type !== CUSTOM_VALUE_TYPE) { + updateValueByFeatKey(payload.key, payload.type); + } + } + function onFeatValueChange(payload: { value: string; key: string }) { + clearTimeout((inputTimer as unknown) as number); + + inputTimer = setTimeout(() => { + updateValueByFeatKey(payload.key, payload.value); + }, 200); + } + function updateValueByFeatKey(featKey: string, value: string) { + const targetFeat = checkedFeats.find((item) => item.key === featKey); + if (!targetFeat) return; + + targetFeat.value = value; + + onChange?.(_assembleValue(checkedFeats)); + } + function onCheckedChange(keys: string[]) { + const values = keys.map((key) => { + return { + key, + value: checkedFeats.find((item) => item.key === key)?.value ?? '', + }; + }); + onChange?.(_assembleValue(values)); + } + function onPreviousClick() {} +}; + +/** Every featrure need a non-empty value */ +function _validate(feats: ParsedFeatures) { + return feats.every(({ value }) => Boolean(value.trim())); +} +/** + * ! @NOTE: value is a twice-JSON-stringified string + */ +function _parseValue(value?: string): ParsedFeatures { + if (!value) return []; + + try { + const unwrapValue = JSON.parse(value); + const featuresMap = unwrapValue?.replace ? JSON.parse(unwrapValue.replace(/\\/g, '')) : {}; + return Object.entries(featuresMap).map(([key, value]) => ({ key, value: value as string })); + } catch (error) { + console.error('[Feature Select]:', error); + return []; + } +} +/** ! 
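* A rough sketch of the round-trip with a hypothetical feature map:
*   { age: '0' } --stringify--> '{"age":"0"}' --stringify again--> '"{\"age\":\"0\"}"'
* _parseValue above reverses this by stripping the escape backslashes before
* parsing the inner JSON object.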
@NOTE: Stringify twice warning */ +function _assembleValue(feats: ParsedFeatures) { + const data = JSON.stringify(Object.fromEntries(feats.map((item) => [item.key, item.value]))); + // NOTE: escape json string to avoid invalid params in backend + return JSON.stringify(data); +} +/** Custom value is always number format */ +function _isCustomType(val: string) { + return /[\d]+/.test(val) || !val; +} +export default FeatureSelect; diff --git a/web_console_v2/client/src/components/FileExplorer/index.less b/web_console_v2/client/src/components/FileExplorer/index.less new file mode 100644 index 000000000..13b2130ca --- /dev/null +++ b/web_console_v2/client/src/components/FileExplorer/index.less @@ -0,0 +1,131 @@ +.file-export-wrapper { + height: 100%; + display: block; + .arco-spin-children { + height: 100%; + overflow: auto; + } +} +.file-node-content-container { + display: flex; + .arco-form-item{ + flex: 1; + } + .arco-form-item-control { + min-height: 26px; + } +} + +.folder-selected{ + background-color: var(--color-fill-2); +} +.file-export-node-name { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + //todo: checker auto is valid or not; + //max-width: auto; + flex: 1; + display: inline-block; + font-size: 14px; + color: var(--font-color); + font-weight: var(--font-weight-normal); + height: 28px; + line-height: 28px; +} +.file-action-list-container { + visibility: hidden; + + .anticon { + color: var(--action-button-color); + } + .anticon:hover { + color: var(--action-button-color-hover); + } + /* transition: background-color 0s !important; */ + .arco-btn { + transition-duration: 0s !important; + } +} +.file-icon-container { + display: inline-block; + margin-right: 5px; + display: flex; + justify-content: center; + align-items: center; + height: 28px; + .anticon{ + height: 16px; + line-height: 16px; + } +} +.file-directory-tree { + // TODO: theme + --bg-hover: #f6f7fb; + --bg-selected: #f2f3f8; + --font-weight-normal: 400; + --font-weight-selected: 500; + --font-color: #1d2129; + --switcher-color: #86909c; + --action-button-color: #86909c; + --action-button-color-hover: #4e5969; + + height: 100%; + overflow: scroll; + .arco-tree-node-switcher { + display: none; + color: var(--switcher-color) !important; + } + + .arco-tree-node:hover { + background-color: var(--color-fill-2); + ::before { + bottom: 0px; + background-color: var(--bg-hover); + } + .file-action-list-container { + visibility: visible; + } + } + .arco-tree-node{ + padding-left: 16px; + } + .arco-tree-node-title{ + padding: 0 !important; + overflow: hidden; + } + .arco-tree-node-title:hover{ + background-color: transparent; + } + .arco-tree-node-selected { + ::before { + background-color: var(--bg-selected) !important; + } + + .file-export-node-name { + font-weight: var(--font-weight-selected); + } + } + .arco-tree-node-indent-block{ + margin: 0; + } + + .arco-form-item { + width: auto; + .arco-col-19{ + width: 100%; + flex: 1; + } + } + + .arco-tree-node.is-focus-mode:not(.is-focus-node) { + opacity: 0.3; + pointer-events: none; + } + &-isFocusMode { + .is-not-focus-node { + opacity: 0.3; + pointer-events: none; + } + } +} diff --git a/web_console_v2/client/src/components/FileExplorer/index.tsx b/web_console_v2/client/src/components/FileExplorer/index.tsx new file mode 100644 index 000000000..77e42448c --- /dev/null +++ b/web_console_v2/client/src/components/FileExplorer/index.tsx @@ -0,0 +1,935 @@ +import React, { + ForwardRefRenderFunction, + useMemo, + useState, + useEffect, + useRef, + 
useImperativeHandle, + forwardRef, +} from 'react'; +import styled from 'styled-components'; +import i18n from 'i18n'; +import { useMount } from 'react-use'; +import classNames from 'classnames'; +import { record } from 'shared/object'; +import { Spin, Message, Tooltip, Form, Input, Tree } from '@arco-design/web-react'; +import NoResult from 'components/NoResult'; +import { + EditNoUnderline, + Check, + Close, + FolderAddFill, + FileAddFill, + ArrowFillDown, + ArrowFillRight, +} from 'components/IconPark'; +import MoreActions from 'components/MoreActions'; + +import { TreeProps, TreeNodeProps } from '@arco-design/web-react/es/Tree'; +import { FormItemProps } from '@arco-design/web-react/es/Form/interface'; +import { + transformRegexSpecChar, + giveWeakRandomKey, + dfs, + formatTreeData, + getFirstFileNode, + fileExtToIconMap as fileExtToIconMapOrigin, + formatFileTreeNodeListToFileData, +} from 'shared/helpers'; +import './index.less'; +const TreeNode = Tree.Node; + +export const fileExtToIconMap = fileExtToIconMapOrigin; + +export type Key = string; + +export type FileDataNode = TreeNodeProps & { + key: string; + code?: string | null; + fileExt?: string; + parentKey?: Key; + label?: string; + children?: FileDataNode[]; + isFolder?: boolean; +}; + +export type FileData = { [filePath: string]: string | null }; +export type FilePathToIsReadMap = { [filePath: string]: boolean }; + +export type Props = { + fileData?: FileData; + getFileTreeList?: () => Promise<any[]>; + getFile?: (filePath: string) => Promise<any>; + formatFileTreeListToFileData?: (data: any[]) => FileData; + isAsyncMode?: boolean; + isLoading?: boolean; + isAutoSelectFirstFile?: boolean; + isReadOnly?: boolean; + isShowNodeTooltip?: boolean; + isExpandAll?: boolean; + onFileDataChange?: (fileData: FileData) => void; + onDeleteFinish?: (keys: Key[], firstKey: Key) => void; + onRenameFinish?: (node: FileDataNode, oldKey: Key, newKey: Key) => void; + onCreateFinish?: (key: Key, isFolder: boolean) => void; + onSelectFile?: (filePath: Key, fileContent: string, node: FileDataNode) => void; + onClickRename?: (node: FileDataNode) => void; + /** + * When beforeCreate return false or Promise that is resolved false, don't create node + */ + beforeCreate?: ( + node: FileDataNode, + key: Key, + isFolder: boolean, + ) => boolean | Promise<boolean | string>; + /** + * When beforeRename return false or Promise that is resolved false, don't rename node + */ + beforeRename?: ( + node: FileDataNode, + oldKey: Key, + newKey: Key, + isFolder: boolean, + ) => boolean | Promise<boolean | string>; + /** + * When beforeDelete return false or Promise that is resolved false, don't delete node + */ + beforeDelete?: (key: Key, isFolder: boolean) => boolean | Promise<boolean>; + onFocusModeChange?: (isFocusMode: boolean) => void; +} & Partial<TreeProps>; + +export type FileExplorerExposedRef = { + getFileData: () => FileData; + createFileOrFolder: (node?: FileDataNode, isFile?: boolean) => void; + setFilePathToIsReadMap: (filePathToIsReadMap: FilePathToIsReadMap) => void; +}; + +// TODO:There are some dependencies on properties defined by this component that will be removed later +const StyledTreeNode = styled(TreeNode)<{ + code?: string | null; + fileExt?: string; + parentKey?: Key; + label?: string; + isFolder?: boolean; +}>``; + +const FileExplorer: ForwardRefRenderFunction<FileExplorerExposedRef, Props> = ( + { + fileData, + getFileTreeList, + getFile, + formatFileTreeListToFileData = formatFileTreeNodeListToFileData, + isAsyncMode = false, + 
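/* In async mode the tree structure comes from getFileTreeList() and each
   file's content is fetched lazily via getFile() on first select; which
   paths have already been read is cached in filePathToIsReadMap below. */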
isLoading: isLoadingFromProps = false, + isReadOnly = false, + isShowNodeTooltip = true, + isAutoSelectFirstFile = true, + isExpandAll = true, + onFileDataChange, + onSelect: onSelectFromProps, + onSelectFile, + onDeleteFinish, + onRenameFinish, + onCreateFinish, + onClickRename, + beforeCreate, + beforeRename, + beforeDelete, + selectedKeys: selectedKeysFromProps, + expandedKeys: expandedKeysFromProps, + onFocusModeChange, + ...restProps + }, + parentRef, +) => { + const [tempFileData, setTempFileData] = useState<FileData>(fileData || {}); + const [focusKey, setFocusKey] = useState<Key | null>(); + const [isCreating, setIsCreating] = useState(false); + const [selectedKeys, setSelectedKeys] = useState<Array<Key>>([]); + const [expandedKeys, setExpandedKeys] = useState<Array<Key>>([]); + const [validateObj, setValidateObj] = useState<{ + validateStatus: FormItemProps['validateStatus']; + help?: string; + }>({ + validateStatus: undefined, + help: '', + }); + const [isLoading, setIsLoading] = useState(false); + + const isDeleteButtonClick = useRef(false); + const isExpandedAll = useRef(false); + const isAlreadySelectFirstNode = useRef(false); + const isAlreadyFetchedFileTreeList = useRef(false); + const inputRef = useRef<any>(null); + // TODO: when to clear cache? + const filePathToIsReadMap = useRef<FilePathToIsReadMap>({}); + + // sync fileData + useEffect(() => { + if (fileData) { + setTempFileData((prevState) => fileData); + } + }, [fileData]); + useEffect(() => { + if (selectedKeysFromProps) { + setSelectedKeys((prevState) => selectedKeysFromProps); + } + }, [selectedKeysFromProps]); + useEffect(() => { + if (expandedKeysFromProps) { + setExpandedKeys((prevState) => expandedKeysFromProps); + } + }, [expandedKeysFromProps]); + useEffect(() => { + onFocusModeChange?.(Boolean(focusKey)); + }, [focusKey, onFocusModeChange]); + + // Fetch file tree data + useMount(() => { + if (!isAsyncMode || !getFileTreeList) return; + + filePathToIsReadMap.current = {}; + + setIsLoading(true); + getFileTreeList() + .then((data) => { + const tempFileData = formatFileTreeListToFileData(data || []) || {}; + // If there is no file data, isAutoSelectFirstFile will be invalid + if (data.length === 0) { + isAlreadySelectFirstNode.current = true; + } + + // Store filePathToIsReadMap cache + filePathToIsReadMap.current = record(tempFileData, false); + setTempFileData((prevState) => tempFileData); + setIsLoading(false); + + isAlreadyFetchedFileTreeList.current = true; + }) + .catch((error) => { + setIsLoading(false); + Message.error(error.message); + // If there is no file data, isAutoSelectFirstFile will be invalid + isAlreadySelectFirstNode.current = true; + isAlreadyFetchedFileTreeList.current = true; + }); + }); + + const formattedTreeData = useMemo(() => formatTreeData(tempFileData), [tempFileData]); + + // Auto select first node in synchronous mode + useMount(() => { + if (isAsyncMode || !isAutoSelectFirstFile) return; + + selectFirstFileNode(formattedTreeData); + }); + // Auto select first node in asynchronous mode (after async tree data loaded) + useEffect(() => { + if (!isAsyncMode || !isAutoSelectFirstFile || isAlreadySelectFirstNode.current) return; + + selectFirstFileNode(formattedTreeData); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isAsyncMode, isAutoSelectFirstFile, formattedTreeData]); + + // default expand all folder + useEffect(() => { + if (isExpandedAll.current || !formattedTreeData || formattedTreeData.length === 0) { + return; + } + // search all folder node + const 
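/* dfs (from shared/helpers) is assumed here to walk the whole tree and
   return every node matching the predicate - i.e. every folder - so that
   all of them can be expanded at once. */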
allFolderNode = dfs<FileDataNode>( + { key: '', parentKey: '', children: formattedTreeData }, + (node) => { + if (node.isFolder && node.key) { + return true; + } + return false; + }, + ); + + // get all folder key + if (isExpandAll) { + setExpandedKeys(allFolderNode.map((item) => item.key)); + } + isExpandedAll.current = true; + }, [isExpandAll, isExpandedAll, formattedTreeData]); + + useImperativeHandle(parentRef, () => { + return { + getFileData: () => tempFileData, + createFileOrFolder: onCreateFile, + setFilePathToIsReadMap, + }; + }); + + if (!formattedTreeData || formattedTreeData.length === 0) { + return ( + <Spin className={'file-export-wrapper'} loading={isLoadingFromProps || isLoading}> + <NoResult.NoData /> + </Spin> + ); + } + + return ( + <Spin className={'file-export-wrapper'} loading={isLoadingFromProps || isLoading}> + <Tree + className={`file-directory-tree ${!!focusKey ? 'file-directory-tree-isFocusMode' : ''}`} + showLine={false} + autoExpandParent={true} + blockNode={true} + size="small" + selectedKeys={selectedKeys} + expandedKeys={expandedKeys} + onSelect={onSelect as any} + onExpand={onExpand} + {...restProps} + > + {renderTreeNode(formattedTreeData)} + </Tree> + </Spin> + ); + + function renderFileTreeNode(node: FileDataNode) { + const Icon = typeof node.icon === 'function' ? node.icon({ ...restProps }) : node.icon; + + const isFocus = focusKey === node.key; + return ( + <div className="file-node-content-container"> + {Icon && <span className="file-icon-container">{Icon}</span>} + {isFocus && !isReadOnly ? ( + <Form.Item {...validateObj} hasFeedback> + <Input + style={{ + padding: 0, + }} + ref={inputRef} + autoFocus + defaultValue={isCreating ? '' : (node.title as string) || ''} + onChange={(value: string) => { + isValidateName(value, node); + }} + onBlur={(event) => { + // actual: onBlur => onClick + // expect: onCick => onBlur + // in isFocus mode, when click create or delete button, input blur event will trigger first + // so use setTimeout + local ref flag to change trigger order + setTimeout(() => { + if (!isDeleteButtonClick.current) { + onInputBlur(event, node); + } else { + resetState(); + } + }, 100); + }} + onKeyDown={(event: any) => { + onInputKeyPress(event, node); + }} + data-testid={`input-${node.key}`} + /> + </Form.Item> + ) : isShowNodeTooltip ? ( + <Tooltip content={node.title || ''} position="lb"> + <span className="file-export-node-name" data-testid={node.key}> + {node.title} + </span> + </Tooltip> + ) : ( + <span className="file-export-node-name" data-testid={node.key}> + {node.title} + </span> + )} + {!isReadOnly && ( + <span + className="file-action-list-container" + style={isFocus ? { visibility: 'visible' } : undefined} + data-testid={`action-list-container-${node.key}`} + > + {isFocus ? 
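/* focus mode: this node is mid-create/rename, so render confirm (Check)
   and cancel/delete (Close) instead of the usual edit and more-actions
   buttons */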
( + <> + <Tooltip content={i18n.t('create')}> + <Check + style={{ margin: '0 8px' }} + onClick={(event) => { + event.stopPropagation(); + // don't do sth, only trigger input blur event + }} + data-testid={`btn-ok-${node.key}`} + /> + </Tooltip> + <Tooltip content={'删除'}> + <Close + onClick={(event) => { + event.stopPropagation(); + // set local ref flag + isDeleteButtonClick.current = true; + onDeleteFile(node, true); + }} + data-testid={`btn-delete-${node.key}`} + /> + </Tooltip> + </> + ) : ( + <> + <Tooltip content={'编辑'}> + <EditNoUnderline + onClick={(event) => { + event.stopPropagation(); + onEditFile(node); + }} + data-testid={`btn-edit-${node.key}`} + /> + </Tooltip> + <MoreActions + zIndex={9999} + actionList={[ + { + label: '删除', + onClick: () => onDeleteFile(node), + testId: `btn-more-acitons-delete-${node.key}`, + }, + ]} + /> + </> + )} + </span> + )} + </div> + ); + } + function renderFolderTreeNode(node: FileDataNode) { + const isFocus = focusKey === node.key; + const isExpanded = expandedKeys.includes(node.key); + return ( + <div className="file-node-content-container"> + <span className="file-icon-container"> + {isExpanded ? <ArrowFillDown /> : <ArrowFillRight />} + </span> + {isFocus && !isReadOnly ? ( + <Form.Item {...validateObj} hasFeedback> + <Input + style={{ + padding: 0, + }} + ref={inputRef} + autoFocus + defaultValue={isCreating ? '' : (node.title as string) || ''} + onChange={(value: string) => { + isValidateName(value, node); + }} + onBlur={(event) => { + onInputBlur(event, node); + }} + onKeyDown={(event: any) => { + onInputKeyPress(event, node); + }} + data-testid={`input-${node.key}`} + /> + </Form.Item> + ) : isShowNodeTooltip ? ( + <Tooltip content={node.title || ''} position="tl"> + <span className="file-export-node-name" data-testid={node.key}> + {node.title} + </span> + </Tooltip> + ) : ( + <span className="file-export-node-name" data-testid={node.key}> + {node.title} + </span> + )} + + {!isReadOnly && ( + <span + className="file-action-list-container" + style={isFocus ? { visibility: 'visible' } : undefined} + data-testid={`action-list-container-${node.key}`} + > + {isFocus ? 
( + <> + <Tooltip content={i18n.t('create')}> + <Check + style={{ margin: '0 8px' }} + onClick={(event) => { + event.stopPropagation(); + // don't do sth, only trigger input blur event + }} + data-testid={`btn-ok-${node.key}`} + /> + </Tooltip> + <Tooltip content={'删除'}> + <Close + onClick={(event) => { + event.stopPropagation(); + // set local ref flag + isDeleteButtonClick.current = true; + onDeleteFile(node, true); + }} + data-testid={`btn-delete-${node.key}`} + /> + </Tooltip> + </> + ) : ( + <> + <Tooltip content={'编辑'}> + <EditNoUnderline + onClick={(event) => { + event.stopPropagation(); + onEditFile(node); + }} + data-testid={`btn-edit-${node.key}`} + /> + </Tooltip> + <Tooltip content={i18n.t('create_folder')}> + <FolderAddFill + style={{ marginLeft: 4 }} + onClick={(event) => { + event.stopPropagation(); + onCreateFile(node, false); + }} + data-testid={`btn-create-folder-${node.key}`} + /> + </Tooltip> + <Tooltip content={i18n.t('create_file')}> + <FileAddFill + style={{ marginLeft: 4 }} + onClick={(event) => { + event.stopPropagation(); + onCreateFile(node, true); + }} + data-testid={`btn-create-file-${node.key}`} + /> + </Tooltip> + <MoreActions + zIndex={9999} + actionList={[ + { + label: '删除', + onClick: () => onDeleteFile(node), + testId: `btn-more-acitons-delete-${node.key}`, + }, + ]} + /> + </> + )} + </span> + )} + </div> + ); + } + function renderTreeNode(treeList: FileDataNode[]) { + return treeList.map((item) => { + const isFocus = focusKey === item.key; + const isSelected = selectedKeys.includes(item.key) && item.isLeaf; + return ( + <StyledTreeNode + key={item.key} + parentKey={item.parentKey} + title={!item.isFolder ? renderFileTreeNode(item) : renderFolderTreeNode(item)} + isLeaf={item.isLeaf} + isFolder={item.isFolder} + code={item.code} + fileExt={item.fileExt} + label={String(item.title)} + data-key={item.key} + data-testid={item.key} + icons={{ + switcherIcon: null, + }} + className={classNames({ + 'is-focus-mode': !!focusKey, + 'is-focus-node': isFocus, + 'is-not-focus-node': !isFocus, + 'folder-selected': isSelected, + })} + > + {renderTreeNode(item.children || [])} + </StyledTreeNode> + ); + }); + } + + function setFileData(fileData: FileData) { + setTempFileData(fileData); + onFileDataChange?.(fileData); + } + function setFilePathToIsReadMap(finalFilePathToIsReadMap: FilePathToIsReadMap) { + filePathToIsReadMap.current = { ...filePathToIsReadMap.current, ...finalFilePathToIsReadMap }; + } + async function renameFileOrFolder(node: FileDataNode, renameKey: string) { + const isFile = !node.isFolder; + const originKey = String(node.key); + + if ( + (isFile && !Object.prototype.hasOwnProperty.call(tempFileData, originKey)) || + originKey === renameKey + ) { + return; + } + + let finalKey = renameKey; + + try { + if (isCreating) { + if (beforeCreate) { + const result = await beforeCreate(node, renameKey, Boolean(node.isFolder)); + + if (result === false) { + deleteFileOrFolder(originKey, !node.isFolder, true); + return; + } + if (result && typeof result === 'string') { + finalKey = result; + } + } + } else { + if (beforeRename) { + const result = await beforeRename(node, originKey, renameKey, Boolean(node.isFolder)); + if (result === false) { + return; + } + if (result && typeof result === 'string') { + finalKey = result; + } + } + } + } catch (error) { + if (isCreating) { + // If beforeCreate throw error(Promise.reject), delete the file/folder + deleteFileOrFolder(originKey, !node.isFolder, true); + } + return; + } + + let tempFileDataCopy = { ...tempFileData 
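/* shallow copy; the folder branch below rewrites every key under the old
   prefix, e.g. 'src/a.py' -> 'lib/a.py' when 'src' is renamed to 'lib'
   (hypothetical paths) */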
}; + + if (isFile) { + tempFileDataCopy[finalKey] = tempFileDataCopy[originKey]; + delete tempFileDataCopy[originKey]; + } else { + const regx = new RegExp(`^${transformRegexSpecChar(originKey)}`); // prefix originKey + // change folder, in other word, change all file under this folder + tempFileDataCopy = Object.keys(tempFileDataCopy).reduce((sum, current) => { + if (!!current.match(regx)) { + const newKey = current.replace(regx, finalKey); + sum[newKey] = tempFileDataCopy[current]; + } else { + sum[current] = tempFileDataCopy[current]; + } + + return sum; + }, {} as FileData); + } + + setFileData(tempFileDataCopy); + + if (isCreating) { + onCreateFinish?.(finalKey, !isFile); + } else { + onRenameFinish?.(node, originKey, finalKey); + } + + // Auto selected new file node + if (isCreating && isFile) { + const extList = finalKey.split('.'); + const nameList = finalKey.split('/'); + const fileExt = extList && extList.length > 0 ? extList[extList.length - 1] : 'default'; + + // trigger mock select event + onSelect( + [finalKey], + { + selected: true, + node: { + props: { + dataRef: { + ...node, + title: nameList[nameList.length - 1], + label: nameList[nameList.length - 1], + key: finalKey, + code: '', + isLeaf: true, + fileExt: fileExt, + isFolder: false, + }, + }, + }, + selectedNodes: [ + { + ...node, + title: nameList[nameList.length - 1], + label: nameList[nameList.length - 1], + key: finalKey, + code: '', + isLeaf: true, + fileExt: fileExt, + isFolder: false, + }, + ], + e: null as any, + }, + true, + ); + } + } + async function deleteFileOrFolder(key: string, isFile = true, isForceDelete = false) { + if (!isForceDelete && beforeDelete) { + try { + const result = await beforeDelete(key, !isFile); + if (result === false) { + return; + } + } catch (error) { + // beforeDelete return Promise.reject, do nothing + return; + } + } + + const toBeDeleteKeys = []; + let tempFileDataCopy = { ...tempFileData }; + if (isFile) { + if (!Object.prototype.hasOwnProperty.call(tempFileData, key)) { + return; + } + // delete node by key + delete tempFileDataCopy[key]; + toBeDeleteKeys.push(key); + } else { + const regx = new RegExp(`^${transformRegexSpecChar(key)}`); // prefix originKey + // delete folder, delete all file under this folder + tempFileDataCopy = Object.keys(tempFileDataCopy).reduce((sum, current) => { + if (!!current.match(regx)) { + // delete + toBeDeleteKeys.push(current); + return sum; + } + + sum[current] = tempFileDataCopy[current]; + + return sum; + }, {} as FileData); + } + + if (!isCreating && onDeleteFinish) { + onDeleteFinish(toBeDeleteKeys, key); + } + + // Cancel focus mode + if (focusKey === key) { + setFocusKey(null); + } + setFileData(tempFileDataCopy); + } + + function resetState() { + setFocusKey(null); + setIsCreating(false); + setValidateObj({ + validateStatus: undefined, + help: '', + }); + isDeleteButtonClick.current = false; + } + function isValidateName(name: string, node: FileDataNode) { + // validate empty + if (!name) { + setValidateObj({ + validateStatus: 'error', + help: i18n.t('valid_error.empty_node_name_invalid'), + }); + return false; + } + if (name === node.title) { + return true; + } + // validate same file path + let tempkey = ''; + if (node.parentKey) { + tempkey = `${node.parentKey}/${name}`; + } else { + tempkey = `${name}`; + } + if ( + Object.prototype.hasOwnProperty.call(tempFileData, tempkey) || + Object.keys(tempFileData).some((innerPath) => { + // Case: tempFileData = { 'main/test.py': '1' }, tempkey = "main" + // There is a folder named "main", so 
it is not validate name + return innerPath.startsWith(`${tempkey}/`); + }) + ) { + setValidateObj({ + validateStatus: 'error', + help: i18n.t('valid_error.same_node_name_invalid'), + }); + return false; + } + + if (validateObj.validateStatus !== 'success') { + setValidateObj({ + validateStatus: 'success', + help: '', + }); + } + + return true; + } + + function onSelect( + selectedKeys: Key[], + info: { + selected: boolean; + selectedNodes: FileDataNode[]; + node: any; + e: Event; + }, + isForceSelect = false, // 扩展参数,表示是否强制选择 + ) { + // Disable select handler in focus mode + if (!isForceSelect && Boolean(focusKey)) return; + + const node = info?.node?.props.dataRef ?? {}; + const { key, isFolder, code } = node; + + if (info.selected && !isFolder) { + if (isAsyncMode && Object.prototype.hasOwnProperty.call(filePathToIsReadMap.current, key)) { + if (getFile) { + if (filePathToIsReadMap.current[key]) { + onSelectFile?.(key, code!, node); + } else { + setIsLoading(true); + getFile(String(key)) + .then((fileContent) => { + filePathToIsReadMap.current[key] = true; + setFileData({ + ...tempFileData, + [key]: fileContent, + }); + node.code = fileContent; + onSelectFile?.(key, fileContent, node); + setIsLoading(false); + }) + .catch((error) => { + Message.error(error.message); + setIsLoading(false); + }); + } + } else { + onSelectFile?.(key, code!, node); + } + } else { + onSelectFile?.(key, code!, node); + } + } + if (isFolder) { + if (expandedKeys.includes(key)) { + setExpandedKeys([...expandedKeys.filter((item) => item !== key)]); + } else { + setExpandedKeys([...expandedKeys, key]); + } + } + setSelectedKeys(selectedKeys); + onSelectFromProps?.(selectedKeys, info as any); + } + function onExpand(expandedKeys: Array<Key>) { + setExpandedKeys(expandedKeys); + } + async function onInputBlur(event: React.FocusEvent<HTMLInputElement>, node: FileDataNode) { + event.stopPropagation(); + const inputValue = event.target.value; + + if (!isValidateName(inputValue, node)) { + inputRef.current?.focus(); + return; + } + + const finalKey = String(node.key).replace(String(node.title), inputValue); + + await renameFileOrFolder(node, finalKey); + + resetState(); + } + async function onInputKeyPress(event: any, node: FileDataNode) { + if (event.key === 'Escape') { + if (isCreating) { + // remove temp node + await deleteFileOrFolder(String(focusKey), !node.isFolder, true); + } + resetState(); + return; + } + if (event.key === 'Enter') { + const inputValue = event.target.value; + + if (!isValidateName(inputValue, node)) return; + + const finalKey = String(node.key).replace(String(node.title), inputValue); + await renameFileOrFolder(node, finalKey); + resetState(); + return; + } + } + function onCreateFile(node?: FileDataNode, isFile?: boolean) { + let tempKey = ''; + const folderKey = node ? (node.isFolder ? node.key : node.parentKey) : ''; + if (folderKey) { + // set folder expand key + setExpandedKeys((prevState) => [...prevState, folderKey]); + // insert new temp node on folder + tempKey = `${folderKey}/${giveWeakRandomKey()}`; + } else { + // insert new temp node on root + tempKey = `${giveWeakRandomKey()}`; + } + + setIsCreating(true); + setFocusKey(tempKey); + setFileData({ ...tempFileData, [tempKey]: isFile ? 
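/* FileData convention: any string value (even '') is file content, while
   null marks a folder - e.g. { 'main.py': 'print(1)', 'lib': null }
   (hypothetical entries) */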
'' : null }); // null will be treated as folder + } + function onEditFile(node: FileDataNode) { + onClickRename?.(node); + setFocusKey(node.key); + } + function onDeleteFile(node: FileDataNode, isForceDelete = false) { + deleteFileOrFolder(String(node.key), !node.isFolder, isForceDelete); + } + + function selectFirstFileNode(fileTreeList: FileDataNode[] = formattedTreeData) { + if (isAlreadySelectFirstNode.current) return; + + const node = getFirstFileNode(fileTreeList); + + // If there is no file node, isAutoSelectFirstFile will be invalid + if (isAsyncMode && isAlreadyFetchedFileTreeList.current && !node) { + isAlreadySelectFirstNode.current = true; + return; + } + + if (node) { + isAlreadySelectFirstNode.current = true; + // Trigger mock select event + onSelect( + [node.key], + { + selected: true, + node: { + props: { + dataRef: { + ...node, + title: node.title, + label: node.title as string, + }, + }, + }, + selectedNodes: [ + { + ...node, + title: node.title, + label: node.title as string, + }, + ], + e: null as any, + }, + true, + ); + } + } +}; + +export default forwardRef(FileExplorer); diff --git a/web_console_v2/client/src/components/FileUpload/index.module.less b/web_console_v2/client/src/components/FileUpload/index.module.less new file mode 100644 index 000000000..0142bb639 --- /dev/null +++ b/web_console_v2/client/src/components/FileUpload/index.module.less @@ -0,0 +1,24 @@ +.file_upload_inner{ + padding: 20px 0; + display: flex; + justify-content: center; + flex-direction: column; + align-items: center; + background-color: var(--color-fill-2); +} + +.file_upload_placeholder{ + line-height: 20px; + font-size: 12px; +} + +.file_upload_tip{ + font-size: 12px; + line-height: 18px; + color: var(--textColorSecondary); +} + +.plus_icon{ + margin-bottom: 10px; + font-size: 20px; +} diff --git a/web_console_v2/client/src/components/FileUpload/index.tsx b/web_console_v2/client/src/components/FileUpload/index.tsx new file mode 100644 index 000000000..436c5962f --- /dev/null +++ b/web_console_v2/client/src/components/FileUpload/index.tsx @@ -0,0 +1,160 @@ +/* istanbul ignore file */ + +import { Message, Upload, UploadProps } from '@arco-design/web-react'; +import { PlusCircle } from 'components/IconPark'; +import React, { FC, useState } from 'react'; +import { isNil, omit } from 'lodash-es'; +import { humanFileSize } from 'shared/file'; +import { getJWTHeaders } from 'shared/helpers'; +import { UploadItem } from '@arco-design/web-react/es/Upload'; +import styles from './index.module.less'; + +export enum UploadFileType { + Dataset = 'dataset', +} + +export type UploadFile = { + /** + * File display name with folder displayed in code editor. + * + * Examples: "test/test.py","syslib.bin" + */ + display_file_name: string; + /** Internal store location for uploaded file */ + internal_path: string; + /** Internal store parent directory for upload file */ + internal_directory: string; + /** + * File content that will be visible and editable for users + * + * Applicable only to human-readable text files + */ + content?: string; +}; + +type Props = Omit<UploadProps, 'action' | 'headers' | 'onChange'> & { + /** Path string */ + value?: string[]; + /** Path string list */ + onChange?: (val: UploadFile[]) => void; + /** Will be as http post body data, i.e. 
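* (with kind: UploadFileType.Dataset the request body carries kind=dataset)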
{ kind } */ + kind?: UploadFileType; + action?: UploadProps['action']; + headers?: UploadProps['headers']; + /** Upload success */ + onSuccess?: (info: UploadItem) => void; + /** Upload error */ + onError?: (error: any) => void; + /** Max file size */ + maxSize?: number; + maxCount?: number; + /** + * File size unit + * + * True - 1024 + * + * False - 1000 + */ + isBinaryUnit?: boolean; + /** Number of decimal places to display maxSize */ + dp?: number; +}; + +const FileUpload: FC<Props> = ({ + kind, + action = '/api/v2/files', + headers, + onSuccess, + onError, + onChange, + value, + maxSize, + isBinaryUnit = true, + dp = 1, + ...props +}) => { + const [uidToPathMap, setMap] = useState<{ [key: string]: UploadFile }>({}); + + return ( + <Upload + drag={true} + data={kind ? { kind } : undefined} + limit={props.maxCount} + headers={{ ...getJWTHeaders(), ...headers }} + {...props} + action={action} + multiple + onChange={onUploadChange} + onRemove={onFileRemove} + beforeUpload={beforeFileUpload} + > + <div className={styles.file_upload_inner}> + <PlusCircle className={styles.plus_icon} /> + <div className={styles.file_upload_placeholder}>点击或拖拽文件到此处上传</div> + <small className={styles.file_upload_tip}> + {props.accept && `请上传${props.accept.split(',').join('/')}格式文件`} + {props.accept && maxSize && `,`} + {maxSize && `大小不超过${humanFileSize(maxSize, !isBinaryUnit, dp)}`} + </small> + </div> + </Upload> + ); + + function onUploadChange(fileList: UploadItem[], info: UploadItem) { + const { status, uid, originFile, response } = info; + + if (status === 'done') { + onSuccess?.(info); + const uploadedFile = (response as any)?.data?.uploaded_files[0]; + + let nextMap: typeof uidToPathMap; + + // Will replace current one when maxCount is 1 + if (props.maxCount === 1) { + nextMap = { [uid]: uploadedFile }; + } else { + nextMap = { ...uidToPathMap, [uid]: uploadedFile }; + } + setMap(nextMap); + onChange?.(Object.values(nextMap)); + } else if (status === 'error') { + Message.error(`${originFile?.name} 上传失败`); + onError?.(originFile); + } + } + function onFileRemove(file: UploadItem) { + const { uid } = file; + + if (uidToPathMap[uid]) { + const path = uidToPathMap[uid]; + const index = value?.findIndex((item) => item === path.internal_path); + + if (!isNil(index) && index > -1) { + const next = [...(value ?? [])]; + next.splice(index, 1); + + const newMap = omit(uidToPathMap, uid); + setMap(newMap); + + onChange?.( + next.map((item) => { + return { + display_file_name: item, + internal_path: item, + internal_directory: item, + }; + }), + ); + } + } + } + function beforeFileUpload(file: File, filesList: File[]) { + if (maxSize && file.size > maxSize) { + Message.warning(`文件大小不能超过 ${humanFileSize(maxSize, !isBinaryUnit, dp)}`); + return false; + } + return props.beforeUpload ? 
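/* returning false from beforeUpload is expected to cancel the upload in
   Arco's Upload component, so the size check above short-circuits the
   request entirely */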
props.beforeUpload(file, filesList) : true; + } +}; + +export default FileUpload; diff --git a/web_console_v2/client/src/components/Footer/index.tsx b/web_console_v2/client/src/components/Footer/index.tsx deleted file mode 100644 index f780d8257..000000000 --- a/web_console_v2/client/src/components/Footer/index.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import React from 'react'; -import styled from 'styled-components'; -import { StyledComponetProps } from 'typings/component'; - -const Container = styled.footer` - text-align: center; - background-color: #f0f0f0; -`; - -function Footer({ className }: StyledComponetProps) { - return <Container className={className}>@fl</Container>; -} - -export default Footer; diff --git a/web_console_v2/client/src/components/FormLabel/index.tsx b/web_console_v2/client/src/components/FormLabel/index.tsx index ab67df103..566c3face 100644 --- a/web_console_v2/client/src/components/FormLabel/index.tsx +++ b/web_console_v2/client/src/components/FormLabel/index.tsx @@ -1,5 +1,7 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; -import { Tooltip } from 'antd'; +import { Tooltip } from '@arco-design/web-react'; import { QuestionCircle } from 'components/IconPark'; import GridRow from 'components/_base/GridRow'; import styled from 'styled-components'; @@ -12,16 +14,17 @@ const LabelText = styled.span` type Props = { label: string; tooltip?: string; + className?: string; }; -const FormLabel: FC<Props> = ({ label, tooltip }) => { +const FormLabel: FC<Props> = ({ label, tooltip, className }) => { if (!tooltip) { return <LabelText>{label}</LabelText>; } return ( - <GridRow gap="8" role="label"> - <Tooltip title={tooltip}> + <GridRow gap="8" role="label" className={className}> + <Tooltip content={tooltip}> <LabelText> {label} <QuestionCircle style={{ marginLeft: '5px' }} /> diff --git a/web_console_v2/client/src/components/Header/Account.tsx b/web_console_v2/client/src/components/Header/Account.tsx index 910c5fa1d..d73a43d09 100644 --- a/web_console_v2/client/src/components/Header/Account.tsx +++ b/web_console_v2/client/src/components/Header/Account.tsx @@ -1,21 +1,27 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; import styled from 'styled-components'; -import { userInfoQuery } from 'stores/user'; +import { userInfoQuery, userInfoState } from 'stores/user'; import avatar from 'assets/images/avatar.jpg'; import { useRecoilQuery } from 'hooks/recoil'; import { MixinCommonTransition, MixinSquare } from 'styles/mixins'; -import { message, Popover, Button } from 'antd'; +import { Message, Popover, Button } from '@arco-design/web-react'; import GridRow from 'components/_base/GridRow'; -import { Settings } from 'components/IconPark'; +import { Settings, UserGroup, Audit, Common, TeamOutlined } from 'components/IconPark'; import { Redirect, useHistory } from 'react-router-dom'; import { useTranslation } from 'react-i18next'; import store from 'store2'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; -import { useResetRecoilState } from 'recoil'; +import { useResetRecoilState, useRecoilValue } from 'recoil'; import { ErrorCodes } from 'typings/app'; import i18n from 'i18n'; import { FedUserInfo } from 'typings/auth'; import UserRoleBadge from 'components/UserRoleBadge'; +import { logout } from 'services/user'; +import { useIsAdminRole } from 'hooks/user'; +import { appFlag } from 'stores/app'; +import { FlagKey } from 'typings/flag'; const Container = styled.div` ${MixinCommonTransition()} @@ -59,7 +65,7 @@ const ButtonRow = 
styled(GridRow)` cursor: pointer; &:hover { - background-color: var(--gray1); + background-color: rgb(rgb(var(--gray-1))); } `; const LogoutButton = styled(Button)` @@ -74,14 +80,46 @@ export const ACCOUNT_CHANNELS = { const AccountPopover: FC = () => { const history = useHistory(); const { t } = useTranslation(); + const resetUserInfoState = useResetRecoilState(userInfoState); const resetUserInfo = useResetRecoilState(userInfoQuery); + const appFlagValue = useRecoilValue(appFlag); + + const isAdminRole = useIsAdminRole(); return ( <div> - <ButtonRow gap="5" onClick={onSettingClick}> - <Settings /> - {t('app.system_settings')} + <ButtonRow gap="5" onClick={onMessageClick}> + <TeamOutlined /> + {t('app.participant')} </ButtonRow> + {isAdminRole && Boolean(appFlagValue[FlagKey.USER_MANAGEMENT_ENABLED]) && ( + <ButtonRow gap="5" onClick={onUserClick}> + <UserGroup /> + {t('app.user_management')} + </ButtonRow> + )} + + {isAdminRole && ( + <ButtonRow gap="5" onClick={onSettingClick}> + <Settings /> + {t('app.system_settings')} + </ButtonRow> + )} + + {isAdminRole && ( + <ButtonRow gap="5" onClick={onAuditClick}> + <Audit /> + {t('app.audit_log')} + </ButtonRow> + )} + + {isAdminRole && ( + <ButtonRow gap="5" onClick={onOperationClick}> + <Common /> + {t('app.operation_maintenance')} + </ButtonRow> + )} + <LogoutButton size="large" onClick={onLogoutClick}> {t('app.logout')} </LogoutButton> @@ -90,18 +128,32 @@ const AccountPopover: FC = () => { async function onLogoutClick() { try { - // logout api is now unavailable, only fe remove the user storage. - // await logout(); + await logout(); store.remove(LOCAL_STORAGE_KEYS.current_user); + store.remove(LOCAL_STORAGE_KEYS.current_project); + store.remove(LOCAL_STORAGE_KEYS.sso_info); + resetUserInfoState(); resetUserInfo(); history.push('/login'); - } catch (error) { - message.error(error.message); + } catch (error: any) { + Message.error(error.message); } } + function onUserClick() { + window.open('/v2/users', '_blank'); + } function onSettingClick() { - history.push('/settings'); + window.open('/v2/settings/variables', '_blank'); + } + function onMessageClick() { + window.open('/v2/partners', '_blank'); + } + function onAuditClick() { + window.open('/v2/audit/event', '_blank'); + } + function onOperationClick() { + window.open('/v2/operation', '_blank'); } }; const Username: FC<{ userInfo: FedUserInfo }> = ({ userInfo }) => { @@ -117,7 +169,7 @@ function HeaderAccount() { const { isLoading, data: userInfo, error } = useRecoilQuery(userInfoQuery); if (error && error.code === ErrorCodes.TokenExpired) { - message.info(i18n.t('error.token_expired')); + Message.info(i18n.t('error.token_expired')); return <Redirect to="/login" />; } @@ -130,11 +182,7 @@ function HeaderAccount() { } return ( - <Popover - content={<AccountPopover />} - title={<Username userInfo={userInfo} />} - placement="bottomLeft" - > + <Popover content={<AccountPopover />} title={<Username userInfo={userInfo} />} position="bl"> <Container> <Avatar src={avatar} alt="avatar" className="user-avatar" /> </Container> diff --git a/web_console_v2/client/src/components/Header/LanguageSwitch.tsx b/web_console_v2/client/src/components/Header/LanguageSwitch.tsx index 41219791f..cf70b90d9 100644 --- a/web_console_v2/client/src/components/Header/LanguageSwitch.tsx +++ b/web_console_v2/client/src/components/Header/LanguageSwitch.tsx @@ -1,11 +1,13 @@ +/* istanbul ignore file */ + import { FALLBACK_LNG, setLocale } from 'i18n'; import React, { FC, useState } from 'react'; import styled from 
'styled-components'; import { FedLanguages } from 'typings/app'; import classNames from 'classnames'; -import store from 'store2'; -import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; import { MixinCommonTransition } from 'styles/mixins'; +import { useRecoilState } from 'recoil'; +import { appPreference } from 'stores/app'; const Container = styled.div` position: relative; @@ -39,7 +41,7 @@ const Slider = styled.div` height: 20px; left: 3px; border-radius: 100px; - background-color: var(--darkGray3); + background-color: rgb(var(--dark-gray-3)); `; const Langs = [ @@ -54,7 +56,8 @@ const Langs = [ ]; const LanguageSwitch: FC = () => { - const [current, setLng] = useState(store.get(LOCAL_STORAGE_KEYS.language) || FALLBACK_LNG); + const [preference, setPreference] = useRecoilState(appPreference); + const [current, setLng] = useState(preference.language || FALLBACK_LNG); const idx = Langs.findIndex((item) => item.val === current); const sliderOffset = (idx === -1 ? 0 : idx) * 32; @@ -78,6 +81,10 @@ const LanguageSwitch: FC = () => { function onLngClick(val: FedLanguages) { setLocale(val); setLng(val); + setPreference({ + ...preference, + language: val, + }); } }; diff --git a/web_console_v2/client/src/components/Header/ProjectSelect.tsx b/web_console_v2/client/src/components/Header/ProjectSelect.tsx deleted file mode 100644 index e45ae4d0a..000000000 --- a/web_console_v2/client/src/components/Header/ProjectSelect.tsx +++ /dev/null @@ -1,98 +0,0 @@ -import React, { FC, memo } from 'react'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { Dropdown } from 'antd'; -import PrettyMenu, { PrettyMenuItem } from 'components/PrettyMenu'; -import { useRecoilQuery } from 'hooks/recoil'; -import { projectListQuery, projectState } from 'stores/project'; -import GridRow from 'components/_base/GridRow'; -import { useRecoilState } from 'recoil'; -import { CaretDown } from 'components/IconPark'; -import { CloseCircleFilled } from '@ant-design/icons'; -import store from 'store2'; -import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; -import { Project } from 'typings/project'; - -const Trigger = styled(GridRow)` - grid-area: project-select; - font-size: 14px; - cursor: pointer; - - &:hover { - > [data-name='clear'] { - display: block; - } - - > [data-name='arrow']:not([data-project-selected='false']) { - display: none; - } - } -`; -const Placeholder = styled.small` - opacity: 0.4; -`; -const ProjectItem = styled.div` - cursor: pointer; -`; -const ClearButton = styled(CloseCircleFilled)` - display: none; - font-size: 12px; -`; - -const ProjectSelect: FC = memo(() => { - const { t } = useTranslation(); - - const projectsQuery = useRecoilQuery(projectListQuery); - const [state, setProjectState] = useRecoilState(projectState); - - if (projectsQuery.isLoading || !projectsQuery.data) { - return <Trigger />; - } - - const hasProjectSelected = Boolean(state.current); - - return ( - <Dropdown - trigger={['click']} - overlay={ - <PrettyMenu> - {projectsQuery.data?.map((item, index) => ( - <PrettyMenuItem key={item.id + index} onClick={() => onProjectSelect(item)}> - <ProjectItem> - <div>{item.name}</div> - </ProjectItem> - </PrettyMenuItem> - ))} - {projectsQuery.data?.length === 0 && t('project.placeholder_no_project')} - </PrettyMenu> - } - placement="bottomCenter" - > - <Trigger gap={5} left="20"> - {state.current ? 
( - <GridRow gap="4">{state.current.name}</GridRow> - ) : ( - <Placeholder>{t('project.placeholder_global_project_filter')}</Placeholder> - )} - {hasProjectSelected && <ClearButton data-name="clear" onClick={onClearCick} />} - <CaretDown - data-project-selected={hasProjectSelected} - data-name="arrow" - style={{ fontSize: 12 }} - /> - </Trigger> - </Dropdown> - ); - - function onClearCick(evt: React.MouseEvent) { - evt.stopPropagation(); - setProjectState({ current: undefined }); - store.remove(LOCAL_STORAGE_KEYS.current_project); - } - function onProjectSelect(item: Project) { - setProjectState({ current: item }); - store.set(LOCAL_STORAGE_KEYS.current_project, item); - } -}); - -export default ProjectSelect; diff --git a/web_console_v2/client/src/components/Header/ProjectSelectNew.tsx b/web_console_v2/client/src/components/Header/ProjectSelectNew.tsx new file mode 100644 index 000000000..b20e28573 --- /dev/null +++ b/web_console_v2/client/src/components/Header/ProjectSelectNew.tsx @@ -0,0 +1,135 @@ +import React, { FC, memo, useCallback, useEffect, useMemo, useState } from 'react'; +import { useRecoilQuery } from 'hooks/recoil'; +import { projectListQuery, projectState } from 'stores/project'; +import { useRecoilState } from 'recoil'; +import store from 'store2'; +import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; +import { Select } from '@arco-design/web-react'; +import { IconDown } from '@arco-design/web-react/icon'; +import styled from 'styled-components'; +import { useUrlState } from 'hooks'; +import { Project } from 'typings/project'; +import { useLocation } from 'react-router-dom'; +const Option = Select.Option; + +type Props = { + isHidden?: boolean; +}; + +const StyledSelect = styled(Select)` + max-width: 240px; + .arco-select-view-value { + color: white; + } + .arco-select-view-input { + color: white; + } +`; + +interface IUrlState { + project_id?: ID; +} + +const ProjectSelectNew: FC<Props> = memo(({ isHidden }) => { + const location = useLocation(); + const [urlState, setUrlState] = useUrlState<IUrlState>( + { project_id: undefined }, + { navigateMode: 'replace' }, + ); + const [filterValue, setFilterValue] = useState<string>(''); + const projectsQuery = useRecoilQuery(projectListQuery); + const [selectProject, setSelectProject] = useRecoilState(projectState); + const projectList = useMemo(() => { + const tempList = projectsQuery?.data?.filter((item) => item.name.indexOf(filterValue) !== -1); + if (!filterValue) { + return tempList; + } + return tempList.sort((a, b) => (a.name.length < b.name.length ? 
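/* while searching, float shorter names up: they are closer to an exact
   match for the typed keyword */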
-1 : 1)); + }, [filterValue, projectsQuery]); + + const idNotExist = (id?: ID) => { + return !id && id !== 0; + }; + + const refreshSelectProject = useCallback( + (selectProject: Project | undefined) => { + setSelectProject({ current: selectProject }); + store.set(LOCAL_STORAGE_KEYS.current_project, selectProject); + }, + [setSelectProject], + ); + + const refreshUrl = useCallback( + (project_id) => { + setUrlState((pre) => ({ + ...pre, + project_id: project_id, + })); + }, + [setUrlState], + ); + + const removeProject = () => { + setSelectProject({ current: undefined }); + store.remove(LOCAL_STORAGE_KEYS.current_project); + }; + + useEffect(() => { + const urlProjectIdExist = Boolean(urlState.project_id); + const notMatch = Boolean(selectProject.current?.id !== parseInt(urlState.project_id)); + if (urlProjectIdExist && notMatch) { + const selectProject = projectsQuery.data?.find((p) => p.id === parseInt(urlState.project_id)); + if (selectProject) { + refreshSelectProject(selectProject); + } + } else { + refreshUrl(selectProject.current?.id); + } + // watching location.pathname is necessary there for adding the project_id to the URL when switching pages + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [projectsQuery.data, location.pathname]); + + const handleOnChange = (projectId: ID) => { + if (idNotExist(projectId)) { + removeProject(); + } else { + const selectProject = projectsQuery.data.find((p) => p.id === projectId); + refreshSelectProject(selectProject); + } + refreshUrl(projectId); + }; + + if (projectsQuery.isLoading || !projectsQuery.data) { + return <div style={{ gridArea: 'project-select' }} />; + } + + return isHidden ? ( + <div className="empty" /> + ) : ( + <StyledSelect + size={'small'} + bordered={false} + arrowIcon={<IconDown />} + allowClear={true} + value={selectProject.current?.id} + placeholder={'请先选择工作区!'} + showSearch={true} + onChange={handleOnChange} + filterOption={false} + getPopupContainer={() => document.getElementById('page-header') as Element} + onInputValueChange={(value) => { + setFilterValue(value); + }} + > + {projectList?.map((item) => { + return ( + <Option key={item.id} value={item.id}> + {item.name} + </Option> + ); + })} + </StyledSelect> + ); +}); + +export default ProjectSelectNew; diff --git a/web_console_v2/client/src/components/Header/Version.tsx b/web_console_v2/client/src/components/Header/Version.tsx new file mode 100644 index 000000000..72364a927 --- /dev/null +++ b/web_console_v2/client/src/components/Header/Version.tsx @@ -0,0 +1,149 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import ClickToCopy from 'components/ClickToCopy'; +import styled from 'styled-components'; +import { useQuery } from 'react-query'; +import { Branch } from 'components/IconPark'; +import { fetchSystemVersion } from 'services/system'; +import { CONSTANTS } from 'shared/constants'; +import { Popover } from '@arco-design/web-react'; +import { omitBy, isNull } from 'lodash-es'; + +const Container = styled.div` + display: flex; + grid-area: version; + align-items: center; + padding: 5px 8px; + color: rgb(var(--gray-10)); + line-height: 1; + font-size: 12px; + border-radius: 5px; + font-weight: bold; + overflow: hidden; + opacity: 0.5; + transition: 0.12s ease-in; + background-color: rgb(var(--gray-3)); + border: 1px solid rgb(var(--gray-3)); + + &:hover { + opacity: 0.9; + } +`; +const VersionIcon = styled(Branch)` + font-size: 12px; +`; +const VersionText = styled.div` + padding: 5px 8px 5px 6px; + margin: -5px -8px -5px 
5px; + background-color: #fff; +`; +const DetailList = styled.ul` + list-style: none; +`; +const DetailItem = styled.li` + display: flex; + align-items: center; + justify-content: space-between; + width: 250px; + height: 30px; +`; + +const HeaderVersion: FC = () => { + const versionQuery = useQuery('fetchVersion', fetchSystemVersion, { + refetchOnWindowFocus: false, + staleTime: 100 * 60 * 1000, + }); + + const version = useMemo(() => { + if (!versionQuery.data?.data) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + + if (versionQuery.data.data.version) { + return versionQuery.data.data.version; + } + + if (versionQuery.data.data.revision) { + return versionQuery.data.data.revision.slice(-6); + } + + return CONSTANTS.EMPTY_PLACEHOLDER; + }, [versionQuery.data]); + + const sha = useMemo(() => { + if (!versionQuery.data?.data) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + + if (versionQuery.data.data.revision) { + return versionQuery.data.data.revision.slice(0, 12); + } + + return CONSTANTS.EMPTY_PLACEHOLDER; + }, [versionQuery.data]); + + const pubDate = useMemo(() => { + if (!versionQuery.data?.data) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + if (versionQuery.data.data.pub_date) { + return versionQuery.data.data.pub_date; + } + + return CONSTANTS.EMPTY_PLACEHOLDER; + }, [versionQuery.data]); + + const infoToBeCopy = useMemo(() => { + if (!versionQuery.data?.data) { + return ''; + } + const { revision, pub_date, version } = versionQuery.data.data; + + // Filter null field + return omitBy( + { + revision: revision, + pub_date: pub_date, + version: version, + }, + isNull, + ); + }, [versionQuery.data]); + + if (!versionQuery.data) { + return null; + } + + return ( + <ClickToCopy text={JSON.stringify(infoToBeCopy ?? '')}> + <Popover trigger={['hover']} content={renderDetails()} title={'当前版本'} position="br"> + <Container> + <VersionIcon /> + <VersionText>{version}</VersionText> + </Container> + </Popover> + </ClickToCopy> + ); + + function renderDetails() { + return ( + <DetailList> + <DetailItem> + <strong>Version</strong> + <span>{version}</span> + </DetailItem> + <DetailItem> + <strong>SHA</strong> + <span>{sha}</span> + </DetailItem> + <DetailItem> + <strong>发布日期</strong> + <span>{pubDate}</span> + </DetailItem> + </DetailList> + ); + } +}; + +export default HeaderVersion; diff --git a/web_console_v2/client/src/components/Header/index.tsx b/web_console_v2/client/src/components/Header/index.tsx index d0028f89a..a84d2c166 100644 --- a/web_console_v2/client/src/components/Header/index.tsx +++ b/web_console_v2/client/src/components/Header/index.tsx @@ -1,12 +1,17 @@ -import React from 'react'; +/* istanbul ignore file */ + +import React, { useState } from 'react'; import styled from 'styled-components'; +import { useLocation } from 'react-router-dom'; + +import { useGetLogoSrc } from 'hooks'; + +import { isInWorkspace } from 'components/Sidebar'; +import ProjectSelectNew from './ProjectSelectNew'; import HeaderAccount from './Account'; -import { Tooltip } from 'antd'; -import logo from 'assets/images/logo-colorful.svg'; +import HeaderVersion from './Version'; + import { StyledComponetProps } from 'typings/component'; -import { QuestionCircle } from 'components/IconPark'; -import { useTranslation } from 'react-i18next'; -import ProjectSelect from './ProjectSelect'; export const Z_INDEX_HEADER = 1001; export const Z_INDEX_GREATER_THAN_HEADER = 1002; @@ -17,45 +22,42 @@ const Container = styled.header` top: 0; display: grid; align-items: center; - grid-template-areas: 'logo project-select . 
help account-info'; + grid-template-areas: 'logo project-select . version account-info'; grid-template-columns: auto auto 1fr auto auto; gap: 12px; height: var(--headerHeight); - padding: 0 30px; + padding-left: var(--headerPaddingLeft, 30px); + padding-right: var(--headerPaddingRight, 30px); background-color: var(--headerBackground); color: white; - border-bottom: 1px solid var(--backgroundColorGray); + border-bottom: var(--headerBorderBottomWidth) solid var(--headerBorderBottomColor); `; const LogoLink = styled.a` grid-area: logo; `; const Logo = styled.img` - height: 32px; -`; -const HelpIcon = styled(QuestionCircle)` - font-size: 14px; - margin-right: 10px; - cursor: pointer; + height: var(--headerLogoHeight); `; function Header({ className }: StyledComponetProps) { - const { t } = useTranslation(); + const { primaryLogo } = useGetLogoSrc(); + + const location = useLocation(); + + const [isHidden] = useState(() => { + const { pathname } = location; + return !isInWorkspace(pathname); + }); return ( <Container className={className} id="page-header"> <LogoLink href="/"> - <Logo src={logo} alt="Federation Learner logo" /> + <Logo src={primaryLogo} alt="Federation Learner logo" /> </LogoLink> - - <ProjectSelect /> - + <ProjectSelectNew isHidden={isHidden} /> {/* This empty element is used to fill the blank sapce */} <div className="empty" /> - - <Tooltip title={t('app.help')} placement="bottom"> - <HelpIcon /> - </Tooltip> - + <HeaderVersion /> <HeaderAccount /> </Container> ); diff --git a/web_console_v2/client/src/components/HorizontalBarChart/index.tsx b/web_console_v2/client/src/components/HorizontalBarChart/index.tsx new file mode 100644 index 000000000..5826eccad --- /dev/null +++ b/web_console_v2/client/src/components/HorizontalBarChart/index.tsx @@ -0,0 +1,108 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import { Bar } from 'react-chartjs-2'; + +type Item = { + label: string; + value: any; +}; + +type Props = { + valueList: Item[]; + formatData?: (valueList: Item[]) => any; + options?: any; + width?: number; + height?: number; + maxValue?: number; +}; + +const defaultFormatData = (valueList: Item[]) => { + const labels: any[] = []; + const data: any[] = []; + + valueList.forEach((item) => { + labels.push(item.label); + data.push(item.value); + }); + + const finalData = { + labels, + datasets: [ + { + data, + backgroundColor: '#468DFF', + borderWidth: 0, + barPercentage: 0.6, + }, + ], + }; + + return finalData; +}; + +const defaultMaxValue = 1; + +const getDefaultOptions = (maxValue = 1) => ({ + maintainAspectRatio: false, + indexAxis: 'y', + // Elements options apply to all of the options unless overridden in a dataset + // In this case, we are setting the border of each horizontal bar to be 2px wide + elements: { + bar: { + borderWidth: 0, + }, + }, + responsive: true, + plugins: { + legend: { + display: false, + }, + title: { + display: false, + }, + }, + scales: { + y: { + grid: { + color: 'transparent', + tickColor: '#cecece', + }, + }, + x: { + grid: { + drawBorder: false, + }, + min: 0, + // max: maxValue * 1.2, + // ticks: { + // min: 0, + // max: maxValue * 1.2, + // suggestedMin: 0, + // suggestedMax: maxValue * 1.2, + // stepSize: 0.2, + // }, + }, + }, +}); + +const HorizontalBarChart: FC<Props> = ({ + valueList, + formatData = defaultFormatData, + options, + width, + height, + maxValue = defaultMaxValue, +}) => { + const data = useMemo(() => { + return formatData(valueList); + }, [valueList, formatData]); + + const defaultOptions = 
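/* memoized so the options object stays referentially stable across renders;
   indexAxis: 'y' in getDefaultOptions is the Chart.js v3 switch that makes
   the bars horizontal */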
useMemo(() => { + return getDefaultOptions(maxValue); + }, [maxValue]); + + return <Bar data={data} options={options || defaultOptions} width={width} height={height} />; +}; + +export default HorizontalBarChart; diff --git a/web_console_v2/client/src/components/IconButton/index.tsx b/web_console_v2/client/src/components/IconButton/index.tsx index f0f5e8da7..e00ed0623 100644 --- a/web_console_v2/client/src/components/IconButton/index.tsx +++ b/web_console_v2/client/src/components/IconButton/index.tsx @@ -1,7 +1,9 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; import styled from 'styled-components'; -import { Button } from 'antd'; -import { ButtonProps } from 'antd/lib/button/button'; +import { Button } from '@arco-design/web-react'; +import { ButtonProps } from '@arco-design/web-react/lib/Button'; const ButtonContainer = styled(Button)` &:hover { diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/Config.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/Config.tsx new file mode 100644 index 000000000..d2e9eb75e --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/Config.tsx @@ -0,0 +1,24 @@ +/** + * @file Config config + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'config', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M0.920044 8.74992V7.24992C1.25004 7.21992 1.50504 7.20492 1.65504 7.09992C1.80504 7.05492 1.94004 6.90492 2.06004 6.73992C2.18004 6.55992 2.25504 6.36492 2.31504 6.07992C2.34504 5.88492 2.39004 5.55492 2.39004 5.07492C2.39004 4.26492 2.42004 3.70992 2.51004 3.37992C2.58504 3.07992 2.73504 2.79492 2.91504 2.61492C3.11004 2.43492 3.42504 2.28492 3.80004 2.16492C4.05504 2.11992 4.46004 2.04492 5.04504 2.04492H5.40504V3.51492C4.92504 3.51492 4.59504 3.54492 4.47504 3.61992C4.35504 3.66492 4.25004 3.73992 4.14504 3.87492C4.07004 3.97992 4.04004 4.15992 4.04004 4.39992C4.04004 4.65492 4.01004 5.16492 3.96504 5.86992C3.93504 6.30492 3.89004 6.63492 3.81504 6.85992C3.71004 7.11492 3.59004 7.30992 3.45504 7.48992C3.33504 7.63992 3.09504 7.81992 2.82504 7.99992C3.08004 8.14992 3.30504 8.29992 3.45504 8.47992C3.60504 8.65992 3.74004 8.91492 3.83004 9.15492C3.93504 9.43992 3.98004 9.78492 3.98004 10.2349C4.01004 10.9099 4.01004 11.3449 4.01004 11.5549C4.01004 11.8399 4.04004 12.0049 4.11504 12.1399C4.19004 12.2599 4.31004 12.3199 4.44504 12.3949C4.56504 12.4399 4.89504 12.4999 5.37504 12.4999V13.9699H5.00004C4.41504 13.9699 3.93504 13.9399 3.66504 13.8499C3.33504 13.7449 3.08004 13.5949 2.85504 13.3999C2.63004 13.2049 2.49504 12.9499 2.40504 12.6349C2.33004 12.3349 2.30004 11.8549 2.30004 11.1949C2.30004 10.4299 2.27004 9.93492 2.19504 9.72492C2.09004 9.39492 1.94004 9.13992 1.74504 8.98992C1.62504 8.83992 1.31004 8.74992 0.920044 8.74992ZM15.035 8.74992C14.705 8.77992 14.45 8.79492 14.3 8.89992C14.15 8.94492 14.015 9.09492 13.895 9.25992C13.775 9.43992 13.7 9.63492 13.64 9.91992C13.61 10.1149 13.565 10.4449 13.565 10.9249C13.565 11.7349 13.535 12.2899 13.445 12.6199C13.37 12.9499 13.22 13.2049 13.04 13.3849C12.845 13.5649 12.53 13.7149 12.155 13.8349C11.9 13.8799 11.495 13.9549 10.91 13.9549H10.55V12.4849C11.03 12.4849 11.315 12.4549 11.48 12.3799C11.63 12.3349 11.735 12.2299 11.81 12.1249C11.885 12.0199 11.915 11.8399 11.915 
11.5999C11.915 11.3599 11.945 10.8649 11.99 10.1599C12.02 9.72492 12.095 9.37992 12.185 9.15492C12.29 8.86992 12.41 8.67492 12.56 8.49492C12.71 8.31492 12.92 8.16492 13.16 8.01492C12.755 7.75992 12.5 7.57992 12.38 7.41492C12.185 7.12992 12.02 6.78492 11.975 6.40992C11.9 6.10992 11.87 5.47992 11.87 4.51992C11.87 4.21992 11.84 4.00992 11.765 3.88992C11.69 3.78492 11.615 3.70992 11.48 3.63492C11.36 3.58992 11.03 3.52992 10.52 3.52992V2.05992H10.88C11.465 2.05992 11.945 2.08992 12.215 2.17992C12.545 2.28492 12.8 2.43492 13.025 2.62992C13.25 2.82492 13.385 3.07992 13.475 3.39492C13.55 3.69492 13.595 4.17492 13.595 4.83492C13.595 5.59992 13.625 6.07992 13.7 6.30492C13.805 6.63492 13.955 6.88992 14.15 6.97992C14.345 7.12992 14.66 7.17492 15.035 7.23492V8.74992ZM10.79 8.59992C10.535 8.52492 10.385 8.29992 10.385 8.04492C10.385 7.78992 10.565 7.56492 10.79 7.48992C10.865 7.45992 10.91 7.38492 10.895 7.30992C10.82 7.02492 10.715 6.78492 10.565 6.54492C10.535 6.46992 10.445 6.43992 10.37 6.49992C10.295 6.54492 10.19 6.57492 10.085 6.57492C9.75504 6.57492 9.50004 6.28992 9.50004 5.98992C9.50004 5.88492 9.53004 5.79492 9.57504 5.70492C9.60504 5.62992 9.57504 5.55492 9.53004 5.50992C9.30504 5.35992 9.02004 5.25492 8.76504 5.17992C8.69004 5.14992 8.61504 5.20992 8.58504 5.28492C8.51004 5.53992 8.28504 5.68992 8.03004 5.68992C7.77504 5.68992 7.55004 5.50992 7.47504 5.28492C7.44504 5.20992 7.37004 5.16492 7.29504 5.17992C7.01004 5.25492 6.77004 5.35992 6.53004 5.50992C6.45504 5.53992 6.42504 5.62992 6.48504 5.70492C6.53004 5.77992 6.56004 5.88492 6.56004 5.98992C6.56004 6.31992 6.27504 6.57492 5.97504 6.57492C5.87004 6.57492 5.78004 6.54492 5.69004 6.49992C5.61504 6.46992 5.54004 6.49992 5.49504 6.54492C5.34504 6.76992 5.24004 7.05492 5.16504 7.30992C5.13504 7.38492 5.19504 7.45992 5.27004 7.48992C5.52504 7.56492 5.67504 7.78992 5.67504 8.04492C5.67504 8.29992 5.49504 8.52492 5.27004 8.59992C5.19504 8.62992 5.15004 8.70492 5.16504 8.77992C5.24004 9.06492 5.34504 9.30492 5.49504 9.54492C5.52504 9.61992 5.61504 9.64992 5.69004 9.58992C5.76504 9.54492 5.87004 9.51492 5.97504 9.51492C6.30504 9.51492 6.56004 9.79992 6.56004 10.0999C6.56004 10.2049 6.53004 10.2949 6.48504 10.3849C6.45504 10.4599 6.48504 10.5349 6.53004 10.5799C6.75504 10.7299 7.04004 10.8349 7.29504 10.9099H7.32504C7.37004 10.9099 7.44504 10.8799 7.47504 10.8049C7.55004 10.5499 7.77504 10.3999 8.03004 10.3999C8.28504 10.3999 8.51004 10.5799 8.58504 10.8049C8.61504 10.8799 8.69004 10.9249 8.76504 10.9099C9.05005 10.8349 9.29004 10.7299 9.53004 10.5799C9.60504 10.5499 9.63505 10.4599 9.57504 10.3849C9.53004 10.3099 9.50004 10.2049 9.50004 10.0999C9.50004 9.76992 9.78504 9.51492 10.085 9.51492C10.19 9.51492 10.28 9.54492 10.37 9.58992C10.445 9.61992 10.52 9.58992 10.565 9.54492C10.715 9.31992 10.82 9.03492 10.895 8.77992C10.925 8.68992 10.865 8.61492 10.79 8.59992ZM7.98504 8.91492C7.47504 8.91492 7.07004 8.50992 7.07004 7.99992C7.07004 7.48992 7.47504 7.08492 7.98504 7.08492C8.49504 7.08492 8.90004 7.48992 8.90004 7.99992C8.90004 8.52492 8.49504 8.91492 7.98504 8.91492Z" + fill="#FEC745" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/Default.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/Default.tsx new file mode 100644 index 000000000..bd56f5933 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/Default.tsx @@ -0,0 +1,26 @@ +/** + * @file Default default + * @author Auto Generated by IconPark + */ + +/* tslint:disable: 
max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'default', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M9.91403 1.7002C10.2328 1.7002 10.5386 1.82706 10.7637 2.0528L13.0496 4.34485C13.274 4.5698 13.4 4.87454 13.4 5.19225V13.7002C13.4 14.0316 13.1313 14.3002 12.8 14.3002H3.19998C2.8686 14.3002 2.59998 14.0316 2.59998 13.7002V2.3002C2.59998 1.96882 2.8686 1.7002 3.19998 1.7002H9.91403ZM8.29998 9.2002C8.46566 9.2002 8.59998 9.33451 8.59998 9.5002V10.2369C8.59998 10.4026 8.46566 10.5369 8.29998 10.5369H5.59998C5.43429 10.5369 5.29998 10.4026 5.29998 10.2369V9.5002C5.29998 9.33451 5.43429 9.2002 5.59998 9.2002H8.29998ZM10.4 6.04139C10.5657 6.04139 10.7 6.17571 10.7 6.34139V7.1002C10.7 7.26588 10.5657 7.4002 10.4 7.4002H5.59998C5.43429 7.4002 5.29998 7.26588 5.29998 7.1002V6.34139C5.29998 6.17571 5.43429 6.04139 5.59998 6.04139H10.4Z" + fill="#628099" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/GitIgnore.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/GitIgnore.tsx new file mode 100644 index 000000000..603eaf113 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/GitIgnore.tsx @@ -0,0 +1,24 @@ +/** + * @file GitIgnore git-ignore + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'git-ignore', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M14.9283 7.35862L8.64127 1.07199C8.55515 0.985854 8.4529 0.917525 8.34037 0.870906C8.22784 0.824287 8.10723 0.800293 7.98542 0.800293C7.86362 0.800293 7.74301 0.824287 7.63048 0.870906C7.51795 0.917525 7.4157 0.985854 7.32958 1.07199L5.86896 2.53262L6.96825 3.63191C7.22015 3.51161 7.50316 3.47235 7.7783 3.51956C8.05344 3.56676 8.30717 3.69811 8.50457 3.8955C8.70196 4.0929 8.83331 4.34663 8.88051 4.62177C8.92772 4.89691 8.88847 5.17992 8.76816 5.43182L10.5676 7.23128C10.855 7.09403 11.1816 7.06277 11.4898 7.14305C11.7979 7.22333 12.0678 7.40997 12.2517 7.66997C12.4355 7.92998 12.5215 8.24662 12.4945 8.56392C12.4675 8.88122 12.3292 9.17875 12.104 9.40393C11.8788 9.62911 11.5813 9.76743 11.264 9.79445C10.9467 9.82147 10.6301 9.73544 10.37 9.55158C10.11 9.36772 9.9234 9.09785 9.84312 8.78969C9.76285 8.48152 9.7941 8.1549 9.93135 7.86755L8.13189 6.06809C8.08902 6.08874 8.045 6.10691 8.00005 6.12254V9.8767C8.30028 9.98285 8.55332 10.1917 8.71445 10.4664C8.87557 10.7411 8.93441 11.0638 8.88056 11.3777C8.82671 11.6916 8.66364 11.9763 8.42017 12.1815C8.17671 12.3868 7.86851 12.4994 7.55007 12.4994C7.23163 12.4994 6.92344 12.3868 6.67997 12.1815C6.4365 11.9763 6.27343 11.6916 6.21958 11.3777C6.16573 11.0638 6.22457 10.7411 6.3857 10.4664C6.54682 10.1917 6.79986 9.98285 7.10009 9.8767V6.12254C6.92159 6.05944 6.75836 5.9595 6.62099 5.82921C6.48363 5.69892 6.37521 5.5412 6.30277 5.36628C6.23033 5.19136 6.1955 5.00315 6.20053 4.8139C6.20556 4.62464 6.25035 4.43855 6.33198 4.26773L5.23269 3.16844L1.07175 7.32893C0.98561 7.41505 0.91728 7.51729 0.870662 7.62982C0.824043 7.74235 0.800049 7.86296 0.800049 
7.98477C0.800049 8.10657 0.824043 8.22718 0.870662 8.33972C0.91728 8.45225 0.98561 8.55449 1.07175 8.64061L7.35928 14.9272C7.4454 15.0134 7.54765 15.0817 7.66018 15.1283C7.77271 15.1749 7.89332 15.1989 8.01512 15.1989C8.13693 15.1989 8.25754 15.1749 8.37007 15.1283C8.4826 15.0817 8.58484 15.0134 8.67096 14.9272L14.9283 8.66986C15.0145 8.58374 15.0828 8.48149 15.1294 8.36896C15.1761 8.25643 15.2 8.13582 15.2 8.01402C15.2 7.89221 15.1761 7.7716 15.1294 7.65907C15.0828 7.54654 15.0145 7.44429 14.9283 7.35817V7.35862Z" + fill="#FFA841" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/JavaScript.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/JavaScript.tsx new file mode 100644 index 000000000..0ee53ea43 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/JavaScript.tsx @@ -0,0 +1,24 @@ +/** + * @file Javascript javascript + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'javascript', + false, + (props: ISvgIconProps) => ( + <svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M1 0C0.447715 0 0 0.447715 0 1V11C0 11.5523 0.447715 12 1 12H11C11.5523 12 12 11.5523 12 11V1C12 0.447715 11.5523 0 11 0H1ZM6.53036 9.35893C6.53036 10.5268 5.84464 11.0598 4.84554 11.0598C3.94286 11.0598 3.42054 10.5938 3.15268 10.0286L4.07143 9.47411C4.24821 9.7875 4.40893 10.0527 4.79732 10.0527C5.16696 10.0527 5.40268 9.90804 5.40268 9.34286V5.50982H6.53036V9.35893ZM9.19821 11.0598C8.15089 11.0598 7.47321 10.5616 7.14375 9.90804L8.0625 9.37768C8.30357 9.77143 8.61964 10.0634 9.17411 10.0634C9.64018 10.0634 9.94018 9.83036 9.94018 9.50625C9.94018 9.12054 9.63482 8.98393 9.11786 8.75625L8.83661 8.63571C8.02232 8.29018 7.48393 7.85357 7.48393 6.93482C7.48393 6.08839 8.12946 5.44554 9.13393 5.44554C9.85179 5.44554 10.3661 5.69464 10.7357 6.34821L9.85714 6.91071C9.66429 6.56518 9.45536 6.42857 9.13125 6.42857C8.80179 6.42857 8.59286 6.6375 8.59286 6.91071C8.59286 7.24821 8.80179 7.38482 9.28661 7.59643L9.56786 7.71696C10.5268 8.12679 11.0652 8.54732 11.0652 9.49018C11.0652 10.5027 10.267 11.0598 9.19821 11.0598Z" + fill="#FFAF38" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/Json.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/Json.tsx new file mode 100644 index 000000000..1a9e2e934 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/Json.tsx @@ -0,0 +1,32 @@ +/** + * @file Json json + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'json', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M3.05956 9.3008C3.05956 11.5189 4.19803 13.6286 5.84481 14.6591C3.03344 13.7502 1.00012 11.1111 1.00012 7.99715C1.00012 4.19857 4.0258 1.10666 7.79859 1C10.902 1.10174 12.199 3.91217 12.199 6.22454C12.199 8.47716 11.7949 11.2187 8.14371 11.438C9.64657 10.9981 10.7505 9.53443 10.7505 7.7966C10.7505 5.71093 9.16045 4.02017 7.19904 4.02017C6.93893 4.02017 6.68535 
4.0499 6.44114 4.10635C4.98564 4.39796 3.05956 5.60796 3.05956 9.3008Z" + fill="#846BCE" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M12.9409 6.6992C12.9409 4.48113 11.8025 2.37143 10.1557 1.34094C12.967 2.24983 15.0004 4.88888 15.0004 8.00285C15.0004 11.8014 11.9747 14.8933 8.2019 15C5.09852 14.8983 3.80149 12.0878 3.80149 9.77546C3.80149 7.52284 4.20558 4.78129 7.85677 4.56204C6.35392 5.0019 5.25 6.46557 5.25 8.2034C5.25 10.2891 6.84004 11.9798 8.80145 11.9798C9.06156 11.9798 9.31514 11.9501 9.55934 11.8936C11.0148 11.602 12.9409 10.392 12.9409 6.6992Z" + fill="#846BCE" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/Markdown.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/Markdown.tsx new file mode 100644 index 000000000..ddc3fda30 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/Markdown.tsx @@ -0,0 +1,32 @@ +/** + * @file MarkDown markdown + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'markdown', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M13.5265 4.00024H11.5151V8.55859H9.04089L12.5205 12.4396L16.0001 8.55859H13.5265V4.00024Z" + fill="#3ADEAC" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.57023 4.00024H8.58165V12.4396H6.57023V7.12947L6.04568 7.75459L4.49563 9.62392L3.42792 8.3415L2.41145 7.13012V12.4399H0.400024V4.00061H2.41068L2.41099 4.00035L2.41121 4.00061H2.41145V4.0009L4.49057 6.47869L6.57023 4.00024Z" + fill="#3ADEAC" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/Python.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/Python.tsx new file mode 100644 index 000000000..07b567b2f --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/Python.tsx @@ -0,0 +1,32 @@ +/** + * @file Python python + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'python', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M2.60638 10.1862C2.83607 10.4764 3.1504 10.6577 3.46472 10.7302C3.63672 10.75 3.73828 10.75 4.19008 10.75H4.51379V9.3543C4.51379 8.41541 5.27491 7.6543 6.21379 7.6543H8.46235C9.43564 7.64618 9.46794 7.63715 9.6476 7.58688C9.661 7.58312 9.67523 7.57914 9.69073 7.57492C10.1139 7.45402 10.4282 7.21224 10.6095 6.87373C10.6579 6.77702 10.7183 6.59568 10.7425 6.4627C10.7667 6.37807 10.7667 6.25718 10.7667 4.84273V3.30738L10.7425 3.22275C10.6941 3.07768 10.6095 2.89634 10.537 2.81172C10.5043 2.79539 10.4661 2.75148 10.4299 2.70979C10.4125 2.68976 10.3955 2.67025 10.3798 2.65456C9.88416 2.20725 8.97746 1.97755 7.72017 2.00173C7.59498 2.00173 7.46979 2.0107 7.37193 2.01772C7.30913 2.02222 7.25758 2.02591 7.2245 2.02591C6.08811 2.11054 5.39901 2.42486 5.14514 2.9447C5.13065 2.9761 5.11905 3.00026 5.10977 3.02527C5.0726 3.12549 5.0726 3.23949 5.0726 3.88767V4.63721H7.9136V4.84273C7.9136 5.02407 
7.9136 5.04824 7.88942 5.04824H5.77378C3.69442 5.04824 3.67024 5.04824 3.56144 5.07242C3.01742 5.18123 2.60638 5.49555 2.34041 6.06375C2.11071 6.48688 2.02609 7.00672 2.00191 7.68372C1.97773 8.78385 2.18325 9.66637 2.60638 10.1862ZM6.75302 3.33156C6.80138 3.53708 6.6684 3.79095 6.46288 3.87558C6.40243 3.92394 6.37825 3.92394 6.25736 3.92394C6.16064 3.92394 6.12438 3.92394 6.07602 3.89976C5.82214 3.82722 5.68916 3.58543 5.73752 3.33156C5.78587 3.00515 6.1002 2.8359 6.39034 2.92052C6.57168 2.96888 6.70466 3.12604 6.75302 3.33156Z" + fill="#468DFF" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M13.3936 5.81379C13.1639 5.52364 12.8496 5.3423 12.5353 5.26977C12.3633 5.25 12.2617 5.25 11.8099 5.25H11.4862V6.6457C11.4862 7.58459 10.7251 8.3457 9.78621 8.3457H7.53765C6.56436 8.35382 6.53206 8.36285 6.3524 8.41312C6.339 8.41688 6.32477 8.42086 6.30927 8.42508C5.88614 8.54598 5.57182 8.78776 5.39048 9.12627C5.34212 9.22298 5.28168 9.40432 5.2575 9.5373C5.23332 9.62193 5.23332 9.74282 5.23332 11.1573V12.6926L5.2575 12.7772C5.30585 12.9223 5.39048 13.1037 5.46302 13.1883C5.49568 13.2046 5.53386 13.2485 5.5701 13.2902C5.58752 13.3102 5.60448 13.3298 5.62018 13.3454C6.11584 13.7928 7.02254 14.0224 8.27983 13.9983C8.40502 13.9983 8.53021 13.9893 8.62807 13.9823C8.69087 13.9778 8.74242 13.9741 8.7755 13.9741C9.91189 13.8895 10.601 13.5751 10.8549 13.0553C10.8694 13.0239 10.8809 12.9997 10.8902 12.9747C10.9274 12.8745 10.9274 12.7605 10.9274 12.1123V11.3628H8.0864V11.1573C8.0864 10.9759 8.0864 10.9518 8.11058 10.9518H10.2262C12.3056 10.9518 12.3298 10.9518 12.4386 10.9276C12.9826 10.8188 13.3936 10.5045 13.6596 9.93625C13.8893 9.51312 13.9739 8.99328 13.9981 8.31628C14.0223 7.21615 13.8167 6.33363 13.3936 5.81379ZM9.24698 12.6684C9.19862 12.4629 9.3316 12.209 9.53712 12.1244C9.59757 12.0761 9.62175 12.0761 9.74264 12.0761C9.83936 12.0761 9.87562 12.0761 9.92398 12.1002C10.1779 12.1728 10.3108 12.4146 10.2625 12.6684C10.2141 12.9949 9.8998 13.1641 9.60966 13.0795C9.42832 13.0311 9.29534 12.874 9.24698 12.6684Z" + fill="#468DFF" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/fileIcon/Yaml.tsx b/web_console_v2/client/src/components/IconPark/fileIcon/Yaml.tsx new file mode 100644 index 000000000..d42a8721d --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/fileIcon/Yaml.tsx @@ -0,0 +1,26 @@ +/** + * @file Yaml yaml + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'yaml', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M10.1821 2.51831H13.3465L5.6645 14.0183H2.50012L6.34112 8.26831L2.50012 2.51831H5.6645L7.92332 5.89975L10.1821 2.51831Z" + fill="#FFA841" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/ArrowFillDown.tsx b/web_console_v2/client/src/components/IconPark/icons/ArrowFillDown.tsx new file mode 100644 index 000000000..9082452f7 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/ArrowFillDown.tsx @@ -0,0 +1,26 @@ +/** + * @file Python python + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { 
ISvgIconProps, IconWrapper } from '../runtime';
+
+export default IconWrapper(
+  'arrow-fill-down',
+  false,
+  (props: ISvgIconProps) => (
+    <svg width="8" height="5" viewBox="0 0 8 5" fill="none" xmlns="http://www.w3.org/2000/svg">
+      <path
+        fillRule="evenodd"
+        clipRule="evenodd"
+        d="M4.30622 4.24922C4.14638 4.4393 3.85377 4.4393 3.69393 4.24922L0.85292 0.870721C0.634116 0.610521 0.819096 0.213281 1.15907 0.213281L6.84108 0.213281C7.18105 0.213281 7.36603 0.610521 7.14723 0.870721L4.30622 4.24922Z"
+        fill="#86909C"
+      />
+    </svg>
+  ),
+  (props: ISvgIconProps) => `
+`,
+);
diff --git a/web_console_v2/client/src/components/IconPark/icons/ArrowFillRight.tsx b/web_console_v2/client/src/components/IconPark/icons/ArrowFillRight.tsx
new file mode 100644
index 000000000..da7f3d9ca
--- /dev/null
+++ b/web_console_v2/client/src/components/IconPark/icons/ArrowFillRight.tsx
@@ -0,0 +1,26 @@
+/**
+ * @file ArrowFillRight arrow-fill-right
+ * @author Auto Generated by IconPark
+ */
+
+/* tslint:disable: max-line-length */
+/* eslint-disable max-len */
+import React from 'react';
+import { ISvgIconProps, IconWrapper } from '../runtime';
+
+export default IconWrapper(
+  'arrow-fill-right',
+  false,
+  (props: ISvgIconProps) => (
+    <svg width="5" height="8" viewBox="0 0 5 8" fill="none" xmlns="http://www.w3.org/2000/svg">
+      <path
+        fillRule="evenodd"
+        clipRule="evenodd"
+        d="M4.5113 3.69366C4.70138 3.8535 4.70138 4.14611 4.5113 4.30595L1.13281 7.14696C0.872606 7.36576 0.475366 7.18078 0.475366 6.84081V1.1588C0.475366 0.818828 0.872606 0.633847 1.13281 0.852652L4.5113 3.69366Z"
+        fill="#86909C"
+      />
+    </svg>
+  ),
+  (props: ISvgIconProps) => `
+`,
+);
diff --git a/web_console_v2/client/src/components/IconPark/icons/ArrowUpFill.tsx b/web_console_v2/client/src/components/IconPark/icons/ArrowUpFill.tsx
new file mode 100644
index 000000000..4193cc66a
--- /dev/null
+++ b/web_console_v2/client/src/components/IconPark/icons/ArrowUpFill.tsx
@@ -0,0 +1,33 @@
+/**
+ * @file ArrowUpFill arrow-up-fill
+ * @author Auto Generated by IconPark
+ */
+
+/* tslint:disable: max-line-length */
+/* eslint-disable max-len */
+import React from 'react';
+import { ISvgIconProps, IconWrapper } from '../runtime';
+
+export default IconWrapper(
+  'arrow-up-fill',
+  false,
+  (props: ISvgIconProps) => (
+    <svg
+      width={props.size || 16}
+      height={props.size || 16}
+      viewBox="0 0 16 16"
+      fill="none"
+      xmlns="http://www.w3.org/2000/svg"
+    >
+      <circle cx="7.99998" cy="8.0001" r="6.9" fill="currentColor" />
+      <path
+        fillRule="evenodd"
+        clipRule="evenodd"
+        d="M8.26849 4.79241L11.1173 7.6412C11.2656 7.78954 11.2656 8.03006 11.1173 8.1784L10.5801 8.71561C10.4317 8.86396 10.1912 8.86396 10.0429 8.71561L8.66732 7.34017L8.6674 11.1659C8.6674 11.3757 8.49733 11.5458 8.28753 11.5458H7.52781C7.31801 11.5458 7.14794 11.3757 7.14794 11.1659L7.14756 7.52443L5.95692 8.71561C5.80857 8.86396 5.56805 8.86396 5.41971 8.71561L4.8825 8.1784C4.73415 8.03006 4.73415 7.78954 4.8825 7.6412L7.73128 4.79241C7.87963 4.64407 8.12015 4.64407 8.26849 4.79241Z"
+        fill="white"
+      />
+    </svg>
+  ),
+  (props: ISvgIconProps) => `
+`,
+);
diff --git a/web_console_v2/client/src/components/IconPark/icons/Audit.tsx b/web_console_v2/client/src/components/IconPark/icons/Audit.tsx
new file mode 100644
index 000000000..0cff6ea5f
--- /dev/null
+++ b/web_console_v2/client/src/components/IconPark/icons/Audit.tsx
@@ -0,0 +1,60 @@
+/**
+ * @file Audit audit
+ * @author Auto Generated by IconPark
+ */
+
+/* tslint:disable: max-line-length */
+/* eslint-disable max-len */
+import React from 'react';
+import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'audit', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M11.8646 2.59896C11.8646 2.27813 11.6035 2.01562 11.2812 2.01562H3.11458C2.79229 2.01562 2.53125 2.27813 2.53125 2.59896V13.3906C2.53125 13.7115 2.79229 13.974 3.11458 13.974H6.61465V12.8073H3.69785V3.18233H10.6978V6.67706H11.8646V2.59896Z" + fill="#4E5969" + /> + <path + d="M7.19603 7.88301C7.17918 8.02807 7.0559 8.14066 6.90632 8.14066H5.15632L5.12231 8.1387C4.97725 8.12185 4.86465 7.99857 4.86465 7.84899V7.26566L4.86662 7.23165C4.88346 7.08659 5.00674 6.97399 5.15632 6.97399H6.90632L6.94034 6.97596C7.08539 6.9928 7.19799 7.11608 7.19799 7.26566V7.84899L7.19603 7.88301Z" + fill="#4E5969" + /> + <path + d="M9.2736 4.64259L9.23958 4.64062H5.15625C5.00667 4.64062 4.88339 4.75322 4.86655 4.89828L4.86458 4.93229V5.51562C4.86458 5.6652 4.97718 5.78848 5.12224 5.80533L5.15625 5.80729H9.23958C9.38916 5.80729 9.51244 5.6947 9.52929 5.54964L9.53125 5.51562V4.93229C9.53125 4.78271 9.41865 4.65944 9.2736 4.64259Z" + fill="#4E5969" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M9.09382 9.17188C9.09382 8.28592 9.81203 7.56771 10.698 7.56771C11.5839 7.56771 12.3022 8.28592 12.3022 9.17188C12.3022 9.85204 11.8789 10.4333 11.2813 10.6667V11.0677H12.7688C13.1554 11.0677 13.4688 11.3811 13.4688 11.7677V13.2844C13.4688 13.671 13.1554 13.9844 12.7688 13.9844H8.62715C8.24055 13.9844 7.92715 13.671 7.92715 13.2844V11.7677C7.92715 11.3811 8.24055 11.0677 8.62715 11.0677H10.1147V10.6667C9.51712 10.4333 9.09382 9.85204 9.09382 9.17188ZM10.698 8.73438C10.4564 8.73438 10.2605 8.93025 10.2605 9.17188C10.2605 9.4135 10.4564 9.60938 10.698 9.60938C10.9396 9.60938 11.1355 9.4135 11.1355 9.17188C11.1355 8.93025 10.9396 8.73438 10.698 8.73438ZM9.09382 12.8177V12.2344H12.3022V12.8177H9.09382Z" + fill="#4E5969" + /> + <path + d="M11.8646 2.59896C11.8646 2.27813 11.6035 2.01562 11.2812 2.01562H3.11458C2.79229 2.01562 2.53125 2.27813 2.53125 2.59896V13.3906C2.53125 13.7115 2.79229 13.974 3.11458 13.974H6.61465V12.8073H3.69785V3.18233H10.6978V6.67706H11.8646V2.59896Z" + stroke="#4E5969" + strokeWidth="0.2" + /> + <path + d="M7.19603 7.88301C7.17918 8.02807 7.0559 8.14066 6.90632 8.14066H5.15632L5.12231 8.1387C4.97725 8.12185 4.86465 7.99857 4.86465 7.84899V7.26566L4.86662 7.23165C4.88346 7.08659 5.00674 6.97399 5.15632 6.97399H6.90632L6.94034 6.97596C7.08539 6.9928 7.19799 7.11608 7.19799 7.26566V7.84899L7.19603 7.88301Z" + stroke="#4E5969" + strokeWidth="0.2" + /> + <path + d="M9.2736 4.64259L9.23958 4.64062H5.15625C5.00667 4.64062 4.88339 4.75322 4.86655 4.89828L4.86458 4.93229V5.51562C4.86458 5.6652 4.97718 5.78848 5.12224 5.80533L5.15625 5.80729H9.23958C9.38916 5.80729 9.51244 5.6947 9.52929 5.54964L9.53125 5.51562V4.93229C9.53125 4.78271 9.41865 4.65944 9.2736 4.64259Z" + stroke="#4E5969" + strokeWidth="0.2" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M9.09382 9.17188C9.09382 8.28592 9.81203 7.56771 10.698 7.56771C11.5839 7.56771 12.3022 8.28592 12.3022 9.17188C12.3022 9.85204 11.8789 10.4333 11.2813 10.6667V11.0677H12.7688C13.1554 11.0677 13.4688 11.3811 13.4688 11.7677V13.2844C13.4688 13.671 13.1554 13.9844 12.7688 13.9844H8.62715C8.24055 13.9844 7.92715 13.671 7.92715 13.2844V11.7677C7.92715 11.3811 8.24055 11.0677 8.62715 11.0677H10.1147V10.6667C9.51712 10.4333 9.09382 9.85204 9.09382 9.17188ZM10.698 8.73438C10.4564 8.73438 10.2605 8.93025 
10.2605 9.17188C10.2605 9.4135 10.4564 9.60938 10.698 9.60938C10.9396 9.60938 11.1355 9.4135 11.1355 9.17188C11.1355 8.93025 10.9396 8.73438 10.698 8.73438ZM9.09382 12.8177V12.2344H12.3022V12.8177H9.09382Z" + stroke="#4E5969" + strokeWidth="0.2" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/CheckCircleFill.tsx b/web_console_v2/client/src/components/IconPark/icons/CheckCircleFill.tsx new file mode 100644 index 000000000..562cbee43 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/CheckCircleFill.tsx @@ -0,0 +1,38 @@ +/** + * @file CheckCircleFill check-circle-fill + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'check-circle-fill', + false, + (props: ISvgIconProps) => ( + <svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.00008 1.16675C10.2217 1.16675 12.8334 3.77842 12.8334 7.00008C12.8334 10.2217 10.2217 12.8334 7.00008 12.8334C3.77842 12.8334 1.16675 10.2217 1.16675 7.00008C1.16675 3.77842 3.77842 1.16675 7.00008 1.16675Z" + fill="#00B42A" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M9.14096 5.01191L9.47611 5.29313C9.66128 5.44835 9.68542 5.7243 9.53011 5.9094C9.53008 5.90943 9.53006 5.90946 9.52994 5.90941L6.60396 9.39454C6.44853 9.57948 6.17264 9.60358 5.9875 9.44839L5.31721 8.88595L8.52451 5.06575C8.67993 4.88081 8.95583 4.85671 9.14096 5.01191Z" + fill="white" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M4.19338 7.32429L4.48693 6.99827C4.64605 6.82154 4.91721 6.80429 5.09745 6.95942L7.15529 8.7306L6.59541 9.40428C6.44097 9.59011 6.16513 9.61556 5.97931 9.46112C5.97701 9.45922 5.97474 9.45729 5.97249 9.45534L4.23206 7.94772C4.04942 7.78951 4.02962 7.51321 4.18782 7.33058C4.18965 7.32847 4.19151 7.32637 4.19338 7.32429Z" + fill="white" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/CloseCircleFill.tsx b/web_console_v2/client/src/components/IconPark/icons/CloseCircleFill.tsx new file mode 100644 index 000000000..77432a50e --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/CloseCircleFill.tsx @@ -0,0 +1,30 @@ +/** + * @file CloseCircleFill close-circle-fill + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'close-circle-fill', + false, + (props: ISvgIconProps) => ( + <svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M7.00008 1.16675C10.2218 1.16675 12.8334 3.77842 12.8334 7.00008C12.8334 10.2218 10.2218 12.8334 7.00008 12.8334C3.77842 12.8334 1.16675 10.2218 1.16675 7.00008C1.16675 3.77842 3.77842 1.16675 7.00008 1.16675Z" + fill="#E63F3F" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M8.89476 4.68747L9.32786 5.12058C9.46455 5.25726 9.46455 5.47887 9.32786 5.61555L7.93563 7.00813L9.32786 8.40029C9.46455 8.53697 9.46455 8.75858 9.32786 8.89526L8.89476 9.32837C8.75808 9.46505 8.53647 9.46505 8.39979 9.32837L7.00696 7.93563L5.61555 9.32786C5.47887 9.46454 5.25726 9.46454 5.12058 9.32786L4.68747 8.89476C4.55079 8.75807 4.55079 8.53647 
4.68747 8.39978L6.07829 7.00696L4.68747 5.61606C4.55079 5.47937 4.55079 5.25777 4.68747 5.12108L5.12058 4.68798C5.25726 4.5513 5.47887 4.5513 5.61555 4.68798L7.00696 6.07946L8.39979 4.68747C8.53647 4.55079 8.75808 4.55079 8.89476 4.68747Z" + fill="white" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/CompressedPackage.tsx b/web_console_v2/client/src/components/IconPark/icons/CompressedPackage.tsx new file mode 100644 index 000000000..47db914a8 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/CompressedPackage.tsx @@ -0,0 +1,26 @@ +/** + * @file CompressedPackage compressedPackage + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'compressedPackage', + false, + (props: ISvgIconProps) => ( + <svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M5.05801 1.4585C5.24687 1.4585 5.42597 1.54937 5.54687 1.70655L6.25363 2.62516H12.7037C13.0975 2.62516 13.4167 2.92115 13.4167 3.28627V11.8807C13.4167 12.2458 13.0975 12.5418 12.7037 12.5418H1.2963C0.90254 12.5418 0.583336 12.2458 0.583336 11.8807L0.583336 2.14789C0.583336 1.76715 0.868246 1.4585 1.2197 1.4585H5.05801ZM1.74984 3.79167V11.375L8.45817 11.375V10.5H9.04151V9.91667H8.74984C8.58876 9.91667 8.45817 9.78609 8.45817 9.625V9.04167C8.45817 8.88059 8.58876 8.75 8.74984 8.75H9.04151V8.16667H8.74984C8.58876 8.16667 8.45817 8.03609 8.45817 7.875V7.29167C8.45817 7.13059 8.58876 7 8.74984 7H9.04151V6.41667H8.74984C8.58876 6.41667 8.45817 6.28609 8.45817 6.125V5.54167C8.45817 5.38059 8.58876 5.25 8.74984 5.25H9.04151V4.66667H8.74984C8.58876 4.66667 8.45817 4.53609 8.45817 4.375V3.79167H1.74984ZM9.62463 4.66667H9.91629C10.0774 4.66667 10.208 4.79725 10.208 4.95834V5.54167C10.208 5.70275 10.0774 5.83334 9.91629 5.83334H9.62463V6.41667H9.91629C10.0774 6.41667 10.208 6.54725 10.208 6.70834V7.29167C10.208 7.45275 10.0774 7.58334 9.91629 7.58334H9.62463V8.16667H9.91629C10.0774 8.16667 10.208 8.29725 10.208 8.45834V9.04167C10.208 9.20275 10.0774 9.33334 9.91629 9.33334H9.62463V9.91667H9.91629C10.0774 9.91667 10.208 10.0473 10.208 10.2083V10.7917C10.208 10.9528 10.0774 11.0833 9.91629 11.0833H9.62256V11.375L12.2496 11.375V3.79167H9.62463V4.66667Z" + fill="#4E5969" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/EditNoUnderline.tsx b/web_console_v2/client/src/components/IconPark/icons/EditNoUnderline.tsx new file mode 100644 index 000000000..537bbcfa2 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/EditNoUnderline.tsx @@ -0,0 +1,32 @@ +/** + * @file EditNoUnderline edit-no-underline + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'edit-no-underline', + false, + (props: ISvgIconProps) => ( + <svg + width={props.size || 14} + height={props.size || 14} + viewBox="0 0 14 14" + fill="currentColor" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.9923 4.14848L9.63547 6.79166L4.62709 11.8H2.54572C2.25802 11.8 2.02233 11.5774 2.0015 11.295L2 11.2543V9.17291L6.9923 
4.14848ZM9.69573 2.12796L9.73145 2.16085L11.6055 4.0448C11.8177 4.25808 11.8173 4.60283 11.6045 4.81555L11.4028 5.01699L11.4064 5.0207L10.4071 6.01974L7.76094 3.37384L8.57897 2.54981L8.57381 2.5447L8.95868 2.15984C9.16058 1.95794 9.48132 1.94731 9.69573 2.12796Z" + fill="currentColor" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/ExclamationCircleFill.tsx b/web_console_v2/client/src/components/IconPark/icons/ExclamationCircleFill.tsx new file mode 100644 index 000000000..5e20a9bac --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/ExclamationCircleFill.tsx @@ -0,0 +1,30 @@ +/** + * @file ExclamationCircleFill exclamation-circle-fill + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'exclamation-circle-fill', + false, + (props: ISvgIconProps) => ( + <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M10 18.75C14.8325 18.75 18.75 14.8325 18.75 10C18.75 5.16751 14.8325 1.25 10 1.25C5.16751 1.25 1.25 5.16751 1.25 10C1.25 14.8325 5.16751 18.75 10 18.75Z" + fill="currentColor" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M10.6295 12.3578C10.898 12.3578 11.1156 12.5754 11.1156 12.8439V14.1078C11.1156 14.3763 10.898 14.5939 10.6295 14.5939H9.36563C9.09716 14.5939 8.87952 14.3763 8.87952 14.1078V12.8439C8.87952 12.5754 9.09716 12.3578 9.36563 12.3578H10.6295ZM10.6295 5.21826C10.898 5.21826 11.1156 5.4359 11.1156 5.70437V10.8184C11.1156 11.0869 10.898 11.3046 10.6295 11.3046H9.36563C9.09716 11.3046 8.87952 11.0869 8.87952 10.8184V5.70437C8.87952 5.4359 9.09716 5.21826 9.36563 5.21826H10.6295Z" + fill="white" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/FileAddFill.tsx b/web_console_v2/client/src/components/IconPark/icons/FileAddFill.tsx new file mode 100644 index 000000000..9a6019b01 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/FileAddFill.tsx @@ -0,0 +1,38 @@ +/** + * @file FileAddFill file-add-fill + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'file-add-fill', + false, + (props: ISvgIconProps) => ( + <svg + width={props.size || 16} + height={props.size || 16} + viewBox="0 0 16 16" + fill="none" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M9.86089 1.875C10.1708 1.875 10.4681 1.99834 10.6869 2.21781L12.9094 4.4462C13.1275 4.6649 13.25 4.96117 13.25 5.27005V13.5417C13.25 13.8638 12.9888 14.125 12.6667 14.125H3.33333C3.01117 14.125 2.75 13.8638 2.75 13.5417V2.45833C2.75 2.13617 3.01117 1.875 3.33333 1.875H9.86089Z" + fill="currentColor" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.87433 5.91699C7.59818 5.91699 7.37433 6.14085 7.37433 6.41699V7.794H5.99731C5.72117 7.794 5.49731 8.01786 5.49731 8.294V8.54534C5.49731 8.82149 5.72117 9.04534 5.99732 9.04534H7.37433V10.4224C7.37433 10.6985 7.59818 10.9224 7.87433 10.9224H8.12567C8.40181 10.9224 8.62567 10.6985 8.62567 10.4224V9.04534H10.0027C10.2788 9.04534 10.5027 8.82149 10.5027 8.54534V8.294C10.5027 8.01786 10.2788 7.794 10.0027 
7.794H8.62567V6.41699C8.62567 6.14085 8.40181 5.91699 8.12567 5.91699H7.87433Z" + fill="white" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/FolderAddFill.tsx b/web_console_v2/client/src/components/IconPark/icons/FolderAddFill.tsx new file mode 100644 index 000000000..5cb926520 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/FolderAddFill.tsx @@ -0,0 +1,38 @@ +/** + * @file FolderAddFill folder-add-fill + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'folder-add-fill', + false, + (props: ISvgIconProps) => ( + <svg + width={props.size || 16} + height={props.size || 16} + viewBox="0 0 16 16" + fill="none" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.05792 2.57031C6.24678 2.57031 6.42589 2.66119 6.54679 2.81837L7.25354 3.73698H13.8055C14.143 3.73698 14.4166 3.99815 14.4166 4.32031V12.847C14.4166 13.1692 14.143 13.4304 13.8055 13.4304H2.19436C1.85686 13.4304 1.58325 13.1692 1.58325 12.847V3.25971C1.58325 2.87896 1.86816 2.57031 2.21962 2.57031H6.05792Z" + fill="currentColor" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.87433 5.91699C7.59818 5.91699 7.37433 6.14085 7.37433 6.41699V7.794H5.99731C5.72117 7.794 5.49731 8.01786 5.49731 8.294V8.54534C5.49731 8.82149 5.72117 9.04534 5.99732 9.04534H7.37433V10.4224C7.37433 10.6985 7.59818 10.9224 7.87433 10.9224H8.12567C8.40181 10.9224 8.62567 10.6985 8.62567 10.4224V9.04534H10.0027C10.2788 9.04534 10.5027 8.82149 10.5027 8.54534V8.294C10.5027 8.01786 10.2788 7.794 10.0027 7.794H8.62567V6.41699C8.62567 6.14085 8.40181 5.91699 8.12567 5.91699H7.87433Z" + fill="white" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/InfoCircleFill.tsx b/web_console_v2/client/src/components/IconPark/icons/InfoCircleFill.tsx new file mode 100644 index 000000000..f6b9ffe17 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/InfoCircleFill.tsx @@ -0,0 +1,37 @@ +/** + * @file InfoCircleFill info-circle-fill + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'info-circle-fill', + false, + (props: ISvgIconProps) => ( + <svg + viewBox="0 0 12 12" + width={props.size || 12} + height={props.size || 12} + fill="none" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.00008 0.166687C9.22177 0.166687 11.8334 2.77836 11.8334 6.00002C11.8334 9.22171 9.22177 11.8334 6.00008 11.8334C2.77842 11.8334 0.166748 9.22171 0.166748 6.00002C0.166748 2.77836 2.77842 0.166687 6.00008 0.166687Z" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.45214 4.92151C6.55953 4.92151 6.64659 5.00856 6.64659 5.11595L6.64623 7.86148L7.22343 7.8619C7.3437 7.8619 7.4412 7.95941 7.4412 8.07968V8.77654C7.4412 8.89681 7.3437 8.99431 7.22343 8.99431H4.82105C4.70077 8.99431 4.60327 8.89681 4.60327 8.77654V8.07968C4.60327 7.95941 4.70077 7.8619 4.82105 7.8619L5.40179 7.86148V6.06741L5.29023 6.06831C5.16995 6.06831 5.07245 5.9708 5.07245 5.85053V5.14226C5.07245 5.02199 5.16995 4.92449 5.29023 4.92449L5.5831 4.92199C5.58767 4.92167 
5.59229 4.92151 5.59694 4.92151H6.45214ZM6.33817 3.01334C6.45845 3.01334 6.55595 3.11084 6.55595 3.23111V4.04C6.55595 4.16028 6.45845 4.25778 6.33817 4.25778H5.52929C5.40901 4.25778 5.31151 4.16028 5.31151 4.04V3.23111C5.31151 3.11084 5.40901 3.01334 5.52929 3.01334H6.33817Z"
+        fill="white"
+      />
+    </svg>
+  ),
+  (props: ISvgIconProps) => `
+`,
+);
diff --git a/web_console_v2/client/src/components/IconPark/icons/Loading.tsx b/web_console_v2/client/src/components/IconPark/icons/Loading.tsx
new file mode 100644
index 000000000..14aa6d0ff
--- /dev/null
+++ b/web_console_v2/client/src/components/IconPark/icons/Loading.tsx
@@ -0,0 +1,40 @@
+/**
+ * @file Loading loading
+ * @author Auto Generated by IconPark
+ */
+
+/* tslint:disable: max-line-length */
+/* eslint-disable max-len */
+import React from 'react';
+import { ISvgIconProps, IconWrapper } from '../runtime';
+
+export default IconWrapper(
+  'loading',
+  false,
+  (props: ISvgIconProps) => (
+    <svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
+      <path
+        fillRule="evenodd"
+        clipRule="evenodd"
+        d="M5.99886 0.166748C9.22052 0.166748 11.8322 2.77842 11.8322 6.00008C11.8322 9.22174 9.22052 11.8334 5.99886 11.8334C2.7772 11.8334 0.165527 9.22174 0.165527 6.00008C0.165527 2.77842 2.7772 0.166748 5.99886 0.166748ZM5.99886 2.11119C3.85109 2.11119 2.10997 3.85231 2.10997 6.00008C2.10997 8.14786 3.85109 9.88897 5.99886 9.88897C8.14663 9.88897 9.88775 8.14786 9.88775 6.00008C9.88775 3.85231 8.14663 2.11119 5.99886 2.11119Z"
+        fill="url(#paint0_angular)"
+      />
+      <defs>
+        <radialGradient
+          id="paint0_angular"
+          cx="0"
+          cy="0"
+          r="1"
+          gradientUnits="userSpaceOnUse"
+          gradientTransform="translate(5.99886 6.00008) rotate(-23.9625) scale(5.74517 5.76738)"
+        >
+          <stop offset="0.921737" stopColor="#165DFF" stopOpacity="0" />
+          <stop offset="0.944115" stopColor="#165DFF" />
+          <stop offset="0.989583" stopColor="#165DFF" />
+        </radialGradient>
+      </defs>
+    </svg>
+  ),
+  (props: ISvgIconProps) => `
+`,
+);
diff --git a/web_console_v2/client/src/components/IconPark/icons/Log.tsx b/web_console_v2/client/src/components/IconPark/icons/Log.tsx
index 6cdfe1be6..c57c9c46d 100644
--- a/web_console_v2/client/src/components/IconPark/icons/Log.tsx
+++ b/web_console_v2/client/src/components/IconPark/icons/Log.tsx
@@ -13,8 +13,8 @@ export default IconWrapper(
   (props: ISvgIconProps) => (
     <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
       <path
-        fill-rule="evenodd"
-        clip-rule="evenodd"
+        fillRule="evenodd"
+        clipRule="evenodd"
         d="M10.1267 1C10.481 1 10.8206 1.14096 11.0708 1.39178L13.6107 3.93851C13.86 4.18845 14 4.52705 14 4.88006V14.3333C14 14.7015 13.7015 15 13.3333 15H2.66667C2.29848 15 2 14.7015 2 14.3333V1.66667C2 1.29848 2.29848 1 2.66667 1H10.1267ZM9.72985 2.33341H3.33318V13.6667H12.6665V5.27675C12.6665 5.10027 12.5965 4.931 12.4719 4.80603L10.2019 2.52936C10.0769 2.40392 9.907 2.33341 9.72985 2.33341ZM8.33333 10.3334C8.51743 10.3334 8.66667 10.4826 8.66667 10.6667V11.3334C8.66667 11.5175 8.51743 11.6667 8.33333 11.6667H5.33333C5.14924 11.6667 5 11.5175 5 11.3334V10.6667C5 10.4826 5.14924 10.3334 5.33333 10.3334H8.33333ZM10.6667 5C10.8508 5 11 5.14924 11 5.33333V6C11 6.18409 10.8508 6.33333 10.6667 6.33333H5.33333C5.14924 6.33333 5 6.18409 5 6V5.33333C5 5.14924 5.14924 5 5.33333 5H10.6667ZM10.6667 7.66659C10.8508 7.66659 11 7.81583 11 7.99992V8.66659C11 8.85069 10.8508 8.99992 10.6667 8.99992H5.33333C5.14924 8.99992 5 8.85069 5 8.66659V7.99992C5 7.81583 5.14924 7.66659 
5.33333 7.66659H10.6667Z" fill="#4E5969" /> diff --git a/web_console_v2/client/src/components/IconPark/icons/ModelCenter.tsx b/web_console_v2/client/src/components/IconPark/icons/ModelCenter.tsx new file mode 100644 index 000000000..72a7c3d47 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/ModelCenter.tsx @@ -0,0 +1,31 @@ +/** + * @file ModelCenter model-center + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'model-center', + false, + (props: ISvgIconProps) => ( + <svg + width={props.size || 16} + height={props.size || 15} + viewBox="0 0 16 15" + fill="currentColor" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M8.34318 0.129989C8.12927 0.0133111 7.87074 0.0133111 7.65683 0.129989L1.17922 3.66323C1.1146 3.69847 1.0615 3.75158 1.02625 3.8162C0.924876 4.00205 0.993361 4.2349 1.17922 4.33628L7.65683 7.86952C7.87074 7.9862 8.12927 7.9862 8.34318 7.86952L14.8208 4.33628C15.0066 4.2349 15.0751 4.00205 14.9738 3.8162C14.9385 3.75158 14.8854 3.69847 14.8208 3.66323L8.34318 0.129989ZM8 6.42402L3.55552 3.99976L8 1.57549L12.4445 3.99976L8 6.42402ZM1.42008 6.31449C1.59064 6.25036 1.7918 6.25575 2.00046 6.36556L8 9.52322L13.9996 6.36556C14.2082 6.25575 14.4094 6.25036 14.5799 6.31449C14.7493 6.3782 14.8849 6.50906 14.9675 6.66597C15.0501 6.82288 15.0812 7.00875 15.0378 7.18446C14.9941 7.36137 14.8758 7.52413 14.6671 7.63395L8.42027 10.9218C8.30388 11.0104 8.15978 11.064 8 11.0595C7.84023 11.064 7.69613 11.0104 7.57974 10.9218L1.33289 7.63395C1.12423 7.52413 1.0059 7.36137 0.962204 7.18446C0.9188 7.00875 0.949893 6.82288 1.03248 6.66597C1.11507 6.50906 1.25067 6.3782 1.42008 6.31449ZM1.42008 9.31449C1.59064 9.25036 1.7918 9.25575 2.00046 9.36556L8 12.5232L13.9996 9.36556C14.2082 9.25575 14.4094 9.25036 14.5799 9.31449C14.7493 9.3782 14.8849 9.50906 14.9675 9.66597C15.0501 9.82288 15.0812 10.0087 15.0378 10.1845C14.9941 10.3614 14.8758 10.5241 14.6671 10.6339L8.42027 13.9218C8.30388 14.0104 8.15978 14.064 8 14.0595C7.84023 14.064 7.69613 14.0104 7.57974 13.9218L1.33289 10.6339C1.12423 10.5241 1.0059 10.3614 0.962204 10.1845C0.9188 10.0087 0.949893 9.82288 1.03248 9.66597C1.11507 9.50906 1.25067 9.3782 1.42008 9.31449Z" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/NormalFile.tsx b/web_console_v2/client/src/components/IconPark/icons/NormalFile.tsx new file mode 100644 index 000000000..2134dd727 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/NormalFile.tsx @@ -0,0 +1,26 @@ +/** + * @file NormalFile normal-file + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'normal-file', + false, + (props: ISvgIconProps) => ( + <svg width="12" height="14" viewBox="0 0 12 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.86089 0.875C8.17084 0.875 8.46806 0.998344 8.68694 1.21781L10.9094 3.4462C11.1275 3.6649 11.25 3.96117 11.25 4.27005V12.5417C11.25 12.8638 10.9888 13.125 10.6667 13.125H1.33333C1.01117 13.125 0.75 12.8638 0.75 12.5417V1.45833C0.75 1.13617 1.01117 0.875 1.33333 0.875H7.86089ZM7.39952 
2.22286H2.0817V11.756H9.91667V4.74572C9.91667 4.59446 9.85669 4.44937 9.74989 4.34225L7.80418 2.39082C7.69696 2.2833 7.55137 2.22286 7.39952 2.22286ZM6.29167 8.16667C6.45275 8.16667 6.58333 8.29725 6.58333 8.45833V9.17456C6.58333 9.33565 6.45275 9.46623 6.29167 9.46623H3.66667C3.50558 9.46623 3.375 9.33565 3.375 9.17456V8.45833C3.375 8.29725 3.50558 8.16667 3.66667 8.16667H6.29167ZM8.33333 5.09561C8.49442 5.09561 8.625 5.22619 8.625 5.38727V6.125C8.625 6.28608 8.49442 6.41667 8.33333 6.41667H3.66667C3.50558 6.41667 3.375 6.28608 3.375 6.125V5.38727C3.375 5.22619 3.50558 5.09561 3.66667 5.09561H8.33333Z" + fill="#42A5F5" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/PlusBold.tsx b/web_console_v2/client/src/components/IconPark/icons/PlusBold.tsx new file mode 100644 index 000000000..7a60f477f --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/PlusBold.tsx @@ -0,0 +1,26 @@ +/** + * @file Plus plus + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'plus', + false, + (props: ISvgIconProps) => ( + <svg fill="none" viewBox="0 0 12 12" xmlns="http://www.w3.org/2000/svg"> + <path + clipRule="evenodd" + d="m5.5 1.99951c-.27614 0-.5.22386-.5.5v2.5h-2.5c-.27614 0-.5.22386-.5.5v1c0 .27614.22386.5.5.5h2.5v2.5c0 .27614.22386.5.5.5h1c.27614 0 .5-.22386.5-.5v-2.5h2.5c.27614 0 .5-.22386.5-.5v-1c0-.27614-.22386-.5-.5-.5h-2.5v-2.5c0-.27614-.22386-.5-.5-.5z" + fill="currentColor" + fillRule="evenodd" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/Python.tsx b/web_console_v2/client/src/components/IconPark/icons/Python.tsx new file mode 100644 index 000000000..b43f43d04 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/Python.tsx @@ -0,0 +1,28 @@ +/** + * @file Python python + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'python', + false, + (props: ISvgIconProps) => ( + <svg width="12" height="14" viewBox="0 0 12 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M5.74663 12.9995C4.29495 12.9499 3.3892 12.5156 3.15346 11.7464L3.12865 11.6595V10.1706C3.12865 8.45839 3.12865 8.43357 3.2155 8.16061C3.3768 7.67671 3.77384 7.35412 4.3694 7.21764L4.50588 7.19282H6.1933C7.39682 7.19282 7.90553 7.19282 7.95516 7.16801C8.30257 7.09356 8.5135 7.03153 8.72442 6.87023C8.98498 6.68412 9.1835 6.36152 9.25795 6.01411C9.33239 5.75355 9.33239 5.77837 9.33239 4.88503V4.05373H10.5359L10.6476 4.07854C11.268 4.26466 11.7394 4.84781 11.9256 5.74115C12 6.08856 12 6.11337 12 6.9943C12 7.85042 12 7.85042 11.9256 8.1482C11.8511 8.42116 11.7891 8.65691 11.665 8.89265C11.4293 9.35172 11.0818 9.67432 10.6476 9.82321C10.387 9.91006 10.598 9.89765 8.16609 9.91006H5.96996V10.3319H8.88572V11.8332C8.86091 11.9201 8.83609 12.0441 8.77405 12.1558C8.69961 12.2675 8.5135 12.4536 8.40183 12.5529C8.00479 12.8134 7.40923 12.9747 6.61515 13.0119H5.74663V12.9995ZM7.75664 12.069C8.0296 12.0193 8.24053 11.7464 8.1909 11.4734C8.14127 11.2377 7.97997 11.0764 7.75664 11.0391C7.40923 10.9895 7.11145 11.2997 7.16108 11.6347C7.21071 11.8953 7.42164 12.0566 7.66979 12.069H7.75664ZM1.50327 
9.94728C1.18067 9.87284 0.858077 9.68673 0.622335 9.38895C0.188073 8.85543 -0.0228541 7.94968 0.00196089 6.8206C0.0267759 6.12578 0.113628 5.59226 0.349371 5.158C0.622335 4.57484 1.04419 4.25225 1.60253 4.14058C1.71419 4.11577 1.73901 4.11577 3.8731 4.11577H6.04441C6.06922 4.11577 6.06922 4.09095 6.06922 3.90484V3.69391H3.15346V2.92465C3.15346 2.09335 3.15346 2.11816 3.22791 1.95686C3.48846 1.42334 4.19569 1.10075 5.36199 1.0139C5.44885 1.0139 5.65977 0.989081 5.8707 0.989081C7.16108 0.964266 8.09164 1.20001 8.60035 1.65909C8.64998 1.70872 8.71202 1.79557 8.76165 1.82038C8.83609 1.90723 8.92294 2.09335 8.97257 2.24224L8.99739 2.32909V3.90484C8.99739 5.35651 8.99739 5.48059 8.97257 5.56744C8.94776 5.70392 8.88572 5.89004 8.83609 5.9893C8.64998 6.33671 8.32738 6.58486 7.89312 6.70893C7.62016 6.78338 7.73182 6.78338 5.88311 6.79578C4.03439 6.79578 4.14606 6.79578 3.89791 6.87023C3.3892 7.00671 3.01698 7.40375 2.8805 7.96209C2.80605 8.22264 2.80605 8.19783 2.80605 9.09117V9.94728H2.24772C1.71419 9.9721 1.5529 9.9721 1.50327 9.94728ZM4.58032 2.91224C4.79125 2.82539 4.92773 2.56483 4.8781 2.3539C4.82847 2.14298 4.69199 1.98168 4.50588 1.93205C4.2081 1.8452 3.8855 2.0189 3.83587 2.3539C3.78624 2.61446 3.92273 2.86261 4.18328 2.93706C4.23291 2.96187 4.27014 2.96187 4.3694 2.96187C4.49347 2.96187 4.51829 2.96187 4.58032 2.91224Z" + fill="#FED142" + /> + <path + d="M1.50327 9.94729C1.18067 9.87284 0.858077 9.68673 0.622335 9.38895C0.188073 8.85543 -0.0228541 7.94968 0.00196089 6.8206C0.0267759 6.12578 0.113628 5.59226 0.34937 5.158C0.622335 4.57484 1.04419 4.25225 1.60253 4.14058C1.71419 4.11577 1.73901 4.11577 3.8731 4.11577H6.04441C6.06922 4.11577 6.06922 4.09095 6.06922 3.90484V3.69391H3.15346V2.92465C3.15346 2.09335 3.15346 2.11816 3.22791 1.95687C3.48846 1.42334 4.19569 1.10075 5.36199 1.0139C5.44885 1.0139 5.65977 0.989081 5.8707 0.989081C7.16108 0.964266 8.09164 1.20001 8.60035 1.65909C8.64998 1.70872 8.71202 1.79557 8.76164 1.82038C8.83609 1.90724 8.92294 2.09335 8.97257 2.24224L8.99739 2.32909V3.90484C8.99739 5.35652 8.99739 5.48059 8.97257 5.56744C8.94776 5.70393 8.88572 5.89004 8.83609 5.9893C8.64998 6.33671 8.32738 6.58486 7.89312 6.70893C7.62016 6.78338 7.73182 6.78338 5.88311 6.79578C4.03439 6.79578 4.14606 6.79578 3.89791 6.87023C3.3892 7.00671 3.01698 7.40375 2.8805 7.96209C2.80605 8.22265 2.80605 8.19783 2.80605 9.09117V9.94729H2.24772C1.71419 9.9721 1.5529 9.9721 1.50327 9.94729ZM4.58032 2.91224C4.79125 2.82539 4.92773 2.56483 4.8781 2.3539C4.82847 2.14298 4.69199 1.98168 4.50588 1.93205C4.2081 1.8452 3.8855 2.0189 3.83587 2.3539C3.78624 2.61446 3.92273 2.86261 4.18328 2.93706C4.23291 2.96187 4.27014 2.96187 4.3694 2.96187C4.49347 2.96187 4.51829 2.96187 4.58032 2.91224Z" + fill="#3571A3" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/RightAngle.tsx b/web_console_v2/client/src/components/IconPark/icons/RightAngle.tsx new file mode 100644 index 000000000..d708f446e --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/RightAngle.tsx @@ -0,0 +1,36 @@ +/** + * @file RightAngle right-angle + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'right-angle', + false, + (props: ISvgIconProps) => ( + <svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M7.00008 
12.8334C10.2217 12.8334 12.8334 10.2217 12.8334 7.00008C12.8334 3.77842 10.2217 1.16675 7.00008 1.16675C3.77842 1.16675 1.16675 3.77842 1.16675 7.00008C1.16675 10.2217 3.77842 12.8334 7.00008 12.8334Z" + fill="#FA9600" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.41813 3.81226C7.59711 3.81226 7.74221 3.95735 7.74221 4.13633V7.54571C7.74221 7.72469 7.59711 7.86979 7.41813 7.86979H6.57554C6.39656 7.86979 6.25146 7.72469 6.25146 7.54571V4.13633C6.25146 3.95735 6.39656 3.81226 6.57554 3.81226H7.41813Z" + fill="white" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.25146 6.70321C6.25146 6.52422 6.39656 6.37913 6.57554 6.37913H9.98492C10.1639 6.37913 10.309 6.52422 10.309 6.70321V7.5458C10.309 7.72478 10.1639 7.86987 9.98492 7.86987H6.57554C6.39656 7.86987 6.25146 7.72478 6.25146 7.5458V6.70321Z" + fill="white" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/Rocket.tsx b/web_console_v2/client/src/components/IconPark/icons/Rocket.tsx new file mode 100644 index 000000000..c6779812d --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/Rocket.tsx @@ -0,0 +1,48 @@ +/** + * @file Rocket rocket + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'rocket', + false, + (props: ISvgIconProps) => ( + <svg + width={props.size || 14} + height={props.size || 14} + viewBox="0 0 14 12" + fill="none" + xmlns="http://www.w3.org/2000/svg" + > + <path + d="M9.67204 4.95249C10.0076 4.95249 10.2797 4.68042 10.2797 4.34481C10.2797 4.0092 10.0076 3.73713 9.67204 3.73713C9.33642 3.73713 9.06436 4.0092 9.06436 4.34481C9.06436 4.68042 9.33642 4.95249 9.67204 4.95249Z" + fill="#165DFF" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M10.3546 1.48755L12.5277 1.48755V3.66058C12.5277 4.81123 12.0706 5.91475 11.2569 6.72839L9.79466 8.19068L9.82447 10.7994L6.24477 12.5127L5.94033 10.8687C5.45125 11.2109 4.89447 11.4502 4.3038 11.5683L1.98271 12.0325L2.44693 9.71142C2.56704 9.11086 2.81234 8.54533 3.16381 8.05037L1.49438 7.78752L3.21586 4.19075L5.8245 4.22061L7.28684 2.75828C8.10047 1.94464 9.204 1.48755 10.3546 1.48755ZM10.5351 6.00655L8.76916 7.77284L8.79626 10.1598L7.00592 11.0166L6.80011 9.90527C6.67381 9.22326 6.34364 8.5956 5.85319 8.10515C5.34044 7.5924 4.67833 7.25542 3.96202 7.14263L3.00675 6.99223L3.85546 5.21897L6.24215 5.24629L6.24216 5.24584L6.24255 5.24624L8.00868 3.48012C8.63087 2.85793 9.47474 2.50838 10.3546 2.50838L11.5068 2.50838V3.66058C11.5068 4.54049 11.1573 5.38436 10.5351 6.00655ZM5.13135 8.82699C5.39191 9.08754 5.58805 9.40323 5.70652 9.74888C5.25845 10.1636 4.70391 10.4472 4.1036 10.5673L3.28402 10.7312L3.44794 9.91162C3.57042 9.29918 3.86316 8.73438 4.29165 8.28167C4.60494 8.40194 4.89143 8.58706 5.13135 8.82699Z" + fill="#165DFF" + /> + <path + d="M9.67204 4.95249C10.0076 4.95249 10.2797 4.68042 10.2797 4.34481C10.2797 4.0092 10.0076 3.73713 9.67204 3.73713C9.33642 3.73713 9.06436 4.0092 9.06436 4.34481C9.06436 4.68042 9.33642 4.95249 9.67204 4.95249Z" + stroke="#165DFF" + strokeWidth="0.3" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M10.3546 1.48755L12.5277 1.48755V3.66058C12.5277 4.81123 12.0706 5.91475 11.2569 6.72839L9.79466 8.19068L9.82447 10.7994L6.24477 12.5127L5.94033 10.8687C5.45125 11.2109 4.89447 11.4502 4.3038 11.5683L1.98271 12.0325L2.44693 9.71142C2.56704 
9.11086 2.81234 8.54533 3.16381 8.05037L1.49438 7.78752L3.21586 4.19075L5.8245 4.22061L7.28684 2.75828C8.10047 1.94464 9.204 1.48755 10.3546 1.48755ZM10.5351 6.00655L8.76916 7.77284L8.79626 10.1598L7.00592 11.0166L6.80011 9.90527C6.67381 9.22326 6.34364 8.5956 5.85319 8.10515C5.34044 7.5924 4.67833 7.25542 3.96202 7.14263L3.00675 6.99223L3.85546 5.21897L6.24215 5.24629L6.24216 5.24584L6.24255 5.24624L8.00868 3.48012C8.63087 2.85793 9.47474 2.50838 10.3546 2.50838L11.5068 2.50838V3.66058C11.5068 4.54049 11.1573 5.38436 10.5351 6.00655ZM5.13135 8.82699C5.39191 9.08754 5.58805 9.40323 5.70652 9.74888C5.25845 10.1636 4.70391 10.4472 4.1036 10.5673L3.28402 10.7312L3.44794 9.91162C3.57042 9.29918 3.86316 8.73438 4.29165 8.28167C4.60494 8.40194 4.89143 8.58706 5.13135 8.82699Z" + stroke="#165DFF" + strokeWidth="0.3" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/SortArrow.tsx b/web_console_v2/client/src/components/IconPark/icons/SortArrow.tsx new file mode 100644 index 000000000..4230067b9 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/SortArrow.tsx @@ -0,0 +1,28 @@ +/** + * @file SortArrow sort-arrow + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'sort-arrow', + false, + (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + d="M5.78475 9.3417C5.67168 9.20601 5.76817 9 5.9448 9H10.0552C10.2318 9 10.3283 9.20601 10.2152 9.34171L8.38411 11.5391C8.18421 11.7789 7.81579 11.7789 7.61589 11.5391L5.78475 9.3417Z" + fill="#86909C" + /> + <path + d="M5.78475 6.6583C5.67168 6.79399 5.76817 7 5.9448 7H10.0552C10.2318 7 10.3283 6.79399 10.2152 6.65829L8.38411 4.46093C8.18421 4.22106 7.81579 4.22106 7.61589 4.46093L5.78475 6.6583Z" + fill="#86909C" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/StarFull.tsx b/web_console_v2/client/src/components/IconPark/icons/StarFull.tsx new file mode 100644 index 000000000..63e6f3ba2 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/StarFull.tsx @@ -0,0 +1,25 @@ +/** + * @file StarFull star-full + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'star-full', + false, + (props: ISvgIconProps) => ( + <svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.2408 10.8C7.08861 10.7268 6.91139 10.7268 6.7592 10.8L3.78203 12.2319C3.39024 12.4204 2.94359 12.1061 2.98868 11.6736L3.32683 8.43064C3.34313 8.27436 3.29248 8.11848 3.18743 8.00162L0.984664 5.55114C0.699033 5.23339 0.864024 4.7256 1.28187 4.63642L4.50431 3.9487C4.65798 3.9159 4.79057 3.81957 4.86925 3.68355L6.5191 0.831355C6.73304 0.461513 7.26696 0.461513 7.4809 0.831355L9.13075 3.68355C9.20942 3.81957 9.34202 3.9159 9.49569 3.9487L12.7181 4.63642C13.136 4.7256 13.301 5.23339 13.0153 5.55114L10.8126 8.00162C10.7075 8.11848 10.6569 8.27436 10.6732 8.43064L11.0113 11.6736C11.0564 12.1061 10.6098 12.4204 10.218 12.2319L7.2408 10.8Z" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); 
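The icon modules above all follow the same generated shape: an SVG render function wrapped by `IconWrapper(name, rtl, render, cssTemplate)` from `../runtime`, with the trailing template function left empty, re-exported through the `components/IconPark` barrel later in this patch. As a quick orientation, here is a minimal consumption sketch; the `DemoToolbar` component is illustrative only and not part of the patch, while the icon names, the `size` prop, and the `currentColor` behavior are taken from the generated sources above:

```tsx
// Illustrative usage sketch, not part of this patch.
import React from 'react';
import { Rocket, StarFull, SortArrow, Todo } from 'components/IconPark';

export default function DemoToolbar() {
  return (
    <div role="toolbar">
      {/* Rocket and Todo derive width/height from props.size (defaults 14 and 16). */}
      <Rocket size={16} />
      {/* Todo is drawn with fill="currentColor", so it inherits the surrounding CSS color. */}
      <span style={{ color: '#165DFF' }}>
        <Todo size={16} />
      </span>
      {/* SortArrow hard-codes its fill (#86909C) and, like StarFull, uses a fixed 16x16 / 14x14 viewBox. */}
      <SortArrow />
      <StarFull />
    </div>
  );
}
```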
diff --git a/web_console_v2/client/src/components/IconPark/icons/Struct.tsx b/web_console_v2/client/src/components/IconPark/icons/Struct.tsx index e20367db0..e4284d78a 100644 --- a/web_console_v2/client/src/components/IconPark/icons/Struct.tsx +++ b/web_console_v2/client/src/components/IconPark/icons/Struct.tsx @@ -6,8 +6,8 @@ import { ISvgIconProps, IconWrapper } from '../runtime'; export default IconWrapper('ad-product', false, (props: ISvgIconProps) => ( <svg width="14" height="14" viewBox="0 0 14 14" xmlns="http://www.w3.org/2000/svg"> <path - fill-rule="evenodd" - clip-rule="evenodd" + fillRule="evenodd" + clipRule="evenodd" d="M7 0C8.10457 0 9 0.895431 9 2C9 2.87073 8.44357 3.61149 7.6669 3.88612L7.66667 4.385L10.4641 6L10.464 9.92033L11.0104 10.2616C11.3022 10.0951 11.64 10 12 10C13.1046 10 14 10.8954 14 12C14 13.1046 13.1046 14 12 14C10.8954 14 10 13.1046 10 12C10 11.7481 10.0466 11.507 10.1316 11.285L9.22233 10.717L7 12L4.77733 10.717L3.8684 11.285C3.95342 11.507 4 11.7481 4 12C4 13.1046 3.10457 14 2 14C0.895431 14 0 13.1046 0 12C0 10.8954 0.895431 10 2 10C2.36014 10 2.69805 10.0952 2.98994 10.2618L3.53567 9.92067L3.5359 6L6.33333 4.385L6.33344 3.88623C5.55659 3.61171 5 2.87085 5 2C5 0.895431 5.89543 0 7 0ZM12 11.3333C11.6318 11.3333 11.3333 11.6318 11.3333 12C11.3333 12.3682 11.6318 12.6667 12 12.6667C12.3682 12.6667 12.6667 12.3682 12.6667 12C12.6667 11.6318 12.3682 11.3333 12 11.3333ZM2 11.3333C1.63181 11.3333 1.33333 11.6318 1.33333 12C1.33333 12.3682 1.63181 12.6667 2 12.6667C2.36819 12.6667 2.66667 12.3682 2.66667 12C2.66667 11.6318 2.36819 11.3333 2 11.3333ZM6.99999 5.53963L4.86922 6.76983V9.23023L6.99999 10.4604L9.13076 9.23023V6.76983L6.99999 5.53963ZM7 1.33333C6.63181 1.33333 6.33333 1.63181 6.33333 2C6.33333 2.36819 6.63181 2.66667 7 2.66667C7.36819 2.66667 7.66667 2.36819 7.66667 2C7.66667 1.63181 7.36819 1.33333 7 1.33333Z" fill="currentColor" /> diff --git a/web_console_v2/client/src/components/IconPark/icons/TeamOutlined.tsx b/web_console_v2/client/src/components/IconPark/icons/TeamOutlined.tsx new file mode 100644 index 000000000..6d13a0750 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/TeamOutlined.tsx @@ -0,0 +1,29 @@ +/** + * @file UserGroup user-group + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'team-out-lined', + false, + (props: ISvgIconProps) => ( + <svg + viewBox="64 64 896 896" + focusable="false" + data-icon="team" + width="1em" + height="1em" + fill="currentColor" + aria-hidden="true" + > + <path d="M824.2 699.9a301.55 301.55 0 00-86.4-60.4C783.1 602.8 812 546.8 812 484c0-110.8-92.4-201.7-203.2-200-109.1 1.7-197 90.6-197 200 0 62.8 29 118.8 74.2 155.5a300.95 300.95 0 00-86.4 60.4C345 754.6 314 826.8 312 903.8a8 8 0 008 8.2h56c4.3 0 7.9-3.4 8-7.7 1.9-58 25.4-112.3 66.7-153.5A226.62 226.62 0 01612 684c60.9 0 118.2 23.7 161.3 66.8C814.5 792 838 846.3 840 904.3c.1 4.3 3.7 7.7 8 7.7h56a8 8 0 008-8.2c-2-77-33-149.2-87.8-203.9zM612 612c-34.2 0-66.4-13.3-90.5-37.5a126.86 126.86 0 01-37.5-91.8c.3-32.8 13.4-64.5 36.3-88 24-24.6 56.1-38.3 90.4-38.7 33.9-.3 66.8 12.9 91 36.6 24.8 24.3 38.4 56.8 38.4 91.4 0 34.2-13.3 66.3-37.5 90.5A127.3 127.3 0 01612 612zM361.5 510.4c-.9-8.7-1.4-17.5-1.4-26.4 0-15.9 1.5-31.4 4.3-46.5.7-3.6-1.2-7.3-4.5-8.8-13.6-6.1-26.1-14.5-36.9-25.1a127.54 127.54 0 01-38.7-95.4c.9-32.1 13.8-62.6 36.3-85.6 24.7-25.3 57.9-39.1 93.2-38.7 
31.9.3 62.7 12.6 86 34.4 7.9 7.4 14.7 15.6 20.4 24.4 2 3.1 5.9 4.4 9.3 3.2 17.6-6.1 36.2-10.4 55.3-12.4 5.6-.6 8.8-6.6 6.3-11.6-32.5-64.3-98.9-108.7-175.7-109.9-110.9-1.7-203.3 89.2-203.3 199.9 0 62.8 28.9 118.8 74.2 155.5-31.8 14.7-61.1 35-86.5 60.4-54.8 54.7-85.8 126.9-87.8 204a8 8 0 008 8.2h56.1c4.3 0 7.9-3.4 8-7.7 1.9-58 25.4-112.3 66.7-153.5 29.4-29.4 65.4-49.8 104.7-59.7 3.9-1 6.5-4.7 6-8.7z" /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/Todo.tsx b/web_console_v2/client/src/components/IconPark/icons/Todo.tsx new file mode 100644 index 000000000..41f18f19b --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/Todo.tsx @@ -0,0 +1,31 @@ +/** + * @file Todo todo + * @author Auto Generated by IconPark + */ + +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper( + 'todo', + false, + (props: ISvgIconProps) => ( + <svg + width={props.size || 16} + height={props.size || 16} + viewBox="0 0 16 16" + fill="currentColor" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M6.33301 1.93354C6.33301 1.74944 6.18377 1.60021 5.99967 1.60021H5.16247C4.97837 1.60021 4.82913 1.74944 4.82913 1.93354L4.8288 2.60021H2.99967C2.63148 2.60021 2.33301 2.91526 2.33301 3.30391V12.5632C2.33301 12.9518 2.63148 13.2669 2.99967 13.2669H12.9997C13.3679 13.2669 13.6663 12.9518 13.6663 12.5632V3.30391C13.6663 2.91526 13.3679 2.60021 12.9997 2.60021L11.1771 2.6001V1.93343C11.1771 1.74934 11.0279 1.6001 10.8438 1.6001L9.99967 1.60021C9.81558 1.60021 9.66634 1.74944 9.66634 1.93354V2.60021H6.33301V1.93354ZM9.88209 5.85927C10.0121 5.7293 10.2228 5.7293 10.3528 5.85927L10.8234 6.32994C10.9534 6.45991 10.9534 6.67064 10.8234 6.80061L8.00164 9.62238L7.99942 9.62462L7.52875 10.0953C7.47877 10.1453 7.41683 10.176 7.35214 10.1876L7.31307 10.1922H7.27377C7.19526 10.1876 7.11807 10.1553 7.05809 10.0953L5.17542 8.21261C5.04544 8.08264 5.04544 7.87192 5.17542 7.74195L5.64608 7.27128C5.77605 7.14131 5.98678 7.14131 6.11675 7.27128L7.29347 8.44793L9.88209 5.85927Z" + /> + </svg> + ), + (props: ISvgIconProps) => ` +`, +); diff --git a/web_console_v2/client/src/components/IconPark/icons/UnStruct.tsx b/web_console_v2/client/src/components/IconPark/icons/UnStruct.tsx new file mode 100644 index 000000000..061c20cf3 --- /dev/null +++ b/web_console_v2/client/src/components/IconPark/icons/UnStruct.tsx @@ -0,0 +1,22 @@ +/* tslint:disable: max-line-length */ +/* eslint-disable max-len */ +import React from 'react'; +import { ISvgIconProps, IconWrapper } from '../runtime'; + +export default IconWrapper('ad-product', false, (props: ISvgIconProps) => ( + <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> + <g clipPath="url(#clip0_12570_67994)"> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M7.33333 2.00001C7.33333 1.63182 7.03486 1.33334 6.66667 1.33334H1.66667C1.29848 1.33334 1 1.63182 1 2.00001V14C1 14.3682 1.29848 14.6667 1.66667 14.6667H6.66667C7.03486 14.6667 7.33333 14.3682 7.33333 14V2.00001ZM15 9.00001C15 8.63182 14.7015 8.33334 14.3333 8.33334H9C8.63181 8.33334 8.33333 8.63182 8.33333 9.00001V14C8.33333 14.3682 8.63181 14.6667 9 14.6667H14.3333C14.7015 14.6667 15 14.3682 15 14V9.00001ZM6 2.66668H2.33333V13.3333H6V2.66668ZM9.66667 9.66668H13.6667V13.3333H9.66667V9.66668ZM14.3333 1.33334C14.7015 1.33334 15 1.63182 15 
2.00001V7.00001C15 7.3682 14.7015 7.66668 14.3333 7.66668H9C8.63181 7.66668 8.33333 7.3682 8.33333 7.00001V2.00001C8.33333 1.63182 8.63181 1.33334 9 1.33334H14.3333ZM9.66667 2.66668H13.6667V6.33334H9.66667V2.66668Z" + fill="#4E5969" + /> + </g> + <defs> + <clipPath id="clip0_12570_67994"> + <rect width="16" height="16" fill="white" /> + </clipPath> + </defs> + </svg> +)); diff --git a/web_console_v2/client/src/components/IconPark/index.ts b/web_console_v2/client/src/components/IconPark/index.ts index 14a4f45af..6625c2272 100644 --- a/web_console_v2/client/src/components/IconPark/index.ts +++ b/web_console_v2/client/src/components/IconPark/index.ts @@ -11,11 +11,13 @@ export { default as ClockCircle } from './icons/ClockCircle'; export { default as Check } from './icons/Check'; export { default as CloseCircle } from './icons/CloseCircle'; export { default as ExclamationCircle } from './icons/ExclamationCircle'; +export { default as ExclamationCircleFill } from './icons/ExclamationCircleFill'; export { default as InfoCircle } from './icons/InfoCircle'; export { default as MinusCircle } from './icons/MinusCircle'; export { default as Minus } from './icons/Minus'; export { default as PlusCircle } from './icons/PlusCircle'; export { default as Plus } from './icons/Plus'; +export { default as PlusBold } from './icons/PlusBold'; export { default as QuestionCircle } from './icons/QuestionCircle'; export { default as Stop } from './icons/Stop'; export { default as CheckCircle } from './icons/CheckCircle'; @@ -162,3 +164,34 @@ export { default as UpCircle } from './icons/UpCircle'; export { default as Up } from './icons/Up'; export { default as Crown } from './icons/Crown'; export { default as Struct } from './icons/Struct'; +export { default as UnStruct } from './icons/UnStruct'; +export { default as ModelCenter } from './icons/ModelCenter'; +export { default as Todo } from './icons/Todo'; +export { default as Rocket } from './icons/Rocket'; +export { default as StarFull } from './icons/StarFull'; + +export { default as CheckCircleFill } from './icons/CheckCircleFill'; +export { default as CloseCircleFill } from './icons/CloseCircleFill'; +export { default as RightAngle } from './icons/RightAngle'; +export { default as Loading } from './icons/Loading'; +export { default as InfoCircleFill } from './icons/InfoCircleFill'; +export { default as CompressedPackage } from './icons/CompressedPackage'; +export { default as NormalFile } from './icons/NormalFile'; +export { default as SortArrow } from './icons/SortArrow'; +export { default as Audit } from './icons/Audit'; +export { default as EditNoUnderline } from './icons/EditNoUnderline'; +export { default as ArrowUpFill } from './icons/ArrowUpFill'; +export { default as FolderAddFill } from './icons/FolderAddFill'; +export { default as FileAddFill } from './icons/FileAddFill'; + +export { default as Default } from './fileIcon/Default'; +export { default as Python } from './fileIcon/Python'; +export { default as Json } from './fileIcon/Json'; +export { default as Markdown } from './fileIcon/Markdown'; +export { default as Yaml } from './fileIcon/Yaml'; +export { default as Config } from './fileIcon/Config'; +export { default as GitIgnore } from './fileIcon/GitIgnore'; +export { default as Javascript } from './fileIcon/JavaScript'; +export { default as ArrowFillDown } from './icons/ArrowFillDown'; +export { default as ArrowFillRight } from './icons/ArrowFillRight'; +export { default as TeamOutlined } from './icons/TeamOutlined'; diff --git 
a/web_console_v2/client/src/components/InfoItem/index.tsx b/web_console_v2/client/src/components/InfoItem/index.tsx new file mode 100644 index 000000000..791f712ca --- /dev/null +++ b/web_console_v2/client/src/components/InfoItem/index.tsx @@ -0,0 +1,108 @@ +/* istanbul ignore file */ + +import React, { FC, useState, useEffect } from 'react'; +import styled from 'styled-components'; + +import { Input } from '@arco-design/web-react'; + +const Container = styled.div<{ + isBlock?: boolean; +}>` + display: ${(props) => (props.isBlock ? 'block' : 'inline-block')}; +`; + +const Label = styled.div` + font-size: 12px; + color: var(--textColor); + margin-bottom: 6px; +`; +const Content = styled.div<{ + valueColor?: string; + onClick?: any; +}>` + display: inline-block; + padding: 4px 8px; + background-color: #f6f7fb; + font-size: 12px; + color: ${(props) => props.valueColor || 'var(--textColorStrong)'}; + ${(props) => props.onClick && 'cursor: pointer'}; +`; + +const HoverInput = styled(Input.TextArea)` + width: 100%; + padding: 4px; + font-weight: 500; + font-size: 12px; + background-color: #f6f7fb; + &:hover { + background-color: #fff; + border: 1px solid var(--lineColor); + } + &:focus { + background-color: #fff; + border-color: var(--primaryColor); + } +`; + +type Props = { + /** Display title(header) */ + title?: string; + /** Display content(footer) */ + value?: any; + /** Value's color */ + valueColor?: string; + /** Is container display: block, otherwise inline-block */ + isBlock?: boolean; + /** Is input mode */ + isInputMode?: boolean; + /** Is value's type React.ReactElement */ + isComponentValue?: boolean; + onClick?: () => void; + onInputBlur?: (str: string) => void; +}; + +const InfoItem: FC<Props> = ({ + title, + value, + valueColor, + isBlock = false, + isInputMode = false, + isComponentValue = false, + onClick, + onInputBlur, +}) => { + const [innerValue, setInnerValue] = useState(); + + useEffect(() => { + if (!isComponentValue) { + setInnerValue((prevState) => value); + } + }, [value, isComponentValue]); + + return ( + <Container isBlock={isBlock}> + <Label>{title}</Label> + {isInputMode && !isComponentValue ? 
( + <HoverInput + autoSize={{ + minRows: 1, + maxRows: 4, + }} + value={innerValue} + onClick={(e) => e.stopPropagation()} + onChange={(value: string, e) => setInnerValue(e.target.value)} + onBlur={(e) => { + e.stopPropagation(); + onInputBlur && onInputBlur(e.target.value); + }} + /> + ) : ( + <Content onClick={onClick} valueColor={valueColor}> + {value} + </Content> + )} + </Container> + ); +}; + +export default InfoItem; diff --git a/web_console_v2/client/src/components/InputGroup/NumberTextInput.test.tsx b/web_console_v2/client/src/components/InputGroup/NumberTextInput.test.tsx new file mode 100644 index 000000000..c373e023d --- /dev/null +++ b/web_console_v2/client/src/components/InputGroup/NumberTextInput.test.tsx @@ -0,0 +1,176 @@ +import React, { useState } from 'react'; +import { fireEvent, render, screen } from '@testing-library/react'; +import NumberInputNumber, { CpuInput, MemInput } from './NumberTextInput'; + +function typeInput(input: HTMLElement, value: string | number) { + fireEvent.change(input, { + target: { value }, + }); +} + +function triggerValueChange(input: HTMLElement, value: string | number) { + typeInput(input, value); + fireEvent.blur(input); +} + +// partly borrow from arco design repo +describe('<NumberTextInput />', () => { + it('init value correctly', () => { + const defaultValue = 8; + render(<NumberInputNumber defaultValue={defaultValue} min={0} max={12} />); + const input = screen.getByRole('textbox'); + expect(input).toHaveValue(defaultValue.toString()); + typeInput(input, 1000); + expect(input).toHaveValue('1000'); + fireEvent.blur(input); + expect(input).toHaveValue('12'); + + typeInput(input, -1000); + expect(input).toHaveValue('-1000'); + fireEvent.blur(input); + expect(input).toHaveValue('0'); + }); + + it('init value with empty string correctly', () => { + render(<NumberInputNumber value="" />); + expect(screen.getByRole('textbox')).toHaveValue(''); + }); + + it('init value with string correctly', () => { + render(<NumberInputNumber value="8.0000" precision={2} />); + expect(screen.getByRole('textbox')).toHaveValue('8.00'); + }); + + it('value control mode', () => { + const Demo = () => { + const [value, setValue] = useState<number | undefined>(0); + return ( + <div> + <button id="clear" onClick={() => setValue(undefined)}> + clear + </button> + <NumberInputNumber value={value} min={10} />; + </div> + ); + }; + render(<Demo />); + const input = screen.getByRole('textbox'); + expect(input).toHaveValue('0'); + fireEvent.click(screen.getByRole('button')); + expect(input).toHaveValue(''); + }); + + it('typing input', () => { + render(<NumberInputNumber min={0} max={100} />); + const input = screen.getByRole('textbox'); + triggerValueChange(input, 'abcdefg'); + expect(input).toHaveValue(''); + + triggerValueChange(input, '100abcdefg'); + expect(input).toHaveValue('100'); + + triggerValueChange(input, '1.0000abcdef'); + expect(input).toHaveValue('1'); + + triggerValueChange(input, '1000'); + // because the max is 100 + expect(input).toHaveValue('100'); + + triggerValueChange(input, '-100'); + // because the min is 0 + expect(input).toHaveValue('0'); + }); + + it('onChange calling', () => { + const onChange = jest.fn(); + render(<NumberInputNumber onChange={onChange} />); + const input = screen.getByRole('textbox'); + + typeInput(input, '1000'); + expect(onChange).toHaveBeenCalledTimes(0); + fireEvent.blur(input); + expect(onChange).toHaveBeenCalledWith(1000); + expect(onChange).toHaveBeenCalledTimes(1); + + // should ignore empty string + 
typeInput(input, ''); + expect(onChange).toHaveBeenCalledTimes(1); + fireEvent.blur(input); + expect(onChange).toHaveBeenCalledTimes(1); + }); + + it('onChange calling with correct approximate value', () => { + const onChange = jest.fn(); + const { rerender } = render(<NumberInputNumber defaultValue={8.5} precision={0} />); + const input = screen.getByRole('textbox'); + expect(input).toHaveValue('9'); + + rerender(<NumberInputNumber onChange={onChange} precision={2} />); + triggerValueChange(input, '8.666'); + expect(onChange).toHaveBeenCalledWith(8.67); + }); +}); + +describe('CpuInput', () => { + it('should render correctly', () => { + const wrapper = render(<CpuInput />); + const $input = wrapper.getByRole('textbox') as HTMLInputElement; + const $unit = wrapper.getByText('Core'); + expect($input).toBeInTheDocument(); + expect($unit).toBeInTheDocument(); + expect($input.value).toBe(''); + }); + it('should render correctly with value', () => { + const wrapper = render(<CpuInput value="1500m" />); + const $input = wrapper.getByRole('textbox') as HTMLInputElement; + const $unit = wrapper.getByText('Core'); + expect($input).toBeInTheDocument(); + expect($unit).toBeInTheDocument(); + expect($input.value).toBe('1.5'); + }); + + it('onChange calling with unit value', () => { + const onChange = jest.fn(); + const wrapper = render(<CpuInput value="1500m" onChange={onChange} />); + const $input = wrapper.getByRole('textbox') as HTMLInputElement; + const $unit = wrapper.getByText('Core'); + expect($input).toBeInTheDocument(); + expect($unit).toBeInTheDocument(); + expect($input.value).toBe('1.5'); + + triggerValueChange($input, '3'); + expect(onChange).toHaveBeenCalledWith('3000m'); + }); +}); + +describe('MemInput', () => { + it('should render correctly', () => { + const wrapper = render(<MemInput />); + const $input = wrapper.getByRole('textbox') as HTMLInputElement; + const $unit = wrapper.getByText('Gi'); + expect($input).toBeInTheDocument(); + expect($unit).toBeInTheDocument(); + expect($input.value).toBe(''); + }); + it('should render correctly with value', () => { + const wrapper = render(<MemInput value="3Gi" />); + const $input = wrapper.getByRole('textbox') as HTMLInputElement; + const $unit = wrapper.getByText('Gi'); + expect($input).toBeInTheDocument(); + expect($unit).toBeInTheDocument(); + expect($input.value).toBe('3'); + }); + + it('onChange calling with unit value', () => { + const onChange = jest.fn(); + const wrapper = render(<MemInput value="3Gi" onChange={onChange} />); + const $input = wrapper.getByRole('textbox') as HTMLInputElement; + const $unit = wrapper.getByText('Gi'); + expect($input).toBeInTheDocument(); + expect($unit).toBeInTheDocument(); + expect($input.value).toBe('3'); + + triggerValueChange($input, '64'); + expect(onChange).toHaveBeenCalledWith('64Gi'); + }); +}); diff --git a/web_console_v2/client/src/components/InputGroup/NumberTextInput.tsx b/web_console_v2/client/src/components/InputGroup/NumberTextInput.tsx new file mode 100644 index 000000000..529bb036e --- /dev/null +++ b/web_console_v2/client/src/components/InputGroup/NumberTextInput.tsx @@ -0,0 +1,157 @@ +import React, { FC, useState, useEffect, useCallback } from 'react'; +import { Input, InputNumberProps } from '@arco-design/web-react'; +import { convertCpuCoreToM, convertCpuMToCore } from 'shared/helpers'; + +/** + * A number input disguised as an <input type="text">; it implements part of ArcoDesign + * InputNumber's behavior, with two known gaps: + * * stepping the value with the keyboard up/down arrow keys is not supported + * * mode="button" is not supported + */ +const IntegerTextInput: FC<InputNumberProps> = (props) => { +
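+ // State model: innerValue holds the last committed number, while inputVal mirrors the raw text being typed.
+ // Typing only updates inputVal; on blur the text is parsed (parseFloat or parseInt depending on precision),
+ // rounded via toFixed(precision) and clamped to [min, max], and that commit is the only moment onChange fires.
+ // Passing a `value` prop switches the component into controlled mode (detected via hasOwnProperty), in which
+ // case the committed value is synced back from props by the effect below.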
const { + min = 0, + max = Number.MAX_SAFE_INTEGER, + value, + defaultValue, + precision = 0, + prefix, + suffix, + error, + onChange, + disabled, + } = props; + const isControlledMode = Object.prototype.hasOwnProperty.call(props, 'value'); + const [innerValue, setInnerValue] = useState<number | undefined>( + getInitValue(value, defaultValue), + ); + const [inputVal, setInputVal] = useState<string | undefined>(() => { + return typeof innerValue === 'number' ? innerValue.toFixed(precision) : undefined; + }); + const setInnerValueProxy = useCallback( + (val: number | string | undefined) => { + switch (typeof val) { + case 'undefined': + setInnerValue(undefined); + setInputVal(''); + break; + case 'number': + setInnerValue(val); + setInputVal(val.toFixed(precision)); + break; + case 'string': + const digital = parseFloat(val); + if (isNaN(digital)) { + return; + } + setInnerValue(digital); + setInputVal(digital.toFixed(precision)); + break; + } + }, + [precision], + ); + + const setValue = (val: number) => { + let outputVal = val; + if (val > max) { + outputVal = max; + } else if (val < min) { + outputVal = min; + } + + if (outputVal === innerValue) { + setInnerValueProxy(outputVal); + return; + } + if (!isControlledMode) { + setInnerValueProxy(outputVal); + } + + onChange?.(outputVal); + }; + const handleBlur = (evt: React.FocusEvent<HTMLInputElement>) => { + const val = evt.target.value; + const isFloat = precision > 0; + const digitalVal = isFloat ? parseFloat(val) : parseInt(val); + if (isNaN(digitalVal)) { + setInnerValueProxy(innerValue); + return; + } + + if (isFloat) { + const fixedNumber = digitalVal.toFixed(precision); + setValue(parseFloat(fixedNumber)); + return; + } + setValue(digitalVal); + }; + + useEffect(() => { + if (isControlledMode) { + const digitalVal = value as number; + setInnerValueProxy(digitalVal); + } + }, [value, isControlledMode, min, max, setInnerValueProxy]); + + return ( + <Input + error={error} + value={inputVal} + onBlur={handleBlur} + onChange={setInputVal} + prefix={prefix} + suffix={suffix} + disabled={disabled} + /> + ); +}; + +function getInitValue(value?: string | number, defaultVal?: number): number | undefined { + if (typeof value !== 'undefined') { + const valueType = typeof value; + if (valueType === 'string') { + const digital = parseFloat(value as string); + return isNaN(digital) ? undefined : digital; + } + if (valueType === 'number') { + return value as number; + } + } + + return defaultVal; +} + +type OnChangeWithSuffixProps = Omit<InputNumberProps, 'onChange'> & { + onChange?: (val: string) => void; +}; +export const OnChangeWithSuffix: FC<OnChangeWithSuffixProps> = ({ onChange, suffix, ...rest }) => { + const onChangeWrapper = (val: number) => { + onChange?.(`${val}${suffix || ''}`); + }; + return <IntegerTextInput {...rest} suffix={suffix} onChange={onChangeWrapper} />; +}; + +export const CpuInput: FC<Omit<OnChangeWithSuffixProps, 'suffix'>> = ({ + onChange, + value, + ...props +}) => { + const onChangeWrapper = (val: string) => { + onChange?.(convertCpuCoreToM(val, true)); + }; + + return ( + <OnChangeWithSuffix + suffix="Core" + precision={1} + onChange={onChangeWrapper} + value={value ? 
convertCpuMToCore(value as string, true) : undefined} + {...props} + /> + ); +}; +export const MemInput: FC<Omit<OnChangeWithSuffixProps, 'suffix'>> = ({ ...props }) => { + return <OnChangeWithSuffix suffix="Gi" {...props} />; +}; + +export default IntegerTextInput; diff --git a/web_console_v2/client/src/components/InputGroup/index.test.tsx b/web_console_v2/client/src/components/InputGroup/index.test.tsx new file mode 100644 index 000000000..654e65afd --- /dev/null +++ b/web_console_v2/client/src/components/InputGroup/index.test.tsx @@ -0,0 +1,405 @@ +import React from 'react'; +import { fireEvent, render, screen, waitFor } from '@testing-library/react'; +import { typeInput } from 'shared/testUtils'; +import InputGroup, { TColumn } from './index'; +import i18n from 'i18n'; + +const columns: TColumn[] = [ + { + type: 'TEXT', + title: 'Role', + dataIndex: 'role', + span: 6, + tooltip: '', + }, + { + type: 'INPUT_NUMBER', + title: 'CPU', + dataIndex: 'cpu', + placeholder: '请输入', + unitLabel: 'core', + max: 100, + precision: 1, + span: 6, + tooltip: i18n.t('tip_please_input_positive_integer'), + }, + { + type: 'INPUT_NUMBER', + title: 'MEM', + dataIndex: 'mem', + unitLabel: 'GiB', + span: 6, + tooltip: i18n.t('tip_please_input_positive_integer'), + }, + { + type: 'INPUT_NUMBER', + title: '实例数', + dataIndex: 'instance', + min: 1, + max: 10, + precision: 1, + mode: 'button', + span: 6, + rules: [ + { + max: 10, + message: '太多拉!', + validator(val: number, cb: any) { + cb(val > 10 ? '太多拉' : undefined); + }, + }, + ], + tooltip: i18n.t('tip_replicas_range'), + }, +]; + +// confirm whether the value is right grid by grid +function checkLengthAndValue(inputList: HTMLElement[], valueList: any[], columns: TColumn[]) { + const inputColumns = columns.filter((col) => col.type === 'INPUT' || col.type === 'INPUT_NUMBER'); + const totalLength = inputColumns.length * valueList.length; + expect(inputList.length).toBe(totalLength); + + if (valueList.length === 0 || columns.length === 0) { + return; + } + + for (let i = 0; i < valueList.length; i++) { + for (let j = 0; j < inputColumns.length; j++) { + const { dataIndex, precision, type } = inputColumns[j] as any; + const inputIndex = i * inputColumns.length + j; + const val = valueList[i][dataIndex]; + switch (type) { + case 'INPUT_NUMBER': + expect(inputList[inputIndex]).toHaveValue( + typeof val === 'number' ? 
val.toFixed(precision) : parseInt(val).toFixed(precision), + ); + break; + case 'INPUT': + expect(inputList[inputIndex]).toHaveValue(val); + break; + } + } + } +} + +describe('<InputGroup />', () => { + it('initial with default value', () => { + const defaultValue: any[] = [ + { + role: 'worker', + cpu: 1000, + mem: '200', + instance: 1, + }, + { + role: 'master', + cpu: 1000, + mem: '200', + instance: 2, + }, + { + role: 'slave', + cpu: 1000, + mem: '200', + instance: 30, + }, + ]; + + render(<InputGroup columns={columns} defaultValue={defaultValue} />); + checkLengthAndValue(screen.queryAllByRole('textbox'), defaultValue, columns); + }); + + it('controlled mode', () => { + const { rerender } = render(<InputGroup columns={columns} value={[]} />); + // can't find any 'textbox' element + expect(screen.queryAllByRole('gridcell').length).toBe(0); + rerender( + <InputGroup + columns={columns} + value={[ + { + role: 'slave', + cpu: 1000, + mem: '200', + instance: 30, + }, + ]} + />, + ); + expect(screen.queryAllByRole('gridcell').length).toBe(1 * columns.length); + rerender( + <InputGroup + columns={columns} + value={[ + { + role: 'slave', + cpu: 1000, + mem: '200', + instance: 30, + }, + { + role: 'slave', + cpu: 1000, + mem: '200', + instance: 30, + }, + ]} + />, + ); + expect(screen.getAllByRole('gridcell').length).toBe(2 * columns.length); + }); + + it('only trigger onChange when all grid pass validation', async () => { + const columns: TColumn[] = [ + { + type: 'INPUT', + dataIndex: 'name', + title: 'test', + span: 8, + rules: [ + { + validator(value, cb) { + cb(/failed/i.test(value) ? 'name failed: ' + value : undefined); + }, + }, + ], + }, + { + type: 'INPUT_NUMBER', + dataIndex: 'sum', + title: 'sum', + span: 16, + rules: [ + { + validator(value, cb) { + cb(value > 10 ? 
'sum failed: ' + value : undefined); + }, + }, + ], + }, + ]; + const valueList = [ + { name: 'aaa', sum: 1 }, + { name: 'bbb', sum: 2 }, + ]; + const onChange = jest.fn(); + render(<InputGroup defaultValue={valueList} columns={columns} onChange={onChange} />); + const inputList = screen.queryAllByRole('textbox'); + + typeInput(inputList[0], 'failed'); + fireEvent.blur(inputList[0]); + await waitFor(() => { + expect(onChange).toHaveBeenCalledTimes(0); + }); + + typeInput(inputList[1], 100); + fireEvent.blur(inputList[1]); + await waitFor(() => { + expect(onChange).toHaveBeenCalledTimes(0); + }); + + typeInput(inputList[0], 'success'); + fireEvent.blur(inputList[0]); + typeInput(inputList[1], 1); + fireEvent.blur(inputList[1]); + await waitFor(() => { + expect(onChange).toHaveBeenCalledWith(expect.arrayContaining([{ name: 'success', sum: 1 }])); + }); + }); + + it('add button behavior', async () => { + const columns: TColumn[] = [ + { + title: 'name', + dataIndex: 'name', + type: 'INPUT', + span: 12, + }, + { + title: 'sum', + dataIndex: 'sum', + type: 'INPUT_NUMBER', + span: 12, + }, + ]; + const onChange = jest.fn(); + render(<InputGroup columns={columns} onChange={onChange} />); + checkLengthAndValue(screen.queryAllByRole('textbox'), [], columns); + + const addBtn = screen.getByTestId('addBtn'); + fireEvent.click(addBtn); + checkLengthAndValue(screen.queryAllByRole('textbox'), [{ name: '', sum: 0 }], columns); + await waitFor(() => { + expect(onChange).toHaveBeenCalledWith([{ name: '', sum: 0 }]); + }); + }); + + it('remove button behavior', async () => { + const columns: TColumn[] = [ + { + title: 'name', + dataIndex: 'name', + type: 'INPUT', + span: 12, + }, + { + title: 'sum', + dataIndex: 'sum', + type: 'INPUT_NUMBER', + span: 12, + }, + ]; + const valueList = [ + { name: 'a', sum: 1 }, + { name: 'b', sum: 2 }, + { name: 'c', sum: 3 }, + ]; + const onChange = jest.fn(); + render(<InputGroup columns={columns} defaultValue={[...valueList]} onChange={onChange} />); + checkLengthAndValue(screen.queryAllByRole('textbox'), valueList, columns); + + const performDelete = async (rowIndex: number) => { + const delBtnList = screen.queryAllByTestId('delBtn'); + valueList.splice(rowIndex, 1); + fireEvent.click(delBtnList[rowIndex]); + + const inputList = screen.queryAllByRole('textbox'); + await waitFor(() => { + checkLengthAndValue(inputList, valueList, columns); + expect(onChange).toHaveBeenCalledWith(valueList); + }); + }; + // test out-of-order deleting + await performDelete(1); + // after deleting one row, there are still two rows left. + await performDelete(1); + // only one row is left at this point. + await performDelete(0); + }); + + // the UI should not change while the value prop remains unchanged.
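+ // Under controlled mode, clicking add/delete does not mutate the internal form state:
+ // performFormActionUnderControlled only reports the would-be next list through onChange,
+ // and the rendered rows change only once the parent feeds that list back via `value`.
+ // That is why the textbox count in the test below stays constant right after the clicks.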
+ it('add/remove button behavior under controlled mode', async () => { + const columns: TColumn[] = [ + { + title: 'name', + dataIndex: 'name', + type: 'INPUT', + span: 12, + }, + { + title: 'sum', + dataIndex: 'sum', + type: 'INPUT_NUMBER', + span: 12, + }, + ]; + const valueList = [ + { name: 'a', sum: 1 }, + { name: 'b', sum: 2 }, + { name: 'c', sum: 3 }, + ]; + const onChange = jest.fn(); + render(<InputGroup columns={columns} value={[...valueList]} onChange={onChange} />); + const addBtn = screen.getByTestId('addBtn'); + fireEvent.click(addBtn); + await waitFor(() => { + expect(onChange).toHaveBeenCalledWith(valueList.concat([{ name: '', sum: 0 }])); + }); + expect(screen.getAllByRole('textbox').length).toBe(valueList.length * columns.length); + + const delBtnList = screen.getAllByTestId('delBtn'); + fireEvent.click(delBtnList[0]); + await waitFor(() => { + expect(onChange).toHaveBeenCalledWith(valueList.slice(1)); + }); + expect(screen.getAllByRole('textbox').length).toBe(valueList.length * columns.length); + }); + + it('disableAddAndDelete should work', () => { + const columns: TColumn[] = [ + { + title: 'name', + dataIndex: 'name', + type: 'INPUT', + span: 12, + }, + { + title: 'sum', + dataIndex: 'sum', + type: 'INPUT_NUMBER', + span: 12, + }, + ]; + const valueList = [ + { name: 'a', sum: 1 }, + { name: 'b', sum: 2 }, + { name: 'c', sum: 3 }, + ]; + + render(<InputGroup columns={columns} defaultValue={valueList} disableAddAndDelete={true} />); + expect(screen.queryByTestId('addBtn')).not.toBeInTheDocument(); + expect(screen.queryByTestId('delBtn')).not.toBeInTheDocument(); + }); + + it('formatValue should work', async () => { + const onChange = jest.fn(); + const columns: TColumn[] = [ + { + title: 'name', + dataIndex: 'name', + type: 'INPUT', + span: 12, + unitLabel: 'name', + formatValue(val, col) { + return val + col.unitLabel; + }, + }, + { + title: 'sum', + dataIndex: 'sum', + type: 'INPUT_NUMBER', + span: 12, + min: 1, + unitLabel: 'sum', + formatValue(value, col) { + return value * 2; + }, + }, + ]; + const valueList = [{ name: 'a', sum: '1' }]; + + render(<InputGroup columns={columns} defaultValue={valueList} onChange={onChange} />); + fireEvent.click(screen.getByTestId('addBtn')); + await waitFor(() => { + expect(onChange).toHaveBeenCalledWith(expect.arrayContaining([{ name: 'aname', sum: 2 }])); + }); + }); + + it('text with unitLabel should display correctly', () => { + const columns: TColumn[] = [ + { + title: '__name__', + dataIndex: 'name', + type: 'TEXT', + span: 24, + unitLabel: 'm', + }, + ]; + const valueList = [{ name: '100m100m' }]; + render(<InputGroup columns={columns} defaultValue={valueList} />); + expect(screen.getByText(/^100m100$/)).toBeInTheDocument(); + expect(screen.getByText(/^m$/)).toBeInTheDocument(); + }); + + it('should render tooltip', async () => { + const wrapper = render(<InputGroup columns={columns} value={[]} />); + const $tooltipList = wrapper.getAllByTestId('tooltip-icon'); + expect($tooltipList.length).toBe(3); + + fireEvent.mouseOver($tooltipList[0]); + + await waitFor(() => { + expect(screen.queryByText('tip_please_input_positive_integer')).toBeInTheDocument(); + }); + }); +}); diff --git a/web_console_v2/client/src/components/InputGroup/index.tsx b/web_console_v2/client/src/components/InputGroup/index.tsx new file mode 100644 index 000000000..868dc7aa1 --- /dev/null +++ b/web_console_v2/client/src/components/InputGroup/index.tsx @@ -0,0 +1,390 @@ +import React, { CSSProperties, FC, useEffect, useMemo } from 'react'; +import { +
InputNumberProps, + Grid, + Form, + Input, + InputNumber, + Button, + RulesProps, + Tooltip, + Space, +} from '@arco-design/web-react'; +import { IconDelete, IconPlus, IconQuestionCircle } from '@arco-design/web-react/icon'; +import styled from 'styled-components'; +import NumberTextInput from './NumberTextInput'; + +export type TColumn = TInputColumn | TInputNumberColumn | TTextColumn; + +interface TInputColumn extends TColumnBasic { + type: 'INPUT'; + formatValue?: (value: string, column: TColumn, allValues: TValue[]) => string; +} +interface TInputNumberColumn + extends Pick<InputNumberProps, 'min' | 'max' | 'precision' | 'mode'>, + TColumnBasic { + type: 'INPUT_NUMBER'; + formatValue?: (value: number, column: TColumn, allValues: TValue[]) => number; +} +interface TTextColumn extends TColumnBasic { + type: 'TEXT'; + unitLabel?: string; +} + +interface TColumnBasic { + title: string; + dataIndex: string; + span: number; + placeholder?: string; + unitLabel?: string; + rules?: RulesProps[]; + tooltip?: string; + disabled?: boolean; +} + +type TValue = Record<string, any>; +type TInnerValue = { [key: string]: TValue[] }; +type TProps = { + /** custom style */ + style?: CSSProperties; + /** additional className */ + className?: string; + /** Definition of the grid columns */ + columns: TColumn[]; + /** Default value of the form grid, usually an array of objects */ + defaultValue?: TValue[]; + /** Use value to control the component when under controlled mode */ + value?: TValue[]; + /** + * @description Customize the text on the "add a new row" button + * @default "add" + */ + addBtnText?: string; + /** disable add and delete */ + disableAddAndDelete?: boolean; + /** listen for value changes */ + onChange?: (data: TValue[]) => void; +}; + +export type TInputGroupProps = TProps; + +const StyledContainer = styled.div` + .arco-input { + font-size: var(--textFontSizePrimary); + } + .arco-form-item { + margin-bottom: 8px; + } +`; +const StyledPlainText = styled.span` + display: flex; + box-sizing: border-box; + min-width: 111px; + height: 32px; + border-radius: 2px; + border: 1px solid #e5e8ef; + padding: 0 12px; + line-height: 32px; + .plainSuffix { + flex: 1; + text-align: right; + } +`; +const StyledHeader = styled.header` + margin-bottom: 6px; +`; +const StyledTitle = styled.span` + line-height: 20px; + font-size: var(--textFontSizePrimary); + color: var(--textColor); +`; +const StyledDeleteBtn = styled(Button)` + margin-top: 2px; + color: var(--textColor) !important; +`; +const StyledAddButton = styled(Button)` + width: 100px; + margin-left: -5px; + padding-left: 5px; + padding-right: 5px; + text-align: left; + font-size: var(--textFontSizePrimary); + &.arco-btn-text:not(.arco-btn-disabled):not(.arco-btn-loading):hover { + background: transparent; + } +`; +const StyledQuestionIcon = styled(IconQuestionCircle)` + font-size: var(--textFontSizePrimary); + color: var(--textColor); +`; + +const { Row, Col } = Grid; +const ROOT_FIELD = 'root'; +const FORM_FIELD_SPAN = 22; +const ROW_GUTTER = 12; +const InputGroup: FC<TProps> = (props) => { + const [form] = Form.useForm(); + const { + value, + style, + className, + columns, + addBtnText = 'add', + defaultValue = [], + disableAddAndDelete = false, + onChange, + } = props; + const controlled = Object.prototype.hasOwnProperty.call(props, 'value'); + + const columnSpanList = useMemo(() => { + let noSpanCount = columns.length; + let occupiedSpan = 0; + + for (const col of columns) { + if (col.span) { + noSpanCount -= 1; +
occupiedSpan += col.span; + } + } + + if (noSpanCount > 0) { + throw new Error('InputGroup: every column should have span'); + } + + if (occupiedSpan !== 24) { + throw new Error('InputGroup: total columns span must be equal to 24'); + } + + return columns.map((col) => col.span); + }, [columns]); + useEffect(() => { + if (controlled) { + form.setFieldValue(ROOT_FIELD, value); + } + }, [value, controlled, form]); + + return ( + <StyledContainer style={style} className={className}> + <StyledHeader> + <Row gutter={ROW_GUTTER}> + <Col span={!disableAddAndDelete ? FORM_FIELD_SPAN : 24}> + <Row gutter={ROW_GUTTER}> + {columns.map((column, i) => ( + <Col span={columnSpanList[i]} key={column.dataIndex}> + <Space> + <StyledTitle>{column.title}</StyledTitle> + {column.tooltip && ( + <Tooltip content={column.tooltip}> + <StyledQuestionIcon data-testid="tooltip-icon" /> + </Tooltip> + )} + </Space> + </Col> + ))} + </Row> + </Col> + </Row> + </StyledHeader> + <div> + <Form + form={form} + initialValues={{ + [ROOT_FIELD]: value || defaultValue || [], + }} + onChange={(_, allValue: TInnerValue) => { + form.validate((error) => { + if (!error) { + onChangeWrapper(allValue[ROOT_FIELD]); + } + }); + }} + > + <Form.List field={ROOT_FIELD}> + {(fields, { remove, add }) => { + return ( + <> + {fields.map((item, index) => { + return ( + <Row gutter={ROW_GUTTER} key={item.field + item.key}> + <Col span={!disableAddAndDelete ? FORM_FIELD_SPAN : 24}> + <Row gutter={ROW_GUTTER}> + {columns.map((col, i) => { + const { dataIndex, rules } = col; + const field = item.field + '.' + dataIndex; + return ( + <Col key={field} span={columnSpanList[i]}> + <Form.Item + role="gridcell" + rules={rules} + field={field} + wrapperCol={{ span: 24 }} + > + {renderFormItem(col)} + </Form.Item> + </Col> + ); + })} + </Row> + </Col> + {!disableAddAndDelete && ( + <Col span={24 - FORM_FIELD_SPAN}> + <StyledDeleteBtn + name="delete row button" + size="small" + type="text" + data-index={index} + data-testid="delBtn" + icon={<IconDelete />} + onClick={() => { + controlled + ? performFormActionUnderControlled('delete', index) + : remove(index); + }} + /> + </Col> + )} + </Row> + ); + })} + {!disableAddAndDelete && ( + <StyledAddButton + type="text" + data-testid="addBtn" + icon={<IconPlus />} + onClick={() => { + controlled + ? performFormActionUnderControlled('add') + : add(getDefaultRowValueFromColumns(columns)); + }} + > + {addBtnText} + </StyledAddButton> + )} + </> + ); + }} + </Form.List> + </Form> + </div> + </StyledContainer> + ); + + function onChangeWrapper(values: TValue[]) { + const formattedValues = values.map((row) => { + for (const k in row) { + const col = columns.find((col) => col.dataIndex === k); + if (!col) { + continue; + } + if ( + (col.type === 'INPUT' || col.type === 'INPUT_NUMBER') && + typeof col.formatValue === 'function' + ) { + row[k] = col.formatValue(row[k] as never, col, values); + } + } + return row; + }); + + onChange?.(formattedValues); + } + + function performFormActionUnderControlled(action: 'add' | 'delete', index?: number) { + const currentValues = [...(form.getFieldValue(ROOT_FIELD) ?? 
[])]; + + if (action === 'add') { + currentValues.push(getDefaultRowValueFromColumns(columns)); + } else { + currentValues.splice(index!, 1); + } + onChangeWrapper(currentValues); + } +}; + +function getDefaultRowValueFromColumns(columns: TColumn[]) { + const ret: Record<string, any> = {}; + for (const col of columns) { + switch (col.type) { + case 'INPUT': + case 'TEXT': + ret[col.dataIndex] = ''; + break; + case 'INPUT_NUMBER': + ret[col.dataIndex] = col.min ?? 0; + break; + } + } + + return ret; +} + +function renderFormItem(column: TColumn) { + switch (column.type) { + case 'INPUT': + return ( + <Input + placeholder={column.placeholder} + suffix={column.unitLabel} + disabled={column.disabled} + /> + ); + case 'INPUT_NUMBER': + return column.mode ? ( + <InputNumber + min={column.min} + max={column.max} + precision={column.precision} + mode={column.mode} + suffix={column.unitLabel} + disabled={column.disabled} + /> + ) : ( + <NumberTextInput + min={column.min} + max={column.max} + precision={column.precision} + mode={column.mode} + suffix={column.unitLabel} + disabled={column.disabled} + /> + ); + case 'TEXT': + return <PlainText suffix={column.unitLabel} />; + } +} + +type TPlainTextProps = { + suffix?: string; + value?: string | number; + defaultValue?: string | number; +}; +function PlainText({ suffix, defaultValue, value }: TPlainTextProps) { + const purifyValue = useMemo(() => { + if (!value) { + return ''; + } + + if (!suffix) { + return value; + } + + if (typeof value === 'string') { + // the value may already carry the unit label at its tail; strip it so the suffix is not rendered twice + const tailString = value.slice(-1 * suffix.length); + if (tailString === suffix) { + return value.slice(0, value.length - suffix.length); + } + } + + return value; + }, [suffix, value]); + return ( + <StyledPlainText> + {purifyValue || defaultValue || ''} + {suffix ?
<span className="plainSuffix">{suffix}</span> : null} + </StyledPlainText> + ); +} + +export default InputGroup; diff --git a/web_console_v2/client/src/components/InvitionTable/index.module.less b/web_console_v2/client/src/components/InvitionTable/index.module.less new file mode 100644 index 000000000..61138d7a7 --- /dev/null +++ b/web_console_v2/client/src/components/InvitionTable/index.module.less @@ -0,0 +1,34 @@ +.table_container{ + width: 480px; +} +.title_name{ + color:#1D2129; + font-size: 14px; + font-weight: 500; +} +.limited_length_text{ + overflow: hidden; + text-overflow: ellipsis; + display: -webkit-box; + font-size: 12; + font-weight: 400; + color: '#86909C'; + -webkit-line-clamp: 1; + -webkit-box-orient: vertical; + margin-right: 8px; +} + +.select_container{ + width: 480px; + background-color: var(--componentBackgroundColorGray); + margin: 10px 0px !important; +} +.select_content_left{ + margin:10px 0px !important; +} +.select_content_right{ + margin:10px 0px !important; + padding-left: 0px !important; + padding-right: 0px !important; + max-width: 400px; +} diff --git a/web_console_v2/client/src/components/InvitionTable/index.tsx b/web_console_v2/client/src/components/InvitionTable/index.tsx new file mode 100644 index 000000000..ae02a2786 --- /dev/null +++ b/web_console_v2/client/src/components/InvitionTable/index.tsx @@ -0,0 +1,171 @@ +import React, { FC, useEffect, useMemo, useState } from 'react'; + +import { Table, Input, Grid, Tag, Space, Divider, Tooltip } from '@arco-design/web-react'; +import { IconSearch } from '@arco-design/web-react/icon'; +import { transformRegexSpecChar } from 'shared/helpers'; +import GridRow from 'components/_base/GridRow'; +import { Participant, ParticipantType } from 'typings/participant'; +import { useRecoilQuery } from 'hooks/recoil'; +import { participantListQuery } from 'stores/participant'; +import ConnectionStatus from 'views/Partner/PartnerList/ConnectionStatus'; +import { CONSTANTS } from 'shared/constants'; +import { debounce } from 'lodash-es'; + +import styles from './index.module.less'; + +const { Row, Col } = Grid; + +export const PartnerItem: FC<{ + data: Participant; + isNeedTip?: boolean; +}> = ({ data, isNeedTip = false }) => { + const isLightClient = data.type === ParticipantType.LIGHT_CLIENT; + return ( + <div style={{ paddingRight: 2 }}> + <GridRow gap={2} justify="space-between"> + <div className={styles.title_container}> + <span className={styles.title_name}>{data.name}</span> + <Tooltip content={data.comment}> + <div className={`${styles.limited_length_text} choose `}> + {data.comment || '无描述'} + </div> + </Tooltip> + </div> + + <div className="choose"> + {data?.support_blockchain && ( + <Tag color="blue" size="small"> + 区块链服务 + </Tag> + )} + {isLightClient ? 
( + CONSTANTS.EMPTY_PLACEHOLDER + ) : ( + <ConnectionStatus id={data.id} isNeedTip={isNeedTip} /> + )} + </div> + </GridRow> + </div> + ); +}; + +interface Props { + onChange?: (selectedParticipants: Participant[]) => void; + participantsType: ParticipantType; + isSupportCheckbox?: boolean; +} +const InvitionTable: FC<Props> = ({ onChange, participantsType, isSupportCheckbox = true }) => { + const [filterText, setFilterText] = useState(''); + const [selectedParticipants, setSelectedParticipants] = useState<Participant[]>([]); + // TODO: Need to filter out the successful connection + const { isLoading, data: participantList } = useRecoilQuery(participantListQuery); + + const Title = ( + <Space split={<Divider type="vertical" />} size="medium"> + <span> + {participantsType === ParticipantType.LIGHT_CLIENT ? '轻量级合作伙伴' : '标准合作伙伴'} + </span> + <Input + prefix={<IconSearch />} + placeholder="输入合作伙伴名称搜索" + allowClear + style={{ flexShrink: 0 }} + onPressEnter={handleSearch} + onChange={debounce((keyword) => { + setFilterText(keyword); + }, 300)} + /> + </Space> + ); + const columns = [ + { + title: Title, + render: (text: any, record: any) => <PartnerItem data={record} />, + }, + ]; + + const showList = useMemo(() => { + if (participantList) { + const regx = new RegExp(`^.*${transformRegexSpecChar(filterText)}.*$`); + return participantList.filter((item) => { + return ( + regx.test(item.name) && + (item.type === participantsType || + (!item.type && participantsType === ParticipantType.PLATFORM)) + ); + }); + } + return []; + }, [participantList, filterText, participantsType]); + + useEffect(() => { + if (participantsType) { + setSelectedParticipants([]); + } + }, [participantsType]); + return ( + <> + <Table + className={styles.table_container} + columns={columns} + data={showList} + rowKey="id" + rowSelection={{ + selectedRowKeys: selectedParticipants.map((item) => item.id), + checkCrossPage: true, + onChange: onValueChange, + type: + isSupportCheckbox && participantsType === ParticipantType.PLATFORM + ? 
'checkbox' + : 'radio', + preserveSelectedRowKeys: true, + }} + pagination={{ + pageSize: 5, + showTotal: true, + total: showList.length, + hideOnSinglePage: true, + }} + loading={isLoading} + /> + <Row gutter={24} className={styles.select_container}> + <Col span={4} className={styles.select_content_left}> + 已选{selectedParticipants.length}个 + </Col> + <Col span={20} className={styles.select_content_right}> + <Space wrap> + {selectedParticipants.map((item) => ( + <Tag + key={item.id} + color="arcoblue" + closable + onClose={() => { + removeTag(item.id); + }} + > + {item.name} + </Tag> + ))} + </Space> + </Col> + </Row> + </> + ); + + function handleSearch(e: any) { + setFilterText(e.target.value); + // Block the enter event of the submit button of the form + e.preventDefault(); + } + function onValueChange(selectedRowKeys: ID[], selectedRows: Participant[]) { + onChange?.(selectedRows); + setSelectedParticipants(selectedRows); + } + function removeTag(id: ID) { + const newSelectParticipants = selectedParticipants.filter((item) => item.id !== id); + setSelectedParticipants(newSelectParticipants); + onChange?.(newSelectParticipants); + } +}; + +export default InvitionTable; diff --git a/web_console_v2/client/src/components/LineChart/index.tsx b/web_console_v2/client/src/components/LineChart/index.tsx new file mode 100644 index 000000000..a4ddf5851 --- /dev/null +++ b/web_console_v2/client/src/components/LineChart/index.tsx @@ -0,0 +1,92 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import { Line } from 'react-chartjs-2'; + +type Item = { + label: string; + value: any; +}; + +type Props = { + valueList: Item[]; + formatData?: (valueList: Item[]) => any; + options?: any; + width?: number; + height?: number; + maxValue?: number; +}; + +const defaultFormatData = (valueList: Item[]) => { + const labels: any[] = []; + const data: any[] = []; + + valueList.forEach((item) => { + labels.push(item.label); + data.push(item.value); + }); + + const finalData = { + labels, + datasets: [ + { + data, + backgroundColor: '#468DFF', + borderColor: 'rgb(53, 162, 235)', + }, + ], + }; + + return finalData; +}; + +const defaultMaxValue = 1; + +const getDefaultOptions = (maxValue = 1) => ({ + maintainAspectRatio: false, + responsive: true, + plugins: { + legend: { + display: false, + position: 'top', + }, + title: { + display: false, + }, + }, + scales: { + x: { + offset: true, + grid: { + color: 'transparent', + tickColor: '#cecece', + }, + }, + y: { + grid: { + borderColor: 'transparent', + }, + }, + }, +}); + +const LineChart: FC<Props> = ({ + valueList, + formatData = defaultFormatData, + options, + width, + height, + maxValue = defaultMaxValue, +}) => { + const data = useMemo(() => { + return formatData(valueList); + }, [valueList, formatData]); + + const defaultOptions = useMemo(() => { + return getDefaultOptions(maxValue); + }, [maxValue]); + + return <Line data={data} options={options || defaultOptions} width={width} height={height} />; +}; + +export default LineChart; diff --git a/web_console_v2/client/src/components/LineChartWithCard/index.tsx b/web_console_v2/client/src/components/LineChartWithCard/index.tsx new file mode 100644 index 000000000..eb51fdf48 --- /dev/null +++ b/web_console_v2/client/src/components/LineChartWithCard/index.tsx @@ -0,0 +1,134 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import styled from 'styled-components'; + +import LineChart from 'components/LineChart'; +import NoResult from 'components/NoResult'; +import 
TitleWithIcon from 'components/TitleWithIcon'; +import { QuestionCircle } from 'components/IconPark'; +import { useModelMetriesResult } from 'hooks/modelCenter'; +import { formatTimestamp } from 'shared/date'; +import { Space } from '@arco-design/web-react'; + +const Card = styled.div<{ height?: number }>` + display: flex; + align-items: center; + justify-content: center; + position: relative; + ${(props) => props.height && `height: ${props.height}px`}; + border: 1px solid var(--lineColor); + border-radius: 2px; + padding: 30px 16px; +`; +const Title = styled(TitleWithIcon)` + position: absolute; + left: 16px; + top: 12px; + color: var(--textColor); + font-size: 12px; +`; +const Content = styled.div` + position: relative; + width: 100%; + height: 100%; + display: flex; + flex-direction: row; + flex-wrap: wrap; + margin: 0 auto; +`; +type Item = { + label: string; + value: any; +}; + +export type Props = { + valueList: Item[]; + height?: number; + title?: string; + tip?: string; +}; + +export type ModelMetricsProps = { + id: ID; + participantId?: ID; + isTraining?: boolean; +}; + +type VariantComponent = { + ModelMetrics: FC<ModelMetricsProps>; +}; + +export const LineChartWithCard: FC<Props> & VariantComponent = ({ + valueList = [], + height = 260, + title = 'Acc', + tip = '', +}) => { + return ( + <Card height={height}> + <Title + title={title || ''} + isShowIcon={Boolean(tip)} + isLeftIcon={false} + isBlock={false} + tip={tip} + icon={QuestionCircle} + /> + {valueList.length > 0 ? ( + <Content> + <LineChart valueList={valueList} /> + </Content> + ) : ( + <NoResult.NoData /> + )} + </Card> + ); +}; + +const ModelMetrics: FC<ModelMetricsProps> = ({ id, participantId, isTraining = true }) => { + const { data } = useModelMetriesResult(id, participantId); + + const metricsList = useMemo(() => { + if (!data) { + return []; + } + + const list: Array<{ + label: string; + valueList: Array<{ + label: string; + value: number; + }>; + }> = []; + const obj = (isTraining ? data.train : data.eval) ?? {}; + + Object.keys(obj).forEach((key) => { + const steps: number[] = obj[key]?.steps ?? []; + const values: number[] = obj[key]?.values ?? 
[]; + + list.push({ + label: key.toUpperCase(), + valueList: steps.map((item, index) => { + return { + label: formatTimestamp(item), + value: values[index] || 0, + }; + }), + }); + }); + + return list; + }, [data, isTraining]); + + return ( + <Space direction="vertical" style={{ width: '100%' }}> + {metricsList.map((item) => { + return <LineChartWithCard key={item.label} valueList={item.valueList} title={item.label} />; + })} + </Space> + ); +}; + +LineChartWithCard.ModelMetrics = ModelMetrics; +export default LineChartWithCard; diff --git a/web_console_v2/client/src/components/_base/MockDevtools/MockControlPanel.tsx b/web_console_v2/client/src/components/MockDevtools/MockControlPanel.tsx similarity index 84% rename from web_console_v2/client/src/components/_base/MockDevtools/MockControlPanel.tsx rename to web_console_v2/client/src/components/MockDevtools/MockControlPanel.tsx index f59242efc..faec24891 100644 --- a/web_console_v2/client/src/components/_base/MockDevtools/MockControlPanel.tsx +++ b/web_console_v2/client/src/components/MockDevtools/MockControlPanel.tsx @@ -1,23 +1,24 @@ +/* istanbul ignore file */ + import React, { useState } from 'react'; import styled from 'styled-components'; import { MixinCircle } from 'styles/mixins'; -import { Modal, Switch, Table, Tag, Input, Divider, Tooltip, Button } from 'antd'; +import { Modal, Switch, Table, Tag, Input, Divider, Tooltip, Button } from '@arco-design/web-react'; import { useToggle } from 'react-use'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; import { removeRequestMock, toggleRequestMockState } from './utils'; import { useListenKeyboard, useReactiveLocalStorage } from 'hooks'; import store from 'store2'; import { Storage } from 'components/IconPark'; -import defaultTheme from 'styles/_theme'; const FloatButton = styled.button` ${MixinCircle(50)} position: fixed; - z-index: 10; + z-index: 9999; right: 5px; bottom: 64px; - background-color: var(--blue1); + background-color: rgb(var(--blue-1)); color: white; cursor: pointer; font-size: 12px; @@ -34,7 +35,7 @@ const Kbd = styled.kbd` padding: 0 5px; font-size: 12px; background-color: #fff; - color: var(--darkGray1); + color: rgb(var(--gray-10)); border-radius: 2px; `; @@ -70,7 +71,7 @@ const tableCols = [ title: 'Actions', key: 'actions', render: (_: any, record: { key: string }) => ( - <Button type="link" danger onClick={() => removeRequestMock(record.key)}> + <Button type="text" status="danger" onClick={() => removeRequestMock(record.key)}> 删除 </Button> ), @@ -110,36 +111,31 @@ function MockControlPanel() { <> {visible.toString() === 'true' && ( <Tooltip - placement="left" - title={() => ( + position="left" + content={() => ( <> Mock 控制面板,<Kbd>Ctrl</Kbd> + <Kbd>M</Kbd> 切换按钮的 隐藏/显示 </> )} > <FloatButton onClick={toggleModal}> - <Storage style={{ fontSize: '24px', color: defaultTheme.primaryColor }} /> + <Storage style={{ fontSize: '24px', color: 'var(--primaryColor)' }} /> </FloatButton> </Tooltip> )} <Modal title="Mock 接口列表" - centered + alignCenter visible={modalVisible} onOk={() => toggleModal(false)} onCancel={() => toggleModal(false)} - width={1000} + style={{ width: '1000px' }} > - <Input.Search placeholder="根据 Path 搜索" onSearch={setKeyword} enterButton /> + <Input.Search placeholder="根据 Path 搜索" onSearch={setKeyword} searchButton /> <Divider /> - <Table - columns={tableCols} - size="small" - dataSource={dataSource} - pagination={{ pageSize: 10 }} - /> + <Table columns={tableCols} size="small" data={dataSource} pagination={{ pageSize: 10 }} /> </Modal> </> ); diff 
--git a/web_console_v2/client/src/components/_base/MockDevtools/index.js b/web_console_v2/client/src/components/MockDevtools/index.js similarity index 100% rename from web_console_v2/client/src/components/_base/MockDevtools/index.js rename to web_console_v2/client/src/components/MockDevtools/index.js diff --git a/web_console_v2/client/src/components/MockDevtools/utils.ts b/web_console_v2/client/src/components/MockDevtools/utils.ts new file mode 100644 index 000000000..90c3da638 --- /dev/null +++ b/web_console_v2/client/src/components/MockDevtools/utils.ts @@ -0,0 +1,45 @@ +/* istanbul ignore file */ + +import { AxiosRequestConfig } from 'axios'; +import { omit } from 'lodash-es'; +import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; +import store from 'store2'; + +export function getMockConfigs() { + return store.get(LOCAL_STORAGE_KEYS.mock_configs) || {}; +} + +export function isThisRequestMockEnabled(config: AxiosRequestConfig): boolean { + const key = `${config.method}|${config.url}`; + + return Boolean(getRequestMockState(key)); +} + +export function getRequestMockState(key: string): boolean | undefined { + return getMockConfigs()[key]; +} + +export function setRequestMockState(key: string, val: boolean): void { + if ( + !['post', 'get', 'patch', 'delete', 'put', 'head', 'options', 'connect'].some((method) => + key.toLowerCase().startsWith(method), + ) + ) { + throw new Error('Key 名不合法!'); + } + + const mocksConfig = store.get(LOCAL_STORAGE_KEYS.mock_configs) || {}; + mocksConfig[key] = val; + + store.set(LOCAL_STORAGE_KEYS.mock_configs, mocksConfig); +} + +export function removeRequestMock(key: string): void { + const mocksConfig = getMockConfigs(); + + store.set(LOCAL_STORAGE_KEYS.mock_configs, omit(mocksConfig, key)); +} + +export function toggleRequestMockState(key: string, val?: boolean): void { + setRequestMockState(key, typeof val === 'boolean' ? 
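The mock helpers in `MockDevtools/utils.ts` above key every mock by `method|url` and persist the flags in localStorage through `store2`. A hypothetical usage sketch (the endpoint is invented for illustration):

```ts
import { AxiosRequestConfig } from 'axios';
import {
  setRequestMockState,
  isThisRequestMockEnabled,
  toggleRequestMockState,
} from 'components/MockDevtools/utils';

// Keys must start with an HTTP verb, mirroring `${config.method}|${config.url}`.
setRequestMockState('get|/api/v2/projects', true);

const config: AxiosRequestConfig = { method: 'get', url: '/api/v2/projects' };
isThisRequestMockEnabled(config); // => true

// Without an explicit value, toggle flips whatever is currently stored.
toggleRequestMockState('get|/api/v2/projects'); // mock now disabled
```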
val : !getRequestMockState(key)); +} diff --git a/web_console_v2/client/src/components/Modal/index.tsx b/web_console_v2/client/src/components/Modal/index.tsx new file mode 100644 index 000000000..509fce694 --- /dev/null +++ b/web_console_v2/client/src/components/Modal/index.tsx @@ -0,0 +1,101 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; +import i18n from 'i18n'; + +import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; + +import { Modal } from '@arco-design/web-react'; + +import { ModalProps } from '@arco-design/web-react/es/Modal/modal'; +import { ConfirmProps } from '@arco-design/web-react/es/Modal/confirm'; + +type ModalType = typeof Modal & { + delete: (props: ConfirmProps) => ReturnType<typeof Modal.confirm>; + stop: (props: ConfirmProps) => ReturnType<typeof Modal.confirm>; + terminate: (props: ConfirmProps) => ReturnType<typeof Modal.confirm>; + reject: (props: ConfirmProps) => ReturnType<typeof Modal.confirm>; +}; + +export const CUSTOM_CLASS_NAME = 'custom-modal'; + +export function withConfirmProps(props: ConfirmProps) { + return { + className: CUSTOM_CLASS_NAME, + zindex: Z_INDEX_GREATER_THAN_HEADER, + okText: i18n.t('confirm'), + cancelText: i18n.t('cancel'), + ...props, + }; +} + +export function withDeleteProps(props: ConfirmProps) { + return withConfirmProps({ + okText: i18n.t('delete'), + okButtonProps: { + status: 'danger', + }, + ...props, + }); +} + +export function withStopProps(props: ConfirmProps) { + return withConfirmProps({ + okText: i18n.t('stop'), + okButtonProps: { + status: 'danger', + }, + ...props, + }); +} + +export function withTerminate(props: ConfirmProps) { + return withConfirmProps({ + okText: i18n.t('terminate'), + okButtonProps: { + status: 'danger', + }, + ...props, + }); +} + +export function withRejectProps(props: ConfirmProps) { + return withConfirmProps({ + okText: '确认拒绝', + okButtonProps: { + status: 'danger', + }, + ...props, + }); +} + +const ProxyModal: FC<ModalProps> = (props) => { + return <Modal wrapClassName={CUSTOM_CLASS_NAME} {...props} />; +}; + +const MyModal = ProxyModal as ModalType; + +// Custom method +MyModal.delete = (props: ConfirmProps) => { + return Modal.confirm(withDeleteProps(props)); +}; +MyModal.stop = (props: ConfirmProps) => { + return Modal.confirm(withStopProps(props)); +}; +MyModal.terminate = (props: ConfirmProps) => { + return Modal.confirm(withTerminate(props)); +}; +MyModal.reject = (props: ConfirmProps) => { + return Modal.confirm(withRejectProps(props)); +}; + +// Proxy all static method +MyModal.info = (props: ConfirmProps) => Modal.info(withConfirmProps(props)); +MyModal.success = (props: ConfirmProps) => Modal.success(withConfirmProps(props)); +MyModal.error = (props: ConfirmProps) => Modal.error(withConfirmProps(props)); +MyModal.warning = (props: ConfirmProps) => Modal.warning(withConfirmProps(props)); +MyModal.confirm = (props: ConfirmProps) => Modal.confirm(withConfirmProps(props)); +MyModal.destroyAll = () => Modal.destroyAll(); +MyModal.useModal = () => Modal.useModal(); + +export default MyModal; diff --git a/web_console_v2/client/src/components/ModelCodesEditorButton/FileExplorer.tsx b/web_console_v2/client/src/components/ModelCodesEditorButton/FileExplorer.tsx index 155e9e3a6..4c7108bde 100644 --- a/web_console_v2/client/src/components/ModelCodesEditorButton/FileExplorer.tsx +++ b/web_console_v2/client/src/components/ModelCodesEditorButton/FileExplorer.tsx @@ -2,11 +2,13 @@ import React, { FC } from 'react'; import styled from 'styled-components'; import pythonSvg 
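The `components/Modal` wrapper above layers `withConfirmProps` under every static method, so callers get the custom class name, z-index, and i18n ok/cancel texts for free. A hedged sketch of a call site (title, content, and handler are invented):

```ts
import Modal from 'components/Modal';

// Modal.delete pre-sets a danger ok button and the i18n 'delete' text.
Modal.delete({
  title: '确认删除该记录?',
  content: '删除后不可恢复',
  onOk: () => {
    // perform the deletion request here
  },
});

// Plain confirms still flow through withConfirmProps.
Modal.confirm({ title: '确认执行该操作?' });
```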
from 'assets/icons/python.svg'; import { VS_DARK_COLOR } from 'components/CodeEditor'; -import { Tooltip, Row, Input } from 'antd'; +import { Tooltip, Grid, Input, Message } from '@arco-design/web-react'; import { PlusCircle, MinusCircle, Undo } from 'components/IconPark'; import GridRow from 'components/_base/GridRow'; import { useToggle } from 'react-use'; +const Row = Grid.Row; + const Container = styled.aside` position: sticky; top: 0; @@ -84,12 +86,20 @@ const NewFileInput = styled(Input)` type Props = { active: string; files: string[]; + isReadOnly?: boolean; onSelect?: (path: string) => void; onCreate?: (newPath: string) => void; onDelete?: (path: string) => void; }; -const FileExplorer: FC<Props> = ({ files, active, onSelect, onCreate, onDelete }) => { +const FileExplorer: FC<Props> = ({ + files, + active, + onSelect, + onCreate, + onDelete, + isReadOnly = false, +}) => { const [inputVisible, toggleInputVisible] = useToggle(false); return ( @@ -102,14 +112,16 @@ const FileExplorer: FC<Props> = ({ files, active, onSelect, onCreate, onDelete } className={active === file ? 'is-active' : ''} onClick={() => onFileClick(file)} > - <Row justify="space-between" align="middle"> + <Row justify="space-between" align="center"> <span>{file}</span> - <Tooltip title="删除该文件" placement="right" color="orange"> - <MinusCircle - className="del-button" - onClick={(event) => onDelClick(file, event)} - /> - </Tooltip> + {!isReadOnly && ( + <Tooltip content="删除该文件" position="right" color="orange"> + <MinusCircle + className="del-button" + onClick={(event) => onDelClick(file, event)} + /> + </Tooltip> + )} </Row> </File> ); @@ -124,10 +136,12 @@ const FileExplorer: FC<Props> = ({ files, active, onSelect, onCreate, onDelete } /> )} - <AddFileButton gap="8" top="30" left="auto" onClick={onAddClick}> - {inputVisible ? <Undo /> : <PlusCircle />} - {inputVisible ? '取消添加' : '添加文件'} - </AddFileButton> + {!isReadOnly && ( + <AddFileButton gap="8" top="30" left="auto" onClick={onAddClick}> + {inputVisible ? <Undo /> : <PlusCircle />} + {inputVisible ? 
'取消添加' : '添加文件'} + </AddFileButton> + )} </Container> ); @@ -143,7 +157,14 @@ const FileExplorer: FC<Props> = ({ files, active, onSelect, onCreate, onDelete } toggleInputVisible(); } function onConfirmAdd(event: React.KeyboardEvent<HTMLInputElement>) { - onCreate && onCreate((event.target as any).value); + const fileName = (event.target as any).value; + + if (!fileName) { + Message.error('请输入文件名!'); + return; + } + + onCreate && onCreate(fileName); toggleInputVisible(); } }; diff --git a/web_console_v2/client/src/components/ModelCodesEditorButton/index.module.less b/web_console_v2/client/src/components/ModelCodesEditorButton/index.module.less new file mode 100644 index 000000000..4430a863d --- /dev/null +++ b/web_console_v2/client/src/components/ModelCodesEditorButton/index.module.less @@ -0,0 +1,9 @@ +.drawer_container{ + :global { + .arco-drawer-content{ + padding: 10px 0 0; + height: 100%; + background-color: #1e1e1e; + } + } +} diff --git a/web_console_v2/client/src/components/ModelCodesEditorButton/index.tsx b/web_console_v2/client/src/components/ModelCodesEditorButton/index.tsx index d369b4ed5..411240482 100644 --- a/web_console_v2/client/src/components/ModelCodesEditorButton/index.tsx +++ b/web_console_v2/client/src/components/ModelCodesEditorButton/index.tsx @@ -1,57 +1,77 @@ -import { Button, Drawer, Row, Col } from 'antd'; -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { CodeOutlined } from '@ant-design/icons'; +/* istanbul ignore file */ + +import { Button, Drawer, Grid } from '@arco-design/web-react'; +import React, { FC, useState, useMemo } from 'react'; +import { IconCodeSquare } from '@arco-design/web-react/icon'; import { useToggle } from 'react-use'; -import CodeEditor, { VS_DARK_COLOR } from 'components/CodeEditor'; +import CodeEditor from 'components/CodeEditor'; + import FileExplorer from './FileExplorer'; -import { isEmpty } from 'lodash'; +import { isEmpty } from 'lodash-es'; -const DEFAULT_MAIN_FILE = 'main.py'; +import { BaseButtonProps } from '@arco-design/web-react/es/Button/interface'; -const Container = styled.div``; +import styles from './index.module.less'; -const StyledDrawer = styled(Drawer)` - top: 60px; +const { Row, Col } = Grid; + +const DEFAULT_MAIN_FILE = 'main.py'; - .ant-drawer-body { - padding: 10px 0 0; - height: 100%; - background-color: ${VS_DARK_COLOR}; - } -`; type MultiPathCodes = { [path: string]: string }; type Props = { value?: MultiPathCodes; onChange?: (val?: MultiPathCodes) => any; + disabled?: boolean; + buttonText?: string; + buttonType?: BaseButtonProps['type']; + buttonIcon?: React.ReactNode; + buttonStyle?: React.CSSProperties; + renderButton?: (onClick: any) => React.ReactNode; }; let __onChangeTimer: number; -const CodeEditorButton: FC<Props> = ({ value, onChange }) => { - let data: MultiPathCodes = value!; +const CodeEditorButton: FC<Props> = ({ + value, + onChange, + disabled, + buttonText = '打开代码编辑器', + buttonType = 'default', + buttonIcon = <IconCodeSquare />, + buttonStyle = {}, + renderButton, +}) => { + const data = useMemo<MultiPathCodes>(() => { + let tempData; + if (typeof value === 'string' || isEmpty(value)) { + tempData = { [DEFAULT_MAIN_FILE]: '' }; + } else { + tempData = { ...value }; + } - if (typeof data === 'string' || isEmpty(data)) { - data = { [DEFAULT_MAIN_FILE]: '' }; - } + return tempData; + }, [value]); const files = Object.keys(data || {}); - const [visible, toggleVisible] = useToggle(false); const [activeFile, setActive] = useState<string>(files[0]); 
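The `useMemo` above normalizes whatever `value` arrives into a `MultiPathCodes` map, falling back to a single empty `main.py` entry. A minimal standalone sketch of that rule (plain checks in place of lodash `isEmpty`):

```ts
type MultiPathCodes = { [path: string]: string };

const DEFAULT_MAIN_FILE = 'main.py';

function normalizeCodes(value?: MultiPathCodes | string): MultiPathCodes {
  // A string or empty value degrades to one default, empty main file.
  if (typeof value === 'string' || !value || Object.keys(value).length === 0) {
    return { [DEFAULT_MAIN_FILE]: '' };
  }
  // Copy so later per-file edits don't mutate the caller's object.
  return { ...value };
}

normalizeCodes(undefined);             // => { 'main.py': '' }
normalizeCodes({ 'util.py': 'pass' }); // => { 'util.py': 'pass' }
```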
return ( - <Container> - <Button icon={<CodeOutlined />} onClick={onButtonClick}> - 打开模型代码编辑器 - </Button> - - <StyledDrawer - getContainer="#app-content" + <div> + {renderButton ? ( + renderButton(onButtonClick) + ) : ( + <Button icon={buttonIcon} onClick={onButtonClick} style={buttonStyle} type={buttonType}> + {buttonText} + </Button> + )} + + <Drawer + className={styles.drawer_container} placement="left" width={window.innerWidth - 250} visible={visible} - contentWrapperStyle={{ + style={{ contain: 'paint', }} bodyStyle={{ @@ -60,18 +80,20 @@ const CodeEditorButton: FC<Props> = ({ value, onChange }) => { headerStyle={{ display: 'none', }} + footer={null} maskStyle={{ backdropFilter: 'blur(3px)' }} - onClose={toggleVisible} + onCancel={toggleVisible} > - <Row style={{ height: '100%' }} wrap={false}> + <Row style={{ height: '100%', flexWrap: 'nowrap' }}> <FileExplorer files={files} active={activeFile} onSelect={onFileSelect} onCreate={onFileCreate} onDelete={onFileDelete} + isReadOnly={disabled} /> - <Col flex="1"> + <Col flex="1" style={{ height: '100%' }}> <CodeEditor path={activeFile} value={data[activeFile]} @@ -79,11 +101,12 @@ const CodeEditorButton: FC<Props> = ({ value, onChange }) => { defaultPath={activeFile} language="python" onChange={onCodeChange} + isReadOnly={disabled} /> </Col> </Row> - </StyledDrawer> - </Container> + </Drawer> + </div> ); function onButtonClick() { @@ -104,13 +127,19 @@ const CodeEditorButton: FC<Props> = ({ value, onChange }) => { setActive(newPath); } - function onFileDelete() { - if (data[activeFile]) { - delete data[activeFile]; + function onFileDelete(path: string) { + if (Object.prototype.hasOwnProperty.call(data, path)) { + delete data[path]; } + if (path === activeFile) { + const tempFiles = files.filter((fileKey) => fileKey !== path); + + // replace active file + setActive(tempFiles.length > 0 ? 
tempFiles[0] : ''); + } + onChange && onChange(data); } - function updateValue(val: MultiPathCodes) { clearTimeout(__onChangeTimer); diff --git a/web_console_v2/client/src/components/MoreActions/index.tsx b/web_console_v2/client/src/components/MoreActions/index.tsx new file mode 100644 index 000000000..f3b4215e7 --- /dev/null +++ b/web_console_v2/client/src/components/MoreActions/index.tsx @@ -0,0 +1,152 @@ +/* istanbul ignore file */ + +import React, { ReactNode, CSSProperties } from 'react'; +import styled, { createGlobalStyle } from 'styled-components'; + +import { Menu, Popover, Tooltip } from '@arco-design/web-react'; +import IconButton from 'components/IconButton'; +import { More } from 'components/IconPark'; + +import { PopoverProps } from '@arco-design/web-react/es/Popover'; + +export const GLOBAL_CLASS_NAME = 'global-more-actions'; + +const GlobalStyle = createGlobalStyle` + .${GLOBAL_CLASS_NAME} { + min-width: 72px; + border: 1px solid #e5e6e8; + box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.1); + border-radius: 4px; + overflow: hidden; + padding: 0; + z-index: var(--zIndexLessThanModal); + + .arco-popover-content { + padding: 0; + .arco-popover-arrow { + display: none !important; + } + .arco-popover-inner { + border-radius: 0; + .arco-popover-inner-content { + padding: 6px 0; + } + } + } + } +`; + +const ActionListContainer = styled(Menu)` + .arco-menu-inner { + margin: 0; + padding: 0; + } + + && .actionItem { + width: 100%; + min-height: 36px; + text-align: center; + margin: 0; + padding: 0; + + .item { + padding: 0 16px; + } + + &:not(.arco-menu-disabled) { + color: var(--fontColor, var(--textColor)); + } + } +` as typeof Menu; + +export interface ActionItem { + /** Display Label */ + label: string; + onClick?: () => void; + /** Sometimes you need to disable the button */ + disabled?: boolean; + /** Sometimes you want a hint when the button is disabled */ + disabledTip?: string; + /** Danger button style, red color */ + danger?: boolean; + /** Just for test */ + testId?: string; +} + +export interface Props extends PopoverProps { + /** DataSource */ + actionList: ActionItem[]; + /** + * Customize content render + */ + renderContent?: (actionList: ActionItem[]) => ReactNode; + children?: any; + zIndex?: number | string; + className?: string | undefined; +} + +function renderDefaultContent(actionList: ActionItem[]) { + return ( + <ActionListContainer selectable={false}> + {actionList.map((item, index) => ( + // Because "div" has no disable effect, replace div with "Menu.Item" here + <Menu.Item + className="actionItem" + key={`${item.label}__${index}`} + disabled={Boolean(item.disabled)} + onClick={(event) => { + event?.stopPropagation(); + item.onClick?.(); + }} + style={ + { + '--fontColor': item.danger ? 'var(--errorColor)' : null, + } as CSSProperties + } + > + {item.disabledTip && item.disabled ? ( + <Tooltip content={item.disabledTip}> + <span className="item" data-testid={item.testId}> + {item.label} + </span> + </Tooltip> + ) : ( + <span className="item" data-testid={item.testId}> + {item.label} + </span> + )} + </Menu.Item> + ))} + </ActionListContainer> + ); +} + +function MoreActions({ + actionList, + trigger = 'click', + children, + renderContent, + zIndex = 'var(--zIndexLessThanModal)', + className, + ...resetProps +}: Props) { + return ( + <span onClick={(e) => e.stopPropagation()} className={className}> + <GlobalStyle /> + <Popover + content={renderContent ? 
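`MoreActions` above renders its `actionList` into a Popover-backed menu, with per-item `disabled`, `disabledTip`, and `danger` styling. A hypothetical call site (labels and handlers invented):

```tsx
import React from 'react';
import MoreActions from 'components/MoreActions';

function RowActions() {
  return (
    <MoreActions
      actionList={[
        { label: '编辑', onClick: () => {/* open the editor */} },
        { label: '删除', danger: true, onClick: () => {/* confirm, then delete */} },
        { label: '导出', disabled: true, disabledTip: '暂不支持导出' },
      ]}
    />
  );
}
```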
renderContent(actionList) : renderDefaultContent(actionList)} + position="bl" + className={GLOBAL_CLASS_NAME} + triggerProps={{ + trigger, + }} + style={{ zIndex: zIndex as any }} + {...resetProps} + > + {children ?? <IconButton type="text" icon={<More />} data-testid="btn-more-actions" />} + </Popover> + </span> + ); +} + +export default MoreActions; diff --git a/web_console_v2/client/src/components/MultiSelect/index.module.less b/web_console_v2/client/src/components/MultiSelect/index.module.less new file mode 100644 index 000000000..7e845112a --- /dev/null +++ b/web_console_v2/client/src/components/MultiSelect/index.module.less @@ -0,0 +1,37 @@ +.select{ + :global{ + .arco-input-tag-input.arco-input-tag-input-size-default{ + font-size: 13px !important; + } + } +} + +.header{ + display: flex; + justify-content: space-between; + padding: 5px 12px; + border-bottom: 1px solid var(--lineColor); +} + +.label_strong{ + font-size: 14px; + color: var(--textColorStrong); +} + +.label{ + font-size: 14px; + color: var(--textColor); + margin-right: 8px; +} + +.label_index{ + display: inline-block; + width: 30px; + font-size: 14px; + color: var(--textColorSecondary); +} + +.item_cotainer{ + display: flex; + justify-content: space-between; +} diff --git a/web_console_v2/client/src/components/MultiSelect/index.tsx b/web_console_v2/client/src/components/MultiSelect/index.tsx new file mode 100644 index 000000000..3f941c4c6 --- /dev/null +++ b/web_console_v2/client/src/components/MultiSelect/index.tsx @@ -0,0 +1,116 @@ +/* istanbul ignore file */ + +import React, { FC } from 'react'; + +import { Select, Checkbox, Tag } from '@arco-design/web-react'; +import { IconSearch } from '@arco-design/web-react/icon'; + +import { SelectProps } from '@arco-design/web-react/es/Select'; +import styled from './index.module.less'; +export interface OptionItem { + /** Display label */ + label: string; + /** Form value */ + value: any; +} + +export interface Props extends SelectProps { + value?: any[]; + onChange?: (val: any) => void; + /** + * DataSource + */ + optionList: OptionItem[]; + /** + * Hide header layout + */ + isHideHeader?: boolean; + /** + * Hide index label + */ + isHideIndex?: boolean; +} + +const MultiSelect: FC<Props> = ({ + value, + onChange = () => {}, + optionList, + isHideHeader = false, + isHideIndex = false, + ...props +}) => { + const isAllChecked = value?.length === optionList.length; + return ( + <Select + mode="multiple" + value={value} + arrowIcon={false} + showSearch={true} + suffixIcon={<IconSearch fontSize={14} />} + onChange={(value, options) => { + onChange(value); + }} + className={styled.select} + dropdownRender={(menu) => ( + <div> + {!isHideHeader && ( + <div className={styled.header}> + <span className={styled.label_strong}>{`已选择 ${value?.length ?? 0} 项`}</span> + <div> + <span className={styled.label}>全选</span> + <Checkbox + disabled={!optionList || optionList.length === 0} + checked={isAllChecked} + onChange={(checked: boolean) => { + if (checked) { + onChange(optionList.map((item) => item.value)); + } else { + onChange([]); + } + }} + /> + </div> + </div> + )} + {menu} + </div> + )} + renderTag={ + isAllChecked + ? 
(props: any) => { + // only show first item + if (props.value !== optionList[0]?.value) { + return null as any; + } + return ( + <Tag + closable + onClose={() => { + onChange([]); + }} + > + 全选 + </Tag> + ); + } + : undefined + } + {...props} + > + {optionList.map((item, index) => { + return ( + <Select.Option key={item.value} value={item.value} title={item.label}> + <div className={styled.item_cotainer}> + <div> + {!isHideIndex && <span className={styled.label_index}>{index + 1}</span>} + <span className={styled.label_strong}>{item.label}</span> + </div> + </div> + </Select.Option> + ); + })} + </Select> + ); +}; + +export default MultiSelect; diff --git a/web_console_v2/client/src/components/NewDatasetSelect/hooks.ts b/web_console_v2/client/src/components/NewDatasetSelect/hooks.ts new file mode 100644 index 000000000..4426cb732 --- /dev/null +++ b/web_console_v2/client/src/components/NewDatasetSelect/hooks.ts @@ -0,0 +1,175 @@ +import { PageMeta } from 'typings/app'; +import { useRecoilValue } from 'recoil'; +import { useMemo, useState } from 'react'; +import { IPageInfo, Props } from './index'; +import { projectState } from 'stores/project'; +import { Message } from '@arco-design/web-react'; +import { useQuery, UseQueryResult } from 'react-query'; +import { DATASET_LIST_QUERY_KEY } from 'views/Datasets/DatasetList'; +import { + Dataset, + DatasetKindLabelCapitalMapper, + DatasetStateFront, + ParticipantDataset, +} from 'typings/dataset'; +import { + fetchDatasetDetail, + fetchDatasetList, + fetchParticipantDatasetList, +} from 'services/dataset'; +import { FILTER_OPERATOR_MAPPER, filterExpressionGenerator } from 'views/Datasets/shared'; + +type TGetDatasetList = [ + Array<Dataset | ParticipantDataset>, + UseQueryResult< + { + data: Array<Dataset | ParticipantDataset>; + }, + unknown + >, + IPageInfo, + (args: any) => void, + () => void, + (args: Array<Dataset | ParticipantDataset>) => void, +]; + +/** + * fetch dataset list depends lazy or not + * @param isParticipant + * @param queryParams + * @param kind + * @param lazyLoad + * @param disabled + * @param datasetJobKind + */ +export function useGetDatasetList({ + isParticipant, + queryParams, + kind, + lazyLoad = { + enable: false, + page_size: 10, + }, + disabled, + datasetJobKind, +}: Pick< + Props, + 'isParticipant' | 'queryParams' | 'kind' | 'lazyLoad' | 'disabled' | 'datasetJobKind' +>): TGetDatasetList { + const selectedProject = useRecoilValue(projectState); + const [pageInfo, setPageInfo] = useState<IPageInfo>({ + page: 1, + totalPages: 0, + keyword: '', + }); + const [options, setOptions] = useState([] as Array<ParticipantDataset | Dataset>); + + const query = useQuery<{ + data: Array<Dataset | ParticipantDataset>; + page_meta?: PageMeta; + }>( + [ + DATASET_LIST_QUERY_KEY, + selectedProject.current?.id, + isParticipant, + kind, + lazyLoad?.enable ? pageInfo.page : null, + lazyLoad?.enable ? pageInfo.keyword : null, + disabled, + datasetJobKind, + ], + () => { + const pageParams = lazyLoad?.enable + ? { + page: pageInfo.page, + page_size: lazyLoad.page_size, + } + : {}; + const filter = filterExpressionGenerator( + { + project_id: selectedProject.current?.id, + name: pageInfo.keyword, + is_published: isParticipant ? undefined : true, + dataset_kind: isParticipant ? 
undefined : DatasetKindLabelCapitalMapper[kind!], + }, + FILTER_OPERATOR_MAPPER, + ); + if (isParticipant) { + return fetchParticipantDatasetList(selectedProject.current?.id!, { + ...queryParams, + ...pageParams, + }); + } + return fetchDatasetList({ + filter, + ...queryParams, + ...pageParams, + state_frontend: [DatasetStateFront.SUCCEEDED], + dataset_job_kind: datasetJobKind, + }); + }, + { + enabled: Boolean(selectedProject.current && !disabled), + retry: 2, + refetchOnWindowFocus: false, + onSuccess: (res) => { + setPageInfo((pre) => { + const { page_meta } = res; + return { + ...pre, + page: page_meta?.current_page || pre.page, + totalPages: page_meta?.total_pages || pre.totalPages, + }; + }); + setOptions((pre) => { + const { data } = res; + const addOption = (data ?? []) as Dataset[]; + return pre.concat(addOption); + }); + }, + }, + ); + + const list = useMemo(() => { + return options; + }, [options]); + + const clearList = () => { + setOptions([]); + }; + + return [list, query, pageInfo, setPageInfo, clearList, setOptions] as TGetDatasetList; +} + +/** + * Gets the details of the last selected dataset when the page is initialized + * @param datasetId + * @param pageInit + */ +export function useGetLastSelectedDataset(datasetId: ID, pageInit: boolean) { + const detailDataQuery = useQuery( + ['fetch_last_dataset_detail', datasetId, pageInit], + () => fetchDatasetDetail(datasetId), + { + enabled: Boolean(pageInit && (datasetId || datasetId === 0)), + retry: false, + refetchOnWindowFocus: false, + onError: (error: any) => { + Message.error(error.message); + }, + }, + ); + + const isLoading = useMemo(() => { + return detailDataQuery.isFetching; + }, [detailDataQuery]); + + const data = useMemo(() => { + if (!detailDataQuery?.data) { + return undefined; + } + return detailDataQuery?.data?.data; + }, [detailDataQuery]); + + return [isLoading, data] as [boolean, undefined | Dataset | ParticipantDataset]; +} diff --git a/web_console_v2/client/src/components/NewDatasetSelect/index.tsx b/web_console_v2/client/src/components/NewDatasetSelect/index.tsx new file mode 100644 index 000000000..5b404fdd7 --- /dev/null +++ b/web_console_v2/client/src/components/NewDatasetSelect/index.tsx @@ -0,0 +1,228 @@ +/* istanbul ignore file */ +import React, { FC, useEffect, useMemo, useRef, useState } from 'react'; +import { Select } from '@arco-design/web-react'; +import { fetchDatasetDetail } from 'services/dataset'; +import { useTranslation } from 'react-i18next'; +import { Dataset, DatasetKindLabel, ParticipantDataset, DataJobBackEndType } from 'typings/dataset'; +import { SelectProps } from '@arco-design/web-react/es/Select'; +import { OptionInfo } from '@arco-design/web-react/es/Select/interface'; +import { debounce } from 'lodash-es'; +import { useGetDatasetList, useGetLastSelectedDataset } from './hooks'; +import { renderOption } from '../DatasetSelect'; + +interface ILazyLoad { + enable: boolean; + page_size?: number; +} + +export interface IPageInfo { + page?: number; + totalPages?: number; + keyword?: string; +} + +export interface Props extends SelectProps { + /** Is participant dataset */ + isParticipant?: boolean; + /** extra API query params */ + queryParams?: object; + /** raw or processed dataset */ + kind?: DatasetKindLabel; + shouldGetDatasetDetailAfterOnChange?: boolean; + onChange?: (value: any, option: OptionInfo | OptionInfo[], datasetDetail?: Dataset) => void; + /** open pagination and fetch list with page params */ + lazyLoad?: ILazyLoad; + /** DATA_ALIGNMENT or other type dataset */ 
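When `lazyLoad` is enabled, the `useGetDatasetList` hook above appends each successful page onto the previous options instead of replacing them, while tracking the server-reported paging. A reduced sketch of that pattern with the react-query plumbing stripped out (names simplified):

```ts
import { useState } from 'react';

type PageMeta = { current_page?: number; total_pages?: number };

function useAccumulatedList<T>() {
  const [pageInfo, setPageInfo] = useState({ page: 1, totalPages: 0, keyword: '' });
  const [options, setOptions] = useState<T[]>([]);

  function onPageSuccess(res: { data: T[]; page_meta?: PageMeta }) {
    // Keep the server's paging info, falling back to the previous values.
    setPageInfo((pre) => ({
      ...pre,
      page: res.page_meta?.current_page || pre.page,
      totalPages: res.page_meta?.total_pages || pre.totalPages,
    }));
    // Append rather than replace, so earlier pages stay in the dropdown.
    setOptions((pre) => pre.concat(res.data ?? []));
  }

  return { pageInfo, options, onPageSuccess };
}
```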
+ datasetJobKind?: DataJobBackEndType; +} + +const DatasetSelect: FC<Props> = ({ + value, + onChange, + queryParams, + isParticipant = false, + kind = DatasetKindLabel.PROCESSED, + shouldGetDatasetDetailAfterOnChange = false, + lazyLoad = { + enable: false, + page_size: 10, + }, + disabled = false, + datasetJobKind, + ...props +}) => { + const { t } = useTranslation(); + const [isLoading, setIsLoading] = useState(false); + const refCanTriggerLoadMore = useRef(true); + const pageInit = useRef(true); + const [ + datasetList, + datasetQuery, + pageInfo, + setPageInfo, + clearList, + setOptions, + ] = useGetDatasetList({ + isParticipant, + queryParams, + kind, + lazyLoad, + disabled, + datasetJobKind, + }); + + // fetch the details of the last selected dataset + const [lastDatasetLoading, lastDatasetDetail] = useGetLastSelectedDataset( + value as ID, + pageInit.current, + ); + + const popupScrollHandler = (element: any) => { + const curPage = pageInfo.page || 0; + const curTotalPage = pageInfo.totalPages || 0; + if (!lazyLoad?.enable || curPage >= curTotalPage) { + return; + } + const { scrollTop, scrollHeight, clientHeight } = element; + const scrollBottom = scrollHeight - (scrollTop + clientHeight); + if (scrollBottom < 10) { + if (!datasetQuery.isFetching && refCanTriggerLoadMore.current) { + setPageInfo((pre: any) => ({ + ...pre, + page: pre.page + 1, + })); + refCanTriggerLoadMore.current = false; + } + } else { + refCanTriggerLoadMore.current = true; + } + }; + + const debouncedFetchUser = debounce((inputValue: string) => { + if (!lazyLoad?.enable) { + return; + } + setPageInfo({ + keyword: inputValue, + page: 1, + totalPages: 0, + }); + clearList(); + }, 500); + + const isControlled = typeof value !== 'undefined'; + const valueProps = isControlled ? { value } : {}; + const disableProps = isLoading || disabled ? { disabled: true } : {}; + useEffect(() => { + setPageInfo({ + keyword: '', + page: 1, + totalPages: 0, + }); + setOptions([]); + }, [datasetJobKind, setPageInfo, setOptions]); + + return ( + <Select + onSearch={debouncedFetchUser} + onPopupScroll={popupScrollHandler} + placeholder={t('placeholder_select')} + onChange={onSelectChange} + value={value} + showSearch + allowClear + filterOption={(inputValue, option) => { + return option.props.extra.name.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0; + }} + loading={datasetQuery.isFetching || isLoading || lastDatasetLoading} + {...valueProps} + {...props} + {...disableProps} + > + {datasetList.map((item) => ( + <Select.Option + key={isParticipant ? (item as ParticipantDataset).uuid : (item as Dataset).id} + value={isParticipant ? (item as ParticipantDataset).uuid : (item as Dataset).id} + extra={item} + > + {renderOption(item, isParticipant)} + </Select.Option> + ))} + {Boolean(lastDatasetDetail && lastDatasetDetail?.name) && ( + <Select.Option + key={ + isParticipant + ? (lastDatasetDetail as ParticipantDataset).uuid + : (lastDatasetDetail as Dataset).id + } + value={ + isParticipant + ? 
(lastDatasetDetail as ParticipantDataset).uuid + : (lastDatasetDetail as Dataset).id + } + extra={lastDatasetDetail} + > + {renderOption(lastDatasetDetail!, isParticipant)} + </Select.Option> + )} + </Select> + ); + + async function onSelectChange(id: string, options: OptionInfo | OptionInfo[]) { + pageInit.current = false; + let datasetDetail; + try { + if (shouldGetDatasetDetailAfterOnChange && !isParticipant) { + setIsLoading(true); + const resp = await fetchDatasetDetail(id); + datasetDetail = resp.data; + } + } catch (error) {} + setIsLoading(false); + onChange?.(id, options, datasetDetail); + } +}; + +type PathProps = { + /** Accept dataset path */ + value?: string; + /** Accept dataset path */ + onChange?: (val?: string, options?: OptionInfo | OptionInfo[]) => void; +} & Omit<Props, 'value' | 'onChange' | 'isParticipant'>; + +export const DatasetPathSelect: FC<PathProps> = ({ + value, + onChange, + queryParams, + kind = DatasetKindLabel.PROCESSED, + ...props +}) => { + const [datasetList] = useGetDatasetList({ + isParticipant: false, // Don't support participant dataset, because it's no path field + queryParams, + kind, + }); + + const datasetId = useMemo(() => { + const dataset = datasetList.find((item) => { + return (item as Dataset).path === value; + }) as Dataset; + + return dataset?.id; + }, [datasetList, value]); + + const valueProps = datasetId ? { value: datasetId } : {}; + + return ( + <DatasetSelect + onChange={(_, option) => { + const dataset = (option as OptionInfo)?.extra; + onChange?.(dataset?.path ?? undefined, option); + }} + {...valueProps} + {...props} + /> + ); +}; + +export default DatasetSelect; diff --git a/web_console_v2/client/src/components/NoResult/index.tsx b/web_console_v2/client/src/components/NoResult/index.tsx index 4c295b36d..24dc98a56 100644 --- a/web_console_v2/client/src/components/NoResult/index.tsx +++ b/web_console_v2/client/src/components/NoResult/index.tsx @@ -1,14 +1,18 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; import styled from 'styled-components'; import illustration from 'assets/images/empty.svg'; +import model from 'assets/images/empty-data.svg'; import { Link } from 'react-router-dom'; import { useTranslation } from 'react-i18next'; +import { convertToUnit } from 'shared/helpers'; const Container = styled.div` display: flex; flex-direction: column; align-items: center; - width: 15%; + width: ${(props: Pick<Props, 'width'>) => (props.width ? convertToUnit(props.width) : '15%')}; min-width: 100px; margin: auto; `; @@ -22,23 +26,40 @@ const Illustration = styled.img` const Text = styled.span` font-size: 14px; white-space: nowrap; + color: var(--textColorSecondary); +`; + +const Img = styled.img` + display: block; + width: 60px; + margin-bottom: 20px; + pointer-events: none; + user-select: none; `; type Props = { text: string; + noImage?: boolean; CTAText?: string; // call to action text to?: string; + src?: string; + width?: any; }; +type NoModelVersionResultProps = Partial<Props>; + /** * Common no result placeholder * NOTE: make sure you put inside a grid or flex container! 
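`DatasetPathSelect` above accepts a dataset path as its form value and resolves it back to a dataset id by scanning the fetched list (participant datasets are excluded since they carry no path field). The lookup itself is a one-liner; a standalone sketch with invented data:

```ts
type DatasetLike = { id: number; path: string };

function findDatasetIdByPath(list: DatasetLike[], path?: string): number | undefined {
  return list.find((item) => item.path === path)?.id;
}

const list = [
  { id: 1, path: '/data/raw/d1' },
  { id: 2, path: '/data/processed/d2' },
];
findDatasetIdByPath(list, '/data/processed/d2'); // => 2
findDatasetIdByPath(list, '/missing');           // => undefined
```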
*/ -const NoResult: FC<Props> = ({ text, CTAText, to, ...props }) => { +const NoResult: FC<Props> & { + ModelVersion: FC<NoModelVersionResultProps>; + NoData: FC<any>; +} = ({ text, CTAText, to, noImage, src = illustration, ...props }) => { const { t } = useTranslation(); return ( <Container {...props}> - <Illustration src={illustration} /> + {!noImage && <Illustration src={illustration} />} <Text> <span>{text}</span> @@ -53,4 +74,74 @@ const NoResult: FC<Props> = ({ text, CTAText, to, ...props }) => { ); }; +const NoModelVersionResult: FC<NoModelVersionResultProps> = ({ + text, + CTAText, + to = '/model-center/model-management/sender/create/global', + src = model, + ...props +}) => { + const { t } = useTranslation(); + return ( + <Container {...props}> + <Img src={src} /> + <Text> + <span>{text || t('model_center.no_result')}</span> + + {to && <Link to={to}>{CTAText || t('model_center.btn_train_model')}</Link>} + </Text> + </Container> + ); +}; + +const NoData = () => { + const { t } = useTranslation(); + + return ( + <div className="arco-empty"> + <div className="arco-empty-wrapper"> + <div className="arco-empty-image"> + <svg + width="60" + height="60" + viewBox="0 0 60 60" + fill="none" + xmlns="http://www.w3.org/2000/svg" + > + <path + fillRule="evenodd" + clipRule="evenodd" + d="M45.9298 24.0005C46.2641 24.0005 46.5764 24.1676 46.7618 24.4458L53.1316 34.0005H6.86865L13.2385 24.4458C13.4239 24.1676 13.7362 24.0005 14.0705 24.0005H45.9298Z" + stroke="#C2C6CC" + strokeWidth="2" + /> + <path + fillRule="evenodd" + clipRule="evenodd" + d="M55 35.0006V47.0006C55 49.2097 53.2091 51.0006 51 51.0006H9C6.79086 51.0006 5 49.2097 5 47.0006V35.0006L24.083 35.0002C24.0284 35.3256 24 35.6597 24 36.0006C24 39.3143 26.6863 42.0006 30 42.0006C33.3137 42.0006 36 39.3143 36 36.0006C36 35.6597 35.9716 35.3256 35.917 35.0002L55 35.0006Z" + fill="#C2C6CC" + /> + <path + d="M32 10.0005C32 9.4482 31.5523 9.00049 31 9.00049C30.4477 9.00049 30 9.4482 30 10.0005V16.0005C30 16.5528 30.4477 17.0005 31 17.0005C31.5523 17.0005 32 16.5528 32 16.0005V10.0005Z" + fill="#C2C6CC" + /> + <path + d="M15.5857 11.1718C15.1952 10.7813 14.562 10.7813 14.1715 11.1718C13.7809 11.5623 13.7809 12.1955 14.1715 12.586L18.4141 16.8287C18.8046 17.2192 19.4378 17.2192 19.8283 16.8287C20.2188 16.4381 20.2188 15.805 19.8283 15.4144L15.5857 11.1718Z" + fill="#C2C6CC" + /> + <path + d="M46.4141 11.1718C46.8046 10.7813 47.4378 10.7813 47.8283 11.1718C48.2188 11.5623 48.2188 12.1955 47.8283 12.586L43.5857 16.8287C43.1951 17.2192 42.562 17.2192 42.1714 16.8287C41.7809 16.4381 41.7809 15.805 42.1714 15.4144L46.4141 11.1718Z" + fill="#C2C6CC" + /> + </svg> + </div> + <div className="arco-empty-description">{t('no_data')}</div> + </div> + </div> + ); +}; + +NoResult.ModelVersion = NoModelVersionResult; +NoResult.NoData = NoData; + export default NoResult; diff --git a/web_console_v2/client/src/components/PrettyMenu/index.tsx b/web_console_v2/client/src/components/PrettyMenu/index.tsx index fc57293bf..28d2f7ebc 100644 --- a/web_console_v2/client/src/components/PrettyMenu/index.tsx +++ b/web_console_v2/client/src/components/PrettyMenu/index.tsx @@ -1,4 +1,6 @@ -import { Menu } from 'antd'; +/* istanbul ignore file */ + +import { Menu } from '@arco-design/web-react'; import styled from 'styled-components'; const PrettyMenu = styled(Menu)` @@ -6,11 +8,11 @@ const PrettyMenu = styled(Menu)` min-height: 50px; max-height: 500px; padding: 8px 5px; - background-color: rgba(237, 238, 238, 0.76); + background-color: rgba(237, 238, 238, 0.8); 
border-radius: 5px; overflow-x: hidden; overflow-y: auto; - backdrop-filter: blur(3px); + backdrop-filter: blur(16px) saturate(180%); `; export const PrettyMenuItem = styled(Menu.Item)` diff --git a/web_console_v2/client/src/components/PrintLogs/index.tsx b/web_console_v2/client/src/components/PrintLogs/index.tsx index c6c3c76f4..8ebe0dbbd 100644 --- a/web_console_v2/client/src/components/PrintLogs/index.tsx +++ b/web_console_v2/client/src/components/PrintLogs/index.tsx @@ -1,20 +1,25 @@ +/* istanbul ignore file */ + import React, { FC, useCallback, useEffect, useRef, useState } from 'react'; import { QueryKey, useQuery } from 'react-query'; import styled from 'styled-components'; import { Refresh, Expand, Pause, CaretRight, ArrowDown } from 'components/IconPark'; import { convertToUnit } from 'shared/helpers'; -import { ScrollDown } from 'styles/animations'; +import { Suspense } from 'styles/animations'; import { useToggle } from 'react-use'; -import { Tooltip } from 'antd'; -import { last, debounce } from 'lodash'; +import { Tooltip, Message } from '@arco-design/web-react'; +import { last, debounce } from 'lodash-es'; import i18n from 'i18n'; import { ControlButton } from 'styles/elements'; +import { Download } from 'components/IconPark'; +import { saveBlob } from 'shared/helpers'; const Container = styled.div` position: relative; width: ${(props: Props) => convertToUnit(props.width || '100%')}; height: ${(props: Props) => convertToUnit(props.height || '100%')}; background-color: #292238; + border-radius: 4px; `; const Pre = styled.pre` width: 100%; @@ -48,7 +53,7 @@ const ScrollButton = styled(ControlButton)` } `; const AnimatedArrowDown = styled(ArrowDown)` - animation: ${ScrollDown} 1.2s linear infinite; + animation: ${Suspense} 1.2s linear infinite; `; type Props = { @@ -78,7 +83,7 @@ const PrintLogs: FC<Props> = (props) => { const [isFirstTimeResult, setFirstTime] = useState(true); const [lastestLog, setLastLog] = useState<string | undefined>(''); - const logsQuery = useQuery(queryKey, logsFetcher, { + const logsQuery = useQuery(queryKey, () => logsFetcher(), { refetchOnWindowFocus: true, retry: 2, refetchInterval: refetchInterval || 5000, @@ -141,7 +146,7 @@ const PrintLogs: FC<Props> = (props) => { <ControlsContainer> {fullscreenVisible && Boolean(onFullscreenClick) && ( <ControlButton onClick={onFullscreenClick}> - <Tooltip title={i18n.t('workflow.btn_full_screen')} placement="left"> + <Tooltip content={i18n.t('workflow.btn_full_screen')} position="left"> <Expand /> </Tooltip> </ControlButton> @@ -149,8 +154,8 @@ const PrintLogs: FC<Props> = (props) => { <ControlButton onClick={() => togglePaused()}> <Tooltip - placement="left" - title={ + position="left" + content={ paused ? i18n.t('workflow.btn_auto_refresh_logs') : i18n.t('workflow.btn_pause_auto_refresh') @@ -159,6 +164,11 @@ const PrintLogs: FC<Props> = (props) => { {paused ? 
<CaretRight /> : <Pause />} </Tooltip> </ControlButton> + <ControlButton onClick={() => onDownload()}> + <Tooltip position="left" content={i18n.t('workflow.action_download_log')}> + <Download /> + </Tooltip> + </ControlButton> </ControlsContainer> {scroll2ButtVisible && ( @@ -195,6 +205,19 @@ const PrintLogs: FC<Props> = (props) => { setScroll2Butt(false); } } + async function onDownload() { + try { + const data = await logsFetcher(5000); + if (data.data) { + const blob = new Blob([JSON.stringify(data.data, null, 2)], { + type: 'application/json', + }); + saveBlob(blob, `log.json`); + } + } catch (error) { + Message.error(error.message); + } + } }; export default PrintLogs; diff --git a/web_console_v2/client/src/components/ProgressWithText/index.module.less b/web_console_v2/client/src/components/ProgressWithText/index.module.less new file mode 100644 index 000000000..d2e64acd4 --- /dev/null +++ b/web_console_v2/client/src/components/ProgressWithText/index.module.less @@ -0,0 +1,13 @@ +.progress_container{ + font-size: 12px; + line-height: 22px; + color: rgb(var(--gray-7)); + .progress_name{ + display: block; + margin-bottom: -10px; + font-weight: 400; + font-size: 12px; + line-height: 20px; + color: #1D2129; + } +} diff --git a/web_console_v2/client/src/components/ProgressWithText/index.tsx b/web_console_v2/client/src/components/ProgressWithText/index.tsx new file mode 100644 index 000000000..79ffd7e1d --- /dev/null +++ b/web_console_v2/client/src/components/ProgressWithText/index.tsx @@ -0,0 +1,33 @@ +import React, { ReactElement } from 'react'; +import styles from './index.module.less'; +import { Progress, ProgressProps, Tooltip, TooltipProps } from '@arco-design/web-react'; +interface Props extends ProgressProps { + statusText: string; + toolTipPosition?: TooltipProps['position']; + toolTipContent?: TooltipProps['content']; +} +function ProgressWithText({ + style, + className, + percent, + status, + statusText, + toolTipPosition = 'top', + toolTipContent, + ...props +}: Props): ReactElement { + return ( + <Tooltip position={toolTipPosition} content={toolTipContent}> + <div className={`${styles.progress_container} ${className}`} style={style}> + <span className={styles.progress_name}>{statusText ?? '-'}</span> + <Progress + percent={percent ?? 100} + status={status ?? 
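The new `onDownload` in `PrintLogs` above refetches the logs (passing 5000, presumably a max-lines hint), serializes them to a JSON Blob, and hands the Blob to `saveBlob` from `shared/helpers`. That helper is not shown in this diff; a common anchor-click fallback with the same shape would look like this (a sketch, not the repo's implementation):

```ts
function saveBlobFallback(blob: Blob, fileName: string) {
  const url = URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  a.download = fileName;
  a.click();
  URL.revokeObjectURL(url); // release the object URL once the click is dispatched
}

const lines = ['[INFO] worker started', '[INFO] step 100 done'];
const blob = new Blob([JSON.stringify(lines, null, 2)], { type: 'application/json' });
saveBlobFallback(blob, 'log.json');
```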
'normal'} + showText={false} + trailColor="var(--color-primary-light-1)" + /> + </div> + </Tooltip> + ); +} +export default ProgressWithText; diff --git a/web_console_v2/client/src/components/PropList/index.tsx b/web_console_v2/client/src/components/PropList/index.tsx new file mode 100644 index 000000000..ee784f198 --- /dev/null +++ b/web_console_v2/client/src/components/PropList/index.tsx @@ -0,0 +1,62 @@ +/* istanbul ignore file */ + +import React, { FC, ReactNode } from 'react'; +import styled from 'styled-components'; +import { Grid } from '@arco-design/web-react'; +import { Label, LabelStrong } from 'styles/elements'; +import { Copy } from 'components/IconPark'; +import ClickToCopy from 'components/ClickToCopy'; + +const Row = Grid.Row; +const Col = Grid.Col; + +export interface Item { + key: string; + value: ReactNode; + isCanCopy?: boolean; + onClick?: () => void; +} +export interface Props { + leftSpan?: number; + rightSpan?: number; + list: Item[]; +} + +const StyledRow = styled(Row)` + margin-top: 16px; +`; +const StyledCopyIcon = styled(Copy)` + margin-left: 20px; + font-size: 14px; + &:hover { + color: #1664ff; + } +`; + +const PropList: FC<Props> = ({ list, leftSpan = 4, rightSpan = 20 }) => { + return ( + <> + {list.map((item) => { + return ( + <StyledRow> + <Col span={leftSpan}> + <Label>{item.key}</Label> + </Col> + <Col span={rightSpan}> + {item.isCanCopy ? ( + <ClickToCopy text={String(item.value)}> + <LabelStrong onClick={item.onClick}> + {item.value} <StyledCopyIcon /> + </LabelStrong> + </ClickToCopy> + ) : ( + <LabelStrong onClick={item.onClick}>{item.value}</LabelStrong> + )} + </Col> + </StyledRow> + ); + })} + </> + ); +}; +export default PropList; diff --git a/web_console_v2/client/src/components/PropertyList/index.tsx b/web_console_v2/client/src/components/PropertyList/index.tsx index b253deb30..da7db083f 100644 --- a/web_console_v2/client/src/components/PropertyList/index.tsx +++ b/web_console_v2/client/src/components/PropertyList/index.tsx @@ -1,37 +1,60 @@ -import React, { FC } from 'react'; +/* istanbul ignore file */ + +import React, { CSSProperties, FC } from 'react'; import styled from 'styled-components'; -import { Col } from 'antd'; import { convertToUnit } from 'shared/helpers'; import { useToggle } from 'react-use'; import { Down } from 'components/IconPark'; import { MixinCommonTransition } from 'styles/mixins'; +import { VariableAccessMode } from 'typings/variable'; +import { Tooltip } from '@arco-design/web-react'; +import VariablePermission from 'components/VariblePermission'; +import i18n from 'i18n'; +import { CONSTANTS } from 'shared/constants'; -const Container = styled.dl` +const Container = styled.dl<{ style?: CSSProperties }>` + --propMargin: 8px; position: relative; - display: flex; - flex-wrap: wrap; - margin: 15px 0; - padding: 7px 16px; + display: grid; + margin: 12px 0; + padding: 20px 20px 10px 20px; border-radius: 2px; - background-color: var(--gray1); + background-color: rgb(var(--gray-1)); `; const Prop = styled.dd` + position: relative; display: flex; - align-items: flex-start; - margin-bottom: 3px; + padding: 0 10px; + padding-left: 18px; + margin-bottom: var(--propMargin); font-size: 13px; - line-height: var(--lineHeight, 36px); - word-break: break-all; + line-height: 1.3; + color: rgb(var(--gray-10)); + + & > span { + word-break: break-all; + overflow: hidden; + } &::before { - display: inline-block; min-width: var(--labelWidth, 'auto'); margin-right: 5px; - content: attr(data-label) ': '; + content: attr(data-label); 
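`PropList` above lays each entry out as a label/value row and, when `isCanCopy` is set, wraps the value in `ClickToCopy` with a copy icon. An illustrative call site (keys and values invented):

```tsx
import React from 'react';
import PropList from 'components/PropList';

function DatasetSummary() {
  return (
    <PropList
      leftSpan={4}
      rightSpan={20}
      list={[
        { key: '数据集名称', value: 'demo-dataset' },
        { key: '数据集 ID', value: 'u1234567890', isCanCopy: true },
      ]}
    />
  );
}
```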
flex-shrink: 0; color: var(--textColorSecondary); } `; +const Content = styled.span` + white-space: pre-wrap; +`; + +const PermissionIndicatorContainer = styled.div` + display: inline-block; + position: absolute; + left: 0; + top: 1px; +`; + const CollapseButton = styled.div` ${MixinCommonTransition('background-color')} position: absolute; @@ -46,10 +69,10 @@ const CollapseButton = styled.div` padding: 2px 0 1px; border-radius: 50%; cursor: pointer; - background-color: var(--gray1); + background-color: rgb(var(--gray-1)); &:hover { - background-color: var(--darkGray10); + background-color: rgb(var(--gray-3)); } > .anticon { @@ -67,24 +90,77 @@ const CollapseButton = styled.div` } `; +type PropertyItem = { + /** Display label */ + label: string; + /** Display value */ + value: React.ReactNode; + /** Is Hidden */ + hidden?: boolean; + /** AccessMode */ + accessMode?: VariableAccessMode; +}; + type Props = { - properties: { - label: string; - value: any; - hidden?: boolean; - }[]; - cols?: 1 | 2 | 3 | 4 | 6; - initialVisibleRows?: number; // NOTE: should not <= 0 + /** DataSource */ + properties: Array<PropertyItem>; + /** How many cols in one row */ + cols?: number; + colProportions?: number[]; + /** + * How many rows can be visible when first render, other rows are folded. + * + * NOTE: should not <= 0 + */ + initialVisibleRows?: number; + /** + * Label min-width pre item + */ labelWidth?: number; + /** + * min-width pre item + */ + minWidth?: number; + /** + * line-height pre item + */ lineHeight?: number; + /** + * Vertical align + */ + align?: 'stretch' | 'start' | 'center' | 'end'; + /** + * className + */ + className?: string; }; -const PropertyList: FC<Props> = ({ +export const variableAccessModeToComponentMap: Record<VariableAccessMode, React.FC> = { + [VariableAccessMode.PEER_READABLE]: VariablePermission.Readable, + [VariableAccessMode.PEER_WRITABLE]: VariablePermission.Writable, + [VariableAccessMode.PRIVATE]: VariablePermission.Private, + [VariableAccessMode.UNSPECIFIED]: VariablePermission.Private, +}; + +export const variableAccessModeToDescMap: Record<VariableAccessMode, string> = { + [VariableAccessMode.PEER_READABLE]: i18n.t('workflow.var_auth_read', { prefix: '对侧' }), + [VariableAccessMode.PEER_WRITABLE]: i18n.t('workflow.var_auth_write', { prefix: '对侧' }), + [VariableAccessMode.PRIVATE]: i18n.t('workflow.var_auth_private', { prefix: '对侧' }), + [VariableAccessMode.UNSPECIFIED]: i18n.t('workflow.var_auth_private', { prefix: '对侧' }), +}; + +const PropertyList: FC<Props> & { + ModelCenter: React.FC<NewProps>; +} = ({ properties, cols = 2, + colProportions, labelWidth, initialVisibleRows, lineHeight = 36, + minWidth = 100, + align = 'stretch', + className, ...props }) => { // FIXME: remove next-line after basic_envs been remove @@ -97,26 +173,42 @@ const PropertyList: FC<Props> = ({ ); const propsToDisplay = collapsed ? properties.slice(0, cols * initialVisibleRows!) : properties; + const containerStyle = { + gridTemplateColumns: (colProportions || Array(cols).fill(1)).map((p) => `${p}fr`).join(' '), + }; return ( - <Container {...props}> + <Container {...props} style={containerStyle} className={className || ''}> {propsToDisplay .filter((item) => !item.hidden) .map((item, index) => { + const PermissionIndicator = item.accessMode + ? variableAccessModeToComponentMap[item.accessMode] + : null; + const title = item.accessMode ? 
variableAccessModeToDescMap[item.accessMode] : ''; return ( - <Col span={24 / cols} key={item.label + index}> - <Prop - data-label={item.label} - style={ - { - '--labelWidth': convertToUnit(labelWidth || 'auto'), - '--lineHeight': convertToUnit(lineHeight) || '', - } as any - } - > - {item.value || '-'} - </Prop> - </Col> + <Prop + key={item.label + index} + data-label={item.label} + style={ + { + '--labelWidth': convertToUnit(labelWidth || 'auto'), + '--lineHeight': convertToUnit(lineHeight) || '', + flexBasis: String(100 / cols) + '%', + minWidth: convertToUnit(minWidth), + alignItems: align, + } as CSSProperties + } + > + {PermissionIndicator && ( + <PermissionIndicatorContainer> + <Tooltip content={title}> + <PermissionIndicator /> + </Tooltip> + </PermissionIndicatorContainer> + )} + <Content>{item.value || CONSTANTS.EMPTY_PLACEHOLDER}</Content> + </Prop> ); })} {possibleToCollasped && ( @@ -128,4 +220,56 @@ const PropertyList: FC<Props> = ({ ); }; +const NewContainer = styled.dl` + position: relative; + display: flex; + flex-wrap: wrap; + margin: 15px 0; + padding: 0 20px; + border-radius: 2px; + background-color: rgb(var(--gray-1)); +`; +const NewProp = styled(Prop)` + margin-right: 80px; + line-height: var(--lineHeight, 36px); + &::before { + margin-right: 12px; + } +`; +const Triangle = styled.div` + position: absolute; + width: 12px; + height: 12px; + left: 16px; + top: -4px; + transform: rotate(45deg); + background: rgb(var(--gray-1)); +`; + +type NewProps = { + properties: { + label: string; + value: any; + hidden?: boolean; + }[]; +}; + +const NewPropertyList: FC<NewProps> = ({ properties, ...props }) => { + return ( + <NewContainer {...props}> + {properties + .filter((item) => !item.hidden) + .map((item, index) => { + return ( + <NewProp key={item.label + index} data-label={item.label}> + <Content>{item.value || CONSTANTS.EMPTY_PLACEHOLDER}</Content> + </NewProp> + ); + })} + <Triangle /> + </NewContainer> + ); +}; +PropertyList.ModelCenter = NewPropertyList; + export default PropertyList; diff --git a/web_console_v2/client/src/components/ReadFile/index.module.less b/web_console_v2/client/src/components/ReadFile/index.module.less new file mode 100644 index 000000000..0d72cab4c --- /dev/null +++ b/web_console_v2/client/src/components/ReadFile/index.module.less @@ -0,0 +1,90 @@ +.read_file_container{ + position: relative; + min-height: 32px; + border-radius: 2px; + background-color: var(--color-fill-2); +} + +.read_file{ + background-color: var(--color-fill-2); + position: absolute; + top: 0; + z-index: 2; + display: flex; + height: 32px; + width: 100%; + padding-left: 16px; + padding-right: 12px; + align-items: center; + opacity: 0; + pointer-events: none; + transition: opacity 0.4s cubic-bezier(0.4,0,0.2,1); + :global{ + .filename { + padding-left: 10px; + flex: 1; + } + + .anticon-check-circle { + color: var(--errorColor); + } + } +} + +.read_file_upload{ + padding: 0; +} + +.read_file_without_upload{ + transition: display 0.4s cubic-bezier(0.4,0,0.2,1),opacity 0.4s cubic-bezier(0.4,0,0.2,1); + max-height: 400px; + will-change: display; +} + +.read_file_content_inner{ + padding: 20px 0 40px; + display: flex; + justify-content: center; + flex-direction: column; + align-items: center; +} + +.read_file_upload_hint{ + display: block; + font-size: 12px; + line-height: 18px; + color: var(--textColorSecondary); +} + +.read_file_upload_placeholder{ + margin-bottom: 4px; + line-height: 24px; + font-size: 16px; +} + +.read_file_plus_icon{ + font-size: 16px; +} + +.hidden { + display: 
none; +} + +.visible { + opacity: 1; + pointer-events: initial; + + > .anticon-check-circle { + animation: zoomIn 0.3s cubic-bezier(0.12, 0.4, 0.29, 1.46); + } +} + +.delete_file_btn{ + position: absolute; + right: -20px; + cursor: pointer; + + &:hover { + color: var(--primaryColor); + } +} diff --git a/web_console_v2/client/src/components/ReadFile/index.tsx b/web_console_v2/client/src/components/ReadFile/index.tsx index 3ef9b57b7..785378efd 100644 --- a/web_console_v2/client/src/components/ReadFile/index.tsx +++ b/web_console_v2/client/src/components/ReadFile/index.tsx @@ -1,91 +1,12 @@ +/* istanbul ignore file */ + import React, { useState, FC, useEffect } from 'react'; -import styled from 'styled-components'; -import { message, Upload } from 'antd'; -import { useTranslation } from 'react-i18next'; -import classNames from 'classnames'; -import { MixinCommonTransition } from 'styles/mixins'; +import { Message, Upload } from '@arco-design/web-react'; import { ReactComponent as FileIcon } from 'assets/images/file.svg'; -import { RcFile } from 'antd/lib/upload'; -import { isNil } from 'lodash'; +import { UploadItem } from '@arco-design/web-react/es/Upload'; +import { isNil } from 'lodash-es'; import { PlusCircle, Delete } from 'components/IconPark'; - -const Container = styled.div` - position: relative; - min-height: 32px; - border-radius: 2px; -`; -const WithoutFile = styled.div` - ${MixinCommonTransition(['max-height', 'opacity'])}; - - max-height: 400px; - will-change: max-height; - - &.hidden { - max-height: 0; - } -`; -const File = styled.div` - ${MixinCommonTransition(['opacity'])}; - - position: absolute; - top: 0; - z-index: 2; - display: flex; - height: 32px; - width: 100%; - padding-left: 16px; - padding-right: 12px; - align-items: center; - opacity: 0; - pointer-events: none; - - &.visible { - opacity: 1; - pointer-events: initial; - - > .anticon-check-circle { - animation: zoomIn 0.3s cubic-bezier(0.12, 0.4, 0.29, 1.46); - } - } - - > .filename { - padding-left: 10px; - flex: 1; - } - - > .anticon-check-circle { - color: var(--errorColor); - } -`; -const DeleteFileBtn = styled.div` - position: absolute; - right: -20px; - cursor: pointer; - - &:hover { - color: var(--primaryColor); - } -`; -const ContentInner = styled.div` - padding: 20px 0 40px; -`; -const PlusIcon = styled.p` - font-size: 16px; -`; -const UploadPlaceholder = styled.div` - margin-bottom: 4px; - line-height: 24px; - font-size: 16px; -`; -const UploadHint = styled.small` - display: block; - font-size: 12px; - line-height: 18px; - color: var(--textColorSecondary); -`; -const DragUpload = styled(Upload.Dragger)` - padding: 0; -`; +import styles from './index.module.less'; type Props = React.ComponentProps<typeof Upload> & { reader: (file: File) => Promise<any>; @@ -96,9 +17,6 @@ type Props = React.ComponentProps<typeof Upload> & { }; const ReadFile: FC<Props> = ({ maxSize, value, reader, onRemoveFile, onChange, ...props }) => { - const { beforeUpload } = props; - - const { t } = useTranslation(); const [file, setFile] = useState<File>(); const hasValue = Boolean(value); @@ -108,58 +26,60 @@ const ReadFile: FC<Props> = ({ maxSize, value, reader, onRemoveFile, onChange, . 
setFile(null as any); } }, [value]); - const uploadProps = { ...props, disabled: hasValue || props.disabled, showUploadList: false, onChange: onFileChange, - beforeUpload: onFileInput, }; return ( - <Container> - <File className={classNames({ visible: hasValue })}> + <div className={styles.read_file_container}> + <div className={`${styles.read_file} ${hasValue && styles.visible}`}> <FileIcon /> <span className="filename">{file?.name}</span> - <DeleteFileBtn onClick={onDeleteClick}> + <div className={styles.delete_file_btn} onClick={onDeleteClick}> <Delete /> - </DeleteFileBtn> - </File> + </div> + </div> - <DragUpload {...(uploadProps as any)}> - <WithoutFile className={classNames({ hidden: hasValue })}> - <ContentInner> - <PlusIcon> + <Upload className={styles.read_file_upload} {...(uploadProps as any)} drag={true}> + <div className={`${styles.read_file_without_upload} ${hasValue && styles.hidden}`}> + <div className={styles.read_file_content_inner}> + <p className={styles.read_file_plus_icon}> <PlusCircle /> - </PlusIcon> - - <UploadPlaceholder>{t('upload.placeholder')}</UploadPlaceholder> - - <UploadHint> - {t('upload.hint', { fileTypes: props.accept, maxSize: maxSize })} - </UploadHint> - </ContentInner> - </WithoutFile> - </DragUpload> - </Container> + </p> + + <div className={styles.read_file_upload_placeholder}>点击或拖拽文件到此处上传</div> + + <small className={styles.read_file_upload_hint}> + {maxSize || maxSize === 0 + ? `请上传${props.accept}格式文件,大小不超过${maxSize}MB` + : `请上传${props.accept}格式文件`} + </small> + </div> + </div> + </Upload> + </div> ); - function onFileChange({ file, event }: any) { + function onFileChange(fileList: UploadItem[], { originFile: file }: UploadItem) { + if (!file) return; + if ((maxSize || maxSize === 0) && file.size > maxSize * 1024 * 1024) { + Message.warning(`大小不超过${maxSize}MB!`); + return; + } + return reader(file) .then((result) => { onChange && onChange(result, file); setFile(file); }) .catch((error) => { - message.error(error.message); + Message.error(error.message); }); } - function onFileInput(file: RcFile, fileList: RcFile[]) { - beforeUpload && beforeUpload(file, fileList); - return false; - } function onDeleteClick() { onRemoveFile && onRemoveFile(file); onChange && onChange(null as any); diff --git a/web_console_v2/client/src/components/ResourceConfig/index.tsx b/web_console_v2/client/src/components/ResourceConfig/index.tsx new file mode 100644 index 000000000..4665074fd --- /dev/null +++ b/web_console_v2/client/src/components/ResourceConfig/index.tsx @@ -0,0 +1,550 @@ +/* istanbul ignore file */ + +import React, { FC, useEffect, useRef, useState } from 'react'; +import styled from 'styled-components'; +import i18n from 'i18n'; + +import { convertCpuCoreToM } from 'shared/helpers'; + +import { Form, Collapse } from '@arco-design/web-react'; +import InputGroup, { TColumn } from 'components/InputGroup'; +import BlockRadio from 'components/_base/BlockRadio'; + +import { ResourceTemplateType, AlgorithmType } from 'typings/modelCenter'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; + +const StyledCollapse = styled(Collapse)` + overflow: initial; + .arco-collapse-item-header { + position: relative; + left: -14px; + border-width: 0; + &-title { + font-weight: 400 !important; + font-size: 12px; + } + } + .arco-collapse-item-content { + background-color: transparent; + } + .arco-collapse-item-content-box { + padding: 0; + } +`; + +const RESOURCE_HIGH_TREE = { + master_replicas: 1, + master_cpu: 0, + master_mem: 0, + ps_replicas: 1, + ps_cpu: 0, + 
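`onFileChange` in `ReadFile` above rejects oversized files before invoking the reader; `maxSize` is expressed in MB while `File.size` is in bytes, hence the `* 1024 * 1024` conversion, and a `maxSize` of `0` is honored as a real limit. A standalone sketch of the guard:

```ts
function exceedsMaxSize(file: { size: number }, maxSize?: number): boolean {
  if (maxSize === undefined) return false; // no limit configured
  return file.size > maxSize * 1024 * 1024; // MB -> bytes
}

exceedsMaxSize({ size: 3 * 1024 * 1024 }, 2); // => true, 3MB over a 2MB cap
exceedsMaxSize({ size: 1024 }, 0);            // => true, a 0MB cap rejects everything
```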
ps_mem: 0, + worker_replicas: 1, + worker_cpu: 16, + worker_mem: 64, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; +const RESOURCE_HIGH_NN = { + master_replicas: 1, + master_cpu: 2, + master_mem: 32, + ps_replicas: 1, + ps_cpu: 8, + ps_mem: 32, + worker_replicas: 1, + worker_cpu: 8, + worker_mem: 32, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; +const RESOURCE_MEDIUM_TREE = { + master_replicas: 1, + master_cpu: 0, + master_mem: 0, + ps_replicas: 1, + ps_cpu: 0, + ps_mem: 0, + worker_replicas: 1, + worker_cpu: 8, + worker_mem: 32, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; +const RESOURCE_MEDIUM_NN = { + master_replicas: 1, + master_cpu: 1, + master_mem: 16, + ps_replicas: 1, + ps_cpu: 4, + ps_mem: 16, + worker_replicas: 1, + worker_cpu: 4, + worker_mem: 16, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; +const RESOURCE_LOW_TREE = { + master_replicas: 1, + master_cpu: 0, + master_mem: 0, + ps_replicas: 1, + ps_cpu: 0, + ps_mem: 0, + worker_replicas: 1, + worker_cpu: 4, + worker_mem: 8, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; +const RESOURCE_LOW_NN = { + master_replicas: 1, + master_cpu: 1, + master_mem: 4, + ps_replicas: 1, + ps_cpu: 2, + ps_mem: 4, + worker_replicas: 1, + worker_cpu: 2, + worker_mem: 4, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; +const RESOURCE_CUSTOM_NN = { + master_replicas: 1, + master_cpu: 1, + master_mem: 4, + ps_replicas: 1, + ps_cpu: 2, + ps_mem: 4, + worker_replicas: 1, + worker_cpu: 4, + worker_mem: 8, + worker_roles: 'worker', + ps_roles: 'ps', + master_roles: 'master', +}; + +enum TResourceFieldType { + MASTER = 'master', + PS = 'ps', + WORKER = 'worker', +} +type TResource = typeof RESOURCE_LOW_NN; + +const roleFieldList = [TResourceFieldType.MASTER, TResourceFieldType.PS, TResourceFieldType.WORKER]; +const resourceFieldList = ['replicas', 'cpu', 'mem']; + +export const classifyResourceWithTemplateType = (resource: Partial<TResource>) => { + const ret: Record<string, [Partial<Record<keyof TResource, number | string>>]> = {}; + for (const key in resource) { + const [type] = key.split('_'); + if (!ret[type]) { + ret[type] = [{}]; + } + ret[type][0][key as keyof TResource] = resource[key as keyof TResource]; + } + return ret; +}; + +export const unwrapResourceFromTemplateType = (payload: Record<string, any>) => { + const copied = { ...payload }; + for (const key in copied) { + if ( + [TResourceFieldType.MASTER, TResourceFieldType.PS, TResourceFieldType.WORKER].includes( + key as TResourceFieldType, + ) === false + ) { + continue; + } + + const [resource] = copied[key]; + + for (const k in resource) { + const [, resType] = k.split('_'); + let unit = ''; + let value = resource[k]; + + switch (resType as TResourceType) { + case 'cpu': + unit = 'm'; + value = convertCpuCoreToM(value, false); + break; + case 'mem': + unit = 'Gi'; + break; + } + copied[k] = value + unit; + } + delete copied[key]; + } + return copied; +}; + +export const wrapResource = (formValue: Record<string, any>) => { + const classifiedFormValue = classifyResourceWithTemplateType(formValue); + + const tempObj: any = {}; + + roleFieldList.forEach((role) => { + if (!classifiedFormValue[role] || !classifiedFormValue[role][0]) return; + const tempFormValue = classifiedFormValue[role][0] as any; + + Object.keys(tempFormValue).forEach((key) => { + const [role, field] = key.split('_'); + if (resourceFieldList.includes(field)) { + 
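/* A sketch of the flat <-> grouped shapes these helpers translate between
   (values illustrative; assumes convertCpuCoreToM(8, false) === 8000):

   classifyResourceWithTemplateType({ worker_cpu: 8, worker_mem: 32 })
   // => { worker: [{ worker_cpu: 8, worker_mem: 32 }] }

   unwrapResourceFromTemplateType({ worker: [{ worker_cpu: 8, worker_mem: 32 }] })
   // => { worker_cpu: '8000m', worker_mem: '32Gi' }

   wrapResource (this function) reverses the unit conversion for form display:
   cpu strings in m are divided back to Core, mem strings are parsed back to Gi numbers. */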
if (!tempObj[role]) { + tempObj[role] = [ + { + [`${role}_roles`]: role, + }, + ]; + } + + if (field === 'cpu') { + // convert cpu unit, m to Core + tempObj[role][0][key] = parseFloat(tempFormValue[key]) / 1000; + } else { + tempObj[role][0][key] = parseInt(tempFormValue[key]); + } + } + }); + }); + + return { ...formValue, ...tempObj }; +}; + +export const resourceTemplateParamsMap = { + [ResourceTemplateType.HIGH]: { + [EnumAlgorithmProjectType.TREE_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_HIGH_TREE), + [EnumAlgorithmProjectType.TREE_HORIZONTAL]: classifyResourceWithTemplateType( + RESOURCE_HIGH_TREE, + ), + [EnumAlgorithmProjectType.NN_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_HIGH_NN), + [EnumAlgorithmProjectType.NN_HORIZONTAL]: classifyResourceWithTemplateType(RESOURCE_HIGH_NN), + }, + [ResourceTemplateType.MEDIUM]: { + [EnumAlgorithmProjectType.TREE_VERTICAL]: classifyResourceWithTemplateType( + RESOURCE_MEDIUM_TREE, + ), + [EnumAlgorithmProjectType.TREE_HORIZONTAL]: classifyResourceWithTemplateType( + RESOURCE_MEDIUM_TREE, + ), + [EnumAlgorithmProjectType.NN_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_MEDIUM_NN), + [EnumAlgorithmProjectType.NN_HORIZONTAL]: classifyResourceWithTemplateType(RESOURCE_MEDIUM_NN), + }, + [ResourceTemplateType.LOW]: { + [EnumAlgorithmProjectType.TREE_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_LOW_TREE), + [EnumAlgorithmProjectType.TREE_HORIZONTAL]: classifyResourceWithTemplateType(RESOURCE_LOW_TREE), + [EnumAlgorithmProjectType.NN_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_LOW_NN), + [EnumAlgorithmProjectType.NN_HORIZONTAL]: classifyResourceWithTemplateType(RESOURCE_LOW_NN), + }, + [ResourceTemplateType.CUSTOM]: { + [EnumAlgorithmProjectType.TREE_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_LOW_TREE), + [EnumAlgorithmProjectType.TREE_HORIZONTAL]: classifyResourceWithTemplateType(RESOURCE_LOW_TREE), + [EnumAlgorithmProjectType.NN_VERTICAL]: classifyResourceWithTemplateType(RESOURCE_CUSTOM_NN), + [EnumAlgorithmProjectType.NN_HORIZONTAL]: classifyResourceWithTemplateType(RESOURCE_CUSTOM_NN), + }, +}; + +export const resourceTemplateTypeOptions = [ + { + value: ResourceTemplateType.HIGH, + label: i18n.t('model_center.label_radio_high'), + }, + { + value: ResourceTemplateType.MEDIUM, + label: i18n.t('model_center.label_radio_medium'), + }, + { + value: ResourceTemplateType.LOW, + label: i18n.t('model_center.label_radio_low'), + }, + { + value: ResourceTemplateType.CUSTOM, + label: i18n.t('model_center.label_radio_custom'), + }, +]; + +type TResourceType = 'cpu' | 'mem' | 'replicas' | undefined; +const getAlgorithmColumns = ( + roleType: TResourceFieldType, + columnTypes: [TResourceType, TResourceType, TResourceType], + disabled = false, + localDisabledList: string[] = [], +): TColumn[] => { + const type = 'INPUT_NUMBER'; + const columnsMap: Record<string, TColumn> = { + cpu: { + type, + dataIndex: `${roleType}_cpu`, + title: i18n.t('cpu'), + unitLabel: 'Core', + placeholder: i18n.t('placeholder_cpu'), + rules: [{ required: true, message: i18n.t('model_center.msg_required') }], + span: 0, + min: 0.1, + tooltip: i18n.t('tip_please_input_positive_number'), + precision: 1, + disabled: disabled || !!localDisabledList.find((item: string) => item === `${roleType}.cpu`), + }, + replicas: { + type, + dataIndex: `${roleType}_replicas`, + title: i18n.t('replicas'), + placeholder: i18n.t('placeholder_input'), + min: 1, + max: 100, + precision: 0, + rules: [ + { required: true, message: 
i18n.t('model_center.msg_required') }, + { min: 1, type: 'number' }, + { max: 100, type: 'number' }, + ], + span: 0, + tooltip: i18n.t('tip_replicas_range'), + mode: 'button', + disabled: + disabled || !!localDisabledList.find((item: string) => item === `${roleType}.replicas`), + }, + mem: { + type, + dataIndex: `${roleType}_mem`, + title: i18n.t('mem'), + unitLabel: 'Gi', + placeholder: i18n.t('placeholder_mem'), + rules: [{ required: true, message: i18n.t('model_center.msg_required') }], + span: 0, + min: 1, + tooltip: i18n.t('tip_please_input_positive_integer'), + disabled: disabled || !!localDisabledList.find((item: string) => item === `${roleType}.mem`), + }, + }; + + const ret: TColumn[] = [ + { + type: 'TEXT', + dataIndex: `${roleType}_roles`, + title: 'roles', + span: 0, + disabled: true, + }, + ]; + for (const type of columnTypes) { + if (type) { + ret.push(columnsMap[type]); + } + } + + return ret.map((col) => ({ + ...col, + span: Math.floor(24 / ret.length), + })); +}; + +export type MixedAlgorithmType = + | EnumAlgorithmProjectType.TREE_VERTICAL + | EnumAlgorithmProjectType.TREE_HORIZONTAL + | EnumAlgorithmProjectType.NN_VERTICAL + | EnumAlgorithmProjectType.NN_HORIZONTAL + | AlgorithmType.TREE + | AlgorithmType.NN; + +export type Value = { + resource_type: ResourceTemplateType | `${ResourceTemplateType}`; + + master_cpu?: string; + master_mem?: string; + master_replicas?: string; + + ps_cpu?: string; + ps_mem?: string; + ps_replicas?: string; + + worker_cpu?: string; + worker_mem?: string; + worker_replicas?: string; +}; + +export type Props = { + value?: Value; + onChange?: (value: Value) => void; + disabled?: boolean; + localDisabledList?: string[]; + defaultResourceType?: ResourceTemplateType | `${ResourceTemplateType}`; + algorithmType?: MixedAlgorithmType; + collapsedTitle?: string; + isIgnoreFirstRender?: boolean; + isTrustedCenter?: boolean; + collapsedOpen?: boolean; +}; + +const formLayout = { + labelCol: { + span: 0, + }, + wrapperCol: { + span: 24, + }, +}; + +export const ResourceConfig: FC<Props> = ({ + disabled: disabledFromProps = false, + localDisabledList = [], + defaultResourceType = ResourceTemplateType.LOW, + algorithmType = EnumAlgorithmProjectType.TREE_VERTICAL, + collapsedTitle = i18n.t('model_center.title_resource_config_detail'), + isIgnoreFirstRender = true, + isTrustedCenter = false, + collapsedOpen = true, + value, + onChange, +}) => { + const isControlled = typeof value === 'object' && value !== null; + const [form] = Form.useForm(); + + const [collapseActiveKey, setCollapseActiveKey] = useState<string[]>(collapsedOpen ? 
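/* ResourceConfig works controlled or uncontrolled: when `value` is an object it
   is controlled — the effects below sync the Form from `value` and only report
   template changes through onChange — otherwise it owns its own state, seeded
   from defaultResourceType. A minimal controlled usage sketch (state names
   illustrative):

   const [resource, setResource] = useState<Value>();
   <ResourceConfig
     algorithmType={EnumAlgorithmProjectType.NN_VERTICAL}
     value={resource}
     onChange={setResource}
   />
*/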
['1'] : []); // default open status controlled by 'collapsedOpen' + const [resourceType, setResourceType] = useState(() => { + if (isControlled) { + return value?.resource_type; + } + + return defaultResourceType || ResourceTemplateType.LOW; + }); + const isAlreadyClickResource = useRef(false); + + useEffect(() => { + if (isControlled) { + form.setFieldsValue(wrapResource({ ...value })); + } + }, [form, isControlled, value]); + + useEffect(() => { + if (resourceType && algorithmType) { + const innerValue = { + ...(resourceTemplateParamsMap[resourceType] as any)[algorithmType], + }; + const finaleValue = { + resource_type: resourceType, + ...unwrapResourceFromTemplateType(innerValue), + }; + if (!isControlled) { + form.setFieldsValue(innerValue); + onChange?.(finaleValue as any); + } + if (isControlled) { + // Ignore first render + if (isIgnoreFirstRender && isAlreadyClickResource.current) { + onChange?.(finaleValue as any); + } + if (!isIgnoreFirstRender) { + onChange?.(finaleValue as any); + } + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [form, resourceType, algorithmType, isControlled, isIgnoreFirstRender]); + + const disabled = resourceType !== ResourceTemplateType.CUSTOM || disabledFromProps; + + return ( + <Form + form={form} + {...formLayout} + initialValues={{ + resource_type: resourceType, + }} + onChange={(_, values) => { + const finaleValue = unwrapResourceFromTemplateType(values); + onChange?.(finaleValue as any); + }} + > + <Form.Item field="resource_type"> + <BlockRadio + isCenter={true} + options={resourceTemplateTypeOptions} + disabled={disabledFromProps} + onChange={(value: ResourceTemplateType) => { + isAlreadyClickResource.current = true; + setResourceType(value); + if (value === ResourceTemplateType.CUSTOM) { + setCollapseActiveKey(['1']); + } + }} + /> + </Form.Item> + + <StyledCollapse + activeKey={collapseActiveKey} + expandIconPosition="left" + onChange={onCollapseChange} + lazyload={false} + bordered={false} + > + <Collapse.Item header={collapsedTitle} name="1"> + {algorithmType === EnumAlgorithmProjectType.NN_VERTICAL ? ( + <> + <Form.Item field={TResourceFieldType.WORKER}> + <InputGroup + columns={getAlgorithmColumns( + TResourceFieldType.WORKER, + ['replicas', 'cpu', 'mem'], + disabled, + localDisabledList, + )} + disableAddAndDelete={true} + /> + </Form.Item> + <Form.Item field={TResourceFieldType.MASTER}> + <InputGroup + columns={getAlgorithmColumns( + TResourceFieldType.MASTER, + ['replicas', 'cpu', 'mem'], + disabled, + localDisabledList, + )} + disableAddAndDelete={true} + /> + </Form.Item> + <Form.Item field={TResourceFieldType.PS}> + <InputGroup + columns={getAlgorithmColumns( + TResourceFieldType.PS, + ['replicas', 'cpu', 'mem'], + disabled, + localDisabledList, + )} + disableAddAndDelete={true} + /> + </Form.Item> + </> + ) : ( + <Form.Item field={TResourceFieldType.WORKER}> + <InputGroup + columns={getAlgorithmColumns( + TResourceFieldType.WORKER, + isTrustedCenter ? 
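/* Tree algorithms render a single worker row; the leading `undefined` below
   drops the replicas column, except in the trusted-center case which needs it.
   NN_VERTICAL (above) renders worker + master + ps rows instead. */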
['replicas', 'cpu', 'mem'] : [undefined, 'cpu', 'mem'], + disabled, + localDisabledList, + )} + disableAddAndDelete={true} + /> + </Form.Item> + )} + </Collapse.Item> + </StyledCollapse> + </Form> + ); + + function onCollapseChange(key: string, keys: string[]) { + setCollapseActiveKey(keys); + } +}; + +export default ResourceConfig; diff --git a/web_console_v2/client/src/components/ScheduledTaskSetter/index.tsx b/web_console_v2/client/src/components/ScheduledTaskSetter/index.tsx new file mode 100644 index 000000000..634f3427e --- /dev/null +++ b/web_console_v2/client/src/components/ScheduledTaskSetter/index.tsx @@ -0,0 +1,60 @@ +import React, { FC, useEffect } from 'react'; +import styled from 'styled-components'; +import { useToggle } from 'react-use'; +import CronTimePicker, { parseCron, PickerValue, toCron } from 'components/CronTimePicker'; +import { Switch } from '@arco-design/web-react'; +import i18n from 'i18n'; + +const SwitchContainer = styled.div` + margin-bottom: 16px; +`; + +type Props = { + value?: string; + onChange?: (value: string) => void; +}; + +const ScheduleTaskSetter: FC<Props> = (prop: Props) => { + const { value, onChange } = prop; + const isEnabled = !!value; + const [inputVisible, toggleVisible] = useToggle(isEnabled); + + useEffect(() => { + toggleVisible(isEnabled); + }, [isEnabled, toggleVisible]); + + const onSwitchChange = (checked: boolean) => { + toggleVisible(checked); + onValueChange(checked ? 'null' : ''); + }; + const onValueChange = (val: string) => { + onChange && onChange(val); + }; + + return ( + <> + <SwitchContainer> + <Switch checked={inputVisible} onChange={onSwitchChange} /> + </SwitchContainer> + + {inputVisible && ( + <CronTimePicker + value={parseCron(value || '')} + onChange={(value: PickerValue) => { + onValueChange(toCron(value)); + }} + /> + )} + </> + ); +}; +export function scheduleTaskValidator(val: any, cb: (error?: string) => void) { + // !val means switch be set to close status --> validate to pass + // val !== 'null' means has chosen correct time and validate to pass + if (!val || val !== 'null') { + return cb(); + } + return cb(i18n.t('model_center.msg_time_required')); +} + +export default ScheduleTaskSetter; diff --git a/web_console_v2/client/src/components/SharedPageLayout/index.tsx b/web_console_v2/client/src/components/SharedPageLayout/index.tsx index fa99cd188..7006b1346 100644 --- a/web_console_v2/client/src/components/SharedPageLayout/index.tsx +++ b/web_console_v2/client/src/components/SharedPageLayout/index.tsx @@ -1,13 +1,30 @@ -import React, { FC } from 'react'; +/* istanbul ignore file */ + +import React, { FC, useEffect, useState } from 'react'; import styled from 'styled-components'; -import { Tooltip, Card } from 'antd'; +import { useRecoilState, useRecoilValue } from 'recoil'; +import i18n from 'i18n'; + +import { appFlag, appState } from 'stores/app'; + +import { Tooltip, Card, Button, Message, Space } from '@arco-design/web-react'; import GridRow from 'components/_base/GridRow'; import { QuestionCircle } from 'components/IconPark'; -import defaultTheme from 'styles/_theme'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { FlagKey } from 'typings/flag'; + +export const PAGE_SECTION_PADDING = 20; +export const PAGE_FIXED_BOTTOM_LAYOUT_HEIGHT = 64; +export const PAGE_HEADER_Z_INDEX = 6; -const PAGE_SECTION_PADDING = 20; +const Container = styled.div<{ isHideHeader?: boolean }>` + ${(props) => + props.isHideHeader && + ` + --pageHeaderHeight:0px; + padding-top: var(--contentOuterPadding) !important; 
+ `}; -const Container = styled.div` position: relative; display: flex; flex-direction: column; @@ -15,15 +32,16 @@ const Container = styled.div` padding: 0 var(--contentOuterPadding); `; const PageHeader = styled.header` + --horizontalPadding: 20px; position: sticky; top: 0; - z-index: 2; + z-index: ${PAGE_HEADER_Z_INDEX}; display: flex; align-items: center; - padding: 0 20px; + padding: 0 var(--horizontalPadding); min-height: var(--pageHeaderHeight); max-height: var(--pageHeaderHeight); - margin: 0 -${defaultTheme.contentOuterPadding} var(--contentOuterPadding); + margin: 0 calc(0px - var(--contentOuterPadding)) var(--contentOuterPadding); /* For showing sidebar's border */ transform: translateX(1px); background-color: white; @@ -34,31 +52,72 @@ const PageTitle = styled.h2` font-size: 14px; line-height: 22px; `; +const PageCenterTitle = styled.span` + position: absolute; + left: 50%; + top: 50%; + transform: translate(-50%, -50%); + display: inline-block; + color: var(--textColorStrong); + font-size: 16px; + font-weight: 500; +`; +const PageRightTitle = styled.div` + position: absolute; + top: 50%; + right: var(--horizontalPadding); + transform: translateY(-50%); +`; const Tip = styled(Tooltip)` line-height: 0; `; -const PageSectionCard = styled(Card)` +export const PageSectionCard = styled(Card)<{ + $cardPadding?: number; + $isNestSpinFlexContainer?: boolean; +}>` position: relative; display: grid; grid-auto-flow: row; margin-bottom: 10px; - - > .ant-card-body { + font-size: 12px; + > .arco-card-body { display: flex; flex-direction: column; + /* --SidebarWidth comes from AppLayout element in views/index.tsx */ + width: calc(100vw - var(--contentOuterPadding) * 2 - var(--SidebarWidth)); + max-width: calc(100vw - var(--contentOuterPadding) * 2 - var(--SidebarWidth)); + min-width: calc(100vw - var(--contentOuterPadding) * 2 - var(--SidebarWidth)); min-height: calc( 100vh - var(--pageHeaderHeight) - var(--headerHeight) - var(--contentOuterPadding) * 2 ); background-color: inherit; - padding: ${PAGE_SECTION_PADDING}px; + padding: ${(props) => props.$cardPadding}px; + overflow-x: hidden; > *:not(:last-child) { margin-bottom: 18px; } + > [data-auto-fill] { + flex: 1; + } + &::before { content: none; } + + ${(props) => + props.$isNestSpinFlexContainer && + ` + > .arco-spin { + height: 100%; + .arco-spin-children { + display: flex; + flex-direction: column; + height: 100%; + } + } + `}; } .title-question { @@ -79,30 +138,200 @@ export const FormHeader = styled.h3` margin: -${PAGE_SECTION_PADDING}px -${PAGE_SECTION_PADDING}px 0; `; +const FixedBottomContainer = styled.div` + position: sticky; + bottom: 0; + display: flex; + align-items: center; + min-height: ${PAGE_FIXED_BOTTOM_LAYOUT_HEIGHT}px; + max-height: ${PAGE_FIXED_BOTTOM_LAYOUT_HEIGHT}px; + margin: var(--contentOuterPadding) calc(0px - var(--contentOuterPadding)) 0; + background-color: #fff; + padding: 0 var(--contentOuterPadding); + box-shadow: 0px -2px 4px rgba(26, 34, 51, 0.08); + border-radius: 2px; + + button { + margin-right: 12px; + } +`; + interface Props { - title: React.ReactNode; + /** + * Title on left + */ + title?: React.ReactNode; + /** + * Title on center + */ + centerTitle?: React.ReactNode; + /** + * Title on right + */ + rightTitle?: React.ReactNode; + /** + * Tooptip display text with <QuestionCircle/> icon + */ tip?: string; - children?: React.ReactNode; contentWrapByCard?: boolean; + cardPadding?: number; + /** + * Enable <Spin/> height:100% style + */ + isNestSpinFlexContainer?: boolean; + /** + * @deprecated is 
Hide siderbar + * + */ + removeSidebar?: boolean; + isHideHeader?: boolean; + /** Is show fixed bottom layout */ + isShowFixedBottomLayout?: boolean; + isNeedHelp?: boolean; + /** Render custom fixed bottom layout */ + renderFixedBottomLayout?: () => React.ReactNode; + /** Fixed bottom layout ok text */ + bottomOkText?: string; + /** Fixed bottom layout cancel text */ + bottomCancelText?: string; + /** Fixed bottom layout tip */ + bottomTip?: string; + /** Fixed bottom layout ok button loading */ + bottomOkButtonIsLoading?: boolean; + /** Fixed bottom layout ok button loading */ + bottomCancelButtonIsLoading?: boolean; + onBottomOkClick?: () => void | Promise<void>; + onBottomCancelClick?: () => void | Promise<void>; } -const SharedPageLayout: FC<Props> = ({ title, children, tip, contentWrapByCard = true }) => { +const SharedPageLayout: FC<Props> = ({ + title = '', + cardPadding = PAGE_SECTION_PADDING, + isNestSpinFlexContainer = false, + centerTitle, + removeSidebar = false, + children, + tip, + rightTitle, + contentWrapByCard = true, + isHideHeader = false, + isShowFixedBottomLayout = false, + isNeedHelp = true, + renderFixedBottomLayout, + bottomOkText = i18n.t('confirm'), + bottomCancelText = i18n.t('cancel'), + bottomTip, + bottomOkButtonIsLoading, + bottomCancelButtonIsLoading, + onBottomOkClick, + onBottomCancelClick, +}) => { + const appFlagValue = useRecoilValue(appFlag); + const [appStateValue, setAppState] = useRecoilState(appState); + const [okButtonIsLoading, setOkButtonIsLoading] = useState(false); + const [cancelButtonIsLoading, setCancelButtonIsLoading] = useState(false); + + useEffect(() => { + setAppState({ + ...appStateValue, + hideSidebar: Boolean(removeSidebar), + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [removeSidebar, setAppState]); + return ( - <Container> - <PageHeader> - <GridRow gap="10"> - <PageTitle className="title">{title}</PageTitle> - {tip && ( - <Tip title={tip} placement="rightBottom"> - <QuestionCircle /> - </Tip> + <Container id="share-page-layout" isHideHeader={isHideHeader}> + {!isHideHeader && ( + <PageHeader> + <GridRow gap="10"> + <PageTitle className="title">{title}</PageTitle> + {tip && ( + <Tip content={tip} position="rt"> + <QuestionCircle /> + </Tip> + )} + {centerTitle && <PageCenterTitle>{centerTitle}</PageCenterTitle>} + <PageRightTitle> + <Space size={12}> + {isNeedHelp && ( + <button + className="custom-text-button" + onClick={() => { + window.open(`${appFlagValue[FlagKey.HELP_DOC_URL]}`, '_blank'); + }} + > + 帮助文档 + </button> + )} + {rightTitle} + </Space> + </PageRightTitle> + </GridRow> + </PageHeader> + )} + {contentWrapByCard ? ( + <PageSectionCard + $cardPadding={cardPadding} + $isNestSpinFlexContainer={isNestSpinFlexContainer} + bordered={false} + > + {children} + </PageSectionCard> + ) : ( + children + )} + {isShowFixedBottomLayout && ( + <FixedBottomContainer> + {renderFixedBottomLayout ? ( + renderFixedBottomLayout() + ) : ( + <> + <Button + type="primary" + onClick={onOkButtonClick} + loading={bottomOkButtonIsLoading || okButtonIsLoading} + > + {bottomOkText} + </Button> + <Button + onClick={onCancelButtonClick} + loading={bottomCancelButtonIsLoading || cancelButtonIsLoading} + > + {bottomCancelText} + </Button> + {bottomTip && <TitleWithIcon isShowIcon={true} isLeftIcon={true} title={bottomTip} />} + </> )} - </GridRow> - </PageHeader> - - {contentWrapByCard ? 
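/* A minimal sketch of the fixed-bottom API (handler names illustrative): the
   ok/cancel handlers defined at the bottom of this component set a local
   loading flag, await the callback, and surface thrown errors via
   Message.error, so callers can pass plain async functions:

   <SharedPageLayout
     title="Create job"
     isShowFixedBottomLayout={true}
     onBottomOkClick={async () => submitForm()}
     onBottomCancelClick={() => history.goBack()}
   >
     ...
   </SharedPageLayout>
*/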
<PageSectionCard>{children}</PageSectionCard> : children} + </FixedBottomContainer> + )} </Container> ); + + async function onOkButtonClick() { + if (!onBottomOkClick) return; + + try { + setOkButtonIsLoading(true); + await onBottomOkClick(); + setOkButtonIsLoading(false); + } catch (e) { + setOkButtonIsLoading(false); + Message.error(e.message); + } + } + + async function onCancelButtonClick() { + if (!onBottomCancelClick) return; + + try { + setCancelButtonIsLoading(true); + await onBottomCancelClick(); + setCancelButtonIsLoading(false); + } catch (e) { + setCancelButtonIsLoading(false); + Message.error(e.message); + } + } }; export default SharedPageLayout; diff --git a/web_console_v2/client/src/components/Sidebar/index.less b/web_console_v2/client/src/components/Sidebar/index.less new file mode 100644 index 000000000..a686961ab --- /dev/null +++ b/web_console_v2/client/src/components/Sidebar/index.less @@ -0,0 +1,46 @@ +.side-bar-container{ + display: flex; + flex-direction: column; + align-items: flex-end; + width: 200px; + padding: 16px 8px 8px; + background-color: white; + box-shadow: 1px 0px 0px var(--lineColor); + overflow-y: auto; + &.isFolded { + width: 48px; + padding: 16px 4px 8px; + align-items: center; + .arco-menu-item { + padding-left: 0 !important; + padding-right: 0; + text-align: center; + .anticon { + margin-right: 0; + } + } + } + &.isHidden { + display: none; + } +} + +.side-bar-menu{ + flex: 1; + user-select: none; +} + +.side-bar-fold-button{ + width: 24px; + height: 24px; + justify-content: center; + align-items: center; + display: inline-flex; + background-color: rgb(var(--gray-1)); + color: rgb(var(--gray-6)); + border-radius: 2px; + cursor: pointer; + &:hover { + background-color: rgb(var(--gray-2)); + } +} diff --git a/web_console_v2/client/src/components/Sidebar/index.tsx b/web_console_v2/client/src/components/Sidebar/index.tsx index b8b58bf7b..4163f1902 100644 --- a/web_console_v2/client/src/components/Sidebar/index.tsx +++ b/web_console_v2/client/src/components/Sidebar/index.tsx @@ -1,139 +1,331 @@ -import React, { useEffect, useState } from 'react'; -import styled from 'styled-components'; -import { Link, useLocation } from 'react-router-dom'; -import { Dropdown, Menu } from 'antd'; -import { Tooltip } from 'antd'; +/* istanbul ignore file */ +import React, { useMemo } from 'react'; +import { useHistory, useLocation } from 'react-router-dom'; +import { Dropdown, Menu, Tooltip } from '@arco-design/web-react'; import { useTranslation } from 'react-i18next'; -import { useToggle } from 'react-use'; -import { MixinFlexAlignCenter, MixinSquare } from 'styles/mixins'; import classNames from 'classnames'; import { StyledComponetProps } from 'typings/component'; -import store from 'store2'; -import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; +import { useRecoilState, useRecoilValue } from 'recoil'; +import { appFlag, appPreference, appState } from 'stores/app'; import { Apps, + Audit, + Common, DataServer, - Workbench, + File, MenuFold, MenuUnfold, + ModelCenter, Settings, - Interaction, UserGroup, + Workbench, + Safe, + TeamOutlined, } from 'components/IconPark'; import { useRecoilQuery } from 'hooks/recoil'; import { userInfoQuery } from 'stores/user'; import { FedRoles, FedUserInfo } from 'typings/auth'; - -const Container = styled.aside` - display: flex; - flex-direction: column; - align-items: flex-end; - width: 200px; - padding: 16px 8px 8px; - background-color: white; - box-shadow: 1px 0px 0px var(--lineColor); - - &.isFolded { - width: 48px; - 
padding: 16px 4px 8px; - align-items: center; - - .ant-menu-item { - padding-left: 0 !important; - padding-right: 0; - text-align: center; - - .anticon { - margin-right: 0; - } - } - } -`; -const StyledMenu = styled(Menu)` - flex: 1; -`; -const DropdownLink = styled(Link)` - &[data-is-active='true'] { - color: var(--primaryColor); - } -`; - -const FoldButton = styled.div` - ${MixinSquare(24)} - ${MixinFlexAlignCenter()} - - display: inline-flex; - background-color: var(--gray1); - color: var(--gray6); - border-radius: 2px; - cursor: pointer; - - &:hover { - background-color: var(--gray2); - } -`; +import { FlagKey } from 'typings/flag'; +import { fetchDashboardList } from '../../services/operation'; +import { useQuery } from 'react-query'; +import './index.less'; +import { ProjectBaseAbilitiesType, ProjectTaskType } from 'typings/project'; +import { useGetCurrentProjectAbilityConfig } from 'hooks'; +import { ABILITIES_SIDEBAR_MENU_MAPPER } from 'shared/router'; type MenuRoute = { to: string; + key?: string; label: string; icon: any; only?: FedRoles[]; subRoutes?: Omit<MenuRoute, 'icon'>[]; + flagKeys?: string[]; + disabled?: boolean; + abilitiesSupport?: (ProjectBaseAbilitiesType | ProjectTaskType)[]; }; -const SIDEBAR_MENU_ROUTES: MenuRoute[] = [ +// the roles with dashboard +const AUTH_DASHBOARD: FedRoles[] = [FedRoles.Admin]; +// the path show dashboard +const SHOW_DASHBOARD_PATH = ['operation', 'dashboard', 'data_fix', 'composer', 'cleanup']; + +const NORMAL_SIDEBAR_MENU_ROUTES: MenuRoute[] = [ { to: '/projects', label: 'menu.label_project', icon: Apps, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + }, + { + to: '/datasets', + label: 'menu.label_datasets', + icon: DataServer, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.datasets, + subRoutes: [ + { + to: '/datasets/data_source', + label: 'menu.label_datasets_data_source', + }, + { + to: '/datasets/raw/my', + label: 'menu.label_datasets_raw', + }, + { + to: '/datasets/processed/my', + label: 'menu.label_datasets_processed', + }, + { + to: '/datasets/task_list', + label: 'menu.label_datasets_task_list', + }, + ], }, { - to: '/workflow-templates', - label: 'menu.label_workflow_tpl', - icon: Interaction, + to: '/model-center', + label: 'menu.label_model_center', + icon: ModelCenter, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelCenter, + subRoutes: [ + { + to: '/model-center/model-train/list', + label: 'menu.label_model_center_model_training', + }, + { + to: '/model-center/model-warehouse', + label: 'menu.label_model_center_model_warehouse', + }, + { + to: '/model-center/model-evaluation/list', + label: 'menu.label_model_center_model_evaluation', + }, + { + to: '/model-center/offline-prediction/list', + label: 'menu.label_model_center_offline_prediction', + }, + ], }, { - to: '/workflows', - label: 'menu.label_workflow', + to: '/workflow-center', + label: 'menu.label_workflow_center', icon: Workbench, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.workflowCenter, + subRoutes: [ + { + to: '/workflow-center/workflows', + label: 'menu.label_workflow', + }, + { + to: '/workflow-center/workflow-templates', + label: 'menu.label_workflow_tpl', + }, + ], }, { - to: '/datasets', - label: 'menu.label_datasets', - icon: DataServer, + to: '/trusted-center', + label: 'menu.label_trusted_center', + icon: Safe, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.trustedCenter, + flagKeys: [FlagKey.TRUSTED_COMPUTING_ENABLED], + }, + { + to: '/algorithm-management', + label: 'menu.label_algorithm_repository', + icon: File, + 
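/* Every MenuRoute passes through three filters before rendering: role
   (`only`, against the current user), flags (every key in `flagKeys` must be
   truthy in appFlag), and project abilities (`abilitiesSupport`). A
   hypothetical fully-gated entry:

   {
     to: '/example',
     label: 'menu.label_example',
     icon: Common,
     only: [FedRoles.Admin],
     flagKeys: [FlagKey.DASHBOARD_ENABLED],
     abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects,
   }
*/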
abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.algorithmManagement, }, + { + to: '/model-serving', + label: 'menu.label_model_serving', + icon: ModelCenter, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelServing, + subRoutes: [ + { + to: '/model-serving', + label: 'menu.label_model_serving_service', + }, + ], + }, +].filter(Boolean); +const USER_SIDEBAR_MENU_ROUTES: MenuRoute[] = [ { to: '/users', label: 'menu.label_users', icon: UserGroup, only: [FedRoles.Admin], + flagKeys: [FlagKey.USER_MANAGEMENT_ENABLED], + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, }, +]; +const SETTING_SIDEBAR_MENU_ROUTES: MenuRoute[] = [ { to: '/settings', label: 'menu.label_settings', icon: Settings, only: [FedRoles.Admin], + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + subRoutes: ([ + process.env.REACT_APP_ENABLE_IMAGE_VERSION_PAGE !== 'false' && { + to: '/settings/image', + label: 'menu.label_settings_image', + only: [FedRoles.Admin], + }, + { + to: '/settings/variables', + label: 'menu.label_settings_variables', + only: [FedRoles.Admin], + }, + ] as Omit<MenuRoute, 'icon'>[]).filter(Boolean), + }, +]; +const AUDIT_SIDEBAR_MENU_ROUTES: MenuRoute[] = [ + { + to: '/audit', + label: 'menu.label_audit_log', + icon: Audit, + only: [FedRoles.Admin], + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + subRoutes: [ + { + to: '/audit/event', + label: 'menu.label_event_record', + only: [FedRoles.Admin], + }, + ], + }, +]; +const OP_SIDEBAR_MENU_ROUTES: MenuRoute[] = [ + { + to: '/operation', + label: 'menu.label_operation_maintenance', + only: [FedRoles.Admin], + icon: Common, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + }, + { + to: '/data_fix', + label: 'menu.label_dataset_fix', + only: [FedRoles.Admin], + flagKeys: [FlagKey.DATASET_STATE_FIX_ENABLED], + icon: Settings, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + }, + { + to: '/composer', + label: 'Composer', + only: [FedRoles.Admin], + icon: Common, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + subRoutes: [ + { + to: '/composer/scheduler-item/list', + label: '调度程序项', + only: [FedRoles.Admin], + }, + { + to: '/composer/scheduler-runner/list', + label: '调度程序运行器', + only: [FedRoles.Admin], + }, + ], + }, + { + to: '/cleanup', + label: 'Cleanup', + only: [FedRoles.Admin], + icon: Common, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, }, ]; +const PARTNER_SIDEBAR_MENU_ROUTES: MenuRoute[] = [ + { + to: '/partners', + label: 'menu.label_partners', + icon: TeamOutlined, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, + }, +]; + +/** Not in workspace */ +const pathnameToMenuRouteMap: { + [key: string]: MenuRoute[]; +} = { + users: USER_SIDEBAR_MENU_ROUTES, + settings: SETTING_SIDEBAR_MENU_ROUTES, + audit: AUDIT_SIDEBAR_MENU_ROUTES, + partners: PARTNER_SIDEBAR_MENU_ROUTES, + operation: OP_SIDEBAR_MENU_ROUTES, + composer: OP_SIDEBAR_MENU_ROUTES, + cleanup: OP_SIDEBAR_MENU_ROUTES, + dashboard: OP_SIDEBAR_MENU_ROUTES, + data_fix: OP_SIDEBAR_MENU_ROUTES, +}; + +export function isInWorkspace(pathname: string) { + const list = pathname.split('/'); + + if (list?.[1] && pathnameToMenuRouteMap[list[1]]) { + return false; + } + return true; +} function Sidebar({ className }: StyledComponetProps) { const { t } = useTranslation(); - const [isFolded, toggleFold] = useToggle(store.get(LOCAL_STORAGE_KEYS.sidebar_folded)); + const history = useHistory(); + const [preference, setPreference] = useRecoilState(appPreference); + const appStateValue = useRecoilValue(appState); 
+ const appFlagValue = useRecoilValue(appFlag); const location = useLocation(); const userQuery = useRecoilQuery<FedUserInfo>(userInfoQuery); - const [sidebarMenuItems, setSidebarItems] = useState(SIDEBAR_MENU_ROUTES); + const { abilities } = useGetCurrentProjectAbilityConfig(); - const activeKeys = _calcActiveKeys(sidebarMenuItems, (location as unknown) as Location); + const currentPathName = useMemo(() => { + const { pathname } = location; + const list = pathname.split('/'); + return list?.[1] || ''; + }, [location]); + + const dashboardQuery = useQuery('fetchDashboardList', () => fetchDashboardList(), { + enabled: Boolean( + userQuery.data?.role && + AUTH_DASHBOARD.includes(userQuery.data.role) && + SHOW_DASHBOARD_PATH.includes(currentPathName), + ), + }); - useEffect(() => { - const nextItems = getMenuItemsForThisUser(); - setSidebarItems(nextItems); + const dashboards = useMemo(() => { + if (!dashboardQuery.data) { + return []; + } + return dashboardQuery.data?.data || []; + }, [dashboardQuery.data]); + + const sidebarMenuItems = useMemo(() => { + const { pathname } = location; + return getMenuItemsByAbilities( + getMenuItemsByFlagKey(getMenuItemsForThisUser(getMenuRoute(pathname))), + abilities, + ); // eslint-disable-next-line react-hooks/exhaustive-deps - }, [userQuery.data]); + }, [userQuery.data, appFlagValue, dashboards, abilities]); + + const activeKeys = _calcActiveKeys(sidebarMenuItems, (location as unknown) as Location); return ( - <Container className={classNames(className, { isFolded })}> - <StyledMenu mode="inline" selectedKeys={activeKeys} defaultOpenKeys={activeKeys}> + <aside + className={`side-bar-container ${classNames(className, { + isFolded: preference.sidebarFolded, + isHidden: appStateValue.hideSidebar, + })}`} + > + <Menu + className="side-bar-menu" + mode="vertical" + selectedKeys={activeKeys} + defaultOpenKeys={activeKeys} + levelIndent={28} + > {sidebarMenuItems.map((menu) => { /** Has subroutes */ if (menu.subRoutes) { @@ -142,39 +334,83 @@ function Sidebar({ className }: StyledComponetProps) { /** Doesn't have subroutes */ return renderPlainRoute(menu); })} - </StyledMenu> - <FoldButton onClick={onFoldClick}>{isFolded ? <MenuUnfold /> : <MenuFold />}</FoldButton> - </Container> + </Menu> + <div className="side-bar-fold-button" onClick={onFoldClick}> + {preference.sidebarFolded ? <MenuUnfold /> : <MenuFold />} + </div> + </aside> ); function onFoldClick() { - toggleFold(); - store.set(LOCAL_STORAGE_KEYS.sidebar_folded, !isFolded); + setPreference({ + ...preference, + sidebarFolded: !preference.sidebarFolded, + }); } - function getMenuItemsForThisUser() { - return SIDEBAR_MENU_ROUTES.filter((item) => { + function getMenuRoute(pathname: string) { + const list = pathname.split('/'); + + if (list?.[1] && pathnameToMenuRouteMap[list[1]]) { + // no workspace routes + let currentMenu = pathnameToMenuRouteMap[list[1]]; + if ( + ['operation', 'dashboard', 'data_fix', 'composer', 'cleanup'].includes(list[1]) && + Array.isArray(dashboards) + ) { + currentMenu = currentMenu.concat( + appFlagValue[FlagKey.DASHBOARD_ENABLED] + ? 
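/* With the dashboard flag on, each dashboard returned by fetchDashboardList
   becomes its own admin-only entry at /dashboard/<uuid>; with it off, a single
   plain /dashboard entry is appended instead (the per-uuid entries carry the
   flag in flagKeys, so the flag filter would strip them anyway). */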
dashboards.map((item) => ({ + to: `/dashboard/${item.uuid}`, + label: item.name, + only: [FedRoles.Admin], + flagKeys: [FlagKey.DASHBOARD_ENABLED], + icon: Common, + })) + : [ + { + to: `/dashboard`, + label: '仪表盘', + icon: Common, + }, + ], + ); + } + return currentMenu; + } + + // workspace routes + return NORMAL_SIDEBAR_MENU_ROUTES; + } + + function getMenuItemsForThisUser(menuRoutes: MenuRoute[]) { + return menuRoutes.filter((item) => { if (!item.only) return true; if (!userQuery.data) return false; + // Role return item.only.includes(userQuery.data.role); }); } function renderPlainRoute(menu: MenuRoute) { return ( - <Menu.Item key={menu.to}> - {isFolded ? ( - <Tooltip title={t(menu.label)} placement="right"> - <Link to={menu.to}> - <menu.icon /> - </Link> + <Menu.Item + disabled={menu.disabled} + key={menu.key || menu.to} + onClick={() => handleMenuChange(menu.to)} + > + {preference.sidebarFolded ? ( + <Tooltip content={t(menu.label)} position="right"> + <menu.icon /> </Tooltip> ) : ( <> - <menu.icon /> - <Link to={menu.to}>{t(menu.label)}</Link> + <span style={{ display: 'inline-block', marginRight: 14 }}> + <menu.icon /> + </span> + {t(menu.label)} </> )} </Menu.Item> @@ -182,21 +418,21 @@ function Sidebar({ className }: StyledComponetProps) { } function renderWithSubRoutes(menu: MenuRoute) { - if (isFolded) { + if (preference.sidebarFolded) { return ( - <Menu.Item key={menu.to}> + <Menu.Item disabled={menu.disabled} key={menu.key || menu.to}> <Dropdown - placement="bottomRight" - overlay={ + position="br" + trigger={['click']} + droplist={ <Menu> {menu.subRoutes?.map((subRoute) => ( - <Menu.Item key={subRoute.to}> - <DropdownLink - to={subRoute.to} - data-is-active={activeKeys.includes(subRoute.to)} - > - {t(subRoute.label)} - </DropdownLink> + <Menu.Item + disabled={subRoute.disabled} + key={subRoute.key || subRoute.to} + onClick={() => handleMenuChange(subRoute.to)} + > + {t(subRoute.label)} </Menu.Item> ))} </Menu> @@ -209,15 +445,61 @@ function Sidebar({ className }: StyledComponetProps) { } return ( - <Menu.SubMenu key={menu.to} icon={<menu.icon />} title={t(menu.label)}> + <Menu.SubMenu + key={menu.key || menu.to} + title={ + <> + <span style={{ display: 'inline-block', marginRight: 14 }}> + <menu.icon /> + </span> + {t(menu.label)} + </> + } + > {menu.subRoutes?.map((subRoute) => ( - <Menu.Item key={subRoute.to}> - <Link to={subRoute.to}>{t(subRoute.label)}</Link> + <Menu.Item + disabled={subRoute.disabled} + key={subRoute.key || subRoute.to} + onClick={() => handleMenuChange(subRoute.to)} + > + {t(subRoute.label)} </Menu.Item> ))} </Menu.SubMenu> ); } + + function getMenuItemsByFlagKey(menuRoutes: MenuRoute[]) { + return menuRoutes.filter((item) => { + if (item.subRoutes) { + item.subRoutes = getMenuItemsByFlagKey((item.subRoutes as MenuRoute[]) || []); + } + + if (!item.flagKeys) return true; + if (item.flagKeys.length === 0) return false; + + // Flag key + // If appFlagValue has one flag(existed in item.flagKeys array), and it's value is false or not exist in appFlagValue + // so don't show this menu + return !item.flagKeys.some((flag) => !appFlagValue[flag]); + }); + } + + function handleMenuChange(target: string) { + const { pathname } = location; + if (pathname === target) return null; + history.push(target); + } +} + +function getMenuItemsByAbilities( + menuRoutes: MenuRoute[], + abilities: (ProjectBaseAbilitiesType | ProjectTaskType)[] | undefined, +) { + if (!abilities?.length) { + return menuRoutes; + } + return menuRoutes.filter((item) => 
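/* Note: only the project's first ability is consulted, so a route survives iff
   its abilitiesSupport list contains abilities[0]; routes without an
   abilitiesSupport list are filtered out whenever any ability is present. */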
item.abilitiesSupport?.includes(abilities?.[0])); } function _calcActiveKeys(menuItems: MenuRoute[], location: Location) { diff --git a/web_console_v2/client/src/components/StateIndicator/index.tsx b/web_console_v2/client/src/components/StateIndicator/index.tsx index 35dbc2b8f..0ee385c79 100644 --- a/web_console_v2/client/src/components/StateIndicator/index.tsx +++ b/web_console_v2/client/src/components/StateIndicator/index.tsx @@ -1,19 +1,29 @@ -import React, { FC } from 'react'; +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; import styled from 'styled-components'; -import { Tooltip, Tag } from 'antd'; -import { QuestionCircleOutlined } from '@ant-design/icons'; +import i18n from 'i18n'; + +import { Tooltip, Tag, Button } from '@arco-design/web-react'; +import { IconQuestionCircle } from '@arco-design/web-react/icon'; +import { InfoCircle } from 'components/IconPark'; +import MoreActions from 'components/MoreActions'; + +import { TooltipProps } from '@arco-design/web-react/es/Tooltip'; +import { TagProps } from '@arco-design/web-react/es/Tag'; const Container = styled.div` display: flex; align-items: center; - font-size: 13px; - line-height: 1; + font-size: var(--textFontSizePrimary); + line-height: 16px; white-space: nowrap; &::before { content: '●'; margin-right: 6px; font-size: 20px; + line-height: inherit; color: var(--color, var(--custom-color, #e0e0e0)); } @@ -21,22 +31,26 @@ const Container = styled.div` --color: var(--backgroundColorGray); } &[color='success'] { - --color: #00bab2; + --color: rgb(var(--green-6)); } &[color='warning'] { - --color: var(--orange6); + --color: rgb(var(--orange-6)); } - &[color='error'] { - --color: #fd5165; + &[color='error'], + &[color='deleted'] { + --color: rgb(var(--red-6)); + } + &[color='pending_accept'] { + --color: #fa9600; } &[color='processing'] { - --color: var(--primaryColor); + --color: rgb(var(--arcoblue-6)); } &[color='gold'] { - --color: var(--darkGold6); + --color: rgb(var(--gold-6)); } &[color='lime'] { - --color: var(--darkLime6); + --color: rgb(var(--lime-6)); } `; const Text = styled.span` @@ -45,11 +59,18 @@ const Text = styled.span` const Help = styled.div` cursor: help; `; -const QuestionMark = styled(QuestionCircleOutlined)` - width: 12px; - height: 12px; - color: var(--gray6); + +const StyledAfterButton = styled(Button)` + padding: 0 2px; + height: 20px; + font-size: 12px; `; +export interface ActionItem { + /** Display label */ + label: string; + onClick?: () => void; + isLoading?: boolean; +} export type StateTypes = | 'processing' @@ -58,32 +79,119 @@ export type StateTypes = | 'error' | 'default' | 'gold' - | 'lime'; + | 'lime' + | 'unknown' + | 'pending_accept' + | 'deleted'; + +export type ProgressType = 'success' | 'warning' | 'error' | 'normal' | undefined; type Props = { - tip?: string; + /** State type; controls the indicator color */ type: StateTypes; + /** Display text */ text: string; + /** Tooltip tip */ + tip?: string; + /** Enable tag mode */ tag?: boolean; + /** Arco component <Tag/> props */ + tagProps?: TagProps; + /** <MoreActions/> actionList props */ + actionList?: ActionItem[]; + /** Container style */ + containerStyle?: React.CSSProperties; + /** Custom render after the text; if you pass a string, it renders as a button */ + afterText?: React.ReactNode; + /** Only works when afterText is a string */ + onAfterTextClick?: () => void; + /** Tooltip tip position */ + position?: TooltipProps['position']; +}; + +type LightClientTypeProps = { + isLightClient: boolean; +}; + +const
stateTypeToColorMap = { + default: undefined, + unknown: 'gray', + success: 'green', + warning: 'orange', + error: 'red', + deleted: 'red', + pending_accept: 'orange', + processing: 'blue', + gold: 'gold', + lime: 'lime', }; -const StateIndicator: FC<Props> = ({ text, type = 'default', tip, tag }) => { - let Wrapper = tag ? Tag : Container; +const StateIndicator: FC<Props> & { + LigthClientType: FC<LightClientTypeProps>; +} = ({ + text, + type = 'default', + tip, + position = 'top', + tag, + tagProps, + actionList, + containerStyle, + afterText, + onAfterTextClick, +}) => { + const Wrapper = tag ? Tag : Container; + + const afterJsx = useMemo(() => { + if (typeof afterText === 'string') { + return ( + <StyledAfterButton type="text" size="small" onClick={onAfterTextClick}> + {afterText} + </StyledAfterButton> + ); + } + + return afterText; + }, [afterText, onAfterTextClick]); + if (actionList && actionList.length > 0) { + return ( + <Wrapper color={type} style={containerStyle}> + <Text>{text}</Text> + {afterJsx} + <MoreActions actionList={actionList} trigger="hover"> + <InfoCircle /> + </MoreActions> + </Wrapper> + ); + } if (tag) { - return <Tag color={type}>{text}</Tag>; + return ( + <Tooltip content={tip} position={position}> + <Tag + id="workflow-state" + bordered + color={stateTypeToColorMap[type || 'default']} + style={containerStyle} + {...tagProps} + > + {text} + </Tag> + </Tooltip> + ); } const Content = ( - <Wrapper color={type}> + <Wrapper color={type} style={containerStyle}> <Text>{text}</Text> - {tip && <QuestionMark />} + {tip && <IconQuestionCircle />} + {afterJsx} </Wrapper> ); if (tip?.trim()) { return ( - <Tooltip title={tip}> + <Tooltip content={tip}> <Help>{Content}</Help> </Tooltip> ); @@ -92,4 +200,18 @@ const StateIndicator: FC<Props> = ({ text, type = 'default', tip, tag }) => { return Content; }; +const LigthClientType: FC<LightClientTypeProps> = ({ isLightClient }) => ( + <StateIndicator + type={isLightClient ?
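/* A usage sketch (labels illustrative). With actionList set, the dot + text
   gets a hover MoreActions menu; in tag mode the type is mapped to a Tag color
   via stateTypeToColorMap above:

   <StateIndicator
     type="error"
     text="Failed"
     actionList={[{ label: 'Retry', onClick: handleRetry }]}
   />
   <StateIndicator tag type="success" text="Succeeded" />
*/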
i18n.t('project.label_type_light_client') + : i18n.t('project.label_type_platform') + } + tag + /> +); + +StateIndicator.LigthClientType = LigthClientType; + export default StateIndicator; diff --git a/web_console_v2/client/src/components/StatisticList/index.tsx b/web_console_v2/client/src/components/StatisticList/index.tsx new file mode 100644 index 000000000..079862cb9 --- /dev/null +++ b/web_console_v2/client/src/components/StatisticList/index.tsx @@ -0,0 +1,152 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo } from 'react'; +import styled from 'styled-components'; + +import { Card, Empty } from '@arco-design/web-react'; +import TitleWithIcon, { Props as TitleWithIconProps } from 'components/TitleWithIcon'; +import { IconQuestionCircle } from '@arco-design/web-react/icon'; +import { CONSTANTS } from 'shared/constants'; +import { formatObjectToArray } from 'shared/helpers'; +import { useModelMetriesResult } from 'hooks/modelCenter'; + +const Container = styled.div` + display: inline-block; +`; + +const Label = styled.div` + font-weight: 500; + font-size: 20px; + color: var(--textColorStrong); +`; + +type NumberItemProps = { + value?: string | number; + className?: string; +} & TitleWithIconProps; + +type ModelEvaluationVariantProps = { + id: ID; + participantId?: ID; + isTraining?: boolean; +}; + +export const NumberItem: FC<NumberItemProps> = ({ + className, + value = CONSTANTS.EMPTY_PLACEHOLDER, + ...props +}) => { + return ( + <Container className={className}> + <TitleWithIcon isShowIcon={Boolean(props.tip)} icon={IconQuestionCircle} {...props} /> + <Label>{value}</Label> + </Container> + ); +}; + +const CardContainer = styled.div` + display: flex; + justify-content: flex-start; + flex-wrap: wrap; +`; +const StyledNumberItem = styled(NumberItem)<{ $cols: number }>` + width: ${(props) => String(100 / props.$cols) + '%'}; +`; + +export type OptionItem = { + /** Display title */ + text: string; + /** Display value */ + value: string | number; + /** Tip */ + tip?: string; +}; + +export type Props = { + /** DataSource */ + data: OptionItem[]; + /** How many cols in one row */ + cols?: number; +}; + +export type SubComponent = { + ModelEvaluation: FC<ModelEvaluationVariantProps>; +}; + +export const StatisticList: FC<Props> & SubComponent = ({ data, cols = 6 }) => { + return ( + <Card> + <CardContainer> + {data.length > 0 ? ( + data.map((item) => ( + <StyledNumberItem + $cols={cols} + key={item.text} + title={item.text} + value={item.value} + tip={item.tip} + isShowIcon={Boolean(item.tip)} + icon={IconQuestionCircle} + /> + )) + ) : ( + <Empty /> + )} + </CardContainer> + </Card> + ); +}; + +const sortingKeys = [ + 'acc', + 'auc', + 'precision', + 'recall', + 'f1', + 'ks', + 'mse', + 'msre', + 'abs', + 'loss', + 'tp', + 'tn', + 'fp', + 'fn', +]; + +const labelKeyMap: Record<string, string> = { + loss: 'LOSS', + f1: 'F1 score', + auc: 'AUC', + acc: 'Accuracy', + precision: 'Precision', + recall: 'Recall', +}; +export const ModelEvaluation: FC<ModelEvaluationVariantProps> = ({ + id, + participantId, + isTraining = true, +}) => { + const { data } = useModelMetriesResult(id, participantId); + + const list = useMemo(() => { + if (!data) { + return []; + } + + return formatObjectToArray(isTraining ? data.train : data.eval, sortingKeys).map( + ({ label, value }) => ({ + text: labelKeyMap[label] ?? label.toUpperCase(), + value: value.values?.length + ? 
value.values[value.values.length - 1]?.toFixed(3) + : CONSTANTS.EMPTY_PLACEHOLDER, + }), + ); + }, [data, isTraining]); + + return <StatisticList data={list} />; +}; + +StatisticList.ModelEvaluation = ModelEvaluation; + +export default StatisticList; diff --git a/web_console_v2/client/src/components/StatusProgress/index.module.less b/web_console_v2/client/src/components/StatusProgress/index.module.less new file mode 100644 index 000000000..315765fb3 --- /dev/null +++ b/web_console_v2/client/src/components/StatusProgress/index.module.less @@ -0,0 +1,10 @@ +.status_progress { + display: flex; + flex-direction: column; + justify-content: space-around; + cursor: pointer; +} +.status_progress_text{ + line-height: 20px; + margin-bottom: 2px; +} \ No newline at end of file diff --git a/web_console_v2/client/src/components/StatusProgress/index.tsx b/web_console_v2/client/src/components/StatusProgress/index.tsx new file mode 100644 index 000000000..49b325ad2 --- /dev/null +++ b/web_console_v2/client/src/components/StatusProgress/index.tsx @@ -0,0 +1,62 @@ +import React, { FC, useMemo } from 'react'; +import { Progress, Tooltip } from '@arco-design/web-react'; +import styled from './index.module.less'; + +type Option = { + status: string; + text: string; + color: string; + percent: number; +}; +type Props = { + options: Option[]; + status: string; + isTip?: boolean; + toolTipContent?: React.ReactNode; + className?: string; +}; + +const defaultStatus: Option = { + status: 'DEFAULT', + text: '未知', + color: '#165DFF', + percent: 0, +}; + +const StatusProgress: FC<Props> = ({ + options, + status, + isTip = false, + toolTipContent, + className, +}) => { + const currentStatus = useMemo(() => { + return options.find((option) => option.status === status) || defaultStatus; + }, [status, options]); + if (isTip) { + return ( + <Tooltip content={toolTipContent}> + <div className={`${styled.status_progress} ${className}`}> + <span className={styled.status_progress_text}>{currentStatus.text}</span> + <Progress + color={currentStatus?.color} + percent={currentStatus?.percent || 0} + showText={false} + /> + </div> + </Tooltip> + ); + } + return ( + <div className={`${styled.status_progress} ${className}`}> + <span className={styled.status_progress_text}>{currentStatus.text}</span> + <Progress + color={currentStatus?.color} + percent={currentStatus?.percent || 0} + showText={false} + /> + </div> + ); +}; + +export default StatusProgress; diff --git a/web_console_v2/client/src/components/Table/index.tsx b/web_console_v2/client/src/components/Table/index.tsx new file mode 100644 index 000000000..38a79e765 --- /dev/null +++ b/web_console_v2/client/src/components/Table/index.tsx @@ -0,0 +1,67 @@ +/* istanbul ignore file */ + +import React from 'react'; +import { useTranslation } from 'react-i18next'; +import { Table, TableProps } from '@arco-design/web-react'; + +import { useUrlState } from 'hooks'; + +export interface Props<RecordType = any> extends TableProps<RecordType> { + total?: number; + onShowSizeChange?: (current: number, size: number) => void; + onPageChange?: (page: number, pageSize: number) => void; + isShowTotal?: boolean; +} + +function MyTable<RecordType = any>({ + isShowTotal = true, + className, + ...restProps +}: Props<RecordType>) { + const { t } = useTranslation(); + + const [paginationParam, setPaginationParam] = useUrlState({ + page: 1, + pageSize: 10, + }); + + return ( + <Table + className={`custom-table custom-table-left-side-filter ${className}`} + pagination={{ + showSizeChanger: true, + 
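/* Pagination state round-trips through the URL: useUrlState seeds page/pageSize
   from the query string and the two handlers below write changes back, so a
   reload or a shared link restores the same table page. Usage sketch (data and
   columns illustrative):

   <Table columns={columns} data={rows} onPageChange={(page, size) => fetchRows(page, size)} />
*/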
onPageSizeChange: onShowSizeChange, + onChange: onPageChange, + showTotal: isShowTotal + ? (total) => + t('hint_total_table', { + total: total || 0, + }) + : undefined, + current: Number(paginationParam.page), + pageSize: Number(paginationParam.pageSize), + }} + {...(restProps as any)} + /> + ); + + function onShowSizeChange(size: number, current: number) { + restProps.onShowSizeChange && restProps.onShowSizeChange(current, size); + + setPaginationParam({ + page: current, + pageSize: size, + }); + } + function onPageChange(page: number, pageSize: number) { + restProps.onPageChange && restProps.onPageChange(page, pageSize); + + setPaginationParam({ + page: page, + pageSize: pageSize, + }); + } +} + +export { MyTable as Table }; +export default MyTable; diff --git a/web_console_v2/client/src/components/TitleWithIcon/index.tsx b/web_console_v2/client/src/components/TitleWithIcon/index.tsx new file mode 100644 index 000000000..28f01f196 --- /dev/null +++ b/web_console_v2/client/src/components/TitleWithIcon/index.tsx @@ -0,0 +1,103 @@ +/* istanbul ignore file */ +import React, { FC, CSSProperties } from 'react'; +import styled from 'styled-components'; +import { Tooltip, Space } from '@arco-design/web-react'; +import { InfoCircleFill } from 'components/IconPark'; + +const Container = styled.div<{ + $isBlock?: boolean; +}>` + display: ${(props) => (props.$isBlock ? 'block' : 'inline-block')}; +`; +const Label = styled.span` + display: inline-block; + font-size: 12px; + color: var(--color, --textColor); +`; + +const IconContainer = styled.span` + display: inline-block; +`; +const LeftIconContainer = styled.span` + display: inline-block; + margin-right: 5px; +`; + +const DefaultIcon = styled(InfoCircleFill)` + color: #86909c; +`; + +export type Props = { + className?: string; + /** Display title */ + title: string | React.ReactNode; + /** Custom icon */ + icon?: any; + /** Tooptip tip */ + tip?: string; + /** Icon on the left of title */ + isLeftIcon?: boolean; + isShowIcon?: boolean; + textColor?: string; + /** Is container display: block, otherwise inline-block */ + isBlock?: boolean; +}; + +const TitleWithIcon: FC<Props> = ({ + className, + title, + tip, + icon = DefaultIcon, + isLeftIcon = false, + isShowIcon = false, + isBlock = true, + textColor, +}) => { + if (isLeftIcon) { + return ( + <Container + className={className} + $isBlock={isBlock} + style={ + textColor + ? ({ + '--color': textColor, + } as CSSProperties) + : {} + } + > + {isShowIcon && ( + <LeftIconContainer> + <Tooltip content={tip}>{React.createElement(icon)}</Tooltip> + </LeftIconContainer> + )} + <Label>{title}</Label> + </Container> + ); + } + + return ( + <Container + className={className} + $isBlock={isBlock} + style={ + textColor + ? 
({ + '--color': textColor, + } as CSSProperties) + : {} + } + > + <Space> + <Label>{title}</Label> + {isShowIcon && ( + <IconContainer> + <Tooltip content={tip}>{React.createElement(icon)}</Tooltip> + </IconContainer> + )} + </Space> + </Container> + ); +}; + +export default TitleWithIcon; diff --git a/web_console_v2/client/src/components/TodoListContainer/index.tsx b/web_console_v2/client/src/components/TodoListContainer/index.tsx new file mode 100644 index 000000000..f164a8694 --- /dev/null +++ b/web_console_v2/client/src/components/TodoListContainer/index.tsx @@ -0,0 +1,77 @@ +import React from 'react'; +import styled from 'styled-components'; +import { Button, Popover } from '@arco-design/web-react'; + +type TProps = { + btnText: string; + disabled: boolean; + loading?: boolean; + children: React.ReactNode; + containerStyle?: React.CSSProperties; +}; + +const TodoButton = styled(Button)<{ disabled: boolean }>` + background-color: ${(props) => + props.disabled ? 'var(--color-secondary)' : 'rgb(var(--arcoblue-1))'} !important; + color: ${(props) => + props.disabled ? 'var(--color-text-3)' : 'rgb(var(--arcoblue-6))'} !important; + z-index: 2; + + &[disabled] { + cursor: not-allowed; + } +`; + +const Icon: React.FC<{ className?: string }> = ({ className }) => ( + <span className={className}> + <svg fill="none" height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"> + <path + clipRule="evenodd" + d="m6.33301 1.93342c0-.1841-.14924-.33334-.33334-.33334h-.8372c-.1841 0-.33334.14924-.33334.33334l-.00033.66666h-1.82913c-.36819 0-.66666.31506-.66666.70371v9.25921c0 .3887.29847.7038.66666.7038h10.00003c.3682 0 .6666-.3151.6666-.7038v-9.25921c0-.38865-.2984-.70371-.6666-.70371l-1.8226-.0001v-.66667c0-.1841-.1492-.33333-.3333-.33333l-.84413.0001c-.18409 0-.33333.14924-.33333.33334v.66666h-3.33333zm3.54908 3.92573c.13001-.12997.34071-.12997.47071 0l.4706.47067c.13.12997.13.3407 0 .47067l-2.82176 2.82176-.00222.00225-.47067.4707c-.04998.05-.11192.0807-.17661.0923l-.03907.0046h-.0393c-.07851-.0046-.1557-.0369-.21568-.0969l-1.88267-1.88271c-.12998-.12997-.12998-.34069 0-.47067l.47066-.47066c.12997-.12998.3407-.12998.47067 0l1.17672 1.17664z" + fill="currentColor" + fillRule="evenodd" + /> + </svg> + </span> +); + +const StyledIcon = styled(Icon)` + position: relative; + display: inline-block; + top: 1px; + margin-right: 10px; +`; + +const TodoListContainer: React.FC<TProps> = ({ + btnText, + loading, + disabled, + children, + containerStyle, +}) => { + const shouldDisabled = disabled && !loading; + const btn = ( + <TodoButton loading={loading} disabled={shouldDisabled} style={containerStyle}> + {!loading ? 
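/* Gating above: shouldDisabled (= disabled && !loading) renders the bare,
   non-clickable button with no Popover attached, so the todo list can only be
   opened when there is something to act on. Usage sketch (props and TodoList
   illustrative):

   <TodoListContainer btnText="Pending approvals" loading={isFetching} disabled={todos.length === 0}>
     <TodoList items={todos} />
   </TodoListContainer>
*/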
<StyledIcon /> : null} + {btnText} + </TodoButton> + ); + if (shouldDisabled) { + return btn; + } + + return ( + <Popover + getPopupContainer={() => window.document.body} + content={children} + position="br" + style={{ + maxWidth: 1000, + }} + > + {btn} + </Popover> + ); +}; + +export default TodoListContainer; diff --git a/web_console_v2/client/src/components/TodoPopover/index.module.less b/web_console_v2/client/src/components/TodoPopover/index.module.less new file mode 100644 index 000000000..fcb04f5ad --- /dev/null +++ b/web_console_v2/client/src/components/TodoPopover/index.module.less @@ -0,0 +1,70 @@ +@import '~styles/mixins.less'; +.todo_button{ + z-index: 2; + background-color: rgb(var(--arcoblue-1)) !important; + color: var(--primaryColor) !important; + .icon_todo{ + position: relative; + margin-right: 10px; + font-size: 18px; + } + &[disabled] { + cursor: not-allowed; + } +} +.overlay_header{ + padding: 10px 16px; + border-bottom: 1px solid #e5e8ee; +} +.overlay_item_header{ + display: flex; + padding-right: 16px; +} +.number_tag{ + display: inline-block; + width: 18px; + height: 18px; + background: #f6f7fb; + border-radius: 40px; + font-size: 12px; + color: var(--textColorSecondary); + font-weight: 600; + text-align: center; + line-height: 18px; +} +.overlay_item{ + position: relative; + width: 100%; + padding: 7px 16px; + border-bottom: 1px solid #e5e8ee; + cursor: pointer; + &:hover { + background-color: #f6f7fb; + } +} +.label{ + display: inline-block; + margin-right: 4px; + font-size: 12px; + color: var(--textColorSecondary); + font-weight: 500; +} +.label_tiny{ + font-size: 11px; + color: var(--textColorSecondary); +} +.label_strong{ + .MixinEllipsis(); + flex:1; + font-size: 12px; + color: var(--textColorStrong); + font-weight: 500; +} +.icon_right{ + position: absolute; + right: 16px; + top: 50%; + transform: translateY(-50%); + font-size: 12px; +} + diff --git a/web_console_v2/client/src/components/TodoPopover/index.tsx b/web_console_v2/client/src/components/TodoPopover/index.tsx new file mode 100644 index 000000000..06dc3c317 --- /dev/null +++ b/web_console_v2/client/src/components/TodoPopover/index.tsx @@ -0,0 +1,538 @@ +/* istanbul ignore file */ + +import React, { FC, ReactNode, useMemo } from 'react'; +import { generatePath, useHistory } from 'react-router-dom'; + +import { formatTimestamp } from 'shared/date'; +import { TIME_INTERVAL } from 'shared/constants'; + +import { useQuery } from 'react-query'; +import { fetchProjectPendingList } from 'services/algorithm'; +import { fetchModelServingList_new } from 'services/modelServing'; +import { + fetchModelJobGroupList, + fetchModelJobList_new, + fetchPeerModelJobGroupDetail, +} from 'services/modelCenter'; +import { fetchPendingProjectList } from 'services/project'; + +import { Message, Button, Popover, Tooltip } from '@arco-design/web-react'; +import { Todo, Right } from 'components/IconPark'; + +import { Workflow } from 'typings/workflow'; +import { ModelServing, ModelServingState } from 'typings/modelServing'; +import { ModelJob, ModelJobGroup } from 'typings/modelCenter'; +import { Algorithm } from 'typings/algorithm'; + +import newModelCenterRoutes, { ModelEvaluationModuleType } from 'views/ModelCenter/routes'; +import { getCoordinateName, PENDING_PROJECT_FILTER_MAPPER } from 'views/Projects/shard'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { FILTER_MODEL_JOB_OPERATOR_MAPPER } from 'views/ModelCenter/shared'; +import { + useGetAppFlagValue, + useGetCurrentProjectId, + 
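+  // project-scoped hooks: feature flags, current project id, participant list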
useGetCurrentProjectParticipantList, +} from 'hooks'; +import { APIResponse } from 'typings/app'; +import { NotificationItem, NotificationType } from 'typings/trustedCenter'; +import { fetchTrustedNotifications } from 'services/trustedCenter'; +import { FilterOp } from 'typings/filter'; +import { Project, ProjectStateType } from 'typings/project'; +import { constructExpressionTree } from 'shared/filter'; +import { FlagKey } from 'typings/flag'; + +import styles from './index.module.less'; + +const CUSTOM_CLASS_NAME = 'custom-popover'; + +export type Props<T = any> = { + isLoading?: boolean; + disabled?: boolean; + list: T[]; + buttonText?: string; + renderContent: (list: T[], options?: any) => ReactNode; +}; + +export type ApprovalProps<T = any> = Omit<Props<T>, 'renderContent'> & { + title?: string; + onClick?: (item?: T) => void; + dateField?: string; + creatorField?: string; + contentField?: string; + contentVerb?: string; + contentSuffix?: string; + contentPrefix?: string; + renderContent?: (list: T[], options?: any) => ReactNode; +}; + +export type TrainModelProps<T = any> = Omit<ApprovalProps<T>, 'list'> & {}; + +export type EvaluationModelNewProps<T = any> = TrainModelProps<T> & { + module: ModelEvaluationModuleType; +}; + +export type TrustedCenterProps<T = any> = Omit<ApprovalProps<T>, 'list'> & {}; + +function TodoPopover<T = any>({ + isLoading = false, + disabled = false, + list = [], + buttonText = '', + renderContent, + ...restProps +}: Props<T>) { + return ( + <> + <Popover className={CUSTOM_CLASS_NAME} content={renderContent(list, restProps)} position="br"> + <Button + className={styles.todo_button} + loading={isLoading} + icon={<Todo className={styles.icon_todo} />} + disabled={disabled || list.length === 0} + type={list.length ? 'primary' : 'default'} + > + {buttonText} + </Button> + </Popover> + </> + ); +} + +const RenderItem: FC<{ item: any; options?: any }> = ({ item, options }) => { + const coordinatorName = getCoordinateName(item?.participants_info?.participants_map); + const participantList = useGetCurrentProjectParticipantList(); + const participant = participantList.filter((item_) => item_?.id === item.coordinator_id); + + return ( + <div + className={styles.overlay_item} + onClick={(e) => { + e.stopPropagation(); + options.onClick(item); + }} + > + <div className={styles.overlay_item_header}> + <span className={styles.label}> + {coordinatorName || participant?.[0]?.name || participantList?.[0]?.name} + {options.contentVerb ?? 
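+          // falls back to the default verb ' 发起了' ("initiated"); the item header
+          // reads "<coordinator><verb>「<name>」<suffix>"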
' 发起了'} + </span> + <Tooltip content={`「${item[options.contentField]}」${options.contentSuffix || ''}`}> + <span className={styles.label_strong}> + {`「${item[options.contentField]}」`} + {options.contentSuffix || ''} + </span> + </Tooltip> + </div> + <div> + <span className={styles.label_tiny}>{formatTimestamp(item[options.dateField])}</span> + </div> + <Right className={styles.icon_right} /> + </div> + ); +}; + +function renderDefaultModelCenterContent(list: any, options?: any) { + return ( + <div> + <div className={styles.overlay_header}> + <span className={styles.label}>{options.title}</span> + <div className={styles.number_tag}>{list.length}</div> + </div> + {list.map((item: any) => ( + <RenderItem item={item} options={options} key={item.id} /> + ))} + </div> + ); +} + +function ModelCenter({ + renderContent = renderDefaultModelCenterContent, + ...restProps +}: ApprovalProps) { + return <TodoPopover renderContent={renderContent} {...restProps} />; +} + +function EvaluationModelNew({ + dateField = 'created_at', + contentField = 'name', + module, + ...restProps +}: EvaluationModelNewProps) { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const isPrediction = module === 'offline-prediction'; + const copywriting = isPrediction ? '预测' : '评估'; + + const { isError, data: workflowListData, error, isFetching } = useQuery( + ['todoPopover_model_evaluation_notice_list', projectId, module], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] as ModelJob[] }); + } + + return fetchModelJobList_new(projectId, { + filter: filterExpressionGenerator( + { auth_status: ['PENDING'] }, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + ), + types: isPrediction ? 'PREDICTION' : 'EVALUATION', + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + return ( + <ModelCenter + isLoading={isFetching} + list={workflowListData?.data || []} + dateField={dateField} + contentField={contentField} + buttonText={`${workflowListData?.data.length || 0} 条待处理${copywriting}任务`} + title={`待处理${copywriting}任务`} + contentSuffix={`的${copywriting}任务`} + onClick={onClick} + {...restProps} + /> + ); + + function onClick(item: Workflow) { + history.push(`/model-center/${module}/receiver/edit/${item.id}`); + } +} + +function AlgorithmManagement({ + dateField = 'created_at', + creatorField = 'creator', + contentField = 'name', + ...restProps +}: TrainModelProps) { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + + const { isError, data: algorithmData, error, isFetching } = useQuery( + ['algorithmReceiveList', projectId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }) as APIResponse<Algorithm[]>; + } + return fetchProjectPendingList(projectId ?? 
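+      // projectId is guaranteed truthy here (the early return above handles the
+      // missing-project case), so `?? 0` only satisfies the type checker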
0); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + const todoList = algorithmData?.data || []; + return ( + <ModelCenter + isLoading={isFetching} + list={todoList} + dateField={dateField} + contentField={contentField} + buttonText={`${todoList.length} 条待处理算法消息`} + title={'待处理算法任务'} + contentVerb={'发布了'} + contentSuffix={' 算法'} + onClick={onClick} + {...restProps} + /> + ); + + function onClick(item: Algorithm) { + history.push(`/algorithm-management/acceptance/${item.id}`); + } +} + +function ModelServingNotice({ + dateField = 'created_at', + creatorField = 'creator', + contentField = 'name', + ...restProps +}: TrainModelProps) { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + + const { isError, isFetching, data, error } = useQuery( + ['fetchModelServingList', projectId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }) as APIResponse<ModelServing[]>; + } + + return fetchModelServingList_new(projectId); + }, + + { + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + const todoList = useMemo(() => { + if (!data?.data) { + return []; + } + + return data.data.filter((item) => item.status === ModelServingState.WAITING_CONFIG); + }, [data?.data]); + + return ( + <ModelCenter + isLoading={isFetching} + list={todoList} + dateField={dateField} + contentField={contentField} + buttonText={`${todoList.length} 条待处理在线服务`} + title={'待处理在线服务'} + contentSuffix={' 的在线任务'} + onClick={onClick} + {...restProps} + /> + ); + + function onClick(item: ModelServing) { + history.push(`/model-serving/create/receiver/${item.id}`); + } +} + +function NewTrainModel({ + dateField = 'created_at', + contentField = 'name', + ...restProps +}: TrainModelProps) { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + + const model_job_global_config_enabled = useGetAppFlagValue( + FlagKey.MODEL_JOB_GLOBAL_CONFIG_ENABLED, + ); + + const { isError, data, error, isFetching } = useQuery<{ + data: ModelJobGroup[]; + }>( + ['fetchModelJobGroupList', projectId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }); + } + return fetchModelJobGroupList(projectId!, { + filter: constructExpressionTree([ + { + field: 'configured', + op: FilterOp.EQUAL, + bool_value: false, + }, + ]), + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + const todoList = useMemo(() => { + if (!data) { + return []; + } + const list = data.data || []; + + return list; + }, [data]); + + return ( + <ModelCenter + isLoading={isFetching} + list={todoList} + dateField={dateField} + contentField={contentField} + buttonText={`${todoList.length} 条待处理模型训练`} + title={'待处理模型训练'} + contentSuffix={'的模型训练'} + onClick={onClick} + {...restProps} + /> + ); + + async function onClick(item: ModelJobGroup) { + let isOldModelGroup = true; + if (model_job_global_config_enabled) { + try { + const res = await fetchPeerModelJobGroupDetail(projectId!, item.id, item.coordinator_id!); + const modelGroupDetail = res.data; + isOldModelGroup = Boolean(modelGroupDetail?.config?.job_definitions?.length); + } catch (error) { + Message.error('获取模型训练作业详情失败!'); + return; + } + } + model_job_global_config_enabled && 
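+    // route to the new centralized creation flow only when the flag is on AND the
+    // group carries no legacy per-job config (config.job_definitions); otherwise
+    // fall back to the old per-role creation route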
!isOldModelGroup + ? history.push( + generatePath(newModelCenterRoutes.ModelTrainCreateCentralization, { + role: 'receiver', + id: item.id, + }), + ) + : history.push( + generatePath(newModelCenterRoutes.ModelTrainCreate, { + role: 'receiver', + action: 'create', + id: item.id, + }), + ); + } +} + +function TrustedCenter({ + dateField = 'created_at', + creatorField = 'coordinator_id', + contentField = 'name', + ...restProps +}: TrustedCenterProps) { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + + const { isError, data, error, isFetching } = useQuery<{ data: NotificationItem[] }>( + ['fetchTrustedNotifications', projectId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }); + } + return fetchTrustedNotifications(projectId!); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + const todoList = useMemo(() => { + if (!data) { + return []; + } + + const list = data.data || []; + + return list; + }, [data]); + + return ( + <ModelCenter + isLoading={isFetching} + list={todoList} + dateField={dateField} + contentField={contentField} + buttonText={`待处理任务 ${todoList.length}`} + title="待处理任务" + contentVerb="发起了" + contentSuffix="的任务" + onClick={onClick} + {...restProps} + /> + ); + + function onClick(item: NotificationItem) { + switch (item.type) { + case NotificationType.TRUSTED_JOB_GROUP_CREATE: + history.push(`/trusted-center/edit/${item.id}/receiver`); + break; + case NotificationType.TRUSTED_JOB_EXPORT: + history.push( + `/trusted-center/dataset-application/${item.id}/${item.coordinator_id}/${item.name}`, + ); + break; + default: + break; + } + } +} +function ProjectNotice({ + dateField = 'created_at', + contentField = 'name', + ...restProps +}: TrainModelProps) { + const history = useHistory(); + + const { isFetching, data: todoPendingProjectList } = useQuery( + ['fetchTodoPendingProjectList'], + () => + fetchPendingProjectList({ + filter: filterExpressionGenerator( + { + state: [ProjectStateType.PENDING], + }, + PENDING_PROJECT_FILTER_MAPPER, + ), + page: 1, + page_size: 0, + }), + + { + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + retry: 2, + onError: (error) => { + Message.error((error as Error).message); + }, + }, + ); + + const todoList = useMemo(() => { + return todoPendingProjectList?.data ?? 
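+    // fall back to an empty list while the pending-project query is still resolving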
[]; + }, [todoPendingProjectList?.data]); + + return ( + <ModelCenter + isLoading={isFetching} + list={todoList} + dateField={dateField} + contentField={contentField} + buttonText={`${todoList.length}条待处理工作区`} + title="待处理工作区邀请" + contentSuffix="的工作区邀请" + onClick={onClick} + {...restProps} + /> + ); + + function onClick(item: Project) { + history.push(`/projects/receiver/${item.id}`); + } +} + +TodoPopover.ModelCenter = ModelCenter; +TodoPopover.NewTrainModel = NewTrainModel; +TodoPopover.EvaluationModelNew = EvaluationModelNew; +TodoPopover.AlgorithmManagement = AlgorithmManagement; +TodoPopover.ModelServing = ModelServingNotice; +TodoPopover.TrustedCenter = TrustedCenter; +TodoPopover.ProjectNotice = ProjectNotice; + +export default TodoPopover; diff --git a/web_console_v2/client/src/components/UserRoleBadge/index.tsx b/web_console_v2/client/src/components/UserRoleBadge/index.tsx index 6452642e4..3d2255f3d 100644 --- a/web_console_v2/client/src/components/UserRoleBadge/index.tsx +++ b/web_console_v2/client/src/components/UserRoleBadge/index.tsx @@ -1,3 +1,5 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; import styled from 'styled-components'; import { useTranslation } from 'react-i18next'; @@ -6,9 +8,9 @@ import { MixinCircle, MixinFlexAlignCenter } from 'styles/mixins'; import { Crown, User } from 'components/IconPark'; const Container = styled.div` - --background-color: var(--blue1); - --color: var(--blue6); - --badge-background: var(--blue3); + --background-color: rgb(var(--blue-1)); + --color: rgb(var(--blue-6)); + --badge-background: rgb(var(--blue-3)); display: inline-flex; align-items: center; @@ -23,9 +25,9 @@ const Container = styled.div` background-color: var(--background-color); &[data-role='admin'] { - --background-color: var(--gold1); - --color: var(--gold6); - --badge-background: var(--gold3); + --background-color: rgb(var(--gold-1)); + --color: rgb(var(--gold-6)); + --badge-background: rgb(var(--gold-3)); } `; diff --git a/web_console_v2/client/src/components/Username/index.tsx b/web_console_v2/client/src/components/Username/index.tsx index 448e5509c..5c64c672b 100644 --- a/web_console_v2/client/src/components/Username/index.tsx +++ b/web_console_v2/client/src/components/Username/index.tsx @@ -1,11 +1,14 @@ +/* istanbul ignore file */ + import React, { FC } from 'react'; import { useRecoilQuery } from 'hooks/recoil'; import { userInfoQuery } from 'stores/user'; +import { CONSTANTS } from 'shared/constants'; const Username: FC = () => { const query = useRecoilQuery(userInfoQuery); - return <span>{query.data?.username || '-'}</span>; + return <span>{query.data?.username || CONSTANTS.EMPTY_PLACEHOLDER}</span>; }; export default Username; diff --git a/web_console_v2/client/src/components/VariableLabel/index.module.less b/web_console_v2/client/src/components/VariableLabel/index.module.less new file mode 100644 index 000000000..e37df3911 --- /dev/null +++ b/web_console_v2/client/src/components/VariableLabel/index.module.less @@ -0,0 +1,6 @@ +.label_text{ + font-size: 13px; + line-height: 22px; + max-width: 125px; + overflow: hidden; +} diff --git a/web_console_v2/client/src/components/VariableLabel/index.tsx b/web_console_v2/client/src/components/VariableLabel/index.tsx index 8b0601b8a..e55bc2a7d 100644 --- a/web_console_v2/client/src/components/VariableLabel/index.tsx +++ b/web_console_v2/client/src/components/VariableLabel/index.tsx @@ -1,15 +1,10 @@ import React, { FC } from 'react'; -import { Tooltip } from 'antd'; +import { Tooltip } from 
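+// antd → arco migration: Tooltip's `title`/`placement` props become
+// `content`/`position` in @arco-design/web-react (see the JSX changes below)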
'@arco-design/web-react'; import { QuestionCircle } from 'components/IconPark'; import { VariableAccessMode } from 'typings/variable'; import VariablePermission from 'components/VariblePermission'; import GridRow from 'components/_base/GridRow'; -import styled from 'styled-components'; - -const LabelText = styled.span` - font-size: 13px; - line-height: 22px; -`; +import styled from './index.module.less'; type Props = { label: string; @@ -31,8 +26,7 @@ const VariableLabel: FC<Props> = ({ label, tooltip, accessMode }) => { return ( <GridRow gap="8" role="label"> <PermissionIndicator /> - - <LabelText>{label}</LabelText> + <span className={styled.label_text}>{label}</span> </GridRow> ); } @@ -41,11 +35,11 @@ const VariableLabel: FC<Props> = ({ label, tooltip, accessMode }) => { <GridRow gap="8" role="label"> <PermissionIndicator /> - <Tooltip title={tooltip}> - <LabelText> + <Tooltip content={tooltip}> + <span className={styled.label_text}> {label} <QuestionCircle style={{ marginLeft: '5px' }} /> - </LabelText> + </span> </Tooltip> </GridRow> ); diff --git a/web_console_v2/client/src/components/VariableSchemaForm/index.tsx b/web_console_v2/client/src/components/VariableSchemaForm/index.tsx deleted file mode 100644 index ebb5c6257..000000000 --- a/web_console_v2/client/src/components/VariableSchemaForm/index.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import React, { FunctionComponent } from 'react'; -import { Button } from 'antd'; -import { - SchemaForm, - FormButtonGroup, - Submit, - IAntdSchemaFormProps, - createFormActions, -} from '@formily/antd'; -import { Input, NumberPicker, Select, Radio, Checkbox, Switch } from '@formily/antd-components'; -import styled from 'styled-components'; -import { VariableComponent } from 'typings/variable'; -import ModelCodesEditorButton from 'components/ModelCodesEditorButton'; -import DatasetSelect from 'components/DatasetSelect'; - -const components: Record<VariableComponent, any> = { - [VariableComponent.Input]: Input, - [VariableComponent.NumberPicker]: NumberPicker, - [VariableComponent.Select]: Select, - [VariableComponent.Radio]: Radio, - [VariableComponent.Checkbox]: Checkbox, - [VariableComponent.TextArea]: Input.TextArea, - [VariableComponent.Switch]: Switch, - [VariableComponent.Code]: ModelCodesEditorButton, - [VariableComponent.Dataset]: DatasetSelect, -}; - -const StyledSchemaForm = styled(SchemaForm)` - .ant-form-item-label > .ant-form-item-required::before { - order: 2; - } -`; -interface Props extends IAntdSchemaFormProps { - onConfirm: (val: any) => void; - onCancel: (_: any) => void; - confirmText: string; - cancelText: string; -} - -export const formActions = createFormActions(); - -const VariableSchemaForm: FunctionComponent<Props> = ({ - schema, - onConfirm, - onCancel, - cancelText, - confirmText, -}: Props) => { - return ( - <StyledSchemaForm - labelAlign="left" - components={components} - schema={schema} - actions={formActions} - labelCol={8} - onSubmit={onConfirm} - > - <FormButtonGroup offset={8}> - <Submit>{confirmText}</Submit> - - {cancelText && <Button onClick={onCancel}>{cancelText}</Button>} - </FormButtonGroup> - </StyledSchemaForm> - ); -}; - -export default VariableSchemaForm; diff --git a/web_console_v2/client/src/components/VariblePermission/index.tsx b/web_console_v2/client/src/components/VariblePermission/index.tsx index 86e9bb331..9d644e6f6 100644 --- a/web_console_v2/client/src/components/VariblePermission/index.tsx +++ b/web_console_v2/client/src/components/VariblePermission/index.tsx @@ -1,3 +1,5 @@ +/* istanbul ignore 
file */ + import i18n from 'i18n'; import React, { FunctionComponent } from 'react'; import styled from 'styled-components'; @@ -43,21 +45,67 @@ const PrivateShape = styled(Container)` `; type Props = { + /** + * Enable desc + * @default false + */ desc?: boolean; + /** Desc prefix */ + prefix?: string; + /** Container style */ + style?: React.CSSProperties; }; const Writable: FunctionComponent<Props> = (props) => { - return <WritableShape {...props} data-desc={i18n.t('workflow.var_auth_write')} />; + return ( + <WritableShape + {...props} + data-desc={i18n.t('workflow.var_auth_write', { prefix: props.prefix })} + /> + ); }; const Readable: FunctionComponent<Props> = (props) => { - return <ReadableShape {...props} data-desc={i18n.t('workflow.var_auth_read')} />; + return ( + <ReadableShape + {...props} + data-desc={i18n.t('workflow.var_auth_read', { prefix: props.prefix })} + /> + ); }; const Private: FunctionComponent<Props> = (props) => { - return <PrivateShape {...props} data-desc={i18n.t('workflow.var_auth_private')} />; + return ( + <PrivateShape + {...props} + data-desc={i18n.t('workflow.var_auth_private', { prefix: props.prefix })} + /> + ); }; const VariablePermission = { Writable, Readable, Private }; +const LegendContainer = styled.div` + position: relative; + display: flex; + flex-wrap: wrap; + padding: 7px 16px; + border-radius: 2px; + background-color: rgb(var(--gray-1)); + + > div:not(:last-of-type) { + margin-right: 30px; + } +`; + +export const VariablePermissionLegend: FunctionComponent<Props> = ({ style, ...restProps }) => { + return ( + <LegendContainer style={style}> + <Writable {...restProps} /> + <Readable {...restProps} /> + <Private {...restProps} /> + </LegendContainer> + ); +}; + export default VariablePermission; diff --git a/web_console_v2/client/src/components/WhichAlgorithm/index.tsx b/web_console_v2/client/src/components/WhichAlgorithm/index.tsx new file mode 100644 index 000000000..aca3c8764 --- /dev/null +++ b/web_console_v2/client/src/components/WhichAlgorithm/index.tsx @@ -0,0 +1,57 @@ +import React, { useMemo } from 'react'; +import { useQuery } from 'react-query'; +import { Spin } from '@arco-design/web-react'; +import { fetchPeerAlgorithmDetail, getAlgorithmDetail } from 'services/algorithm'; +import { Algorithm } from 'typings/algorithm'; +import CONSTANTS from 'shared/constants'; +import { useGetCurrentProjectId } from 'hooks'; + +type Props = { + id: ID; + /** Format display text */ + formatter?: (algorithm: Algorithm) => string; + uuid?: ID; + participantId?: ID; +}; + +function defaultFormatter(algorithm: Algorithm) { + return `${algorithm.name} (V${algorithm.version})`; +} + +const WhichAlgorithm: React.FC<Props> = ({ + id, + uuid, + formatter = defaultFormatter, + participantId, +}) => { + const projectId = useGetCurrentProjectId(); + const algorithmDetailQuery = useQuery(['getAlgorithmDetail', id], () => getAlgorithmDetail(id!), { + enabled: (Boolean(id) || id === 0) && !Boolean(participantId), + retry: 2, + }); + const peerAlgorithmDetailQuery = useQuery( + ['getPeerAlgorithmDetailQuery', projectId, participantId, uuid], + () => fetchPeerAlgorithmDetail(projectId, participantId, uuid), + { + enabled: + (id === null || participantId !== 0) && + Boolean(uuid) && + (Boolean(projectId) || projectId === 0) && + Boolean(participantId), + retry: 2, + }, + ); + const algorithmDetail = useMemo(() => { + return id === null || participantId !== 0 + ? 
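+    // prefer the peer (participant-side) detail when the algorithm is not local;
+    // otherwise read our own algorithm-detail query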
peerAlgorithmDetailQuery.data?.data
+      : algorithmDetailQuery.data?.data;
+  }, [peerAlgorithmDetailQuery, algorithmDetailQuery, id, participantId]);
+
+  if (algorithmDetailQuery.isFetching || peerAlgorithmDetailQuery.isFetching) {
+    return <Spin />;
+  }
+
+  return <span>{algorithmDetail ? formatter(algorithmDetail) : CONSTANTS.EMPTY_PLACEHOLDER}</span>;
+};
+
+export default WhichAlgorithm;
diff --git a/web_console_v2/client/src/components/WhichDataset/index.tsx b/web_console_v2/client/src/components/WhichDataset/index.tsx
new file mode 100644
index 000000000..035b0e4f0
--- /dev/null
+++ b/web_console_v2/client/src/components/WhichDataset/index.tsx
@@ -0,0 +1,126 @@
+/* istanbul ignore file */
+import React, { FC } from 'react';
+
+import { formatIntersectionDatasetName } from 'shared/modelCenter';
+
+import { useRecoilQuery } from 'hooks/recoil';
+import { intersectionDatasetListQuery } from 'stores/dataset';
+import { CONSTANTS } from 'shared/constants';
+
+import { Spin } from '@arco-design/web-react';
+import { Dataset, IntersectionDataset } from 'typings/dataset';
+import { useQuery } from 'react-query';
+import { fetchDatasetDetail, fetchDatasetList } from 'services/dataset';
+import { FILTER_OPERATOR_MAPPER, filterExpressionGenerator } from 'views/Datasets/shared';
+
+type Props = {
+  id?: ID;
+  loading?: boolean;
+};
+
+const WhichDataset: FC<Props> & {
+  UUID: FC<UUIDProps>;
+  IntersectionDataset: FC<Props>;
+  DatasetDetail: FC<Props>;
+} = ({ id, loading }) => {
+  const datasetListQuery = useQuery(
+    ['fetchDatasetList', 'WhichDataset', id],
+    () => {
+      return fetchDatasetList().then((res) => {
+        return (res?.data || []).filter((item) => String(item.id) === String(id));
+      });
+    },
+    {
+      enabled: Boolean(id),
+      refetchOnWindowFocus: false,
+      retry: 2,
+    },
+  );
+
+  if (loading || datasetListQuery.isFetching) {
+    return <Spin />;
+  }
+
+  return <span>{datasetListQuery.data?.[0]?.name ?? CONSTANTS.EMPTY_PLACEHOLDER}</span>;
+};
+
+// TODO: add a mode prop for choosing between raw datasets and intersection datasets
+const _IntersectionDataset: FC<Props> = ({ id, loading }) => {
+  const { isLoading, data } = useRecoilQuery(intersectionDatasetListQuery);
+
+  if (loading || isLoading) {
+    return <Spin />;
+  }
+
+  const item =
+    data?.find((innerItem: any) => Number(innerItem.id) === Number(id)) ||
+    ({
+      name: CONSTANTS.EMPTY_PLACEHOLDER,
+    } as IntersectionDataset);
+
+  return <span>{formatIntersectionDatasetName(item)}</span>;
+};
+
+type UUIDProps = {
+  uuid: string;
+  loading?: boolean;
+  onAPISuccess?: (data?: Dataset) => void;
+  displayKey?: Partial<keyof Dataset>;
+};
+
+const _UUID: FC<UUIDProps> = ({ uuid, loading, onAPISuccess, displayKey = 'name' }) => {
+  const datasetListQuery = useQuery(
+    ['fetchDatasetList', uuid],
+    () =>
+      fetchDatasetList({
+        filter: filterExpressionGenerator(
+          {
+            uuid,
+          },
+          FILTER_OPERATOR_MAPPER,
+        ),
+      }),
+    {
+      enabled: Boolean(uuid),
+      refetchOnWindowFocus: false,
+      retry: 2,
+      onSuccess(res) {
+        onAPISuccess?.(res.data?.[0] ?? undefined);
+      },
+    },
+  );
+
+  if (loading || datasetListQuery.isFetching) {
+    return <Spin />;
+  }
+
+  return <span>{datasetListQuery.data?.data?.[0]?.[displayKey] ??
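+    // fallback copy when no dataset matches the uuid: '数据集已删除'
+    // ("the dataset has been deleted")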
'数据集已删除'}</span>;
+};
+
+const _DatasetDetail: FC<Props> = ({ id, loading }) => {
+  const datasetDetailQuery = useQuery(
+    ['fetchDatasetDetail', 'WhichDataset', id],
+    () => {
+      return fetchDatasetDetail(id).then((res) => {
+        return res.data;
+      });
+    },
+    {
+      enabled: Boolean(id),
+      refetchOnWindowFocus: false,
+      retry: 2,
+    },
+  );
+
+  if (loading || datasetDetailQuery.isFetching) {
+    return <Spin />;
+  }
+
+  return <div>{datasetDetailQuery.data?.name ?? CONSTANTS.EMPTY_PLACEHOLDER}</div>;
+};
+
+WhichDataset.UUID = _UUID;
+WhichDataset.IntersectionDataset = _IntersectionDataset;
+WhichDataset.DatasetDetail = _DatasetDetail;
+
+export default WhichDataset;
diff --git a/web_console_v2/client/src/components/WhichModel/index.tsx b/web_console_v2/client/src/components/WhichModel/index.tsx
new file mode 100644
index 000000000..078ad955d
--- /dev/null
+++ b/web_console_v2/client/src/components/WhichModel/index.tsx
@@ -0,0 +1,49 @@
+import React, { FC } from 'react';
+import { useQuery } from 'react-query';
+import { Model } from 'typings/modelCenter';
+import { fetchModelDetail_new } from 'services/modelCenter';
+import { useGetCurrentProjectId } from 'hooks';
+import { Spin, Space, Tag } from '@arco-design/web-react';
+import CONSTANTS from 'shared/constants';
+
+type Props = {
+  id: ID;
+  /** Format display text */
+  formatter?: (model: Model) => string;
+  isModelGroup?: boolean;
+};
+
+function defaultFormatter(model: Model) {
+  return model.name;
+}
+
+const WhichModel: FC<Props> = ({ id, formatter = defaultFormatter, isModelGroup = false }) => {
+  const projectId = useGetCurrentProjectId();
+  const modelQuery = useQuery(
+    [`/v2/projects/${projectId}/models/${id}`],
+    () => {
+      return fetchModelDetail_new(projectId!, id).then((res) => res.data);
+    },
+    {
+      enabled: Boolean(projectId) && (Boolean(id) || id === 0),
+      retry: 2,
+    },
+  );
+
+  if (modelQuery.isFetching) {
+    return <Spin />;
+  }
+
+  return (
+    <Space align="start">
+      {modelQuery.data ? formatter(modelQuery.data) : CONSTANTS.EMPTY_PLACEHOLDER}
+      {isModelGroup ?
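+      // model groups additionally show an "自动更新" (auto-update) tag;
+      // illustrative usage: <WhichModel id={modelId} isModelGroup />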
( + <Tag size="small" color="blue"> + 自动更新 + </Tag> + ) : null} + </Space> + ); +}; + +export default WhichModel; diff --git a/web_console_v2/client/src/components/WhichParticipant/index.tsx b/web_console_v2/client/src/components/WhichParticipant/index.tsx new file mode 100644 index 000000000..51bc832c2 --- /dev/null +++ b/web_console_v2/client/src/components/WhichParticipant/index.tsx @@ -0,0 +1,29 @@ +/* istanbul ignore file */ +import React, { FC } from 'react'; + +import { useRecoilQuery } from 'hooks/recoil'; +import { participantListQuery } from 'stores/participant'; +import { CONSTANTS } from 'shared/constants'; + +import { Spin } from '@arco-design/web-react'; + +type Props = { + id?: ID; + loading?: boolean; +}; + +const WhichParticipant: FC<Props> = ({ id, loading }) => { + const { isLoading, data } = useRecoilQuery(participantListQuery); + + if (loading || isLoading) { + return <Spin />; + } + + const participant = data?.find((item) => Number(item.id) === Number(id)) || { + name: CONSTANTS.EMPTY_PLACEHOLDER, + }; + + return <span>{participant.name}</span>; +}; + +export default WhichParticipant; diff --git a/web_console_v2/client/src/components/WhichParticipantDataset/index.tsx b/web_console_v2/client/src/components/WhichParticipantDataset/index.tsx new file mode 100644 index 000000000..439a8a99e --- /dev/null +++ b/web_console_v2/client/src/components/WhichParticipantDataset/index.tsx @@ -0,0 +1,51 @@ +/* istanbul ignore file */ +import React, { FC } from 'react'; +import { useQuery } from 'react-query'; + +import { fetchParticipantDatasetList } from 'services/dataset'; +import { useGetCurrentProjectId } from 'hooks'; +import { CONSTANTS } from 'shared/constants'; + +import { Spin } from '@arco-design/web-react'; +import { ParticipantDataset } from 'typings/dataset'; + +type UUIDProps = { + uuid: string; + loading?: boolean; + /** It's useful to get participantDataset data */ + onAPISuccess?: (data?: ParticipantDataset) => void; + emptyText?: React.ReactNode; +}; + +const WhichParticipantDataset: FC<UUIDProps> = ({ + uuid, + loading, + emptyText = CONSTANTS.EMPTY_PLACEHOLDER, + onAPISuccess, +}) => { + const projectId = useGetCurrentProjectId(); + + const listQuery = useQuery( + ['fetchParticipantDatasetList', 'WhichParticipantDataset', uuid, projectId], + () => + fetchParticipantDatasetList(projectId!, { + uuid, + }), + { + enabled: Boolean(projectId) && Boolean(uuid), + refetchOnWindowFocus: false, + retry: 2, + onSuccess(res) { + onAPISuccess?.(res.data?.[0] ?? undefined); + }, + }, + ); + + if (loading || listQuery.isFetching) { + return <Spin />; + } + + return <span>{listQuery.data?.data?.[0]?.name ?? 
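+  // emptyText is caller-configurable and defaults to CONSTANTS.EMPTY_PLACEHOLDER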
emptyText}</span>; +}; + +export default WhichParticipantDataset; diff --git a/web_console_v2/client/src/components/WhichProject/index.tsx b/web_console_v2/client/src/components/WhichProject/index.tsx index c83d981d5..ee23aedda 100644 --- a/web_console_v2/client/src/components/WhichProject/index.tsx +++ b/web_console_v2/client/src/components/WhichProject/index.tsx @@ -1,16 +1,25 @@ -import { Spin } from 'antd'; -import { useRecoilQuery } from 'hooks/recoil'; +/* istanbul ignore file */ import React, { FC } from 'react'; + +import { useRecoilQuery } from 'hooks/recoil'; import { projectListQuery } from 'stores/project'; +import { CONSTANTS } from 'shared/constants'; + +import { Spin } from '@arco-design/web-react'; + +type Props = { + id?: ID; + loading?: boolean; +}; -const WhichProject: FC<{ id?: number | number; loading?: boolean }> = ({ id, loading }) => { +const WhichProject: FC<Props> = ({ id, loading }) => { const { isLoading, data } = useRecoilQuery(projectListQuery); if (loading || isLoading) { - return <Spin size="small" />; + return <Spin />; } - const project = data?.find((pj) => pj.id === Number(id)) || { name: '-' }; + const project = data?.find((pj) => pj.id === Number(id)) || { name: CONSTANTS.EMPTY_PLACEHOLDER }; return <span>{project.name}</span>; }; diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ConfigNode.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ConfigNode.tsx index fe2144233..9cc8b2815 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ConfigNode.tsx +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ConfigNode.tsx @@ -7,7 +7,7 @@ import DisabledSwitch from './DisabledSwitch'; import PubSub from 'pubsub-js'; import { QuestionCircle } from 'components/IconPark'; import { useTranslation } from 'react-i18next'; -import { Tooltip } from 'antd'; +import { Tooltip } from '@arco-design/web-react'; const ConfigJobNode: FC<JobNodeProps> = ({ data, id }) => { const { t } = useTranslation(); @@ -24,7 +24,7 @@ const ConfigJobNode: FC<JobNodeProps> = ({ data, id }) => { <DisabledSwitch size="small" checked={!isDisabled} onChange={onDisabledChange} /> {isDisabled ? 
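+        // while disabled, the node shows a warning tooltip ('msg_diable_job_will_cause')
+        // in place of its normal config-status row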
( - <Tooltip title={t('workflow.msg_diable_job_will_cause')} placement="bottomLeft"> + <Tooltip content={t('workflow.msg_diable_job_will_cause')} position="bl"> <GridRow gap="4" style={{ fontSize: '11px' }}> <JobStatusText>{t('workflow.job_node_disabled')}</JobStatusText> <QuestionCircle /> diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/DisabledSwitch.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/DisabledSwitch.tsx index 380b87979..54aecd457 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/DisabledSwitch.tsx +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/DisabledSwitch.tsx @@ -1,6 +1,6 @@ import React, { FC } from 'react'; import styled from 'styled-components'; -import { Switch, SwitchProps, Tooltip } from 'antd'; +import { Switch, SwitchProps, Tooltip } from '@arco-design/web-react'; import { useTranslation } from 'react-i18next'; const Container = styled.div` @@ -15,7 +15,7 @@ const DisabledSwitch: FC<SwitchProps> = (props) => { return ( <Container onClick={(e) => e.stopPropagation()}> - <Tooltip title={t('workflow.msg_toggle_job_disabled')}> + <Tooltip content={t('workflow.msg_toggle_job_disabled')}> <Switch {...props} /> </Tooltip> </Container> diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/EditConfigNode.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/EditConfigNode.tsx index 28bbcfd2a..70d8a7391 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/EditConfigNode.tsx +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/EditConfigNode.tsx @@ -1,17 +1,15 @@ import React, { FC, useState } from 'react'; import { Handle, Position } from 'react-flow-renderer'; import { Container, JobName, JobStatusText, StatusIcon, InheritedTag } from './elements'; -import { configStatusText, JobNodeProps, statusIcons, WORKFLOW_JOB_NODE_CHANNELS } from './shared'; +import { configStatusText, JobNodeProps, statusIcons } from './shared'; import GridRow from 'components/_base/GridRow'; -import DisabledSwitch from './DisabledSwitch'; -import PubSub from 'pubsub-js'; import { QuestionCircle } from 'components/IconPark'; import { useTranslation } from 'react-i18next'; -import { Tooltip } from 'antd'; +import { Tooltip } from '@arco-design/web-react'; const ConfigJobNode: FC<JobNodeProps> = ({ data, id }) => { const { t } = useTranslation(); - const [useRawDisabled, setUseRawDisabled] = useState(true); + const [useRawDisabled] = useState(true); const icon = statusIcons[data.status]; const text = configStatusText[data.status]; @@ -24,10 +22,8 @@ const ConfigJobNode: FC<JobNodeProps> = ({ data, id }) => { {data.isTarget && <Handle type="target" position={Position.Top} />} <JobName>{id}</JobName> - <DisabledSwitch size="small" checked={!isDisabled} onChange={onDisabledChange} /> - {isDisabled ? 
( - <Tooltip title={t('workflow.msg_diable_job_will_cause')} placement="bottomLeft"> + <Tooltip content={t('workflow.msg_diable_job_will_cause')} position="bl"> <GridRow gap="4" style={{ fontSize: '11px' }}> <JobStatusText>{t('workflow.job_node_disabled')}</JobStatusText> <QuestionCircle /> @@ -39,7 +35,7 @@ const ConfigJobNode: FC<JobNodeProps> = ({ data, id }) => { <JobStatusText>{text}</JobStatusText> {isReused && ( - <Tooltip title={t('workflow.msg_resued_job')} placement="bottom"> + <Tooltip content={t('workflow.msg_resued_job')} position="bottom"> <InheritedTag color="orange">{t('workflow.job_node_reused')}</InheritedTag> </Tooltip> )} @@ -49,11 +45,6 @@ const ConfigJobNode: FC<JobNodeProps> = ({ data, id }) => { {data.isSource && <Handle type="source" position={Position.Bottom} />} </Container> ); - - function onDisabledChange(val: boolean) { - setUseRawDisabled(false); - PubSub.publish(WORKFLOW_JOB_NODE_CHANNELS.disable_job, { id, data, disabled: !val }); - } }; export default ConfigJobNode; diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ExecutionNode.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ExecutionNode.tsx index bee19950b..3a9c0c7e7 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ExecutionNode.tsx +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ExecutionNode.tsx @@ -5,14 +5,17 @@ import { executionStatusText, JobNodeProps, statusIcons } from './shared'; import GridRow from 'components/_base/GridRow'; import classNames from 'classnames'; import { useTranslation } from 'react-i18next'; -import { Tooltip } from 'antd'; +import { Tooltip } from '@arco-design/web-react'; import { ChartNodeStatus } from '../types'; const ExecutionJobNode: FC<JobNodeProps> = ({ data, id }) => { - const hasError = Boolean(data.raw.error_message); + const hasError = + Boolean(data.raw?.error_message) && + (data.raw.error_message?.app !== '' || JSON.stringify(data.raw.error_message?.pods) !== '{}'); const isDisabled = Boolean(data.raw.disabled); const { t } = useTranslation(); + const errorText = JSON.stringify(data.raw.error_message); const icon = statusIcons[hasError ? ChartNodeStatus.Error : data.status]; const text = executionStatusText[data.status]; @@ -35,8 +38,8 @@ const ExecutionJobNode: FC<JobNodeProps> = ({ data, id }) => { {icon && <StatusIcon src={icon} />} <JobStatusText> {hasError ? 
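+          // error_message is structured ({ app, pods }); it is JSON.stringify-ed into
+          // errorText above before being rendered in the tooltip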
( - <Tooltip className="error-message" title={data.raw.error_message} placement="topLeft"> - {data.raw.error_message} + <Tooltip className="error-message" content={errorText} position="tl"> + {errorText} </Tooltip> ) : ( text @@ -44,7 +47,7 @@ const ExecutionJobNode: FC<JobNodeProps> = ({ data, id }) => { </JobStatusText> {data.raw.reused && ( - <Tooltip title={t('workflow.msg_resued_job')} placement="bottom"> + <Tooltip content={t('workflow.msg_resued_job')} position="bottom"> <InheritedTag color="orange">{t('workflow.job_node_reused')}</InheritedTag> </Tooltip> )} diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ForkNode.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ForkNode.tsx index cfe8952f5..8050c355d 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ForkNode.tsx +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/ForkNode.tsx @@ -13,13 +13,11 @@ import { configStatusText, JobNodeProps, statusIcons, WORKFLOW_JOB_NODE_CHANNELS import GridRow from 'components/_base/GridRow'; import { useTranslation } from 'react-i18next'; import classNames from 'classnames'; -import { Dropdown, Modal, Menu } from 'antd'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import { MenuInfo } from 'rc-menu/lib/interface'; +import { Dropdown, Menu, Tooltip } from '@arco-design/web-react'; +import Modal from 'components/Modal'; import DisabledSwitch from './DisabledSwitch'; import PubSub from 'pubsub-js'; import { QuestionCircle } from 'components/IconPark'; -import { Tooltip } from 'antd'; const ForkJobNode: FC<JobNodeProps> = ({ data, id }) => { const { t } = useTranslation(); @@ -43,7 +41,7 @@ const ForkJobNode: FC<JobNodeProps> = ({ data, id }) => { <DisabledSwitch size="small" checked={!isDisabled} onChange={onDisabledChange} /> {isDisabled ? ( - <Tooltip title={t('workflow.msg_diable_job_will_cause')} placement="bottomLeft"> + <Tooltip content={t('workflow.msg_diable_job_will_cause')} position="bl"> <GridRow gap="4" style={{ fontSize: '11px' }}> <JobStatusText>{t('workflow.job_node_disabled')}</JobStatusText> <QuestionCircle /> @@ -57,7 +55,7 @@ const ForkJobNode: FC<JobNodeProps> = ({ data, id }) => { )} <Dropdown - overlay={ + droplist={ <Menu> <InheritMentItem key="0" onClick={(e) => changeInheritance(e, true)}> {labelReusable} @@ -83,8 +81,8 @@ const ForkJobNode: FC<JobNodeProps> = ({ data, id }) => { PubSub.publish(WORKFLOW_JOB_NODE_CHANNELS.disable_job, { id, data, disabled: !val }); } - function changeInheritance(event: MenuInfo, whetherInherit: boolean) { - event.domEvent.stopPropagation(); + function changeInheritance(event: Event, whetherInherit: boolean) { + event.stopPropagation(); if (whetherInherit === data.inherited) { return; @@ -94,8 +92,6 @@ const ForkJobNode: FC<JobNodeProps> = ({ data, id }) => { title: t('workflow.title_toggle_reusable', { state: whetherInherit ? labelReusable : labelNonreusable, }), - zIndex: Z_INDEX_GREATER_THAN_HEADER, - icon: null, content: whetherInherit ? 
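+      // the confirmation copy switches between the reuse and non-reuse notice for job `id`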
t('workflow.msg_reuse_noti', { name: id, @@ -105,11 +101,6 @@ const ForkJobNode: FC<JobNodeProps> = ({ data, id }) => { }), mask: false, - okText: t('confirm'), - cancelText: t('cancel'), - style: { - top: '35%', - }, onOk() { PubSub.publish(WORKFLOW_JOB_NODE_CHANNELS.change_inheritance, { id, data, whetherInherit }); }, diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/GlobalConfigNode.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/GlobalConfigNode.tsx index 1bd66426f..161e34626 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/GlobalConfigNode.tsx +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/GlobalConfigNode.tsx @@ -1,4 +1,6 @@ import React, { FC } from 'react'; +import { Handle, Position } from 'react-flow-renderer'; + import { GlobalConfigNodeContainer, JobName, JobStatusText, StatusIcon } from './elements'; import { configStatusText, GlobalNodeProps, statusIcons } from './shared'; import GridRow from 'components/_base/GridRow'; @@ -9,11 +11,13 @@ const GlobalConfigNode: FC<GlobalNodeProps> = ({ data, id }) => { return ( <GlobalConfigNodeContainer> + <Handle type="target" position={Position.Top} /> <JobName>{data.raw.name}</JobName> <GridRow gap={5}> {icon && <StatusIcon src={icon} />} <JobStatusText>{text}</JobStatusText> </GridRow> + <Handle type="source" position={Position.Bottom} /> </GlobalConfigNodeContainer> ); }; diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/elements.ts b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/elements.ts index 3de761671..09ddc1ddd 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/elements.ts +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/elements.ts @@ -1,8 +1,10 @@ +/* istanbul ignore file */ + import { convertToUnit } from 'shared/helpers'; import styled from 'styled-components'; import { NODE_WIDTH, NODE_HEIGHT, GLOBAL_CONFIG_NODE_SIZE } from '../helpers'; import { Down } from 'components/IconPark'; -import { Tag, Menu } from 'antd'; +import { Tag, Menu } from '@arco-design/web-react'; import { MixinEllipsis } from 'styles/mixins'; export const Container = styled.div` diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/shared.ts b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/shared.ts index 8a63bbdfe..5f09a8a13 100644 --- a/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/shared.ts +++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/JobNodes/shared.ts @@ -1,3 +1,5 @@ +/* istanbul ignore file */ + import { ChartNodeStatus, GlobalNodeRawData, NodeData } from '../types'; import pendingIcon from 'assets/icons/workflow-pending.svg'; import completetdIcon from 'assets/icons/workflow-completed.svg'; @@ -9,6 +11,7 @@ import { NodeComponentProps } from 'react-flow-renderer'; export const statusIcons: Record<ChartNodeStatus, string> = { [ChartNodeStatus.Pending]: '', [ChartNodeStatus.Processing]: pendingIcon, + [ChartNodeStatus.Validating]: pendingIcon, [ChartNodeStatus.Success]: completetdIcon, [ChartNodeStatus.Warning]: warningIcon, [ChartNodeStatus.Error]: errorIcon, @@ -16,13 +19,14 @@ export const statusIcons: Record<ChartNodeStatus, string> = { export const configStatusText: Record<ChartNodeStatus, string> = { [ChartNodeStatus.Pending]: i18n.t('workflow.job_node_pending'), + [ChartNodeStatus.Validating]: i18n.t('workflow.job_node_validating'), [ChartNodeStatus.Processing]: 
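+  // the new Validating status reuses pendingIcon (see statusIcons above) and gets
+  // its own 'workflow.job_node_validating' copy here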
i18n.t('workflow.job_node_configuring'),
   [ChartNodeStatus.Success]: i18n.t('workflow.job_node_config_completed'),
   [ChartNodeStatus.Warning]: i18n.t('workflow.job_node_unfinished'),
   [ChartNodeStatus.Error]: i18n.t('workflow.job_node_invalid'),
 };
 
-export const executionStatusText: Record<ChartNodeStatus, string> = {
+export const executionStatusText: Partial<Record<ChartNodeStatus, string>> = {
   [ChartNodeStatus.Pending]: i18n.t('workflow.job_node_waiting'),
   [ChartNodeStatus.Processing]: i18n.t('workflow.job_node_running'),
   [ChartNodeStatus.Success]: i18n.t('workflow.job_node_success'),
diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/helpers.ts b/web_console_v2/client/src/components/WorkflowJobsCanvas/helpers.ts
index fc8129685..5d071e82a 100644
--- a/web_console_v2/client/src/components/WorkflowJobsCanvas/helpers.ts
+++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/helpers.ts
@@ -1,7 +1,7 @@
 import { XYPosition, Edge } from 'react-flow-renderer';
 import { Job, JobState } from 'typings/job';
 import { isHead, isLast } from 'shared/array';
-import { Dictionary, head, isEmpty, isNil, last } from 'lodash';
+import { head, isEmpty, isNil, last } from 'lodash-es';
 import { Variable } from 'typings/variable';
 import i18n from 'i18n';
 import {
@@ -25,7 +25,7 @@ export type ConvertParams = {
   /** a.k.a. workflow global settings */
   variables: Variable[];
   /** Extra data passed to react-flow node data */
-  data: Dictionary<any>;
+  data: Record<string, any>;
 };
 
 export type NodeOptions = {
diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/hooks.ts b/web_console_v2/client/src/components/WorkflowJobsCanvas/hooks.ts
index 9e8adeb7d..77c7db229 100644
--- a/web_console_v2/client/src/components/WorkflowJobsCanvas/hooks.ts
+++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/hooks.ts
@@ -1,4 +1,4 @@
-import { isEmpty, clone } from 'lodash';
+import { isEmpty, clone } from 'lodash-es';
 import { useMemo } from 'react';
 import { JobColorsMark, JobNodeRawData } from './types';
diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/index.tsx b/web_console_v2/client/src/components/WorkflowJobsCanvas/index.tsx
index 9b639a4e8..f4e6d3646 100644
--- a/web_console_v2/client/src/components/WorkflowJobsCanvas/index.tsx
+++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/index.tsx
@@ -29,11 +29,12 @@ import ReactFlow, {
 } from 'react-flow-renderer';
 import { Container } from './elements';
 import { ChartWorkflowConfig } from 'typings/workflow';
-import { cloneDeep } from 'lodash';
+import { cloneDeep } from 'lodash-es';
 import { useResizeObserver } from 'hooks';
 import { Side } from 'typings/app';
 import { WORKFLOW_JOB_NODE_CHANNELS } from './JobNodes/shared';
 import PubSub from 'pubsub-js';
+import { nextTick } from 'shared/helpers';
 
 type Props = {
   workflowConfig: ChartWorkflowConfig;
@@ -172,9 +173,9 @@
   }
 
   function onLoad(_reactFlowInstance: OnLoadParams) {
     setChartInstance(_reactFlowInstance!);
+
     // Fit view at next tick
-    // TODO: implement nextTick
-    setImmediate(() => {
+    nextTick(() => {
       _reactFlowInstance!.fitView();
     });
   }
diff --git a/web_console_v2/client/src/components/WorkflowJobsCanvas/types.ts b/web_console_v2/client/src/components/WorkflowJobsCanvas/types.ts
index 2361e6a32..dfce32669 100644
--- a/web_console_v2/client/src/components/WorkflowJobsCanvas/types.ts
+++ b/web_console_v2/client/src/components/WorkflowJobsCanvas/types.ts
@@ -5,6 +5,7 @@ import
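+// ChartNodeStatus gains a Validating member below; executionStatusText above was
+// loosened to Partial<Record<...>> because Validating has no execution-phase label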
{ Variable } from 'typings/variable'; export enum ChartNodeStatus { Pending, Processing, + Validating, Warning, Success, Error, diff --git a/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.module.less b/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.module.less new file mode 100644 index 000000000..4430a863d --- /dev/null +++ b/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.module.less @@ -0,0 +1,9 @@ +.drawer_container{ + :global { + .arco-drawer-content{ + padding: 10px 0 0; + height: 100%; + background-color: #1e1e1e; + } + } +} diff --git a/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.tsx b/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.tsx index 9c5269c2b..2592e9bd2 100644 --- a/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.tsx +++ b/web_console_v2/client/src/components/YAMLTemplateEditorButton/index.tsx @@ -1,53 +1,58 @@ -import { Button, Drawer } from 'antd'; +/* istanbul ignore file */ + +import { Button, Drawer } from '@arco-design/web-react'; import React, { FC } from 'react'; -import styled from 'styled-components'; -import { CodeOutlined } from '@ant-design/icons'; import { useToggle } from 'react-use'; import CodeEditor from 'components/CodeEditor'; +import { formatJSONValue } from 'shared/helpers'; -const Container = styled.div``; - -const StyledDrawer = styled(Drawer)` - top: 60px; - - .ant-drawer-body { - padding: 10px 0 0; - height: 100%; - background-color: #1e1e1e; - } -`; +import styles from './index.module.less'; +import { IconCodeSquare } from '@arco-design/web-react/icon'; type Props = { value?: string; onChange?: (val: string) => any; + disabled?: boolean; + language?: string; + isCheck?: boolean; }; -const CodeEditorButton: FC<Props> = ({ value, onChange }) => { +const CodeEditorButton: FC<Props> = ({ + value, + onChange, + disabled, + language = 'python', + isCheck, +}) => { const [visible, toggleVisible] = useToggle(false); return ( - <Container> - <Button icon={<CodeOutlined />} onClick={onButtonClick}> + <div> + <Button icon={<IconCodeSquare />} onClick={onButtonClick}> 打开编辑器 </Button> - <StyledDrawer - getContainer="#app-content" + <Drawer + className={styles.drawer_container} placement="left" width={window.innerWidth - 400} visible={visible} - contentWrapperStyle={{ - boxShadow: 'none', - }} headerStyle={{ display: 'none', }} + bodyStyle={{ overflow: 'hidden' }} maskStyle={{ backdropFilter: 'blur(3px)' }} - onClose={toggleVisible} + onCancel={toggleVisible} + footer={null} > - <CodeEditor value={value} language="json" onChange={onCodeChange} /> - </StyledDrawer> - </Container> + <CodeEditor + value={language === 'json' ? 
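+          // pretty-print JSON payloads before handing them to the editor; other
+          // languages are passed through untouched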
formatJSONValue(value || '') : value} + language={language as any} + onChange={onCodeChange} + isReadOnly={disabled || isCheck} + /> + </Drawer> + </div> ); function onButtonClick() { diff --git a/web_console_v2/client/src/components/_base/BlockRadio/__snapshots__/index.test.tsx.snap b/web_console_v2/client/src/components/_base/BlockRadio/__snapshots__/index.test.tsx.snap new file mode 100644 index 000000000..9765e6143 --- /dev/null +++ b/web_console_v2/client/src/components/_base/BlockRadio/__snapshots__/index.test.tsx.snap @@ -0,0 +1,156 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`<BlockRadio /> default props layout 1`] = ` +.c2 { + display: grid; + grid-gap: 12px; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; + -webkit-box-pack: start; + -webkit-justify-content: start; + -ms-flex-pack: start; + justify-content: start; + grid-auto-columns: auto; + grid-template-rows: auto; + grid-auto-flow: column; +} + +.c0 { + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-flex-wrap: wrap; + -ms-flex-wrap: wrap; + flex-wrap: wrap; + -webkit-align-items: stretch; + -webkit-box-align: stretch; + -ms-flex-align: stretch; + align-items: stretch; + margin-right: -77px; + margin-bottom: -77px; + -webkit-flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; +} + +.c1 { + --border-color: transparent; + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-flex: 1 0 auto; + -ms-flex: 1 0 auto; + flex: 1 0 auto; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; + -webkit-box-pack: center; + -webkit-justify-content: center; + -ms-flex-pack: center; + justify-content: center; + -webkit-box-flex: var(--flex-grow,1); + -webkit-flex-grow: var(--flex-grow,1); + -ms-flex-positive: var(--flex-grow,1); + flex-grow: var(--flex-grow,1); + min-height: 32px; + padding: 0 12px; + width: auto; + margin-right: 77px; + margin-bottom: 77px; + border: 1.5px solid var(--border-color,var(--lineColor)); + border-radius: 4px; + cursor: pointer; + background-color: var(--bg-color,var(--componentBackgroundColorGray)); +} + +.c1:hover { + --label-color: var(--primaryColor); +} + +.c1[data-is-active='true'] { + --label-color: var(--primaryColor); + --border-color: var(--primaryColor); + --label-weight: 500; + --bg-color: #fff; +} + +.c1[data-is-disabled='true'] { + cursor: not-allowed; + --label-color: var(--textColorDisabled); +} + +.c1[data-is-active='true'][data-is-disabled='true'] { + --label-color: initial; + --border-color: initial; + --label-weight: initial; + --bg-color: #fff; +} + +.c3 { + font-size: 12px; + color: var(--label-color,var(--textColorStrong)); + font-weight: var(--label-weight,normal); + cursor: inherit; +} + +<div + class="c0" +> + <div + class="c1" + data-is-active="false" + role="radio" + style="--flex-grow: 1;" + > + <div + class="c2" + role="grid" + > + <label + class="c3" + > + option 0 + </label> + </div> + </div> + <div + class="c1" + data-is-active="false" + role="radio" + style="--flex-grow: 1;" + > + <div + class="c2" + role="grid" + > + <label + class="c3" + > + option 1 + </label> + </div> + </div> + <div + class="c1" + data-is-active="false" + role="radio" + style="--flex-grow: 1;" + > + <div + class="c2" + role="grid" + > + <label + class="c3" + > + option 2 + </label> + </div> + </div> +</div> +`; diff --git 
a/web_console_v2/client/src/components/_base/BlockRadio/index.test.tsx b/web_console_v2/client/src/components/_base/BlockRadio/index.test.tsx
new file mode 100644
index 000000000..394593291
--- /dev/null
+++ b/web_console_v2/client/src/components/_base/BlockRadio/index.test.tsx
@@ -0,0 +1,166 @@
+import React from 'react';
+import { render, fireEvent, screen, waitFor } from '@testing-library/react';
+import { sleep, getRandomInt } from 'shared/helpers';
+import Component from './index';
+
+describe('<BlockRadio />', () => {
+  test('default props layout', () => {
+    const options = Array(3)
+      .fill(0)
+      .map((_, index) => {
+        return {
+          value: index,
+          label: `option ${index}`,
+        };
+      });
+    const props = {
+      options,
+      isCenter: true,
+      gap: 77,
+    };
+    const { container } = render(<Component {...props} />);
+    expect(container.firstChild).toMatchSnapshot();
+  });
+
+  test('beforeChange and onChange should be called', async () => {
+    const beforeChange = jest.fn();
+    const onChange = jest.fn();
+    const optionsLength = getRandomInt(10, 20);
+
+    const props = {
+      options: generateRandomOptions(optionsLength),
+      beforeChange,
+      onChange,
+    };
+
+    render(<Component {...props} />);
+    const blockList = screen.getAllByRole('radio');
+    const randomNumGenerator = getRandomIntWrapper(0, optionsLength - 1);
+    let clickIndex: number;
+
+    // --------------
+    beforeChange.mockReturnValue(Promise.resolve(false));
+    clickIndex = randomNumGenerator.next();
+    fireEvent.click(blockList[clickIndex]);
+    expect(beforeChange).toHaveBeenCalledWith(clickIndex);
+    expect(beforeChange).toHaveBeenCalledTimes(1);
+    await waitFor(() => {
+      expect(onChange).toHaveBeenCalledTimes(0);
+    });
+
+    // --------------
+    beforeChange.mockReturnValue(Promise.resolve(true));
+    clickIndex = randomNumGenerator.next();
+
+    fireEvent.click(blockList[clickIndex]);
+    // beforeChange has already been called once above, so the expected count is 2
+    expect(beforeChange).toHaveBeenCalledWith(clickIndex);
+    expect(beforeChange).toHaveBeenCalledTimes(2);
+    await waitFor(() => {
+      expect(onChange).toHaveBeenCalledWith(clickIndex);
+      expect(onChange).toHaveBeenCalledTimes(1);
+    });
+
+    // --------------
+    beforeChange.mockReturnValue(sleep(1000).then(() => true));
+    clickIndex = randomNumGenerator.next();
+    fireEvent.click(blockList[clickIndex]);
+    expect(beforeChange).toHaveBeenCalledWith(clickIndex);
+    expect(beforeChange).toHaveBeenCalledTimes(3);
+    await waitFor(
+      () => {
+        expect(onChange).toHaveBeenCalledWith(clickIndex);
+        expect(onChange).toHaveBeenCalledTimes(2);
+      },
+      { timeout: 2000 },
+    );
+  });
+
+  test('renderBlockInner should work', async () => {
+    const optionsLength = getRandomInt(10, 20);
+    const renderBlockInner = jest.fn((props, options) => {
+      return <h1 className={options.isActive ?
'active' : ''}>{props.value}</h1>;
+    });
+    const props = {
+      options: generateRandomOptions(optionsLength),
+      renderBlockInner,
+    };
+
+    const { rerender } = render(<Component {...props} />);
+    expect(renderBlockInner).toHaveBeenCalledTimes(optionsLength);
+    expect(renderBlockInner).toHaveBeenCalledWith(
+      expect.objectContaining({
+        label: expect.any(String),
+        value: expect.any(Number),
+      }),
+      expect.objectContaining({
+        label: expect.anything(),
+        isActive: expect.any(Boolean),
+      }),
+    );
+
+    // When value changes, isActive should be passed in at the corresponding index
+    const selectedIndex = getRandomInt(0, optionsLength - 1);
+    rerender(<Component {...props} value={selectedIndex} />);
+    const blockList = screen.getAllByRole('radio');
+    const activeBlock = blockList[selectedIndex];
+    const innerContent = activeBlock.querySelector('.active');
+    await waitFor(() => {
+      expect(innerContent).toBeTruthy();
+    });
+  });
+
+  test('set option to disabled should work', async () => {
+    const optionsLength = getRandomInt(10, 20);
+    const disabledIndex = getRandomInt(0, optionsLength - 1);
+    const options = generateRandomOptions(optionsLength);
+    const onChange = jest.fn();
+
+    options[disabledIndex].disabled = true;
+    const props = {
+      options,
+      onChange,
+    };
+
+    render(<Component {...props} />);
+
+    const blockList = screen.getAllByRole('radio');
+    const disabledBlock = blockList[disabledIndex];
+    fireEvent.click(disabledBlock);
+
+    await waitFor(() => {
+      expect(onChange).toHaveBeenCalledTimes(0);
+    });
+  });
+});
+
+function generateRandomOptions(length = 4) {
+  return Array(length)
+    .fill(0)
+    .map((_, index) => {
+      return {
+        value: index,
+        label: `option ${index}`,
+        disabled: false,
+      };
+    });
+}
+
+function getRandomIntWrapper(min = 1, max = 10) {
+  const record: number[] = [];
+
+  return {
+    next() {
+      if (record.length >= max - min) {
+        throw new Error('record overflow');
+      }
+
+      let tmp = getRandomInt(min, max);
+      while (record.includes(tmp)) {
+        tmp = getRandomInt(min, max);
+      }
+      record.push(tmp);
+      return tmp;
+    },
+  };
+}
diff --git a/web_console_v2/client/src/components/_base/BlockRadio/index.tsx b/web_console_v2/client/src/components/_base/BlockRadio/index.tsx
index 759327575..1947f4445 100644
--- a/web_console_v2/client/src/components/_base/BlockRadio/index.tsx
+++ b/web_console_v2/client/src/components/_base/BlockRadio/index.tsx
@@ -1,94 +1,307 @@
-import React, { FC } from 'react';
-import styled from 'styled-components';
+import React, { FC, useMemo } from 'react';
+import styled, { CSSProperties } from 'styled-components';
+import { Tooltip } from '@arco-design/web-react';
+import TitleWithIcon from 'components/TitleWithIcon';
+import { IconInfoCircle } from '@arco-design/web-react/icon';
 import GridRow from '../GridRow';
-import { MixinCircle } from 'styles/mixins';
+
+const baseGap = 16;
 
 const Container = styled.div`
   display: flex;
+  flex-wrap: wrap;
+  align-items: stretch;
+  margin-right: -${(props: Partial<Props>) => props.gap || baseGap}px;
+  margin-bottom: -${(props: Partial<Props>) => props.gap || baseGap}px;
+  flex-direction: ${(props: Partial<Props>) => (props.isVertical ? 'column' : 'row')};
 `;
 
 const Block = styled.div`
-  flex: 1;
-  padding: 9px 11px;
+  --border-color: transparent;
+
+  display: flex;
+  flex: 1 0 auto;
+  align-items: center;
+  justify-content: ${(props: Partial<Props>) => (props.isCenter ? 'center' : 'normal')};
+  flex-grow: var(--flex-grow, 1);
+  min-height: 32px;
+  padding: 0 12px;
+  width: ${(props: Partial<Props>) =>
+    props.blockItemWidth ?
`${props.blockItemWidth}px` : 'auto'};
+  margin-right: ${(props: Partial<Props>) => props.gap || baseGap}px;
+  margin-bottom: ${(props: Partial<Props>) => props.gap || baseGap}px;
   border: 1.5px solid var(--border-color, var(--lineColor));
-  border-radius: 2px;
+  border-radius: 4px;
   cursor: pointer;
+  background-color: var(--bg-color, var(--componentBackgroundColorGray));
+
+  &:hover {
+    --label-color: var(--primaryColor);
+  }
 
   &[data-is-active='true'] {
     --label-color: var(--primaryColor);
+    --border-color: var(--primaryColor);
     --label-weight: 500;
+    --bg-color: #fff;
+  }
+
+  &[data-is-disabled='true'] {
+    cursor: not-allowed;
+    --label-color: var(--textColorDisabled);
+  }
+
+  &[data-is-active='true'][data-is-disabled='true'] {
+    --label-color: initial;
+    --border-color: initial;
+    --label-weight: initial;
+    --bg-color: #fff;
+  }
+`;
+const ContainerOneHalf = styled.div`
+  display: flex;
+  flex: 1;
+`;
+const BlockOneHalf = styled.div`
+  --border-color: transparent;
+  display: flex;
+  flex: 1;
+  min-height: 32px;
+  padding: 0 12px;
+  border: 1.5px solid var(--border-color, var(--lineColor));
+  border-radius: 4px;
+  cursor: pointer;
+  background-color: var(--bg-color, var(--componentBackgroundColorGray));
+  justify-content: ${(props: Partial<Props>) => (props.isCenter ? 'center' : 'normal')};
+  align-items: center;
+
+  &[data-is-active='true'] {
+    --label-color: var(--primaryColor);
     --border-color: var(--primaryColor);
+    --label-weight: 500;
+    --bg-color: #fff;
   }
 
   &[data-is-disabled='true'] {
     cursor: not-allowed;
-    filter: grayscale(50);
+    --label-color: var(--textColorDisabled);
+  }
+
+  &[data-is-active='true'][data-is-disabled='true'] {
+    --label-color: initial;
+    --border-color: initial;
+    --label-weight: initial;
+    --bg-color: #fff;
   }
 
   & + & {
-    margin-left: 16px;
+    margin-left: ${(props: Partial<Props>) => props.gap || baseGap}px;
   }
 `;
 
 const Label = styled.label`
   font-size: 12px;
-  line-height: 20px;
   color: var(--label-color, var(--textColorStrong));
   font-weight: var(--label-weight, normal);
+  cursor: inherit;
+`;
+const LabelTips = styled(Label)`
+  font-weight: normal;
+  font-size: 12px;
+  color: var(--label-color, var(--textColorSecondary));
 `;
 
-const RadioIcon = styled.div`
-  ${MixinCircle(14)}
-  border: 2px solid var(--border-color, var(--gray4));
-
-  &[data-is-active='true'] {
-    --border-color: var(--primaryColor);
-    border-width: 4px;
-  }
+const ContainerWithTips = styled.div`
+  display: flex;
+  flex: 1;
+  align-items: center;
+  justify-content: space-between;
+  height: 100%;
 `;
 
-type Option = { value: any; label: string; disabled?: boolean };
+type Option = {
+  /** Form value */
+  value: any;
+  /** Display label */
+  label: string;
+  disabled?: boolean;
+  /** Extra data, one of the function(renderBlockInner) arguments */
+  data?: any;
+  /** Extra tip, only work in <BlockRadio.WithTip/> options */
+  tip?: string;
+  /** Extra tip, only work in <BlockRadio.WithTooltip/> options */
+  tooltip?: string;
+  warnTip?: string;
+};
 
 type Props = {
   value?: any;
   onChange?: (val: any) => void;
+  /**
+   * DataSource
+   */
   options: Option[];
+  /**
+   * Gap between items
+   */
+  gap?: number;
+  /**
+   * flex-grow
+   */
+  flexGrow?: number;
+  /**
+   * When beforeChange returns false (or a Promise that resolves to false), the subsequent onChange call is skipped
+   */
+  beforeChange?: (val: any) => boolean | Promise<boolean>;
+  /**
+   * Customize children render
+   */
   renderBlockInner?: (
     props: Option,
-    options: { label: React.ReactNode; isActive: boolean },
+    options: { label: React.ReactNode; isActive: boolean;
data?: any }, ) => React.ReactElement; + /** + * All items disabled + */ + disabled?: boolean; + /** + * Inline text center + */ + isCenter?: boolean; + isVertical?: boolean; + isOneHalfMode?: boolean; + isWarnTip?: boolean; + blockItemWidth?: number; }; -const BlockRadio: FC<Props> = ({ value, onChange, options, renderBlockInner }) => { +const BlockRadio: FC<Props> & { + WithTip: React.FC<Props>; + WithToolTip: React.FC<Props>; +} = ({ + gap = baseGap, + flexGrow = 1, + value, + beforeChange, + onChange, + options, + renderBlockInner, + disabled = false, + isVertical = false, + isCenter = false, + isOneHalfMode = false, + isWarnTip = false, + blockItemWidth = 0, +}) => { + const warnTip = useMemo(() => { + return options.find((item) => item.value === value)?.warnTip; + }, [value, options]); + if (isOneHalfMode) { + return ( + <ContainerOneHalf> + {options.map((item) => { + return ( + <BlockOneHalf + data-is-active={value === item.value} + data-is-disabled={disabled || item.disabled} + key={item.value} + onClick={() => onBlockClick(item)} + isCenter={isCenter} + gap={gap} + > + {renderBlockInner ? ( + renderBlockInner(item, { + label: <Label>{item.label}</Label>, + data: item.data, + isActive: value === item.value, + }) + ) : ( + <Label>{item.label}</Label> + )} + </BlockOneHalf> + ); + })} + </ContainerOneHalf> + ); + } + return ( - <Container> + <Container isVertical={isVertical} gap={gap}> {options.map((item) => { return ( <Block data-is-active={value === item.value} - data-is-disabled={item.disabled} + data-is-disabled={disabled || item.disabled} + key={item.value} + style={{ '--flex-grow': flexGrow } as CSSProperties} onClick={() => onBlockClick(item)} + isCenter={isCenter} + blockItemWidth={blockItemWidth} + gap={gap} + role="radio" > {renderBlockInner ? 
( renderBlockInner(item, { label: <Label>{item.label}</Label>, + data: item.data, isActive: value === item.value, }) ) : ( <GridRow gap="12"> - <RadioIcon data-is-active={value === item.value} /> <Label>{item.label}</Label> </GridRow> )} </Block> ); })} + {isWarnTip && warnTip && ( + <TitleWithIcon title={warnTip} isLeftIcon={true} isShowIcon={true} icon={IconInfoCircle} /> + )} </Container> ); - function onBlockClick(item: Option) { - if (item.disabled) return; + async function onBlockClick(item: Option) { + if (disabled || item.disabled || item.value === value) return; + + if (beforeChange) { + const shouldChange = beforeChange(item.value); + if (!(await shouldChange)) { + return; + } + } onChange && onChange(item.value); } }; +const WithTip: FC<Props> = (props) => { + return ( + <BlockRadio + renderBlockInner={(item, { label, isActive }) => { + return ( + <ContainerWithTips> + <Label>{item.label}</Label> + <LabelTips>{item.tip}</LabelTips> + </ContainerWithTips> + ); + }} + {...props} + /> + ); +}; +const WithToolTip: FC<Props> = (props) => { + return ( + <BlockRadio + renderBlockInner={(item) => { + return ( + <Tooltip content={item.tooltip}> + <Label>{item.label}</Label> + </Tooltip> + ); + }} + {...props} + /> + ); +}; +BlockRadio.WithTip = WithTip; +BlockRadio.WithToolTip = WithToolTip; + export default BlockRadio; diff --git a/web_console_v2/client/src/components/_base/GridRow/index.tsx b/web_console_v2/client/src/components/_base/GridRow/index.tsx index d7cb44471..a105481db 100644 --- a/web_console_v2/client/src/components/_base/GridRow/index.tsx +++ b/web_console_v2/client/src/components/_base/GridRow/index.tsx @@ -12,11 +12,30 @@ const Container = styled.div` grid-auto-flow: column; `; -type Props = { +export type Props = { + /** + * margin-top + * @default 0 + */ top?: number | string; + /** + * margin-left + * @default 0 + */ left?: number | string; + /** + * @description 每个 item 之间的 gap 距离 + * @description.en-US gap between items + * @default 0 + */ gap?: number | string; + /** + * @default center + */ align?: 'center' | 'start' | 'end'; + /** + * @default start + */ justify?: | 'start' | 'end' @@ -25,8 +44,8 @@ type Props = { | 'space-between' | 'space-around' | 'space-evenly'; - onClick?: any; + className?: string; }; /** @@ -38,8 +57,8 @@ const GridRow: FC<Props> = ({ top, left, ...props }) => { role="grid" {...props} style={{ - marginTop: convertToUnit(top), - marginLeft: convertToUnit(left), + marginTop: top ? convertToUnit(top) : undefined, + marginLeft: left ? 
convertToUnit(left) : undefined, ...((props as any).style || {}), }} > diff --git a/web_console_v2/client/src/components/_base/MockDevtools/utils.ts b/web_console_v2/client/src/components/_base/MockDevtools/utils.ts deleted file mode 100644 index 938bf9d6a..000000000 --- a/web_console_v2/client/src/components/_base/MockDevtools/utils.ts +++ /dev/null @@ -1,45 +0,0 @@ -/* istanbul ignore file */ - -import { AxiosRequestConfig } from 'axios'; -import { omit } from 'lodash'; -import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; -import store from 'store2'; - -export function getMockConfigs() { - return store.get(LOCAL_STORAGE_KEYS.mock_configs) || {}; -} - -export function isThisRequestMockEnabled(config: AxiosRequestConfig): boolean { - const key = `${config.method}|${config.url}`; - - return Boolean(getRequestMockState(key)); -} - -export function getRequestMockState(key: string): boolean | undefined { - return getMockConfigs()[key]; -} - -export function setRequestMockState(key: string, val: boolean): void { - if ( - !['post', 'get', 'patch', 'delete', 'put', 'head', 'options', 'connect'].some((method) => - key.toLowerCase().startsWith(method), - ) - ) { - throw new Error('Key 名不合法!'); - } - - const mocksConfig = store.get(LOCAL_STORAGE_KEYS.mock_configs) || {}; - mocksConfig[key] = val; - - store.set(LOCAL_STORAGE_KEYS.mock_configs, mocksConfig); -} - -export function removeRequestMock(key: string): void { - const mocksConfig = getMockConfigs(); - - store.set(LOCAL_STORAGE_KEYS.mock_configs, omit(mocksConfig, key)); -} - -export function toggleRequestMockState(key: string, val?: boolean): void { - setRequestMockState(key, typeof val === 'boolean' ? val : !getRequestMockState(key)); -} diff --git a/web_console_v2/client/src/components/shared.ts b/web_console_v2/client/src/components/shared.ts new file mode 100644 index 000000000..b2dc6b3cf --- /dev/null +++ b/web_console_v2/client/src/components/shared.ts @@ -0,0 +1,66 @@ +/* istanbul ignore file */ +import { + fetchAlgorithmProjectFileTreeList, + fetchAlgorithmProjectFileContentDetail, + fetchAlgorithmFileTreeList, + fetchAlgorithmFileContentDetail, + fetchPendingAlgorithmFileTreeList, + fetchPendingAlgorithmFileContentDetail, + fetchPeerAlgorithmFileTreeList, + fetchPeerAlgorithmProjectFileContentDetail, +} from 'services/algorithm'; + +export function getAlgorithmProjectProps(props: { id: ID }) { + const { id } = props; + + return { + id: id, + isAsyncMode: true, + getFileTreeList: () => fetchAlgorithmProjectFileTreeList(id!).then((res) => res.data || []), + getFile: (filePath: string) => + fetchAlgorithmProjectFileContentDetail(id!, { + path: filePath, + }).then((res) => res.data.content), + }; +} +export function getAlgorithmProps(props: { id: ID }) { + const { id } = props; + + return { + id: id, + isAsyncMode: true, + getFileTreeList: () => fetchAlgorithmFileTreeList(id!).then((res) => res.data || []), + getFile: (filePath: string) => + fetchAlgorithmFileContentDetail(id!, { + path: filePath, + }).then((res) => res.data.content), + }; +} + +export function getPendingAlgorithmProps(props: { projId: ID; id: ID }) { + const { id, projId } = props; + return { + id, + isAsyncMode: true, + getFileTreeList: () => + fetchPendingAlgorithmFileTreeList(projId!, id!).then((res) => res.data || []), + getFile: (filePath: string) => + fetchPendingAlgorithmFileContentDetail(projId!, id!, { path: filePath }).then( + (res) => res.data.content, + ), + }; +} + +export function getPeerAlgorithmProps(props: { projId: ID; participantId: ID; id: ID; 
uuid: ID }) {
+  const { id, participantId, projId, uuid } = props;
+  return {
+    id,
+    isAsyncMode: true,
+    getFileTreeList: () =>
+      fetchPeerAlgorithmFileTreeList(projId!, participantId, uuid!).then((res) => res.data || []),
+    getFile: (filePath: string) =>
+      fetchPeerAlgorithmProjectFileContentDetail(projId!, participantId, uuid!, {
+        path: filePath,
+      }).then((res) => res.data.content),
+  };
+}
diff --git a/web_console_v2/client/src/hooks/dataset.ts b/web_console_v2/client/src/hooks/dataset.ts
index 36fd1bf76..ffa91ead8 100644
--- a/web_console_v2/client/src/hooks/dataset.ts
+++ b/web_console_v2/client/src/hooks/dataset.ts
@@ -1,5 +1,5 @@
-import { useResetRecoilState } from 'recoil';
-import { datasetBasicForm } from 'stores/dataset';
+import { useResetRecoilState, useSetRecoilState } from 'recoil';
+import { datasetBasicForm, forceReloadDatasetList } from 'stores/dataset';
 
 export function useResetCreateForm() {
   const resetBasicForm = useResetRecoilState(datasetBasicForm);
@@ -8,3 +8,11 @@ export function useResetCreateForm() {
     resetBasicForm();
   };
 }
+
+export function useReloadDatasetList() {
+  const setter = useSetRecoilState(forceReloadDatasetList);
+
+  return function () {
+    setter(Math.random());
+  };
+}
diff --git a/web_console_v2/client/src/hooks/index.test.tsx b/web_console_v2/client/src/hooks/index.test.tsx
new file mode 100644
index 000000000..c993f99db
--- /dev/null
+++ b/web_console_v2/client/src/hooks/index.test.tsx
@@ -0,0 +1,201 @@
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+import { act, renderHook, RenderHookResult } from '@testing-library/react-hooks';
+import routeData from 'react-router';
+
+import { useUrlState, useTablePaginationWithUrlState, useIsFormValueChange } from './index';
+
+describe('useUrlState/useTablePaginationWithUrlState', () => {
+  it('should be defined', () => {
+    expect(useUrlState).toBeDefined();
+    expect(useTablePaginationWithUrlState).toBeDefined();
+  });
+
+  const replaceFn = jest.fn();
+
+  let mockLocation = {
+    pathname: '/',
+    hash: '',
+    search: '',
+    state: '',
+  };
+
+  const mockHistory: any = {
+    push: ({ search }: any) => {
+      replaceFn();
+      mockLocation.search = search;
+    },
+  };
+
+  beforeEach(() => {
+    jest.spyOn(routeData, 'useLocation').mockReturnValue(mockLocation);
+    jest.spyOn(routeData, 'useHistory').mockReturnValue(mockHistory);
+  });
+
+  afterEach(() => {
+    replaceFn.mockClear();
+    mockLocation = {
+      pathname: '/',
+      hash: '',
+      search: '',
+      state: '',
+    };
+  });
+
+  describe('useUrlState', () => {
+    it('history replace should work', async () => {
+      const hook = renderHook(() => {
+        return useUrlState({ mock: '0' });
+      }) as any;
+
+      // If we destructured `const [urlState, setUrlState] = hook.result.current`, `urlState`
+      // would go stale after invoking setUrlState; `hook.result.current[0]` always reads
+      // the latest data, so the assertions below use it directly
+      const [, setUrlState] = hook.result.current;
+
+      expect(replaceFn).toBeCalledTimes(0);
+      expect(hook.result.current[0]).toEqual({ mock: '0' });
+      expect(mockLocation.search).toEqual('');
+      act(() => {
+        setUrlState({ mock: 1 });
+      });
+      expect(hook.result.current[0]).toEqual({ mock: '1' });
+
+      expect(replaceFn).toBeCalledTimes(1);
+      expect(mockLocation.search).toEqual('?mock=1');
+      act(() => {
+        setUrlState({ mock: 2, test: 3 });
+      });
+      expect(hook.result.current[0]).toEqual({ mock: '2', test: '3' });
+      expect(mockLocation.search).toEqual('?mock=2&test=3');
+    });
+  });
+
+  describe('useTablePaginationWithUrlState', () => {
+    it('default options', async () => {
+      const hook: RenderHookResult<
+        any,
ReturnType<typeof useTablePaginationWithUrlState> + > = renderHook(() => { + return useTablePaginationWithUrlState(); + }) as any; + + expect(hook.result.current).toMatchObject({ + urlState: expect.objectContaining({ + page: '1', + pageSize: '10', + }), + setUrlState: expect.any(Function), + reset: expect.any(Function), + paginationProps: expect.objectContaining({ + current: 1, + pageSize: 10, + onChange: expect.any(Function), + onShowSizeChange: expect.any(Function), + }), + }); + + act(() => { + hook.result.current.paginationProps.onChange(2, 10); + }); + + expect(hook.result.current.urlState).toEqual({ + page: '2', + pageSize: '10', + }); + expect(hook.result.current.paginationProps).toMatchObject({ + current: 2, + pageSize: 10, + }); + + act(() => { + hook.result.current.paginationProps.onShowSizeChange(3, 20); + }); + + expect(hook.result.current.urlState).toEqual({ + page: '3', + pageSize: '20', + }); + expect(hook.result.current.paginationProps).toMatchObject({ + current: 3, + pageSize: 20, + }); + + act(() => { + hook.result.current.reset(); + }); + expect(hook.result.current.urlState).toEqual({ + page: '1', + pageSize: '10', + }); + expect(hook.result.current.paginationProps).toMatchObject({ + current: 1, + pageSize: 10, + }); + }); + + it('custom options', async () => { + const hook: RenderHookResult< + any, + ReturnType<typeof useTablePaginationWithUrlState> + > = renderHook(() => { + return useTablePaginationWithUrlState({ + defaultPage: 2, + defaultPageSize: 15, + }); + }) as any; + + expect(hook.result.current.urlState).toEqual({ + page: '2', + pageSize: '15', + }); + + act(() => { + hook.result.current.paginationProps.onChange(2, 10); + }); + + expect(hook.result.current.urlState).toEqual({ + page: '2', + pageSize: '10', + }); + expect(hook.result.current.paginationProps).toMatchObject({ + current: 2, + pageSize: 10, + }); + + act(() => { + hook.result.current.reset(); + }); + expect(hook.result.current.urlState).toEqual({ + page: '2', + pageSize: '15', + }); + expect(hook.result.current.paginationProps).toMatchObject({ + current: 2, + pageSize: 15, + }); + }); + }); +}); +it('useIsFormValueChange', () => { + const mockFn = jest.fn(); + + const { result } = renderHook(() => { + return useIsFormValueChange(mockFn); + }); + + expect(result.current.isFormValueChanged).toBe(false); + expect(mockFn).toBeCalledTimes(0); + + act(() => { + result.current.onFormValueChange(); + }); + + expect(result.current.isFormValueChanged).toBe(true); + expect(mockFn).toBeCalledTimes(1); + + act(() => { + result.current.resetChangedState(); + }); + + expect(result.current.isFormValueChanged).toBe(false); + expect(mockFn).toBeCalledTimes(1); +}); diff --git a/web_console_v2/client/src/hooks/index.ts b/web_console_v2/client/src/hooks/index.ts index aa53b0c0f..22f7fe154 100644 --- a/web_console_v2/client/src/hooks/index.ts +++ b/web_console_v2/client/src/hooks/index.ts @@ -1,8 +1,25 @@ -import { ChangeEvent, useEffect, useState, useMemo, useRef } from 'react'; +import { ChangeEvent, useEffect, useState, useMemo, useRef, useCallback } from 'react'; import store from 'store2'; +import { useRecoilValue } from 'recoil'; import keyboardjs, { KeyEvent } from 'keyboardjs'; import { useToggle, useUnmount } from 'react-use'; import PubSub from 'pubsub-js'; +import { parse, stringify, IStringifyOptions, IParseOptions } from 'qs'; +import { useHistory, useLocation } from 'react-router'; + +import { appFlag, systemInfoQuery } from 'stores/app'; +import { projectState } from 'stores/project'; + +import logoWhite 
from 'assets/images/logo-white.png'; +import logoBlack from 'assets/images/logo-black.png'; +import logoBioland from 'assets/icons/logo-bioland.png'; +import logoBiolandColoful from 'assets/icons/logo-bioland-colorful.svg'; + +import { FlagKey } from 'typings/flag'; +import { useRecoilQuery } from './recoil'; +import { useQuery } from 'react-query'; +import { getProjectDetailById } from 'services/project'; +import { ProjectBaseAbilitiesType, ProjectTaskType } from 'typings/project'; export function useInputChange<T>(defaultValue: T) { const [value, setState] = useState(defaultValue); @@ -70,11 +87,17 @@ export function useListenKeyboard( } export function useSubscribe(channel: string, cb: any, deps: any[] = []) { + // eslint-disable-next-line react-hooks/exhaustive-deps + const callback = useCallback((...inputs) => cb(...inputs), [channel, ...deps]); + useEffect(() => { - PubSub.subscribe(channel, cb); + const token = PubSub.subscribe(channel, callback); - return () => PubSub.unsubscribe(channel); - }, [cb, channel, deps]); + return () => { + PubSub.unsubscribe(token); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [channel, callback, deps.join('')]); } /** @@ -84,8 +107,8 @@ export function useResizeObserver(callback: any) { const ref = useRef(); const resizeObserver = useMemo(() => { - return new ResizeObserver((entries) => { - for (let entry of entries) { + return new ResizeObserver((entries: ResizeObserverEntry[]) => { + for (const entry of entries) { if (entry.target === ref.current) { callback && callback(); } @@ -105,3 +128,336 @@ export function useResizeObserver(callback: any) { return ref; } + +export function useWhyDidYouUpdate(name: string, props: Record<any, any>) { + // Get a mutable ref object where we can store props ... + // ... for comparison next time this hook runs. 
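+  //
+  // Usage sketch (hypothetical component and props, for illustration only):
+  //   useWhyDidYouUpdate('JobTable', { jobs, filter, onSelect });
+  // In development this logs a `[why-did-you-update]` entry listing each prop whose
+  // reference changed between renders, e.g. `{ filter: { from: ..., to: ... } }`.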
+ const previousProps = useRef<typeof props>(); + useEffect(() => { + if (previousProps.current) { + // Get all keys from previous and current props + const allKeys = Object.keys({ ...previousProps.current, ...props }); + // Use this object to keep track of changed props + const changesObj: Record<any, any> = {}; + // Iterate through keys + allKeys.forEach((key) => { + // If previous is different from current + if (previousProps.current?.[key] !== props[key]) { + // Add to changesObj + changesObj[key] = { + from: previousProps.current?.[key], + to: props[key], + }; + } + }); + // If changesObj not empty then output to console + if (Object.keys(changesObj).length) { + if (process.env.NODE_ENV === 'development') { + // eslint-disable-next-line no-console + console.log('[why-did-you-update]', name, changesObj); + } + } + } + // Finally update previousProps with current props for next hook call + previousProps.current = props; + }); +} + +export function useGetLogoSrc() { + const appFlagValue = useRecoilValue(appFlag); + + let primaryLogo = logoWhite; + let secondaryLogo = logoBlack; + + if (process.env.THEME === 'bioland') { + primaryLogo = logoBioland; + secondaryLogo = logoBiolandColoful; + } + + const logoSrc = useMemo(() => { + if (appFlagValue[FlagKey.LOGO_URL]) { + return appFlagValue[FlagKey.LOGO_URL]; + } + + return primaryLogo; + }, [appFlagValue, primaryLogo]); + + return { primaryLogo: logoSrc, secondaryLogo }; +} + +export function useGetThemeBioland() { + return process.env.THEME === 'bioland'; +} + +interface UseUrlStateOptions { + navigateMode?: 'push' | 'replace'; + parseConfig?: IParseOptions; + stringifyConfig?: IStringifyOptions; +} + +interface UrlState { + [key: string]: any; +} + +/** + * A hook that stores the state into url query parameters + */ +export function useUrlState<S extends UrlState = UrlState>( + initialState?: S | (() => S), + options?: UseUrlStateOptions, +) { + type state = Partial<{ [key in keyof S]: any }>; + + const { + navigateMode = 'push', + parseConfig = { + ignoreQueryPrefix: true, + }, + stringifyConfig = { + addQueryPrefix: true, + }, + } = options || {}; + const location = useLocation(); + const history = useHistory(); + + const [, forceUpdate] = useState(false); + + const initialStateRef = useRef( + typeof initialState === 'function' ? (initialState as () => S)() : initialState || {}, + ); + + const parseConfigRef = useRef(parseConfig); + + const queryFromUrl = useMemo(() => { + return parse(location.search, parseConfigRef.current); + }, [location.search]); + + const targetQuery: state = useMemo( + () => ({ + ...initialStateRef.current, + ...queryFromUrl, + }), + [queryFromUrl], + ); + + const setState = (s: React.SetStateAction<state>) => { + const newQuery = typeof s === 'function' ? (s as Function)(targetQuery) : s; + + // If the url search does not change after setState(), forceUpdate() is needed to trigger an update. 
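+    // (Assumption: pushing an identical search string may not hand the consumer a
+    //  new location, so flipping this local boolean guarantees a re-render either way.)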
+ forceUpdate((v) => !v); + + history[navigateMode]({ + hash: location.hash, + search: stringify({ ...queryFromUrl, ...newQuery }, stringifyConfig) || '?', + }); + }; + + return [targetQuery, setState] as const; +} + +interface UseTablePaginationWithUrlStateOptions { + defaultPage?: number; + defaultPageSize?: number; + urlStateOption?: UseUrlStateOptions; +} + +export function useTablePaginationWithUrlState(options?: UseTablePaginationWithUrlStateOptions) { + const initialStateRef = useRef({ + page: String(options?.defaultPage || 1), + pageSize: String(options?.defaultPageSize || 10), + }); + + const [urlState, setUrlState] = useUrlState(initialStateRef.current, options?.urlStateOption); + + function reset() { + setUrlState((prevState) => ({ + ...prevState, + ...initialStateRef.current, + })); + } + function onChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + function onShowSizeChange(page: number, pageSize: number) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + return { + urlState, + setUrlState, + reset, + paginationProps: { + pageSize: Number(urlState.pageSize), + current: Number(urlState.page), + onChange, + onShowSizeChange, + showTotal: (total: number) => `共 ${total} 条记录`, + }, + }; +} + +// borrow from: https://github.com/alibaba/hooks/blob/master/packages/hooks/src/usePrevious/index.ts +type ShouldUpdateFunc<T> = (prev: T | undefined, next: T) => boolean; +const defaultShouldUpdate = <T>(a?: T, b?: T) => a !== b; +export function usePrevious<T>( + state: T, + shouldUpdate: ShouldUpdateFunc<T> = defaultShouldUpdate, +): T | undefined { + const prevRef = useRef<T>(); + const curRef = useRef<T>(); + + if (shouldUpdate(curRef.current, state)) { + prevRef.current = curRef.current; + curRef.current = state; + } + + return prevRef.current; +} + +export function useInterval( + fn: () => void, + delay: number | undefined, + options?: { + immediate?: boolean; + }, +) { + const immediate = options?.immediate; + const fnRef = useRef<() => void>(); + fnRef.current = fn; + + useEffect(() => { + if (typeof delay !== 'number' || delay <= 0) return; + if (immediate) { + fnRef.current?.(); + } + const timer = setInterval(() => { + fnRef.current?.(); + }, delay); + return () => { + clearInterval(timer); + }; + }, [delay, immediate]); +} + +export function useIsFormValueChange(cb?: Function) { + const [isFormValueChanged, setIsFormValueChanged] = useState(false); + + function onValuesChange(...args: any[]) { + if (!isFormValueChanged) { + setIsFormValueChanged(true); + } + cb?.(...args); + } + function resetChangedState() { + setIsFormValueChanged(false); + } + return { + isFormValueChanged: isFormValueChanged, + onFormValueChange: onValuesChange, + resetChangedState, + }; +} + +/* istanbul ignore next */ +export function useGetCurrentProjectId() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.id; +} +export function useGetCurrentUserName() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.creator; +} +/* istanbul ignore next */ +export function useGetCurrentProjectParticipantId() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.participants?.[0]?.id; +} + +/* istanbul ignore next */ +export function useGetCurrentProjectParticipantList() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.participants ?? 
[]; +} +/* istanbul ignore next */ +export function useGetCurrentProjectParticipantName() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.participants?.[0]?.name ?? ''; +} +/* istanbul ignore next */ +export function useGetCurrentParticipantPureDomainName() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.participants?.[0]?.pure_domain_name ?? ''; +} +/* istanbul ignore next */ +export function useGetCurrentProjectType() { + const selectedProject = useRecoilValue(projectState); + return selectedProject.current?.participant_type; +} +/* istanbul ignore next */ +export function useGetCurrentDomainName() { + const { data: systemInfo } = useRecoilQuery(systemInfoQuery); + return systemInfo?.domain_name ?? ''; +} +/* istanbul ignore next */ +export function useGetCurrentPureDomainName() { + const { data: systemInfo } = useRecoilQuery(systemInfoQuery); + return systemInfo?.pure_domain_name ?? ''; +} + +/* istanbul ignore next */ +export function useGetAppFlagValue(flagKey?: FlagKey) { + const appFlagValue = useRecoilValue(appFlag); + return flagKey ? appFlagValue[flagKey] : appFlagValue; +} + +export function useGetCurrentProjectAbilityConfig(): { + abilities: (ProjectTaskType | ProjectBaseAbilitiesType)[] | undefined; + action_rules: any; + hasIdAlign: Boolean; + hasHorizontal: Boolean; + hasVertical: Boolean; + hasTrusted: Boolean; +} { + const projectId = useGetCurrentProjectId(); + const projectDetailQuery = useQuery( + ['getProjectDetailById', projectId], + () => getProjectDetailById(projectId!), + { + enabled: Boolean(projectId), + retry: 2, + }, + ); + const { abilities, action_rules } = useMemo(() => { + if (projectDetailQuery.isLoading) { + return { abilities: [], action_rules: {} }; + } + if (!projectId) { + return { abilities: [ProjectBaseAbilitiesType.BASE], action_rules: {} }; + } + const projectConfig = projectDetailQuery.data?.data.config; + return { + // If the user select a old project, it will be used as “VERTICAL” + abilities: projectConfig?.abilities?.length + ? 
projectConfig?.abilities
+        : [ProjectTaskType.VERTICAL],
+      // TODO: define action_rules handling for old projects
+      action_rules: projectConfig?.action_rules,
+    };
+  }, [projectDetailQuery, projectId]);
+  return {
+    abilities,
+    action_rules,
+    hasIdAlign: (abilities as ProjectTaskType[])?.includes(ProjectTaskType.ALIGN), // has ID-alignment capability
+    hasHorizontal: (abilities as ProjectTaskType[])?.includes(ProjectTaskType.HORIZONTAL), // has horizontal federated capability
+    hasVertical: (abilities as ProjectTaskType[])?.includes(ProjectTaskType.VERTICAL), // has vertical federated capability
+    hasTrusted: (abilities as ProjectTaskType[])?.includes(ProjectTaskType.TRUSTED), // has trusted-analysis capability
+  };
+}
diff --git a/web_console_v2/client/src/hooks/modelCenter.tsx b/web_console_v2/client/src/hooks/modelCenter.tsx
new file mode 100644
index 000000000..57b6d5255
--- /dev/null
+++ b/web_console_v2/client/src/hooks/modelCenter.tsx
@@ -0,0 +1,159 @@
+import { useResetRecoilState, useSetRecoilState } from 'recoil';
+import {
+  trainModelForm,
+  evaluationModelForm,
+  offlinePredictionModelForm,
+} from 'stores/modelCenter';
+import { treeTemplateIdQuery, nnTemplateIdQuery } from 'stores/modelCenter';
+import { useRecoilQuery } from 'hooks/recoil';
+import { useEffect, useMemo } from 'react';
+import { useQueries, useQuery } from 'react-query';
+import { useGetCurrentProjectId } from 'hooks';
+import {
+  fetchModelJobMetries_new,
+  fetchPeerModelJobMetrics_new,
+  fetchModelJob_new,
+} from 'services/modelCenter';
+import { ModelJob } from 'typings/modelCenter';
+import { cloneDeep } from 'lodash-es';
+import { getAlgorithmDetail } from 'services/algorithm';
+import { EnumAlgorithmProjectType } from 'typings/algorithm';
+import { Message } from '@arco-design/web-react';
+
+export function useResetCreateForm() {
+  const resetTrainModelForm = useResetRecoilState(trainModelForm);
+  const resetEvaluationModelForm = useResetRecoilState(evaluationModelForm);
+  const resetOfflinePredictionModelForm = useResetRecoilState(offlinePredictionModelForm);
+
+  return function () {
+    resetTrainModelForm();
+    resetEvaluationModelForm();
+    resetOfflinePredictionModelForm();
+  };
+}
+
+export function useGetTrainModelTemplateId() {
+  const { data: treeTemplateId, error: treeTemplateError } = useRecoilQuery(treeTemplateIdQuery);
+  const { data: nnTemplateId, error: nnTemplateError } = useRecoilQuery(nnTemplateIdQuery);
+
+  const setTreeTemplateId = useSetRecoilState(treeTemplateIdQuery);
+  const setNNTemplateId = useSetRecoilState(nnTemplateIdQuery);
+
+  useEffect(() => {
+    if (treeTemplateId && !treeTemplateError) {
+      setTreeTemplateId(treeTemplateId);
+    }
+  }, [treeTemplateId, treeTemplateError, setTreeTemplateId]);
+  useEffect(() => {
+    if (nnTemplateId && !nnTemplateError) {
+      setNNTemplateId(nnTemplateId);
+    }
+  }, [nnTemplateId, nnTemplateError, setNNTemplateId]);
+
+  return {
+    treeTemplateId,
+    nnTemplateId,
+    treeTemplateError,
+    nnTemplateError,
+  };
+}
+
+export function useModelMetriesResult(jobId: ID, jobParticipantId?: ID) {
+  const projectId = useGetCurrentProjectId();
+
+  return useQuery(
+    ['get_model_jobs_metries', jobId, projectId, jobParticipantId],
+    () => {
+      if (!jobParticipantId) {
+        return fetchModelJobMetries_new(projectId!, jobId).then(
+          (res: any) => res?.[0]?.data,
+          (error) => {
+            Message.error(error.message);
+          },
+        );
+      } else if (jobParticipantId !== jobId) {
+        return fetchPeerModelJobMetrics_new(projectId!, jobId, jobParticipantId!).then(
+          (res) => res.data,
+          (error) => {
+            Message.error(error.message);
+          },
+        );
+      } else {
+        return Promise.resolve({});
+      }
+    },
+    {
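+      // The query stays idle until a project is selected; jobId / projectId /
+      // jobParticipantId in the query key above drive re-fetches.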
+ enabled: Boolean(projectId), + }, + ); +} + +export function useBatchModelJobMetricsAndConfig(jobList: ModelJob[], enable: boolean) { + const projectId = useGetCurrentProjectId(); + const metricsQuery = useQueries( + jobList.map((item) => ({ + enabled: Boolean(projectId && enable), + queryKey: ['batch_model_job_metrics', item.id], + queryFn() { + return fetchModelJobMetries_new(projectId!, item.id).then((res: any) => res?.[0]?.data); + }, + })), + ); + const configQuery = useQueries( + jobList.map((item) => ({ + enabled: Boolean(projectId && enable), + queryKey: ['batch_model_job_config', item.id], + async queryFn() { + const variables = await fetchModelJob_new(projectId!, item.id).then( + (res) => res.data.config.job_definitions?.[0].variables, + ); + let parameterKeyList: string[] = []; + + if ( + item.algorithm_type === EnumAlgorithmProjectType.NN_VERTICAL || + item.algorithm_type === EnumAlgorithmProjectType.NN_HORIZONTAL + ) { + const algorithm = await getAlgorithmDetail(item.algorithm_id).then((res) => res.data); + const parameters = algorithm.parameter?.variables ?? []; + parameterKeyList = ['epoch_num', 'verbosity', ...parameters.map((p) => p.name)]; + } else if (item.algorithm_type === EnumAlgorithmProjectType.TREE_VERTICAL) { + parameterKeyList = [ + 'learning_rate', + 'max_iters', + 'max_depth', + 'l2_regularization', + 'max_bins', + 'num_parallel', + ]; + } + + return variables.filter((v) => parameterKeyList.includes(v.name)); + }, + })), + ); + + return useMemo(() => { + const ret = []; + let isFetchedCount = 0; + for (let i = 0; i < metricsQuery.length; i++) { + const job = jobList[i]; + const mq = metricsQuery[i]; + const cq = configQuery[i]; + if (mq.isFetched || cq.isFetched) { + isFetchedCount++; + const mData = mq.data as any; + const cData = cq.data as any; + ret.push({ + id: job.name, + job: job, + config: cloneDeep(cData ?? []), + metric: cloneDeep(mData ?? 
{}), + }); + } + } + return { + dataList: ret, + isLoading: isFetchedCount < metricsQuery.length, + }; + }, [metricsQuery, configQuery, jobList]); +} diff --git a/web_console_v2/client/src/hooks/participant.ts b/web_console_v2/client/src/hooks/participant.ts new file mode 100644 index 000000000..d94b2069e --- /dev/null +++ b/web_console_v2/client/src/hooks/participant.ts @@ -0,0 +1,58 @@ +import { useMemo } from 'react'; +import { useSetRecoilState } from 'recoil'; +import { useQuery, UseQueryOptions } from 'react-query'; +import { checkParticipantConnection } from 'services/participant'; +import { ConnectionStatus, ConnectionStatusType, Version } from 'typings/participant'; +import { forceReloadParticipantList } from 'stores/participant'; + +export function useReloadParticipantList() { + const setter = useSetRecoilState(forceReloadParticipantList); + + return function () { + setter(Math.random()); + }; +} + +export function useCheckConnection( + partnerId: ID, + options?: UseQueryOptions<{ + data: { success: boolean; message: string; application_version: Version }; + }>, +): [ConnectionStatus, Function] { + const checkQuery = useQuery( + [`checkConnection-participant-${partnerId}`, partnerId], + () => checkParticipantConnection(partnerId), + { + enabled: Boolean(partnerId), + cacheTime: 1, + retry: false, + ...options, + }, + ); + + const finalStatus = useMemo(() => { + const status = { + success: ConnectionStatusType.Processing, + message: '', + application_version: {}, + }; + if (!checkQuery.isFetching) { + if (checkQuery.isError) { + status.success = ConnectionStatusType.Fail; + status.message = (checkQuery.error as Error).message; + } else { + const queryData = checkQuery.data?.data; + if (queryData) { + status.message = queryData.message; + status.application_version = queryData.application_version; + status.success = queryData.success + ? 
ConnectionStatusType.Success + : ConnectionStatusType.Fail; + } + } + } + return status; + }, [checkQuery]); + + return [finalStatus, checkQuery.refetch]; +} diff --git a/web_console_v2/client/src/hooks/project.ts b/web_console_v2/client/src/hooks/project.ts index adeda4cf0..0f24e44c9 100644 --- a/web_console_v2/client/src/hooks/project.ts +++ b/web_console_v2/client/src/hooks/project.ts @@ -1,7 +1,7 @@ import { useQuery, UseQueryOptions } from 'react-query'; -import { useSetRecoilState } from 'recoil'; +import { useResetRecoilState, useSetRecoilState } from 'recoil'; import { checkConnection } from 'services/project'; -import { forceReloadProjectList } from 'stores/project'; +import { forceReloadProjectList, projectCreateForm, projectJoinForm } from 'stores/project'; import { ConnectionStatus, Project } from 'typings/project'; export function useCheckConnection( @@ -9,7 +9,7 @@ export function useCheckConnection( options?: UseQueryOptions<{ data: { success: boolean } }>, ): [ConnectionStatus, Function] { const checkQuery = useQuery( - [`checkConnection-${project.id}`, project.id], + [`checkConnection-project-${project.id}`, project.id], () => checkConnection(project.id), { cacheTime: 1, @@ -36,3 +36,18 @@ export function useReloadProjectList() { setter(Math.random()); }; } + +export function useResetCreateForm() { + const resetCreateForm = useResetRecoilState(projectCreateForm); + + return function () { + resetCreateForm(); + }; +} +export function useResetJoinForm() { + const resetJoinForm = useResetRecoilState(projectJoinForm); + + return function () { + resetJoinForm(); + }; +} diff --git a/web_console_v2/client/src/hooks/user.tsx b/web_console_v2/client/src/hooks/user.tsx new file mode 100644 index 000000000..e038bcdfb --- /dev/null +++ b/web_console_v2/client/src/hooks/user.tsx @@ -0,0 +1,15 @@ +import { userInfoQuery, userInfoGetters } from 'stores/user'; +import { useRecoilQuery } from 'hooks/recoil'; +import { FedRoles } from 'typings/auth'; + +export function useGetUserInfo() { + const { data } = useRecoilQuery(userInfoQuery); + // TODO: maybe undefined + return data; +} + +export function useIsAdminRole() { + const { isLoading, data } = useRecoilQuery(userInfoGetters); + + return !isLoading && data && data.role === FedRoles.Admin; +} diff --git a/web_console_v2/client/src/i18n/index.ts b/web_console_v2/client/src/i18n/index.ts index 8145d0a88..96a5a5f23 100644 --- a/web_console_v2/client/src/i18n/index.ts +++ b/web_console_v2/client/src/i18n/index.ts @@ -35,5 +35,4 @@ export default i18next; export function setLocale(lng: FedLanguages) { i18next.changeLanguage(lng); dayjs.locale(lng); - store.set(LOCAL_STORAGE_KEYS.language, lng); } diff --git a/web_console_v2/client/src/i18n/resources/en.ts b/web_console_v2/client/src/i18n/resources/en.ts index 2a692dead..17a98a806 100644 --- a/web_console_v2/client/src/i18n/resources/en.ts +++ b/web_console_v2/client/src/i18n/resources/en.ts @@ -9,6 +9,15 @@ import app from './modules/app'; import dataset from './modules/dataset'; import settings from './modules/settings'; import users from './modules/users'; +import intersection_dataset from './modules/intersection_dataset'; +import validError from './modules/validError'; +import modelCenter from './modules/modelCenter'; +import modelServing from './modules/modelServing'; +import audit from './modules/audit'; +import algorithmManagement from './modules/algorithmManagement'; +import operation_maintenance from './modules/operation_maintenance'; +import dashboard from './modules/dashboard'; 
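+// NOTE: each resource module imported here is also registered on the `messages` map below.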
+import trusted_center from './modules/trustedCenter'; const messages = { translation: { @@ -23,6 +32,15 @@ const messages = { dataset: dataset.en, settings: settings.en, users: users.en, + intersection_dataset: intersection_dataset.en, + valid_error: validError.en, + model_center: modelCenter.en, + model_serving: modelServing.en, + audit: audit.en, + algorithm_management: algorithmManagement.en, + operation_maintenance: operation_maintenance.en, + dashboard: dashboard.en, + trusted_center: trusted_center.en, all: 'All', terms: 'Terms of Services', @@ -33,8 +51,11 @@ const messages = { cancel: 'Cancel', close: 'Close', edit: 'Edit', + scale: 'Scale Service', delete: 'Delete', reset: 'Reset', + stop: 'Stop', + terminate: 'Terminate', operation: 'Operation', previous_step: 'previous step', next_step: 'Next step', @@ -43,6 +64,131 @@ const messages = { yes: 'Yes', no: 'No', pls_try_again_later: 'Please try again later', + id: 'ID', + creator: 'Creator', + created_at: 'CreatedTime', + started_at: 'StartedTime', + stop_at: 'StopTime', + running_duration: 'Running duration', + + updated_at: 'UpdatedTime', + deleted_at: 'DeletedTime', + hint_total_table: 'Total {{total}} items', + msg_quit_warning: 'After canceling, the configured content will no longer be retained', + create: 'create', + save: 'save', + send_request: 'send request', + more_info: 'more information', + + placeholder_input: 'Please input', + placeholder_select: 'Please select', + placeholder_required: 'Required', + + hint_total_select: '{{total}} items selected', + select_all: 'select all', + + label_time_asc: 'Ascending by time', + label_time_desc: 'Descending by time', + + detail: 'Detail', + favorite_success: 'Favorite success', + favorite_fail: 'Favorite fail', + cancel_favorite_success: 'Cancel favorite success', + cancel_favorite_fail: 'Cancel favorite fail', + export: 'Export', + exporting: 'Exporting', + export_result: 'Export result', + + success: 'Success', + fail: 'Fail', + evaluating: 'Evaluating', + predicting: 'Predicting', + waitConfirm: 'WaitConfirm', + pass: 'Pass', + reject: 'Reject', + + add: 'Add', + change: 'Change', + publish: 'Publish', + revoke: 'Revoke', + + message_create_success: 'Create success', + message_create_fail: 'Create fail', + message_modify_success: 'Modify success', + message_modify_fail: 'Modify fail', + message_delete_success: 'Delete success', + message_delete_fail: 'Delete fail', + message_no_file: 'No file', + message_publish_success: 'Publish success', + message_publish_failed: 'Publish failed', + message_revoke_success: 'Revoke success', + message_revoke_failed: 'Revoke failed', + message_stop_success: 'Stop success', + message_stop_fail: 'Stop fail', + message_name_duplicated: 'Name duplicated', + message_export_loading: 'Exporting...', + message_export_success: 'Export success', + message_export_fail: 'Export fail', + + transfer_total: 'Total {{total}} items', + transfer_select_total: 'Select {{selectedCount}}/{{total}} items', + + open_code_editor: 'Open code editor', + no_data: 'No data', + no_label: 'No label', + + copy: 'Copy', + back: 'Back', + check: 'Check', + + create_folder: 'New Folder', + create_file: 'New File', + create_folder_on_root: 'New Folder on root', + create_file_on_root: 'New File on root', + + select_project_notice: 'Please select a project', + + msg_quit_modal_title: 'Are you sure you want to quit?', + msg_quit_modal_content: 'After quitting, the information currently filled in will be cleared.', + + hyper_parameters: 'Hyper parameters', + + 
tip_please_input_positive_integer: 'Please input positive integer',
+    tip_please_input_positive_number:
+      'Please input a positive number with 1 digit after the decimal point',
+    tip_replicas_range: 'The number of replicas ranges from 1 to 100',
+    tip_peer_unauthorized:
+      '{{participantName}} is not authorized for the time being, please contact offline for handling',
+
+    cpu: 'CPU',
+    mem: 'MEM',
+    replicas: 'Replicas',
+
+    placeholder_cpu: 'Input CPU Specifications',
+    placeholder_mem: 'Input memory Specifications',
+
+    label_horizontal_federalism: 'Horizontal federalism',
+    label_vertical_federalism: 'Vertical federalism',
+
+    term_type: 'type',
+    term_federal_type: 'Federal type',
+    term_model: 'Model',
+    term_dataset: 'Dataset',
+    term_resource_config: 'Resource config',
+    term_algorithm_type: 'Algorithm Type',
+    term_model_type_nn_vertical: 'NN Vertical',
+    term_model_type_nn_horizontal: 'NN Horizontal',
+    term_model_type_tree_vertical: 'Tree Vertical',
+    term_true: 'Yes',
+    term_false: 'No',
+
+    pod_id: 'Pod id',
+    authorized: 'authorized',
+    unauthorized: 'unauthorized',
+    local_authorized: 'This side has been authorized',
+    local_unauthorized: 'This side is not authorized',
+    peer_authorized: 'The opposite side is authorized',
+    peer_unauthorized: 'The opposite side is not authorized',
   },
 };
diff --git a/web_console_v2/client/src/i18n/resources/modules/algorithmManagement.ts b/web_console_v2/client/src/i18n/resources/modules/algorithmManagement.ts
new file mode 100644
index 000000000..d73bfe6e8
--- /dev/null
+++ b/web_console_v2/client/src/i18n/resources/modules/algorithmManagement.ts
@@ -0,0 +1,207 @@
+import { separateLng } from 'i18n/helpers';
+
+const algorithm = {
+  label_tab_my_algorithm: { zh: '我的算法', en: 'My algorithm' },
+  label_tab_my_built_in: { zh: '预置算法', en: 'Built-in algorithm' },
+  btn_create_algorithm: { zh: '创建算法', en: 'Create algorithm' },
+  btn_submit: { zh: '提交', en: 'Submit' },
+  btn_submit_and_send: { zh: '提交并发版', en: 'Submit and send' },
+  btn_update_preset_algorithm: { zh: '更新预置算法', en: 'Update preset algorithm' },
+  col_name: { zh: '名称', en: 'Name' },
+  col_type: { zh: '类型', en: 'Type' },
+  col_kind: { zh: '来源', en: 'source' },
+  col_state: { zh: '状态', en: 'State' },
+  col_operate: { zh: '操作', en: 'operate' },
+  col_latest_version: { zh: '最新版本', en: 'latest version' },
+  title_create_algorithm: { zh: '创建算法', en: 'Create algorithm' },
+  title_edit_algorithm: { zh: '编辑算法', en: 'Edit algorithm' },
+  title_base_info: { zh: '基本信息', en: 'Base info' },
+  title_todo_acceptance_tasks: { zh: '待接收算法', en: 'Algorithms awaiting acceptance' },
+
+  label_algorithm_name: { zh: '算法名称', en: 'Algorithm name' },
+  label_algorithm_name_placeholder: {
+    zh: '请输入算法名称',
+    en: 'Please input the name of algorithm',
+  },
+  label_algorithm_type: { zh: '算法类型', en: 'Algorithm type' },
+  label_algorithm_version: { zh: '版本', en: 'Version' },
+  label_algorithm_comment: { zh: '算法描述', en: 'algorithm desc' },
+  label_algorithm_comment_placeholder: {
+    zh: '最多为 200 个字符',
+    en: 'Up to 200 characters',
+  },
+  label_algorithm_files: { zh: '算法文件', en: 'Algorithm files' },
+  label_algorithm_files_upload_tip: {
+    zh: '仅支持上传1个 .tar 或 .gz 格式文件,大小不超过 100 MiB',
+    en: 'Only support .tar or .gz format file, size less than 100 MiB',
+  },
+  label_algorithm_files_oversize: {
+    zh: '大小超过限制',
+    en: 'File size exceeds the limit',
+  },
+  label_algorithm_files_unknown_format: {
+    zh: '格式不支持',
+    en: 'File format is not supported',
+  },
+  label_algorithm_detail: { zh: '算法', en: 'detail' },
+
label_algorithm_project: { zh: '算法项目', en: 'Algorithm project' },
+  label_algorithm: { zh: '算法', en: 'Algorithm' },
+
+  label_model_type_unspecified: { zh: '自定义算法', en: 'Customize algorithm' },
+  label_model_type_tree: { zh: '树模型', en: 'Tree model' },
+  label_model_type_nn: { zh: 'NN模型', en: 'NN model' },
+  label_model_type_tree_vertical: { zh: '纵向联邦-树模型', en: 'Tree model(vertical)' },
+  label_model_type_tree_horizontal: { zh: '横向联邦-树模型', en: 'Tree model(horizontal)' },
+  label_model_type_nn_vertical: { zh: '纵向联邦-NN模型', en: 'NN model(vertical)' },
+  label_model_type_trusted_computing: { zh: '可信计算', en: 'Trusted computing' },
+  label_model_type_nn_horizontal: { zh: '横向联邦-NN模型', en: 'NN model(horizontal)' },
+  label_model_type_nn_local: { zh: '本地-NN模型', en: 'NN model(local)' },
+  label_algorithm_params: { zh: '超参数', en: 'algorithm params' },
+  label_algorithm_params_name: { zh: '名称', en: 'name' },
+  label_algorithm_params_value: { zh: '默认值', en: 'default value' },
+  label_algorithm_params_required: { zh: '是否必填', en: 'required' },
+  label_algorithm_params_comment: { zh: '提示语', en: 'comment' },
+  label_algorithm_add_params: { zh: '新增超参数', en: 'add new param' },
+  placeholder_algorithm_params_name: {
+    zh: '请输入参数名称',
+    en: 'Please input the name of params',
+  },
+  placeholder_algorithm_params_value: { zh: '请输入默认值', en: 'Please input the default value' },
+  placeholder_algorithm_params_comment: { zh: '请输入提示语', en: 'Please input comment' },
+  placeholder_searchbox_algorithm: { zh: '输入算法名称' },
+
+  kind_algorithm_user: { zh: '我方', en: 'user' },
+  kind_algorithm_preset: { zh: '系统预置', en: 'preset' },
+  kind_algorithm_third_party: { zh: '第三方', en: 'third party' },
+  publish_modal_title: { zh: '发版「{{name}}」', en: 'confirm to publish {{name}} ' },
+  publish_modal_title_comment: { zh: '版本描述', en: 'publish note' },
+  send_modal_title: { zh: '发布「{{name}}」', en: 'confirm to send {{name}} ' },
+  send_modal_title_algorithm: { zh: '算法', en: 'algorithm' },
+  send_modal_title_comment: { zh: '算法描述', en: 'sending note' },
+  send_modal_btn_send: { zh: '发布', en: 'send' },
+  send_modal_btn_publish: { zh: '发版', en: 'publish' },
+  send_modal_btn_cancel: { zh: '取消', en: 'cancel' },
+  send_modal_property_name: { zh: '名称', en: 'name' },
+  send_modal_property_type: { zh: '类型', en: 'type' },
+  send_modal_property_version: { zh: '版本', en: 'version' },
+  send_modal_property_source: { zh: '来源', en: 'source' },
+  send_modal_property_description: { zh: '描述', en: 'description' },
+
+  type_algorithm_unspecified: { zh: '未指定', en: 'unspecified' },
+  type_algorithm_tree: { zh: '树模型', en: 'Tree Model' },
+  type_algorithm_nn: { zh: 'NN模型', en: 'NN Model' },
+  type_algorithm_horizontal: { zh: '横向联邦', en: 'Horizontal Federal' },
+  type_algorithm_vertical: { zh: '纵向联邦', en: 'Vertical Federal' },
+
+  action_algorithm_edit: { zh: '编辑', en: 'Edit' },
+  action_algorithm_send: { zh: '发布最新版本', en: 'Send the latest version' },
+  action_algorithm_download: { zh: '下载', en: 'Download' },
+  action_algorithm_delete: { zh: '删除', en: 'Delete' },
+
+  col_inner_algorithm_publish: { zh: '发版', en: 'publish' },
+
+  state_text_published: { zh: '已发版', en: 'published' },
+  state_text_draft: { zh: '未发版', en: 'draft' },
+
+  title_create_algorithm_back_btn: { zh: '算法仓库', en: 'Algorithm Repository' },
+
+  msg_create_success: { zh: '创建成功', en: 'Create successfully' },
+
+  msg_update_success: { zh: '编辑成功', en: 'Edit successfully' },
+  msg_publish_success: { zh: '发版成功', en: 'Publish successfully' },
+  msg_send_success: { zh: '发布成功', en: 'Send successfully' },
+
msg_create_and_publish_success: { zh: '创建并发版成功', en: 'Create and publish successfully' },
+  msg_update_and_publish_success: { zh: '编辑并发版成功', en: 'Edit and publish successfully' },
+  msg_todo_algorithm_tasks: { zh: '{{count}} 条待处理算法消息' },
+  msg_prefix_algorithm_tasks: { zh: '发布了', en: 'sent' },
+  msg_suffix_algorithm_tasks: { zh: '的算法', en: "'s algorithm" },
+  msg_update_preset_algorithm_success: {
+    zh: '更新预置算法成功',
+    en: 'Update preset algorithm successfully',
+  },
+  msg_update_preset_algorithm_fail: {
+    zh: '更新预置算法失败',
+    en: 'Update preset algorithm fail',
+  },
+
+  title_todo_algorithm_tasks: { zh: '待处理算法任务', en: 'waiting algorithm' },
+  suffix_algorithm_tasks: { zh: ' 算法', en: 'algorithm' },
+
+  detail_back_btn_text: { zh: '算法仓库', en: 'algorithm repository' },
+  detail_prop_type: { zh: '类型', en: 'type' },
+  detail_prop_latest_version: { zh: '最新版本', en: 'latest version' },
+  detail_prop_update_time: { zh: '更新时间', en: 'update time' },
+  detail_publish_btn_text: { zh: '发版', en: 'publish algorithm' },
+  detail_publish_latest_version_algorithm: { zh: '发布最新版本', en: 'publish latest version' },
+  detail_tab_title_files: { zh: '算法文件', en: 'Files' },
+  detail_tab_version_list: { zh: '版本列表', en: 'Version List' },
+  detail_section_title_algorithm_parameter: { zh: '超参数', en: 'algorithm parameter' },
+  detail_section_title_code_files: { zh: '算法代码', en: 'algorithm code' },
+  detail_param_table_col_name: { zh: '名称', en: 'name' },
+  detail_param_table_col_config: { zh: '版本配置', en: 'detail' },
+  detail_param_table_col_value: { zh: '默认值', en: 'value' },
+  detail_param_table_col_required: { zh: '是否必填', en: 'required' },
+  detail_param_table_col_comment: { zh: '提示语', en: 'comment' },
+  detail_version_table_version: { zh: '版本号', en: 'version' },
+  detail_version_table_creator: { zh: '创建者', en: 'creator' },
+  detail_version_table_comment: { zh: '描述', en: 'desc' },
+  detail_version_table_create_time: { zh: '发版时间', en: 'publish time' },
+  detail_version_table_operate: { zh: '操作', en: 'operate' },
+  detail_version_table_send_btn: { zh: '发布', en: 'send' },
+  detail_version_drawer_title: { zh: '算法版本 V{{version}}', en: 'algorithm V{{version}}' },
+  detail_version_drawer_full_title: {
+    zh: '算法 {{algorithmName}} - 版本 V{{version}}',
+    en: 'Algorithm {{algorithmName}} - V{{version}}',
+  },
+  detail_version_table_empty_msg: {
+    zh: '暂无已发版的算法版本',
+    en: 'no published algorithm version',
+  },
+  detail_version_table_empty_send_btn: { zh: '去发版', en: 'publish' },
+  detail_version_table_detail_btn: { zh: '点击查看', en: 'detail' },
+
+  form_code_changed: { zh: '已保存', en: 'saved' },
+  form_code_unchanged: { zh: '未编辑', en: 'not edited' },
+  form_code_entry_tip: { zh: '点击进入代码编辑器', en: 'click to enter code editor' },
+  form_rule_msg_name: { zh: '算法名称不能为空', en: 'algorithm name cannot be empty' },
+  form_msg_name_duplicate: { zh: '算法名称已存在', en: 'this name already exists' },
+
+  delete_algorithm_confirm_title: {
+    zh: '确认要删除「{{name}}」?',
+    en: 'Are you sure to delete algorithm "{{name}}" ?',
+  },
+  delete_algorithm_confirm_content: {
+    zh: '删除后,正在运行的任务可能受影响,历史任务无法运行,请谨慎删除',
+    en:
+      'After deletion, running tasks may be affected and historical tasks can no longer run. Delete with caution.',
+  },
+
+  empty_algorithm_list_text: { zh: '暂无算法,去创建', en: 'no algorithm, go to create' },
+  empty_algorithm_preset_list_text: { zh: '暂无算法', en: 'no algorithm' },
+
+  acceptance_btn_detail: { zh: '查看算法配置', en: 'view algorithm config' },
+  acceptance_btn_confirm: { zh: '同意并保存', en: 'accept' },
+  acceptance_btn_cancel: { zh: '取消', en: 'reject' },
+
acceptance_status_success: { + zh: '已同意并保存『{{name}}』', + en: 'accepted and saved algorithm "{{name}}"', + }, + acceptance_status_success_tip: { + zh: '{{second}}S 后自动前往算法列表', + en: '{{second}} seconds later, automatically go to algorithm list', + }, + acceptance_btn_go: { zh: '前往算法列表', en: 'go to algorithm list' }, + acceptance_form_name: { zh: '名称', en: 'name' }, + acceptance_form_type: { zh: '类型', en: 'type' }, + acceptance_form_version: { zh: '版本', en: 'version' }, + acceptance_form_comment: { zh: '描述', en: 'desc' }, + acceptance_form_detail: { zh: '算法', en: 'detail' }, + acceptance_msg_not_found: { zh: '算法不存在', en: 'algorithm is not found' }, + + tip_update_preset_algorithm: { + zh: '只有管理员才能更新预置算法', + en: 'Only admin can update preset algorithm', + }, +}; + +export default separateLng(algorithm); diff --git a/web_console_v2/client/src/i18n/resources/modules/app.ts b/web_console_v2/client/src/i18n/resources/modules/app.ts index 36823da43..930910905 100644 --- a/web_console_v2/client/src/i18n/resources/modules/app.ts +++ b/web_console_v2/client/src/i18n/resources/modules/app.ts @@ -7,7 +7,15 @@ const error = { logout: { zh: '退出登录', en: 'Logout' }, login_success: { zh: '登录成功', en: 'Login successfully' }, copy_success: { zh: '复制成功', en: 'Copied!' }, - system_settings: { zh: '系统配置', en: 'Sttings' }, + copy_fail: { zh: '复制失败', en: 'Copy fail!' }, + system_settings: { zh: '全局配置', en: 'Settings' }, + user_management: { zh: '用户管理', en: 'User Management' }, + audit_log: { zh: '审计日志', en: 'Audit log' }, + operation_maintenance: { zh: '运维模块', en: 'OP Module' }, + work_space: { zh: '工作台', en: 'Work Space' }, + participant: { zh: '合作伙伴', en: 'Participant' }, + coordinator: { zh: '本方', en: 'Coordinator' }, + help_document: { zh: '帮助文档', en: 'Help document' }, }; export default separateLng(error); diff --git a/web_console_v2/client/src/i18n/resources/modules/audit.ts b/web_console_v2/client/src/i18n/resources/modules/audit.ts new file mode 100644 index 000000000..5778088f9 --- /dev/null +++ b/web_console_v2/client/src/i18n/resources/modules/audit.ts @@ -0,0 +1,53 @@ +import { separateLng } from 'i18n/helpers'; + +const audit = { + title_event_record: { zh: '事件记录', en: 'Event record' }, + title_event_detail: { zh: '事件详情', en: 'Event detail' }, + title_base_info: { zh: '基础信息', en: 'Base info' }, + title_request_info: { zh: '请求信息', en: 'Request info' }, + + tip_event_record: { + zh: '以下列表最长展示过去9个月的事件记录', + en: 'The following list shows up to the past 9 months of event records', + }, + + col_event_time: { zh: '事件时间', en: 'Event time' }, + col_user_name: { zh: '用户名', en: 'User name' }, + col_event_name: { zh: '事件名称', en: 'Event name' }, + col_resource_type: { zh: '资源类型', en: 'Resource type' }, + col_resource_name: { zh: '资源名称', en: 'Resource name' }, + + placeholder_search: { zh: '搜索关键词', en: 'Search keyword' }, + + radio_label_all: { zh: '全部', en: 'All' }, + radio_label_one_week: { zh: '近7天', en: 'Nearly 7 days' }, + radio_label_one_month: { zh: '近1月', en: 'Nearly 1 month' }, + radio_label_three_months: { zh: '近3月', en: 'Nearly 3 months' }, + + btn_delete: { zh: '删除6个月前的记录', en: 'Delete records from 6 months ago' }, + + label_event_id: { zh: '事件ID', en: 'Event ID' }, + label_event_time: { zh: '事件时间', en: 'Event time' }, + label_event_name: { zh: '事件名称', en: 'Event name' }, + label_user_name: { zh: '用户名', en: 'User name' }, + label_operation_name: { zh: '操作名称', en: 'Operation name' }, + + label_request_id: { zh: '请求ID', en: 'Request ID' }, + label_access_key_id: { zh: 'AccessKey ID', en: 'AccessKey ID' }, 
+ label_event_result: { zh: '事件结果', en: 'Event result' }, + label_error_code: { zh: '错误码', en: 'Error code' }, + label_resource_type: { zh: '资源类型', en: 'Resource type' }, + label_resource_name: { zh: '资源名称', en: 'Resource name' }, + label_original_ip_address: { zh: '源IP地址', en: 'Source ip address' }, + label_extra_info: { zh: '额外信息', en: 'Extra info' }, + + msg_title_confirm_delete: { zh: '确定要删除吗?', en: 'You sure you want to delete it?' }, + msg_content_confirm_delete: { + zh: '基于安全审核的原因,平台仅支持清理6个月前的事件记录', + en: + 'Due to security audit reasons, the platform only supports cleaning up the event records 6 months ago', + }, + msg_delete_success: { zh: '删除成功', en: 'Delete success' }, +}; + +export default separateLng(audit); diff --git a/web_console_v2/client/src/i18n/resources/modules/dashboard.ts b/web_console_v2/client/src/i18n/resources/modules/dashboard.ts new file mode 100644 index 000000000..81828c7dc --- /dev/null +++ b/web_console_v2/client/src/i18n/resources/modules/dashboard.ts @@ -0,0 +1,7 @@ +import { separateLng } from 'i18n/helpers'; + +const dashboard = { + dashboard: { zh: '仪表盘', en: 'dashboard' }, +}; + +export default separateLng(dashboard); diff --git a/web_console_v2/client/src/i18n/resources/modules/dataset.ts b/web_console_v2/client/src/i18n/resources/modules/dataset.ts index 3d798daed..233fd6850 100644 --- a/web_console_v2/client/src/i18n/resources/modules/dataset.ts +++ b/web_console_v2/client/src/i18n/resources/modules/dataset.ts @@ -6,18 +6,64 @@ const datasets = { selected_items: { zh: '已选择 {{count}} 项' }, btn_create: { zh: '创建数据集', en: 'Create Dataset' }, + btn_create_dataset_job: { zh: '创建数据任务', en: 'Create Dataset job' }, btn_add_batch: { zh: '追加数据', en: ' Add databatch' }, btn_view_records: { zh: '查看记录', en: 'View records' }, btn_finish_n_import: { zh: '完成创建并导入', en: 'Submit and start importing' }, btn_import: { zh: '开始导入', en: 'Start importing' }, - btn_copy_path: { zh: '复制路径', en: 'Start importing' }, + btn_create_now: { zh: '确认创建', en: 'Create' }, + btn_copy_path: { zh: '复制路径', en: 'Copy path' }, + btn_data_join: { zh: '数据求交', en: 'Data join' }, + btn_more_action: { zh: '...', en: '...' 
}, + btn_create_join_job: { zh: '创建求交任务', en: 'Start join job' }, + btn_export_dataset: { zh: '导出', en: 'Export' }, + btn_export_dataset_go_back: { zh: '前往求交数据集列表', en: 'Go back' }, + btn_publish_project: { zh: '发布', en: 'Publish' }, + btn_unpublish_project: { zh: '撤销发布', en: 'Unpublish' }, + btn_create_data_source: { zh: '添加数据源', en: 'Create data source' }, + btn_job_stop: { zh: '停止运行', en: 'Stop Running' }, - col_name: { zh: '数据集名称' }, - col_file_name: { zh: '文件名' }, + col_id: { zh: 'ID' }, + col_path: { zh: '文件路径' }, + col_name: { zh: '名称' }, + col_file_name: { zh: '数据源地址文件' }, col_type: { zh: '类型' }, - col_files_size: { zh: '数据总大小' }, + col_files_size: { zh: '数据大小' }, col_creator: { zh: '创建者' }, col_modification_time: { zh: '最近修改时间' }, + col_project: { zh: '关联工作区' }, + col_num_example: { zh: '数据集样本量' }, + col_participant: { zh: '合作伙伴名称' }, + col_data_format: { zh: '数据格式' }, + col_updated_at: { zh: '最近更新' }, + col_data_source_url: { zh: '数据来源', en: 'data source url' }, + col_dataset_path: { zh: '数据集路径', en: 'Dataset path' }, + col_publish_state: { zh: '发布状态', en: 'Publish state' }, + col_dataset_state: { zh: '数据集状态', en: 'Dataset state' }, + col_participant_name: { zh: '参与方', en: 'Participant' }, + col_data_value: { zh: '数据价值', en: 'Data value' }, + col_use_unit_price: { zh: '使用单价', en: 'Unit price' }, + + // for dataset task job table col + col_job_name: { zh: '任务名称', en: 'Task name' }, + col_job_type: { zh: '任务类型', en: 'Task type' }, + col_job_status: { zh: '任务状态', en: 'Task status' }, + col_job_coordinator: { zh: '任务发起方', en: 'Task coordinator' }, + col_job_create_time: { zh: '创建时间', en: 'Create time' }, + col_job_start_time: { zh: '开始时间', en: 'Start time' }, + col_job_finish_time: { zh: '结束时间', en: 'Finish time' }, + col_job_running_time: { zh: '运行时长', en: 'Running Time' }, + col_job_operation: { zh: '操作', en: 'Operation' }, + + col_ledger_hash: { zh: '哈希', en: 'Hash' }, + col_ledger_block: { zh: '所属块', en: 'Block' }, + col_ledger_trade_block_id: { zh: '交易块内ID', en: 'Trade block ID' }, + col_ledger_chain_time: { zh: '上链时间', en: 'Time of chain' }, + col_ledger_sender: { zh: '发送方', en: 'Sender' }, + col_ledger_receiver: { zh: '接收方', en: 'Receiver' }, + col_ledger_trade_fee: { zh: '交易费用', en: 'Fee of trade' }, + col_ledger_trade_status: { zh: '交易状态', en: 'Status of trade' }, + col_ledger_trade_info: { zh: '交易信息', en: 'Information of trade' }, msg_start_importing: { zh: '数据集创建成功,数据文件开始导入' }, msg_name_required: { zh: '数据集名称为必填项' }, @@ -27,31 +73,337 @@ const datasets = { msg_file_required: { zh: '请选择需要导入的文件' }, msg_id_required: { zh: '缺少数据集 ID,请检查' }, msg_is_importing: { zh: '存在数据正在导入中,暂不支持追加数据' }, + msg_todo_tasks: { zh: '{{count}}条待处理任务' }, + msg_edit_ok: { zh: '数据集编辑成功' }, + msg_export_id_empty: { + zh: '导出任务ID缺失,请手动跳转「任务管理」查看详情', + en: 'The ID of the export task is missing. 
Go to Task Management to view details', + }, tip_move_file: { zh: '导入成功后将移除所有原文件以节省磁盘空间' }, + tip_type_struct: { zh: '支持.csv/.tfrecords格式' }, + tip_type_picture: { zh: '支持.jpeg/.png/.bmp/.gif格式' }, + tip_limit_count: { zh: '仅展示最近{{count}}条', en: 'Show only the last {{count}} tasks' }, + tip_files_size: { + zh: '数据以系统格式存储的大小,较源文件会有一定变化', + en: 'The size of the data stored in the system format will vary from the source file', + }, label_name: { zh: '数据集名称' }, label_type: { zh: '数据集类型' }, - label_comment: { zh: '数据集说明' }, + label_data_type: { zh: '数据类型' }, + label_join_type: { zh: '求交类型' }, + label_source_location: { zh: '数据源地址' }, + label_data_source: { zh: '数据源', en: 'Data source' }, + label_data_job_type: { zh: '数据任务', en: 'Data job type' }, + label_data_job_type_create: { zh: '求交', en: 'Join' }, + label_data_job_type_alignment: { zh: '对齐', en: 'Alignment' }, + label_data_job_type_light_client_join: { zh: '轻客户端求交', en: 'Light client join' }, + label_data_job_type_import: { zh: '导入', en: 'import' }, + label_data_job_type_export: { zh: '导出', en: 'export' }, + label_data_join_type: { zh: '求交方式', en: 'Data join type' }, + label_data_join_type_normal: { zh: '数据求交', en: 'Data join' }, + label_data_join_type_psi: { zh: 'RSA-PSI 求交', en: 'RSA-PSI Data join' }, + label_data_join_type_light_client: { zh: 'RSA-PSI 求交', en: 'RSA-PSI Data join' }, + label_data_join_type_ot_psi_data_join: { zh: 'OT-PSI 求交', en: 'OT-PSI Data join' }, + label_dataset_my: { zh: '我方数据集', en: 'My dataset' }, + label_dataset_participant: { zh: '合作伙伴数据集', en: 'Participant dataset' }, + label_params_my: { zh: '我方参数', en: 'My params' }, + label_params_participant: { zh: '合作伙伴参数', en: 'Participant params' }, + label_comment: { zh: '数据集描述' }, label_event_time: { zh: '数据产生时间' }, label_move_file: { zh: '导入后移除源文件' }, + label_import_by: { zh: ' 导入方式' }, + label_import_by_remote: { zh: '数据源导入' }, + label_import_by_local: { zh: '本地导入' }, + label_raw_dataset: { zh: '原始数据集' }, + label_intersection_dataset: { zh: '求交数据集' }, + label_struct_type: { zh: '结构化数据' }, + label_picture_type: { zh: '图片' }, + label_feature_amount: { zh: '特征量' }, + label_row_amount: { zh: '数据集样本量' }, + label_type_amount: { zh: '分类数' }, + label_picture_amount: { zh: '总图片量' }, + label_total_cols: { zh: '总列数', en: 'Total cols' }, + label_total_rows: { zh: '总行数', en: 'Total rows' }, + label_update_time: { zh: '更新时间', en: 'Update time' }, + label_intersection_rate: { zh: '求交率', en: 'Intersection rate' }, + label_amount_of_data: { zh: '我方数据量', en: 'Number of our data' }, + label_amount_of_intersection: { zh: '交集数', en: 'Number of intersection' }, + label_my_dataset: { zh: '我方数据集', en: 'Processed datasets' }, + label_participant_dataset: { zh: '合作伙伴数据集', en: 'Participant datasets' }, + label_participant_dataset_revoke: { + zh: '对方已撤销发布', + en: 'The participant has withdrawn the release', + }, + + label_rsa_psi_data_join: { zh: 'RSA-PSI 求交', en: 'RSA PSI data join' }, + label_light_client_rsa_psi_data_join: { + zh: 'LIGHT_CLIENT_RSA_PSI数据求交', + en: 'Light client RSA PSI data join', + }, + label_ot_psi_data_join: { zh: 'OT-PSI数据求交', en: 'OT PSI data join' }, + label_data_join: { zh: '数据求交', en: 'Data join' }, + label_data_alignment: { zh: '数据对齐', en: 'Data alignment' }, + label_import_source: { zh: '数据导入', en: 'Import source' }, + + label_blockchain_storage: { zh: '区块链存证', en: 'Blockchain storage' }, + label_all_records: { zh: '追加记录' }, + label_schema: { zh: '校验错误信息' }, + label_join_workflows: { zh: '求交任务' }, + label_data_preview: { zh: '数据探查', en: 'Data preview' }, + 
label_image_preview: { zh: '图片预览', en: 'Image preview' },
+  label_data_job_detail: { zh: '任务详情', en: 'Job detail' },
+  label_processed_dataset: { zh: '结果数据集', en: 'Processed datasets' },
+  label_relative_dataset: { zh: '下游数据集', en: 'Relative datasets' },
+  label_need_schema_check: { zh: '数据检验', en: 'Data validation' },
+  label_json_schema: { zh: '校验规则', en: 'Validation rule' },
+  label_todo_tasks: { zh: '待处理任务', en: 'Todo tasks' },
+  label_start_task: { zh: '发起了任务', en: 'start task' },
+  label_export_path: { zh: '导出路径', en: 'Export path' },
+  label_coordinator_self: { zh: '本方', en: 'this party' },
+  label_local_upload: { zh: '本地上传', en: 'Local upload' },
+  label_local_export: { zh: '导出地址', en: 'Export URL' },
+  label_light_client: { zh: '轻量', en: 'Light client' },
+  label_upload_by_light_client: { zh: '由客户侧本地上传', en: 'upload locally by light client' },
+  label_data_source_name: { zh: '数据源名称', en: 'Data source name' },
+  label_data_source_url: { zh: '数据来源', en: 'Data source url' },
+  label_file_name_preview: { zh: '文件名预览', en: 'File name preview' },
+  label_upload_by_data_source: { zh: '数据源上传', en: 'Upload from data source' },
+  label_upload_task: { zh: '导入任务', en: 'Import Task' },
+  label_data_not_found: { zh: '数据集信息未找到', en: 'Dataset Info Not Found' },
+
+  label_check_running_status_job: { zh: '查看运行中任务', en: 'Check the running jobs' },
+  label_job_list: { zh: '任务管理', en: 'Job manager' },
+  label_publish_to_workspace: { zh: '发布至工作区', en: 'Publish to workspace' },
+  label_publish_credits: { zh: '积分', en: 'Credits' },
+  label_use_price: { zh: '使用单价', en: 'Unit price' },
+  label_job_wait_to_run: { zh: '待运行', en: 'Waiting to run' },
+  label_current_dataset_value: { zh: '当前数据价值', en: 'Value of the dataset' },
+
+  label_resource_allocation: { zh: '资源配置', en: 'Resource allocation' },
+  label_input_params: { zh: '输入参数', en: 'Input params' },
+  label_job_config: { zh: '任务配置', en: 'Job config' },
+  label_dataset_empty: { zh: '空集', en: 'Empty Dataset' },
+
+  label_dataset_check: { zh: '数据校验', en: 'Data Check' },
+  label_dataset_join_checker: { zh: '求交数据校验', en: 'Join Data Checker' },
+  label_dataset_numeric_checker: { zh: '全数值特征校验', en: 'Numeric Checker' },
-  placeholder_name_searchbox: { zh: '输入数据名称搜索', en: 'Search by name' },
+  placeholder_name_searchbox: { zh: '输入数据集名称搜索', en: 'Search by name' },
+  placeholder_searchbox_data_source: { zh: '输入数据源名称', en: 'Search by data source name' },
   placeholder_name: { zh: '请输入数据集名称' },
-  placeholder_comment: { zh: '请输入数据集说明' },
+  placeholder_comment: {
+    zh: '最多为 200 个字符',
+    en: 'Up to 200 characters',
+  },
   placeholder_event_time: { zh: '请选择时间' },
   placeholder_filename_filter: { zh: '输入文件名进行筛选' },
   placeholder_directory_filter: { zh: '切换其他文件夹(按回车确认)' },
+  placeholder_datasource_url: {
+    zh: '请填写有效文件目录地址,非文件,如 hdfs:///home/folder',
+    en: 'Please fill in a valid file directory address, not a file, such as hdfs:///home/folder',
+  },
+  placeholder_job_name: { zh: '输入任务名称', en: 'Input the name of task' },
-  title_create: { zh: '创建数据集' },
+  title_create: { zh: '创建数据集', en: 'Create dataset' },
+  title_edit: { zh: '编辑数据集' },
+  title_export_dataset: { zh: '导出数据集', en: 'Export dataset' },
+  title_export_start_time: {
+    zh: '开始时间: {{time}}',
+    en: 'Start time: {{time}}',
+  },
+  tile_export_end_time: {
+    zh: '结束时间: {{time}}',
+    en: 'End time: {{time}}',
+  },
+  title_export_path: { zh: '导出路径: {{path}}', en: 'Export path: {{path}}' },
+  title_create_data_source: { zh: '创建数据源', en: 'Create data source' },
+  title_edit_data_source: { zh: '编辑数据源', en: 'Edit data source' },
+ title_base_config: { zh: '基本配置', en: 'Base config' }, + title_data_source_import: { zh: '数据源导入', en: 'Data source import' }, + title_local_import: { zh: '本地导入', en: 'Local import' }, - state_importing: { zh: '导入中({{imported}}/{{total}})' }, + title_task_flow: { zh: '任务流程', en: 'Task flow' }, + title_error_message: { zh: '错误信息', en: 'Error message' }, + + state_importing: { zh: '导入中' }, state_available: { zh: '可用' }, - state_error: { zh: '导入失败' }, - state_unknown: { zh: '状态未知' }, + state_import_error: { zh: '导入失败' }, + state_unknown: { zh: '状态未知', en: 'Unknown' }, + state_processing: { zh: '处理中' }, + state_process_failed: { zh: '处理失败' }, + state_deleting: { zh: '删除中' }, + state_error: { zh: '异常' }, + state_checking: { zh: '校验中' }, + state_checked: { zh: '校验通过' }, + state_check_error: { zh: '校验不通过' }, + state_exporting: { zh: '导出中', en: 'Exporting' }, + state_export_success: { zh: '导出成功', en: 'Export success' }, + state_export_failed: { zh: '导出失败', en: 'Export failed' }, + state_stopped: { zh: '已停止', en: 'stopped' }, + + state_dataset_job_pending: { zh: '待运行', en: 'Pending' }, + state_dataset_job_running: { zh: '运行中', en: 'Running' }, + state_dataset_job_succeeded: { zh: '成功', en: 'Succeeded' }, + state_dataset_job_failed: { zh: '失败', en: 'Failed' }, + state_dataset_job_stopped: { zh: '已停止', en: 'stopped' }, + + state_text_published: { zh: '已发布', en: 'Published' }, + state_text_unpublished: { zh: '未发布', en: 'Unpublished' }, + state_text_published_with_project: { zh: '已发布至工作区', en: 'Published' }, + state_text_unpublished_with_project: { zh: '未发布至工作区', en: 'Unpublished' }, + + state_transaction_failed: { zh: '失败', en: 'Failed' }, + state_transaction_success: { zh: '成功', en: 'Succeeded' }, + state_transaction_processing: { zh: '处理中', en: 'Processing' }, step_basic: { zh: '基础配置' }, step_add_batch: { zh: '选择数据文件' }, + + tip_only_show_read_task: { + zh: '仅展示所有参与方完成配置的任务', + en: 'Only show that all participants have completed the configured tasks', + }, + tip_state_process: { + zh: '数据处理中,请稍后', + en: 'Data processing, please wait', + }, + tip_state_error: { + zh: '抱歉,数据暂时无法显示', + en: 'Sorry, the data is temporarily unavailable', + }, + tip_check_error: { zh: '请进入数据集详情,查看校验错误信息' }, + tip_file_not_found: { zh: '输入路径不存在!', en: 'The input path does not exist!' }, + tip_file_no_permission: { zh: '无权限访问该路径!', en: 'No permission to access the path!' }, + tip_json_schema: { + zh: '支持以Json Schema的方式输入,只校验规则中的字段,其他字段不校验', + en: + 'Support input in Json Schema format, only the fields in the rule will be verified, other fields will be ignored', + }, + tip_data_source: { + zh: '数据源指数据的来源,创建数据源即定义访问数据存储空间的地址', + en: + 'The data source refers to the source of the data. Creating a data source defines the address for accessing the data storage space.', + }, + tips_publish: { + zh: '发布后,工作区中合作伙伴可使用该数据集', + en: 'Once published, the data set is available to partners in the workspace', + }, + tips_publish_default_value: { zh: '100积分', en: '100 points' }, + tips_first_publish: { zh: '首次发布可得 100 积分', en: '100 credits for first publish' }, + tips_relative_dataset: { + zh: '通过使用本数据集所产生的数据集', + en: 'The data set generated by using this data set', + }, + + tips_data_checker_join: { + zh: '当数据集需用于求交时,需勾选该选项,将要求数据集必须有raw_id 列且没有重复值', + en: + 'Select this option when the data set is used for intersection. The data set must have raw ID columns with no duplicate values', + }, + tips_data_checker_numeric: { + zh: '当数据集需用于树模型训练时,需勾选该选项,将要求数据集特征必须为全数值', + en: + 'Select this option when the data set is used for tree model training. 
The data set features must be all numeric values',
+  },
+
+  msg_title_confirm_delete: { zh: '确认删除数据集?', en: 'Confirm to delete the dataset?' },
+  msg_content_confirm_delete: {
+    zh: '删除操作无法恢复,请谨慎操作',
+    en: 'The deletion cannot be undone, please operate with caution',
+  },
+  msg_export_success: {
+    zh: '导出成功',
+    en: 'Export success',
+  },
+  msg_export_failed: {
+    zh: '导出失败',
+    en: 'Export failed',
+  },
+  msg_publish_confirm: {
+    zh: '发布「{{name}}」至工作区?',
+    en: 'Publish "{{name}}" to the workspace?',
+  },
+  msg_publish_tip: {
+    zh: '发布后,在工作区中可使用该数据集',
+    en: 'After publishing, the data set can be used in the workspace',
+  },
+  msg_unpublish_confirm: {
+    zh: '确认要撤销发布「{{name}}」?',
+    en: 'Confirm to unpublish "{{name}}"?',
+  },
+  msg_unpublish_tip: {
+    zh: '撤销后,工作区的合作伙伴将不能使用该数据集',
+    en: 'After unpublishing, the partner in the workspace will not be able to use the dataset',
+  },
+
+  msg_title_confirm_delete_data_source: {
+    zh: '确认要删除「{{name}}」?',
+    en: 'Are you sure you want to delete "{{name}}"?',
+  },
+  msg_content_confirm_delete_data_source: {
+    zh: '删除后,当该数据源将无法恢复,请谨慎操作。',
+    en: 'After deletion, the data source cannot be recovered, please operate with caution.',
+  },
+
+  msg_connecting: {
+    zh: '连接中',
+    en: 'connecting',
+  },
+  msg_connection_success: {
+    zh: '连接成功',
+    en: 'connection success',
+  },
+  msg_connection_fail: {
+    zh: '连接失败',
+    en: 'connection fail',
+  },
+
+  msg_no_support_data_job: {
+    zh: '暂不支持该类型的数据任务',
+    en: 'This data job is not currently supported',
+  },
+
+  msg_title_confirm_stop_job: {
+    zh: '确认要停止「{{name}}」?',
+    en: 'Are you sure you want to stop "{{name}}"?',
+  },
+
+  msg_content_confirm_stop_job: {
+    zh: '停止后,该任务不能再重新运行,请谨慎操作',
+    en: 'Once stopped, the job cannot be restarted, please operate with caution',
+  },
+
+  msg_title_confirm_delete_job: {
+    zh: '确认要删除「{{name}}」?',
+    en: 'Are you sure you want to delete "{{name}}"?',
+  },
+
+  msg_content_confirm_delete_job: {
+    zh: '删除后,该任务及信息将无法恢复,请谨慎操作',
+    en:
+      'After deletion, the job and related info cannot be recovered, please operate with caution.',
+  },
+
+  msg_stop_failed: {
+    zh: '停止失败',
+    en: 'Stop Failed',
+  },
+
+  msg_delete_failed: {
+    zh: '删除失败',
+    en: 'Delete Failed',
+  },
+
+  msg_create_success: {
+    zh: '创建成功,数据集可用后将自动发布',
+    en:
+      'The data set has been created successfully and will be published automatically when it is available',
+  },
+
+  tag_raw: { zh: '原始', en: 'Raw' },
+  tag_processed: { zh: '结果', en: 'Processed' },
 };
 
 export default separateLng(datasets);
diff --git a/web_console_v2/client/src/i18n/resources/modules/error.ts b/web_console_v2/client/src/i18n/resources/modules/error.ts
index 92ec8e7b0..9a011aa9a 100644
--- a/web_console_v2/client/src/i18n/resources/modules/error.ts
+++ b/web_console_v2/client/src/i18n/resources/modules/error.ts
@@ -6,7 +6,31 @@ const error = {
     zh: '登录状态已过期,请重新登录',
     en: 'Login status has been expired, please sign in again',
   },
+  unauthorized: {
+    zh: '没有权限,请重新登录',
+    en: 'Signature verification failed, please sign in again',
+  },
   no_result: { zh: '' },
+  no_tree_train_model_template: {
+    zh: '找不到训练模型模板(树算法)',
+    en: 'train model template(tree algorithm) not found',
+  },
+  no_nn_train_model_template: {
+    zh: '找不到训练模型模板(nn算法)',
+    en: 'train model template(nn algorithm) not found',
+  },
+  no_nn_horizontal_train_model_template: {
+    zh: '找不到训练模型模板(横向nn算法)',
+    en: 'train model template(nn horizontal algorithm) not found',
+  },
+  no_nn_horizontal_eval_model_template: {
+    zh: '找不到模型评估模板(横向nn算法)',
+    en: 'eval model template(nn horizontal algorithm) not found',
+  },
+
no_peer_template: { + zh: '找不到对侧训练模型模板', + en: 'peer train model template not found', + }, }; export default separateLng(error); diff --git a/web_console_v2/client/src/i18n/resources/modules/intersection_dataset.ts b/web_console_v2/client/src/i18n/resources/modules/intersection_dataset.ts new file mode 100644 index 000000000..32c3e24ec --- /dev/null +++ b/web_console_v2/client/src/i18n/resources/modules/intersection_dataset.ts @@ -0,0 +1,17 @@ +import { separateLng } from 'i18n/helpers'; + +const intersection_dataset = { + status: { zh: '状态' }, + no_result: { zh: '暂无数据集' }, + + col_name: { zh: '名称' }, + col_status: { zh: '求交状态' }, + col_job_name: { zh: '求交任务' }, + col_peer_name: { zh: '求交参与方' }, + col_sample_num: { zh: '数据集样本量' }, + col_sample_filesize: { zh: '数据集大小' }, + col_files_size: { zh: '文件大小' }, + col_num_example: { zh: '数据集样本量' }, +}; + +export default separateLng(intersection_dataset); diff --git a/web_console_v2/client/src/i18n/resources/modules/login.ts b/web_console_v2/client/src/i18n/resources/modules/login.ts index 2ae11571d..0ce3c6469 100644 --- a/web_console_v2/client/src/i18n/resources/modules/login.ts +++ b/web_console_v2/client/src/i18n/resources/modules/login.ts @@ -14,6 +14,30 @@ const login = { zh: '如无账号,请发送申请邮件至管理员邮箱 {{email}}', en: "Please contact {{email}} if you don't have an account", }, + error_not_find_open_url: { + zh: '找不到即将跳转的 url 信息', + en: 'Can’t find the url information about to be redirected', + }, + error_not_find_sso_info: { + zh: '找不到登陆方式 {{ssoName}} 的相关信息', + en: 'Can’t find information about login method {{ssoName}}', + }, + error_sso_login: { + zh: '{{ssoName}}: SSO 验证失败', + en: '{{ssoName}}: SSO verification failed', + }, + error_not_find_access_token: { + zh: '在 URL query 中找不到 access_token 信息', + en: 'Can’t find access_token information in URL query', + }, + label_other_login_way: { + zh: '其他登陆方式', + en: 'Other login ways', + }, + label_hide_other_login_way: { + zh: '隐藏其他登陆方式', + en: 'Hide other login ways', + }, }; export default separateLng(login); diff --git a/web_console_v2/client/src/i18n/resources/modules/menu.ts b/web_console_v2/client/src/i18n/resources/modules/menu.ts index 54b41b03a..445cbd538 100644 --- a/web_console_v2/client/src/i18n/resources/modules/menu.ts +++ b/web_console_v2/client/src/i18n/resources/modules/menu.ts @@ -1,15 +1,42 @@ import { separateLng } from 'i18n/helpers'; const menu = { - label_project: { zh: '项目管理', en: 'Projects' }, + label_project: { zh: '工作区管理', en: 'Workspaces' }, + label_workflow_center: { zh: '工作流中心', en: 'Workflows centre' }, label_workflow: { zh: '工作流管理', en: 'Workflows' }, label_workflow_tpl: { zh: '模板管理', en: 'Workflow templates' }, - label_datasets: { zh: '数据集管理', en: 'Datasets' }, - label_datasets_training: { zh: '训练数据集管理', en: 'Training datasets' }, - label_datasets_test: { zh: '测试数据集', en: 'Test datasets' }, + label_datasets: { zh: '数据中心', en: 'Datasets' }, + label_datasets_training: { zh: '训练数据集', en: 'Training datasets' }, + label_datasets_test: { zh: '评估数据集', en: 'Test datasets' }, label_datasets_predict: { zh: '预测数据集', en: 'Predict datasets' }, - label_settings: { zh: '系统配置', en: 'Settings' }, + label_datasets_unknown: { zh: '未知数据集', en: 'Unknown datasets' }, + label_datasets_raw: { zh: '原始数据集', en: 'Raw datasets' }, + label_datasets_processed: { zh: '结果数据集', en: 'Processed datasets' }, + label_datasets_data_source: { zh: '数据源', en: 'Data source' }, + label_datasets_job: { zh: '数据任务', en: 'Datasets job' }, + label_datasets_task_list: { zh: '任务管理', en: 'Task manager' }, + label_settings: 
{ zh: '全局配置', en: 'Settings' },
+  label_settings_image: { zh: '镜像版本', en: 'Image Version' },
+  label_settings_variables: { zh: '环境变量', en: 'Environment variables' },
   label_users: { zh: '用户管理', en: 'User Management' },
+  label_model_center: { zh: '模型中心', en: 'Model Center' },
+  label_model_center_beta: { zh: '模型中心(Beta)', en: 'Model Center(Beta)' },
+  label_model_center_model_warehouse: { zh: '模型仓库', en: 'Model warehouse' },
+  label_model_center_model_training: { zh: '模型训练', en: 'Model training' },
+  label_model_center_model_evaluation: { zh: '模型评估', en: 'Model evaluation' },
+  label_model_center_offline_prediction: { zh: '离线预测', en: 'Offline prediction' },
+  label_model_center_algorithm_management: { zh: '算法管理', en: 'Algorithm management' },
+  label_algorithm_repository: { zh: '算法仓库', en: 'Algorithm Repository' },
+
+  label_model_serving: { zh: '在线中心', en: 'Online serving Center' },
+  label_model_serving_list: { zh: '在线服务', en: 'Online serving list' },
+  label_model_serving_service: { zh: '在线服务', en: 'Online Serving' },
+  label_audit_log: { zh: '审计日志', en: 'Audit log' },
+  label_operation_maintenance: { zh: '基础功能测试', en: 'OP Module' },
+  label_event_record: { zh: '事件记录', en: 'Event record' },
+  label_partners: { zh: '合作伙伴', en: 'Partners' },
+  label_trusted_center: { zh: '可信中心', en: 'Trusted Center' },
+  label_dataset_fix: { zh: '数据集修复', en: 'Dataset Fix' },
 };
 
 export default separateLng(menu);
diff --git a/web_console_v2/client/src/i18n/resources/modules/modelCenter.ts b/web_console_v2/client/src/i18n/resources/modules/modelCenter.ts
new file mode 100644
index 000000000..172ac1dc7
--- /dev/null
+++ b/web_console_v2/client/src/i18n/resources/modules/modelCenter.ts
@@ -0,0 +1,906 @@
+import { separateLng } from 'i18n/helpers';
+
+const modelCenter = {
+  menu_label_model_management: { zh: '模型管理', en: 'Model management' },
+  menu_label_model_evaluation: { zh: '模型评估', en: 'Model evaluation' },
+  menu_label_offline_prediction: { zh: '离线预测', en: 'Offline prediction' },
+  menu_label_algorithm_management: { zh: '算法管理', en: 'Algorithm management' },
+
+  label_tab_model_set: { zh: '模型集', en: 'Model Set' },
+  label_tab_model_favourit: { zh: '我的收藏', en: 'My favourite model' },
+  label_tab_my_algorithm: { zh: '我的算法', en: 'My algorithm' },
+  label_tab_my_built_in: { zh: '预置算法', en: 'Built-in algorithm' },
+  label_tab_model_evaluation: { zh: '模型评估', en: 'Model evaluation' },
+  label_tab_model_compare: { zh: '模型对比', en: 'Model compare' },
+
+  label_tab_algorithm_preview: { zh: '算法预览', en: 'Algorithm preview' },
+  label_tab_change_log: { zh: '变更记录', en: 'Change log' },
+
+  label_algorithm_params: { zh: '超参数', en: 'algorithm params' },
+  label_algorithm_params_name: { zh: '名称', en: 'name' },
+  label_algorithm_params_value: { zh: '默认值', en: 'default value' },
+  label_algorithm_params_required: { zh: '是否必填', en: 'required' },
+  label_algorithm_params_comment: { zh: '提示语(必填)', en: 'comment' },
+  label_algorithm_add_params: { zh: '新增超参数', en: 'add new param' },
+  placeholder_algorithm_params_name: {
+    zh: '请输入参数名称',
+    en: 'Please input the name of params',
+  },
+  placeholder_algorithm_params_value: { zh: '请输入默认值', en: 'Please input the default value' },
+  placeholder_algorithm_params_comment: { zh: '请输入提示语', en: 'Please input comment' },
+
+  msg_todo_train_tasks: { zh: '{{count}} 条待处理训练任务' },
+  msg_todo_model_job_tasks: { zh: '{{count}} 条待处理模型训练' },
+  msg_todo_evaluation_tasks: { zh: '{{count}} 条待处理评估任务' },
+  msg_todo_prediction_tasks: { zh: '{{count}} 条待处理预测任务' },
+  msg_todo_algorithm_tasks: { zh: '{{count}} 条待处理算法任务' },
+  msg_before_revoke_authorize: {
+    zh: '撤销授权后,发起方不可运行模型训练,正在运行的任务不受影响',
+    en:
+      'After revoking authorization, the initiator cannot run the model training, and the running task will not be affected',
+  },
+  msg_before_authorize: {
+    zh: '授权后,发起方可以运行模型训练',
+    en: 'After authorization, the initiator can run the model training',
+  },
+
+  btn_create_model_set: { zh: '创建模型集', en: 'Create model set' },
+  btn_create_model_job: { zh: '创建训练', en: 'Create model job' },
+  btn_train_model: { zh: '发起训练', en: 'Start train' },
+  btn_evaluation: { zh: '创建评估', en: 'Start evaluation' },
+  btn_inspect_logs: { zh: '查看日志', en: 'Inspect log' },
+  btn_restart: { zh: '重新发起', en: 'Restart' },
+  btn_parameter_tuning: { zh: '调参', en: 'Parameter tuning' },
+  btn_prediction: { zh: '创建预测', en: 'Start prediction' },
+  btn_create_algorithm: { zh: '创建算法', en: 'Create algorithm' },
+  btn_create_type: { zh: '新增类型', en: 'Create type' },
+  btn_start_model: { zh: '开始训练', en: 'Start train' },
+  btn_submit: { zh: '提交', en: 'Submit' },
+  btn_compare: { zh: '发起对比', en: 'Start compare' },
+  btn_create_compare_report: { zh: '创建对比报告', en: 'Create compare report' },
+  btn_go_back_to_index_page: { zh: '回到首页', en: 'Go back to index page' },
+  btn_start_new_job: { zh: '发起新任务', en: 'Start new job' },
+  btn_submit_and_send_request: { zh: '提交并发送', en: 'Submit and send request' },
+  btn_confirm_authorized: { zh: '确认授权', en: 'Confirm authorized' },
+  btn_save_edit: { zh: '保存编辑', en: 'Save' },
+  btn_next_step: { zh: '下一步', en: 'Next step' },
+  btn_stop: {
+    zh: '终止',
+    en: 'Stop',
+  },
+  btn_cancel: {
+    zh: '取消',
+    en: 'cancel',
+  },
+  btn_terminal: {
+    zh: '终止',
+    en: 'terminate',
+  },
+
+  placeholder_searchbox_model: { zh: '输入模型名称', en: 'Please input model name' },
+  placeholder_searchbox_model_set: { zh: '输入模型集名称', en: 'Please input model set name' },
+  placeholder_searchbox_model_job: {
+    zh: '输入模型训练名称',
+    en: 'Please input model job name',
+  },
+  placeholder_searchbox_evaluation_report: {
+    zh: '输入评估任务名称',
+    en: 'Please input evaluation task name',
+  },
+  placeholder_searchbox_prediction_report: {
+    zh: '输入预测任务名称',
+    en: 'Please input prediction task name',
+  },
+  placeholder_searchbox_algorithm: { zh: '输入算法名称', en: 'Please input algorithm name' },
+
+  placeholder_input_algorithm_type: { zh: '请输入算法类型', en: 'Please input algorithm type' },
+  placeholder_searchbox_model_file: {
+    zh: '输入目标地址,搜索模型文件',
+    en: 'Input the target address, search for model files',
+  },
+  placeholder_searchbox_compare_report: {
+    zh: '输入对比报告名称',
+    en: 'Please input report name',
+  },
+  placeholder_searchbox_evaluation_model: {
+    zh: '输入关键词',
+    en: 'Please input name',
+  },
+
+  col_model_set_name: { zh: '模型集名称', en: 'Model set name' },
+  col_algorithm: { zh: '算法', en: 'algorithm' },
+  col_new_version: { zh: '最新版本', en: 'latest version' },
+  col_comment: { zh: '{{name}}描述', en: '{{name}} comment' },
+  col_model_name: { zh: '模型名称', en: 'Model name' },
+  col_model_id: { zh: '模型ID', en: 'Model ID' },
+  col_data_set: { zh: '数据集', en: 'dataset' },
+  col_evaluation_task_name: { zh: '评估任务名称', en: 'Evaluation task name' },
+  col_state: { zh: '运行状态', en: 'State' },
+  col_evaluation_target: { zh: '评估对象', en: 'Evaluation target' },
+  col_prediction_target: { zh: '预测对象', en: 'Prediction target' },
+  col_prediction_task_name: { zh: '预测任务名称', en: 'Prediction task name' },
+  col_name: { zh: '名称', en: 'Name' },
+  col_type: { zh: '类型', en: 'Type' },
+  col_compare_report_name: { zh: '对比报告名称', en: 'Compare report name' },
+  col_compare_number: { zh: '对比项', en: 'Compare number' },
+  col_model_type: { zh: '模型类型', en: 'Model type' },
+  col_model_source: { zh: '模型来源', en: 'Model source' },
+  col_creator: { zh: '创建者', en: 'creator' },
+  col_initiator: { zh: '发起方', en: 'initiator' },
+  col_authorized: { zh: '授权状态', en: 'authorized' },
+  col_loca_authorized: { zh: '本侧授权状态', en: 'local authorized' },
+  col_federal_type: { zh: '联邦类型', en: 'federal type' },
+  col_total_jobs: { zh: '任务总数', en: 'total jobs' },
+  col_latest_job_state: { zh: '最新任务状态', en: 'latest job state' },
+  col_job_state: { zh: '任务状态', en: 'job state' },
+  col_running_time: { zh: '运行时长', en: 'runtime' },
+  col_start_time: { zh: '开始时间', en: 'start time' },
+  col_stop_time: { zh: '结束时间', en: 'end time' },
+  col_running_param: { zh: '运行参数', en: 'running param' },
+
+  title_create_model_set: { zh: '创建模型集', en: 'Create model set' },
+  title_edit_model_set: { zh: '编辑模型集', en: 'Edit model set' },
+  title_create_model_job: { zh: '创建训练', en: 'Create model job' },
+  title_edit_model_train: { zh: '编辑训练', en: 'Edit model train' },
+  title_start_new_job: { zh: '发起新任务', en: 'Start new job' },
+  title_auth_model_train: { zh: '授权模型训练', en: 'Authorize model training' },
+  title_create_valuation: { zh: '创建评估', en: 'Create Evaluation' },
+  title_create_prediction: { zh: '创建预测', en: 'Create Prediction' },
+
+  title_edit_model: { zh: '编辑模型', en: 'Edit model' },
+  title_image_version: { zh: '镜像参数', en: 'Image params' },
+  title_algorithm_config: { zh: '算法配置', en: 'Algorithm config' },
+  title_train_info: { zh: '训练信息', en: 'Train information' },
+  title_resource_config_detail: { zh: '资源配置参数详情', en: 'Resource config detail' },
+  title_resource_config: { zh: '资源配置', en: 'Resource config' },
+  title_advanced_config: { zh: '高级配置', en: 'Advanced config' },
+  title_model_favourit: { zh: '我的收藏', en: 'My favourite model' },
+  title_revision_history: { zh: '历史版本', en: 'Revision history' },
+  title_base_info: { zh: '基本信息', en: 'Base info' },
+  title_train_config: { zh: '训练配置', en: 'Train config' },
+  title_train_report: { zh: '训练报告', en: 'Train report' },
+  title_instance_info: { zh: '实例信息', en: 'Instance info' },
+  title_version: { zh: '版本', en: 'Version' },
+  title_model_train_dataset: { zh: '训练数据集', en: 'Train dataset' },
+  title_train_duration: { zh: '训练时间', en: 'Train duration' },
+  title_log: { zh: '日志', en: 'Log' },
+  title_check: { zh: '查看', en: 'Check' },
+  title_model_comment: { zh: '模型描述', en: 'Model comment' },
+  title_indicator_auc_roc: { zh: 'AUC ROC', en: 'AUC ROC' },
+  title_indicator_accuracy: { zh: 'Accuracy', en: 'Accuracy' },
+  title_indicator_precision: { zh: 'Precision', en: 'Precision' },
+  title_indicator_recall: { zh: 'Recall', en: 'Recall' },
+  title_indicator_f1_score: { zh: 'F1 score', en: 'F1 score' },
+  title_indicator_log_loss: { zh: 'log loss', en: 'log loss' },
+  title_confusion_matrix: { zh: 'Confusion Matrix', en: 'Confusion Matrix' },
+  title_confusion_matrix_normalization: { zh: '归一化', en: 'Normalization' },
+  title_feature_importance: {
+    zh: 'Feature Importance(Top 15)',
+    en: 'Feature Importance(Top 15)',
+  },
+  title_export_model: {
+    zh: '导出模型',
+    en: 'export model',
+  },
+  title_export_evaluation_report: {
+    zh: '导出评估报告',
+    en: 'export evaluation report',
+  },
+  title_export_prediction_report: {
+    zh: '导出预测报告',
+    en: 'export prediction report',
+  },
+  title_export_compare_report: {
+    zh: '导出对比报告',
+    en: 'export compare report',
+  },
+  title_export_dataset: {
+    zh: '导出数据集',
+    en: 'export dataset',
+  },
+  title_evaluation_target: { zh: '评估对象', en: 'Evaluation target' },
+  title_evaluation_report: { zh: '评估报告', en:
'Evaluation report' }, + title_prediction_report: { zh: '预测报告', en: 'Prediction report' }, + title_prediction_result: { zh: '预测结果', en: 'Prediction result' }, + title_evaluation_result: { zh: '评估结果', en: 'Evaluation result' }, + title_compare_report: { zh: '对比报告', en: 'Compare report' }, + title_result_dataset: { zh: '结果数据集', en: 'Result dataset' }, + + title_create_algorithm: { zh: '创建我的算法', en: 'Create algorithm' }, + title_edit_algorithm: { zh: '编辑算法', en: 'Edit algorithm' }, + title_algorithm_file: { zh: '算法文件', en: 'Algorithm file' }, + title_algorithm_label_owner: { zh: '算法文件 - 标签所有者', en: 'Algorithm file - label owner' }, + title_algorithm_no_label_owner: { + zh: '算法文件 - 非标签所有者', + en: 'Algorithm file - no label owner', + }, + title_todo_train_tasks: { zh: '待处理训练任务', en: 'waiting train model task' }, + title_todo_model_job_tasks: { zh: '待处理模型训练', en: 'waiting model job task' }, + title_todo_prediction_tasks: { zh: '待处理预测任务', en: 'waiting prediction model task' }, + title_todo_evaluation_tasks: { zh: '待处理评估任务', en: 'waiting evaluation model task' }, + title_todo_algorithm_tasks: { zh: '待处理算法任务', en: 'waiting algorithm model task' }, + title_create_compare_report: { zh: '创建对比报告', en: 'Create compare report' }, + + title_reject_application: { zh: '拒绝申请', en: 'Reject application' }, + + title_label_owner: { zh: '标签所有者', en: 'Label owner' }, + title_no_label_owner: { + zh: '非标签所有者', + en: 'No label owner', + }, + title_train_model: { + zh: '模型训练', + en: 'Train Model', + }, + title_algorithm_audit: { + zh: '算法审核', + en: 'Algorithm audit', + }, + title_authorization_request: { + zh: '{{peerName}}向您发起「{{name}}」训练授权申请', + en: '{{peerName}} initiates a training authorization application for "{{name}}"', + }, + title_train_model_job: { + zh: '训练任务', + en: 'Train model job', + }, + title_train_job_compare: { + zh: '训练任务对比', + en: 'Train job compare', + }, + + label_model_set: { zh: '模型集', en: 'Model Set' }, + label_model: { zh: '模型', en: 'Model' }, + title_model_set_info: { zh: '模型集信息', en: 'Model set info' }, + title_model_info: { zh: '模型信息', en: 'Model info' }, + label_data_set: { zh: '数据集', en: 'dataset' }, + label_intersection_set: { zh: '数据集', en: 'dataset' }, + label_data_source: { zh: '数据源', en: 'data source' }, + label_data_path: { zh: '数据源', en: 'data source' }, + label_manual_datasource: { zh: '手动输入数据源', en: 'Manually entering the data source' }, + + label_model_set_name: { zh: '模型集名称', en: 'Model set name' }, + label_model_set_comment: { zh: '模型集描述', en: 'Model set comment' }, + label_model_name: { zh: '模型名称', en: 'Model name' }, + label_model_comment: { zh: '模型描述', en: 'Model comment' }, + label_model_train_dataset: { zh: '训练数据集', en: 'Train dataset' }, + label_model_train_comment: { zh: '模型描述', en: 'Train comment' }, + label_resource_template: { zh: '资源模板', en: 'Resource template' }, + + label_image: { zh: '镜像', en: 'Image' }, + + label_file_ext: { zh: '文件扩展名', en: 'File extension' }, + label_file_type: { zh: '文件类型', en: 'File type' }, + label_enable_packing: { zh: '是否优化', en: 'Enable packing' }, + label_ignore_fields: { zh: '忽略字段', en: 'Ignore fields' }, + label_cat_fields: { zh: '类型变量字段', en: 'Categorical fields' }, + label_send_metrics_to_follower: { + zh: '是否将指标发送至 follower', + en: 'send_metrics_to_follower', + }, + label_send_scores_to_follower: { + zh: '是否将预测值发送至 follower', + en: 'send_scores_to_follower', + }, + label_verbosity: { zh: '日志输出等级', en: 'Verbosity' }, + label_label_field: { zh: '标签字段', en: 'Label field' }, + label_load_model_path: { zh: '加载模型路径', en: 'Load model 
path' }, + label_load_model_name: { zh: '加载模型名称', en: 'Load model name' }, + label_load_checkpoint_filename: { zh: '加载文件名', en: 'Load checkpoint filename' }, + label_load_checkpoint_filename_with_path: { + zh: '加载文件路径', + en: 'Load checkpoint filename with path', + }, + label_verify_example_ids: { zh: '是否检验 example_ids', en: 'verify_example_ids' }, + label_no_data: { zh: '标签方是否无特征', en: 'no_data' }, + label_role: { zh: '训练角色', en: 'Training role' }, + label_steps_per_sync: { zh: '参数同步 step 间隔', en: 'Steps per sync' }, + label_image_version: { zh: '镜像版本号', en: 'image_version' }, + label_num_partitions: { zh: 'num_partitions', en: 'num_partitions' }, + label_shuffle_data_block: { zh: '是否打乱顺序', en: 'shuffle_data_block' }, + + label_train_name: { zh: '训练名称', en: 'Train name' }, + label_federal_type: { zh: '联邦类型', en: 'Federal type' }, + label_train_role_type: { zh: '训练角色', en: 'Train role type' }, + label_radio_logistic: { zh: 'logistic', en: 'logistic' }, + label_radio_mse: { zh: 'mse', en: 'mse' }, + label_param_config: { zh: '参数配置', en: 'Params config' }, + + label_choose_algorithm: { zh: '选择算法', en: 'Choose algorithm' }, + label_algorithm: { zh: '算法', en: 'Algorithm' }, + label_role_type: { zh: '角色', en: 'Role type' }, + label_loss_type: { zh: '损失函数类型', en: 'Loss type' }, + label_radio_label: { zh: '标签方', en: 'Label' }, + label_radio_feature: { zh: '特征方', en: 'Feature' }, + label_learning_rate: { zh: '学习率', en: 'learning_rate' }, + label_max_iters: { zh: '迭代数', en: 'max_iters' }, + label_max_depth: { zh: '最大深度', en: 'max_depth' }, + label_l2_regularization: { zh: 'L2惩罚系数', en: 'l2_regularization' }, + label_max_bins: { zh: '最大分箱数量', en: 'max_bins' }, + label_code_tar: { zh: '代码', en: 'code_tar' }, + label_num_parallel: { zh: '线程池大小', en: 'num_parallel' }, + label_validation_data_path: { zh: '验证数据集地址', en: 'validation_data_path' }, + + label_epoch_num: { zh: 'epoch_num', en: 'epoch_num' }, + label_sparse_estimator: { zh: 'sparse_estimator', en: 'sparse_estimator' }, + label_save_checkpoint_steps: { zh: '保存备份间隔步数', en: 'save_checkpoint_steps' }, + label_save_checkpoint_secs: { zh: '保存备份间隔秒数', en: 'save_checkpoint_secs' }, + label_optimize_target: { zh: '训练目标', en: 'Train target' }, + label_model_feature: { zh: '不入模特征', en: 'Model feature' }, + + label_resource_template_type: { zh: '资源模板', en: 'Resource template' }, + label_radio_high: { zh: '大', en: 'High' }, + label_radio_medium: { zh: '中', en: 'Medium' }, + label_radio_low: { zh: '小', en: 'Low' }, + label_radio_custom: { zh: '自定义', en: 'Custom' }, + + label_master_replicas: { zh: 'Master 实例数', en: 'master_replicas' }, + label_master_cpu: { zh: 'Master CPU数量', en: 'master_cpu' }, + label_master_mem: { zh: 'Master内存大小', en: 'master_mem' }, + label_ps_replicas: { zh: 'PS 实例数', en: 'ps_replicas' }, + label_ps_cpu: { zh: 'PS CPU数量', en: 'ps_cpu' }, + label_ps_mem: { zh: 'PS内存大小', en: 'ps_mem' }, + label_ps_num: { zh: 'PS数量', en: 'ps_num' }, + label_worker_replicas: { zh: 'Worker 实例数', en: 'worker_replicas' }, + label_worker_cpu: { zh: 'Worker CPU数量', en: 'worker_cpu' }, + label_worker_mem: { zh: 'Worker内存大小', en: 'worker_mem' }, + label_worker_num: { zh: 'Worker数量', en: 'worker_num' }, + + label_is_share_model_evaluation_index: { + zh: '共享模型评价指标', + en: 'Share model evaluation index', + }, + label_is_share_model_evaluation_report: { + zh: '共享模型评价报告', + en: 'Share model evaluation report', + }, + label_is_share_offline_prediction_result: { + zh: '共享离线预测结果', + en: 'Share offline prediction result', + }, + 
label_is_allow_coordinator_parameter_tuning: {
+    zh: '允许合作伙伴自行发起调参任务',
+    en: 'Allow coordinator to initiate tuning tasks on their own',
+  },
+  label_is_auto_create_compare_report: {
+    zh: '自动生成对比报告',
+    en: 'Auto create compare report',
+  },
+
+  label_model_version: { zh: '版本列表', en: 'Model version' },
+  label_model_version_count: { zh: '共{{count}}个', en: '{{count}} items' },
+
+  label_score_threshold: { zh: 'Score threshold = {{number}}', en: 'Score threshold = {{number}}' },
+
+  label_download_model_package: {
+    zh: '下载模型包',
+    en: 'Download model package',
+  },
+  label_download_evaluation_report: {
+    zh: '下载评估任务',
+    en: 'Download evaluation report',
+  },
+  label_download_prediction_report: {
+    zh: '下载预测报告',
+    en: 'Download prediction report',
+  },
+  label_download_compare_report: {
+    zh: '下载对比报告',
+    en: 'Download compare report',
+  },
+
+  label_evaluation_task_name: { zh: '评估任务名称', en: 'Evaluation task name' },
+  label_evaluation_dataset: { zh: '评估数据集', en: 'Evaluation dataset' },
+  label_evaluation_comment: { zh: '评估任务描述', en: 'Comment' },
+  label_prediction_task_name: { zh: '预测任务名称', en: 'Prediction task name' },
+  label_prediction_dataset: { zh: '预测数据集', en: 'Prediction dataset' },
+  label_prediction_comment: { zh: '预测任务描述', en: 'Comment' },
+
+  label_algorithm_name: { zh: '算法名称', en: 'Algorithm name' },
+  label_algorithm_type: { zh: '算法类型', en: 'Algorithm type' },
+  label_type: { zh: '类型', en: 'Type' },
+  label_federation_type: { zh: '联邦方式', en: 'Federation type' },
+  label_import_type: { zh: '导入方式', en: 'Import type' },
+  label_algorithm_file_path: { zh: '算法文件路径', en: 'Algorithm file path' },
+  label_select_from_local_file: { zh: '从本地文件中选择', en: 'Select from local file' },
+  label_radio_cross_sample: { zh: '跨样本', en: 'Cross sample' },
+  label_radio_cross_feature: { zh: '跨特征', en: 'Cross feature' },
+  label_radio_path_import: { zh: '路径导入', en: 'Path import' },
+  label_radio_local_import: { zh: '本地导入', en: 'local import' },
+  label_algorithm_type_tree_model: { zh: '树模型', en: 'Tree model' },
+  label_algorithm_type_nn_model: { zh: 'NN模型', en: 'NN model' },
+
+  label_role_type_leader: { zh: 'leader', en: 'Leader' },
+  label_role_type_follower: { zh: 'follower', en: 'Follower' },
+
+  label_name: { zh: '名称', en: 'name' },
+  label_comment: { zh: '描述', en: 'desc' },
+  label_reject_reason: { zh: '拒绝申请', en: 'reject reason' },
+  label_pass: { zh: '已通过申请', en: 'Application passed' },
+  label_reject: { zh: '已拒绝申请', en: 'Application rejected' },
+
+  label_start_task: { zh: ' 发起了', en: ' start' },
+  label_stopped_at: { zh: '结束时间', en: 'Stopped at' },
+  label_started_at: { zh: '开始时间', en: 'Started at' },
+  label_output_model: { zh: '输出模型', en: 'Exported model' },
+
+  suffix_train_tasks: { zh: '的训练任务', en: 'train task' },
+  suffix_model_job_tasks: { zh: '的模型训练', en: 'model job task' },
+  suffix_prediction_tasks: { zh: '的预测任务', en: 'prediction task' },
+  suffix_evaluation_tasks: { zh: '的评估任务', en: 'evaluation task' },
+  suffix_algorithm_tasks: { zh: '的算法任务', en: 'algorithm task' },
+
+  suffix_go_back_to_index: {
+    zh: '{{time}}S 后自动回到首页',
+    en: 'After {{time}}S, go back to index',
+  },
+
+  placeholder_model_set_name: { zh: '请输入模型集名称', en: 'Please input' },
+  placeholder_model_set_comment: {
+    zh: '支持1~100位可见字符,且只包含大小写字母、中文、数字、中划线、下划线',
+    en:
+      'Supports 1-100 visible characters, and only contains uppercase and lowercase letters, Chinese characters, numbers, hyphens, and underscores',
+  },
+  placeholder_model_name: { zh: '请填写', en: 'Please input' },
+  placeholder_model_train_dataset: { zh: '请选择', en:
'Please select' }, + placeholder_model_train_comment: { + zh: '支持1~100位可见字符,且只包含大小写字母、中文、数字、中划线、下划线', + en: + 'Supports 1-100 visible characters, and only contains uppercase and lowercase letters, Chinese characters, numbers, underscores, and underscores', + }, + placeholder_input: { zh: '请填写', en: 'Please input' }, + placeholder_select: { zh: '请选择', en: 'Please select' }, + placeholder_comment: { + zh: '最多为 200 个字符', + en: 'Up to 200 characters', + }, + placeholder_data_source: { zh: '请输入数据源', en: 'Please input dataSource' }, + + msg_model_set_name_required: { zh: '模型集名称为必填项', en: 'Model set name is required' }, + msg_model_name_required: { zh: '模型名称为必填项', en: 'Model name is required' }, + msg_model_train_dataset: { zh: '训练数据集为必填项', en: 'Train dataset is required' }, + msg_required: { zh: '必填项', en: 'Required' }, + msg_modify_model_name_success: { zh: '修改模型名称成功', en: 'Modify model name success' }, + msg_modify_model_comment_success: { zh: '修改模型描述成功', en: 'Modify model comment success' }, + msg_delete_model_success: { zh: '删除模型成功', en: 'Delete model success' }, + msg_file_required: { zh: '请上传文件', en: 'Please upload file' }, + msg_quit_train_model_title: { + zh: '确认要退出「发起模型训练流程」?', + en: 'Are you sure you want to exit the "Initiate Model Training Process"?', + }, + msg_quit_train_model_content: { + zh: '退出后,当前所填写的信息将被清空。', + en: 'After logging out, the information currently filled in will be cleared.', + }, + msg_quit_evaluation_model_title: { + zh: '确认要退出「{{name}}」?', + en: 'Are you sure you want to exit the "{{name}}"?', + }, + msg_quit_prediction_model_title: { + zh: '确认要退出「{{name}}」?', + en: 'Are you sure you want to exit the "{{name}}"?', + }, + msg_quit_form_create: { + zh: '确认要退出?', + en: 'Are you sure you want to exit the "{{name}}"?', + }, + msg_quit_form_edit: { + zh: '确认要退出编辑「{{name}}」?', + en: 'Are you sure you want to exit the "{{name}}" editing?', + }, + + msg_please_select_evaluation_target: { + zh: '请选择评估对象', + en: 'Please select evaluation target', + }, + msg_model_compare_count_limit: { + zh: '至少{{min}}个,至多{{max}}个评估对象', + en: 'At least {{min}} and at most {{max}} evaluation target for compare', + }, + msg_title_confirm_delete_model: { + zh: '确认要删除该模型吗?', + en: 'Are you sure you want to delete the model?', + }, + msg_content_confirm_delete_model: { + zh: '删除后,不影响正在使用该模型的任务,使用该模型的历史任务不能再正常运行,请谨慎删除', + en: + 'After deleting, it will not affect the tasks that are using the model, and the historical tasks using the model can no longer run normally, please delete with caution', + }, + msg_create_model_job_success: { + zh: '创建成功,等待合作伙伴授权', + en: 'Created successfully, waiting for partner authorization', + }, + msg_create_model_job_success_peer: { + zh: '授权完成,等待合作伙伴运行', + en: 'Authorized successfully, waiting for partner run', + }, + msg_create_evaluation_job_success_peer: { + zh: '已授权模型评估,任务开始运行', + en: 'Model evaluation has been authorized, task starts to run', + }, + msg_edit_model_job_success: { + zh: '保存成功', + en: 'Save success', + }, + msg_launch_model_job_success: { + zh: '发起成功', + en: 'Launch success', + }, + msg_launch_model_job_no_peer_auth: { + zh: '合作伙伴未授权,不能发起新任务', + en: 'The partner is not authorized to launch a new task', + }, + msg_stop_model_job_success: { + zh: '终止成功', + en: 'Stop model job success', + }, + msg_title_confirm_delete_model_job_group: { + zh: '确认要删除「{{name}}」?', + en: 'Are you sure you want to delete "{{name}}"?', + }, + msg_content_confirm_delete_model_job_group: { + zh: '删除后,该模型训练下的所有信息无法复原,请谨慎操作', + en: + 'After deletion, all information under 
the model training cannot be recovered, please operate with caution', + }, + msg_can_not_edit_peer_config: { + zh: '合作伙伴未授权,不能编辑合作伙伴配置', + en: 'The partner is not authorized to edit the partner configuration', + }, + msg_stop_warnning: { + zh: '确认要终止「{{name}}」?', + en: 'Are you sure to stop {{name}}', + }, + msg_stop_warning_text: { + zh: '终止后,该评估任务将无法重新运行,请谨慎操作', + en: 'After stopping, the evaluation task will not be able to run again. Please be careful.', + }, + msg_stop_successful: { + zh: '终止成功', + en: 'Stop Successful', + }, + msg_delete_warnning: { + zh: '确认要删除「{{name}}」?', + en: 'Are you sure to delete {{name}}', + }, + msg_delete_evaluation_warning_text: { + zh: '删除后,该评估任务及信息将无法恢复,请谨慎操作', + en: + 'After deleting, the evaluation task and information will not be able to recover. Please be careful.', + }, + msg_delete_prediction_warning_text: { + zh: '删除后,该预测任务及信息将无法恢复,请谨慎操作', + en: + 'After deleting, the prediction task and information will not be able to recover. Please be careful.', + }, + msg_evaluation_invitation_text: { + zh: '向您发起「{{name}}」的模型评估授权申请', + en: 'invites you to evaluate the model "{{name}}"', + }, + msg_prediction_invitation_text: { + zh: '向您发起「{{name}}」的离线预测授权申请', + en: 'invites you to predict the model "{{name}}"', + }, + msg_target_model_not_found: { + zh: '目标模型不存在,请联系合作伙伴重新选择', + en: 'Target model does not exist, please contact the partner to re-select', + }, + msg_participant_tip_text: { + zh: '合作方均同意授权时,{{module}}任务将自动运行', + en: 'All partners agree to authorize, {{module}} task will automatically run', + }, + msg_time_required: { zh: '请选择时间', en: 'Set time please' }, + msg_model_job_edit_success: { + zh: '编辑成功', + en: 'Edit success', + }, + + hint_model_set_form_modal: { + zh: '模型集名称和描述将同步至所有合作伙伴。', + en: 'The model set name and description will be synchronized to all partners.', + }, + hint_no_share_offline_prediction_result: { + zh: '对方已选择不将预测结果分享给你', + en: 'The other party has chosen not to share the prediction result with you', + }, + hint_algorithm_audit: { + zh: '请注意检查算法代码的细节,这涉及到您的信息隐私等安全问题。', + en: + 'Please pay attention to check the details of the algorithm code, which involves security issues such as your information privacy.', + }, + + no_result: { + zh: '暂无模型版本,请 ', + en: 'No model , please ', + }, + + step_global: { zh: '全局配置', en: 'Global config' }, + step_param: { zh: '参数配置', en: 'Params config' }, + + step_coordinator: { zh: '本侧配置', en: 'Coordinator config' }, + step_participant: { zh: '合作伙伴配置', en: 'Participant config' }, + + tip_radio_logistic: { zh: '用于分类任务', en: 'For classification tasks' }, + tip_radio_mse: { zh: '用于回归任务', en: 'For regression tasks' }, + tip_choose_algorithm: { + zh: '后续模型训练将沿用该算法,只可调整算法版本和算法参数', + en: + 'Subsequent model training will continue to use this algorithm, and only the algorithm version and algorithm parameters can be adjusted', + }, + tip_share_model_evaluation_index: { + zh: '共享后合作伙伴能够获得模型训练任务相关的数据指标', + en: 'After sharing, partners can obtain data indicators related to model training tasks', + }, + tip_share_model_evaluation_report: { + zh: '共享后合作伙伴能够获得模型评估任务相关的数据指标', + en: 'After sharing, partners can obtain data indicators related to model evaluation tasks', + }, + tip_share_offline_prediction_result: { + zh: '确认要共享吗?合作伙伴将获得离线预测结果,请注意风险。', + en: + 'Are you sure you want to share? Partners will get offline prediction results, please be aware of risks.', + }, + tip_allow_coordinator_parameter_tuning: { + zh: + '确认要允许吗?合作伙伴可在不改变算法和数据的情况下,自行发起调参任务,不需要获得您的授权。', + en: + 'Are you sure you want to allow it? 
Partners can initiate adjustment tasks on their own without changing the algorithm and data, without your authorization.', + }, + tip_model_evaluation: { + zh: '可对单个模型进行评估', + en: 'Evaluate single model', + }, + tip_no_share_model_evaluation_report: { + zh: '对方已选择不与您共享报告结果,如果相关诉求,请联系对方进行协商', + en: + 'The other party has chosen not to share the results of the report with you. If you have relevant claims, please contact the other party for negotiation', + }, + tip_no_tip_share_offline_prediction_result: { + zh: '对方已选择不与您共享离线预测结果,如果相关诉求,请联系对方进行协商', + en: + 'The other party has chosen not to share the offline prediction results with you. If you have a request, please contact the other party for negotiation', + }, + tip_only_show_read_model: { + zh: '仅展示所有参与方完成配置的模型', + en: 'Only show models that have been configured by all participants', + }, + tip_if_data_error_please_check_template: { + zh: '如数据显示异常,请检查自定义模板是否符合编写规范', + en: + 'If the data is abnormal, please check whether the custom template complies with the writing specifications', + }, + tip_please_check_template: { + zh: '请检查自定义模板是否符合编写规范', + en: 'Please check whether the custom template complies with the writing specifications', + }, + tip_model_compare_count_limit: { + zh: '可对至少{{min}}个,至多{{max}}个评估对象进行对比', + en: 'You can select at least {{min}} and at most {{max}} evaluation target for compare', + }, + tip_confusion_matrix_normalization: { + zh: '归一化后将展示样本被预测成各类别的百分比', + en: 'After normalization, it will show the percentage of samples predicted into each category', + }, + tip_training_metrics_visibility: { + zh: '训练报告仅自己可见,如需共享报告,请前往训练详情页开启', + en: + 'The training report is only visible to you. If you want to share the report, please go to the training details page', + }, + tip_agree_authorization: { + zh: '授权后,发起方可以运行模型训练并修改参与方的训练参数,训练指标将对所有参与方可见', + en: + 'After agreeing to the authorization, the applicant can modify its own parameter configuration and resources on the opposite side and automatically run the model training, and the training indicators will be visible to all participants', + }, + tip_learning_rate: { + zh: '使用损失函数的梯度调整网络权重的超参数,​ 推荐区间(0.01-1]', + en: 'The hyperparameter of the learning rate, recommended range (0.01-1]', + }, + tip_max_iters: { + zh: '该模型包含树的数量,推荐区间(5-20)', + en: 'The number of trees in the model, recommended range (5-20)', + }, + tip_max_depth: { + zh: '树模型的最大深度,用来控制过拟合,推荐区间(4-7)', + en: 'The maximum depth of the tree model, used to control overfitting, recommended range (4-7)', + }, + tip_l2_regularization: { + zh: '对节点预测值的惩罚系数,推荐区间(0.01-10)', + en: 'The penalty coefficient of the prediction value of the node, recommended range (0.01-10)', + }, + tip_max_bins: { + zh: '离散化连续变量,可以减少数据稀疏度,一般不需要调整', + en: + 'Discretization of continuous variables, can reduce the data sparsity, generally do not need to adjust', + }, + tip_num_parallel: { + zh: '建议与CPU核数接近', + en: 'Recommended to be close to the number of CPU cores', + }, + tip_epoch_num: { + zh: '指一次完整模型训练需要多少次Epoch,一次Epoch是指将全部训练样本训练一遍', + en: + 'The number of Epochs required for a complete model training, one Epoch is one time training all samples', + }, + tip_verbosity: { + zh: '有 0、1、2、3 四种等级,等级越大日志输出的信息越多', + en: + 'There are four levels of verbosity, the level of which increases the amount of information output', + }, + tip_image: { + zh: '用于训练的镜像', + en: 'Image used for training', + }, + tip_file_ext: { + zh: '目前支持.data, .csv or .tfrecord', + en: 'Currently supports .data, .csv or .tfrecord', + }, + tip_file_type: { + zh: '目前支持csv or tfrecord', + 
en: 'Currently supports csv or tfrecord',
+ },
+ tip_enable_packing: {
+ zh: '提高计算效率,true 为优化,false 为不优化。',
+ en: 'Improves computation efficiency; true enables the optimization, false disables it.',
+ },
+ tip_ignore_fields: {
+ zh: '不参与训练的字段',
+ en: 'Fields not included in training',
+ },
+ tip_cat_fields: {
+ zh: '类别变量字段,训练中会特别处理',
+ en: 'Categorical variable fields, which are specially processed during training',
+ },
+ tip_send_scores_to_follower: {
+ zh: '是否将预测值发送至follower侧,false代表否,true代表是',
+ en: 'Whether to send the predicted value to the follower side, false is no, true is yes',
+ },
+ tip_send_metrics_to_follower: {
+ zh: '是否将指标发送至follower侧,false代表否,true代表是',
+ en: 'Whether to send the metrics to the follower side, false is no, true is yes',
+ },
+ tip_verify_example_ids: {
+ zh: '是否检验example_ids,一般情况下训练数据有example_ids,false代表否,true代表是',
+ en:
+ 'Whether to verify example_ids; training data generally has example_ids, false is no, true is yes',
+ },
+ tip_no_data: {
+ zh: '针对标签方没有特征的预测场景,false代表有特征,true代表无特征。',
+ en:
+ 'For prediction scenarios where the label side has no features, false means there are features, true means no features.',
+ },
+ tip_label_field: {
+ zh: '用于指定label',
+ en: 'Used to specify the label field',
+ },
+ tip_load_model_name: {
+ zh: '评估和预测时,根据用户选择的模型,确定该字段的值。',
+ en:
+ 'When evaluating and predicting, the value of this field is determined by the model selected by the user.',
+ },
+ tip_shuffle_data_block: {
+ zh: '打乱数据顺序,增加随机性,提高模型泛化能力',
+ en: 'Shuffle the data order to increase randomness and improve model generalization',
+ },
+ tip_save_checkpoint_secs: {
+ zh: '模型多少秒保存一次',
+ en: 'The model is saved every n seconds',
+ },
+ tip_save_checkpoint_steps: {
+ zh: '模型多少step保存一次',
+ en: 'The model is saved every n steps',
+ },
+ tip_load_checkpoint_filename: {
+ zh: '加载文件名,用于评估和预测时选择模型',
+ en: 'Load file name, used to select the model when evaluating and predicting',
+ },
+ tip_load_checkpoint_filename_with_path: {
+ zh: '加载文件路径,用于更细粒度的控制到底选择哪个时间点的模型',
+ en: 'Load file path, used to control at a finer granularity which point-in-time model is selected',
+ },
+ tip_sparse_estimator: {
+ zh: '是否使用火山引擎的SparseEstimator,由火山引擎侧工程师判定,客户侧默认都为false',
+ en:
+ 'Whether to use the SparseEstimator of Volcano Engine, determined by engineers on the Volcano Engine side; defaults to false on the customer side',
+ },
+ tip_steps_per_sync: {
+ zh: '用于指定参数同步的频率,比如step间隔为10,也就是训练10个batch同步一次参数。',
+ en: 'Frequency of parameter synchronization; for example, a step interval of 10 means parameters are synchronized once every 10 training batches.',
+ },
+ tip_feature_importance: {
+ zh: '数值越高,表示该特征对模型的影响越大',
+ en: 'The higher the value, the greater the influence of this feature on the model',
+ },
+ tip_metric_is_publish: {
+ zh: '开启后,将与合作伙伴共享本次训练指标',
+ en: 'Once enabled, the metrics of this training will be shared with partners',
+ },
+
+ state_success: { zh: '成功', en: 'Success' },
+ state_failed: { zh: '失败', en: 'Fail' },
+ state_ready_to_run: { zh: '待运行', en: 'Ready to run' },
+ state_paused: { zh: '暂停', en: 'Paused' },
+ state_running: { zh: '运行中', en: 'Running' },
+ state_invalid: { zh: '已禁用', en: 'Invalid' },
+ state_unknown: { zh: '状态未知', en: 'Unknown' },
+
+ label_model_type_unspecified: { zh: '模型集', en: 'Unspecified' },
+ label_model_type_tree: { zh: '树模型', en: 'Tree model' },
+ label_model_type_nn: { zh: 'NN模型', en: 'NN model' },
+ label_model_source_from_model_job: {
+ zh: '{{modelJobName}}训练任务',
+ en: 'train job {{modelJobName}}',
+ },
+ label_model_source_from_workflow: {
+ zh: '{{workflowName}}工作流-{{jobName}}任务',
+ en: '{{workflowName}} workflow-{{jobName}} job',
+ },
+ label_enable_schedule_train: { zh: '启用定时重训', en: 'Enable schedule train'
},
+ label_metric_is_publish: { zh: '共享训练报告', en: 'Share the training report' },
+
+ name_model: { zh: '模型', en: 'model' },
+ name_model_set: { zh: '模型集', en: 'model set' },
+ name_algorithm: { zh: '算法', en: 'algorithm' },
+ name_evaluation_job: { zh: '评估任务', en: 'evaluation job' },
+ name_prediction_job: { zh: '预测任务', en: 'prediction job' },
+ name_compare_report: { zh: '对比报告', en: 'compare report' },
+
+ form_field_name: {
+ zh: '名称',
+ en: 'name',
+ },
+ form_field_name_placeholder: {
+ zh: '请输入名称',
+ en: 'Please enter the name',
+ },
+ form_field_comment: {
+ zh: '描述',
+ en: 'description',
+ },
+ form_field_comment_placeholder: {
+ zh: '最多为 200 个字符',
+ en: 'Up to 200 characters',
+ },
+ form_field_job_type: {
+ zh: '联邦配置',
+ en: 'Federal type',
+ },
+ form_field_model_id: {
+ zh: '模型',
+ en: 'model',
+ },
+ form_field_dataset: {
+ zh: '数据集',
+ en: 'dataset',
+ },
+ form_field_config: {
+ zh: '资源模板',
+ en: 'resource template',
+ },
+ form_section_evaluation_config: {
+ zh: '评估配置',
+ en: 'Evaluation configuration',
+ },
+ form_section_prediction_config: {
+ zh: '预测配置',
+ en: 'Prediction configuration',
+ },
+ form_section_resource: {
+ zh: '资源配置',
+ en: 'Resource configuration',
+ },
+ form_btn_submit: {
+ zh: '提交并发送',
+ en: 'Submit and send',
+ },
+ form_section_resource_tip: {
+ zh: 'NN模型的资源配置',
+ en: 'Resource configuration of NN model',
+ },
+ form_schedule_train_tip: {
+ zh: '启用该功能将间隔性地重跑训练任务,且每次训练都将从最新的可用版本开始',
+ en:
+ 'Enabling this feature reruns training tasks at intervals, with each run starting from the latest available version',
+ },
+};
+
+export default separateLng(modelCenter);
diff --git a/web_console_v2/client/src/i18n/resources/modules/modelServing.ts b/web_console_v2/client/src/i18n/resources/modules/modelServing.ts
new file mode 100644
index 000000000..b32683669
--- /dev/null
+++ b/web_console_v2/client/src/i18n/resources/modules/modelServing.ts
@@ -0,0 +1,130 @@
+import { separateLng } from 'i18n/helpers';
+
+const modelServing = {
+ menu_label_model_serving: { zh: '在线服务', en: 'Online serving' },
+
+ btn_inspect_logs: { zh: '查看日志', en: 'Inspect log' },
+ btn_create_model_serving: { zh: '创建服务', en: 'Create model serving' },
+ btn_check: { zh: '点击校验', en: 'Check' },
+ btn_send_to_peer_side: { zh: '发送至对侧', en: 'Send to peer side' },
+
+ service_id: { zh: 'ID', en: 'ID' },
+ col_state: { zh: '状态', en: 'State' },
+ col_name: { zh: '名称', en: 'Name' },
+ col_instance_amount: { zh: '实例数量', en: 'Instance amount' },
+ col_invoke_privilege: { zh: '调用权限', en: 'Invoke privilege' },
+ col_model_type: { zh: '模型类型', en: 'Model type' },
+ col_model_id: { zh: '模型ID', en: 'Model ID' },
+ col_model_name: { zh: '模型名称', en: 'Model name' },
+ col_cpu: { zh: 'CPU', en: 'CPU' },
+ col_men: { zh: '内存', en: 'Memory' },
+ col_instance_id: { zh: '实例ID', en: 'Instance ID' },
+
+ title_create_model_serving: { zh: '创建服务', en: 'Create model serving' },
+ title_edit_model_serving: { zh: '编辑服务', en: 'Edit model serving' },
+ info_receiver_create_model_serving: {
+ zh: '纵向模型服务仅发起方可查看调用地址和 Signature',
+ en: 'For vertical model serving, only the initiator can view the call address and signature',
+ },
+
+ label_name: { zh: '在线服务名称', en: 'Name' },
+ label_comment: { zh: '在线服务描述', en: 'Description' },
+ label_type: { zh: '类型', en: 'Type' },
+ label_model_type: { zh: '联邦类型', en: 'Federation type' },
+ label_model_type_vertical: { zh: '纵向联邦', en: 'Vertical federation' },
+ label_model_type_horizontal: { zh: '横向联邦', en: 'Horizontal federation' },
+ label_model_inference_available: { zh: '可调用', en: 'Callable' },
+
label_model_inference_unavailable: { zh: '不可调用', en: 'Uncallable' },
+ label_instance_spec: { zh: '实例规格', en: 'Instance Specifications' },
+ label_instance_amount: { zh: '实例数', en: 'Instance amount' },
+ label_local_model_feature: { zh: '本侧入模特征', en: 'Local model feature' },
+ label_local_center_result: { zh: '本侧中间结果', en: 'Local center result' },
+ label_peer_center_result: { zh: '对侧中间结果', en: 'Peer center result' },
+ label_feature_dataset: { zh: '特征数据集', en: 'Feature dataset' },
+ label_model_set: { zh: '模型集', en: 'Model Set' },
+ label_model: { zh: '模型', en: 'Model' },
+ label_tab_user_guide: { zh: '调用指南', en: 'User guide' },
+ label_tab_instance_list: { zh: '实例列表', en: 'Instance list' },
+ label_input_params: { zh: '输入参数(仅本侧)', en: 'Input params (local side only)' },
+ label_output_params: { zh: '输出参数', en: 'Output params' },
+ label_api_url: { zh: '访问地址', en: 'API URL' },
+ label_local_feature: { zh: '本侧特征', en: 'Local feature' },
+ label_signature: { zh: 'Signature', en: 'Signature' },
+
+ placeholder_searchbox_name: { zh: '请输入名称查询', en: 'Please input name' },
+ placeholder_name: { zh: '请输入在线服务名称', en: 'Please input online serving name' },
+ placeholder_input: { zh: '请输入', en: 'Please input' },
+ placeholder_select: { zh: '请选择', en: 'Please select' },
+ placeholder_select_model: { zh: '请选择模型', en: 'Please select model' },
+ placeholder_comment: {
+ zh: '最多为 200 个字符',
+ en: 'Up to 200 characters',
+ },
+
+ msg_required: { zh: '必填项', en: 'Required' },
+ msg_check_fail: {
+ zh: '请输入正确的特征、中间结果或特征数据集',
+ en: 'Please enter the correct feature, intermediate result or feature dataset',
+ },
+
+ msg_title_confirm_delete: { zh: '确认要删除「{{name}}」?', en: 'Confirm to delete <{{name}}>?' },
+ msg_content_confirm_delete: {
+ zh: '一旦删除,在线服务相关数据将无法复原,请谨慎操作',
+ en: 'The delete operation cannot be recovered, please operate with caution',
+ },
+ msg_edit_service_desc: { zh: '在线服务信息', en: 'Service Info' },
+
+ tip_local_feature: {
+ zh: '请正确选择本侧特征,特征选择会影响推理结果',
+ en:
+ 'Please select the local feature correctly, the feature selection will affect the inference result',
+ },
+ tip_instance_range: {
+ zh: '实例数范围1~100',
+ en: 'The number of instances ranges from 1 to 100',
+ },
+
+ state_loading: { zh: '部署中', en: 'Loading' },
+ state_unloading: { zh: '删除中', en: 'Unloading' },
+ state_running: { zh: '运行中', en: 'Running' },
+ state_unknown: { zh: '异常', en: 'Unknown' },
+ state_pending_accept: { zh: '待合作伙伴配置', en: 'Wait for accepting' },
+ state_waiting_config: { zh: '待合作伙伴配置', en: 'Wait for config' },
+ state_deleted: { zh: '异常', en: 'Deleted' },
+ tip_deleted: { zh: '对侧已经删除', en: 'The peer side has been deleted' },
+
+ state_check_waiting: { zh: '待校验', en: 'Waiting' },
+ state_check_success: { zh: '校验成功', en: 'Success' },
+ state_check_fail: { zh: '校验不通过', en: 'Fail' },
+
+ cannot_create_service_without_models: {
+ zh: '因对应模型不存在,请选择两侧均存在的纵向联邦模型进行部署',
+ en:
+ 'Because the corresponding model does not exist, please select a vertical federated model that exists on both sides for deployment.',
+ },
+
+ msg_todo_model_serving_tasks: {
+ zh: '{{count}} 条待处理在线服务',
+ en: '{{count}} tasks to be processed',
+ },
+
+ msg_title_todo_model_serving_tasks: {
+ zh: '待处理在线服务',
+ en: 'Tasks to be processed',
+ },
+
+ msg_suffix_model_serving_tasks: {
+ zh: ' 的在线任务',
+ en: "'s service",
+ },
+ msg_duplicate_service_name: {
+ zh: '在线服务名称已存在',
+ en: 'Service name already exists',
+ },
+ msg_duplicate_participant_service_name: {
+ zh: '合作伙伴侧在线服务名称已存在',
+ en: 'Service name already exists on the participant side',
+ },
+};
+
+export default
separateLng(modelServing); diff --git a/web_console_v2/client/src/i18n/resources/modules/operation_maintenance.ts b/web_console_v2/client/src/i18n/resources/modules/operation_maintenance.ts new file mode 100644 index 000000000..f9e18677c --- /dev/null +++ b/web_console_v2/client/src/i18n/resources/modules/operation_maintenance.ts @@ -0,0 +1,18 @@ +import { separateLng } from 'i18n/helpers'; + +const operation_maintenance = { + btn_submit: { zh: '提交', en: 'Submit' }, + btn_reset: { zh: '重置', en: 'Reset' }, + + col_job_name: { zh: 'K8s Job名称', en: 'job_name' }, + col_job_type: { zh: '测试类型', en: 'job_type' }, + col_operation: { zh: '测试状态', en: 'Operation' }, + + job_detail: { zh: '工作详情', en: 'job_detail' }, + + state_check_success: { zh: '校验成功', en: 'Success' }, + state_check_fail: { zh: '校验不通过', en: 'Fail' }, + state_check_repeat: { zh: '工作已存在,请更改name_prefix字段', en: 'Repeat' }, +}; + +export default separateLng(operation_maintenance); diff --git a/web_console_v2/client/src/i18n/resources/modules/project.ts b/web_console_v2/client/src/i18n/resources/modules/project.ts index c8d5ef680..8f724fdb4 100644 --- a/web_console_v2/client/src/i18n/resources/modules/project.ts +++ b/web_console_v2/client/src/i18n/resources/modules/project.ts @@ -1,17 +1,17 @@ import { separateLng } from 'i18n/helpers'; const project = { - no_result: { zh: '暂无项目', en: 'No project yet' }, - create: { zh: '创建项目', en: 'Create project' }, + no_result: { zh: '暂无工作区', en: 'No workspace yet' }, + create: { zh: '创建工作区', en: 'Create workspace' }, describe: { zh: - '提供项目的新增和管理功能,支持对项目进行新增、编辑、查询、删除功能,可查看一个项目下的联邦工作流任务列表、模型列表、API列表,一个项目下可创建多个联邦工作流任务。', + '提供工作区的新增和管理功能,支持对工作区进行新增、编辑、查询、删除功能,可查看一个工作区下的联邦工作流任务列表、模型列表、API列表,一个工作区下可创建多个联邦工作流任务。', en: - 'Provide project addition and management functions, support adding, editing, querying, and deleting projects. You can view the federal workflow task list, model list, and API list under a project. Multiple federal tasks can be created under a project Stream tasks.', + 'Provide workspace addition and management functions, support adding, editing, querying, and deleting workspaces. You can view the federal workflow task list, model list, and API list under a workspace. 
Multiple federated workflow tasks can be created under a workspace.',
},
search_placeholder: {
- zh: '输入项目名称关键词搜索',
- en: 'Enter the project name or keyword to search',
+ zh: '输入工作区名称关键词搜索',
+ en: 'Enter the workspace name or keyword to search',
},
display_card: { zh: '卡片视图', en: 'Card view' },
display_list: { zh: '表格视图', en: 'Table view' },
@@ -26,24 +26,24 @@ const project = {
create_work_flow: { zh: '创建工作流', en: 'Create a workflow' },
connection_status: { zh: '连接状态', en: 'Connection status' },
workflow_number: { zh: '工作流任务数量', en: 'Total workflows' },
- name: { zh: '项目名称', en: 'Project name' },
+ name: { zh: '工作区名称', en: 'Workspace name' },
participant_name: { zh: '合作伙伴名称', en: 'Participant name' },
participant_url: { zh: '合作伙伴节点地址', en: 'Participant node address' },
participant_domain: { zh: '合作伙伴泛域名', en: "Participant participant's domain" },
selft_domain: { zh: '本侧泛域名', en: 'Self domain name' },
- remarks: { zh: '说明备注', en: 'Remarks' },
- name_placeholder: { zh: '请填写项目名称', en: 'Please enter name' },
+ remarks: { zh: '说明描述', en: 'Remarks' },
+ name_placeholder: { zh: '请填写工作区名称', en: 'Please enter name' },
participant_name_placeholder: { zh: '请输入合作伙伴名称', en: 'Please enter participant name' },
participant_domain_placeholder: { zh: '请填写泛域名', en: 'Please enter domain' },
- remarks_placeholder: { zh: '请填写说明备注', en: 'Please enter remarks' },
- name_message: { zh: '请填写项目名称', en: 'Please enter name' },
+ remarks_placeholder: { zh: '请填写说明描述', en: 'Please enter remarks' },
+ name_message: { zh: '请填写工作区名称', en: 'Please enter name' },
participant_name_message: { zh: '请填写合作伙伴名称', en: 'Please enter participant name' },
participant_url_message: {
zh: '请填写合作伙伴节点地址',
en: 'Please enter participant node address',
},
- edit: { zh: '编辑项目', en: 'Edit project' },
+ edit: { zh: '编辑工作区', en: 'Edit workspace' },
workflow: { zh: '工作流任务', en: 'Workflow task' },
mix_dataset: { zh: '融合数据集', en: 'Fusion data set' },
model: { zh: '模型', en: 'Model' },
@@ -66,10 +66,10 @@ const project = {
},
upload_certificate_message: { zh: '请上传证书', en: 'Please upload the certificate' },
drag_to_upload: { zh: '拖拽到这里进行上传', en: 'Drag and drop here to upload' },
- create_success: { zh: '创建项目成功', en: 'Create project succeed!' },
+ create_success: { zh: '创建工作区成功', en: 'Create workspace succeed!' },
label_token: { zh: '联邦密码' },
- edit_success: { zh: '编辑项目成功', en: 'Edit project succeed!' },
+ edit_success: { zh: '编辑工作区成功', en: 'Edit workspace succeed!' },
msg_var_name: { zh: '请输入变量名' },
msg_var_value: { zh: '请输入变量值' },
msg_sure_2_cancel: { zh: '确认取消?'
},
@@ -80,14 +80,17 @@ const project = {
msg_no_var_yet: { zh: '当前没有环境变量参数,请添加' },
msg_token_required: { zh: '联邦密码为必填项' },
msg_token_invalid: { zh: '只允许英文、数字的组合' },
- placeholder_global_project_filter: { zh: '选择特定项目筛选资源' },
- placeholder_no_project: { zh: '暂无项目' },
+ placeholder_global_project_filter: { zh: '选择特定工作区筛选资源' },
+ placeholder_no_project: { zh: '暂无工作区' },
placeholder_domain_name: { zh: '泛域名间值' },
placeholder_token: { zh: '请输入联邦密码' },
placeholder_participant_url: {
zh: 'IPv4/v6 地址(包含端口)',
en: 'IP(v4 or v6) address with Port',
},
+
+ label_type_light_client: { zh: '轻量级', en: 'Light client' },
+ label_type_platform: { zh: '标准', en: 'Platform' },
};

export default separateLng(project);
diff --git a/web_console_v2/client/src/i18n/resources/modules/settings.ts b/web_console_v2/client/src/i18n/resources/modules/settings.ts
index d24c87308..e0b2710d2 100644
--- a/web_console_v2/client/src/i18n/resources/modules/settings.ts
+++ b/web_console_v2/client/src/i18n/resources/modules/settings.ts
@@ -13,8 +13,11 @@ const settings = {
},
placeholder_image: { zh: '请选择镜像版本' },

- system_log: { zh: '系统日志', en: 'system log' },
- system_setting: { zh: '系统配置', en: 'system setting' },
+ system_log: { zh: '系统日志', en: 'System log' },
+ system_setting: { zh: '全局配置', en: 'Settings' },
+ edit_success: { zh: '修改环境变量成功', en: 'Edit environment variables succeed!' },
+
+ msg_wrong_format: { zh: 'JSON {{type}} 格式错误', en: 'JSON {{type}} wrong format' },
};

export default separateLng(settings);
diff --git a/web_console_v2/client/src/i18n/resources/modules/term.ts b/web_console_v2/client/src/i18n/resources/modules/term.ts
index 9e6965873..aa3c1ef17 100644
--- a/web_console_v2/client/src/i18n/resources/modules/term.ts
+++ b/web_console_v2/client/src/i18n/resources/modules/term.ts
@@ -1,7 +1,7 @@
import { separateLng } from 'i18n/helpers';

const term = {
- project: { zh: '项目', en: 'Project' },
+ project: { zh: '工作区', en: 'Workspace' },
coordinator: { zh: '发起方', en: 'Coordinator' },
participant: { zh: '合作方', en: 'Participant' },
workflow: { zh: '工作流', en: 'Workflow' },
diff --git a/web_console_v2/client/src/i18n/resources/modules/trustedCenter.ts b/web_console_v2/client/src/i18n/resources/modules/trustedCenter.ts
new file mode 100644
index 000000000..810f8e27e
--- /dev/null
+++ b/web_console_v2/client/src/i18n/resources/modules/trustedCenter.ts
@@ -0,0 +1,147 @@
+import { separateLng } from 'i18n/helpers';
+
+const trusted_center = {
+ btn_create_trusted_computing: { zh: '创建可信计算', en: 'Create trusted computing' },
+ btn_authorized: { zh: '授权', en: 'Authorize' },
+ btn_unauthorized: { zh: '撤销', en: 'Revoke' },
+ btn_submit_apply: { zh: '提交并申请', en: 'Submit and apply' },
+ btn_confirm_authorization: { zh: '确认授权', en: 'Confirm authorization' },
+ btn_submit_and_run: { zh: '提交并执行', en: 'Submit and run' },
+ btn_cancel: { zh: '取消', en: 'Cancel' },
+ btn_post_task: { zh: '发起任务', en: 'Post task' },
+ btn_termination: { zh: '终止', en: 'Terminate' },
+ btn_export: { zh: '导出', en: 'Export' },
+ btn_pass: { zh: '通过', en: 'Pass' },
+ btn_reject: { zh: '拒绝', en: 'Reject' },
+ btn_go_back: { zh: '返回', en: 'Go back' },
+ btn_inspect_logs: { zh: '查看日志', en: 'Inspect log' },
+
+ label_trusted_center: { zh: '可信中心', en: 'Trusted Center' },
+ label_coordinator_self: { zh: '本方', en: 'This party' },
+ label_computing_name: { zh: '计算名称', en: 'Computing name' },
+ label_description: { zh: '描述', en: 'Description' },
+ label_algorithm_type: { zh: '算法类型', en: 'Algorithm Type' },
+ label_algorithm_select: { zh: '选择算法', en: 'Algorithm
select' },
+ label_our_dataset: { zh: '我方数据集', en: 'Our dataset' },
+ label_partner_one_dataset: { zh: '合作伙伴 1 数据集', en: 'Partner 1 dataset' },
+ label_partner_two_dataset: { zh: '合作伙伴 2 数据集', en: 'Partner 2 dataset' },
+ label_resource_template: { zh: '资源模板', en: 'Resource Template' },
+ label_resource_config_params_detail: {
+ zh: '资源配置参数详情',
+ en: 'Resource config params detail',
+ },
+ label_trusted_job_comment: { zh: '任务备注', en: 'Trusted job comment' },
+
+ placeholder_search_task: { zh: '输入任务名称', en: 'Enter task name' },
+ placeholder_input: { zh: '请输入', en: 'Please Input' },
+ placeholder_select: { zh: '请选择', en: 'Please select' },
+ placeholder_select_algo_type: { zh: '请选择算法类型', en: 'Please select algorithm type' },
+ placeholder_input_comment: { zh: '最多为200个字符', en: 'Up to 200 characters' },
+ placeholder_select_algo: { zh: '请选择算法', en: 'Please select algorithm' },
+ placeholder_select_algo_version: { zh: '请选择算法版本', en: 'Please select algorithm version' },
+ placeholder_select_dataset: {
+ zh: '请选择已发布的原始/结果数据集',
+ en: 'Please select a released original/resulting dataset',
+ },
+ placeholder_trusted_job_set_comment: {
+ zh: '支持1~100位可见字符,且只包含大小写字母、中文、数字、中划线、下划线',
+ en:
+ 'Supports 1-100 visible characters, containing only uppercase and lowercase letters, Chinese characters, numbers, hyphens, and underscores',
+ },
+
+ title_trusted_job_create: { zh: '创建可信计算', en: 'Create trusted computing' },
+ title_trusted_job_edit: { zh: '编辑可信计算', en: 'Edit trusted computing' },
+ title_authorization_request: {
+ zh: '{{peerName}}向您发起「{{name}}」可信计算申请',
+ en: '{{peerName}} initiates a trusted computing authorization application for "{{name}}"',
+ },
+ title_base_info: { zh: '基本信息', en: 'Base info' },
+ title_resource_config: { zh: '资源配置', en: 'Resource config' },
+ title_computing_config: { zh: '计算配置', en: 'Computing config' },
+ title_computing_task_list: { zh: '计算任务列表', en: 'Computing task list' },
+ title_trusted_job_detail: { zh: '{{name}} 详情', en: '{{name}} Detail' },
+ title_instance_info: { zh: '实例信息', en: 'Instance information' },
+ title_todo_computing_tasks: { zh: '待处理计算任务', en: 'Pending computing job' },
+ title_initiate_trusted_job: { zh: '发起任务 {{name}}', en: 'Initiate trusted job {{name}}' },
+ title_edit_trusted_job: { zh: '编辑任务 {{name}}', en: 'Edit trusted job {{name}}' },
+ title_dataset_export_application: {
+ zh: '「{{name}}」 的导出申请',
+ en: "「{{name}}」's export application",
+ },
+ title_export_application: {
+ zh: '数据集导出申请',
+ en: 'Dataset export application',
+ },
+ title_passed: {
+ zh: '已通过申请',
+ en: 'Application passed',
+ },
+ title_rejected: {
+ zh: '已拒绝申请',
+ en: 'Application rejected',
+ },
+ title_status_tip: {
+ zh: '{{second}}S 后自动返回',
+ en: '{{second}} seconds later, automatically go back',
+ },
+
+ tip_agree_authorization: {
+ zh: '授权后,发起方可以运行可信计算任务',
+ en: 'After authorization, the initiator can run trusted computing jobs',
+ },
+
+ msg_required: { zh: '必填项', en: 'Required' },
+ msg_trusted_computing_create: {
+ zh: '合作伙伴均同意后,任务将自动运行,计算完成后的计算结果授权后才可以导出到本地',
+ en:
+ 'After all partners agree, the task will run automatically; once the computation finishes, the results can be exported locally only after authorization.',
+ },
+ unauthorized_confirm_title: {
+ zh: '确认撤销对「{{name}}」的授权?',
+ en: 'Are you sure you want to revoke the authorization for trusted computing "{{name}}"?',
+ },
+ msg_todo_computing_tasks: { zh: '待处理计算任务 {{count}}' },
+ msg_prefix_computing_tasks: { zh: '发起了', en: 'sent' },
+ msg_suffix_computing_tasks: { zh: '的计算任务', en: "'s
computing job" }, + msg_dataset_export_comment: { + zh: '该数据集为可信中心安全计算生成的计算结果,导出时需各合作伙伴审批通过', + en: + "The dataset is the calculation result generated by the trusted center's secure calculation, and it needs the approval of each partner when exporting", + }, + msg_create_success: { zh: '创建成功', en: 'Create success' }, + msg_auth_success: { zh: '授权成功', en: 'Authorize success' }, + msg_publish_success: { zh: '发布成功', en: 'Publish success' }, + msg_delete_success: { zh: '删除成功', en: 'Delete success' }, + msg_edit_success: { zh: '编辑成功', en: 'Edit success' }, + + col_trusted_job_name: { zh: '名称', en: 'Name' }, + col_trusted_job_coordinator: { zh: '发起方', en: 'Coordinator' }, + col_trusted_job_status: { zh: '状态', en: 'Status' }, + col_job_status: { zh: '任务状态', en: 'Job status' }, + col_trusted_job_runtime: { zh: '运行时长', en: 'Runtime' }, + col_trusted_job_start_time: { zh: '开始时间', en: 'Start time' }, + col_trusted_job_end_time: { zh: '结束时间', en: 'End time' }, + col_trusted_job_create_at: { zh: '创建时间', en: 'Create time' }, + col_trusted_job_update_at: { zh: '更新时间', en: 'Update time' }, + col_trusted_job_creator: { zh: '创建人', en: 'Creator' }, + col_trusted_job_dataset: { zh: '数据集', en: 'Dataset' }, + col_trusted_job_operation: { zh: '操作', en: 'Operation' }, + col_trusted_job_comment: { zh: '备注', en: 'Comment' }, + col_instance_id: { zh: '实例 ID', en: 'Instance ID' }, + col_instance_status: { zh: '状态', en: 'Status' }, + col_instance_cpu: { zh: 'CPU', en: 'CPU' }, + col_instance_memory: { zh: 'MEM', en: 'MEM' }, + col_instance_start_at: { zh: '开始时间', en: 'Start time' }, + + state_trusted_job_unknown: { zh: '未知', en: 'Unknown' }, + state_trusted_job_pending: { zh: '待执行', en: 'Pending' }, + state_trusted_job_running: { zh: '执行中', en: 'Running' }, + state_trusted_job_succeeded: { zh: '已成功', en: 'Succeeded' }, + state_trusted_job_failed: { zh: '已失败', en: 'Failed' }, + state_trusted_job_stopped: { zh: '已终止', en: 'Stopped' }, + + state_auth_status_authorized: { zh: '已授权', en: 'Authorized' }, + state_auth_status_unauthorized: { zh: '未授权', en: 'Unathorized' }, +}; + +export default separateLng(trusted_center); diff --git a/web_console_v2/client/src/i18n/resources/modules/upload.ts b/web_console_v2/client/src/i18n/resources/modules/upload.ts index 535f607d2..aa193d96d 100644 --- a/web_console_v2/client/src/i18n/resources/modules/upload.ts +++ b/web_console_v2/client/src/i18n/resources/modules/upload.ts @@ -3,6 +3,10 @@ import { separateLng } from 'i18n/helpers'; const upload = { placeholder: { zh: '点击或拖拽文件到此处上传' }, hint: { zh: '请上传{{fileTypes}}格式文件,大小不超过{{maxSize}}MB' }, + hint_without_file_size_limit: { zh: '请上传{{fileTypes}}格式文件' }, + hint_over_file_size_limit: { zh: '大小不超过{{maxSize}}MB!' 
},
+ label_upload: { zh: '上传', en: 'Upload' },
+ msg_upload_fail: { zh: '{{fileName}} 上传失败', en: '{{fileName}} upload failed' },
};

export default separateLng(upload);
diff --git a/web_console_v2/client/src/i18n/resources/modules/users.ts b/web_console_v2/client/src/i18n/resources/modules/users.ts
index 3802e21a4..fded8bd27 100644
--- a/web_console_v2/client/src/i18n/resources/modules/users.ts
+++ b/web_console_v2/client/src/i18n/resources/modules/users.ts
@@ -1,6 +1,7 @@
import { separateLng } from 'i18n/helpers';

const users = {
+ no_result: { zh: '暂无用户', en: 'No user yet' },
yourself: { zh: '本账号' },

btn_create_user: { zh: '创建用户', en: 'Create User' },
@@ -11,7 +12,7 @@ const users = {
col_username: { zh: '用户名', en: 'User Name' },
col_password: { zh: '密码', en: 'Password' },
col_role: { zh: '角色', en: 'Role' },
- col_name: { zh: '名字', en: 'Name' },
+ col_name: { zh: '显示名', en: 'Display Name' },
col_email: { zh: '邮箱', en: 'Email' },
col_ops: { zh: '操作', en: 'Operations' },

@@ -21,7 +22,7 @@ const users = {
msg_delete_done: { zh: '删除成功', en: 'Delete Done' },

title_user_create: { zh: '创建用户', en: 'Create User' },
- title_user_edit: { zh: '修改用户', en: 'Edit User' },
+ title_user_edit: { zh: '编辑用户', en: 'Edit User' },

placeholder_name_searchbox: { zh: '输入关键词搜索用户', en: 'Search by name' },
placeholder_username: { zh: '请输入用户名' },
@@ -34,7 +35,9 @@ const users = {
},
message_modify_success: { zh: '修改成功' },

- message_del_user: { zh: '确认删除该用户吗' },
+ message_del_user_title: { zh: '确认删除该用户吗?' },
+ message_del_user_content: { zh: '删除后,该用户将无法操作,请谨慎删除' },
+ message_can_not_del_current_user: { zh: '不能删除自己的账号' },
};

export default separateLng(users);
diff --git a/web_console_v2/client/src/i18n/resources/modules/validError.ts b/web_console_v2/client/src/i18n/resources/modules/validError.ts
new file mode 100644
index 000000000..f68571969
--- /dev/null
+++ b/web_console_v2/client/src/i18n/resources/modules/validError.ts
@@ -0,0 +1,67 @@
+import { separateLng } from 'i18n/helpers';
+
+const error = {
+ name_invalid: {
+ zh: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符',
+ en:
+ 'Only uppercase and lowercase letters, numbers, or Chinese characters at the beginning or end; may contain "_" and "-"; no more than 63 characters',
+ },
+ comment_invalid: {
+ zh: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 100 个字符',
+ en:
+ 'Only uppercase and lowercase letters, numbers, or Chinese characters at the beginning or end; may contain "_" and "-"; no more than 100 characters',
+ },
+ comment_length_invalid: {
+ zh: '最多为 200 个字符',
+ en: 'Up to 200 characters',
+ },
+ job_name_invalid: {
+ zh: '只支持小写字母,数字开头或结尾,可包含“-”,不超过 24 个字符',
+ en:
+ 'Only lowercase letters or numbers at the beginning or end; may contain "-"; no more than 24 characters',
+ },
+ cpu_invalid: {
+ zh: '请输入正确的格式,正确格式为 xxxm,例如: 4000m',
+ en: 'Please enter the correct format, the correct format is xxxm, for example: 4000m',
+ },
+ memory_invalid: {
+ zh: '请输入正确的格式,正确格式为 xxxGi,xxxMi,例如: 16Gi,16Mi',
+ en:
+ 'Please enter the correct format, the correct format is xxxGi, xxxMi, for example: 16Gi,16Mi',
+ },
+ email_invalid: {
+ zh: '请输入正确的邮箱格式',
+ en: 'Please enter the correct email format',
+ },
+ password_invalid: {
+ zh: '请输入正确的密码格式,至少包含一个字母、一个数字、一个特殊字符,且长度在8到20之间',
+ en:
+ 'Please enter a valid password containing at least one letter, one number, and one special character, with a length between 8 and 20',
+ },
+ missing_domain_name: {
+ zh: '获取本系统 domain_name 失败',
+ en: 'Failed to get domain_name of this system',
+ },
+ empty_node_name_invalid: {
+ zh: '名称不能为空',
+ en: 'Name
is required', + }, + same_node_name_invalid: { + zh: '已有重名元素', + en: 'There is a node with the same name', + }, + missing_dataset_id: { + zh: '请选择数据集', + en: 'Please select dataset, datasetId is required', + }, + missing_model_id: { + zh: '请选择模型', + en: 'Please select model, modelId is required', + }, + missing_name: { + zh: '请输入名称', + en: 'Please enter name', + }, +}; + +export default separateLng(error); diff --git a/web_console_v2/client/src/i18n/resources/modules/workflow.ts b/web_console_v2/client/src/i18n/resources/modules/workflow.ts index d40e39fac..23a268e5b 100644 --- a/web_console_v2/client/src/i18n/resources/modules/workflow.ts +++ b/web_console_v2/client/src/i18n/resources/modules/workflow.ts @@ -4,6 +4,7 @@ const workflow: I18nMessageModule = { no_result: { zh: '暂无工作流' }, no_tpl: { zh: '暂无工作流模板' }, execution_detail: { zh: '工作流详情' }, + template_detail: { zh: '模板详情' }, name: { zh: '工作流名称' }, our_config: { zh: '我方配置' }, peer_config: { zh: '对方配置' }, @@ -12,10 +13,11 @@ const workflow: I18nMessageModule = { current_config: { zh: '当前配置' }, create_workflow: { zh: '创建工作流', en: 'Create workflow' }, edit_workflow: { zh: '编辑工作流', en: 'Edit workflow' }, - create_tpl: { zh: '新建模板', en: 'Create template' }, + create_tpl: { zh: '创建模板', en: 'Create template' }, edit_tpl: { zh: '编辑模板', en: 'Edit template' }, fork_workflow: { zh: '复制工作流', en: 'Fork workflow' }, forked_from: { zh: '复制于', en: 'Forked from' }, + copy: { zh: '副本', en: '-copy' }, action_re_run: { zh: '重新运行' }, action_run: { zh: '立即运行' }, @@ -25,6 +27,7 @@ const workflow: I18nMessageModule = { action_detail: { zh: '详情' }, action_show_report: { zh: '查看模型报告' }, action_download: { zh: '下载' }, + action_download_log: { zh: '下载日志', en: 'Download log' }, action_invalid: { zh: '禁用' }, action_edit: { zh: '编辑' }, @@ -42,26 +45,31 @@ const workflow: I18nMessageModule = { btn_has_new_logs: { zh: '有新的日志' }, btn_fetch_metrics: { zh: '点击加载数据' }, btn_retry: { zh: '重试' }, - btn_access_ctrl: { zh: '权限配置面板' }, + btn_access_ctrl: { zh: '权限配置' }, btn_add_var: { zh: '新增自定义变量' }, btn_upload_tpl: { zh: '上传模板' }, - btn_go_create_new_tpl: { zh: '没有想要的模板?点击创建新模版' }, + btn_go_create_new_tpl: { zh: '没有想要的模板?点击创建新模板' }, btn_preview_kibana: { zh: '预览' }, btn_preview_kibana_fullscreen: { zh: '在新窗口预览' }, btn_add_kibana_chart: { zh: '添加新的图表' }, col_status: { zh: '任务状态' }, - col_project: { zh: ' 隶属项目' }, + col_project: { zh: ' 隶属工作区' }, col_creator: { zh: ' 创建者' }, col_date: { zh: ' 创建时间' }, col_actions: { zh: '操作' }, col_pod: { zh: 'POD' }, col_worker_status: { zh: '运行状态' }, col_worker_type: { zh: '类型' }, - col_tpl_name: { zh: '模版名' }, + col_tpl_name: { zh: '模板名' }, col_group_alias: { zh: 'Group 别名' }, col_pod_name: { zh: 'Pod' }, col_pod_ip: { zh: 'IP' }, + col_favorite: { zh: '收藏', en: 'favorite' }, + col_create_time: { zh: '创建时间', en: 'Create Time' }, + col_update_time: { zh: '更新时间', en: 'Update Time' }, + col_latest_version: { zh: '最新版本', en: 'Latest Version' }, + col_is_left: { zh: 'Is Left', en: 'Is Left' }, state_success: { zh: '成功' }, state_failed: { zh: '失败' }, @@ -75,6 +83,7 @@ const workflow: I18nMessageModule = { state_configuring: { zh: '合作伙伴配置中' }, state_invalid: { zh: '已禁用' }, state_unknown: { zh: '状态未知' }, + state_paused: { zh: '暂停' }, sent_failed: { zh: '发送失败' }, sent_failed_desc: { @@ -86,22 +95,24 @@ const workflow: I18nMessageModule = { label_name: { zh: '工作流名称' }, label_group_alias: { zh: 'Group' }, label_enable_batch_update_interval: { zh: '启用定时重训' }, - label_batch_update_interval: { zh: '重训间隔' }, + label_batch_update_interval: { zh: '定时重训' }, 
label_global_config: { zh: '全局配置' },
- label_project: { zh: '关联项目' },
+ label_project: { zh: '关联工作区' },
label_peer_forkable: { zh: '合作伙伴复制权限' },
label_template: { zh: '工作流模板' },
+ label_revision: { zh: '模板版本' },
+ label_creator: { zh: '创建者' },
label_allow: { zh: '允许' },
label_not_allow: { zh: '不允许' },
label_exist_template: { zh: '选择已有' },
label_pairing_exist_template: { zh: '选择配对模板' },
- label_new_template: { zh: '新建模板' },
+ label_new_template: { zh: '创建模板' },
label_pairing_new_template: { zh: '上传配对模板' },
label_new_template_name: { zh: '模板名称' },
- label_is_left: { zh: 'is Left' },
+ label_is_left: { zh: 'Is left' },
label_upload_template: { zh: '上传模板文件' },
- label_template_comment: { zh: '工作流模板说明' },
- label_template_name: { zh: '模板 (Group)' },
+ label_template_comment: { zh: '工作流模板描述' },
+ label_template_group: { zh: '模板 (Group)' },
label_running_time: { zh: '运行时长' },
label_role: { zh: 'Role' },
label_job_created: { zh: '任务创建时间' },
@@ -126,13 +137,26 @@ const workflow: I18nMessageModule = {
label_choose_new_tpl: { zh: '选择其他模板' },
label_job_basics: { zh: '任务基础信息' },
label_job_kibana_metrics: { zh: 'Kibana 参数' },
+ label_job_yaml_detail: { zh: 'Yaml详细信息', en: 'Yaml detail' },
+ label_tab_workflow_tpl_my: { zh: '我的模板', en: 'My templates' },
+ label_tab_workflow_tpl_built_in: { zh: '预置模板', en: 'Built-in templates' },
+ label_uuid: { zh: 'UUID', en: 'UUID' },
+ label_instance_info: { zh: '实例信息', en: 'Instance information' },
+ label_work_flow: { zh: '工作流', en: 'Workflow' },
+ label_work_flow_id: { zh: '工作流 ID', en: 'Workflow ID' },
+ label_jump_to_workflow: { zh: '点击查看工作流', en: 'Click to view the workflow' },
+ label_template_version: { zh: '版本列表', en: 'Version list' },
+ label_workflow_list: { zh: '工作流列表', en: 'Workflow List' },
+ label_tab_my_workflow: { zh: '我的工作流', en: 'My workflow' },
+ label_tab_system_workflow: { zh: '系统工作流', en: 'System workflow' },

placeholder_name_searchbox: { zh: '根据工作流名称搜索' },
placeholder_uuid_searchbox: { zh: '根据 UUID 搜索' },
placeholder_name: { zh: '请输入工作流名称' },
placeholder_template: { zh: '请选择模板' },
- placeholder_project: { zh: '请关联一个项目' },
- placeholder_comment: { zh: '请输入工作流模板说明' },
+ placeholder_revision: { zh: '请选择模板版本' },
+ placeholder_project: { zh: '请关联一个工作区' },
+ placeholder_comment: { zh: '请输入工作流模板描述' },
placeholder_template_name: { zh: '请输入模板名称' },
placeholder_fetch_metrics: { zh: '查询 Metrics 性能消耗较大,故需手动确认执行' },
placeholder_no_metrics: { zh: '当前未查询到相关指标' },
@@ -159,10 +183,11 @@ const workflow: I18nMessageModule = {
msg_sent_success: { zh: '工作流发送成功' },

msg_template_required: { zh: '请选择一个模板!' },
- msg_min_10_interval: { zh: ' 最小间隔为 10 分钟' },
+ msg_revision_required: { zh: '请选择一个模板版本!' },
+ msg_time_required: { zh: '请选择时间' },
msg_get_template_failed: { zh: '获取模板列表失败' },
msg_only_1_tpl: { zh: '只允许上传一个模板文件!' },
- msg_config_unfinished: { zh: '未完成配置,请先完成配置后再次点击发送' },
+ msg_config_unfinished: { zh: '未完成配置或有正在编辑的任务,请确认后再次提交' },
msg_config_unconfirm_or_unfinished: { zh: '双侧配置未确认或未完成,请检查后进行发送' },
msg_sure_2_cancel_create: { zh: '确认取消创建工作流?' },
msg_sure_2_cancel_edit: { zh: '确认取消编辑工作流?' },
@@ -172,21 +197,22 @@ const workflow: I18nMessageModule = {
msg_sure_2_exist_edit: { zh: '确定要离开吗,当前表单内容将全部丢失!' },
msg_will_drop_tpl_config: { zh: '取消后,已配置的模板内容将不再保留' },
msg_effect_of_cancel_create: { zh: '取消后,已配置内容将不再保留' },
- msg_project_required: { zh: '请选择项目!' },
+ msg_project_required: { zh: '请选择工作区!' },
msg_name_required: { zh: '请输入名称!'
}, - msg_no_abailable_tpl: { zh: '暂无可用模板,请手动新建' }, + msg_no_abailable_tpl: { zh: '暂无可用模板,请手动创建' }, msg_pairing_no_abailable_tpl: { zh: '暂无可用的配对模板,请手动上传' }, msg_tpl_file_required: { zh: '请选择一个合适的模板文件!' }, msg_tpl_name_required: { zh: '请输入模板名!' }, msg_tpl_config_missing: { zh: '模板格式错误,缺少 config 字段!' }, msg_tpl_alias_missing: { zh: '模板格式错误,缺少 config.group_alias 字段!' }, msg_tpl_alias_wrong: { zh: '模板 group_alias 与合作方模板不一致,请检查!' }, - msg_tpl_is_left_wrong: { zh: '模板 is_left 值须为{{value}},请检查' }, msg_peer_config_failed: { zh: '获取对侧工作流配置失败' }, msg_peer_not_ready: { zh: '对侧配置未完成' }, msg_not_config: { zh: '工作流配置未完成' }, msg_workflow_name_invalid: { zh: '最长允许255个字符' }, msg_sure_to_stop: { zh: '确认停止运行该工作流吗?' }, + msg_sure_to_invalidate_title: { zh: '确认禁用该工作流吗?' }, + msg_sure_to_invalidate_content: { zh: '禁用后,该工作流将无法操作,请谨慎禁用' }, msg_sure_to_delete: { zh: '确认删除吗?' }, msg_unforkable: { zh: '根据对侧配置,该工作流不允许被复制,请与对侧沟通后再试' }, msg_get_peer_cfg_failed: { zh: '获取对侧配置失败: ' }, @@ -197,11 +223,13 @@ const workflow: I18nMessageModule = { msg_upstreaming_nonreusable: { zh: '因存在上游依赖不继承,无法修改此任务继承与否' }, msg_chart_deps_loading: { zh: '图表依赖正在加载,请稍等' }, msg_get_tpl_detail_failed: { zh: '获取模板详情失败,请稍后再试' }, + msg_get_revision_detail_failed: { zh: '获取模板版本详情失败,请稍后再试' }, msg_group_required: { zh: '请输入 Group 名' }, msg_jobname_required: { zh: '请输入 Job 名' }, - msg_yaml_required: { zh: '请加入 YAML 模版' }, + msg_yaml_required: { zh: '请加入 YAML 模板' }, msg_varname_required: { zh: '请输入变量 Key' }, msg_varname_invalid: { zh: '只允许大小写英文字母数字及下划线的组合' }, + msg_slot_varname_invalid: { zh: '只允许大小写英文字母数字及下划线的组合' }, msg_del_job_warning: { zh: '删除后,该 Job 配置的内容都将丢失' }, msg_metric_public: { zh: '公开后,对侧将能查看你的「任务运行结果指标」' }, msg_toggle_job_disabled: { zh: '是否启用该Job' }, @@ -210,17 +238,24 @@ const workflow: I18nMessageModule = { msg_schduled_run: { zh: '重训功能目前只针对左模板生效,启用该功能将间隔性地重跑 Workflow,如有疑问请联系开发人员', }, - msg_sure_2_replace_tpl: { zh: '确认更换模板吗' }, + msg_sure_2_replace_tpl: { zh: '确认更换模板吗?' }, msg_loose_origin_vars_vals: { zh: '更换后原模板的配置值将丢失' }, msg_resued_job_cannot_edit: { zh: '已继承结果的任务将无法更改变量' }, msg_resued_job: { zh: '该任务直接复用了前次运行的结果' }, msg_no_available_kibana: { zh: '查询结果为空' }, + msg_workflow_name_existed: { zh: '系统中存在同名的工作流,请更换名称' }, + msg_project_variables_required: { zh: '请配置工作区变量:{{var}}' }, + msg_project_variables_link: { zh: '点击此链接进行配置' }, + msg_can_not_edit_preset_template: { zh: '该环境不支持修改预置模板' }, + msg_only_admin_edit_preset_template: { zh: '只有管理员能修改预置模板' }, title_toggle_reusable: { zh: '切换至{{state}}状态' }, + title_copy_template: { zh: '复制模板', en: 'Copy template' }, + title_template_name: { zh: '模板名称', en: 'Template name' }, - var_auth_write: { zh: '可编辑' }, - var_auth_read: { zh: '可见' }, - var_auth_private: { zh: '不可见' }, + var_auth_write: { zh: '{{prefix}}可编辑' }, + var_auth_read: { zh: '{{prefix}}可见' }, + var_auth_private: { zh: '{{prefix}}不可见' }, step_basic: { zh: '基础配置' }, step_tpl_basic: { zh: '基础信息' }, @@ -228,6 +263,7 @@ const workflow: I18nMessageModule = { step_tpl_config: { zh: '任务配置' }, job_node_pending: { zh: ' 待配置' }, + job_node_validating: { zh: '校验中...' 
},
job_node_configuring: { zh: '配置中' },
job_node_config_completed: { zh: '配置完成' },
job_node_unfinished: { zh: '未完成配置' },
@@ -244,6 +280,13 @@ const workflow: I18nMessageModule = {
pod_unknown: { zh: '状态未知' },
pod_failed_cleared: { zh: '失败&已清理资源' },
pod_success_cleared: { zh: '成功&已释放资源' },
+
+ tpl_deleted: { zh: '模板已删除' },
+ tpl_config_pending: { zh: '模板未配置' },
+ tpl_modified: { zh: '原模板已修改,点击下载副本' },
+ tpl_generate_suffix: { zh: '工作流生成模板' },
+ tpl_generate_success: { zh: '模板生成成功' },
+ tpl_generate_failed: { zh: '模板生成失败' },
};

export default separateLng(workflow);
diff --git a/web_console_v2/client/src/i18n/resources/zh_CN.ts b/web_console_v2/client/src/i18n/resources/zh_CN.ts
index f3083aa66..8d149ce66 100644
--- a/web_console_v2/client/src/i18n/resources/zh_CN.ts
+++ b/web_console_v2/client/src/i18n/resources/zh_CN.ts
@@ -9,6 +9,15 @@ import app from './modules/app';
import dataset from './modules/dataset';
import settings from './modules/settings';
import users from './modules/users';
+import intersection_dataset from './modules/intersection_dataset';
+import validError from './modules/validError';
+import modelCenter from './modules/modelCenter';
+import modelServing from './modules/modelServing';
+import audit from './modules/audit';
+import algorithmManagement from './modules/algorithmManagement';
+import operation_maintenance from './modules/operation_maintenance';
+import dashboard from './modules/dashboard';
+import trusted_center from './modules/trustedCenter';

const messages = {
translation: {
@@ -23,6 +32,15 @@ const messages = {
dataset: dataset.zh,
settings: settings.zh,
users: users.zh,
+ valid_error: validError.zh,
+ model_center: modelCenter.zh,
+ intersection_dataset: intersection_dataset.zh,
+ model_serving: modelServing.zh,
+ audit: audit.zh,
+ algorithm_management: algorithmManagement.zh,
+ operation_maintenance: operation_maintenance.zh,
+ dashboard: dashboard.zh,
+ trusted_center: trusted_center.zh,

all: '全部',
terms: '服务协议',
@@ -33,8 +51,11 @@ const messages = {
cancel: '取消',
close: '关闭',
edit: '编辑',
+ scale: '扩缩容',
delete: '删除',
reset: '重置',
+ stop: '停止',
+ terminate: '终止',
previous_step: '上一步',
next_step: '下一步',
operation: '操作',
@@ -42,9 +63,143 @@ const messages = {
click_to_retry: '点此重试',
creator: '创建者',
created_at: '创建时间',
+ started_at: '开始时间',
+ stop_at: '结束时间',
+ running_duration: '运行时长',
yes: '是',
no: '否',
pls_try_again_later: '请稍后重试',
+ id: 'ID',
+ updated_at: '更新时间',
+ deleted_at: '删除时间',
+ hint_total_table: '共 {{total}} 条记录',
+ msg_quit_warning: '取消后,已配置内容将不再保留',
+ create: '创建',
+ save: '保存',
+ send_request: '发送请求',
+ send: '发送',
+ more_info: '更多信息',
+
+ placeholder_input: '请输入',
+ placeholder_select: '请选择',
+ placeholder_required: '必填项',
+
+ hint_total_select: '已选择 {{total}} 项',
+ select_all: '全选',
+
+ label_time_asc: '按时间升序',
+ label_time_desc: '按时间降序',
+
+ detail: '详情',
+ favorite_success: '收藏成功',
+ favorite_fail: '收藏失败',
+ cancel_favorite_success: '取消收藏成功',
+ cancel_favorite_fail: '取消收藏失败',
+ export: '导出',
+ exporting: '正在导出',
+ export_result: '结果导出',
+
+ success: '成功',
+ fail: '失败',
+ evaluating: '评估中',
+ predicting: '预测中',
+ waitConfirm: '待确认',
+ pass: '通过',
+ reject: '拒绝',
+
+ add: '添加',
+ change: '变更',
+
+ message_create_success: '创建成功',
+ message_create_fail: '创建失败',
+ message_modify_success: '修改成功',
+ message_modify_fail: '修改失败',
+ message_delete_success: '删除成功',
+ message_delete_fail: '删除失败',
+ message_no_file: '没有文件',
+ message_publish_success: '发布成功',
+ message_publish_failed: '发布失败',
+ message_authorize_success: '授权成功',
+
message_authorize_failed: '授权失败', + message_revoke_success: '撤销成功', + message_revoke_failed: '撤销失败', + message_stop_success: '停止成功', + message_stop_fail: '停止失败', + message_name_duplicated: '名称已存在', + message_export_loading: '正在导出', + message_export_success: '导出成功', + message_export_fail: '导出失败', + + transfer_total: '全部共 {{total}} 项', + transfer_select_total: '已选 {{selectedCount}}/{{total}} 项', + + open_code_editor: '打开代码编辑器', + code_editor: '代码编辑器', + no_data: '暂无数据', + no_label: '无标签', + + copy: '复制', + back: '返回', + check: '查看', + publish: '发布', + revoke: '撤销', + + create_folder: '创建子文件夹', + create_file: '创建子文件', + create_folder_on_root: '创建根路径文件夹', + create_file_on_root: '创建根路径文件', + + select_project_notice: '请选择工作区', + + msg_quit_modal_title: '确认要退出?', + msg_quit_modal_content: '退出后,当前所填写的信息将被清空。', + + hyper_parameters: '超参数', + + tip_please_input_positive_integer: '请输入正整数', + tip_please_input_positive_number: '请输入正数,小数点后保留1位', + tip_replicas_range: '实例数范围1~100', + tip_peer_unauthorized: '{{participantName}}暂未授权,请线下联系处理', + + cpu: 'CPU', + mem: '内存', + replicas: '实例数', + + placeholder_cpu: '输入CPU规格', + placeholder_mem: '输入内存规格', + + label_horizontal_federalism: '横向联邦', + label_vertical_federalism: '纵向联邦', + coordinator: '本方', + participant: '合作伙伴', + + term_type: '类型', + term_federal_type: '联邦类型', + term_model: '模型', + term_dataset: '数据集', + term_resource_config: '资源配置', + term_algorithm_type: '算法类型', + term_model_type_nn_vertical: '纵向联邦-NN模型', + term_model_type_nn_horizontal: '横向联邦-NN模型', + term_model_type_tree_vertical: '纵向联邦-树模型', + term_favored: '已收藏', + term_unfavored: '未收藏', + term_name: '名称', + term_compare: '对比', + + term_true: '是', + term_false: '否', + + pod_id: '实例 ID', + authorized: '已授权', + unauthorized: '未授权', + local_authorized: '本侧已授权', + local_unauthorized: '本侧未授权', + peer_authorized: '对侧已授权', + peer_unauthorized: '对侧未授权', + + action_authorize: '授权', + action_revoke: '撤销', }, }; diff --git a/web_console_v2/client/src/index.tsx b/web_console_v2/client/src/index.tsx index 8ec09f4bf..d45479220 100644 --- a/web_console_v2/client/src/index.tsx +++ b/web_console_v2/client/src/index.tsx @@ -4,23 +4,26 @@ import { ReactQueryDevtools } from 'react-query/devtools'; import { QueryClientProvider } from 'react-query'; import queryClient from 'shared/queryClient'; import { BrowserRouter } from 'react-router-dom'; -import MockDevtools from 'components/_base/MockDevtools/MockControlPanel'; +import MockDevtools from 'components/MockDevtools/MockControlPanel'; import App from './App'; import { ThemeProvider } from 'styled-components'; import { RecoilRoot } from 'recoil'; -import defaultTheme from 'styles/_theme'; -import antdZhCN from 'antd/lib/locale/zh_CN'; -import antdEnUS from 'antd/lib/locale/en_US'; -import { ConfigProvider } from 'antd'; -import i18n from './i18n'; -import 'assets/fonts/ClarityMono/index.less'; -import './styles/_variables.css'; -import './styles/antd-overrides.less'; +import { setUseWhatChange } from '@simbathesailor/use-what-changed'; +import { ConfigProvider } from '@arco-design/web-react'; +import { defaultTheme } from 'styles'; + +import NoResult from 'components/NoResult'; + +setUseWhatChange(process.env.NODE_ENV === 'development'); ReactDOM.render( <BrowserRouter basename="/v2"> <RecoilRoot> - <ConfigProvider locale={i18n.language === 'zh' ? 
antdZhCN : antdEnUS}> + <ConfigProvider + renderEmpty={() => { + return <NoResult.NoData />; + }} + > <ThemeProvider theme={defaultTheme}> <QueryClientProvider client={queryClient}> <App /> diff --git a/web_console_v2/client/src/jobMetaDatas/BUILD.bazel b/web_console_v2/client/src/jobMetaDatas/BUILD.bazel new file mode 100644 index 000000000..70ff958ed --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/BUILD.bazel @@ -0,0 +1,5 @@ +filegroup( + name = "srcs", + srcs = glob(["**"]), + visibility = ["//visibility:public"], +) diff --git a/web_console_v2/client/src/jobMetaDatas/analyzer.json b/web_console_v2/client/src/jobMetaDatas/analyzer.json new file mode 100644 index 000000000..498c45a4f --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/analyzer.json @@ -0,0 +1,188 @@ + +{ + "Slot_image": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "镜像地址,建议不填写,默认会使用system.variables.image_repo + '/pp_data_inspection:' + system.version", + "reference_type": "DEFAULT", + "label": "镜像" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "额外元信息" + }, + "Slot_spark_main_file": { + "reference": "", + "value_type": "STRING", + "default_value": "/opt/spark/work-dir/analyzer_v2.py", + "help": "spark入口脚本", + "reference_type": "DEFAULT", + "label": "入口脚本文件" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "value_type": "STRING", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录" + }, + "Slot_input_job_name": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "必须修改,求交任务名或数据集名称", + "reference_type": "WORKFLOW", + "label": "数据集名" + }, + "Slot_inner_folder_name": { + "reference": "", + "value_type": "STRING", + "default_value": "dataset", + "help": "为了兼容老的路径的临时Slot,['dataset', 'datasource']", + "reference_type": "DEFAULT", + "label": "中间文件夹名" + }, + "Slot_wildcard": { + "reference": "", + "value_type": "STRING", + "default_value": "batch/**/*.data", + "help": "文件通配符", + "reference_type": "DEFAULT", + "label": "文件通配符" + }, + "Slot_drvier_envs":{ + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "driver环境变量", + "reference_type": "DEFAULT", + "label": "driver环境变量" +}, + "Slot_executor_envs":{ + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "executor环境变量", + "reference_type": "DEFAULT", + "label": "executor环境变量" +}, + "Slot_driver_cores": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "driver核心数", + "reference_type": "DEFAULT", + "label": "driver核心数" + }, + "Slot_driver_core_limit": { + "reference": "", + "value_type": "STRING", + "default_value": "1200m", + "help": "driver核心数限制", + "reference_type": "DEFAULT", + "label": "driver核心数限制" + }, + "Slot_driver_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "4g", + "help": "driver内存", + "reference_type": "DEFAULT", + "label": "driver内存" + }, + "Slot_executor_cores": { + "reference": "", + "value_type": "INT", + "default_value": 2, + "help": "executor核心数", + "reference_type": "DEFAULT", + "label": "executor核心数" + }, + "Slot_executor_instances": { + "reference": "", + "value_type": "INT", + "default_value": 2, + "help": "executor实例数", + "reference_type": "DEFAULT", + "label": "executor实例数" + }, + "Slot_executor_memory": { + "reference": "", + "value_type": "STRING", + 
"default_value": "4g", + "help": "executor内存", + "reference_type": "DEFAULT", + "label": "executor内存" + }, + "Slot_initial_executors":{ + "reference": "", + "value_type": "INT", + "default_value": 2, + "help": "初始化executor数量", + "reference_type": "DEFAULT", + "label": "初始化executor数量" +}, + "Slot_max_executors":{ + "reference": "", + "value_type": "INT", + "default_value": 64, + "help": "初始化executor数量", + "reference_type": "DEFAULT", + "label": "最大executor数量" + }, + "Slot_min_executors":{ + "reference": "", + "value_type": "INT", + "default_value": 2, + "help": "初始化executor数量", + "reference_type": "DEFAULT", + "label": "最小executor数量" + }, + + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{"mountPath": "/data","name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + }, + "Slot_buckets_num": { + "reference": "", + "value_type": "INT", + "default_value": 10, + "help": "用于数据探查时统计直方图的分通数", + "reference_type": "WORKFLOW", + "label": "直方图分桶数" + }, + "Slot_thumbnail_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "用于存放预览图像的位置", + "reference_type": "WORKFLOW", + "label": "预览图像位置" + }, + "Slot_dataset_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "用于数据集存储的路径", + "reference_type": "WORKFLOW", + "label": "数据集存储路径" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/analyzer.metayml b/web_console_v2/client/src/jobMetaDatas/analyzer.metayml new file mode 100644 index 000000000..a2257d02f --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/analyzer.metayml @@ -0,0 +1,63 @@ +{ + "apiVersion": "sparkoperator.k8s.io/v1beta2", + "kind": "SparkApplication", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "labels": ${Slot_labels}, + "annotations": { + "queue": "fedlearner-spark", + "schedulerName": "batch", + }, + }, + "spec": { + "type": "Python", + "pythonVersion": "3", + "mode": "cluster", + "image": ${Slot_image} or system.variables.image_repo + "/pp_data_inspection:" + system.version, + "imagePullPolicy": "IfNotPresent", + "volumes": ${Slot_volumes}, + "mainApplicationFile": ${Slot_spark_main_file}, + "arguments": [ + "--data_path="+ (${Slot_dataset_path} or ${Slot_storage_root_path} + "/" + ${Slot_inner_folder_name} + "/" + ${Slot_input_job_name}), + "--file_wildcard=" + ${Slot_wildcard}, + "--buckets_num=" + str(${Slot_buckets_num}), + "--thumbnail_path=" + ${Slot_thumbnail_path}, + ], + "sparkVersion": "3.0.0", + "restartPolicy": { + "type": "OnFailure", + "onFailureRetries": 3, + "onFailureRetryInterval": 10, + "onSubmissionFailureRetries": 5, + "onSubmissionFailureRetryInterval": 20 + }, + "driver": { + "cores": ${Slot_driver_cores}, + "coreLimit": ${Slot_driver_core_limit}, + "memory": ${Slot_driver_memory}, + "labels": { + "version": "3.0.0" + }, + "serviceAccount": "spark", + "volumeMounts": ${Slot_volume_mounts}, + "env": system.basic_envs_list + system.variables.envs_list + ${Slot_drvier_envs} + }, + "executor": { + "cores": ${Slot_executor_cores}, + "instances": ${Slot_executor_instances}, + "memory": ${Slot_executor_memory}, + "labels": { + "version": "3.0.0" 
+ },
+ "volumeMounts": ${Slot_volume_mounts},
+ "env": system.basic_envs_list + system.variables.envs_list + ${Slot_executor_envs}
+ },
+ "dynamicAllocation": {
+ "enabled": True,
+ "initialExecutors": ${Slot_initial_executors},
+ "maxExecutors": ${Slot_max_executors},
+ "minExecutors": ${Slot_min_executors},
+ }
+ }
+}
diff --git a/web_console_v2/client/src/jobMetaDatas/data_join.json b/web_console_v2/client/src/jobMetaDatas/data_join.json
new file mode 100644
index 000000000..50105c36f
--- /dev/null
+++ b/web_console_v2/client/src/jobMetaDatas/data_join.json
@@ -0,0 +1,213 @@
+{
+ "Slot_role": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "Leader",
+ "help": "Flapp 通讯时的角色 Leader 或 Follower",
+ "reference_type": "DEFAULT",
+ "label": "Flapp通讯时角色"
+ },
+ "Slot_storage_root_path": {
+ "reference": "project.variables.storage_root_path",
+ "value_type": "STRING",
+ "default_value": "/data",
+ "help": "联邦学习中任务存储根目录",
+ "reference_type": "PROJECT",
+ "label": "存储根目录"
+ },
+ "Slot_image_version": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "882310f",
+ "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'",
+ "reference_type": "DEFAULT",
+ "label": "容器镜像版本"
+ },
+ "Slot_master_cpu": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "2000m",
+ "help": "Master Pod 所分配的CPU资源(request和limit一致)",
+ "reference_type": "DEFAULT",
+ "label": "Master的CPU"
+ },
+ "Slot_master_memory": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "3Gi",
+ "help": "Master Pod 所分配的内存资源(request和limit一致)",
+ "reference_type": "DEFAULT",
+ "label": "Master的内存"
+ },
+ "Slot_worker_cpu": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "2000m",
+ "help": "Worker Pod 所分配的CPU资源(request和limit一致)",
+ "reference_type": "DEFAULT",
+ "label": "Worker的CPU"
+ },
+ "Slot_worker_memory": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "3Gi",
+ "help": "Worker Pod 所分配的内存资源(request和limit一致)",
+ "reference_type": "DEFAULT",
+ "label": "Worker的内存"
+ },
+ "Slot_master_replicas": {
+ "reference": "",
+ "value_type": "INT",
+ "default_value": 1,
+ "help": "同时运行的完全相同的Master Pods数量",
+ "reference_type": "DEFAULT",
+ "label": "Master的Pod个数"
+ },
+ "Slot_batch_mode": {
+ "reference": "",
+ "value_type": "STRING",
+ "default_value": "--batch_mode",
+ "help": "如果为空则为常驻求交",
+ "reference_type": "DEFAULT",
+ "label": "是否为批处理模式"
+ },
+ "Slot_partition_num": {
+ "reference": "",
+ "value_type": "INT",
+ "default_value": 4,
+ "help": "建议修改,求交后数据分区的数量,建议和raw_data一致",
+ "reference_type": "DEFAULT",
+ "label": "数据分区的数量"
+ },
+ "Slot_start_time": {
+ "reference": "",
+ "value_type": "INT",
+ "default_value": 0,
+ "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)",
+ "reference_type": "DEFAULT",
+ "label": "数据起始时间"
+ },
+ "Slot_end_time": {
+ "reference": "",
+ "value_type": "INT",
+ "default_value": 999999999999,
+ "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)",
+ "reference_type": "DEFAULT",
+ "label": "数据末尾时间"
+ },
+ "Slot_enable_negative_example_generator": {
+ "reference": "",
+ "value_type": "BOOL",
+ "default_value": false,
+ "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example id,会以negative_sampling_rate为概率生成一个新的样本。",
+ "reference_type": "DEFAULT",
+ "label": "是否开启负采样"
+ },
+ "Slot_negative_sampling_rate": {
+ "reference": "",
+ "value_type": "NUMBER",
+ "default_value": 0,
+ "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。",
+ "reference_type": "DEFAULT",
+
"label": "负采样比例" + }, + "Slot_raw_data_name": { + "reference": "", + "value_type": "STRING","default_value": "", + "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找", + "reference_type": "JOB_PROPERTY", + "label": "raw_data名字" + }, + "Slot_data_block_dump_interval": { + "reference": "", + "value_type": "INT","default_value": -1, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制", + "reference_type": "DEFAULT", + "label": "数据dump时间间隔" + }, + "Slot_data_block_dump_threshold": { + "reference": "", + "value_type": "INT","default_value": 4096, + "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制", + "reference_type": "DEFAULT", + "label": "数据dump临界点" + }, + "Slot_example_id_dump_interval": { + "reference": "", + "value_type": "INT","default_value": -1, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "reference_type": "DEFAULT", + "label": "数据id dump时间间隔" + }, + "Slot_example_id_dump_threshold": { + "reference": "", + "value_type": "INT","default_value": 4096, + "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制", + "reference_type": "DEFAULT", + "label": "数据id dump临界点" + }, + "Slot_min_matching_window": { + "reference": "", + "value_type": "INT","default_value": 1024, + "help": "建议不修改,the min matching window for example join ,<=0 means window size is infinite", + "reference_type": "DEFAULT", + "label": "最小匹配滑窗" + }, + "Slot_max_matching_window": { + "reference": "", + "value_type": "INT","default_value": 4096, + "help": "建议不修改,the max matching window for example join. <=0 means window size is infinite", + "reference_type": "DEFAULT", + "label": "最大匹配滑窗" + }, + "Slot_raw_data_iter": { + "reference": "", + "value_type": "STRING","default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "reference_type": "DEFAULT", + "label": "raw_data文件类型" + }, + "Slot_data_block_builder": { + "reference": "", + "value_type": "STRING", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "reference_type": "DEFAULT", + "label": "data block output数据类型" + }, + "Slot_master_envs": { + "reference": "", + "value_type": "LIST","default_value": [], + "help": "数组类型,master pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Master额外环境变量" + }, + "Slot_worker_envs": { + "reference": "", + "value_type": "LIST","default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Worker额外环境变量" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT","default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST","default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST","default_value": [{ "mountPath": "/data", "name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/data_join.metayml b/web_console_v2/client/src/jobMetaDatas/data_join.metayml new file mode 100644 index 000000000..add4ae44d --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/data_join.metayml @@ -0,0 +1,261 @@ +{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": 
self.name, + "namespace": system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + "schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": ${Slot_role}, + "cleanPodPolicy": "All", + "peerSpecs": { + "Leader" if ${Slot_role}=="Follower" else "Follower": { + "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc:80", + "authority": project.participants[0].egress_host, + "extraHeaders": { + "x-host": "fedlearner-operator." + project.participants[0].egress_domain + } + } + }, + "flReplicaSpecs": { + "Master": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/data_source/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "BATCH_MODE", + "value": ${Slot_batch_mode} + }, + { + "name": "PARTITION_NUM", + "value": str(${Slot_partition_num}) + }, + { + "name": "START_TIME", + "value": str(${Slot_start_time}) + }, + { + "name": "END_TIME", + "value": str(${Slot_end_time}) + }, + { + "name": "RAW_DATA_SUB_DIR", + "value": "portal_publish_dir/" + ${Slot_raw_data_name} + }, + { + # not work, remove it after prepare_launch_data_join_cli been removed + "name": "NEGATIVE_SAMPLING_RATE", + "value": str(${Slot_negative_sampling_rate}) + } + ] + ${Slot_master_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": + ${Slot_volume_mounts} + , + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/data_join/run_data_join_master.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + }, + "requests": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": + ${Slot_volumes} + + } + }, + "pair": true, + "replicas": ${Slot_master_replicas} + }, + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/data_source/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "PARTITION_NUM", + "value": str(${Slot_partition_num}) + }, + { + "name": "RAW_DATA_SUB_DIR", + "value": "portal_publish_dir/" + ${Slot_raw_data_name} + }, + { + "name": "DATA_BLOCK_DUMP_INTERVAL", + "value": str(${Slot_data_block_dump_interval}) + }, + { + 
"name": "DATA_BLOCK_DUMP_THRESHOLD", + "value": str(${Slot_data_block_dump_threshold}) + }, + { + "name": "EXAMPLE_ID_DUMP_INTERVAL", + "value": str(${Slot_example_id_dump_interval}) + }, + { + "name": "EXAMPLE_ID_DUMP_THRESHOLD", + "value": str(${Slot_example_id_dump_threshold}) + }, + { + "name": "MIN_MATCHING_WINDOW", + "value": str(${Slot_min_matching_window}) + }, + { + "name": "MAX_MATCHING_WINDOW", + "value": str(${Slot_max_matching_window}) + }, + { + "name": "RAW_DATA_ITER", + "value": ${Slot_raw_data_iter} + }, + { + "name": "DATA_BLOCK_BUILDER", + "value": ${Slot_data_block_builder} + }, + { + "name": "ENABLE_NEGATIVE_EXAMPLE_GENERATOR", + "value": str(${Slot_enable_negative_example_generator}) + }, + { + "name": "NEGATIVE_SAMPLING_RATE", + "value": str(${Slot_negative_sampling_rate}) + }, + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": + ${Slot_volume_mounts} + , + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/data_join/run_data_join_worker.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": + ${Slot_volumes} + + } + }, + "pair": true, + "replicas": ${Slot_partition_num} + } + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/index.ts b/web_console_v2/client/src/jobMetaDatas/index.ts new file mode 100644 index 000000000..fbcb33816 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/index.ts @@ -0,0 +1,61 @@ +/* istanbul ignore file */ + +import { JobSlot } from 'typings/workflow'; +import { JobType } from 'typings/job'; + +export type JobMetaData = { + metaYamlString: string; + slots: { [k: string]: JobSlot }; +}; + +const DataJoin: JobMetaData = { + metaYamlString: require('./data_join.metayml').default, + slots: require('./data_join.json'), +}; +const PSIDataJoin: JobMetaData = { + metaYamlString: require('./psi_data_join.metayml').default, + slots: require('./psi_data_join.json'), +}; +const TreeModelEvaluation: JobMetaData = { + metaYamlString: require('./tree_model_evaluation.metayml').default, + slots: require('./tree_model_evaluation.json'), +}; +const TreeModelTraining: JobMetaData = { + metaYamlString: require('./tree_model_training.metayml').default, + slots: require('./tree_model_training.json'), +}; +const RawData: JobMetaData = { + metaYamlString: require('./raw_data.metayml').default, + slots: require('./raw_data.json'), +}; +const NNModelTraining: JobMetaData = { + metaYamlString: require('./nn_model_training.metayml').default, + slots: require('./nn_model_training.json'), +}; +const NNModelEvaluation: JobMetaData = { + metaYamlString: require('./nn_model_evaluation.metayml').default, + slots: require('./nn_model_evaluation.json'), +}; +const Transformer: JobMetaData = { + metaYamlString: require('./transformer.metayml').default, + slots: require('./transformer.json'), +}; + +const Analyzer: JobMetaData = { + metaYamlString: require('./analyzer.metayml').default, + slots: require('./analyzer.json'), +}; + +const jobTypeToMetaDatasMap: Map<JobType, JobMetaData> = new Map(); + +jobTypeToMetaDatasMap.set(JobType.DATA_JOIN, DataJoin); 
+jobTypeToMetaDatasMap.set(JobType.PSI_DATA_JOIN, PSIDataJoin);
+jobTypeToMetaDatasMap.set(JobType.TREE_MODEL_EVALUATION, TreeModelEvaluation);
+jobTypeToMetaDatasMap.set(JobType.TREE_MODEL_TRAINING, TreeModelTraining);
+jobTypeToMetaDatasMap.set(JobType.RAW_DATA, RawData);
+jobTypeToMetaDatasMap.set(JobType.NN_MODEL_EVALUATION, NNModelEvaluation);
+jobTypeToMetaDatasMap.set(JobType.NN_MODEL_TRANINING, NNModelTraining);
+jobTypeToMetaDatasMap.set(JobType.TRANSFORMER, Transformer);
+jobTypeToMetaDatasMap.set(JobType.ANALYZER, Analyzer);
+
+export default jobTypeToMetaDatasMap;
diff --git a/web_console_v2/client/src/jobMetaDatas/nn_model_evaluation.json b/web_console_v2/client/src/jobMetaDatas/nn_model_evaluation.json
new file mode 100644
index 000000000..9a6d3ca9e
--- /dev/null
+++ b/web_console_v2/client/src/jobMetaDatas/nn_model_evaluation.json
@@ -0,0 +1,298 @@
+{
+  "Slot_role": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "Leader",
+    "help": "Flapp 通讯时的角色 Leader 或 Follower",
+    "reference_type": "WORKFLOW",
+    "label": "Flapp通讯时角色"
+  },
+  "Slot_storage_root_path": {
+    "reference": "project.variables.storage_root_path",
+    "value_type": "STRING",
+    "default_value": "/data",
+    "help": "联邦学习中任务存储根目录",
+    "reference_type": "PROJECT",
+    "label": "存储根目录"
+  },
+  "Slot_image_version": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "882310f",
+    "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'",
+    "reference_type": "DEFAULT",
+    "label": "容器镜像版本"
+  },
+  "Slot_master_cpu": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "2000m",
+    "help": "Master Pod 所分配的CPU资源(request和limit一致)",
+    "reference_type": "DEFAULT",
+    "label": "Master的CPU"
+  },
+  "Slot_master_memory": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "3Gi",
+    "help": "Master Pod 所分配的内存资源(request和limit一致)",
+    "reference_type": "DEFAULT",
+    "label": "Master的内存"
+  },
+  "Slot_master_replicas": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": 1,
+    "help": "同时运行的完全相同的Master Pods数量",
+    "reference_type": "DEFAULT",
+    "label": "Master的Pod个数"
+  },
+  "Slot_worker_cpu": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "2000m",
+    "help": "Worker Pod 所分配的CPU资源(request和limit一致)",
+    "reference_type": "DEFAULT",
+    "label": "Worker的CPU"
+  },
+  "Slot_worker_memory": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "3Gi",
+    "help": "Worker Pod 所分配的内存资源(request和limit一致)",
+    "reference_type": "DEFAULT",
+    "label": "Worker的内存"
+  },
+  "Slot_worker_replicas": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": 1,
+    "help": "同时运行的完全相同的Worker Pods数量",
+    "reference_type": "DEFAULT",
+    "label": "Worker的Pod个数"
+  },
+  "Slot_ps_cpu": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "2000m",
+    "help": "PS Pod 所分配的CPU资源(request和limit一致)",
+    "reference_type": "DEFAULT",
+    "label": "PS的CPU"
+  },
+  "Slot_ps_memory": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "3Gi",
+    "help": "PS Pod 所分配的内存资源(request和limit一致)",
+    "reference_type": "DEFAULT",
+    "label": "PS的内存"
+  },
+  "Slot_ps_replicas": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": 1,
+    "help": "同时运行的完全相同的PS Pods数量",
+    "reference_type": "DEFAULT",
+    "label": "PS的Pod个数"
+  },
+  "Slot_data_source": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "",
+    "help": "必须修改,求交任务的名字",
+    "reference_type": "JOB_PROPERTY",
+    "label": "数据源"
+  },
+  "Slot_epoch_num": {
+    "reference": "",
+    "value_type":
"INT", + "default_value": 1, + "help": "number of epoch for training, not support in online training", + "reference_type": "DEFAULT", + "label": "epoch数量" + }, + "Slot_start_date": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "training data start date", + "reference_type": "DEFAULT", + "label": "开始时间" + }, + "Slot_end_date": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "training data end date", + "reference_type": "DEFAULT", + "label": "结束时间" + }, + "Slot_online_training": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "['','--online_training'] 否 是,the train master run for online training", + "reference_type": "DEFAULT", + "label": "是否在线训练" + }, + "Slot_suffle_data_block": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "reference_type": "DEFAULT", + "label": "是否shuffle数据块" + }, + "Slot_mode": { + "reference": "", + "value_type": "STRING", + "default_value": "eval", + "help": "choices:['train','eval'] 训练还是验证", + "reference_type": "DEFAULT", + "label": "模式" + }, + "Slot_verbosity": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "int, Logging level", + "reference_type": "DEFAULT", + "label": "日志等级" + }, + "Slot_code_key": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "代码tar包地址,如果为空则使用code tar", + "reference_type": "WORKFLOW", + "label": "模型代码路径" + }, + "Slot_code_tar": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "代码包,variable中请使用代码类型", + "reference_type": "DEFAULT", + "label": "代码" + }, + "Slot_save_checkpoint_steps": { + "reference": "", + "value_type": "INT", + "default_value": 1000, + "help": "int, Number of steps between checkpoints.", + "reference_type": "DEFAULT", + "label": "SAVE_CHECKPOINT_STEPS" + }, + "Slot_save_checkpoint_secs": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "int,Number of secs between checkpoints.", + "reference_type": "DEFAULT", + "label": "SAVE_CHECKPOINT_SECS" + }, + "Slot_sparse_estimator": { + "reference": "", + "value_type": "BOOL", + "default_value": false, + "help": "bool,default False Whether using sparse estimator.", + "reference_type": "DEFAULT", + "label": "SPARSE_ESTIMATOR" + }, + "Slot_summary_save_steps": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "int, Number of steps to save summary files.", + "reference_type": "DEFAULT", + "label": "SUMMARY_SAVE_STEPS" + }, + "Slot_checkpoint_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "reference_type": "DEFAULT", + "label": "CHECKPOINT_PATH" + }, + "Slot_load_checkpoint_filename": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "reference_type": "DEFAULT", + "label": "LOAD_CHECKPOINT_FILENAME" + }, + "Slot_load_checkpoint_filename_with_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "reference_type": "DEFAULT", + "label": "从绝对路径加载checkpoint" + }, + "Slot_load_checkpoint_from_job": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "指定任务名job_output下的latest checkpoint", + "reference_type": "DEFAULT", + 
"label": "以任务名加载checkpoint" + }, + "Slot_export_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "reference_type": "DEFAULT", + "label": "EXPORT_PATH" + }, + "Slot_master_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Master额外环境变量" + }, + "Slot_ps_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,ps pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "PS额外环境变量" + }, + "Slot_worker_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Worker额外环境变量" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{"mountPath": "/data","name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + } + } diff --git a/web_console_v2/client/src/jobMetaDatas/nn_model_evaluation.metayml b/web_console_v2/client/src/jobMetaDatas/nn_model_evaluation.metayml new file mode 100644 index 000000000..a159d440b --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/nn_model_evaluation.metayml @@ -0,0 +1,328 @@ +{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + "schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": ${Slot_role}, + "cleanPodPolicy": "All", + "peerSpecs": { + "Leader" if ${Slot_role}=="Follower" else "Follower": { + "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc:80", + "authority": project.participants[0].egress_host, + "extraHeaders": { + "x-host": "fedlearner-operator." 
+ project.participants[0].egress_domain + } + } + }, + "flReplicaSpecs": { + "Master": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/job_output/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "EPOCH_NUM", + "value": str(${Slot_epoch_num}) + }, + { + "name": "START_DATE", + "value": str(${Slot_start_date}) + }, + { + "name": "END_DATE", + "value": str(${Slot_end_date}) + }, + { + "name": "DATA_SOURCE", + "value": ${Slot_data_source} + }, + { + "name": "ONLINE_TRAINING", + "value": ${Slot_online_training} + }, + { + "name": "SPARSE_ESTIMATOR", + "value": str(${Slot_sparse_estimator}) + }, + { + "name": "CODE_KEY", + "value": ${Slot_code_key} + }, + { + "name": "CODE_TAR", + "value": ${Slot_code_tar} + }, + { + "name": "CHECKPOINT_PATH", + "value": ${Slot_checkpoint_path} + }, + { + "name": "LOAD_CHECKPOINT_FILENAME", + "value": ${Slot_load_checkpoint_filename} + }, + { + "name": "LOAD_CHECKPOINT_FILENAME_WITH_PATH", + "value": ${Slot_load_checkpoint_filename_with_path} + }, + { + "name": "LOAD_CHECKPOINT_PATH", + "value": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + "/job_output/" + ${Slot_load_checkpoint_from_job} + "/checkpoints" + }, + { + "name": "EXPORT_PATH", + "value": ${Slot_export_path} + } + ] + ${Slot_master_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/trainer/run_trainer_master.sh" + ], + "args": [ + ], + "resources": { + "limits": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + }, + "requests": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + + } + }, + "pair": False, + "replicas": int(${Slot_master_replicas}) + }, + "PS": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + } + + ] + ${Slot_ps_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/trainer/run_trainer_ps.sh" + ], + "args": [ + ], + "resources": { + "limits": { + "cpu": ${Slot_ps_cpu}, + "memory": ${Slot_ps_memory} + }, + "requests": { + 
"cpu": ${Slot_ps_cpu}, + "memory": ${Slot_ps_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": False, + "replicas": int(${Slot_ps_replicas}) + }, + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/job_output/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "MODE", + "value": ${Slot_mode} + }, + { + "name": "VERBOSITY", + "value": str(${Slot_verbosity}) + }, + { + "name": "CODE_KEY", + "value": ${Slot_code_key} + }, + { + "name": "CODE_TAR", + "value": ${Slot_code_tar} + }, + { + "name": "SAVE_CHECKPOINT_STEPS", + "value": str(${Slot_save_checkpoint_steps}) + }, + { + "name": "SAVE_CHECKPOINT_SECS", + "value": str(${Slot_save_checkpoint_secs}) + }, + { + "name": "SPARSE_ESTIMATOR", + "value": str(${Slot_sparse_estimator}) + }, + { + "name": "SUMMARY_SAVE_STEPS", + "value": str(${Slot_summary_save_steps}) + } + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/trainer/run_trainer_worker.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": True, + "replicas": int(${Slot_worker_replicas}) + } + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/nn_model_training.json b/web_console_v2/client/src/jobMetaDatas/nn_model_training.json new file mode 100644 index 000000000..2d84ed5b7 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/nn_model_training.json @@ -0,0 +1,298 @@ +{ + "Slot_role": { + "reference": "", + "value_type": "STRING", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "WORKFLOW", + "label": "Flapp通讯时角色" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "value_type": "STRING", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录" + }, + "Slot_image_version": { + "reference": "", + "value_type": "STRING", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "DEFAULT", + "label": "容器镜像版本" + }, + "Slot_master_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Master的CPU" + }, + "Slot_master_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + 
"help": "Master Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Master的内存" + }, + "Slot_master_replicas": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "同时运行的完全相同的Master Pods数量", + "reference_type": "DEFAULT", + "label": "Master的Pod个数" + }, + "Slot_worker_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Worker的CPU" + }, + "Slot_worker_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Worker的内存" + }, + "Slot_worker_replicas": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "同时运行的完全相同的Worker Pods数量", + "reference_type": "DEFAULT", + "label": "Worker的Pod个数" + }, + "Slot_ps_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "PS的CPU" + }, + "Slot_ps_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "PS的内存" + }, + "Slot_ps_replicas": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "同时运行的完全相同的PS Pods数量", + "reference_type": "DEFAULT", + "label": "PS的Pod个数" + }, + "Slot_data_source": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "必须修改,求交任务的名字", + "reference_type": "JOB_PROPERTY", + "label": "数据源" + }, + "Slot_epoch_num": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "number of epoch for training, not support in online training", + "reference_type": "DEFAULT", + "label": "epoch数量" + }, + "Slot_start_date": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "training data start date", + "reference_type": "DEFAULT", + "label": "开始时间" + }, + "Slot_end_date": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "training data end date", + "reference_type": "DEFAULT", + "label": "结束时间" + }, + "Slot_online_training": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "['','--online_training'] 否 是,the train master run for online training", + "reference_type": "DEFAULT", + "label": "是否在线训练" + }, + "Slot_suffle_data_block": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "['','--shuffle_data_block'] 否 是,shuffle the data block or not", + "reference_type": "DEFAULT", + "label": "是否shuffle数据块" + }, + "Slot_mode": { + "reference": "", + "value_type": "STRING", + "default_value": "train", + "help": "choices:['train','eval'] 训练还是验证", + "reference_type": "DEFAULT", + "label": "模式" + }, + "Slot_verbosity": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "int, Logging level", + "reference_type": "DEFAULT", + "label": "日志等级" + }, + "Slot_code_key": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "代码tar包地址,如果为空则使用code tar", + "reference_type": "WORKFLOW", + "label": "模型代码路径" + }, + "Slot_code_tar": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "代码包,variable中请使用代码类型", + "reference_type": "DEFAULT", + "label": "代码" + }, + "Slot_save_checkpoint_steps": { + "reference": "", + "value_type": "INT", + "default_value": 1000, + "help": "int, Number of steps 
between checkpoints.", + "reference_type": "DEFAULT", + "label": "SAVE_CHECKPOINT_STEPS" + }, + "Slot_save_checkpoint_secs": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "int,Number of secs between checkpoints.", + "reference_type": "DEFAULT", + "label": "SAVE_CHECKPOINT_SECS" + }, + "Slot_sparse_estimator": { + "reference": "", + "value_type": "BOOL", + "default_value": false, + "help": "bool,default False Whether using sparse estimator.", + "reference_type": "DEFAULT", + "label": "SPARSE_ESTIMATOR" + }, + "Slot_summary_save_steps": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "int, Number of steps to save summary files.", + "reference_type": "DEFAULT", + "label": "SUMMARY_SAVE_STEPS" + }, + "Slot_checkpoint_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "不建议修改,checkpoint输出路径,建议为空,会默认使用{storage_root_path}/job_output/{job_name}/checkpoints,强烈建议保持空值", + "reference_type": "DEFAULT", + "label": "CHECKPOINT_PATH" + }, + "Slot_load_checkpoint_filename": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "加载checkpoint_path下的相对路径的checkpoint, 默认会加载checkpoint_path下的latest checkpoint", + "reference_type": "DEFAULT", + "label": "LOAD_CHECKPOINT_FILENAME" + }, + "Slot_load_checkpoint_filename_with_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "加载绝对路径下的checkpoint,需要细致到文件名", + "reference_type": "DEFAULT", + "label": "从绝对路径加载checkpoint" + }, + "Slot_load_checkpoint_from_job": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "指定任务名job_output下的latest checkpoint", + "reference_type": "DEFAULT", + "label": "以任务名加载checkpoint" + }, + "Slot_export_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "使用默认空值,将把models保存到$OUTPUT_BASE_DIR/exported_models 路径下。", + "reference_type": "DEFAULT", + "label": "EXPORT_PATH" + }, + "Slot_master_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Master额外环境变量" + }, + "Slot_ps_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,ps pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "PS额外环境变量" + }, + "Slot_worker_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Worker额外环境变量" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{"mountPath": "/data","name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/nn_model_training.metayml b/web_console_v2/client/src/jobMetaDatas/nn_model_training.metayml new file mode 100644 index 000000000..11a9ef161 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/nn_model_training.metayml @@ -0,0 
+1,328 @@ +{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + "schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": ${Slot_role}, + "cleanPodPolicy": "All", + "peerSpecs": { + "Leader" if ${Slot_role}=="Follower" else "Follower": { + "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc:80", + "authority": project.participants[0].egress_host, + "extraHeaders": { + "x-host": "fedlearner-operator." + project.participants[0].egress_domain + } + } + }, + "flReplicaSpecs": { + "Master": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/job_output/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "EPOCH_NUM", + "value": str(${Slot_epoch_num}) + }, + { + "name": "START_DATE", + "value": str(${Slot_start_date}) + }, + { + "name": "END_DATE", + "value": str(${Slot_end_date}) + }, + { + "name": "DATA_SOURCE", + "value": ${Slot_data_source} + }, + { + "name": "ONLINE_TRAINING", + "value": ${Slot_online_training} + }, + { + "name": "SPARSE_ESTIMATOR", + "value": str(${Slot_sparse_estimator}) + }, + { + "name": "CODE_KEY", + "value": ${Slot_code_key} + }, + { + "name": "CODE_TAR", + "value": ${Slot_code_tar} + }, + { + "name": "CHECKPOINT_PATH", + "value": ${Slot_checkpoint_path} + }, + { + "name": "LOAD_CHECKPOINT_FILENAME", + "value": ${Slot_load_checkpoint_filename} + }, + { + "name": "LOAD_CHECKPOINT_FILENAME_WITH_PATH", + "value": ${Slot_load_checkpoint_filename_with_path} + }, + { + "name": "LOAD_CHECKPOINT_PATH", + "value": ${Slot_load_checkpoint_from_job} and ${Slot_storage_root_path} + "/job_output/" + ${Slot_load_checkpoint_from_job} + "/checkpoints" + }, + { + "name": "EXPORT_PATH", + "value": ${Slot_export_path} + } + ] + ${Slot_master_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/trainer/run_trainer_master.sh" + ], + "args": [ + ], + "resources": { + "limits": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + }, + "requests": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + + } + }, + "pair": False, + "replicas": int(${Slot_master_replicas}) + }, + "PS": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { 
+ "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + } + + ] + ${Slot_ps_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/trainer/run_trainer_ps.sh" + ], + "args": [ + ], + "resources": { + "limits": { + "cpu": ${Slot_ps_cpu}, + "memory": ${Slot_ps_memory} + }, + "requests": { + "cpu": ${Slot_ps_cpu}, + "memory": ${Slot_ps_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": False, + "replicas": int(${Slot_ps_replicas}) + }, + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/job_output/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "MODE", + "value": ${Slot_mode} + }, + { + "name": "VERBOSITY", + "value": str(${Slot_verbosity}) + }, + { + "name": "CODE_KEY", + "value": ${Slot_code_key} + }, + { + "name": "CODE_TAR", + "value": ${Slot_code_tar} + }, + { + "name": "SAVE_CHECKPOINT_STEPS", + "value": str(${Slot_save_checkpoint_steps}) + }, + { + "name": "SAVE_CHECKPOINT_SECS", + "value": str(${Slot_save_checkpoint_secs}) + }, + { + "name": "SPARSE_ESTIMATOR", + "value": str(${Slot_sparse_estimator}) + }, + { + "name": "SUMMARY_SAVE_STEPS", + "value": str(${Slot_summary_save_steps}) + } + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/trainer/run_trainer_worker.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": True, + "replicas": int(${Slot_worker_replicas}) + } + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/psi_data_join.json b/web_console_v2/client/src/jobMetaDatas/psi_data_join.json new file mode 100644 index 000000000..06ae31f65 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/psi_data_join.json @@ -0,0 +1,269 @@ +{ + + "Slot_role": { + "reference": "", + "value_type": "STRING", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "WORKFLOW", + "label": "Flapp通讯时角色" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "value_type": "STRING", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", 
+ "label": "存储根目录" + }, + "Slot_image_version": { + "reference": "", + "value_type": "STRING", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "DEFAULT", + "label": "容器镜像版本" + }, + "Slot_master_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Master的CPU" + }, + "Slot_master_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Master的内存" + }, + "Slot_worker_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Worker的CPU" + }, + "Slot_worker_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Worker的内存" + }, + "Slot_batch_mode": { + "reference": "", + "value_type": "STRING", + "default_value": "--batch_mode", + "help": "如果为空则为常驻求交", + "reference_type": "DEFAULT", + "label": "是否为批处理模式" + }, + "Slot_partition_num": { + "reference": "", + "value_type": "INT", + "default_value": 4, + "help": "建议修改,求交后数据分区的数量,建议和raw_data一致", + "reference_type": "WORKFLOW", + "label": "数据分区的数量" + }, + "Slot_rsa_key_pem": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "直接输入RSA公钥和私钥,请使用Textarea,Leader会从中读取私钥,Follower会从中读取公钥。如果为空会使用path读取。", + "reference_type": "WORKFLOW", + "label": "RSA钥匙" + }, + "Slot_rsa_key_path": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "RSA公钥或私钥的地址,在无RSA_KEY_PEM时必填", + "reference_type": "WORKFLOW", + "label": "RSA钥匙地址" + }, + "Slot_kms_key_name": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "kms中的密钥名称,站内镜像需使用KMS", + "reference_type": "DEFAULT", + "label": "密钥名称" + }, + "Slot_kms_client": { + "reference": "", + "value_type": "STRING", + "default_value": "data.aml.fl", + "help": "kms client", + "reference_type": "DEFAULT", + "label": "kms client" + }, + "Slot_psi_raw_data_iter": { + "reference": "", + "value_type": "STRING", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "reference_type": "DEFAULT", + "label": "raw data数据类型" + }, + "Slot_data_block_builder": { + "reference": "", + "value_type": "STRING", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "reference_type": "DEFAULT", + "label": "data block output数据类型" + }, + "Slot_psi_output_builder": { + "reference": "", + "value_type": "STRING", + "default_value": "TF_RECORD", + "help": "建议不修改,choices=['TF_RECORD', 'CSV_DICT']", + "reference_type": "DEFAULT", + "label": "PSI output数据类型" + }, + "Slot_start_time": { + "reference": "", + "value_type": "INT", + "default_value": 0, + "help": "建议不修改,使用自这个时间起的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "reference_type": "DEFAULT", + "label": "数据起始时间" + }, + "Slot_end_time": { + "reference": "", + "value_type": "INT", + "default_value": 999999999999, + "help": "建议不修改,使用自这个时间以前的数据,仅从文件名筛选所以格式依据文件名(yyyymmdd或timestamp)", + "reference_type": "DEFAULT", + "label": "数据末尾时间" + }, + "Slot_enable_negative_example_generator": { + "reference": "", + "value_type": "BOOL", + "default_value": false, + "help": "建议不修改,是否开启负采样,当follower求交时遇到无法匹配上的leader的example 
id,会以negative_sampling_rate为概率生成一个新的样本。",
+    "reference_type": "DEFAULT",
+    "label": "是否开启负采样"
+  },
+  "Slot_negative_sampling_rate": {
+    "reference": "",
+    "value_type": "NUMBER",
+    "default_value": 0,
+    "help": "建议不修改,负采样比例,当follower求交时遇到无法匹配上的leader的example id,会以此概率生成一个新的样本。",
+    "reference_type": "DEFAULT",
+    "label": "负采样比例"
+  },
+  "Slot_raw_data_name": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "",
+    "help": "必须修改,原始数据的发布地址,根据参数内容在portal_publish_dir地址下寻找",
+    "reference_type": "JOB_PROPERTY",
+    "label": "raw_data名字"
+  },
+  "Slot_data_block_dump_interval": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": -1,
+    "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次data block,小于0则无此限制",
+    "reference_type": "DEFAULT",
+    "label": "数据dump时间间隔"
+  },
+  "Slot_data_block_dump_threshold": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": 4096,
+    "help": "建议不修改,最多多少个样本就dump为一个data block,小于等于0则无此限制",
+    "reference_type": "DEFAULT",
+    "label": "数据dump临界点"
+  },
+  "Slot_example_id_dump_interval": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": -1,
+    "help": "建议不修改,最多每隔多少时间(实际时间,非样本时间)就dump一次example id,小于0则无此限制",
+    "reference_type": "DEFAULT",
+    "label": "数据id dump时间间隔"
+  },
+  "Slot_example_id_dump_threshold": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": 4096,
+    "help": "建议不修改,最多多少个example id就dump一次,小于等于0则无此限制",
+    "reference_type": "DEFAULT",
+    "label": "数据id dump临界点"
+  },
+  "Slot_psi_read_ahead_size": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": null,
+    "help": "建议不填, the read ahead size for raw data",
+    "reference_type": "DEFAULT",
+    "label": "psi_read_ahead_size"
+  },
+  "Slot_run_merger_read_ahead_buffer": {
+    "reference": "",
+    "value_type": "INT",
+    "default_value": null,
+    "help": "建议不填, sort run merger read ahead buffer",
+    "reference_type": "DEFAULT",
+    "label": "run_merger_read_ahead_buffer"
+  },
+  "Slot_master_envs": {
+    "reference": "",
+    "value_type": "LIST",
+    "default_value": [],
+    "help": "数组类型,master pod额外的环境变量",
+    "reference_type": "DEFAULT",
+    "label": "Master额外环境变量"
+  },
+  "Slot_worker_envs": {
+    "reference": "",
+    "value_type": "LIST",
+    "default_value": [],
+    "help": "数组类型,worker pod额外的环境变量",
+    "reference_type": "DEFAULT",
+    "label": "Worker额外环境变量"
+  },
+  "Slot_labels": {
+    "reference": "system.variables.labels",
+    "value_type": "OBJECT",
+    "default_value": {},
+    "help": "建议不修改,格式: {}",
+    "reference_type": "SYSTEM",
+    "label": "FLAPP额外元信息"
+  },
+  "Slot_volumes": {
+    "reference": "system.variables.volumes_list",
+    "value_type": "LIST",
+    "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data" }],
+    "help": "建议不修改,数组类型,和volume_mounts一一对应",
+    "reference_type": "SYSTEM",
+    "label": "为Pod提供的卷"
+  },
+  "Slot_volume_mounts": {
+    "reference": "system.variables.volume_mounts_list",
+    "value_type": "LIST",
+    "default_value": [{ "mountPath": "/data", "name": "data"}],
+    "help": "建议不修改,容器中卷挂载的位置,数组类型",
+    "reference_type": "SYSTEM",
+    "label": "卷挂载位置"
+  },
+  "Slot_data_join_metrics_sample_rate": {
+    "reference": "",
+    "value_type": "STRING",
+    "default_value": "0",
+    "help": "建议不修改,es metrics 取样比例",
+    "reference_type": "DEFAULT",
+    "label": "metrics_sample_rate"
+  }
+}
diff --git a/web_console_v2/client/src/jobMetaDatas/psi_data_join.metayml b/web_console_v2/client/src/jobMetaDatas/psi_data_join.metayml
new file mode 100644
index 000000000..f7ab2808d
--- /dev/null
+++ b/web_console_v2/client/src/jobMetaDatas/psi_data_join.metayml
@@ -0,0 +1,289 @@
+{
"apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + "schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": ${Slot_role}, + "cleanPodPolicy": "All", + "peerSpecs": { + "Leader" if ${Slot_role}=="Follower" else "Follower": { + "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc:80", + "authority": project.participants[0].egress_host, + "extraHeaders": { + "x-host": "fedlearner-operator." + project.participants[0].egress_domain + } + } + }, + "flReplicaSpecs": { + "Master": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/data_source/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "PARTITION_NUM", + "value": str(${Slot_partition_num}) + }, + { + "name": "START_TIME", + "value": str(${Slot_start_time}) + }, + { + "name": "END_TIME", + "value": str(${Slot_end_time}) + }, + { + "name": "RAW_DATA_SUB_DIR", + "value": "portal_publish_dir/" + ${Slot_raw_data_name} + }, + { + # not work, remove it after prepare_launch_data_join_cli been removed + "name": "NEGATIVE_SAMPLING_RATE", + "value": str(${Slot_negative_sampling_rate}) + }, + { + "name": "DATA_JOIN_METRICS_SAMPLE_RATE", + "value": str(${Slot_data_join_metrics_sample_rate}) + } + ] + ${Slot_master_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/rsa_psi/run_psi_data_join_master.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + }, + "requests": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": True, + "replicas": 1 + }, + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "BATCH_MODE", + "value": ${Slot_batch_mode} + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/data_source/" + self.name + }, + { + "name": "PARTITION_NUM", + "value": str(${Slot_partition_num}) + }, + { + "name": "RAW_DATA_SUB_DIR", + 
"value": "portal_publish_dir/" + ${Slot_raw_data_name} + }, + { + "name": "RSA_KEY_PEM", + "value": ${Slot_rsa_key_pem} + }, + { + "name": "RSA_KEY_PATH", + "value": ${Slot_rsa_key_path} + }, + { + "name": "RSA_PRIVATE_KEY_PATH", + "value": ${Slot_rsa_key_path} + }, + { + "name": "KMS_KEY_NAME", + "value": ${Slot_kms_key_name} + }, + { + "name": "KMS_CLIENT", + "value": ${Slot_kms_client} + }, + { + "name": "PSI_RAW_DATA_ITER", + "value": ${Slot_psi_raw_data_iter} + }, + { + "name": "DATA_BLOCK_BUILDER", + "value": ${Slot_data_block_builder} + }, + { + "name": "PSI_OUTPUT_BUILDER", + "value": ${Slot_psi_output_builder} + }, + { + "name": "DATA_BLOCK_DUMP_INTERVAL", + "value": str(${Slot_data_block_dump_interval}) + }, + { + "name": "DATA_BLOCK_DUMP_THRESHOLD", + "value": str(${Slot_data_block_dump_threshold}) + }, + { + "name": "EXAMPLE_ID_DUMP_INTERVAL", + "value": str(${Slot_example_id_dump_interval}) + }, + { + "name": "EXAMPLE_ID_DUMP_THRESHOLD", + "value": str(${Slot_example_id_dump_threshold}) + }, + { + "name": "EXAMPLE_JOINER", + "value": "SORT_RUN_JOINER" + }, + { + "name": "PSI_READ_AHEAD_SIZE", + "value": str(${Slot_psi_read_ahead_size}) + }, + { + "name": "SORT_RUN_MERGER_READ_AHEAD_BUFFER", + "value": str(${Slot_run_merger_read_ahead_buffer}) + }, + { + "name": "NEGATIVE_SAMPLING_RATE", + "value": str(${Slot_negative_sampling_rate}) + }, + { + "name": "ENABLE_NEGATIVE_EXAMPLE_GENERATOR", + "value": str(${Slot_enable_negative_example_generator}) + }, + { + "name": "DATA_JOIN_METRICS_SAMPLE_RATE", + "value": str(${Slot_data_join_metrics_sample_rate}) + } + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/rsa_psi/run_psi_data_join_worker.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": True, + "replicas": int(${Slot_partition_num}) + } + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/raw_data.json b/web_console_v2/client/src/jobMetaDatas/raw_data.json new file mode 100644 index 000000000..88f843519 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/raw_data.json @@ -0,0 +1,220 @@ +{ + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "value_type": "STRING", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录" + }, + "Slot_image_version": { + "reference": "", + "value_type": "STRING", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "DEFAULT", + "label": "容器镜像版本" + }, + "Slot_master_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Master Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Master的CPU" + }, + "Slot_master_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + "help": "Master Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Master的内存" + }, + 
"Slot_worker_cpu": { + "reference": "", + "value_type": "STRING", + "default_value": "2000m", + "help": "Worker Pod 所分配的CPU资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Worker的CPU" + }, + "Slot_worker_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "3Gi", + "help": "Worker Pod 所分配的内存资源(request和limit一致)", + "reference_type": "DEFAULT", + "label": "Worker的内存" + }, + "Slot_data_portal_type": { + "reference": "", + "value_type": "STRING", + "default_value": "Streaming", + "help": "运行过一次后修改无效!! the type of data portal type ,choices=['PSI', 'Streaming']", + "reference_type": "DEFAULT", + "label": "数据入口类型" + }, + "Slot_output_partition_num": { + "reference": "", + "value_type": "INT", + "default_value": 4, + "help": "运行过一次后修改无效!!输出数据的文件数量,对应Worker数量", + "reference_type": "WORKFLOW", + "label": "数据分区的数量" + }, + "Slot_input_base_dir": { + "reference": "", + "value_type": "STRING", + "default_value": "/app/deploy/integrated_test/tfrecord_raw_data", + "help": "必须修改,运行过一次后修改无效!!the base dir of input directory", + "reference_type": "WORKFLOW", + "label": "输入路径" + }, + "Slot_long_running": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "choices: ['','--long_running']否,是。是否为常驻上传原始数据", + "reference_type": "DEFAULT", + "label": "是否常驻" + }, + "Slot_check_success_tag": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "choices:['','--check_success_tag'] means false and true, Check that a _SUCCESS file exists before processing files in a subfolder", + "reference_type": "DEFAULT", + "label": "是否检查成功标志" + }, + "Slot_files_per_job_limit": { + "reference": "", + "value_type": "INT", + "default_value": null, + "help": "空即不设限制,Max number of files in a job", + "reference_type": "DEFAULT", + "label": "每个任务最多文件数" + }, + "Slot_single_subfolder": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "choices:['','--single_subfolder'] 否 是,Only process one subfolder at a time", + "reference_type": "DEFAULT", + "label": "是否单一子文件夹" + }, + "Slot_file_wildcard": { + "reference": "", + "value_type": "STRING", + "default_value": "*.rd", + "help": "文件名称的通配符, 将会读取input_base_dir下所以满足条件的文件,如\n1. *.csv,意为读取所有csv格式文件\n2. *.tfrecord,意为读取所有tfrecord格式文件\n3. 
xxx.txt,意为读取文件名为xxx.txt的文件", + "reference_type": "DEFAULT", + "label": "文件名称的通配符" + }, + "Slot_batch_size": { + "reference": "", + "value_type": "INT", + "default_value": 1024, + "help": "原始数据是一批一批的从文件系统中读出来,batch_size为batch的大小", + "reference_type": "DEFAULT", + "label": "Batch大小" + }, + "Slot_input_data_format": { + "reference": "", + "value_type": "STRING", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the type for input data iterator", + "reference_type": "DEFAULT", + "label": "输入数据格式" + }, + "Slot_compressed_type": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the compressed type of input data file", + "reference_type": "DEFAULT", + "label": "压缩方式" + }, + "Slot_output_data_format": { + "reference": "", + "value_type": "STRING", + "default_value": "TF_RECORD", + "help": "choices=['TF_RECORD', 'CSV_DICT'] the format for output file", + "reference_type": "DEFAULT", + "label": "输出格式" + }, + "Slot_builder_compressed_type": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "choices=['', 'ZLIB', 'GZIP'] the format for output file", + "reference_type": "DEFAULT", + "label": "输出压缩格式" + }, + "Slot_memory_limit_ratio": { + "reference": "", + "value_type": "INT", + "default_value": 70, + "help": "预测是否会OOM的时候用到,如果预测继续执行下去时占用内存会超过这个比例,就阻塞,直到尚未处理的任务处理完成。 注意这是个40-81之间的整数。", + "reference_type": "DEFAULT", + "label": "内存限制比例" + }, + "Slot_optional_fields": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "optional stat fields used in joiner, separated by comma between fields, e.g. \"label,rit\"Each field will be stripped", + "reference_type": "DEFAULT", + "label": "可选字段" + }, + + "Slot_master_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,master pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Master额外环境变量" + }, + + "Slot_worker_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Worker额外环境变量" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data" }], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{ "mountPath": "/data", "name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + }, + "Slot_raw_data_metrics_sample_rate": { + "reference": "", + "value_type": "STRING", + "default_value": "1", + "help": "建议不修改,es metrics 取样比例", + "reference_type": "DEFAULT", + "label": "metrics_sample_rate" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/raw_data.metayml b/web_console_v2/client/src/jobMetaDatas/raw_data.metayml new file mode 100644 index 000000000..fa6481df0 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/raw_data.metayml @@ -0,0 +1,246 @@ +{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + 
"schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": "Follower", + "peerSpecs": { + "Leader": { + "peerURL": "", + "authority": "" + } + }, + "flReplicaSpecs": { + "Master": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "DATA_PORTAL_NAME", + "value": self.name + }, + { + "name": "DATA_PORTAL_TYPE", + "value": ${Slot_data_portal_type} + }, + { + "name": "OUTPUT_PARTITION_NUM", + "value": str(${Slot_output_partition_num}) + }, + { + "name": "INPUT_BASE_DIR", + "value": ${Slot_input_base_dir} + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/raw_data/" + self.name + }, + { + "name": "RAW_DATA_PUBLISH_DIR", + "value": "portal_publish_dir/" + self.name + }, + { + "name": "FILE_WILDCARD", + "value": ${Slot_file_wildcard} + }, + { + "name": "LONG_RUNNING", + "value": ${Slot_long_running} + }, + { + "name": "CHECK_SUCCESS_TAG", + "value": ${Slot_check_success_tag} + }, + { + "name": "FILES_PER_JOB_LIMIT", + "value": str(${Slot_files_per_job_limit}) + }, + { + "name": "SINGLE_SUBFOLDER", + "value": ${Slot_single_subfolder} + }, + { + "name": "RAW_DATA_METRICS_SAMPLE_RATE", + "value": str(${Slot_raw_data_metrics_sample_rate}) + } + + ] + ${Slot_master_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/data_portal/run_data_portal_master.sh" + ], + "args": [ + ], + "resources": { + "limits": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + }, + "requests": { + "cpu": ${Slot_master_cpu}, + "memory": ${Slot_master_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": False, + "replicas": 1 + }, + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/data_source/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + + { + "name": "BATCH_SIZE", + "value": str(${Slot_batch_size}) + }, + { + "name": "INPUT_DATA_FORMAT", + "value": ${Slot_input_data_format} + }, + { + "name": "COMPRESSED_TYPE", + "value": ${Slot_compressed_type} + }, + { + "name": "OUTPUT_DATA_FORMAT", + "value": ${Slot_output_data_format} + }, + { + "name": "BUILDER_COMPRESSED_TYPE", + "value": ${Slot_builder_compressed_type} + }, + { + "name": "MEMORY_LIMIT_RATIO", + "value": str(${Slot_memory_limit_ratio}) + }, + { + "name": 
"OPTIONAL_FIELDS", + "value": ${Slot_optional_fields} + }, + { + "name": "RAW_DATA_METRICS_SAMPLE_RATE", + "value": str(${Slot_raw_data_metrics_sample_rate}) + } + + + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/data_portal/run_data_portal_worker.sh" + ], + "args": [ + ], + "resources": { + "limits": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_memory} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": False, + "replicas": ${Slot_output_partition_num} + } + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/transformer.json b/web_console_v2/client/src/jobMetaDatas/transformer.json new file mode 100644 index 000000000..69100d7bd --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/transformer.json @@ -0,0 +1,108 @@ + +{ + "Slot_image": { + "reference": "system.variables.spark_image", + "value_type": "STRING", + "default_value": "", + "help": "镜像地址,建议不填写,默认会使用system.variables.image_repo + '/pp_data_inspection:' + system.version", + "reference_type": "DEFAULT", + "label": "镜像" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_spark_transformer_file": { + "reference": "", + "value_type": "STRING", + "default_value": "transformer.py", + "help": "特征工程的脚本", + "reference_type": "DEFAULT", + "label": "特征工程脚本文件" + }, + "Slot_dataset": { + "reference": "", + "value_type": "STRING", + "default_value": "", + "help": "", + "reference_type": "DEFAULT", + "label": "输入数据集" + }, + "Slot_configs": { + "reference": "", + "value_type": "OBJECT", + "default_value": {}, + "help": "使用特征选择组件", + "reference_type": "DEFAULT", + "label": "配置" + }, + "Slot_driver_cores": { + "reference": "", + "value_type": "STRING", + "default_value": "1000m", + "help": "driver核心数", + "reference_type": "DEFAULT", + "label": "driver核心数" + }, + "Slot_driver_core_limit": { + "reference": "", + "value_type": "STRING", + "default_value": "1200m", + "help": "driver核心数限制", + "reference_type": "DEFAULT", + "label": "driver核心数限制" + }, + "Slot_driver_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "1024m", + "help": "driver内存", + "reference_type": "DEFAULT", + "label": "driver内存" + }, + "Slot_executor_cores": { + "reference": "", + "value_type": "STRING", + "default_value": "1000m", + "help": "excutor核心数", + "reference_type": "DEFAULT", + "label": "excutor核心数" + }, + "Slot_executor_instances": { + "reference": "", + "value_type": "INT", + "default_value": 1, + "help": "excutor实例数", + "reference_type": "DEFAULT", + "label": "excutor实例数" + }, + "Slot_executor_memory": { + "reference": "", + "value_type": "STRING", + "default_value": "512m", + "help": "excutor内存", + "reference_type": "DEFAULT", + "label": "excutor内存" + }, + + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + 
"label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{"mountPath": "/data","name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/transformer.metayml b/web_console_v2/client/src/jobMetaDatas/transformer.metayml new file mode 100644 index 000000000..ca4a8420e --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/transformer.metayml @@ -0,0 +1,54 @@ +{ + "apiVersion": "sparkoperator.k8s.io/v1beta2", + "kind": "SparkApplication", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "labels": ${Slot_labels}, + "annotations": { + "queue": "fedlearner-spark", + "schedulerName": "batch", + }, + }, + "spec": { + "type": "Python", + "pythonVersion": "3", + "mode": "cluster", + "image": ${Slot_image} or system.variables.image_repo + "/pp_data_inspection:" + system.version, + "imagePullPolicy": "IfNotPresent", + "volumes": ${Slot_volumes}, + "mainApplicationFile": ${Slot_spark_transformer_file}, + "arguments": [ + ${Slot_dataset}, + "rds/**", + str(${Slot_configs}) + ], + "sparkVersion": "3.0.0", + "restartPolicy": { + "type": "OnFailure", + "onFailureRetries": 3, + "onFailureRetryInterval": 10, + "onSubmissionFailureRetries": 5, + "onSubmissionFailureRetryInterval": 20 + }, + "driver": { + "cores": ${Slot_driver_cores}, + "coreLimit": ${Slot_driver_core_limit}, + "memory": ${Slot_driver_memory}, + "labels": { + "version": "3.0.0" + }, + "serviceAccount": "spark", + "volumeMounts": ${Slot_volume_mounts} + }, + "executor": { + "cores": ${Slot_executor_cores}, + "instances": ${Slot_executor_instances}, + "memory": ${Slot_executor_memory}, + "labels": { + "version": "3.0.0" + }, + "volumeMounts": ${Slot_volume_mounts} + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/tree_model_evaluation.json b/web_console_v2/client/src/jobMetaDatas/tree_model_evaluation.json new file mode 100644 index 000000000..87ca2f2f5 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/tree_model_evaluation.json @@ -0,0 +1,274 @@ +{ + "Slot_role": { + "reference": "", + "value_type": "STRING", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "DEFAULT", + "label": "Flapp通讯时角色" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "value_type": "STRING", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录" + }, + "Slot_image_version": { + "reference": "", + "value_type": "STRING", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "DEFAULT", + "label": "容器镜像版本" + }, + "Slot_worker_cpu": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "8000m", + "label": "所需CPU", + "help": "所需CPU" + }, + "Slot_worker_mem": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "16Gi", + "label": "所需内存", + "help": "所需内存" + }, + "Slot_data_source": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "求交数据集名称", + "help": "求交数据集名称" + }, + "Slot_data_path": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "数据存放位置", + "help": "数据存放位置" + }, + "Slot_mode": { + "reference_type": "DEFAULT", 
+ "reference": "", + "value_type": "STRING", + "default_value": "eval", + "label": "任务类型,train或eval", + "help": "任务类型,train或eval" + }, + "Slot_loss_type": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "logistic", + "label": "损失函数类型", + "help": "损失函数类型,logistic或mse,默认logistic" + }, + "Slot_file_ext": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": ".data", + "label": "文件后缀", + "help": "文件后缀" + }, + "Slot_file_type": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "tfrecord", + "label": "文件类型,csv或tfrecord", + "help": "文件类型,csv或tfrecord" + }, + "Slot_learning_rate": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "NUMBER", + "default_value": 0.3, + "label": "学习率", + "help": "学习率" + }, + "Slot_max_iters": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 5, + "label": "迭代数", + "help": "树的数量" + }, + "Slot_max_depth": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 3, + "label": "最大深度", + "help": "最大深度" + }, + "Slot_max_bins": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 33, + "label": "最大分箱数", + "help": "最大分箱数" + }, + "Slot_l2_regularization": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "NUMBER", + "default_value": 1, + "label": "L2惩罚系数", + "help": "L2惩罚系数" + }, + "Slot_num_parallel": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 1, + "label": "进程数量", + "help": "进程数量" + }, + "Slot_enable_packing": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": true, + "label": "是否开启优化", + "help": "是否开启优化" + }, + "Slot_ignore_fields": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "不入模的特征", + "help": "以逗号分隔如:name,age,sex" + }, + "Slot_cat_fields": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "类别类型特征", + "help": "类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex" + }, + "Slot_label_field": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "label", + "label": "label特征名", + "help": "label特征名" + }, + "Slot_load_model_name": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "模型任务名称", + "help": "按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models" + }, + "Slot_load_model_path": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "模型文件地址", + "help": "模型文件地址" + }, + "Slot_validation_data_path": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "验证数据集地址", + "help": "验证数据集地址" + }, + "Slot_send_scores_to_follower": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "是否发送结果到follower", + "help": "是否发送结果到follower" + }, + "Slot_send_metrics_to_follower": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "是否发送指标到follower", + "help": "是否发送指标到follower" + }, + "Slot_verbosity": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 1, + "label": "日志输出等级", + "help": "日志输出等级" + 
}, + "Slot_no_data": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "Leader是否没数据", + "help": "Leader是否没数据" + }, + "Slot_verify_example_ids": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "是否检查example_id对齐", + "help": "是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower" + }, + "Slot_es_batch_size": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 10, + "label": "ES_BATCH_SIZE", + "help": "ES_BATCH_SIZE" + }, + "Slot_worker_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Worker额外环境变量" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{"mountPath": "/data","name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/tree_model_evaluation.metayml b/web_console_v2/client/src/jobMetaDatas/tree_model_evaluation.metayml new file mode 100644 index 000000000..5e8a031f9 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/tree_model_evaluation.metayml @@ -0,0 +1,209 @@ +{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + "schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": ${Slot_role}, + "cleanPodPolicy": "All", + "peerSpecs": { + "Leader" if ${Slot_role}=="Follower" else "Follower": { + "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc:80", + "authority": project.participants[0].egress_host, + "extraHeaders": { + "x-host": "fedlearner-operator." 
+ project.participants[0].egress_domain + } + } + }, + "flReplicaSpecs": { + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/job_output/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "MODE", + "value": ${Slot_mode} + }, + { + "name": "LOSS_TYPE", + "value": ${Slot_loss_type} + }, + { + "name": "DATA_SOURCE", + "value": ${Slot_data_source} + }, + { + "name": "DATA_PATH", + "value": ${Slot_data_path} + }, + { + "name": "VALIDATION_DATA_PATH", + "value": ${Slot_validation_data_path} + }, + { + "name": "NO_DATA", + "value": str(${Slot_no_data}) + }, + { + "name": "FILE_EXT", + "value": ${Slot_file_ext} + }, + { + "name": "FILE_TYPE", + "value": ${Slot_file_type} + }, + { + "name": "LOAD_MODEL_PATH", + "value": ${Slot_load_model_path} + }, + { + "name": "LOAD_MODEL_NAME", + "value": ${Slot_load_model_name} + }, + { + "name": "VERBOSITY", + "value": str(${Slot_verbosity}) + }, + { + "name": "LEARNING_RATE", + "value": str(${Slot_learning_rate}) + }, + { + "name": "MAX_ITERS", + "value": str(${Slot_max_iters}) + }, + { + "name": "MAX_DEPTH", + "value": str(${Slot_max_depth}) + }, + { + "name": "MAX_BINS", + "value": str(${Slot_max_bins}) + }, + { + "name": "L2_REGULARIZATION", + "value": str(${Slot_l2_regularization}) + }, + { + "name": "NUM_PARALLEL", + "value": str(${Slot_num_parallel}) + }, + { + "name": "VERIFY_EXAMPLE_IDS", + "value": str(${Slot_verify_example_ids}) + }, + { + "name": "IGNORE_FIELDS", + "value": ${Slot_ignore_fields} + }, + { + "name": "CAT_FIELDS", + "value": ${Slot_cat_fields} + }, + { + "name": "LABEL_FIELD", + "value": ${Slot_label_field} + }, + { + "name": "SEND_SCORES_TO_FOLLOWER", + "value": str(${Slot_send_scores_to_follower}) + }, + { + "name": "SEND_METRICS_TO_FOLLOWER", + "value": str(${Slot_send_metrics_to_follower}) + }, + { + "name": "ENABLE_PACKING", + "value": str(${Slot_enable_packing}) + }, + { + "name": "ES_BATCH_SIZE", + "value": str(${Slot_es_batch_size}) + } + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + }, + { + "containerPort": 50052, + "name": "tf-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/trainer/run_tree_worker.sh" + ], + "resources": { + "limits": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_mem} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_mem} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": True, + "replicas": 1 + } + } + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/tree_model_training.json b/web_console_v2/client/src/jobMetaDatas/tree_model_training.json new file mode 100644 index 
000000000..ac37f8972 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/tree_model_training.json @@ -0,0 +1,274 @@ +{ + "Slot_role": { + "reference": "", + "value_type": "STRING", + "default_value": "Leader", + "help": "Flapp 通讯时的角色 Leader 或 Follower", + "reference_type": "DEFAULT", + "label": "Flapp通讯时角色" + }, + "Slot_storage_root_path": { + "reference": "project.variables.storage_root_path", + "value_type": "STRING", + "default_value": "/data", + "help": "联邦学习中任务存储根目录", + "reference_type": "PROJECT", + "label": "存储根目录" + }, + "Slot_image_version": { + "reference": "", + "value_type": "STRING", + "default_value": "882310f", + "help": "建议不修改,指定Pod中运行的容器镜像版本,前缀为system.variables.image_repo + '/fedlearner:'", + "reference_type": "DEFAULT", + "label": "容器镜像版本" + }, + "Slot_worker_cpu": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "8000m", + "label": "所需CPU", + "help": "所需CPU" + }, + "Slot_worker_mem": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "16Gi", + "label": "所需内存", + "help": "所需内存" + }, + "Slot_data_source": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "求交数据集名称", + "help": "求交数据集名称" + }, + "Slot_data_path": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "数据存放位置", + "help": "数据存放位置" + }, + "Slot_mode": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "train", + "label": "任务类型,train或eval", + "help": "任务类型,train或eval" + }, + "Slot_loss_type": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "logistic", + "label": "损失函数类型", + "help": "损失函数类型,logistic或mse,默认logistic" + }, + "Slot_file_ext": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": ".data", + "label": "文件后缀", + "help": "文件后缀" + }, + "Slot_file_type": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "tfrecord", + "label": "文件类型,csv或tfrecord", + "help": "文件类型,csv或tfrecord" + }, + "Slot_learning_rate": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "NUMBER", + "default_value": 0.3, + "label": "学习率", + "help": "学习率" + }, + "Slot_max_iters": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 5, + "label": "迭代数", + "help": "树的数量" + }, + "Slot_max_depth": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 3, + "label": "最大深度", + "help": "最大深度" + }, + "Slot_max_bins": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 33, + "label": "最大分箱数", + "help": "最大分箱数" + }, + "Slot_l2_regularization": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "NUMBER", + "default_value": 1, + "label": "L2惩罚系数", + "help": "L2惩罚系数" + }, + "Slot_num_parallel": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 1, + "label": "进程数量", + "help": "进程数量" + }, + "Slot_enable_packing": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": true, + "label": "是否开启优化", + "help": "是否开启优化" + }, + "Slot_ignore_fields": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "不入模的特征", + "help": "以逗号分隔如:name,age,sex" + }, + "Slot_cat_fields": { + 
"reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "类别类型特征", + "help": "类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex" + }, + "Slot_label_field": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "label", + "label": "label特征名", + "help": "label特征名" + }, + "Slot_load_model_name": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "模型任务名称", + "help": "按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models" + }, + "Slot_load_model_path": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "模型文件地址", + "help": "模型文件地址" + }, + "Slot_validation_data_path": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "STRING", + "default_value": "", + "label": "验证数据集地址", + "help": "验证数据集地址" + }, + "Slot_send_scores_to_follower": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "是否发送结果到follower", + "help": "是否发送结果到follower" + }, + "Slot_send_metrics_to_follower": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "是否发送指标到follower", + "help": "是否发送指标到follower" + }, + "Slot_verbosity": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 1, + "label": "日志输出等级", + "help": "日志输出等级" + }, + "Slot_no_data": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "Leader是否没数据", + "help": "Leader是否没数据" + }, + "Slot_verify_example_ids": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "BOOL", + "default_value": false, + "label": "是否检查example_id对齐", + "help": "是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower" + }, + "Slot_es_batch_size": { + "reference_type": "DEFAULT", + "reference": "", + "value_type": "INT", + "default_value": 10, + "label": "ES_BATCH_SIZE", + "help": "ES_BATCH_SIZE" + }, + "Slot_worker_envs": { + "reference": "", + "value_type": "LIST", + "default_value": [], + "help": "数组类型,worker pod额外的环境变量", + "reference_type": "DEFAULT", + "label": "Worker额外环境变量" + }, + "Slot_labels": { + "reference": "system.variables.labels", + "value_type": "OBJECT", + "default_value": {}, + "help": "建议不修改,格式: {}", + "reference_type": "SYSTEM", + "label": "FLAPP额外元信息" + }, + "Slot_volumes": { + "reference": "system.variables.volumes_list", + "value_type": "LIST", + "default_value": [{"persistentVolumeClaim": {"claimName": "pvc-fedlearner-default"},"name": "data"}], + "help": "建议不修改,数组类型,和volume_mounts一一对应", + "reference_type": "SYSTEM", + "label": "为Pod提供的卷" + }, + "Slot_volume_mounts": { + "reference": "system.variables.volume_mounts_list", + "value_type": "LIST", + "default_value": [{"mountPath": "/data","name": "data"}], + "help": "建议不修改,容器中卷挂载的位置,数组类型", + "reference_type": "SYSTEM", + "label": "卷挂载位置" + } +} diff --git a/web_console_v2/client/src/jobMetaDatas/tree_model_training.metayml b/web_console_v2/client/src/jobMetaDatas/tree_model_training.metayml new file mode 100644 index 000000000..5e8a031f9 --- /dev/null +++ b/web_console_v2/client/src/jobMetaDatas/tree_model_training.metayml @@ -0,0 +1,209 @@ +{ + "apiVersion": "fedlearner.k8s.io/v1alpha1", + "kind": "FLApp", + "metadata": { + "name": self.name, + "namespace": 
system.variables.namespace, + "annotations":{ + "queue": "fedlearner", + "schedulerName": "batch" + }, + "labels": ${Slot_labels} + }, + "spec": { + "role": ${Slot_role}, + "cleanPodPolicy": "All", + "peerSpecs": { + "Leader" if ${Slot_role}=="Follower" else "Follower": { + "peerURL": "fedlearner-stack-ingress-nginx-controller.default.svc:80", + "authority": project.participants[0].egress_host, + "extraHeaders": { + "x-host": "fedlearner-operator." + project.participants[0].egress_domain + } + } + }, + "flReplicaSpecs": { + "Worker": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + { + "env": system.basic_envs_list + [ + { + "name": "STORAGE_ROOT_PATH", + "value": ${Slot_storage_root_path} + }, + { + "name": "ROLE", + "value": ${Slot_role}.lower() + }, + { + "name": "APPLICATION_ID", + "value": self.name + }, + { + "name": "OUTPUT_BASE_DIR", + "value": ${Slot_storage_root_path} + "/job_output/" + self.name + }, + { + "name": "EGRESS_URL", + "value": "fedlearner-stack-ingress-nginx-controller.default.svc:80" + }, + { + "name": "EGRESS_HOST", + "value": project.participants[0].egress_host + }, + { + "name": "EGRESS_DOMAIN", + "value": project.participants[0].egress_domain + }, + { + "name": "MODE", + "value": ${Slot_mode} + }, + { + "name": "LOSS_TYPE", + "value": ${Slot_loss_type} + }, + { + "name": "DATA_SOURCE", + "value": ${Slot_data_source} + }, + { + "name": "DATA_PATH", + "value": ${Slot_data_path} + }, + { + "name": "VALIDATION_DATA_PATH", + "value": ${Slot_validation_data_path} + }, + { + "name": "NO_DATA", + "value": str(${Slot_no_data}) + }, + { + "name": "FILE_EXT", + "value": ${Slot_file_ext} + }, + { + "name": "FILE_TYPE", + "value": ${Slot_file_type} + }, + { + "name": "LOAD_MODEL_PATH", + "value": ${Slot_load_model_path} + }, + { + "name": "LOAD_MODEL_NAME", + "value": ${Slot_load_model_name} + }, + { + "name": "VERBOSITY", + "value": str(${Slot_verbosity}) + }, + { + "name": "LEARNING_RATE", + "value": str(${Slot_learning_rate}) + }, + { + "name": "MAX_ITERS", + "value": str(${Slot_max_iters}) + }, + { + "name": "MAX_DEPTH", + "value": str(${Slot_max_depth}) + }, + { + "name": "MAX_BINS", + "value": str(${Slot_max_bins}) + }, + { + "name": "L2_REGULARIZATION", + "value": str(${Slot_l2_regularization}) + }, + { + "name": "NUM_PARALLEL", + "value": str(${Slot_num_parallel}) + }, + { + "name": "VERIFY_EXAMPLE_IDS", + "value": str(${Slot_verify_example_ids}) + }, + { + "name": "IGNORE_FIELDS", + "value": ${Slot_ignore_fields} + }, + { + "name": "CAT_FIELDS", + "value": ${Slot_cat_fields} + }, + { + "name": "LABEL_FIELD", + "value": ${Slot_label_field} + }, + { + "name": "SEND_SCORES_TO_FOLLOWER", + "value": str(${Slot_send_scores_to_follower}) + }, + { + "name": "SEND_METRICS_TO_FOLLOWER", + "value": str(${Slot_send_metrics_to_follower}) + }, + { + "name": "ENABLE_PACKING", + "value": str(${Slot_enable_packing}) + }, + { + "name": "ES_BATCH_SIZE", + "value": str(${Slot_es_batch_size}) + } + ] + ${Slot_worker_envs}, + "imagePullPolicy": "IfNotPresent", + "name": "tensorflow", + "volumeMounts": ${Slot_volume_mounts}, + "image": system.variables.image_repo + "/fedlearner:" + ${Slot_image_version}, + "ports": [ + { + "containerPort": 50051, + "name": "flapp-port", + "protocol": "TCP" + }, + { + "containerPort": 50052, + "name": "tf-port", + "protocol": "TCP" + } + ], + "command": [ + "/app/deploy/scripts/wait4pair_wrapper.sh" + ], + "args": [ + "/app/deploy/scripts/trainer/run_tree_worker.sh" + ], + "resources": { + "limits": { + "cpu": 
${Slot_worker_cpu}, + "memory": ${Slot_worker_mem} + }, + "requests": { + "cpu": ${Slot_worker_cpu}, + "memory": ${Slot_worker_mem} + } + } + } + ], + "imagePullSecrets": [ + { + "name": "regcred" + } + ], + "volumes": ${Slot_volumes} + } + }, + "pair": True, + "replicas": 1 + } + } + } +} diff --git a/web_console_v2/client/src/libs/mockAdapter.ts b/web_console_v2/client/src/libs/mockAdapter.ts index e067e13a2..22a29a023 100644 --- a/web_console_v2/client/src/libs/mockAdapter.ts +++ b/web_console_v2/client/src/libs/mockAdapter.ts @@ -1,5 +1,5 @@ import axios, { AxiosRequestConfig } from 'axios'; -import { isThisRequestMockEnabled } from 'components/_base/MockDevtools/utils'; +import { isThisRequestMockEnabled } from 'components/MockDevtools/utils'; import { sleep } from 'shared/helpers'; async function axiosMockAdapter(config: AxiosRequestConfig) { @@ -15,10 +15,10 @@ async function axiosMockAdapter(config: AxiosRequestConfig) { exportKey = method; } - const path = config.url?.replace(/\/([\d])+/i, (_, id) => { + const path = config.url?.replace(/\/([\d]+)/gi, (_, id) => { config._id = id; - return '/:id'; + return '/__id__'; }); let data = @@ -41,12 +41,13 @@ async function axiosMockAdapter(config: AxiosRequestConfig) { return data; } - return Promise.reject(data.data); + return Promise.reject({ + response: data, + }); } catch (error) { console.error('[⚠️ Mock Adapter]: ', error); } } - return axios.defaults.adapter!(config); } diff --git a/web_console_v2/client/src/libs/request.ts b/web_console_v2/client/src/libs/request.ts index 93cdfcff4..72d416453 100644 --- a/web_console_v2/client/src/libs/request.ts +++ b/web_console_v2/client/src/libs/request.ts @@ -1,10 +1,17 @@ -import axios, { AxiosInstance } from 'axios'; -import { getRequestMockState, setRequestMockState } from 'components/_base/MockDevtools/utils'; +import axios, { AxiosInstance, Method } from 'axios'; +import { + getRequestMockState, + isThisRequestMockEnabled, + setRequestMockState, +} from 'components/MockDevtools/utils'; import i18n from 'i18n'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; import { removeFalsy, transformKeysToSnakeCase, binarizeBoolean } from 'shared/object'; import store from 'store2'; import { ErrorCodes } from 'typings/app'; +import PubSub from 'pubsub-js'; +import qs from 'qs'; +import { getJWTHeaders, saveBlob } from 'shared/helpers'; declare module 'axios' { interface AxiosRequestConfig { @@ -12,6 +19,10 @@ declare module 'axios' { removeFalsy?: boolean; snake_case?: boolean; _id?: ID; + disableExtractResponseData?: boolean; + } + interface AxiosInstance { + download(url: string, method?: Method, filename?: string): Promise<string | undefined>; } // AxiosResponse has a struct like { data: YourRealResponse, status, config }, @@ -37,13 +48,24 @@ export const BASE_URL = '/api'; let request: AxiosInstance; +const errorCodeToErrorMessageMap: { + [code: number]: string; +} = { + [ErrorCodes.TokenExpired]: i18n.t('error.token_expired'), + [ErrorCodes.Unauthorized]: i18n.t('error.unauthorized'), +}; + if (process.env.NODE_ENV === 'development' || process.env.REACT_APP_ENABLE_FULLY_MOCK) { // NOTE: DEAD CODES HERE // will be removed during prod building request = axios.create({ - adapter: require('./mockAdapter').default, baseURL: BASE_URL, + paramsSerializer: function (params) { + return qs.stringify(params, { + arrayFormat: 'repeat', + }); + }, }); // Mock controlling @@ -61,9 +83,22 @@ if (process.env.NODE_ENV === 'development' || process.env.REACT_APP_ENABLE_FULLY return config; }); + + 
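// When mock is enabled for this request (or REACT_APP_ENABLE_FULLY_MOCK is 'true'), + // rewrite the URL onto the '/mock/20021' prefix so the call is answered by mock data instead of the real API. + 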
request.interceptors.request.use((config) => { + if (isThisRequestMockEnabled(config) || process.env.REACT_APP_ENABLE_FULLY_MOCK === 'true') { + config.url = '/mock/20021' + config.url; + config.baseURL = ''; + } + return config; + }); } else { request = axios.create({ baseURL: BASE_URL, + paramsSerializer: function (params) { + return qs.stringify(params, { + arrayFormat: 'repeat', + }); + }, }); } @@ -71,10 +106,7 @@ if (process.env.NODE_ENV === 'development' || process.env.REACT_APP_ENABLE_FULLY * Authorization interceptor */ request.interceptors.request.use((config) => { - const token = store.get(LOCAL_STORAGE_KEYS.current_user)?.access_token; - if (token) { - config.headers.Authorization = `Bearer ${token}`; - } + config.headers = { ...config.headers, ...getJWTHeaders() }; return config; }); @@ -102,31 +134,77 @@ request.interceptors.request.use((config) => { */ request.interceptors.response.use( (response) => { + if (response?.config?.disableExtractResponseData) { + return response; + } return response.data; }, (error) => { - const response = error.response.data; + return new Promise((resolve, reject) => { + const response = error.response.data; - // Access token expired due to time fly or server reboot - if (response.code === ErrorCodes.TokenExpired) { - return Promise.reject( - new ServerError(i18n.t('error.token_expired'), ErrorCodes.TokenExpired), - ); - } - // Common errors handle - if (response && typeof response === 'object') { - const { data } = error.response; - const { details } = data; + // Access token expired, either because time passed or because the server rebooted + if (response.code === ErrorCodes.TokenExpired || response.code === ErrorCodes.Unauthorized) { + const errorMessage = response.message || errorCodeToErrorMessageMap[response.code]; + store.remove(LOCAL_STORAGE_KEYS.current_user); + store.remove(LOCAL_STORAGE_KEYS.sso_info); + // Trigger logout event + PubSub.publish('logout', { + message: errorMessage, + }); + return reject(new ServerError(errorMessage, response.code)); + } - const serverError = new ServerError( - typeof details === 'object' ? JSON.stringify(details) : details || data.message || data.msg, - error.satus, - ); + if ( + error.response && + error.response.config.responseType === 'blob' && + response.type === 'application/json' + ) { + const { data, status, statusText } = error.response; - return Promise.reject(serverError); - } + // Parse the response (Blob) as JSON + const reader = new FileReader(); + reader.addEventListener('abort', reject); + reader.addEventListener('error', reject); + reader.addEventListener('loadend', () => { + try { + const resp = JSON.parse(reader.result as string); + const { details } = resp; - return Promise.reject(error); + const serverError = new ServerError( + typeof details === 'object' + ? JSON.stringify(details) + : details || resp.message || resp.msg || statusText, + status, + resp, + ); + + return reject(serverError); + } catch (error) { + return reject(error); + } + }); + reader.readAsText(data); + } else { + // Common error handling + if (response && typeof response === 'object') { + const { data, status, statusText } = error.response; + const { details } = data; + + const serverError = new ServerError( + typeof details === 'object' + ? 
JSON.stringify(details) + : details || data.message || data.msg || statusText, + status, + data, + ); + + return reject(serverError); + } + + return reject(error); + } + }); }, ); @@ -164,4 +242,20 @@ request.interceptors.response.use( }, ); +request.download = async function (url, method = 'GET', filename = '') { + const { headers, data, status } = await request(url, { + method, + responseType: 'blob', + disableExtractResponseData: true, + }); + if (status === 204) { + return i18n.t('message_no_file'); + } + + const contentDisposition = + headers?.['content-disposition'] ?? headers?.['Content-Disposition'] ?? ''; + const finalFilename = filename || (contentDisposition?.split('filename=')[1] ?? ''); + saveBlob(data, finalFilename); +}; + export default request; diff --git a/web_console_v2/client/src/services/algorithm.ts b/web_console_v2/client/src/services/algorithm.ts new file mode 100644 index 000000000..ebd0eb6b9 --- /dev/null +++ b/web_console_v2/client/src/services/algorithm.ts @@ -0,0 +1,308 @@ +import request, { BASE_URL } from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { + FileTreeNode, + FileContent, + FileQueryParams, + UploadFileQueryParams, + UpdateFileQueryParams, + RenameFileQueryParams, + DeleteFileQueryParams, + AlgorithmProject, + Algorithm, +} from 'typings/algorithm'; + +export function fetchAlgorithmProjectFileTreeList(id: ID): APIResponse<FileTreeNode[]> { + return request.get(`/v2/algorithm_projects/${id}/tree`).then((resp) => { + // 204 No Content + if (!resp) { + return { + data: [], + }; + } + return resp; + }); +} +export function fetchAlgorithmProjectFileContentDetail( + id: ID, + params: FileQueryParams, +): APIResponse<FileContent> { + return request.get(`/v2/algorithm_projects/${id}/files`, { + params, + }); +} + +export function uploadAlgorithmProjectFileContent( + id: ID, + payload: UploadFileQueryParams, +): APIResponse<Omit<FileContent, 'content'>> { + const formData = new FormData(); + + Object.keys(payload).forEach((key) => { + const value = (payload as any)[key]; + formData.append(key, value); + }); + + return request.post(`/v2/algorithm_projects/${id}/files`, formData); +} +export function createOrUpdateAlgorithmProjectFileContent( + id: ID, + payload: UpdateFileQueryParams, +): APIResponse<FileContent> { + const formData = new FormData(); + + Object.keys(payload).forEach((key) => { + const value = (payload as any)[key]; + formData.append(key, value); + }); + + return request.put(`/v2/algorithm_projects/${id}/files`, formData); +} +export function renameAlgorithmProjectFileContent( + id: ID, + payload: RenameFileQueryParams, +): Promise<null> { + return request.patch(`/v2/algorithm_projects/${id}/files`, payload); +} +export function deleteAlgorithmProjectFileContent( + id: ID, + params: DeleteFileQueryParams, +): Promise<null> { + return request.delete(`/v2/algorithm_projects/${id}/files`, { + params, + }); +} + +export function fetchAlgorithmFileTreeList(id?: ID): APIResponse<FileTreeNode[]> { + return request.get(`/v2/algorithms/${id}/tree`).then((resp) => { + // 204 No Content + if (!resp) { + return { + data: [], + }; + } + return resp; + }); +} + +export function fetchAlgorithmFileContentDetail( + id: ID, + params: FileQueryParams, +): APIResponse<FileContent> { + return request.get(`/v2/algorithms/${id}/files`, { + params, + }); +} + +export function fetchPendingAlgorithmFileTreeList( + projId?: ID, + id?: ID, +): APIResponse<FileTreeNode[]> { + return 
request.get(`/v2/projects/${projId}/pending_algorithms/${id}/tree`).then((resp) => { + // 204 No Content + if (!resp) { + return { + data: [], + }; + } + return resp; + }); +} + +export function fetchPendingAlgorithmFileContentDetail( + projId?: ID, + id?: ID, + params?: FileQueryParams, +): APIResponse<FileContent> { + return request.get(`/v2/projects/${projId}/pending_algorithms/${id}/files`, { + params, + }); +} + +export function fetchProjectList( + projectId?: ID, + params?: Record<string, any> | string, +): APIResponse<AlgorithmProject[]> { + if (!projectId && projectId !== 0) { + return Promise.reject(new Error('请选择工作区')); + } + return request.get(`/v2/projects/${projectId}/algorithm_projects`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +// Fetch the list of algorithms sent over from the peer side +export function fetchProjectPendingList(projectId?: ID): APIResponse<Algorithm[]> { + if (!projectId && projectId !== 0) { + return Promise.reject(new Error('请选择工作区')); + } + return request.get(`/v2/projects/${projectId}/pending_algorithms`); +} + +export function createProject( + platformProjId: ID, + payload: FormData, +): APIResponse<AlgorithmProject> { + return request.post(`/v2/projects/${platformProjId}/algorithm_projects`, payload); +} + +export function patchProject( + projectId: ID, + payload: Partial<AlgorithmProject>, +): APIResponse<AlgorithmProject> { + return request.patch(`/v2/algorithm_projects/${projectId}`, payload); +} + +export function fetchProjectDetail(id: ID): APIResponse<AlgorithmProject> { + return request.get(`/v2/algorithm_projects/${id}`); +} + +export function getAlgorithmDetail(id: ID): APIResponse<Algorithm> { + return request.get(`/v2/algorithms/${id}`); +} + +export function postPublishAlgorithm(id: ID, comment?: string): APIResponse<AlgorithmProject> { + return request.post(`/v2/algorithm_projects/${id}:publish`, { comment }); +} + +export function postSendAlgorithm(id: ID, comment?: string): APIResponse<AlgorithmProject> { + return request.post(`/v2/algorithms/${id}:send`, { + comment, + }); +} + +export function postAcceptAlgorithm( + projectId: ID, + algorithmProjId: ID, + payload: { + name: string; + comment?: string; + }, +) { + return request.post( + `/v2/projects/${projectId}/pending_algorithms/${algorithmProjId}:accept`, + payload, + ); +} + +export function fetchAlgorithmList( + project_id?: ID, + params?: { algo_project_id: ID }, +): APIResponse<Algorithm[]> { + return request.get(`/v2/projects/${project_id}/algorithms`, { params }); +} + +export function deleteAlgorithm(id: ID) { + return request.delete(`/v2/algorithms/${id}`); +} + +export function deleteAlgorithmProject(id: ID) { + return request.delete(`/v2/algorithm_projects/${id}`); +} + +export function getFullAlgorithmProjectDownloadHref(algorithmProjectId: ID): string { + return `${window.location.origin}${BASE_URL}/v2/algorithm_projects/${algorithmProjectId}?download=true`; +} +export function getFullAlgorithmDownloadHref(algorithmId: ID): string { + return `${window.location.origin}${BASE_URL}/v2/algorithms/${algorithmId}?download=true`; +} + +export function updatePresetAlgorithm(payload?: any): APIResponse<AlgorithmProject[]> { + return request.post(`/v2/preset_algorithms:update`, payload); +} + +export function releaseAlgorithmProject( + projectId?: ID, + algorithmId?: ID, + params?: { comment: string }, +): Promise<null> { + return request.post(`/v2/projects/${projectId}/algorithms/${algorithmId}:release`, { params }); +} + +export function publishAlgorithm( + projectId?: ID, + algorithmId?: ID, + 
params?: { comment: string }, +): Promise<null> { + return request.post(`/v2/projects/${projectId}/algorithms/${algorithmId}:publish`, { params }); +} + +export function unpublishAlgorithm(projectId?: ID, algorithmId?: ID): Promise<null> { + return request.post(`/v2/projects/${projectId}/algorithms/${algorithmId}:unpublish`); +} + +export function fetchPeerAlgorithmProjectList( + projectId?: ID, + participantId?: ID, + params?: Record<string, any> | string, +): APIResponse<AlgorithmProject[]> { + return request.get(`/v2/projects/${projectId}/participants/${participantId}/algorithm_projects`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchPeerAlgorithmProjectById( + projectId?: ID, + participantId?: ID, + algorithm_project_uuid?: ID, +): APIResponse<AlgorithmProject> { + return request.get( + `/v2/projects/${projectId}/participants/${participantId}/algorithm_projects/${algorithm_project_uuid}`, + ); +} + +export function fetchPeerAlgorithmList( + projectId?: ID, + participantId?: ID, + params?: { algorithm_project_uuid: ID }, +): APIResponse<Algorithm[]> { + return request.get(`/v2/projects/${projectId}/participants/${participantId}/algorithms`, { + params, + }); +} + +export function fetchPeerAlgorithmDetail( + projectId?: ID, + participantId?: ID, + uuid?: ID, +): APIResponse<Algorithm> { + return request.get(`/v2/projects/${projectId}/participants/${participantId}/algorithms/${uuid}`); +} + +export function fetchPeerAlgorithmFileTreeList( + projectId?: ID, + participantId?: ID, + uuid?: ID, +): APIResponse<FileTreeNode[]> { + return request + .get(`/v2/projects/${projectId}/participants/${participantId}/algorithms/${uuid}/tree`) + .then((resp) => { + // 204 No Content + if (!resp) { + return { + data: [], + }; + } + return resp; + }); +} + +export function fetchPeerAlgorithmProjectFileContentDetail( + projectId?: ID, + participantId?: ID, + uuid?: ID, + params?: FileQueryParams, +): APIResponse<FileContent> { + return request.get( + `/v2/projects/${projectId}/participants/${participantId}/algorithms/${uuid}/files`, + { params }, + ); +} + +export function fetchAlgorithmByUuid(projectId: ID, algorithmUuid: ID): APIResponse<Algorithm> { + return request.get(`/v2/projects/${projectId}/algorithms/${algorithmUuid}`); +} diff --git a/web_console_v2/client/src/services/audit.ts b/web_console_v2/client/src/services/audit.ts new file mode 100644 index 000000000..e2cc018db --- /dev/null +++ b/web_console_v2/client/src/services/audit.ts @@ -0,0 +1,15 @@ +import request from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { Audit, AuditQueryParams, AuditDeleteParams } from 'typings/audit'; + +export function fetchAuditList(params?: AuditQueryParams): APIResponse<Audit[]> { + return request.get('/v2/events', { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function deleteAudit(params: AuditDeleteParams) { + return request.delete('/v2/events', { params, removeFalsy: true, snake_case: true }); +} diff --git a/web_console_v2/client/src/services/cleanup.ts b/web_console_v2/client/src/services/cleanup.ts new file mode 100644 index 000000000..258122059 --- /dev/null +++ b/web_console_v2/client/src/services/cleanup.ts @@ -0,0 +1,15 @@ +import request from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { Cleanup, CleanupQueryParams } from 'typings/cleanup'; + +export function fetchCleanupList(params?: CleanupQueryParams): APIResponse<Cleanup[]> { + return request(`/v2/cleanups`, { params, snake_case: true 
}); +} + +export function fetchCleanupById(cleanup_id?: ID): APIResponse<Cleanup> { + return request(`/v2/cleanups/${cleanup_id}`); +} + +export function postCleanupState(cleanup_id?: ID): APIResponse<Cleanup> { + return request.post(`/v2/cleanups/${cleanup_id}:cancel`); +} diff --git a/web_console_v2/client/src/services/composer.ts b/web_console_v2/client/src/services/composer.ts new file mode 100644 index 000000000..28859ec1d --- /dev/null +++ b/web_console_v2/client/src/services/composer.ts @@ -0,0 +1,29 @@ +import request from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { ItemStatus, SchedulerItem, SchedulerRunner, SchedulerQueryParams } from 'typings/composer'; + +export function fetchSchedulerItemList( + params?: SchedulerQueryParams, +): APIResponse<SchedulerItem[]> { + return request(`/v2/scheduler_items`, { params, snake_case: true }); +} + +export function fetchSchedulerRunnerList( + params?: SchedulerQueryParams, +): APIResponse<SchedulerRunner[]> { + return request(`/v2/scheduler_runners`, { params, snake_case: true }); +} + +export function fetchRunnersByItemId( + item_id: ID, + params?: SchedulerQueryParams, +): APIResponse<SchedulerRunner[]> { + return request(`/v2/scheduler_items/${item_id}`, { params, snake_case: true }); +} + +export function patchEditItemState( + item_id: ID, + status: ItemStatus, +): APIResponse<SchedulerRunner[]> { + return request.patch(`/v2/scheduler_items/${item_id}`, { status }); +} diff --git a/web_console_v2/client/src/services/dataset.ts b/web_console_v2/client/src/services/dataset.ts index f5e414cde..0b1a7b6b2 100644 --- a/web_console_v2/client/src/services/dataset.ts +++ b/web_console_v2/client/src/services/dataset.ts @@ -3,28 +3,330 @@ import { DataBatchImportPayload, Dataset, DatasetCreatePayload, + DatasetEditPayload, FileToImport, + IntersectionDataset, + PreviewData, + FeatureMetric, + ExportInfo, + DataSource, + DataSourceCreatePayload, + DataSourceEditPayload, + DataSourceCheckConnectionPayload, + ParticipantDataset, + DataJobBackEndType, + DataJobVariable, + DatasetJobCreatePayload, + ProcessedDatasetCreatePayload, + DatasetJob, + DatasetJobListItem, + DatasetJobStop, + DatasetLedger, + ExportDataset, + DatasetStateFront, + DataBatchV2, + DatasetJobStage, + FileTreeNode, } from 'typings/dataset'; +import { PageMeta, APIResponse } from '../typings/app'; export function fetchDatasetList(params?: { - keyword?: string; - project?: number; -}): Promise<{ data: Dataset[] }> { + order_by?: string; + filter?: string; + state_frontend?: DatasetStateFront[]; + page_size?: number; + page?: number; + dataset_job_kind?: DataJobBackEndType; + cron_interval?: Array<'DAYS' | 'HOURS'>; +}): Promise<{ data: Dataset[]; page_meta?: PageMeta }> { return request('/v2/datasets', { params, removeFalsy: true, snake_case: true }); } +export function fetchParticipantDatasetList( + id: ID, + params?: any, +): Promise<{ data: ParticipantDataset[]; page_meta?: PageMeta }> { + return request(`/v2/project/${id}/participant_datasets`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function authorizeDataset(id: ID) { + return request.post(`/v2/datasets/${id}:authorize`); +} + +export function cancelAuthorizeDataset(id: ID) { + return request.delete(`/v2/datasets/${id}:authorize`); +} + +export function fetchDatasetFlushAuthStatus(id?: ID) { + return request.post(`/v2/datasets/${id}:flush_auth_status`); +} + +export function fetchDatasetDetail(id?: ID): Promise<{ data: Dataset }> { + return request(`/v2/datasets/${id}`); 
+} + export function createDataset(payload: DatasetCreatePayload) { return request.post('/v2/datasets', payload); } +export function editDataset(id: ID, payload: DatasetEditPayload) { + return request.patch(`/v2/datasets/${id}`, payload); +} + export function startToImportDataBatch(id: ID, payload: DataBatchImportPayload) { return request.post(`/v2/datasets/${id}/batches`, payload); } +export function fetchDataBatchs( + id: ID, + params = {}, +): Promise<{ data: DataBatchV2[]; page_meta?: PageMeta }> { + return request(`/v2/datasets/${id}/batches`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchDataBatchById(dataset_id: ID, id: ID): Promise<{ data: DataBatchV2 }> { + return request(`/v2/datasets/${dataset_id}/batches/${id}`); +} + +export function rerunDatasetBatchById(dataset_id: ID, data_batch_id: ID, payload: any) { + return request.post(`/v2/datasets/${dataset_id}/batches/${data_batch_id}:rerun`, payload); +} + export function fetchFileList(params?: { directory?: string }): Promise<{ data: FileToImport[] }> { return request('/v2/files', { params, removeFalsy: true, snake_case: true }); } -export function deleteDataset(id: ID) { - return request.delete(`/v2/datasets/${id}`); +export function deleteDataset( + id: ID, +): Promise<{ + code?: number; + message?: string; +}> { + return request.delete(`/v2/datasets/${id}`).catch((error) => { + // If the HTTP response status code is 409, the delete failed + if (error.code === 409) { + return Promise.resolve(error.extra); + } + return Promise.reject(error); + }); +} + +export function stopDatasetStreaming(project_id: ID, dataset_job_id: ID) { + return request.post(`/v2/projects/${project_id}/dataset_jobs/${dataset_job_id}:stop_scheduler`); +} + +// TODO: this API will be removed after the ModelCenter module is updated; do not use it! 
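+// It only lists intersection datasets (/v2/intersection_datasets) for the legacy ModelCenter pages.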
+export function fetchIntersectionDatasetList(params?: { + kind?: number; + projectId?: ID; + datasetId?: ID; +}): Promise<{ data: IntersectionDataset[] }> { + return request('/v2/intersection_datasets', { params, removeFalsy: true, snake_case: true }); +} + +export function fetchDatasetPreviewData(id: ID): Promise<{ data: PreviewData }> { + return request(`/v2/datasets/${id}/preview`); +} + +export function fetchDataBatchPreviewData(id: ID, batch_id: ID): Promise<{ data: PreviewData }> { + return request(`/v2/datasets/${id}/preview`, { + params: { + batch_id, + }, + }); +} + +export function analyzeDataBatch(id: ID, batch_id: ID, payload: any): Promise<any> { + return request.post(`/v2/datasets/${id}/batches/${batch_id}:analyze`, payload); +} + +export async function fetchFeatureInfo( + id: ID, + batch_id: ID, + featKey: string, +): Promise<{ + data: FeatureMetric; +}> { + return request(`/v2/datasets/${id}/batches/${batch_id}/feature_metrics`, { + params: { + name: featKey, + }, + }); +} + +export function fetchDatasetExportInfo(id: ID, params = {}): Promise<{ data: ExportInfo[] }> { + return request(`/v2/datasets/${id}/exports`, params); +} + +export function exportDataset(id: ID, payload: any): Promise<{ data: ExportDataset }> { + return request.post(`/v2/datasets/${id}:export`, payload); +} + +export function postPublishDataset(id: ID, payload: { value?: number }) { + return request.post(`/v2/datasets/${id}:publish`, payload); +} + +export function unpublishDataset(id: ID) { + return request.delete(`/v2/datasets/${id}:publish`); +} + +export function fetchDataSourceList(params?: { + projectId?: ID; + keyword?: string; +}): Promise<{ data: DataSource[] }> { + return request('/v2/data_sources', { params, removeFalsy: true, snake_case: true }); +} +export function createDataSource(payload: DataSourceCreatePayload): Promise<{ data: DataSource }> { + return request.post('/v2/data_sources', payload); +} +export function updateDataSource(payload: DataSourceEditPayload): Promise<{ data: DataSource }> { + return request.put('/v2/data_sources', payload); +} + +export function fetchDataSourceDetail(params: { id: ID }): Promise<{ data: DataSource }> { + const { id } = params; + return request(`/v2/data_sources/${id}`); +} + +export function checkDataSourceConnection( + payload: DataSourceCheckConnectionPayload, +): Promise<{ + data: { + message: string; + file_names: string[]; + extra_nums: number; + }; +}> { + return request.post('/v2/data_sources:check_connection', payload); +} +export function deleteDataSource(dataSourceId: ID) { + return request.delete(`/v2/data_sources/${dataSourceId}`); +} + +export function fetchDataSourceFileTreeList(id: ID): APIResponse<FileTreeNode> { + return request.get(`/v2/data_sources/${id}/tree`); +} + +export function fetchDataJobVariableDetail( + dataJobType: DataJobBackEndType, + params?: any, +): Promise<{ + data: { + is_federated: boolean; + variables: DataJobVariable[]; + }; +}> { + return request(`/v2/dataset_job_definitions/${dataJobType}`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchDatasetJobList( + project_id: ID, + params?: { + input_dataset_id?: ID; + dataset_job_kind?: string; + filter?: string; + order_by?: string; + page?: number; + page_size?: number; + }, +): Promise<{ data: DatasetJobListItem[]; page_meta?: PageMeta }> { + return request(`/v2/projects/${project_id}/dataset_jobs`, { + params: { + ...(params ?? 
{}), + }, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchChildrenDatasetList( + dataset_id: ID, +): Promise<{ data: DatasetJobListItem[]; page_meta?: PageMeta }> { + return request(`/v2/datasets/${dataset_id}/children_datasets`); +} + +export function createProcessedDataset( + project_id: ID, + payload: ProcessedDatasetCreatePayload, +): Promise<{ data: any }> { + return request.post(`/v2/projects/${project_id}/dataset_jobs`, payload); +} + +export function createDatasetJobs( + project_id: ID, + payload: DatasetJobCreatePayload, +): Promise<{ data: any }> { + return request.post(`/v2/projects/${project_id}/dataset_jobs`, payload); +} + +export function fetchDatasetJobDetail( + project_id: ID, + id: ID, + params?: any, +): Promise<{ data: DatasetJob }> { + return request(`/v2/projects/${project_id}/dataset_jobs/${id}`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchDatasetJobStageList( + project_id: ID, + job_id: ID, + params = {}, +): Promise<{ data: DatasetJobStage[]; page_meta?: PageMeta }> { + return request(`/v2/projects/${project_id}/dataset_jobs/${job_id}/dataset_job_stages`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchDatasetJobStageById( + projectId: ID, + datasetJobId: ID, + datasetJobStageId: ID, +): Promise<{ data: DatasetJobStage }> { + return request( + `/v2/projects/${projectId}/dataset_jobs/${datasetJobId}/dataset_job_stages/${datasetJobStageId}`, + ); +} + +export function stopDatasetJob(project_id: ID, job_id: ID): Promise<DatasetJobStop> { + return request.post(`/v2/projects/${project_id}/dataset_jobs/${job_id}:stop`); +} + +export function deleteDatasetJob( + project_id: ID, + job_id: ID, +): Promise<{ + code?: number; + message?: { + [job_id: string]: string[]; + }; +}> { + return request.delete(`/v2/projects/${project_id}/dataset_jobs/${job_id}`).catch((error) => { + // An HTTP 409 response means the deletion was rejected; resolve with the server-provided detail instead of throwing + if (error.code === 409) { + return Promise.resolve(error.extra); + } + return Promise.reject(error); + }); +} + +export function fetchDatasetLedger(dataset_id: ID): Promise<{ data: DatasetLedger }> { + return request(`/v2/datasets/${dataset_id}/ledger`); } diff --git a/web_console_v2/client/src/services/flag.ts b/web_console_v2/client/src/services/flag.ts new file mode 100644 index 000000000..ea475d437 --- /dev/null +++ b/web_console_v2/client/src/services/flag.ts @@ -0,0 +1,10 @@ +import request from 'libs/request'; +import { Flag } from 'typings/flag'; + +export function fetchFlagList(): Promise<{ data: Flag }> { + return request('/v2/flags'); +} + +export function fetchParticipantFlagById(participantId: ID): Promise<{ data: Flag }> { + return request(`/v2/participants/${participantId}/flags`); +} diff --git a/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/files/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/files/index.ts new file mode 100644 index 000000000..31d175035 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/files/index.ts @@ -0,0 +1,80 @@
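+import { AxiosRequestConfig } from 'axios';
+import { FileContent } from 'typings/algorithm';
+import { getFileInfoByFilePath } from 'shared/file';
+
+import { leaderPythonFile, followerPythonFile } from 'services/mocks/v2/algorithms/examples';
+
+// Mock convention (inferred from the handlers in this patch): the default export
+// serves GET for the mocked route; named exports (post, put, patch, and DELETE,
+// upper-cased because `delete` is a reserved word) serve the other verbs. An
+// `__id__` directory segment matches any id, which the mock middleware is
+// assumed to expose on the request config as `config._id`. Each handler returns
+// a `{ data, status }` pair shaped like the real response envelope.
+const get = (config: AxiosRequestConfig) => {
+  const { parentPath, fileName } = getFileInfoByFilePath(config.params.path);
+  let finalFile: 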
FileContent = { + path: parentPath, + filename: fileName, + content: `I am ${config.params.path}`, + }; + + switch (config.params.path) { + case 'leader/main.py': + finalFile = leaderPythonFile; + break; + case 'follower/main.py': + finalFile = followerPythonFile; + break; + default: + break; + } + + return { + data: { + data: finalFile, + }, + status: 200, + }; +}; + +export const put = (config: AxiosRequestConfig) => { + const path = config.data.get('path'); + const fileName = config.data.get('filename'); + const content = config.data.get('file'); + const finalFile: FileContent = { + path: path, + filename: fileName, + content: content, + }; + + return { + data: { + data: finalFile, + }, + status: 200, + }; +}; + +export const post = (config: AxiosRequestConfig) => { + const path = config.data.get('path'); + const fileName = config.data.get('filename'); + const finalFile: Omit<FileContent, 'content'> = { + path: path, + filename: fileName, + }; + + return { + data: { + data: finalFile, + }, + status: 200, + }; +}; + +export const patch = { + data: { + data: null, + }, + status: 200, +}; +export const DELETE = { + data: { + data: null, + }, + status: 200, +}; +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/index.ts new file mode 100644 index 000000000..24a62ad00 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/index.ts @@ -0,0 +1,10 @@ +import { normalAlgorithmProject } from 'services/mocks/v2/algorithm_projects/examples'; + +const get = () => { + return { + data: { data: normalAlgorithmProject }, + status: 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/tree/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/tree/index.ts new file mode 100644 index 000000000..2e221716f --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/__id__/tree/index.ts @@ -0,0 +1,10 @@ +import { fileTree } from 'services/mocks/v2/algorithms/examples'; + +const get = { + data: { + data: fileTree, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithm_projects/examples.ts b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/examples.ts new file mode 100644 index 000000000..d6bc9dd3d --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/examples.ts @@ -0,0 +1,145 @@ +import { + FileTreeNode, + FileContent, + AlgorithmProject, + EnumAlgorithmProjectType, + AlgorithmVersionStatus, +} from 'typings/algorithm'; + +export const fileTree: FileTreeNode[] = [ + { + filename: 'follower', + path: 'follower', + size: 96, + mtime: 1637141275, + is_directory: true, + files: [ + { + filename: 'main.py', + path: 'follower/main.py', + mtime: 1637141275, + size: 0, + is_directory: false, + files: [], + }, + ], + }, + { + filename: 'leader', + path: 'leader', + size: 96, + mtime: 1637141275, + is_directory: true, + files: [ + { + filename: 'main.py', + path: 'leader/main.py', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + ], + }, +]; + +export const normalAlgorithmProject: AlgorithmProject = { + id: 3, + name: 'e2e test', + project_id: 1, + latest_version: 0, + type: 'NN_VERTICAL' as EnumAlgorithmProjectType, + source: 'USER', + publish_status: 'UNPUBLISHED', + release_status: 'RELEASED', + creator_id: 
null, + username: 'abcabc', + participant_id: null, + path: '/data/algorithm_projects/e2e-test-20211206_115929-57da1', + parameter: { + variables: [ + { + name: 'aaa', + required: true, + value: '', + display_name: '', + comment: '', + value_type: 'STRING', + }, + { + name: 'bbbb', + required: true, + value: '', + display_name: '', + comment: '', + value_type: 'STRING', + }, + ], + }, + comment: 'algorithm for end to end test', + created_at: 1638791969, + updated_at: 1638864824, + deleted_at: null, + algorithms: [ + { + id: 3, + name: 'e2e test', + project_id: 1, + version: 1, + type: 'NN_VERTICAL' as EnumAlgorithmProjectType, + source: 'USER', + algorithm_project_id: 3, + creator_id: null, + username: 'abcabc', + participant_id: null, + path: '/data/algorithms/e2e-test-v1-20211206_115929-2db0f', + status: AlgorithmVersionStatus.PUBLISHED, + parameter: { + variables: [ + { + name: 'aaa', + required: true, + value: '', + display_name: '', + comment: '', + value_type: 'STRING', + }, + { + name: 'bbbb', + required: true, + value: '', + display_name: '', + comment: '', + value_type: 'STRING', + }, + { + name: 'cccc', + required: false, + value: '', + display_name: '', + comment: '', + value_type: 'STRING', + }, + ], + }, + favorite: false, + comment: null, + created_at: 1638791969, + updated_at: 1638791969, + deleted_at: null, + }, + ], +}; + +export const followerPythonFile: FileContent = { + path: 'follower', + filename: 'main.py', + content: + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, 
initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", +}; +export const leaderPythonFile: FileContent = { + path: 'leader', + filename: 'main.py', + content: + "# coding: utf-8\n# encoding=utf8\nimport logging\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\nimport os\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'y': tf.constant(0, shape=[1])}\n return features, labels\n \n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['embedding'] = tf.placeholder(dtype=tf.float32, shape=[1, 16], name='embedding')\n receiver_tensors = {\n 'embedding': features['embedding']\n }\n return tf.estimator.export.ServingInputReceiver(\n features, receiver_tensors)\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n xavier_initializer = tf.glorot_normal_initializer()\n\n fc1_size = 16\n with tf.variable_scope('follower'):\n w1f = tf.get_variable('w1f', shape=[\n fc1_size, 1], dtype=tf.float32, initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable(\n 'b1f', shape=[1], dtype=tf.float32, initializer=tf.zeros_initializer())\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding = model.recv('embedding', tf.float32, require_grad=True)\n else:\n embedding = features['embedding']\n \n logits = tf.nn.bias_add(tf.matmul(embedding, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['y'], tf.float32)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n print('==============================================================')\n print(tf.shape(y))\n print(tf.shape(pred))\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=[logging_hook])\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", +}; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithm_projects/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/index.ts new file mode 100644 index 000000000..4b5422a15 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithm_projects/index.ts @@ -0,0 +1,14 @@ +import { normalAlgorithmProject } from 'services/mocks/v2/algorithm_projects/examples'; + +const get = { + data: { + data: [normalAlgorithmProject], + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/files/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/files/index.ts 
new file mode 100644 index 000000000..57c23c1fa --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/files/index.ts @@ -0,0 +1,33 @@ +import { AxiosRequestConfig } from 'axios'; +import { FileContent } from 'typings/algorithm'; +import { getFileInfoByFilePath } from 'shared/file'; + +import { leaderPythonFile, followerPythonFile } from 'services/mocks/v2/algorithms/examples'; + +const get = (config: AxiosRequestConfig) => { + const { parentPath, fileName } = getFileInfoByFilePath(config.params.path); + let finalFile: FileContent = { + path: parentPath, + filename: fileName, + content: `I am ${config.params.path}`, + }; + switch (config.params.path) { + case 'leader/main.py': + finalFile = leaderPythonFile; + break; + case 'follower/main.py': + finalFile = followerPythonFile; + break; + default: + break; + } + + return { + data: { + data: finalFile, + }, + status: 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/index.ts new file mode 100644 index 000000000..d5eb6bd34 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/index.ts @@ -0,0 +1,43 @@ +import { AxiosRequestConfig } from 'axios'; + +const get = (config: AxiosRequestConfig) => { + return { + data: { + data: { + id: config._id, + uuid: 'udea6a8a478404f85b17', + name: 'hang-e2e-test-122323', + project_id: 31, + version: 3, + type: 'NN_VERTICAL', + source: 'USER', + algorithm_project_id: 73, + username: 'admin', + participant_id: null, + path: + 'hdfs:///trimmed', + parameter: { + variables: [ + { + name: 'lr', + value: '0.1', + required: true, + display_name: '', + comment: '', + value_type: 'STRING', + }, + ], + }, + favorite: false, + comment: 'aaa', + created_at: 1641463720, + updated_at: 1649310452, + deleted_at: null, + participant_name: null, + }, + }, + status: config._id === 110 ? 
500 : 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/tree/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/tree/index.ts new file mode 100644 index 000000000..2e221716f --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithms/__id__/tree/index.ts @@ -0,0 +1,10 @@ +import { fileTree } from 'services/mocks/v2/algorithms/examples'; + +const get = { + data: { + data: fileTree, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithms/examples.ts b/web_console_v2/client/src/services/mocks/v2/algorithms/examples.ts new file mode 100644 index 000000000..8eb414008 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithms/examples.ts @@ -0,0 +1,59 @@ +import { FileTreeNode, FileContent } from 'typings/algorithm'; + +export const fileTree: FileTreeNode[] = [ + { + filename: 'follower', + path: 'follower', + size: 96, + mtime: 1637141275, + is_directory: true, + files: [ + { + filename: 'main.py', + path: 'follower/main.py', + mtime: 1637141275, + size: 0, + is_directory: false, + files: [], + }, + ], + }, + { + filename: 'leader', + path: 'leader', + size: 96, + mtime: 1637141275, + is_directory: true, + files: [ + { + filename: 'main.py', + path: 'leader/main.py', + size: 17, + mtime: 1637141275, + is_directory: false, + files: [], + }, + ], + }, + { + filename: 'test.py', + path: 'test.py', + mtime: 1637141275, + size: 0, + is_directory: false, + files: [], + }, +]; + +export const followerPythonFile: FileContent = { + path: 'follower', + filename: 'main.py', + content: + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, 
initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", +}; +export const leaderPythonFile: FileContent = { + path: 'leader', + filename: 'main.py', + content: + "# coding: utf-8\n# encoding=utf8\nimport logging\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\nimport os\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'y': tf.constant(0, shape=[1])}\n return features, labels\n \n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['embedding'] = tf.placeholder(dtype=tf.float32, shape=[1, 16], name='embedding')\n receiver_tensors = {\n 'embedding': features['embedding']\n }\n return tf.estimator.export.ServingInputReceiver(\n features, receiver_tensors)\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n xavier_initializer = tf.glorot_normal_initializer()\n\n fc1_size = 16\n with tf.variable_scope('follower'):\n w1f = tf.get_variable('w1f', shape=[\n fc1_size, 1], dtype=tf.float32, initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable(\n 'b1f', shape=[1], dtype=tf.float32, initializer=tf.zeros_initializer())\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding = model.recv('embedding', tf.float32, require_grad=True)\n else:\n embedding = features['embedding']\n \n logits = tf.nn.bias_add(tf.matmul(embedding, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['y'], tf.float32)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n print('==============================================================')\n print(tf.shape(y))\n print(tf.shape(pred))\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=[logging_hook])\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", +}; diff --git a/web_console_v2/client/src/services/mocks/v2/algorithms/index.ts b/web_console_v2/client/src/services/mocks/v2/algorithms/index.ts new file mode 100644 index 000000000..988f41c57 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/algorithms/index.ts @@ -0,0 +1,39 @@ +import { FakeAlgorithm } from 'typings/modelCenter'; + +const list: FakeAlgorithm[] = new Array(4).fill(undefined).map((_, index) => { + return { + id: index + 1, + name: 'mock假算法名称' + (index + 1), + value: JSON.stringify({ + 'owner.py': '# coding: utf-8\n', + [`id_${index + 1}.py'`]: '# coding: utf-8\n', + 'leader/main.py': + "# coding: utf-8\nimport logging\nimport datetime\n\nimport tensorflow.compat.v1 as tf \nimport fedlearner.trainer as flt \nimport os\n\nfrom slot_2_bucket 
import slot_2_bucket\n\n_SLOT_2_IDX = {pair[0]: i for i, pair in enumerate(slot_2_bucket)}\n_SLOT_2_BUCKET = slot_2_bucket\nROLE = \"leader\"\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\nargs.sparse_estimator = True\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map[\"example_id\"] = tf.FixedLenFeature([], tf.string)\n feature_map['fids'] = tf.VarLenFeature(tf.int64)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n # labels = {'y': features.pop('y')}\n labels = {'y': tf.constant(0)}\n return features, labels\n dataset = dataset.map(map_func=parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n # feature_map = {\"fids\": tf.VarLenFeature(tf.int64)}\n # feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # record_batch = dataset.make_batch_iterator().get_next()\n # features = tf.parse_example(record_batch, features=feature_map)\n # return features, None\n\ndef raw_serving_input_receiver_fn():\n feature_map = {\n 'fids_indices': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_indices'),\n 'fids_values': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_values'),\n 'fids_dense_shape': tf.placeholder(dtype=tf.int64, shape=[None], name='fids_dense_shape')\n }\n return tf.estimator.export.ServingInputReceiver(\n feature_map, feature_map)\n\n\ndef model_fn(model, features, labels, mode):\n\n def sum_pooling(embeddings, slots):\n slot_embeddings = []\n for slot in slots:\n slot_embeddings.append(embeddings[_SLOT_2_IDX[slot]])\n if len(slot_embeddings) == 1:\n return slot_embeddings[0]\n return tf.add_n(slot_embeddings)\n\n global_step = tf.train.get_or_create_global_step()\n num_slot, embed_size = len(_SLOT_2_BUCKET), 8\n xavier_initializer = tf.glorot_normal_initializer()\n\n flt.feature.FeatureSlot.set_default_bias_initializer(\n tf.zeros_initializer())\n flt.feature.FeatureSlot.set_default_vec_initializer(\n tf.random_uniform_initializer(-0.0078125, 0.0078125))\n flt.feature.FeatureSlot.set_default_bias_optimizer(\n tf.train.FtrlOptimizer(learning_rate=0.01))\n flt.feature.FeatureSlot.set_default_vec_optimizer(\n tf.train.AdagradOptimizer(learning_rate=0.01))\n\n # deal with input cols\n categorical_embed = []\n num_slot, embed_dim = len(_SLOT_2_BUCKET), 8\n\n with tf.variable_scope(\"leader\"):\n for slot, bucket_size in _SLOT_2_BUCKET:\n fs = model.add_feature_slot(slot, bucket_size)\n fc = model.add_feature_column(fs)\n categorical_embed.append(fc.add_vector(embed_dim))\n\n\n # concate all embeddings\n 
slot_embeddings = categorical_embed\n concat_embedding = tf.concat(slot_embeddings, axis=1)\n output_size = len(slot_embeddings) * embed_dim\n\n model.freeze_slots(features)\n\n with tf.variable_scope(\"follower\"):\n fc1_size, fc2_size, fc3_size = 16, 16, 16\n w1 = tf.get_variable('w1', shape=[output_size, fc1_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b1 = tf.get_variable(\n 'b1', shape=[fc1_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w2 = tf.get_variable('w2', shape=[fc1_size, fc2_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b2 = tf.get_variable(\n 'b2', shape=[fc2_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n w3 = tf.get_variable('w3', shape=[fc2_size, fc3_size], dtype=tf.float32,\n initializer=xavier_initializer)\n b3 = tf.get_variable(\n 'b3', shape=[fc3_size], dtype=tf.float32, initializer=tf.zeros_initializer())\n\n act1_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(concat_embedding, w1), b1))\n act1_l = tf.layers.batch_normalization(act1_l, training=True)\n act2_l = tf.nn.relu(tf.nn.bias_add(tf.matmul(act1_l, w2), b2))\n act2_l = tf.layers.batch_normalization(act2_l, training=True)\n embedding = tf.nn.relu(tf.nn.bias_add(tf.matmul(act2_l, w3), b3))\n embedding = tf.layers.batch_normalization(embedding, training=True)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding_grad = model.send('embedding', embedding, require_grad=True)\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(\n optimizer, embedding, grad_loss=embedding_grad, global_step=global_step)\n return model.make_spec(mode, loss=tf.math.reduce_mean(embedding), train_op=train_op)\n elif mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions={'embedding': embedding})\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'leader/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', + 'follower/main.py': + "# coding: utf-8\n# encoding=utf8\nimport logging\n\nimport tensorflow.compat.v1 as tf\n\nimport fedlearner.trainer as flt\nimport os\n\nROLE = 'follower'\n\nparser = flt.trainer_worker.create_argument_parser()\nparser.add_argument('--batch-size', type=int, default=256,\n help='Training batch size.')\nparser.add_argument('--clean-model', type=bool, default=True,\n help='clean checkpoint and saved_model')\nargs = parser.parse_args()\n\ndef apply_clean():\n if args.worker_rank == 0 and args.clean_model and tf.io.gfile.exists(args.checkpoint_path):\n tf.logging.info(\"--clean_model flag set. Removing existing checkpoint_path dir:\"\n \" {}\".format(args.checkpoint_path))\n tf.io.gfile.rmtree(args.checkpoint_path)\n\n if args.worker_rank == 0 and args.clean_model and args.export_path and tf.io.gfile.exists(args.export_path):\n tf.logging.info(\"--clean_model flag set. 
Removing existing savedmodel dir:\"\n \" {}\".format(args.export_path))\n tf.io.gfile.rmtree(args.export_path)\n\ndef input_fn(bridge, trainer_master=None):\n dataset = flt.data.DataBlockLoader(\n args.batch_size, ROLE, bridge, trainer_master).make_dataset()\n \n def parse_fn(example):\n feature_map = {}\n feature_map['example_id'] = tf.FixedLenFeature([], tf.string)\n # feature_map['y'] = tf.FixedLenFeature([], tf.int64)\n features = tf.parse_example(example, features=feature_map)\n labels = {'y': tf.constant(0, shape=[1])}\n return features, labels\n \n dataset = dataset.map(map_func=parse_fn,\n num_parallel_calls=tf.data.experimental.AUTOTUNE)\n dataset = dataset.prefetch(2)\n return dataset\n \n\ndef raw_serving_input_receiver_fn():\n features = {}\n features['embedding'] = tf.placeholder(dtype=tf.float32, shape=[1, 16], name='embedding')\n receiver_tensors = {\n 'embedding': features['embedding']\n }\n return tf.estimator.export.ServingInputReceiver(\n features, receiver_tensors)\n\ndef model_fn(model, features, labels, mode):\n global_step = tf.train.get_or_create_global_step()\n xavier_initializer = tf.glorot_normal_initializer()\n\n fc1_size = 16\n with tf.variable_scope('follower'):\n w1f = tf.get_variable('w1f', shape=[\n fc1_size, 1], dtype=tf.float32, initializer=tf.random_uniform_initializer(-0.01, 0.01))\n b1f = tf.get_variable(\n 'b1f', shape=[1], dtype=tf.float32, initializer=tf.zeros_initializer())\n \n if mode == tf.estimator.ModeKeys.TRAIN:\n embedding = model.recv('embedding', tf.float32, require_grad=True)\n else:\n embedding = features['embedding']\n \n logits = tf.nn.bias_add(tf.matmul(embedding, w1f), b1f)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n y = tf.dtypes.cast(labels['y'], tf.float32)\n loss = tf.nn.sigmoid_cross_entropy_with_logits(\n labels=y, logits=logits)\n loss = tf.math.reduce_mean(loss)\n\n # cala auc\n pred = tf.math.sigmoid(logits)\n print('==============================================================')\n print(tf.shape(y))\n print(tf.shape(pred))\n _, auc = tf.metrics.auc(labels=y, predictions=pred)\n\n logging_hook = tf.train.LoggingTensorHook(\n {\"loss\": loss, \"auc\": auc}, every_n_iter=10)\n\n optimizer = tf.train.GradientDescentOptimizer(0.01)\n train_op = model.minimize(optimizer, loss, global_step=global_step)\n return model.make_spec(mode, loss=loss, train_op=train_op,\n training_hooks=[logging_hook])\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return model.make_spec(mode, predictions=logits)\n\nif __name__ == '__main__':\n logging.basicConfig(\n level=logging.INFO,\n format='%(asctime)-15s [%(filename)s:%(lineno)d] %(levelname)s %(message)s'\n )\n apply_clean()\n flt.trainer_worker.train(\n ROLE, args, input_fn,\n model_fn, raw_serving_input_receiver_fn)\n", + 'follower/slot_2_bucket.py': + '# coding: utf-8\nslot_2_bucket = [(0, 2),(1, 2),(2, 2),(3, 2),(4, 2),(5, 2),(6, 2),(7, 2),(8, 2),(9, 2),(10, 2),(11, 2),(12, 2),(13, 1341),(14, 535),(15, 74138),(16, 70862),(17, 279),(18, 17),(19, 11019),(20, 591),(21, 4),(22, 30227),(23, 4791),(24, 75100),(25, 3075),(26, 27),(27, 9226),(28, 79191),(29, 11),(30, 3990),(31, 1898),(32, 5),\n(33, 76976),(34, 18),(35, 16),(36, 36534),(37, 74),(38, 29059)]\n', + }), + comment: '备注' + (index + 1), + type: 'NN_MODEL', + + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }; +}); + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff 
--git a/web_console_v2/client/src/services/mocks/v2/auth/signin.ts b/web_console_v2/client/src/services/mocks/v2/auth/signin.ts deleted file mode 100644 index efed6fb20..000000000 --- a/web_console_v2/client/src/services/mocks/v2/auth/signin.ts +++ /dev/null @@ -1,14 +0,0 @@ -export const post = { - data: { - data: { - access_token: 'token', - user: { - id: 1, - name: '', - username: '', - role: '', - }, - }, - }, - status: 204, -}; diff --git a/web_console_v2/client/src/services/mocks/v2/auth/signout.ts b/web_console_v2/client/src/services/mocks/v2/auth/signout.ts deleted file mode 100644 index 148ae0656..000000000 --- a/web_console_v2/client/src/services/mocks/v2/auth/signout.ts +++ /dev/null @@ -1,4 +0,0 @@ -export const post = { - data: { success: true }, - status: 204, -}; diff --git a/web_console_v2/client/src/services/mocks/v2/auth/users/:id.ts b/web_console_v2/client/src/services/mocks/v2/auth/users/:id.ts deleted file mode 100644 index d21260383..000000000 --- a/web_console_v2/client/src/services/mocks/v2/auth/users/:id.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { FedRoles } from 'typings/auth'; - -const fakeUserInfo = { - data: { - data: { id: 1, username: 'Mocked Admin', email: 'fl@mocked.com', role: FedRoles.Admin }, - }, - // to mock server error, just tweak the status code below - status: 200, -}; - -export default fakeUserInfo; diff --git a/web_console_v2/client/src/services/mocks/v2/compare_model/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/compare_model/__id__/index.ts new file mode 100644 index 000000000..dd82f2eba --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/compare_model/__id__/index.ts @@ -0,0 +1,124 @@ +import { AxiosRequestConfig } from 'axios'; + +const get = (config: AxiosRequestConfig) => ({ + data: { + data: { + id: 12342323, + name: 'mock评估任务', + state: Math.floor(Math.random() * 3), + dataset: 'test-dataset', + comment: '我是说明文案我是说明文案我是说明文案我是说明文案', + modelList: ['Xgbootst-v8', 'Xgbootst-v7', 'Xgbootst-v71'], + // modelList: ['Xgbootst-v8'], + extra: JSON.stringify({ + comment: + '我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案', + creator: '测试员', + }), + algorithm: '树模型', + metrics: [ + { + auc_roc: 0.95, + accuracy: 0.52, + precision: 0.28, + recall: 0.48, + f1_score: 0.95, + log_loss: 0.35, + }, + { + auc_roc: 0.55, + accuracy: 0.35, + precision: 0.75, + recall: 0.65, + f1_score: 0.45, + log_loss: 0.105, + }, + { + auc_roc: 0.15, + accuracy: 0.25, + precision: 0.35, + recall: 0.45, + f1_score: 0.55, + log_loss: 0.35, + }, + ], + confusionMatrix: [ + [0.95, 0.05, 0.24, 0.76], + [0.45, 0.25, 0.34, 0.86], + [0.45, 0.25, 0.34, 0.86], + ], + featureImportance: [ + [ + { + label: 'Duration', + value: 77, + }, + { + label: 'Mooth', + value: 40, + }, + { + label: 'Day', + value: 37, + }, + { + label: 'Contact', + value: 30, + }, + + { + label: 'POutcome', + value: 23, + }, + { + label: 'PDay', + value: 17, + }, + { + label: 'Education', + value: 11, + }, + ], + [ + { + label: 'Duration', + value: 67, + }, + { + label: 'Mooth', + value: 20, + }, + { + label: 'Day', + value: 47, + }, + { + label: 'Contact', + value: 10, + }, + + { + label: 'POutcome', + value: 63, + }, + { + label: 'PDay', + value: 47, + }, + { + label: 'Education', + value: 31, + }, + ], + ], + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }, + }, + status: 200, +}); + +export const patch = { data: {}, status: 200 }; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/compare_model/index.ts 
b/web_console_v2/client/src/services/mocks/v2/compare_model/index.ts new file mode 100644 index 000000000..7e24c456b --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/compare_model/index.ts @@ -0,0 +1,29 @@ +const list = new Array(4).fill(undefined).map((_, index) => { + return { + id: index + 1, + name: 'mock对比报告名称' + (index + 1), + comment: '我是说明文案', + compare_number: 5, + extra: JSON.stringify({ + comment: '我是说明', + creator: '测试员', + }), + + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }; +}); + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/dashboards/index.ts b/web_console_v2/client/src/services/mocks/v2/dashboards/index.ts new file mode 100644 index 000000000..b3dc04d33 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/dashboards/index.ts @@ -0,0 +1,31 @@ +import { Dashboard } from 'typings/operation'; + +const list: Dashboard[] = [ + { + name: 'dashboard1', + url: + 'xxx', + uuid: 'xvlksdhjlfsdlf', + }, + { + name: 'dashboard2', + url: 'https://reactjs.org/', + uuid: 'xvlksdhjlfsfsdfdlf', + }, +]; + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = { + data: { + data: undefined, + }, + status: 201, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/data_sources/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/data_sources/__id__/index.ts new file mode 100644 index 000000000..ad0f09c7a --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/data_sources/__id__/index.ts @@ -0,0 +1,23 @@ +import { AxiosRequestConfig } from 'axios'; + +const get = () => ({ + data: { + data: { + id: 1, + type: 'hdfs', + name: 'mock数据源1', + created_at: 1608582145, + url: 'hdfs://hadoop-master:9000/user/hadoop/test.csv', + }, + }, + status: 200, +}); + +export const DELETE = (config: AxiosRequestConfig) => { + return { + data: '', + status: 204, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/data_sources/index.ts b/web_console_v2/client/src/services/mocks/v2/data_sources/index.ts new file mode 100644 index 000000000..e3cc1e204 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/data_sources/index.ts @@ -0,0 +1,57 @@ +import { DataSource } from 'typings/dataset'; + +const list: DataSource[] = [ + { + id: 1, + uuid: 'uxbuxa', + type: 'hdfs', + name: 'mock数据源1', + created_at: 1608582145, + url: 'hdfs://hadoop-master:9000/user/hadoop/test.csv', + project_id: 1, + dataset_format: 'TABULAR', + dataset_type: 'STREAMING', + store_format: 'TFRECORDS', + }, + { + id: 2, + uuid: 'uxbusxa', + type: 'hdfs', + name: 'mock数据源2', + created_at: 1609582145, + url: + 'hdfs:///trimmed', + project_id: 1, + dataset_format: 'TABULAR', + dataset_type: 'STREAMING', + store_format: 'TFRECORDS', + }, + { + id: 3, + uuid: 'uxbusxa', + type: 'http', + name: 'mock数据源3', + created_at: 1610582145, + url: 'http://www.baidu.com', + project_id: 1, + dataset_format: 'TABULAR', + dataset_type: 'STREAMING', + store_format: 'TFRECORDS', + }, +]; + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = { + data: { + data: undefined, + }, + status: 201, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/data_alignment/index.ts 
b/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/data_alignment/index.ts new file mode 100644 index 000000000..179f11a09 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/data_alignment/index.ts @@ -0,0 +1,35 @@ +import { DataJobVariable } from 'typings/dataset'; +import { + stringInput, + numberInput, + objectInput, + listInput, + asyncSwitch, + codeEditor, + hideStringInput, +} from '../../variables/examples'; + +const get = { + data: { + data: { + is_federated: false, + variables: [ + hideStringInput, + stringInput, + numberInput, + asyncSwitch, + objectInput, + listInput, + codeEditor, + ].map((item) => { + return { + ...item, + widget_schema: JSON.stringify(item.widget_schema), + }; + }) as DataJobVariable[], + }, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/data_join/index.ts b/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/data_join/index.ts new file mode 100644 index 000000000..179f11a09 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/data_join/index.ts @@ -0,0 +1,35 @@ +import { DataJobVariable } from 'typings/dataset'; +import { + stringInput, + numberInput, + objectInput, + listInput, + asyncSwitch, + codeEditor, + hideStringInput, +} from '../../variables/examples'; + +const get = { + data: { + data: { + is_federated: false, + variables: [ + hideStringInput, + stringInput, + numberInput, + asyncSwitch, + objectInput, + listInput, + codeEditor, + ].map((item) => { + return { + ...item, + widget_schema: JSON.stringify(item.widget_schema), + }; + }) as DataJobVariable[], + }, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/rsa_psi_data_join/index.ts b/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/rsa_psi_data_join/index.ts new file mode 100644 index 000000000..179f11a09 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/dataset_job_definitions/rsa_psi_data_join/index.ts @@ -0,0 +1,35 @@ +import { DataJobVariable } from 'typings/dataset'; +import { + stringInput, + numberInput, + objectInput, + listInput, + asyncSwitch, + codeEditor, + hideStringInput, +} from '../../variables/examples'; + +const get = { + data: { + data: { + is_federated: false, + variables: [ + hideStringInput, + stringInput, + numberInput, + asyncSwitch, + objectInput, + listInput, + codeEditor, + ].map((item) => { + return { + ...item, + widget_schema: JSON.stringify(item.widget_schema), + }; + }) as DataJobVariable[], + }, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/datasets/:id/index.ts b/web_console_v2/client/src/services/mocks/v2/datasets/:id/index.ts deleted file mode 100644 index faeebb9cf..000000000 --- a/web_console_v2/client/src/services/mocks/v2/datasets/:id/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { unfinishedImporting, importFailed } from '../examples'; - -const get = { - data: { - data: [unfinishedImporting, importFailed], - }, - status: 200, -}; - -export const DELETE = { - data: { - data: unfinishedImporting, - }, - status: 200, -}; - -export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/datasets/:id/batches.ts b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/batches.ts similarity index 100% rename from web_console_v2/client/src/services/mocks/v2/datasets/:id/batches.ts rename to 
web_console_v2/client/src/services/mocks/v2/datasets/__id__/batches.ts diff --git a/web_console_v2/client/src/services/mocks/v2/datasets/__id__/feature.ts b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/feature.ts new file mode 100644 index 000000000..1e132de51 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/feature.ts @@ -0,0 +1,37 @@ +const get = { + data: { + data: [ + { + key: '缺失率', + value: '23%', + }, + { + key: '均值', + value: '234243', + }, + { + key: '最大值', + value: '20', + }, + { + key: '最小值', + value: '234243', + }, + { + key: '中位数', + value: '234243', + }, + { + key: '峰度', + value: '3', + }, + { + key: '偏度', + value: '3', + }, + ], + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/datasets/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/index.ts new file mode 100644 index 000000000..037cf7a5a --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/index.ts @@ -0,0 +1,33 @@ +import { AxiosRequestConfig } from 'axios'; + +import { unfinishedImporting } from '../examples'; + +const get = () => ({ + data: { + data: unfinishedImporting, + }, + status: 200, +}); + +export const DELETE = (config: AxiosRequestConfig) => { + const datasetId = config._id as string; + + return Math.random() > 0.5 + ? { + // Delete success + data: '', + status: 204, + } + : { + // Delete fail + data: { + code: 409, + message: { + [datasetId]: [`The dataset ${datasetId} is being processed`], + }, + }, + status: 409, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/datasets/__id__/preview.ts b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/preview.ts new file mode 100644 index 000000000..7494d1eb2 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/datasets/__id__/preview.ts @@ -0,0 +1,324 @@ +const get = { + data: { + data: { + dtypes: [ + { + key: '_1', + value: 'double', + }, + { + key: '_2', + value: 'double', + }, + { + key: '_3', + value: 'double', + }, + { + key: '_4', + value: 'double', + }, + { + key: '_5', + value: 'double', + }, + { + key: '_6', + value: 'double', + }, + { + key: '_7', + value: 'double', + }, + { + key: '_8', + value: 'double', + }, + { + key: '_9', + value: 'double', + }, + { + key: '_10', + value: 'double', + }, + { + key: 'raw_id', + value: 'string', + }, + ], + count: 5000, + sample: [ + [ + 0, + 0.9500884413719177, + 0, + 0, + -0.10321885347366333, + -0.9772778749465942, + -0.15135720372200012, + 0.4105985164642334, + ], + [ + 1, + 1.4940791130065918, + 1, + 1, + 0.3130677044391632, + 0.3336743414402008, + -0.2051582634449005, + -0.8540957570075989, + ], + [ + 2, + 0.04575851559638977, + 2, + 2, + 1.5327792167663574, + -1.4543657302856445, + -0.18718385696411133, + 1.4693588018417358, + ], + [ + 3, + 1.2302906513214111, + 3, + 3, + -0.38732680678367615, + 0.15634897351264954, + 1.202379822731018, + -0.302302747964859, + ], + [ + 4, + -1.2527953386306763, + 4, + 4, + -1.6138978004455566, + -0.4380742907524109, + 0.7774903774261475, + -0.21274028718471527, + ], + [ + 5, + 0.06651721894741058, + 5, + 5, + -0.6343221068382263, + 0.4283318817615509, + 0.30247190594673157, + -0.3627411723136902, + ], + [ + 6, + -1.630198359489441, + 6, + 6, + -0.9072983860969543, + -0.4017809331417084, + 0.46278226375579834, + 0.05194539576768875, + ], + [ + 7, + -0.8707971572875977, + 7, + 7, + -0.3115525245666504, + -0.6848101019859314, + -0.5788496732711792, + 
0.056165341287851334, + ], + [ + 8, + 1.1787796020507812, + 8, + 8, + -1.0707526206970215, + 1.895889163017273, + -0.1799248307943344, + 1.0544517040252686, + ], + [ + 9, + 0.01050002034753561, + 9, + 9, + 0.12691208720207214, + 0.7065731883049011, + 1.7858705520629883, + 0.4019893705844879, + ], + [ + 10, + -0.4136189818382263, + 10, + 10, + 1.922942042350769, + 1.9436211585998535, + -0.747454822063446, + 1.4805147647857666, + ], + [ + 11, + 0.9472519755363464, + 11, + 11, + 0.6140793561935425, + 0.8024563789367676, + -0.15501008927822113, + 0.922206699848175, + ], + [ + 12, + -0.4351535439491272, + 12, + 12, + 0.6722947359085083, + -0.14963454008102417, + 1.8492637872695923, + 0.40746182203292847, + ], + [ + 13, + 0.5765908360481262, + 13, + 13, + 0.39600670337677, + 0.676433265209198, + -0.20829875767230988, + -1.0930615663528442, + ], + [ + 14, + -0.9128222465515137, + 14, + 14, + -1.31590735912323, + 0.9444794654846191, + 1.117016315460205, + -0.46158459782600403, + ], + [ + 15, + 1.1266359090805054, + 15, + 15, + -1.1474686861038208, + -0.6634783148765564, + -1.0799314975738525, + -0.43782004714012146, + ], + [ + 16, + -1.0002152919769287, + 16, + 16, + 1.1880297660827637, + 0.8443629741668701, + -1.5447710752487183, + 0.31694260239601135, + ], + [ + 17, + -0.8034096360206604, + 17, + 17, + -0.4555324912071228, + 0.6815944910049438, + -0.6895498037338257, + 0.01747915893793106, + ], + [ + 18, + -1.1043833494186401, + 18, + 18, + -0.73956298828125, + -1.602057695388794, + 0.05216507986187935, + 1.543014645576477, + ], + [ + 19, + 0.7717905640602112, + 19, + 19, + 2.163235902786255, + -0.1715463250875473, + 0.8235041499137878, + 1.336527943611145, + ], + ], + metrics: { + raw_id: { + count: '5000', + mean: '2499.5', + stddev: '1443.5200033252052', + min: '0', + max: '4999', + missing_count: '0', + }, + z_2: { + count: '5000', + mean: '0.003840906049218029', + stddev: '0.999312078264177', + min: '-3.6270735', + max: '3.4571788', + missing_count: '0', + }, + example_id: { + count: '5000', + mean: '2499.5', + stddev: '1443.5200033252052', + min: '0', + max: '4999', + missing_count: '0', + }, + event_time: { + count: '5000', + mean: '2499.5', + stddev: '1443.5200033252052', + min: '0', + max: '4999', + missing_count: '0', + }, + z_4: { + count: '5000', + mean: '-0.0073523533177183705', + stddev: '0.9991068745409053', + min: '-4.4466324', + max: '3.8317902', + missing_count: '0', + }, + z_1: { + count: '5000', + mean: '0.00408399432664155', + stddev: '0.9994457324528213', + min: '-3.6942854', + max: '3.6361017', + missing_count: '0', + }, + z_3: { + count: '5000', + mean: '-0.012627735345379915', + stddev: '0.9899747337202139', + min: '-3.1699786', + max: '3.4915504', + missing_count: '0', + }, + z_5: { + count: '5000', + mean: '0.011706419334438396', + stddev: '0.9869785943536253', + min: '-3.5810459', + max: '3.6056588', + missing_count: '0', + }, + }, + }, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/datasets/examples.ts b/web_console_v2/client/src/services/mocks/v2/datasets/examples.ts index 0d33f0492..8d038f92c 100644 --- a/web_console_v2/client/src/services/mocks/v2/datasets/examples.ts +++ b/web_console_v2/client/src/services/mocks/v2/datasets/examples.ts @@ -1,82 +1,494 @@ -import { BatchState, DataBatch, DataFile, Dataset, DatasetType, FileState } from 'typings/dataset'; +import { + Dataset, + DatasetDataType, + DatasetKindBackEndType, + DatasetStateFront, + DatasetTransactionStatus, + DatasetType__archived, + 
DatasetRawPublishStatus, + DatasetProcessedAuthStatus, + DatasetProcessedMyAuthStatus, + DatasetJobListItem, + DataJobBackEndType, + DatasetJobState, +} from 'typings/dataset'; const sharedTimes = { created_at: 1611205103, updated_at: 1611305103, }; -const dataFile: DataFile = { - state: FileState.COMPLETED, - size: 1024, - source_path: '/path/to/file.data', - destination_path: '/path/to/dest/file.data', - error_message: 'Failed due to disk space is full', -}; - -const dataBatchImporting: DataBatch = { - id: 1, - move: false, - event_time: 1611305203, - dataset_id: 1, - state: BatchState.IMPORTING, - file_size: 10000, - details: { files: [dataFile] }, - num_imported_file: 2, - num_file: 10, - ...sharedTimes, -}; -const dataBatchImported: DataBatch = { - id: 2, - event_time: 1611305203, - dataset_id: 1, - state: BatchState.SUCCESS, - file_size: 12345, - details: { files: [dataFile] }, - move: false, - num_imported_file: 5, - num_file: 5, - ...sharedTimes, -}; -const dataBatchFailed: DataBatch = { - event_time: 1611305203, - id: 3, - dataset_id: 1, - state: BatchState.FAILED, - details: { files: [dataFile] }, - move: false, - file_size: 54321, - num_imported_file: 1, - num_file: 19, - ...sharedTimes, -}; - export const unfinishedImporting: Dataset = { id: 1, + uuid: 1, project_id: 1, + num_feature: 1, + num_example: 1, name: 'Mocked Dataset with a looooooooooog name', - dataset_type: DatasetType.STREAMING, + dataset_type: DatasetType__archived.STREAMING, comment: 'comment here', ...sharedTimes, path: '/path/to/dataset', - data_batches: [dataBatchImporting, dataBatchImported], + state_frontend: DatasetStateFront.FAILED, + dataset_format: DatasetDataType.STRUCT, + schema_errors: { + check_state: 'schema check failed', + files: [], + check_error: 'schema json file format is invalid.', + }, + validation_jsonschema: { + title: 'Test Matrix', + description: 'The structure of a line', + type: 'object', + properties: { + raw_id: { + description: 'raw id', + type: 'string', + }, + z_1: { + description: 'dimension 1', + type: 'number', + }, + z_2: { + description: 'dimension 2', + type: 'number', + exclusiveMinimum: 0, + }, + z_3: { + description: 'dimension 3', + type: 'number', + exclusiveMaximum: 100, + }, + z_4: { + description: 'dimension 4', + type: 'number', + }, + event_time: { + description: 'time', + type: 'number', + }, + }, + required: ['raw_id', 'z_1', 'z_2', 'z_3', 'x_3'], + }, + file_size: 1024, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, }; export const importFailed: Dataset = { id: 2, + uuid: 2, project_id: 2, + num_feature: 2, + num_example: 2, name: 'Failed one', - dataset_type: DatasetType.PSI, + dataset_type: DatasetType__archived.PSI, ...sharedTimes, path: '/path/to/dataset', - data_batches: [dataBatchImported, dataBatchFailed], + state_frontend: DatasetStateFront.FAILED, + dataset_format: DatasetDataType.STRUCT, + file_size: 2048, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + 
}, + }, }; export const successfullyImport: Dataset = { id: 3, + uuid: 3, + project_id: 1, + num_feature: 100, + num_example: 2, + name: 'Import succeeded', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.PROCESSING, + dataset_format: DatasetDataType.STRUCT, + file_size: 12345, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const datasetStateFrontFailed: Dataset = { + id: 4, + uuid: 4, + project_id: 1, + num_feature: 100, + num_example: 2, + name: 'Import succeeded', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.FAILED, + dataset_format: DatasetDataType.PICTURE, + file_size: 9527, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const datasetStateFrontSuccess: Dataset = { + id: 4, + uuid: 4, + project_id: 1, + num_feature: 100, + num_example: 2, + name: 'Import succeeded', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.SUCCEEDED, + dataset_format: DatasetDataType.PICTURE, + file_size: 9527, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const datasetStateFrontPending: Dataset = { + id: 4, + uuid: 4, project_id: 1, + num_feature: 100, + num_example: 2, name: 'Import succeeded', - dataset_type: DatasetType.PSI, + dataset_type: DatasetType__archived.PSI, ...sharedTimes, path: '/path/to/dataset', - data_batches: [dataBatchImported], + state_frontend: DatasetStateFront.PENDING, + dataset_format: DatasetDataType.PICTURE, + file_size: 9527, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, }; + +export const datasetStateFrontProcess: Dataset = { + id: 4, + uuid: 4, + project_id: 1, + num_feature: 100, + num_example: 2, + name: 'Import succeeded', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.PROCESSING, + dataset_format: DatasetDataType.PICTURE, + file_size: 9527, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: 
{ + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const datasetStateFrontDelete: Dataset = { + id: 4, + uuid: 4, + project_id: 1, + num_feature: 100, + num_example: 2, + name: 'Import succeeded', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.DELETING, + dataset_format: DatasetDataType.PICTURE, + file_size: 9527, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const importFailedWithNoErrorMessage: Dataset = { + id: 5, + uuid: 5, + project_id: 2, + num_feature: 2, + num_example: 2, + name: 'Failed one', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.FAILED, + dataset_format: DatasetDataType.STRUCT, + file_size: 111222333, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const deleting: Dataset = { + id: 6, + uuid: 6, + project_id: 2, + num_feature: 2, + num_example: 2, + name: 'Deleting', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.DELETING, + dataset_format: DatasetDataType.STRUCT, + file_size: 223344, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; +export const deleteFailed: Dataset = { + id: 7, + uuid: 7, + project_id: 2, + num_feature: 2, + num_example: 2, + name: 'Delete failed', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.FAILED, + dataset_format: DatasetDataType.STRUCT, + file_size: 5678, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const frontendSucceeded: Dataset = { + id: 11, + uuid: 11, + project_id: 2, + num_feature: 2, + num_example: 2, + name: 'processing succeeded', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.SUCCEEDED, + dataset_format: DatasetDataType.STRUCT, + file_size: 5678, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status:
DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; +export const frontendFailed: Dataset = { + id: 12, + uuid: 12, + project_id: 2, + num_feature: 2, + num_example: 2, + name: 'processing failed', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.FAILED, + dataset_format: DatasetDataType.STRUCT, + file_size: 5678, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; +export const frontendProcessing: Dataset = { + id: 13, + uuid: 13, + project_id: 2, + num_feature: 2, + num_example: 2, + name: 'processing', + dataset_type: DatasetType__archived.PSI, + ...sharedTimes, + path: '/path/to/dataset', + state_frontend: DatasetStateFront.PROCESSING, + dataset_format: DatasetDataType.STRUCT, + file_size: 5678, + dataset_kind: DatasetKindBackEndType.RAW, + publish_frontend_state: DatasetRawPublishStatus.PUBLISHED, + auth_frontend_state: DatasetProcessedAuthStatus.AUTH_APPROVED, + local_auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + participants_info: { + participants_map: { + my: { + auth_status: DatasetProcessedMyAuthStatus.AUTHORIZED, + }, + }, + }, +}; + +export const datasetJobRunningState: DatasetJobListItem = { + coordinator_id: 0, + created_at: 1668050255, + has_stages: true, + id: 6181, + kind: DataJobBackEndType.EXPORT, + name: 'export-lhh-test-dataset-ot-0-2', + project_id: 31, + result_dataset_id: 9720, + result_dataset_name: 'export-lhh-test-dataset-ot-0-2', + state: DatasetJobState.RUNNING, + uuid: 'u70fb2b1cfcb5437893c', +}; + +export const datasetJobSuccessState: DatasetJobListItem = { + coordinator_id: 0, + created_at: 1668050255, + has_stages: true, + id: 6181, + kind: DataJobBackEndType.EXPORT, + name: 'export-lhh-test-dataset-ot-0-2', + project_id: 31, + result_dataset_id: 9720, + result_dataset_name: 'export-lhh-test-dataset-ot-0-2', + state: DatasetJobState.SUCCEEDED, + uuid: 'u70fb2b1cfcb5437893c', +}; + +export const datasetJobFailedState: DatasetJobListItem = { + coordinator_id: 0, + created_at: 1668050255, + has_stages: true, + id: 6181, + kind: DataJobBackEndType.EXPORT, + name: 'export-lhh-test-dataset-ot-0-2', + project_id: 31, + result_dataset_id: 9720, + result_dataset_name: 'export-lhh-test-dataset-ot-0-2', + state: DatasetJobState.FAILED, + uuid: 'u70fb2b1cfcb5437893c', +}; + +export const datasetJobPendingState: DatasetJobListItem = { + coordinator_id: 0, + created_at: 1668050255, + has_stages: true, + id: 6181, + kind: DataJobBackEndType.EXPORT, + name: 'export-lhh-test-dataset-ot-0-2', + project_id: 31, + result_dataset_id: 9720, + result_dataset_name: 'export-lhh-test-dataset-ot-0-2', + state: DatasetJobState.PENDING, + uuid: 'u70fb2b1cfcb5437893c', +}; + +export const datasetJobStoppedState: DatasetJobListItem = { + coordinator_id: 0, + created_at: 1668050255, + has_stages: true, + id: 6181, + kind: DataJobBackEndType.EXPORT, + name: 'export-lhh-test-dataset-ot-0-2', + project_id: 31, + result_dataset_id: 9720, + result_dataset_name: 'export-lhh-test-dataset-ot-0-2', + state: DatasetJobState.STOPPED, + uuid: 'u70fb2b1cfcb5437893c', +}; + +export const transactionFailed = DatasetTransactionStatus.FAILED; +export const transactionProcessing =
DatasetTransactionStatus.PROCESSING; +export const transactionSucceeded = DatasetTransactionStatus.SUCCEEDED; diff --git a/web_console_v2/client/src/services/mocks/v2/evaluation/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/evaluation/__id__/index.ts new file mode 100644 index 000000000..dd82f2eba --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/evaluation/__id__/index.ts @@ -0,0 +1,124 @@ +import { AxiosRequestConfig } from 'axios'; + +const get = (config: AxiosRequestConfig) => ({ + data: { + data: { + id: 12342323, + name: 'mock评估任务', + state: Math.floor(Math.random() * 3), + dataset: 'test-dataset', + comment: '我是说明文案我是说明文案我是说明文案我是说明文案', + modelList: ['Xgboost-v8', 'Xgboost-v7', 'Xgboost-v71'], + // modelList: ['Xgboost-v8'], + extra: JSON.stringify({ + comment: + '我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案我是说明文案', + creator: '测试员', + }), + algorithm: '树模型', + metrics: [ + { + auc_roc: 0.95, + accuracy: 0.52, + precision: 0.28, + recall: 0.48, + f1_score: 0.95, + log_loss: 0.35, + }, + { + auc_roc: 0.55, + accuracy: 0.35, + precision: 0.75, + recall: 0.65, + f1_score: 0.45, + log_loss: 0.105, + }, + { + auc_roc: 0.15, + accuracy: 0.25, + precision: 0.35, + recall: 0.45, + f1_score: 0.55, + log_loss: 0.35, + }, + ], + confusionMatrix: [ + [0.95, 0.05, 0.24, 0.76], + [0.45, 0.25, 0.34, 0.86], + [0.45, 0.25, 0.34, 0.86], + ], + featureImportance: [ + [ + { + label: 'Duration', + value: 77, + }, + { + label: 'Month', + value: 40, + }, + { + label: 'Day', + value: 37, + }, + { + label: 'Contact', + value: 30, + }, + + { + label: 'POutcome', + value: 23, + }, + { + label: 'PDay', + value: 17, + }, + { + label: 'Education', + value: 11, + }, + ], + [ + { + label: 'Duration', + value: 67, + }, + { + label: 'Month', + value: 20, + }, + { + label: 'Day', + value: 47, + }, + { + label: 'Contact', + value: 10, + }, + + { + label: 'POutcome', + value: 63, + }, + { + label: 'PDay', + value: 47, + }, + { + label: 'Education', + value: 31, + }, + ], + ], + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }, + }, + status: 200, +}); + +export const patch = { data: {}, status: 200 }; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/evaluation/index.ts b/web_console_v2/client/src/services/mocks/v2/evaluation/index.ts new file mode 100644 index 000000000..767bef977 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/evaluation/index.ts @@ -0,0 +1,32 @@ +const list = new Array(4).fill(undefined).map((_, index) => { + return { + id: index + 1, + name: 'mock评估任务名称' + (index + 1), + state: Math.floor(Math.random() * 3), + dataset: 'test-dataset', + dataset_id: 109, + comment: '我是说明文案', + modelList: ['Xgboost-v8', 'Xgboost-v7', 'Xgboost-v6'], + extra: JSON.stringify({ + comment: '我是说明', + creator: '测试员', + }), + + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }; +}); + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/events/index.ts b/web_console_v2/client/src/services/mocks/v2/events/index.ts new file mode 100644 index 000000000..b8a9bab79 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/events/index.ts @@ -0,0 +1,150 @@ +const get = { + data: { + data: [ + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser',
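+ // NOTE: illustrative audit-event record; the field names below (op_type, result, source, user, resource_name, extra) are assumed to mirror the audit-log shape the events page consumes, and all values are placeholder mock data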
+ id: 12, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'ba4ec7c9-6f18-476e-a464-c03b25d754cb', + resource_name: '74', + extra: null, + created_at: 1631262553, + }, + { + resource_type: 'user', + op_type: 'create', + result: 'succeeded', + source: 'ui', + name: 'createUser', + id: 11, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: '4ff3151d-ea60-4835-bb7d-72f99b4d3eec', + resource_name: '3543543', + extra: + '{"username": "3543543", "xxx": "xx==", "name": "sidofij", "email": "iajdofijo@ijoaif.com", "role": "USER"}', + created_at: 1631262541, + }, + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser', + id: 10, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'e675ed0b-d27a-4da2-af55-9420afc8a4fc', + resource_name: 'signin', + extra: null, + created_at: 1631262447, + }, + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser', + id: 9, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: '6fc153a5-8c0e-49bd-879e-ff3e3d721a94', + resource_name: 'signin', + extra: null, + created_at: 1631262344, + }, + { + resource_type: 'user', + op_type: 'update', + result: 'succeeded', + source: 'ui', + name: 'updateUser', + id: 8, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'f28174ed-e357-4cc4-8598-3f6a65b9bbf9', + resource_name: '73', + extra: '{"password": "YWFhYWExMzIxQA=="}', + created_at: 1631260894, + }, + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser', + id: 7, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'c114dc20-97aa-418c-916a-99e73c841431', + resource_name: 'signin', + extra: null, + created_at: 1631260550, + }, + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser', + id: 6, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'df648970-c902-42b7-a917-ac267702726e', + resource_name: 'signin', + extra: null, + created_at: 1631260410, + }, + { + resource_type: 'user', + op_type: 'update', + result: 'succeeded', + source: 'ui', + name: 'updateUser', + id: 5, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'eb4b7821-f4bd-4209-8529-8f018be79897', + resource_name: '73', + extra: '{"password": "YXNkZmFzZGZAMTIz"}', + created_at: 1631260076, + }, + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser', + id: 4, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: '700f6bda-6537-46ff-9338-c83aa5fe5dfc', + resource_name: '71', + extra: null, + created_at: 1631260069, + }, + { + resource_type: 'user', + op_type: 'delete', + result: 'succeeded', + source: 'ui', + name: 'deleteUser', + id: 3, + user: { id: 50, username: 'admin', role: 'ADMIN' }, + user_id: 50, + uuid: 'cfe49414-9883-4fba-b76e-971339b4f977', + resource_name: '72', + extra: null, + created_at: 1631260067, + }, + ], + page_meta: { current_page: 1, page_size: 10, total_pages: 2, total_items: 12 }, + }, + status: 200, +}; +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/files/index.ts b/web_console_v2/client/src/services/mocks/v2/files/index.ts index 509a6cf50..8caadf34a 100644 --- a/web_console_v2/client/src/services/mocks/v2/files/index.ts +++ 
b/web_console_v2/client/src/services/mocks/v2/files/index.ts @@ -23,4 +23,19 @@ const get = { status: 200, }; +export const post = { + data: { + data: { + uploaded_files: [ + { + display_file_name: 'mock-file.tar.gz', + internal_path: + 'hdfs:///trimmed', + }, + ], + }, + }, + status: 200, +}; + export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/flags/index.ts b/web_console_v2/client/src/services/mocks/v2/flags/index.ts new file mode 100644 index 000000000..a3af09362 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/flags/index.ts @@ -0,0 +1,20 @@ +const get = () => { + return { + data: { + data: { + bcs_support_enabled: true, + data_module_beta_enabled: true, + data_module_compatible_enabled: false, + preset_template_edit_enabled: true, + user_management_enabled: true, + workspace_enabled: false, + x_host_section_enabled: false, + dashboard_enabled: false, + ot_psi_enabled: true, + }, + }, + status: 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/intersection_datasets/__id__/preview.ts b/web_console_v2/client/src/services/mocks/v2/intersection_datasets/__id__/preview.ts new file mode 100644 index 000000000..274ceabff --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/intersection_datasets/__id__/preview.ts @@ -0,0 +1,328 @@ +const previewData = { + dtypes: [ + { + key: '_1', + value: 'double', + }, + { + key: '_2', + value: 'double', + }, + { + key: '_3', + value: 'double', + }, + { + key: '_4', + value: 'double', + }, + { + key: '_5', + value: 'double', + }, + { + key: '_6', + value: 'double', + }, + { + key: '_7', + value: 'double', + }, + { + key: '_8', + value: 'double', + }, + { + key: '_9', + value: 'double', + }, + { + key: '_10', + value: 'double', + }, + { + key: 'raw_id', + value: 'string', + }, + ], + count: 3000, + sample: [ + [ + 0, + 0.9500884413719177, + 0, + 0, + -0.10321885347366333, + -0.9772778749465942, + -0.15135720372200012, + 0.4105985164642334, + ], + [ + 1, + 1.4940791130065918, + 1, + 1, + 0.3130677044391632, + 0.3336743414402008, + -0.2051582634449005, + -0.8540957570075989, + ], + [ + 2, + 0.04575851559638977, + 2, + 2, + 1.5327792167663574, + -1.4543657302856445, + -0.18718385696411133, + 1.4693588018417358, + ], + [ + 3, + 1.2302906513214111, + 3, + 3, + -0.38732680678367615, + 0.15634897351264954, + 1.202379822731018, + -0.302302747964859, + ], + [ + 4, + -1.2527953386306763, + 4, + 4, + -1.6138978004455566, + -0.4380742907524109, + 0.7774903774261475, + -0.21274028718471527, + ], + [ + 5, + 0.06651721894741058, + 5, + 5, + -0.6343221068382263, + 0.4283318817615509, + 0.30247190594673157, + -0.3627411723136902, + ], + [ + 6, + -1.630198359489441, + 6, + 6, + -0.9072983860969543, + -0.4017809331417084, + 0.46278226375579834, + 0.05194539576768875, + ], + [ + 7, + -0.8707971572875977, + 7, + 7, + -0.3115525245666504, + -0.6848101019859314, + -0.5788496732711792, + 0.056165341287851334, + ], + [ + 8, + 1.1787796020507812, + 8, + 8, + -1.0707526206970215, + 1.895889163017273, + -0.1799248307943344, + 1.0544517040252686, + ], + [ + 9, + 0.01050002034753561, + 9, + 9, + 0.12691208720207214, + 0.7065731883049011, + 1.7858705520629883, + 0.4019893705844879, + ], + [ + 10, + -0.4136189818382263, + 10, + 10, + 1.922942042350769, + 1.9436211585998535, + -0.747454822063446, + 1.4805147647857666, + ], + [ + 11, + 0.9472519755363464, + 11, + 11, + 0.6140793561935425, + 0.8024563789367676, + -0.15501008927822113, + 0.922206699848175, + ], + [ + 12, + -0.4351535439491272, + 
12, + 12, + 0.6722947359085083, + -0.14963454008102417, + 1.8492637872695923, + 0.40746182203292847, + ], + [ + 13, + 0.5765908360481262, + 13, + 13, + 0.39600670337677, + 0.676433265209198, + -0.20829875767230988, + -1.0930615663528442, + ], + [ + 14, + -0.9128222465515137, + 14, + 14, + -1.31590735912323, + 0.9444794654846191, + 1.117016315460205, + -0.46158459782600403, + ], + [ + 15, + 1.1266359090805054, + 15, + 15, + -1.1474686861038208, + -0.6634783148765564, + -1.0799314975738525, + -0.43782004714012146, + ], + [ + 16, + -1.0002152919769287, + 16, + 16, + 1.1880297660827637, + 0.8443629741668701, + -1.5447710752487183, + 0.31694260239601135, + ], + [ + 17, + -0.8034096360206604, + 17, + 17, + -0.4555324912071228, + 0.6815944910049438, + -0.6895498037338257, + 0.01747915893793106, + ], + [ + 18, + -1.1043833494186401, + 18, + 18, + -0.73956298828125, + -1.602057695388794, + 0.05216507986187935, + 1.543014645576477, + ], + [ + 19, + 0.7717905640602112, + 19, + 19, + 2.163235902786255, + -0.1715463250875473, + 0.8235041499137878, + 1.336527943611145, + ], + ], + metrics: { + raw_id: { + count: '5000', + mean: '2499.5', + stddev: '1443.5200033252052', + min: '0', + max: '4999', + missing_count: '0.1', + }, + z_2: { + count: '5000', + mean: '0.003840906049218029', + stddev: '0.999312078264177', + min: '-3.6270735', + max: '3.4571788', + missing_count: '0.2', + }, + example_id: { + count: '5000', + mean: '2499.5', + stddev: '1443.5200033252052', + min: '0', + max: '4999', + missing_count: '0.3', + }, + event_time: { + count: '5000', + mean: '2499.5', + stddev: '1443.5200033252052', + min: '0', + max: '4999', + missing_count: '0', + }, + z_4: { + count: '5000', + mean: '-0.0073523533177183705', + stddev: '0.9991068745409053', + min: '-4.4466324', + max: '3.8317902', + missing_count: '0', + }, + z_1: { + count: '5000', + mean: '0.00408399432664155', + stddev: '0.9994457324528213', + min: '-3.6942854', + max: '3.6361017', + missing_count: '0', + }, + z_3: { + count: '5000', + mean: '-0.012627735345379915', + stddev: '0.9899747337202139', + min: '-3.1699786', + max: '3.4915504', + missing_count: '0', + }, + z_5: { + count: '5000', + mean: '0.011706419334438396', + stddev: '0.9869785943536253', + min: '-3.5810459', + max: '3.6056588', + missing_count: '0', + }, + }, +}; +const get = { + data: { + data: { + current: previewData, + base: previewData, + }, + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/intersection_datasets/examples.ts b/web_console_v2/client/src/services/mocks/v2/intersection_datasets/examples.ts new file mode 100644 index 000000000..c5e274a60 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/intersection_datasets/examples.ts @@ -0,0 +1,122 @@ +import { IntersectionDataset } from 'typings/dataset'; + +export const readyToRun: IntersectionDataset = { + comment: '', + created_at: 1621503114, + data_source: 'u9d9fd94b01324f5ba90-data-join-job', + dataset_id: 65, + dataset_name: 'yry-test', + deleted_at: null, + id: 1, + job_name: 'data-module2', + kind: 0, + name: '求交数据集 2021-05-20-17:31:54', + path: + 'hdfs:///trimmed', + peer_name: 'aliyun-test1', + project_id: 14, + status: 'READY_TO_RUN', + updated_at: 1621503114, + workflow_id: 34143, + file_size: 54321, +}; +export const invalid: IntersectionDataset = { + id: 42, + project_id: 14, + dataset_id: 187, + workflow_id: 121734, + name: '求交数据集 2021-08-11-21:27:44', + path: + 'hdfs:///trimmed', + comment: '', + kind: 0, + created_at: 1628688464, + updated_at: 
1628688464, + deleted_at: null, + status: 'INVALID', + job_name: 'wzzz1', + peer_name: 'aliyun-test1', + dataset_name: 'wz-psi-test2', + data_source: 'u9d9fd94b01324f5ba90-data-join-job', + file_size: 54321, +}; +export const running: IntersectionDataset = { + id: 44, + project_id: 14, + dataset_id: 162, + workflow_id: 153713, + name: '求交数据集 2021-09-02-15:47:02', + path: + 'hdfs:///trimmed', + comment: '', + kind: 0, + created_at: 1630568822, + updated_at: 1630568822, + deleted_at: null, + status: 'RUNNING', + job_name: 'etst-12124234', + peer_name: 'aliyun-test1', + dataset_name: 'wz-test-2', + data_source: 'u7e0239d6f0f94271a33-data-join-job', + file_size: 54321, +}; +export const completed: IntersectionDataset = { + comment: '', + created_at: 1631168097, + data_source: 'ue24311305a4a4b6397d-psi-data-join-job', + dataset_id: 202, + dataset_name: 'wz-test-09071620', + deleted_at: null, + id: 53, + job_name: 'sfsdfsf', + kind: 0, + name: '求交数据集 2021-09-09-14:14:57', + path: + 'hdfs:///trimmed', + peer_name: 'aliyun-test1', + project_id: 14, + status: 'COMPLETED', + updated_at: 1631168097, + workflow_id: 153972, + file_size: 54321, +}; +export const stopped: IntersectionDataset = { + id: 18, + project_id: 14, + dataset_id: 126, + workflow_id: 88949, + name: '求交数据集 2021-07-19-21:06:12', + path: + 'hdfs:///trimmed', + comment: '', + kind: 0, + created_at: 1626699972, + updated_at: 1626699972, + deleted_at: null, + status: 'STOPPED', + job_name: 'workflow-fcg02', + peer_name: 'aliyun-test', + dataset_name: 'cg01', + data_source: 'ua01bceb669ee42a3b6b-data-join-job', + file_size: 54321, +}; +export const failed: IntersectionDataset = { + id: 9, + project_id: 14, + dataset_id: 98, + workflow_id: 68479, + name: '求交数据集 2021-07-01-12:37:00', + path: + 'hdfs:///trimmed', + comment: '', + kind: 0, + created_at: 1625114220, + updated_at: 1625114220, + deleted_at: null, + status: 'FAILED', + job_name: 'm09', + peer_name: 'aliyun-test', + dataset_name: 'mock10', + data_source: 'ua01bceb669ee42a3b6b-data-join-job', + file_size: 54321, +}; diff --git a/web_console_v2/client/src/services/mocks/v2/intersection_datasets/index.ts b/web_console_v2/client/src/services/mocks/v2/intersection_datasets/index.ts new file mode 100644 index 000000000..0bb9d3fc7 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/intersection_datasets/index.ts @@ -0,0 +1,10 @@ +import { readyToRun, invalid, running, completed, stopped, failed } from './examples'; + +const get = { + data: { + data: [running, completed, stopped, failed, readyToRun, invalid], + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/jobs/:id/events.ts b/web_console_v2/client/src/services/mocks/v2/jobs/__id__/events.ts similarity index 100% rename from web_console_v2/client/src/services/mocks/v2/jobs/:id/events.ts rename to web_console_v2/client/src/services/mocks/v2/jobs/__id__/events.ts diff --git a/web_console_v2/client/src/services/mocks/v2/jobs/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/jobs/__id__/index.ts new file mode 100644 index 000000000..08c55e45e --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/jobs/__id__/index.ts @@ -0,0 +1,223 @@ +const get = (config: any) => { + return { + data: { + data: { + id: 13, + name: 'u0d81863120e64c35aae-raw-data-job', + job_type: 'RAW_DATA', + state: 'FAILED', + is_disabled: false, + workflow_id: 4, + project_id: 1, + flapp_snapshot: null, + sparkapp_snapshot: null, + snapshot: + '{"app": {"status": {"appState": 
"FLStateShutDown", "completionTime": null, "flReplicaStatus": {"Master": {"active": {}, "failed": {"u0d81863120e64c35aae-raw-data-job-follower-master-0-0fa05dfb-0ce1-40da-894b-69af79223197": {}, "u0d81863120e64c35aae-raw-data-job-follower-master-0-20581ad4-d24a-4b20-9958-e7b0c979b24e": {}, "u0d81863120e64c35aae-raw-data-job-follower-master-0-39116f42-0cad-484a-8c37-7424b5084626": {}, "u0d81863120e64c35aae-raw-data-job-follower-master-0-3e89045e-52ff-4457-ab0d-7a917d2278c6": {}, "u0d81863120e64c35aae-raw-data-job-follower-master-0-b741750c-efbb-4893-adb5-838d7fdcddff": {}, "u0d81863120e64c35aae-raw-data-job-follower-master-0-c5610c81-1b7b-4180-85fd-6e065f538bc9": {}}, "local": {"u0d81863120e64c35aae-raw-data-job-follower-master-0": {}}, "mapping": {}, "remote": {}, "succeeded": {}}, "Worker": {"active": {}, "failed": {"u0d81863120e64c35aae-raw-data-job-follower-worker-0-cc2ecf1f-011f-4b36-98cb-dcac561ca6bf": {}, "u0d81863120e64c35aae-raw-data-job-follower-worker-1-f9d2835f-eeca-4a88-89d0-9694904b48bd": {}, "u0d81863120e64c35aae-raw-data-job-follower-worker-2-90e2ee0e-ba90-4d5b-b841-b894a17533cd": {}, "u0d81863120e64c35aae-raw-data-job-follower-worker-3-bca627c6-2c72-4e26-82bc-bd0e1a131ae2": {}}, "local": {"u0d81863120e64c35aae-raw-data-job-follower-worker-0": {}, "u0d81863120e64c35aae-raw-data-job-follower-worker-1": {}, "u0d81863120e64c35aae-raw-data-job-follower-worker-2": {}, "u0d81863120e64c35aae-raw-data-job-follower-worker-3": {}}, "mapping": {}, "remote": {}, "succeeded": {}}}}}, "pods": {"items": [{"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:41+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:41+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://f96c6294df7efc5e5e46d83fae462ed0512b26d96946c306e062265a2b05380b", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": false, "restart_count": 0, "started": false, "state": {"running": null, "terminated": {"container_id": "docker://f96c6294df7efc5e5e46d83fae462ed0512b26d96946c306e062265a2b05380b", "exit_code": 1, "finished_at": "2022-04-21T08:41:41+00:00", "message": null, "reason": "Error", "signal": null, "started_at": "2022-04-21T08:41:35+00:00"}, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.64", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Failed", "pod_ip": "172.20.1.228", "pod_i_ps": [{"ip": "172.20.1.228"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:33+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:33+00:00", 
"deletion_grace_period_seconds": 0, "deletion_timestamp": "2022-04-21T08:41:44+00:00", "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "0", "fl-replica-type": "master", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-master-0-b741750c-efbb-4893-adb5-838d7fdcddff", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982405396", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-master-0-b741750c-efbb-4893-adb5-838d7fdcddff", "uid": "cf8adcce-4344-4777-8c3d-f6c7f47971dc"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:34+00:00", "message": null, "reason": null, "status": "True", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:34+00:00", "message": null, "reason": null, "status": "True", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://4a2d516148b8d0457cd144fada8e39fa6b02cf6698d74ec5fcc351add764a4b6", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": true, "restart_count": 0, "started": true, "state": {"running": {"started_at": "2022-04-21T08:41:34+00:00"}, "terminated": null, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.57", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Running", "pod_ip": "172.20.0.222", "pod_i_ps": [{"ip": "172.20.0.222"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:33+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:33+00:00", "deletion_grace_period_seconds": null, "deletion_timestamp": null, "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "0", "fl-replica-type": "worker", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-worker-0-cc2ecf1f-011f-4b36-98cb-dcac561ca6bf", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982404984", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-worker-0-cc2ecf1f-011f-4b36-98cb-dcac561ca6bf", "uid": "03fdb82a-a9d7-4280-8c13-013676e9a752"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, 
"reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:35+00:00", "message": null, "reason": null, "status": "True", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:35+00:00", "message": null, "reason": null, "status": "True", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://eca6aecd645b52dea03dfc2d64535ae33f1c78b39ae056299f5e9ff6bbf9a789", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": true, "restart_count": 0, "started": true, "state": {"running": {"started_at": "2022-04-21T08:41:34+00:00"}, "terminated": null, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.62", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Running", "pod_ip": "172.20.2.93", "pod_i_ps": [{"ip": "172.20.2.93"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:33+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:33+00:00", "deletion_grace_period_seconds": null, "deletion_timestamp": null, "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "1", "fl-replica-type": "worker", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-worker-1-f9d2835f-eeca-4a88-89d0-9694904b48bd", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982405009", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-worker-1-f9d2835f-eeca-4a88-89d0-9694904b48bd", "uid": "c2fb28d9-43ae-4ab3-988c-54dacfc2037c"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:35+00:00", "message": null, "reason": null, "status": "True", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:35+00:00", "message": null, "reason": null, "status": "True", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://db00170b8d2ab93a5541e8207e94bdfe9fcc415b8d6742993c6cb970cac9fe0a", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": true, "restart_count": 0, "started": true, "state": 
{"running": {"started_at": "2022-04-21T08:41:34+00:00"}, "terminated": null, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.60", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Running", "pod_ip": "172.20.1.167", "pod_i_ps": [{"ip": "172.20.1.167"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:33+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:33+00:00", "deletion_grace_period_seconds": null, "deletion_timestamp": null, "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "2", "fl-replica-type": "worker", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-worker-2-90e2ee0e-ba90-4d5b-b841-b894a17533cd", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982405014", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-worker-2-90e2ee0e-ba90-4d5b-b841-b894a17533cd", "uid": "5c39edb3-f34b-47c6-9a7b-c86f1377982a"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:35+00:00", "message": null, "reason": null, "status": "True", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:35+00:00", "message": null, "reason": null, "status": "True", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:33+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://ee4c255a9a0bcf865f0fcc9b8f3fd06817cfc45037525f998e77f50af62b0c1a", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": true, "restart_count": 0, "started": true, "state": {"running": {"started_at": "2022-04-21T08:41:34+00:00"}, "terminated": null, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.64", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Running", "pod_ip": "172.20.1.227", "pod_i_ps": [{"ip": "172.20.1.227"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:33+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:33+00:00", "deletion_grace_period_seconds": null, "deletion_timestamp": null, "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "3", "fl-replica-type": "worker", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-worker-3-bca627c6-2c72-4e26-82bc-bd0e1a131ae2", "namespace": "default", "owner_references": [{"api_version": 
"fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982405032", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-worker-3-bca627c6-2c72-4e26-82bc-bd0e1a131ae2", "uid": "45506ffd-82c7-464d-8e0d-a1003ce5c7f8"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:44+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:52+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:52+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:44+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://d3d7d35423c1da5822bf6edd311ec63e36b12f9c4e0e902d21db3ca0f5f274bb", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": false, "restart_count": 0, "started": false, "state": {"running": null, "terminated": {"container_id": "docker://d3d7d35423c1da5822bf6edd311ec63e36b12f9c4e0e902d21db3ca0f5f274bb", "exit_code": 1, "finished_at": "2022-04-21T08:41:52+00:00", "message": null, "reason": "Error", "signal": null, "started_at": "2022-04-21T08:41:45+00:00"}, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.59", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Failed", "pod_ip": "172.20.1.94", "pod_i_ps": [{"ip": "172.20.1.94"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:44+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:44+00:00", "deletion_grace_period_seconds": 0, "deletion_timestamp": "2022-04-21T08:41:54+00:00", "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "0", "fl-replica-type": "master", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-master-0-c5610c81-1b7b-4180-85fd-6e065f538bc9", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982405829", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-master-0-c5610c81-1b7b-4180-85fd-6e065f538bc9", "uid": "25066279-22f4-4a54-af52-e7b73b85ec13"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:54+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": 
"2022-04-21T08:42:03+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:03+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:41:54+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://47f1bd13a385a28028977d590b23bc33246277aac34b7d2835b3dd7efd582a4a", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": false, "restart_count": 0, "started": false, "state": {"running": null, "terminated": {"container_id": "docker://47f1bd13a385a28028977d590b23bc33246277aac34b7d2835b3dd7efd582a4a", "exit_code": 1, "finished_at": "2022-04-21T08:42:02+00:00", "message": null, "reason": "Error", "signal": null, "started_at": "2022-04-21T08:41:55+00:00"}, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.59", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Failed", "pod_ip": "172.20.1.95", "pod_i_ps": [{"ip": "172.20.1.95"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:41:54+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:41:54+00:00", "deletion_grace_period_seconds": 0, "deletion_timestamp": "2022-04-21T08:42:04+00:00", "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "0", "fl-replica-type": "master", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-master-0-0fa05dfb-0ce1-40da-894b-69af79223197", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982406271", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-master-0-0fa05dfb-0ce1-40da-894b-69af79223197", "uid": "6fe1044e-e13d-4206-b9b4-6de1c52df48a"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:04+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:13+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:13+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:04+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://ef5b213dc9b879cf8a5cf14dd6af4b86f066f73a7c21479bc8e87ff3e68c6924", "image": 
"artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": false, "restart_count": 0, "started": false, "state": {"running": null, "terminated": {"container_id": "docker://ef5b213dc9b879cf8a5cf14dd6af4b86f066f73a7c21479bc8e87ff3e68c6924", "exit_code": 1, "finished_at": "2022-04-21T08:42:12+00:00", "message": null, "reason": "Error", "signal": null, "started_at": "2022-04-21T08:42:05+00:00"}, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.59", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Failed", "pod_ip": "172.20.1.96", "pod_i_ps": [{"ip": "172.20.1.96"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:42:04+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:42:04+00:00", "deletion_grace_period_seconds": 0, "deletion_timestamp": "2022-04-21T08:42:14+00:00", "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "0", "fl-replica-type": "master", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-master-0-39116f42-0cad-484a-8c37-7424b5084626", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982406709", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-master-0-39116f42-0cad-484a-8c37-7424b5084626", "uid": "d7283a7a-a66e-427a-bfe9-12c5542bc43e"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:14+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:23+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:23+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:14+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://c0fc917668d5f3e8fbbf6b6fecee660bba3df7faaf9ce248f61985ca16484da0", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": false, "restart_count": 0, "started": false, "state": {"running": null, "terminated": {"container_id": "docker://c0fc917668d5f3e8fbbf6b6fecee660bba3df7faaf9ce248f61985ca16484da0", "exit_code": 1, "finished_at": "2022-04-21T08:42:22+00:00", "message": null, "reason": "Error", "signal": null, "started_at": "2022-04-21T08:42:15+00:00"}, "waiting": null}}], 
"ephemeral_container_statuses": null, "host_ip": "192.168.252.59", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Failed", "pod_ip": "172.20.1.97", "pod_i_ps": [{"ip": "172.20.1.97"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:42:14+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:42:14+00:00", "deletion_grace_period_seconds": 0, "deletion_timestamp": "2022-04-21T08:42:24+00:00", "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", "fl-replica-index": "0", "fl-replica-type": "master", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-master-0-20581ad4-d24a-4b20-9958-e7b0c979b24e", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982407144", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-master-0-20581ad4-d24a-4b20-9958-e7b0c979b24e", "uid": "9b466c64-f70c-4449-ba34-f094200f3309"}}, {"status": {"conditions": [{"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:24+00:00", "message": null, "reason": null, "status": "True", "type": "Initialized"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:32+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "Ready"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:32+00:00", "message": "containers with unready status: [tensorflow]", "reason": "ContainersNotReady", "status": "False", "type": "ContainersReady"}, {"last_probe_time": null, "last_transition_time": "2022-04-21T08:42:24+00:00", "message": null, "reason": null, "status": "True", "type": "PodScheduled"}], "container_statuses": [{"container_id": "docker://de5ed97e5c40861378439814230db66654d54e88668e93e6379908d3a2a2f2f8", "image": "artifact.bytedance.com/fedlearner/fedlearner:882310f", "image_id": "docker-pullable://artifact.bytedance.com/fedlearner/fedlearner@sha256:170c117f8615b53372b5b8e3aaec14997f3d5a77c3921824ecc24f5b99dbf577", "last_state": {"running": null, "terminated": null, "waiting": null}, "name": "tensorflow", "ready": false, "restart_count": 0, "started": false, "state": {"running": null, "terminated": {"container_id": "docker://de5ed97e5c40861378439814230db66654d54e88668e93e6379908d3a2a2f2f8", "exit_code": 1, "finished_at": "2022-04-21T08:42:32+00:00", "message": null, "reason": "Error", "signal": null, "started_at": "2022-04-21T08:42:25+00:00"}, "waiting": null}}], "ephemeral_container_statuses": null, "host_ip": "192.168.252.59", "init_container_statuses": null, "message": null, "nominated_node_name": null, "phase": "Failed", "pod_ip": "172.20.1.98", "pod_i_ps": [{"ip": "172.20.1.98"}], "qos_class": "Guaranteed", "reason": null, "start_time": "2022-04-21T08:42:24+00:00"}, "metadata": {"annotations": {"kubernetes.io/psp": "ack.privileged"}, "cluster_name": null, "creation_timestamp": "2022-04-21T08:42:24+00:00", "deletion_grace_period_seconds": 0, "deletion_timestamp": "2022-04-21T08:42:34+00:00", "finalizers": null, "generate_name": null, "generation": null, "labels": {"app-name": "u0d81863120e64c35aae-raw-data-job", 
"fl-replica-index": "0", "fl-replica-type": "master", "role": "follower"}, "managed_fields": null, "name": "u0d81863120e64c35aae-raw-data-job-follower-master-0-3e89045e-52ff-4457-ab0d-7a917d2278c6", "namespace": "default", "owner_references": [{"api_version": "fedlearner.k8s.io/v1alpha1", "block_owner_deletion": true, "controller": true, "kind": "FLApp", "name": "u0d81863120e64c35aae-raw-data-job", "uid": "620e92c7-0842-41dd-8eff-cf9c7e010b6b"}], "resource_version": "2982407585", "self_link": "/api/v1/namespaces/default/pods/u0d81863120e64c35aae-raw-data-job-follower-master-0-3e89045e-52ff-4457-ab0d-7a917d2278c6", "uid": "b739e218-6b15-4c7b-b2d3-cddd30ddd370"}}], "deleted": false}}', + error_message: null, + crd_meta: 'api_version: "fedlearner.k8s.io/v1alpha1"\n', + crd_kind: 'FLApp', + created_at: 1650530491, + updated_at: 1650530554, + deleted_at: null, + complete_at: 0, + pods: [ + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-master-0-b741750c-efbb-4893-adb5-838d7fdcddff', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.228', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530493, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-worker-0-cc2ecf1f-011f-4b36-98cb-dcac561ca6bf', + pod_type: 'WORKER', + state: 'RUNNING', + pod_ip: '172.20.0.222', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530493, + message: '', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-worker-1-f9d2835f-eeca-4a88-89d0-9694904b48bd', + pod_type: 'WORKER', + state: 'RUNNING', + pod_ip: '172.20.2.93', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530493, + message: '', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-worker-2-90e2ee0e-ba90-4d5b-b841-b894a17533cd', + pod_type: 'WORKER', + state: 'RUNNING', + pod_ip: '172.20.1.167', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530493, + message: '', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-worker-3-bca627c6-2c72-4e26-82bc-bd0e1a131ae2', + pod_type: 'WORKER', + state: 'RUNNING', + pod_ip: '172.20.1.227', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530493, + message: '', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-master-0-c5610c81-1b7b-4180-85fd-6e065f538bc9', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.94', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530504, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-master-0-0fa05dfb-0ce1-40da-894b-69af79223197', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.95', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530514, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 
'u0d81863120e64c35aae-raw-data-job-follower-master-0-39116f42-0cad-484a-8c37-7424b5084626', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.96', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530524, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-master-0-20581ad4-d24a-4b20-9958-e7b0c979b24e', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.97', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530534, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-master-0-3e89045e-52ff-4457-ab0d-7a917d2278c6', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.98', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530544, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-a-job-follower-master-0-3e89045e-52ff-4457-ab0d-7a917d2278c6', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.98', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530544, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-mar-0-3e89045e-52ff-4457-ab0d-7a917d2278c6', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.98', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530544, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64aae-raw-data-job-follower-master-0-3e89045e-52ff-4457-ab0d-7a917d2278c6', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.98', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530544, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + { + name: + 'u0d81863120e64c35aae-raw-data-job-follower-mas-0-3e89045e-52ff-4457-ab0d-7a917d2278c6', + pod_type: 'MASTER', + state: 'FAILED_AND_FREED', + pod_ip: '172.20.1.98', + limits_cpu: '', + limits_memory: '', + requests_cpu: '', + requests_memory: '', + creation_timestamp: 1650530544, + message: + 'terminated:Error, Ready:containers with unready status: [tensorflow], ContainersReady:containers with unready status: [tensorflow]', + }, + ], + }, + }, + status: 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/jobs/:id/log.ts b/web_console_v2/client/src/services/mocks/v2/jobs/__id__/log.ts similarity index 100% rename from web_console_v2/client/src/services/mocks/v2/jobs/:id/log.ts rename to web_console_v2/client/src/services/mocks/v2/jobs/__id__/log.ts diff --git 
a/web_console_v2/client/src/services/mocks/v2/jobs/:id/metrics.ts b/web_console_v2/client/src/services/mocks/v2/jobs/__id__/metrics.ts similarity index 100% rename from web_console_v2/client/src/services/mocks/v2/jobs/:id/metrics.ts rename to web_console_v2/client/src/services/mocks/v2/jobs/__id__/metrics.ts diff --git a/web_console_v2/client/src/services/mocks/v2/model_groups/index.ts b/web_console_v2/client/src/services/mocks/v2/model_groups/index.ts new file mode 100644 index 000000000..4f7e9bb95 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/model_groups/index.ts @@ -0,0 +1,31 @@ +const list = new Array(3).fill(undefined).map((_, index) => { + return { + id: index + 1, + + name: 'mock模型集' + (index + 1), + comment: '我是说明', + extra: JSON.stringify({ + name: 'mock模型集' + (index + 1), + comment: '我是说明', + creator: '测试员', + project_id: 14, + }), + + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }; +}); + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/model_jobs/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/model_jobs/__id__/index.ts new file mode 100644 index 000000000..b8a6d41ab --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/model_jobs/__id__/index.ts @@ -0,0 +1,256 @@ +import { AxiosRequestConfig } from 'axios'; +import { modelJobMetric, modelJobMetric2 } from '../examples'; + +const workflowList = [ + { + id: 68420, + uuid: 'u8b4f360a5c2a4bb8b7c', + name: 'predict-job-3', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [1256], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'COMPLETED', + start_at: 1624521291, + stop_at: null, + created_at: 1624521217, + updated_at: 1624521291, + extra: null, + cron_config: '9 23 ? 
* 3', + }, + { + id: 68419, + uuid: 'u58da83ffb383465fa40', + name: 'wz-yyds1', + project_id: 23, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'NEW', + start_at: null, + stop_at: null, + created_at: 1624456941, + updated_at: 1624456941, + extra: null, + cron_config: '28 2 * * ?', + }, + { + id: 68418, + uuid: 'u33fb2a7366d746a4b4f', + name: 'wz-yyds', + project_id: 23, + comment: null, + metric_is_public: false, + create_job_flags: null, + job_ids: [], + forkable: false, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'NEW', + start_at: null, + stop_at: null, + created_at: 1624455371, + updated_at: 1624455371, + extra: '', + cron_config: '', + }, + { + id: 68410, + uuid: 'u9629530fb99844178e4', + name: 'ot-server', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [1245], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'READY', + start_at: null, + stop_at: null, + created_at: 1624279235, + updated_at: 1624279235, + extra: null, + cron_config: '', + }, + { + id: 68361, + uuid: 'ub81427e3f4634fc4874', + name: 'e2e-test-121f880060caf986-copy', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1], + job_ids: [1143, 1144, 1145], + forkable: true, + forked_from: 68360, + peer_create_job_flags: [1, 1, 1], + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'RUNNING', + start_at: 1624458779, + stop_at: null, + created_at: 1623917337, + updated_at: 1624458779, + extra: null, + cron_config: '', + }, + { + id: 68402, + uuid: 'u00b486d9a80e486fae9', + name: 'e2e-test-7f5830060cf2925', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1], + job_ids: [1234, 1235, 1236], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'INVALID', + start_at: 1624189314, + stop_at: 1624189330, + created_at: 1624189243, + updated_at: 1624276779, + extra: null, + cron_config: '', + }, + { + id: 68399, + uuid: 'u20ae5a88618e4b2c880', + name: 'e2e-test-1ee17d0060cf177c', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1], + job_ids: [1227, 1228, 1229], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'STOPPED', + start_at: 1624188305, + stop_at: 1624276386, + created_at: 1624184722, + updated_at: 1624276386, + extra: null, + cron_config: '', + }, + { + id: 68318, + uuid: 'u1354c0b76c5441968a3', + name: 'default-credit-13', + project_id: 14, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1, 1], + job_ids: [1044, 1045, 1046, 1047], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'FAILED', + start_at: 1623834767, + stop_at: null, + created_at: 1623834636, + updated_at: 1623836863, + 
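// note: the only workflowList entry here that also carries an is_local field + 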
extra: '', + cron_config: '', + is_local: false, + }, +]; +const generateData = (index: any) => ({ + id: index, + name: '测试模型' + index, + version: index + 1, + type: null, + state: 1 + Math.floor(Math.random() * 6), + job_name: 'job测试模型' + index, + job: { + config: null, + created_at: 1626604927, + deleted_at: null, + error_message: null, + flapp_snapshot: null, + id: 9, + is_disabled: false, + job_type: 'TREE_MODEL_TRAINING', + name: 'uf31970209a1c40b49b7-tree-model', + pods: [], + project_id: 1, + sparkapp_snapshot: null, + state: 'STARTED', + updated_at: 1626604957, + workflow_id: 15, + }, + parent_id: null, + params: null, + metrics: index === 62 ? modelJobMetric : modelJobMetric2, + /** model set id */ + group_id: index, + extra: JSON.stringify({ + name: 'mock模型详情' + index, + comment: '我是说明' + index, + creator: '测试员', + }), + local_extra: JSON.stringify({ + 'model.desc': '我是说明' + index, + 'model.dataset_id': index, + }), + detail_level: [], + workflow: workflowList[Math.floor(Math.random() * workflowList.length)], + + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, +}); + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +const get = (config: AxiosRequestConfig) => { + return { + data: { data: generateData(Number(config._id! || 0) + 1) }, + status: 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/model_jobs/examples.ts b/web_console_v2/client/src/services/mocks/v2/model_jobs/examples.ts new file mode 100644 index 000000000..b45e8e6d2 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/model_jobs/examples.ts @@ -0,0 +1,165 @@ +import { ModelJobMetrics } from 'typings/modelCenter'; + +export const modelJobMetric: ModelJobMetrics = { + train: { + acc: [ + [1, 2], + [0.6, 0.9], + ], + auc: [ + [1, 2], + [0.6, 0.8], + ], + precision: [ + [1, 2], + [0.6, 0.7], + ], + recall: [ + [1, 2], + [0.7, 0.2], + ], + f1: [ + [1, 2], + [0.6, 0.1], + ], + ks: [ + [1, 2], + [0.6, 0.7], + ], + }, + eval: { + acc: [ + [1, 2], + [0.6, 0.9], + ], + auc: [ + [1, 2], + [0.6, 0.8], + ], + precision: [ + [1, 2], + [0.6, 0.7], + ], + recall: [ + [1, 2], + [0.7, 0.2], + ], + f1: [ + [1, 2], + [0.6, 0.1], + ], + ks: [ + [1, 2], + [0.6, 0.7], + ], + }, + feature_importance: { + 'peer-0': 0.08, + 'peer-1': 0.3, + 'peer-2': 0.3, + 'peer-3': 0.1, + 'peer-4': 0.03, + age: 0.3, + overall_score: 0.3, + education: 0.1, + salary: 0.2, + height: 0.1, + weight: 0.02, + cars: 0.001, + + test_13: 0.7, + test_14: 0.6, + test_15: 0.5, + test_16: 0.4, + test_17: 0.3, + test_19: 0.2, + }, + confusion_matrix: { + tp: 30, + fp: 8, + fn: 22, + tn: 40, + }, +}; + +export const modelJobMetric2: ModelJobMetrics = { + train: { + acc: [ + [1, 2], + [0.6, 0.1], + ], + auc: [ + [1, 2], + [0.6, 0.2], + ], + precision: [ + [1, 2], + [0.6, 0.3], + ], + recall: [ + [1, 2], + [0.7, 0.4], + ], + f1: [ + [1, 2], + [0.6, 0.5], + ], + ks: [ + [1, 2], + [0.6, 0.4], + ], + }, + eval: { + acc: [ + [1, 2], + [0.6, 0.1], + ], + auc: [ + [1, 2], + [0.6, 0.2], + ], + precision: [ + [1, 2], + [0.6, 0.3], + ], + recall: [ + [1, 2], + [0.7, 0.4], + ], + f1: [ + [1, 2], + [0.6, 0.5], + ], + ks: [ + [1, 2], + [0.6, 0.4], + ], + }, + feature_importance: { + 'peer-0': 0.08, + 'peer-1': 0.3, + 'peer-2': 0.3, + 'peer-3': 0.1, + 'peer-4': 0.03, + age: 0.3, + overall_score: 0.3, + education: 0.1, + salary: 0.2, + height: 0.1, + weight: 0.02, + cars: 0.001, + + test_13: 0.7, + test_14: 0.6, + test_15: 0.5, + test_16: 0.4, + test_17: 
0.3, + test_19: 0.2, + }, + confusion_matrix: { + tp: 22, + fp: 8, + fn: 22, + tn: 50, + }, +}; diff --git a/web_console_v2/client/src/services/mocks/v2/model_jobs/index.ts b/web_console_v2/client/src/services/mocks/v2/model_jobs/index.ts new file mode 100644 index 000000000..6bc989b69 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/model_jobs/index.ts @@ -0,0 +1,266 @@ +const workflowList = [ + { + id: 68420, + uuid: 'u8b4f360a5c2a4bb8b7c', + name: 'predict-job-3', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [1256], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'COMPLETED', + start_at: 1624521291, + stop_at: null, + created_at: 1624521217, + updated_at: 1624521291, + extra: '{"isTrainMode":true,"model.name":"测试模型名称1","model_group.name":"测试模型集名称1"}', + cron_config: '9 23 ? * 3', + }, + { + id: 68419, + uuid: 'u58da83ffb383465fa40', + name: 'wz-yyds1', + project_id: 23, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'NEW', + start_at: null, + stop_at: null, + created_at: 1624456941, + updated_at: 1624456941, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"prediction.name":"测试预测任务名称1","prediction.desc":"测试预测任务描述1","prediction.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":true}', + cron_config: '28 2 * * ?', + }, + { + id: 68418, + uuid: 'u33fb2a7366d746a4b4f', + name: 'wz-yyds', + project_id: 23, + comment: null, + metric_is_public: false, + create_job_flags: null, + job_ids: [], + forkable: false, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'NEW', + start_at: null, + stop_at: null, + created_at: 1624455371, + updated_at: 1624455371, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"prediction.name":"测试预测任务名称1","prediction.desc":"测试预测任务描述1","prediction.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":true}', + cron_config: '', + }, + { + id: 68410, + uuid: 'u9629530fb99844178e4', + name: 'ot-server', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [1245], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'READY', + start_at: null, + stop_at: null, + created_at: 1624279235, + updated_at: 1624279235, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"prediction.name":"测试预测任务名称1","prediction.desc":"测试预测任务描述1","prediction.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":true}', + cron_config: '', + }, + { + id: 68361, + uuid: 'ub81427e3f4634fc4874', + name: 'e2e-test-121f880060caf986-copy', + 
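// forked copy: forked_from below references the source workflow's id (68360) + 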
project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1], + job_ids: [1143, 1144, 1145], + forkable: true, + forked_from: 68360, + peer_create_job_flags: [1, 1, 1], + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'RUNNING', + start_at: 1624458779, + stop_at: null, + created_at: 1623917337, + updated_at: 1624458779, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"evaluation.name":"测试预测任务名称1","evaluation.desc":"测试预测任务描述1","evaluation.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":false,"isEvaluationMode":true}', + cron_config: '', + }, + { + id: 68402, + uuid: 'u00b486d9a80e486fae9', + name: 'e2e-test-7f5830060cf2925', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1], + job_ids: [1234, 1235, 1236], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'INVALID', + start_at: 1624189314, + stop_at: 1624189330, + created_at: 1624189243, + updated_at: 1624276779, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"evaluation.name":"测试预测任务名称1","evaluation.desc":"测试预测任务描述1","evaluation.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":false,"isEvaluationMode":true}', + cron_config: '', + }, + { + id: 68399, + uuid: 'u20ae5a88618e4b2c880', + name: 'e2e-test-1ee17d0060cf177c', + project_id: 31, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1], + job_ids: [1227, 1228, 1229], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'STOPPED', + start_at: 1624188305, + stop_at: 1624276386, + created_at: 1624184722, + updated_at: 1624276386, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"evaluation.name":"测试预测任务名称1","evaluation.desc":"测试预测任务描述1","evaluation.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":false,"isEvaluationMode":true}', + cron_config: '', + }, + { + id: 68318, + uuid: 'u1354c0b76c5441968a3', + name: 'default-credit-13', + project_id: 14, + comment: null, + metric_is_public: false, + create_job_flags: [1, 1, 1, 1], + job_ids: [1044, 1045, 1046, 1047], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'FAILED', + start_at: 1623834767, + stop_at: null, + created_at: 1623834636, + updated_at: 1623836863, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"evaluation.name":"测试预测任务名称1","evaluation.desc":"测试预测任务描述1","evaluation.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":false,"isEvaluationMode":true}', + cron_config: '', + is_local: false, + }, + { + id: 68499, + uuid: 'u58da83ffb383465fa40', + 
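// note: reuses the uuid and name of workflow 68419 earlier in this list, under project_id 14 + 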
name: 'wz-yyds1', + project_id: 14, + comment: null, + metric_is_public: false, + create_job_flags: [1], + job_ids: [], + forkable: true, + forked_from: null, + peer_create_job_flags: null, + recur_type: 'NONE', + recur_at: null, + trigger_dataset: null, + last_triggered_batch: null, + state: 'NEW', + start_at: null, + stop_at: null, + created_at: 1624456941, + updated_at: 1624456941, + extra: + '{"model.name":"测试模型名称1","model.desc":"测试模型描述1","model.dataset_id":1,"prediction.name":"测试预测任务名称1","prediction.desc":"测试预测任务描述1","prediction.dataset_id":1,"model.parent_job_name":"测试模型名称1","model.creator":"测试用户1","model.resource_template_type":"high","is_share_offline_prediction_result":false,"isTrainMode":false,"isPredictionMode":true}', + cron_config: '', + }, +]; +const list = new Array(11).fill(undefined).map((_, index) => { + return { + id: index + 1, + name: '测试模型' + (index + 1), + version: index + 1, + type: null, + state: 1 + Math.floor(Math.random() * 6), + job_name: 'job测试模型' + (index + 1), + parent_id: null, + params: null, + metrics: null, + /** model set id */ + group_id: (index % 3) + 1, + extra: JSON.stringify({ + name: 'mock模型' + (index + 1), + comment: '我是说明', + creator: '测试员', + }), + detail_level: [], + workflow: workflowList[index % workflowList.length], + + created_at: 1608582145 + index * 10000, + updated_at: 1608582145 + index * 10000, + deleted_at: 1608582145 + index * 10000, + }; +}); + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/models/index.ts b/web_console_v2/client/src/services/mocks/v2/models/index.ts new file mode 100644 index 000000000..4864a0e82 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/models/index.ts @@ -0,0 +1,109 @@ +import { Model } from 'typings/modelCenter'; + +const modelList: Model[] = [ + { + id: 7, + name: 'ucce42a49cbff4c4e930-nn-train-20210927-124432-3bd1a', + uuid: 'u3a0507d64bc442a2b66', + model_type: 'NN_MODEL', + model_path: + 'hdfs:///trimmed', + favorite: false, + comment: 'created_by ucce42a49cbff4c4e930-nn-train at 2021-09-27 12:44:32.419496+00:00', + group_id: null, + project_id: 14, + job_id: 42877, + model_job_id: null, + version: null, + created_at: 1632746672, + updated_at: 1632746672, + deleted_at: null, + workflow_id: 153952, + workflow_name: 'test-workflow', + job_name: 'test-job', + model_job_name: 'test-model-job', + federated_type: '', + }, + { + id: 8, + name: 'ucce42a49cbff4c4e930-nn-train-20210927-124437-3748d', + uuid: 'u195c5a39d1e44804a1a', + model_type: 'NN_MODEL', + model_path: + 'hdfs:///trimmed', + favorite: false, + comment: 'created_by ucce42a49cbff4c4e930-nn-train at 2021-09-27 12:44:37.030088+00:00', + group_id: null, + project_id: 14, + job_id: 42877, + model_job_id: null, + version: null, + created_at: 1632746677, + updated_at: 1632746677, + deleted_at: null, + workflow_id: 153952, + workflow_name: 'test-workflow', + job_name: 'test-job', + model_job_name: 'test-model-job', + federated_type: '', + }, + { + id: 86, + name: 'ud8b9cb500fc3435cb66-nn-train-20211009-070848-7c28f', + uuid: 'u70ca285687eb4d2fbb0', + model_type: 'NN_MODEL', + model_path: + 'hdfs:///trimmed', + favorite: false, + comment: 'created_by ud8b9cb500fc3435cb66-nn-train at 2021-10-09 07:08:48.729397+00:00', + group_id: 1, + project_id: 14, + job_id: null, + model_job_id: 1, + version: null, + created_at: 1633763328, + updated_at: 
1633763328, + deleted_at: null, + workflow_id: 153952, + workflow_name: 'test-workflow', + job_name: 'test-job', + model_job_name: 'test-model-job', + federated_type: '', + }, + { + id: 87, + name: 'ud8b9cb500fc3435cb66-nn-train-20211009-070856-6ca71', + uuid: 'u4b6e82b75e644c90907', + model_type: 'NN_MODEL', + model_path: + 'hdfs:///trimmed', + favorite: false, + comment: 'created_by ud8b9cb500fc3435cb66-nn-train at 2021-10-09 07:08:56.799515+00:00', + group_id: 2, + project_id: 14, + job_id: null, + model_job_id: 2, + version: null, + created_at: 1633763336, + updated_at: 1633763336, + deleted_at: null, + workflow_id: 153952, + workflow_name: 'test-workflow', + job_name: 'test-job', + model_job_name: 'test-model-job', + federated_type: '', + }, +]; + +const get = { + data: { + data: modelList, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/offline_prediction/index.ts b/web_console_v2/client/src/services/mocks/v2/offline_prediction/index.ts new file mode 100644 index 000000000..abdfdb4f8 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/offline_prediction/index.ts @@ -0,0 +1,31 @@ +const list = new Array(4).fill(undefined).map((_, index) => { + return { + id: index + 1, + name: 'mock工作流名称' + (index + 1), + state: Math.floor(Math.random() * 3), + dataset: 'test-dataset', + comment: '我是说明文案', + target: '模型1-V1', + extra: JSON.stringify({ + comment: '我是说明', + creator: '测试员', + }), + + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }; +}); + +const get = { + data: { + data: list, + }, + status: 200, +}; + +export const post = (config: any) => { + return { data: { data: config.data }, status: 200 }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/participant_candidates/index.ts b/web_console_v2/client/src/services/mocks/v2/participant_candidates/index.ts new file mode 100644 index 000000000..aa20b4a08 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/participant_candidates/index.ts @@ -0,0 +1,26 @@ +import { DomainName } from 'typings/participant'; + +const get = { + data: { + data: [ + { + domain_name: 'fl-aaa.com', + }, + { + domain_name: 'fl-alimama.com', + }, + { + domain_name: 'fl-aliyun-debug.com', + }, + { + domain_name: 'fl-aliyun-demo1.com', + }, + { + domain_name: 'fl-aliyun-test.com', + }, + ] as Array<DomainName>, + page_meta: {}, + }, + status: 200, +}; +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/participants/examples.ts b/web_console_v2/client/src/services/mocks/v2/participants/examples.ts new file mode 100644 index 000000000..12edca152 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/participants/examples.ts @@ -0,0 +1,68 @@ +import { Participant } from 'typings/participant'; + +export const participantList: Participant[] = [ + { + id: 1, + name: 'bytedance', + domain_name: 'fl-bytedance.com', + pure_domain_name: 'fl-bytedance', + host: '101.200.236.203', + port: 32443, + comment: 'migrate from projectconnection_test2', + extra: { + is_manual_configured: false, + }, + created_at: 1631519868, + updated_at: 1631519868, + num_project: 25, + type: 'PLATFORM', + }, + { + id: 2, + name: 'bytedance-test', + domain_name: 'fl-bytedance-test.com', + pure_domain_name: 'fl-bytedance-test', + host: 'xxx', + port: 443, + comment: 'migrate from projectxyx-test', + extra: { + is_manual_configured: 
false, + }, + created_at: 1631519868, + updated_at: 1631519868, + num_project: 1, + type: 'PLATFORM', + }, + { + id: 3, + name: 'Demo-test', + domain_name: 'fl-demo-test.com', + pure_domain_name: 'fl-demo-test', + host: 'xxx', + port: 443, + comment: 'migrate from projectDemo1', + extra: { + is_manual_configured: false, + }, + created_at: 1631519868, + updated_at: 1631785500, + num_project: 1, + type: 'PLATFORM', + }, + { + id: 4, + name: 'aliyun-test1', + domain_name: 'fl-aliyun-test.com', + pure_domain_name: 'fl-aliyun-test', + host: '11.11.11.11', + port: 443, + comment: null, + extra: { + is_manual_configured: false, + }, + created_at: 1632469805, + updated_at: 1632469805, + num_project: 1, + type: 'PLATFORM', + }, +]; diff --git a/web_console_v2/client/src/services/mocks/v2/participants/index.ts b/web_console_v2/client/src/services/mocks/v2/participants/index.ts new file mode 100644 index 000000000..f46678f14 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/participants/index.ts @@ -0,0 +1,10 @@ +import { participantList } from 'services/mocks/v2/participants/examples'; + +const get = { + data: { + data: participantList, + page_meta: {}, + }, + status: 200, +}; +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/project/__id__/participant_datasets/index.ts b/web_console_v2/client/src/services/mocks/v2/project/__id__/participant_datasets/index.ts new file mode 100644 index 000000000..71a5f3155 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/project/__id__/participant_datasets/index.ts @@ -0,0 +1,27 @@ +const get = { + data: { + data: [ + { + uuid: 'u26af7e549f30473382a', + project_id: 31, + name: 'e2e_test_dataset-20220411-043442', + participant_id: 1, + format: 'TABULAR', + file_size: 244662, + updated_at: 1649652778, + }, + { + uuid: 'u9e4cd8a42782465e93d', + project_id: 31, + name: 'e2e_test_dataset-20220411-023606', + participant_id: 1, + format: 'TABULAR', + file_size: 315206, + updated_at: 1649645562, + }, + ], + }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/projects/:id/connection_checks.ts b/web_console_v2/client/src/services/mocks/v2/projects/:id/connection_checks.ts deleted file mode 100644 index 1dac51b12..000000000 --- a/web_console_v2/client/src/services/mocks/v2/projects/:id/connection_checks.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const post = () => ({ - data: { - data: { success: Math.random() < 0.6, details: [] }, - }, - status: 200, -}); diff --git a/web_console_v2/client/src/services/mocks/v2/projects/:id/index.ts b/web_console_v2/client/src/services/mocks/v2/projects/:id/index.ts deleted file mode 100644 index 6753afd1c..000000000 --- a/web_console_v2/client/src/services/mocks/v2/projects/:id/index.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { AxiosRequestConfig } from 'axios'; - -const get = (config: AxiosRequestConfig) => ({ - data: { - data: { - id: config._id, - name: 'Foo project', - token: '51aa8b39a5444f24ae7e403ac7f6029c', - config: { - token: '51aa8b39a5444f24ae7e403ac7f6029c', - participants: [ - { - name: 'name', - domain_name: 'fl-test.com', - url: '127.0.0.1:32443', - }, - ], - variables: [ - { - name: 'test', - value: 'test', - }, - ], - }, - comment: '3', - created_at: 1608582145.0, - updated_at: 1608582145.0, - deleted_at: null, - }, - }, - status: 200, -}); - -export const patch = { data: {}, status: 200 }; - -export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/projects/index.ts 
b/web_console_v2/client/src/services/mocks/v2/projects/index.ts deleted file mode 100644 index 2b6d6d84f..000000000 --- a/web_console_v2/client/src/services/mocks/v2/projects/index.ts +++ /dev/null @@ -1,41 +0,0 @@ -const project_list = new Array(1).fill(undefined).map((_, index) => { - return { - id: index + 1, - name: `Project-${index + 1}`, - token: '51aa8b39a5444f24ae7e403ac7f6029c', - config: { - token: '51aa8b39a5444f24ae7e403ac7f6029c', - participants: [ - { - name: `合作方-${index + 1}`, - domain_name: 'fl-test.com', - url: '127.0.0.1:32443', - }, - ], - variables: [ - { - name: 'testkey', - value: 'testval', - }, - ], - }, - comment: 'comment here', - created_at: 1608582145, - updated_at: 1608582145, - deleted_at: null, - num_workflow: ~~(Math.random() * 10), - }; -}); - -const get = { - data: { - data: project_list, - }, - status: 200, -}; - -export const post = (config: any) => { - return { data: { data: config.data }, status: 200 }; -}; - -export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/serving_services/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/serving_services/__id__/index.ts new file mode 100644 index 000000000..0ae1733d3 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/serving_services/__id__/index.ts @@ -0,0 +1,73 @@ +import { AxiosRequestConfig } from 'axios'; + +import { ModelServing, ModelServingState, ModelServingInstanceState } from 'typings/modelServing'; + +const get = (config: AxiosRequestConfig) => ({ + data: { + data: { + id: Number(config._id! || 0) + 1, + project_id: 1, + name: 'mock模型serving' + Number(config._id! || 0) + 1, + comment: '备注', + instances: [ + { + name: 's-20210909112859-64nj6-79cff4cb57-696kr', + status: ModelServingInstanceState.AVAILABLE, + cpu: '90%', + memory: '60%', + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }, + { + name: 's-20210909112859-64nj6-79cff4cb57-999sr', + status: ModelServingInstanceState.UNAVAILABLE, + cpu: '20%', + memory: '30%', + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }, + ], + deployment_id: 1, + resource: { + cpu: '2000m', + memory: '10Gi', + replicas: 2, + }, + model_id: 1, + model_type: 'TREE_MODEL', + signature: JSON.stringify({ + inputs: { + examples: { + dtype: 'DT_STRING', + tensor_shape: { dim: [], unknown_rank: true }, + name: 'examples:0', + }, + }, + outputs: { + output: { + dtype: 'DT_FLOAT', + tensor_shape: { dim: [{ size: '2', name: '' }], unknown_rank: false }, + name: 'Softmax:0', + }, + }, + method_name: 'tensorflow/serving/predict', + }), + status: ModelServingState.AVAILABLE, + endpoint: 'https://api/v2/models/inference', + extra: '', + instance_num_status: '1/3', + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + } as ModelServing, + }, + + status: 200, +}); + +export const patch = { data: {}, status: 200 }; +export const DELETE = { data: {}, status: 200 }; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/serving_services/__id__/instances/__id__/log/index.ts b/web_console_v2/client/src/services/mocks/v2/serving_services/__id__/instances/__id__/log/index.ts new file mode 100644 index 000000000..241b6121c --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/serving_services/__id__/instances/__id__/log/index.ts @@ -0,0 +1,77 @@ +const get = () => ({ + data: { + data: [ + 'I0917 01:00:57.550960 51 round_trippers.go:443] GET 
https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 404 Not Found in 1 milliseconds', + 'E0917 01:00:57.551087 51 event_handler.go:225] RegisterHandler name = u11c2a27793c443c0888-nn-train-job, role = Follower, err = flapps.fedlearner.k8s.io "u11c2a27793c443c0888-nn-train-job" not found', + 'I0917 01:00:57.549489 51 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'E0917 01:01:02.525449 1796675 event_handler.go:225] RegisterHandler name = u11c2a27793c443c0888-nn-train-job, role = Follower, err = flapps.fedlearner.k8s.io "u11c2a27793c443c0888-nn-train-job" not found', + 'I0917 01:01:02.525343 1796675 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 404 Not Found in 1 milliseconds', + 'I0917 01:01:02.523937 1796675 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'I0917 01:01:07.503363 1796675 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 404 Not Found in 1 milliseconds', + 'I0917 01:01:07.502001 1796675 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'I0917 01:01:12.521849 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 404 Not Found in 1 milliseconds', + 'E0917 01:01:07.503472 1796675 event_handler.go:225] RegisterHandler name = u11c2a27793c443c0888-nn-train-job, role = Follower, err = flapps.fedlearner.k8s.io "u11c2a27793c443c0888-nn-train-job" not found', + 'I0917 01:01:12.520381 51 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'E0917 01:01:12.521968 51 event_handler.go:225] RegisterHandler name = u11c2a27793c443c0888-nn-train-job, role = Follower, err = flapps.fedlearner.k8s.io "u11c2a27793c443c0888-nn-train-job" not found', + 'I0917 01:01:22.641118 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 404 Not Found in 1 milliseconds', + 'E0917 01:01:22.641228 51 event_handler.go:225] RegisterHandler name = u11c2a27793c443c0888-nn-train-job, role = Follower, err = flapps.fedlearner.k8s.io "u11c2a27793c443c0888-nn-train-job" not found', + 'I0917 01:01:22.639836 51 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'I0917 01:01:17.559534 1796675 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'I0917 01:01:17.560804 1796675 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 404 Not Found in 1 milliseconds', + 'E0917 01:01:17.560935 1796675 event_handler.go:225] RegisterHandler name = u11c2a27793c443c0888-nn-train-job, role = Follower, err = flapps.fedlearner.k8s.io "u11c2a27793c443c0888-nn-train-job" not found', + 'I0917 01:01:25.532047 51 controller.go:144] add new Pod u11c2a27793c443c0888-nn-train-job-leader-worker-0-e54b506a-b032-44f9-8919-247b781e8a24', + 'time="2021-09-17T01:01:25+08:00" level=info msg="Controller u11c2a27793c443c0888-nn-train-job created service u11c2a27793c443c0888-nn-train-job-leader-worker-0"', + 'I0917 01:01:25.545137 51 round_trippers.go:443] GET 
https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:25.554689 51 round_trippers.go:443] PUT https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job/status 200 OK in 8 milliseconds', + 'I0917 01:01:30.721617 51 app_manager.go:229] sync new app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:25.475905 51 event.go:278] Event(v1.ObjectReference{Kind:"FLApp", Namespace:"fedlearner", Name:"u11c2a27793c443c0888-nn-train-job", UID:"3d5acdf9-9b45-470e-b8df-e6a70f28b8f1", APIVersion:"fedlearner.k8s.io/v1alpha1", ResourceVersion:"987444923", FieldPath:""}): type: \'Normal\' reason: \'SuccessfulCreatePod\' Created pod: u11c2a27793c443c0888-nn-train-job-leader-master-0-34e8d5d8-37ca-458a-8f41-11351b57bc01', + 'I0917 01:01:25.545524 51 status_updater.go:115] updating flapp u11c2a27793c443c0888-nn-train-job status, namespace = fedlearner, new state = FLStateNew', + 'I0917 01:01:30.757848 51 app_manager.go:446] sync bootstrapped app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:25.500923 51 controller.go:144] add new Pod u11c2a27793c443c0888-nn-train-job-leader-ps-0-10a6ed31-a2c2-4512-9c8e-19622ace3061', + 'I0917 01:01:30.726261 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:25.476417 51 controller.go:144] add new Pod u11c2a27793c443c0888-nn-train-job-leader-master-0-34e8d5d8-37ca-458a-8f41-11351b57bc01', + 'I0917 01:01:25.533899 51 event.go:278] Event(v1.ObjectReference{Kind:"FLApp", Namespace:"fedlearner", Name:"u11c2a27793c443c0888-nn-train-job", UID:"3d5acdf9-9b45-470e-b8df-e6a70f28b8f1", APIVersion:"fedlearner.k8s.io/v1alpha1", ResourceVersion:"987444923", FieldPath:""}): type: \'Normal\' reason: \'SuccessfulCreateService\' Created service: u11c2a27793c443c0888-nn-train-job-leader-master-0', + 'I0917 01:01:25.566288 51 status_updater.go:115] updating flapp u11c2a27793c443c0888-nn-train-job status, namespace = fedlearner, new state = FLStateNew', + 'I0917 01:01:25.581371 51 app_manager.go:229] sync new app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:30.735962 51 round_trippers.go:443] PUT https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job/status 200 OK in 9 milliseconds', + 'I0917 01:01:25.500580 51 pod_control.go:168] Controller u11c2a27793c443c0888-nn-train-job created pod u11c2a27793c443c0888-nn-train-job-leader-ps-0-10a6ed31-a2c2-4512-9c8e-19622ace3061', + 'I0917 01:01:25.565883 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:25.475794 51 pod_control.go:168] Controller u11c2a27793c443c0888-nn-train-job created pod u11c2a27793c443c0888-nn-train-job-leader-master-0-34e8d5d8-37ca-458a-8f41-11351b57bc01', + 'time="2021-09-17T01:01:25+08:00" level=info msg="Controller u11c2a27793c443c0888-nn-train-job created service u11c2a27793c443c0888-nn-train-job-leader-ps-0"', + 'I0917 01:01:25.531659 51 event.go:278] Event(v1.ObjectReference{Kind:"FLApp", Namespace:"fedlearner", Name:"u11c2a27793c443c0888-nn-train-job", UID:"3d5acdf9-9b45-470e-b8df-e6a70f28b8f1", APIVersion:"fedlearner.k8s.io/v1alpha1", ResourceVersion:"987444923", FieldPath:""}): type: 
\'Normal\' reason: \'SuccessfulCreatePod\' Created pod: u11c2a27793c443c0888-nn-train-job-leader-worker-0-e54b506a-b032-44f9-8919-247b781e8a24', + 'I0917 01:01:25.537846 51 event.go:278] Event(v1.ObjectReference{Kind:"FLApp", Namespace:"fedlearner", Name:"u11c2a27793c443c0888-nn-train-job", UID:"3d5acdf9-9b45-470e-b8df-e6a70f28b8f1", APIVersion:"fedlearner.k8s.io/v1alpha1", ResourceVersion:"987444923", FieldPath:""}): type: \'Normal\' reason: \'SuccessfulCreateService\' Created service: u11c2a27793c443c0888-nn-train-job-leader-worker-0', + 'I0917 01:01:30.757864 51 app_manager.go:456] sync bootstrapped leader app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:25.582708 51 round_trippers.go:443] PUT https://11.240.0.1:443/api/v1/namespaces/fedlearner/configmaps/u11c2a27793c443c0888-nn-train-job-leader-worker 200 OK in 1 milliseconds', + 'I0917 01:01:25.531550 51 pod_control.go:168] Controller u11c2a27793c443c0888-nn-train-job created pod u11c2a27793c443c0888-nn-train-job-leader-worker-0-e54b506a-b032-44f9-8919-247b781e8a24', + 'I0917 01:01:25.561049 51 app_manager.go:229] sync new app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:25.665674 51 app_manager.go:229] sync new app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:25.535827 51 event.go:278] Event(v1.ObjectReference{Kind:"FLApp", Namespace:"fedlearner", Name:"u11c2a27793c443c0888-nn-train-job", UID:"3d5acdf9-9b45-470e-b8df-e6a70f28b8f1", APIVersion:"fedlearner.k8s.io/v1alpha1", ResourceVersion:"987444923", FieldPath:""}): type: \'Normal\' reason: \'SuccessfulCreateService\' Created service: u11c2a27793c443c0888-nn-train-job-leader-ps-0', + 'I0917 01:01:25.670023 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:30.723174 51 round_trippers.go:443] PUT https://11.240.0.1:443/api/v1/namespaces/fedlearner/configmaps/u11c2a27793c443c0888-nn-train-job-leader-worker 200 OK in 1 milliseconds', + 'I0917 01:01:30.742418 51 status_updater.go:115] updating flapp u11c2a27793c443c0888-nn-train-job status, namespace = fedlearner, new state = FLStateBootstrapped', + 'I0917 01:01:30.757875 51 app_manager.go:465] still waiting for follower, name = u11c2a27793c443c0888-nn-train-job, rtype = Worker', + 'I0917 01:01:25.442789 51 app_manager.go:229] sync new app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:30.742349 51 app_manager.go:229] sync new app, name = u11c2a27793c443c0888-nn-train-job', + 'I0917 01:01:25.574974 51 round_trippers.go:443] PUT https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job/status 200 OK in 8 milliseconds', + 'I0917 01:01:30.751224 51 round_trippers.go:443] PUT https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job/status 200 OK in 8 milliseconds', + 'I0917 01:01:25.500690 51 event.go:278] Event(v1.ObjectReference{Kind:"FLApp", Namespace:"fedlearner", Name:"u11c2a27793c443c0888-nn-train-job", UID:"3d5acdf9-9b45-470e-b8df-e6a70f28b8f1", APIVersion:"fedlearner.k8s.io/v1alpha1", ResourceVersion:"987444923", FieldPath:""}): type: \'Normal\' reason: \'SuccessfulCreatePod\' Created pod: u11c2a27793c443c0888-nn-train-job-leader-ps-0-10a6ed31-a2c2-4512-9c8e-19622ace3061', + 'I0917 01:01:30.726664 51 status_updater.go:115] updating flapp u11c2a27793c443c0888-nn-train-job status, namespace = fedlearner, new state = 
FLStateNew', + 'time="2021-09-17T01:01:25+08:00" level=info msg="Controller u11c2a27793c443c0888-nn-train-job created service u11c2a27793c443c0888-nn-train-job-leader-master-0"', + 'I0917 01:01:25.585747 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:25.667049 51 round_trippers.go:443] PUT https://11.240.0.1:443/api/v1/namespaces/fedlearner/configmaps/u11c2a27793c443c0888-nn-train-job-leader-worker 200 OK in 1 milliseconds', + 'E0917 01:01:30.757881 51 controller.go:225] failed to sync FLApp fedlearner/u11c2a27793c443c0888-nn-train-job, err = still waiting for follower, name = u11c2a27793c443c0888-nn-train-job, rtype = Worker', + 'I0917 01:01:25.562610 51 round_trippers.go:443] PUT https://11.240.0.1:443/api/v1/namespaces/fedlearner/configmaps/u11c2a27793c443c0888-nn-train-job-leader-worker 200 OK in 1 milliseconds', + 'E0917 01:01:27.805979 1796675 event_handler.go:233] RegisterHandler leader is not bootstrapped, name = u11c2a27793c443c0888-nn-train-job, role = Follower, state = FLStateNew', + 'I0917 01:01:27.802569 1796675 server.go:49] Register received, name = u11c2a27793c443c0888-nn-train-job, role = Follower', + 'I0917 01:01:27.805507 1796675 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:33.123219 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:33.200786 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + 'I0917 01:01:33.226327 51 round_trippers.go:443] PUT https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job/status 200 OK in 9 milliseconds', + 'I0917 01:01:33.210341 51 round_trippers.go:443] PUT https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job/status 200 OK in 8 milliseconds', + 'I0917 01:01:33.242903 51 round_trippers.go:443] GET https://11.240.0.1:443/apis/fedlearner.k8s.io/v1alpha1/namespaces/fedlearner/flapps/u11c2a27793c443c0888-nn-train-job 200 OK in 2 milliseconds', + ], + }, + status: 200, +}); + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/serving_services/index.ts b/web_console_v2/client/src/services/mocks/v2/serving_services/index.ts new file mode 100644 index 000000000..fe71ea9bc --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/serving_services/index.ts @@ -0,0 +1,68 @@ +import { ResponseInfo } from 'typings/app'; +import { ModelServing, ModelServingState } from 'typings/modelServing'; + +const statusList = [ + ModelServingState.AVAILABLE, + ModelServingState.LOADING, + ModelServingState.UNLOADING, + ModelServingState.UNKNOWN, +]; + +const list: ModelServing[] = new Array(12).fill(undefined).map((_, index) => { + return { + id: index + 1, + project_id: 1, + name: 'mock模型serving' + index, + comment: '备注', + instances: [], + deployment_id: 1, + resource: { + cpu: '2000m', + memory: '10Gi', + replicas: 2, + }, + is_local: index % 2 === 0, + support_inference: index % 2 === 0, + model_id: 1, + model_type: 'TREE_MODEL', + signature: JSON.stringify({ + 
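// TensorFlow Serving-style signature: per-tensor dtype, shape, and name (cf. method_name) + 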
inputs: { + examples: { + dtype: 'DT_STRING', + tensor_shape: { dim: [], unknown_rank: true }, + name: 'examples:0', + }, + }, + outputs: { + output: { + dtype: 'DT_FLOAT', + tensor_shape: { dim: [{ size: '2', name: '' }], unknown_rank: false }, + name: 'Softmax:0', + }, + }, + method_name: 'tensorflow/serving/predict', + }), + status: statusList[index % 4], + endpoint: '', + extra: '', + instance_num_status: '1/3', + created_at: 1608582145, + updated_at: 1608582145, + deleted_at: 1608582145, + }; +}); + +const get = { + data: { + data: list, + page_meta: { + current_page: 1, + page_size: 10, + total_pages: 2, + total_items: 12, + }, + }, + status: 200, +} as ResponseInfo; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/settings/examples.ts b/web_console_v2/client/src/services/mocks/v2/settings/examples.ts new file mode 100644 index 000000000..cd059376f --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/settings/examples.ts @@ -0,0 +1,65 @@ +import { SystemVariable } from 'typings/settings'; + +export const fixed: SystemVariable = { + name: 'fixed', + value: 'testval', + value_type: 'STRING', + fixed: true, +}; + +export const noFixed: SystemVariable = { + name: 'noFixed', + value: 'test val', + value_type: 'STRING', + fixed: false, +}; + +export const int: SystemVariable = { + name: 'int', + value: 1, + value_type: 'INT', + fixed: false, +}; + +export const string: SystemVariable = { + name: 'string', + value: 'string val', + value_type: 'STRING', + fixed: false, +}; + +export const emptyObject: SystemVariable = { + name: 'emptyObject', + value: {}, + value_type: 'OBJECT', + fixed: false, +}; +export const object: SystemVariable = { + name: 'object', + value: { a: 1, b: 2, c: 3, d: { e: 4 } }, + value_type: 'OBJECT', + fixed: false, +}; +export const emptyList: SystemVariable = { + name: 'emptyList', + value: [], + value_type: 'LIST', + fixed: false, +}; +export const list: SystemVariable = { + name: 'list', + value: [{ a: 1 }, { a: 2 }, { a: 3 }], + value_type: 'LIST', + fixed: false, +}; + +export const variables: SystemVariable[] = [ + int, + string, + fixed, + noFixed, + emptyObject, + object, + emptyList, + list, +]; diff --git a/web_console_v2/client/src/services/mocks/v2/settings/index.ts b/web_console_v2/client/src/services/mocks/v2/settings/index.ts index dc2ba2782..c30db11e4 100644 --- a/web_console_v2/client/src/services/mocks/v2/settings/index.ts +++ b/web_console_v2/client/src/services/mocks/v2/settings/index.ts @@ -1,15 +1,22 @@ import { SettingOptions } from 'typings/settings'; +import { variables } from './examples'; const get = { data: { - data: { webconsole_image: '2.0.0-rc.3' } as SettingOptions, + data: { + webconsole_image: '2.0.0-rc.3', + variables, + } as SettingOptions, }, status: 200, }; export const patch = { data: { - data: { success: true }, + data: { + webconsole_image: '2.0.0-rc.3', + variables, + } as SettingOptions, }, status: 200, }; diff --git a/web_console_v2/client/src/services/mocks/v2/settings/system_info/index.ts b/web_console_v2/client/src/services/mocks/v2/settings/system_info/index.ts new file mode 100644 index 000000000..9993e245d --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/settings/system_info/index.ts @@ -0,0 +1,10 @@ +import { SystemInfo } from 'typings/settings'; + +const get = { + data: { + data: { name: '北京某某某公司', domain_name: 'fl-hahaha.com' } as SystemInfo, + }, + status: 200, +}; + +export default get; diff --git 
a/web_console_v2/client/src/services/mocks/v2/variables/examples.ts b/web_console_v2/client/src/services/mocks/v2/variables/examples.ts index cdbd86f2a..6ccbfeeeb 100644 --- a/web_console_v2/client/src/services/mocks/v2/variables/examples.ts +++ b/web_console_v2/client/src/services/mocks/v2/variables/examples.ts @@ -4,6 +4,7 @@ import { VariableComponent, VariableValueType, } from 'typings/variable'; +import { Tag } from 'typings/workflow'; export const unassignedComponent: Variable = { name: 'component_unassigned', @@ -13,8 +14,10 @@ export const unassignedComponent: Variable = { }; export const nameInput: Variable = { + tag: Tag.INPUT_PATH, name: 'some_name', value: 'initial value', + typed_value: 'initial value', value_type: VariableValueType.STRING, access_mode: VariableAccessMode.PEER_READABLE, widget_schema: { @@ -24,8 +27,11 @@ export const nameInput: Variable = { }; export const memSelect: Variable = { + tag: Tag.INPUT_PATH, name: 'worker_mem', value: 2, + typed_value: 2, + value_type: VariableValueType.NUMBER, access_mode: VariableAccessMode.PRIVATE, widget_schema: { component: VariableComponent.Select, @@ -40,8 +46,11 @@ export const memSelect: Variable = { }; export const asyncSwitch: Variable = { + tag: Tag.OPERATING_PARAM, name: 'is_async', value: false, + typed_value: false, + value_type: VariableValueType.BOOLEAN, access_mode: VariableAccessMode.PEER_WRITABLE, widget_schema: { component: VariableComponent.Switch, @@ -53,8 +62,11 @@ export const asyncSwitch: Variable = { }; export const cpuLimit: Variable = { + tag: Tag.OPERATING_PARAM, name: 'cpu_limit', - value: false, + value: 2, + typed_value: 2, + value_type: VariableValueType.NUMBER, access_mode: VariableAccessMode.PRIVATE, widget_schema: { component: VariableComponent.NumberPicker, @@ -65,8 +77,11 @@ export const cpuLimit: Variable = { }; export const commentTextArea: Variable = { + tag: Tag.RESOURCE_ALLOCATION, name: 'comment', value: '', + typed_value: '', + value_type: VariableValueType.STRING, access_mode: VariableAccessMode.PEER_WRITABLE, widget_schema: { component: VariableComponent.TextArea, @@ -79,22 +94,30 @@ export const commentTextArea: Variable = { export const gloabalVariables: Variable[] = [ { + tag: Tag.RESOURCE_ALLOCATION, name: 'image_version', value: 'v1.5-rc3', + typed_value: 'v1.5-rc3', + value_type: VariableValueType.STRING, access_mode: VariableAccessMode.PEER_READABLE, widget_schema: { required: true, }, }, { + tag: Tag.RESOURCE_ALLOCATION, name: 'num_partitions', value: '4', + typed_value: 'v1.5-rc3', + value_type: VariableValueType.STRING, access_mode: VariableAccessMode.PEER_READABLE, widget_schema: '' as any, }, { + tag: Tag.RESOURCE_ALLOCATION, name: 'worker_cpu', value: 1, + value_type: VariableValueType.NUMBER, access_mode: VariableAccessMode.PRIVATE, widget_schema: { component: VariableComponent.Select, @@ -107,3 +130,157 @@ export const gloabalVariables: Variable[] = [ }, }, ]; + +export const codeEditor: Variable = { + tag: Tag.INPUT_PATH, + name: 'code_tar', + value: { 'main.js': 'var a = 1;' }, + typed_value: { 'main.js': 'var a = 1;' }, + value_type: VariableValueType.CODE, + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: { + component: VariableComponent.Code, + placeholder: '代码', + }, +}; + +export const datasetSelect: Variable = { + name: 'dataset', + value: '', + typed_value: '', + value_type: VariableValueType.STRING, + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: { + component: VariableComponent.Dataset, + placeholder: '数据集', + }, +}; + 
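+// Illustrative helper (a sketch, not part of the original fixtures): how
+// `value` and `typed_value` pair up in these examples. OBJECT/LIST variables
+// keep the JSON-stringified form in `value` and the parsed form in
+// `typed_value`; NUMBER variables may carry a numeric string in `value`.
+// Relies on the VariableValueType import already at the top of this file.
+export function toTypedValue(value: any, valueType: VariableValueType): any {
+  if (valueType === VariableValueType.OBJECT || valueType === VariableValueType.LIST) {
+    // e.g. objectInput: value = '{"a":1}'  ->  typed_value = { a: 1 }
+    return typeof value === 'string' ? JSON.parse(value) : value;
+  }
+  if (valueType === VariableValueType.NUMBER) {
+    // e.g. numberInput: value = '1'  ->  typed_value = 1
+    return typeof value === 'string' ? Number(value) : value;
+  }
+  return value;
+}
+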
+export const datasetPathSelect: Variable = { + name: 'dataset_path', + value: '', + typed_value: '', + value_type: VariableValueType.STRING, + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: { + component: VariableComponent.DatasetPath, + placeholder: '数据集路径', + }, +}; +export const featureSelect: Variable = { + name: 'feature', + value: {}, + typed_value: {}, + value_type: VariableValueType.OBJECT, + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: { + component: VariableComponent.FeatureSelect, + placeholder: '特征选择器', + }, +}; + +export const envsInput: Variable = { + name: 'kv_list', + value: [ + { key: 'key1', value: 'value1' }, + { key: 'key2', value: 'value2' }, + ], + typed_value: [ + { name: 'n1', value: 'v1' }, + { name: 'n2', value: 'v2' }, + ], + value_type: VariableValueType.LIST, + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: { + component: VariableComponent.EnvsInput, + placeholder: 'envsInput', + }, +}; + +export const stringInput: Variable = { + name: 'string_input', + value: 'initial value', + typed_value: 'initial value', + value_type: VariableValueType.STRING, + access_mode: VariableAccessMode.PEER_READABLE, + widget_schema: { + component: VariableComponent.Input, + tooltip: 'some hints', + required: true, + }, +}; +export const numberInput: Variable = { + tag: Tag.OPERATING_PARAM, + name: 'number_input', + value: '1', + typed_value: 1, + value_type: VariableValueType.NUMBER, + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: { + component: VariableComponent.NumberPicker, + type: 'number', + min: 1, + max: 100, + }, +}; + +export const objectInput: Variable = { + tag: Tag.INPUT_PATH, + name: 'object_input', + value: JSON.stringify({ a: 1 }), + typed_value: { a: 1 }, + value_type: VariableValueType.OBJECT, + access_mode: VariableAccessMode.PEER_READABLE, + widget_schema: { + component: VariableComponent.Input, + tooltip: 'some hints', + }, +}; +export const listInput: Variable = { + tag: Tag.SYSTEM_PARAM, + name: 'list_input', + value: JSON.stringify([{ a: 1 }]), + typed_value: [{ a: 1 }], + value_type: VariableValueType.LIST, + access_mode: VariableAccessMode.PEER_READABLE, + widget_schema: { + component: VariableComponent.Input, + tooltip: 'some hints', + }, +}; +export const forceObjectInput: Variable = { + name: 'force_object_input', + value: { a: 1 }, + typed_value: { a: 1 }, + value_type: VariableValueType.OBJECT, + access_mode: VariableAccessMode.PEER_READABLE, + widget_schema: { + component: VariableComponent.Input, + tooltip: 'some hints', + }, +}; +export const forceListInput: Variable = { + name: 'force_list_input', + value: [{ a: 1 }], + typed_value: [{ a: 1 }], + value_type: VariableValueType.LIST, + access_mode: VariableAccessMode.PEER_READABLE, + widget_schema: { + component: VariableComponent.Input, + tooltip: 'some hints', + }, +}; + +export const hideStringInput: Variable = { + tag: Tag.INPUT_PATH, + name: 'hide_string_input', + value: 'initial value', + typed_value: 'initial value', + value_type: VariableValueType.STRING, + access_mode: VariableAccessMode.PEER_READABLE, + widget_schema: { + component: VariableComponent.Input, + tooltip: 'some hints', + hidden: true, + }, +}; diff --git a/web_console_v2/client/src/services/mocks/v2/versions/index.ts b/web_console_v2/client/src/services/mocks/v2/versions/index.ts new file mode 100644 index 000000000..f994c3693 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/versions/index.ts @@ -0,0 +1,12 @@ +const get = { + data: { + data: 
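+    // Mocked payload of GET /v2/versions: a git revision hash, optional branch
+    // name, version string and publish date (the shape fetchSystemVersion in
+    // services/system.ts expects).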
{ + revision: '71ea30e1906e1fcab8ccd74ccc5fffbee40a9ea1', + branch_name: null, + version: '2.1.9.10', + pub_date: '2021-09-18 20:43:17', + }, + }, + status: 200, +}; +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/workflow_templates/:id/index.ts b/web_console_v2/client/src/services/mocks/v2/workflow_templates/:id/index.ts deleted file mode 100644 index b8e8aaf8d..000000000 --- a/web_console_v2/client/src/services/mocks/v2/workflow_templates/:id/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { AxiosRequestConfig } from 'axios'; -import { normalTemplate, complexDepsTemplate, xShapeTemplate } from '../examples'; - -const get = (config: AxiosRequestConfig) => { - const rets: Record<ID, any> = { - 1: normalTemplate, - 2: complexDepsTemplate, - 3: xShapeTemplate, - }; - - return { - data: { data: rets[config._id!] }, - status: 200, - }; -}; - -export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/workflow_templates/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/workflow_templates/__id__/index.ts new file mode 100644 index 000000000..e833269d8 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/workflow_templates/__id__/index.ts @@ -0,0 +1,27 @@ +import { AxiosRequestConfig } from 'axios'; +import { + normalTemplate, + complexDepsTemplate, + xShapeTemplate, + localTpl, + withTypedValueTemplate, + noTypedValueTemplate, +} from '../examples'; + +const get = (config: AxiosRequestConfig) => { + const rets: Record<ID, any> = { + 1: normalTemplate, + 2: complexDepsTemplate, + 3: xShapeTemplate, + 4: localTpl, + 5: withTypedValueTemplate, + 6: noTypedValueTemplate, + }; + + return { + data: { data: rets[config._id!] }, + status: 200, + }; +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/workflow_templates/examples.ts b/web_console_v2/client/src/services/mocks/v2/workflow_templates/examples.ts index 137200123..7cfd453be 100644 --- a/web_console_v2/client/src/services/mocks/v2/workflow_templates/examples.ts +++ b/web_console_v2/client/src/services/mocks/v2/workflow_templates/examples.ts @@ -2,13 +2,12 @@ import { WorkflowTemplate } from 'typings/workflow'; import { JobType } from 'typings/job'; import { VariableAccessMode, VariableComponent, VariableValueType } from 'typings/variable'; import { DeepPartial } from 'utility-types'; -import { gloabalVariables } from '../variables/examples'; +import { gloabalVariables, stringInput, objectInput, listInput } from '../variables/examples'; export const normalTemplate: DeepPartial<WorkflowTemplate> = { - id: 2, + id: 1, name: 'Test template', group_alias: 'foo group', - is_left: true, config: { group_alias: 'foo group', variables: gloabalVariables, @@ -192,10 +191,9 @@ export const normalTemplate: DeepPartial<WorkflowTemplate> = { }; export const complexDepsTemplate: DeepPartial<WorkflowTemplate> = { - id: 10, + id: 2, name: 'Complex deps template', group_alias: 'c-group', - is_left: true, config: { group_alias: 'c-group', variables: [], @@ -314,10 +312,10 @@ export const complexDepsTemplate: DeepPartial<WorkflowTemplate> = { }; export const xShapeTemplate: DeepPartial<WorkflowTemplate> = { - id: 10, + id: 3, name: 'X Shape template', group_alias: 'x-group', - is_left: true, + is_local: true, config: { group_alias: 'x-group', variables: [], @@ -351,3 +349,119 @@ export const xShapeTemplate: DeepPartial<WorkflowTemplate> = { ], }, }; + +export const localTpl = { + id: 4, + name: 'local template', + comment: 'Comment here', + is_local: true, + 
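+  // Note: is_local supersedes the is_left flag removed above; this fixture
+  // also replaces the deleted simpleTpl in the list mock further down.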
group_alias: 'test-2', + config: { + group_alias: 'test-2', + job_definitions: [ + { + name: 'Initiative', + type: 'RAW_DATA', + is_federated: true, + variables: [ + { + name: 'job_name', + access_mode: 'PEER_WRITABLE', + widget_schema: '{"component":"Input","type":"string","required":true}', + value: '', + }, + ], + dependencies: [], + yaml_template: '', + }, + { + name: 'Raw data upload', + type: 'RAW_DATA', + is_federated: true, + variables: [ + { + name: 'job_name2', + access_mode: 'PEER_WRITABLE', + widget_schema: '{"component":"Input","type":"string"}', + value: '', + }, + { + name: 'comment2', + access_mode: 'PRIVATE', + widget_schema: '{"component":"TextArea","rows":4,"type":"string","required":true}', + value: '', + }, + ], + dependencies: [ + { + source: 'Initiative', + type: 3, + }, + ], + yaml_template: '', + }, + { + name: 'Training', + type: 'RAW_DATA', + is_federated: true, + variables: [ + { + name: 'job_name2', + access_mode: 'PEER_READABLE', + widget_schema: '{"component":"Input","type":"string"}', + value: '', + }, + ], + dependencies: [ + { + source: 'Raw data upload', + type: 'ON_COMPLETE', + }, + ], + yaml_template: '', + }, + ], + }, +}; + +export const withTypedValueTemplate: DeepPartial<WorkflowTemplate> = { + id: 5, + name: 'with typed value template', + group_alias: 'typed value group', + config: { + group_alias: 'typed value group', + variables: gloabalVariables, + job_definitions: [ + { + name: 'Initiative', + job_type: JobType.RAW_DATA, + is_federated: true, + dependencies: [], + variables: [stringInput, objectInput, listInput], + }, + ], + }, +}; + +export const noTypedValueTemplate: DeepPartial<WorkflowTemplate> = { + id: 6, + name: 'no typed value template', + group_alias: 'typed value group', + config: { + group_alias: 'typed value group', + variables: [], + job_definitions: [ + { + name: 'Initiative', + job_type: JobType.RAW_DATA, + is_federated: true, + dependencies: [], + variables: [ + { ...stringInput, typed_value: undefined }, + { ...objectInput, typed_value: undefined }, + { ...listInput, typed_value: undefined }, + ], + }, + ], + }, +}; diff --git a/web_console_v2/client/src/services/mocks/v2/workflow_templates/index.ts b/web_console_v2/client/src/services/mocks/v2/workflow_templates/index.ts index 85497a187..e6a7bb0ec 100644 --- a/web_console_v2/client/src/services/mocks/v2/workflow_templates/index.ts +++ b/web_console_v2/client/src/services/mocks/v2/workflow_templates/index.ts @@ -1,84 +1,11 @@ import { stringifyComplexDictField } from 'shared/formSchema'; -import { normalTemplate } from './examples'; +import { normalTemplate, localTpl } from './examples'; const normalTpl = stringifyComplexDictField(normalTemplate as any); -const simpleTpl = { - id: 1, - name: 'simple', - comment: 'Comment here', - group_alias: 'test-2', - config: { - group_alias: 'test-2', - is_left: true, - job_definitions: [ - { - name: 'Initiative', - type: 'RAW_DATA', - is_federated: true, - variables: [ - { - name: 'job_name', - access_mode: 'PEER_WRITABLE', - widget_schema: '{"component":"Input","type":"string","required":true}', - value: '', - }, - ], - dependencies: [], - yaml_template: '', - }, - { - name: 'Raw data upload', - type: 'RAW_DATA', - is_federated: true, - variables: [ - { - name: 'job_name2', - access_mode: 'PEER_WRITABLE', - widget_schema: '{"component":"Input","type":"string"}', - value: '', - }, - { - name: 'comment2', - access_mode: 'PRIVATE', - widget_schema: '{"component":"TextArea","rows":4,"type":"string","required":true}', - value: '', - }, - ], - 
dependencies: [ - { - source: 'Initiative', - type: 3, - }, - ], - yaml_template: '', - }, - { - name: 'Training', - type: 'RAW_DATA', - is_federated: true, - variables: [ - { - name: 'job_name2', - access_mode: 'PEER_READABLE', - widget_schema: '{"component":"Input","type":"string"}', - value: '', - }, - ], - dependencies: [ - { - source: 'Raw data upload', - type: 'ON_COMPLETE', - }, - ], - yaml_template: '', - }, - ], - }, -}; const get = { data: { - data: [normalTpl, simpleTpl], + data: [normalTpl, localTpl], }, status: 200, }; diff --git a/web_console_v2/client/src/services/mocks/v2/workflows/:id/index.ts b/web_console_v2/client/src/services/mocks/v2/workflows/:id/index.ts deleted file mode 100644 index 9f242b6c6..000000000 --- a/web_console_v2/client/src/services/mocks/v2/workflows/:id/index.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { AxiosRequestConfig } from 'axios'; -import { pendingAcceptAndConfig, withExecutionDetail, completed } from '../examples'; - -const get = (config: AxiosRequestConfig) => { - const rets: Record<ID, any> = { - 1: pendingAcceptAndConfig, - 2: withExecutionDetail, - 3: completed, - }; - - return { - data: { data: rets[config._id!] }, - status: 200, - }; -}; - -export const put = { - data: { data: { success: true } }, - status: 200, -}; - -export const patch = { - data: { data: { success: true } }, - status: 200, -}; - -export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/workflows/__id__/index.ts b/web_console_v2/client/src/services/mocks/v2/workflows/__id__/index.ts new file mode 100644 index 000000000..200002bf9 --- /dev/null +++ b/web_console_v2/client/src/services/mocks/v2/workflows/__id__/index.ts @@ -0,0 +1,27 @@ +import { AxiosRequestConfig } from 'axios'; +import { pendingAcceptAndConfig, newlyCreated, completed } from '../examples'; + +const get = (config: AxiosRequestConfig) => { + const rets: Record<ID, any> = { + 1: pendingAcceptAndConfig, + 2: newlyCreated, + 3: completed, + }; + + return { + data: { data: rets[config._id!] 
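+    // `_id` is assumed to be the `__id__` path segment that the mock request
+    // layer attaches to the axios config, hence the non-null assertion.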
}, + status: 200, + }; +}; + +export const put = { + data: { data: { success: true } }, + status: 200, +}; + +export const patch = { + data: { data: { success: true } }, + status: 200, +}; + +export default get; diff --git a/web_console_v2/client/src/services/mocks/v2/workflows/:id/peer_workflows.ts b/web_console_v2/client/src/services/mocks/v2/workflows/__id__/peer_workflows.ts similarity index 91% rename from web_console_v2/client/src/services/mocks/v2/workflows/:id/peer_workflows.ts rename to web_console_v2/client/src/services/mocks/v2/workflows/__id__/peer_workflows.ts index 0c643cf65..735cfbecc 100644 --- a/web_console_v2/client/src/services/mocks/v2/workflows/:id/peer_workflows.ts +++ b/web_console_v2/client/src/services/mocks/v2/workflows/__id__/peer_workflows.ts @@ -1,4 +1,4 @@ -import { cloneDeep } from 'lodash'; +import { cloneDeep } from 'lodash-es'; import { JobState } from 'typings/job'; import { newlyCreated, withExecutionDetail } from '../examples'; diff --git a/web_console_v2/client/src/services/mocks/v2/workflows/examples.ts b/web_console_v2/client/src/services/mocks/v2/workflows/examples.ts index 06995252d..efb41fd3a 100644 --- a/web_console_v2/client/src/services/mocks/v2/workflows/examples.ts +++ b/web_console_v2/client/src/services/mocks/v2/workflows/examples.ts @@ -1,27 +1,21 @@ -import { cloneDeep, sample } from 'lodash'; +import { cloneDeep, sample } from 'lodash-es'; import { JobExecutionDetalis, JobState, JobType, Pod, PodState } from 'typings/job'; -import { - WorkflowState, - TransactionState, - WorkflowExecutionDetails, - Workflow, -} from 'typings/workflow'; +import { WorkflowState, WorkflowExecutionDetails, Workflow } from 'typings/workflow'; import { VariableAccessMode } from 'typings/variable'; import { normalTemplate } from '../workflow_templates/examples'; const uuid_1 = '9d73398659927'; -export const pendingAcceptAndConfig = { +export const pendingAcceptAndConfig: Workflow = { id: 1, uuid: uuid_1, name: 'Await-configure', project_id: 1, config: null, + is_local: false, forkable: true, + favour: false, comment: null, - state: WorkflowState.NEW, - target_state: WorkflowState.READY, - transaction_state: TransactionState.PARTICIPANT_PREPARE, - transaction_err: null, + state: WorkflowState.PENDING_ACCEPT, created_at: 1610238602, updated_at: 1610238602, }; @@ -32,9 +26,10 @@ export const newlyCreated: Workflow = { uuid: uuid_2, name: 'Newly-created', project_id: 1, + is_local: true, + cron_config: '9 23 * * ?', config: { group_alias: 'test-2', - is_left: true, job_definitions: [ { name: 'Initiative', @@ -98,13 +93,12 @@ export const newlyCreated: Workflow = { ], }, forkable: true, + favour: true, comment: null, - state: WorkflowState.NEW, - target_state: WorkflowState.READY, - transaction_state: TransactionState.COORDINATOR_COMMITTABLE, - transaction_err: null, + state: WorkflowState.PARTICIPANT_CONFIGURING, created_at: 1610239831, updated_at: 1610239831, + template_info: { id: 1, name: 'a-very-complete-template', is_modified: true }, }; const uuid_3 = '7d73398659927'; @@ -119,16 +113,15 @@ export const withExecutionDetail: WorkflowExecutionDetails = { const uuid_4 = '67d7339865992'; -export const completed = { +export const completed: WorkflowExecutionDetails = { ...cloneDeep(withExecutionDetail), id: 3, uuid: uuid_4, name: 'All-completed', config: normalTemplate.config as any, state: WorkflowState.COMPLETED, - target_state: WorkflowState.INVALID, - transaction_state: TransactionState.ABORTED, jobs: _generateJobExecutionDetails(uuid_4), + template_info: { id: 
1, name: 'test_tpl', is_modified: true }, }; function _generateJobExecutionDetails(UUID: string): JobExecutionDetalis[] { diff --git a/web_console_v2/client/src/services/modelCenter.ts b/web_console_v2/client/src/services/modelCenter.ts new file mode 100644 index 000000000..7bb5728d5 --- /dev/null +++ b/web_console_v2/client/src/services/modelCenter.ts @@ -0,0 +1,371 @@ +import request, { BASE_URL } from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { + ModelJob, + ModelSet, + ModelSetCreatePayload, + ModelSetUpdatePayload, + Algorithm, + AlgorithmChangeLog, + FakeAlgorithm, + Model, + ModelJobQueryParams, + ModelJobType, + ModelJobGroup, + ModelJobGroupCreatePayload, + ModelJobGroupUpdatePayload, + ModelJobMetrics, + ModelJobPatchFormData, + PeerModelJobGroupUpdatePayload, + ModelJobQueryParams_new, + ModelJobDefinitionQueryParams, + ModelJobDefinitionResult, + ModelJobTrainCreateForm, + ModelJobGroupCreateForm, +} from 'typings/modelCenter'; +import { formatExtra } from 'shared/modelCenter'; + +export function fetchModelSetList(params?: { keyword?: string }): Promise<{ data: ModelSet[] }> { + return request('/v2/model_groups', { params, removeFalsy: true, snake_case: true }); +} +export function createModelSet(payload: ModelSetCreatePayload): Promise<{ data: ModelSet }> { + return request.post('/v2/model_groups', payload); +} +export function updateModelSet( + id: ID, + payload: ModelSetUpdatePayload, +): Promise<{ data: ModelSet }> { + return request.patch(`/v2/model_groups/${id}`, payload); +} +export function deleteModelSet(id: ID) { + return request.delete(`/v2/model_groups/${id}`); +} + +export function fetchModelList( + project_id: ID, + params?: { + group_id?: ID; + algorithm_type?: 'NN_HORIZONTAL' | 'TREE_VERTICAL' | 'NN_VERTICAL'; + keyword?: string; + }, +): Promise<{ data: Model[] }> { + return request(`/v2/projects/${project_id}/models`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchModelDetail_new(project_id: ID, model_id: ID): Promise<{ data: Model }> { + return request(`/v2/projects/${project_id}/models/${model_id}`); +} + +export function updateModel(project_id: ID, id: ID, payload: Partial<Model>) { + return request.patch(`/v2/projects/${project_id}/models/${id}`, payload); +} +export function deleteModel(project_id: ID, id: ID) { + return request.delete(`/v2/projects/${project_id}/models/${id}`); +} + +export function fetchModelJobList(params?: { + project_id?: ID; + group_id?: ID; + types?: ModelJobType[]; +}): Promise<{ data: ModelJob[] }> { + return request('/v2/model_jobs', { params, removeFalsy: true, snake_case: true }); +} +export function fetchModelJobDetail(id: ID, params?: any): Promise<{ data: ModelJob }> { + return request(`/v2/model_jobs/${id}`, { params }); +} +export function updateModelJob(projectId: ID, id: ID, payload: Partial<ModelJob>) { + return request.patch(`/v2/projects/${projectId}/model_jobs/${id}`, payload); +} +export function deleteModelJob(id: ID) { + return request.delete(`/v2/model_jobs/${id}`); +} +export function stopModelJob(projectId: ID, modelJobId: ID) { + return request.post(`/v2/projects/${projectId}/model_jobs/${modelJobId}:stop`); +} + +export function fetchFavouriteModelList(params?: {}): Promise<{ data: ModelJob[] }> { + return request('/v2/models/favourite', { params, removeFalsy: true, snake_case: true }); +} + +export function fetchEvaluationList(params?: ModelJobQueryParams): Promise<{ data: ModelJob[] }> { + const types: ModelJobType[] = ['TREE_EVALUATION', 
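+    // tree-/NN-specific kinds plus the generic kind all count as evaluations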
'NN_EVALUATION', 'EVALUATION']; + + return request('/v2/model_jobs', { + params: { + ...params, + types, + }, + removeFalsy: true, + snake_case: true, + }).then((resp) => { + resp.data = resp.data.map((item: ModelJob) => { + item.workflow = formatExtra(item.workflow); + return formatExtra(item); + }); + return resp; + }); +} + +export function fetchCompareModelReportList(params?: {}): Promise<{ data: ModelSet[] }> { + return request('/v2/model_groups', { params, removeFalsy: true, snake_case: true }).then( + (resp) => { + resp.data = resp.data.map((item: ModelSet) => { + return formatExtra(item); + }); + return resp; + }, + ); +} +export function fetchCompareModelReportDetail(id: ID) { + return request(`/v2/model_groups/${id}`); +} + +export function fetchOfflinePredictionList( + params?: ModelJobQueryParams, +): Promise<{ data: ModelJob[] }> { + const types: ModelJobType[] = ['TREE_PREDICTION', 'NN_PREDICTION', 'PREDICTION']; + + return request('/v2/model_jobs', { + params: { + ...params, + types, + }, + removeFalsy: true, + snake_case: true, + }).then((resp) => { + resp.data = resp.data.map((item: ModelJob) => { + item.workflow = formatExtra(item.workflow); + return formatExtra(item); + }); + return resp; + }); +} + +export function fetchMyAlgorithm(params?: { keyword?: string }): Promise<{ data: Algorithm[] }> { + return request('/v2/algorithm', { params, removeFalsy: true, snake_case: true }); +} +export function fetchFakeAlgorithmList(params?: { + keyword?: string; +}): Promise<{ data: FakeAlgorithm[] }> { + return request('/v2/fake_algorithms', { params, removeFalsy: true, snake_case: true }); +} +export function fetchBuiltInAlgorithm(params?: { + keyword?: string; +}): Promise<{ data: Algorithm[] }> { + return request('/v2/algorithm/built-in', { params, removeFalsy: true, snake_case: true }); +} +export function fetchMyAlgorithmDetail(id: ID, params?: any): Promise<{ data: Algorithm }> { + return request(`/v2/algorithm/${id}`, { params }); +} + +export function fetchAlgorithmChangeLog( + id: ID, + params?: any, +): Promise<{ data: AlgorithmChangeLog[] }> { + return request(`/v2/algorithm/change_log/${id}`, { params }); +} + +export function getModelJobDownloadHref(projectId: ID, modelJobId: ID): string { + return `/v2/projects/${projectId}/model_jobs/${modelJobId}/results`; +} +export function getFullModelJobDownloadHref(projectId: ID, modelJobId: ID): string { + return `${window.location.origin}${BASE_URL}/v2/projects/${projectId}/model_jobs/${modelJobId}/results`; +} + +export function fetchModelJobGroupList( + projectId: ID, + params?: { + keyword?: string; + page?: number; + pageSize?: number; + filter?: string; + configured?: boolean; + }, +): APIResponse<ModelJobGroup[]> { + return request(`/v2/projects/${projectId}/model_job_groups`, { + params, + removeFalsy: true, + snake_case: true, + }); +} +export function fetchModelJobGroupDetail( + projectId: ID, + modelJobGroupId: ID, +): Promise<{ data: ModelJobGroup }> { + return request(`/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}`); +} +export function fetchPeerModelJobGroupDetail( + projectId: ID, + modelJobGroupId: ID, + participantId: ID, +): Promise<{ data: ModelJobGroup }> { + return request( + `/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}/peers/${participantId}`, + ); +} + +export function createModelJobGroup( + projectId: ID, + payload: ModelJobGroupCreatePayload, +): Promise<{ data: ModelJobGroup }> { + return request.post(`/v2/projects/${projectId}/model_job_groups`, payload); +} + 
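+// Usage sketch (illustrative only; assumes ModelJobGroup exposes an `id` and
+// that the payload satisfies ModelJobGroupCreatePayload): create a group,
+// then read its detail back through the same service layer.
+export async function createThenFetchModelJobGroup(
+  projectId: ID,
+  payload: ModelJobGroupCreatePayload,
+): Promise<ModelJobGroup> {
+  const { data: created } = await createModelJobGroup(projectId, payload);
+  const { data: detail } = await fetchModelJobGroupDetail(projectId, created.id);
+  return detail;
+}
+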
+export function updateModelJobGroup( + projectId: ID, + modelJobGroupId: ID, + payload: ModelJobGroupUpdatePayload, +): Promise<{ data: ModelJobGroup }> { + return request.put(`/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}`, payload); +} + +export function deleteModelJobGroup( + projectId: ID, + modelJobGroupId: ID, + payload?: any, +): Promise<{ data: ModelJobGroup }> { + return request.delete(`/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}`, payload); +} + +export function updatePeerModelJobGroup( + projectId: ID, + modelJobGroupId: ID, + participantId: ID, + payload: PeerModelJobGroupUpdatePayload, +): Promise<{ data: ModelJobGroup }> { + return request.patch( + `/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}/peers/${participantId}`, + payload, + ); +} + +export function launchModelJobGroup( + projectId: ID, + modelJobGroupId: ID, + payload: any = {}, // For auto set request header 'Content-Type': 'application/json' +): Promise<{ data: ModelJobGroup }> { + return request.post( + `/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}:launch`, + payload, + ); +} + +export function authorizeModelJobGroup( + projectId: ID, + modelJobGroupId: ID, + authorized: boolean, +): Promise<any> { + return request.put(`/v2/projects/${projectId}/model_job_groups/${modelJobGroupId}`, { + authorized, + }); +} + +export function fetchModelJobDefinition( + params: ModelJobDefinitionQueryParams, +): Promise<{ data: ModelJobDefinitionResult }> { + return request(`/v2/model_job_definitions`, { params }); +} + +/** + * + * new service functions + * + */ + +export function fetchModelJobList_new( + project_id: ID, + params: ModelJobQueryParams_new, +): APIResponse<ModelJob[]> { + return request(`/v2/projects/${project_id}/model_jobs`, { params }); +} + +export function fetchModelJob_new(project_id: ID, job_id: ID): Promise<{ data: ModelJob }> { + return request(`/v2/projects/${project_id}/model_jobs/${job_id}`); +} + +export function fetchModelJobDetail_new( + project_id: ID, + model_job_id: ID, + params?: any, +): Promise<{ data: ModelJob }> { + return request(`/v2/projects/${project_id}/model_jobs/${model_job_id}`, { params }); +} +export function fetchModelJobMetrics_new( + project_id: ID, + model_job_id: ID, + params?: any, +): Promise<{ data: ModelJobMetrics }> { + return request(`/v2/projects/${project_id}/model_jobs/${model_job_id}/metrics`, { params }); +} +export function fetchPeerModelJobDetail_new( + project_id: ID, + model_job_id: ID, + participant_id: ID, +): Promise<{ data: ModelJob }> { + return request(`/v2/projects/${project_id}/model_jobs/${model_job_id}/peers/${participant_id}`); +} +export function fetchPeerModelJobMetrics_new( + project_id: ID, + model_job_id: ID, + participant_id: ID, +): Promise<{ data: ModelJobMetrics }> { + return request( + `/v2/projects/${project_id}/model_jobs/${model_job_id}/peers/${participant_id}/metrics`, + ); +} + +export function createModelJob_new( + project_id: ID, + data: ModelJobPatchFormData, +): Promise<{ data: ModelJob }> { + return request.post(`/v2/projects/${project_id}/model_jobs`, data); +} + +export function updateModelJob_new(project_id: ID, job_id: ID, data: ModelJobPatchFormData) { + return request.put(`/v2/projects/${project_id}/model_jobs/${job_id}`, data); +} + +export function stopJob_new(project_id: ID, job_id: ID): Promise<any> { + return request.post(`/v2/projects/${project_id}/model_jobs/${job_id}:stop`); +} + +export function deleteJob_new(project_id: ID, job_id: ID): Promise<any> { 
+ return request.delete(`/v2/projects/${project_id}/model_jobs/${job_id}`); +} + +export function fetchModelJobMetries_new(project_id: ID, job_id: ID) { + return request(`/v2/projects/${project_id}/model_jobs/${job_id}/metrics`); +} + +export function fetchModelJobResult_new(project_id: ID, job_id: ID) { + return request(`/v2/projects/${project_id}/model_jobs/${job_id}/result`); +} + +// 中心化 + +export function createModeJobGroupV2(project_id: ID, data: ModelJobGroupCreateForm) { + return request.post(`/v2/projects/${project_id}/model_job_groups_v2`, data); +} +export function createModelJob( + project_id: ID, + data: ModelJobTrainCreateForm, +): Promise<{ data: ModelJob }> { + return request.post(`/v2/projects/${project_id}/model_jobs`, data); +} + +export function stopAutoUpdateModelJob(project_id: ID, model_group_id: ID) { + return request.post( + `/v2/projects/${project_id}/model_job_groups/${model_group_id}:stop_auto_update`, + {}, // For auto set request header 'Content-Type': 'application/json' + ); +} + +export function fetchAutoUpdateModelJobDetail(project_id: ID, model_group_id: ID) { + return request( + `/v2/projects/${project_id}/model_job_groups/${model_group_id}/next_auto_update_model_job`, + ); +} diff --git a/web_console_v2/client/src/services/modelServing.ts b/web_console_v2/client/src/services/modelServing.ts new file mode 100644 index 000000000..9a341f3c9 --- /dev/null +++ b/web_console_v2/client/src/services/modelServing.ts @@ -0,0 +1,109 @@ +import request from 'libs/request'; + +import { APIResponse, PageQueryParams } from 'typings/app'; + +import { ModelServing, ModelServingInstance, ModelServingQueryParams } from 'typings/modelServing'; + +export function fetchModelServingList( + params?: ModelServingQueryParams, +): APIResponse<ModelServing[]> { + return request('/v2/serving_services', { params, removeFalsy: true, snake_case: true }); +} +export function fetchModelServingDetail( + modelServingId: ID, + params?: ModelServingQueryParams, +): APIResponse<ModelServing> { + return request(`/v2/serving_services/${modelServingId}`, { params }); +} + +export function fetchModelServingInstanceList( + modelServingId: ID, + params: PageQueryParams, +): APIResponse<ModelServingInstance[]> { + return request(`/v2/serving_services/${modelServingId}/instances`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function fetchModelServingInstanceLog( + modelServingId: ID, + instanceName: string, + params?: { tail_lines: number }, +): APIResponse<string[]> { + return request(`/v2/serving_services/${modelServingId}/instances/${instanceName}/log`, { + params, + removeFalsy: true, + snake_case: true, + }); +} +export function createModelServing(modelId: ID, payload: any): APIResponse<ModelServing> { + return request.post(`/v2/models/${modelId}/serving_services`, payload); +} +export function updateModelServing(modelServingId: ID, payload: any): APIResponse<ModelServing> { + return request.patch(`/v2/serving_services/${modelServingId}`, payload); +} +export function deleteModelServing(modelServingId: ID) { + return request.delete(`/v2/serving_services/${modelServingId}`); +} + +/** + * + * new service functions + * old and new service functions will coexist for a period of time + * + */ + +export function fetchModelServingList_new( + projectId: ID, + params?: ModelServingQueryParams, +): APIResponse<ModelServing[]> { + return request(`/v2/projects/${projectId}/serving_services`, { + params, + removeFalsy: true, + snake_case: true, + }); +} + +export function 
fetchModelServingDetail_new( + projectId: ID, + modelServingId: ID, + params?: ModelServingQueryParams, +): APIResponse<ModelServing> { + return request(`/v2/projects/${projectId}/serving_services/${modelServingId}`, { params }); +} + +export function createModelServing_new(projectId: ID, payload: any): APIResponse<ModelServing> { + return request.post(`/v2/projects/${projectId}/serving_services`, payload); +} + +export function updateModelServing_new( + projectId: ID, + modelServingId: ID, + payload: any, +): APIResponse<ModelServing> { + return request.patch(`/v2/projects/${projectId}/serving_services/${modelServingId}`, payload); +} +export function deleteModelServing_new(projectId: ID, modelServingId: ID) { + return request.delete(`/v2/projects/${projectId}/serving_services/${modelServingId}`); +} + +export function fetchModelServingInstanceLog_new( + projectId: ID, + modelServingId: ID, + instanceName: string, + params?: { tail_lines: number }, +): APIResponse<string[]> { + return request( + `/v2/projects/${projectId}/serving_services/${modelServingId}/instances/${instanceName}/log`, + { + params, + removeFalsy: true, + snake_case: true, + }, + ); +} +export function fetchUserTypeInfo(projectId: ID): APIResponse { + return request(`/v2/projects/${projectId}/serving_services/remote_platforms`); +} diff --git a/web_console_v2/client/src/services/operation.ts b/web_console_v2/client/src/services/operation.ts new file mode 100644 index 000000000..f8586cfaa --- /dev/null +++ b/web_console_v2/client/src/services/operation.ts @@ -0,0 +1,26 @@ +import request from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { JobInfo, JobGroupFetchPayload, JobItem, Dashboard } from 'typings/operation'; +import { DatasetForceState } from '../typings/dataset'; + +export function fetchOperationList(payload: Partial<JobGroupFetchPayload>): APIResponse<JobItem[]> { + return request.post('/v2/e2e_jobs:initiate', payload); +} + +export function fetchOperationDetail(params?: { job_name: string }): Promise<{ data: JobInfo }> { + return request(`/v2/e2e_jobs/${params?.job_name}`); +} + +export function fetchDashboardList(): Promise<{ data: Dashboard[] }> { + return request('/v2/dashboards'); +} + +export function datasetFix(params: { + datasetId: ID; + force?: DatasetForceState; +}): Promise<{ data: any }> { + const { datasetId, force } = params; + return request.post(`v2/datasets/${datasetId}:state_fix`, { + force, + }); +} diff --git a/web_console_v2/client/src/services/participant.ts b/web_console_v2/client/src/services/participant.ts new file mode 100644 index 000000000..2c23686d6 --- /dev/null +++ b/web_console_v2/client/src/services/participant.ts @@ -0,0 +1,46 @@ +import request from 'libs/request'; +import { + CreateParticipantPayload, + Participant, + UpdateParticipantPayload, + Version, + DomainName, +} from 'typings/participant'; +import { APIResponse } from 'typings/app'; + +export function fetchParticipants(): Promise<{ data: Participant[] }> { + return request.get('/v2/participants'); +} + +export function createParticipant(payload: CreateParticipantPayload): Promise<Participant> { + return request.post('/v2/participants', payload); +} + +export function updateParticipant( + id: ID, + payload: UpdateParticipantPayload, +): Promise<{ data: Participant }> { + return request.patch(`/v2/participants/${id}`, payload); +} + +export function getParticipantDetailById(id: ID): Promise<{ data: Participant }> { + return request.get(`/v2/participants/${id}`); +} + +export function 
checkParticipantConnection( + id: ID, +): Promise<{ data: { success: boolean; message: string; application_version: Version } }> { + return request.get(`/v2/participants/${id}/connection_checks`); +} + +export function getParticipantByProjectId(id: ID): Promise<{ data: Participant[] }> { + return request.get(`/v2/projects/${id}/participants`); +} + +export function deleteParticipant(id: ID): Promise<{ id: ID }> { + return request.delete(`/v2/participants/${id}`); +} + +export function fetchDomainNameList(): APIResponse<DomainName[]> { + return request.get('/v2/participant_candidates'); +} diff --git a/web_console_v2/client/src/services/project.ts b/web_console_v2/client/src/services/project.ts index 5d9d243e0..94c3cc5ae 100644 --- a/web_console_v2/client/src/services/project.ts +++ b/web_console_v2/client/src/services/project.ts @@ -1,5 +1,12 @@ import request from 'libs/request'; -import { CreateProjectPayload, Project, UpdateProjectPayload } from 'typings/project'; +import { APIResponse } from 'typings/app'; +import { + CreateProjectPayload, + Project, + UpdateProjectPayload, + FetchPendingProjectsPayload, + CreatePendingProjectPayload, +} from 'typings/project'; export function fetchProjectList(): Promise<{ data: Project[] }> { return request('/v2/projects', { @@ -18,6 +25,36 @@ export function updateProject(id: ID, data: UpdateProjectPayload): Promise<Proje return request.patch(`/v2/projects/${id}`, data); } +export function deleteProject(id: ID): Promise<APIResponse> { + return request.delete(`/v2/projects/${id}`); +} + export function checkConnection(id: ID): Promise<{ data: { success: boolean } }> { - return request.post(`/v2/projects/${id}/connection_checks`); + return request(`/v2/projects/${id}/connection_checks`); +} + +export function fetchPendingProjectList( + params?: FetchPendingProjectsPayload, +): Promise<{ data: Project[] }> { + return request('/v2/pending_projects', { params }); +} + +export function createPendingProject(data: CreatePendingProjectPayload): Promise<Project> { + return request.post('/v2/pending_projects', data); +} + +export function getPendingProjectDetailById(id: ID): Promise<{ data: Project }> { + return request(`/v2/pending_projects/${id}`); +} +// todo: support edit pendingProject +export function updatePendingProject(id: ID, data: UpdateProjectPayload): Promise<Project> { + return request.patch(`/v2/pending_projects/${id}`, data); +} + +export function authorizePendingProject(id: ID, params: { state: string }) { + return request.patch(`/v2/pending_project/${id}`, params); +} + +export function deletePendingProject(id: ID): Promise<APIResponse> { + return request.delete(`/v2/pending_project/${id}`); } diff --git a/web_console_v2/client/src/services/settings.ts b/web_console_v2/client/src/services/settings.ts index f1c26ea50..df585c14e 100644 --- a/web_console_v2/client/src/services/settings.ts +++ b/web_console_v2/client/src/services/settings.ts @@ -1,10 +1,27 @@ import request from 'libs/request'; -import { SettingOptions } from 'typings/settings'; +import { SettingOptions, SettingInfo, SystemInfo, SettingVariables } from 'typings/settings'; -export function fetchSettings(): Promise<{ data: SettingOptions }> { - return request('/v2/settings'); +export function fetchSettingsImage(): Promise<{ data: SettingInfo }> { + return request('/v2/settings/webconsole_image'); } -export function updateSettings(payload: SettingOptions): Promise<{ data: any }> { - return request.patch('/v2/settings', payload); +export function fetchSettingVariables(): Promise<{ 
data: SettingVariables }> { + return request('/v2/settings/system_variables'); +} +export function updateSettingVariables( + payload: SettingVariables, +): Promise<{ data: SettingVariables }> { + return request.post('/v2/settings:update_system_variables', payload); +} + +export function updateImage(payload: SettingOptions): Promise<{ data: SettingOptions }> { + return request.post('/v2/settings:update_image', payload); +} + +export function fetchSysEmailGroup(): Promise<{ data: SettingInfo }> { + return request('/v2/settings/sys_email_group'); +} + +export function fetchSysInfo(): Promise<{ data: SystemInfo }> { + return request('/v2/settings/system_info'); } diff --git a/web_console_v2/client/src/services/system.ts b/web_console_v2/client/src/services/system.ts index 39425c934..9f4420a02 100644 --- a/web_console_v2/client/src/services/system.ts +++ b/web_console_v2/client/src/services/system.ts @@ -7,3 +7,9 @@ export function fetchPodNameList(): Promise<{ data: string[] }> { export function fetchSystemLogs(tailLines: number, podName: string): Promise<{ data: string[] }> { return request(`/v2/system_pods/${podName}/logs`, { params: { tailLines }, snake_case: true }); } + +export function fetchSystemVersion(): Promise<{ + data: { version?: string; revision: string; pub_date: string }; +}> { + return request('/v2/versions'); +} diff --git a/web_console_v2/client/src/services/trustedCenter.ts b/web_console_v2/client/src/services/trustedCenter.ts new file mode 100644 index 000000000..5fcbeea31 --- /dev/null +++ b/web_console_v2/client/src/services/trustedCenter.ts @@ -0,0 +1,80 @@ +import request from 'libs/request'; +import { APIResponse } from 'typings/app'; +import { + AuthStatus, + NotificationItem, + TrustedJob, + TrustedJobGroup, + TrustedJobGroupItem, + TrustedJobGroupPayload, + TrustedJobListItem, + TrustedJobParamType, +} from 'typings/trustedCenter'; + +export function fetchTrustedJobGroupList( + projectId: ID, + params?: { + filter?: string; + page?: number; + pageSize?: number; + }, +): APIResponse<TrustedJobGroupItem[]> { + return request(`/v2/projects/${projectId}/trusted_job_groups`, { params }); +} + +export function fetchTrustedJobGroupById( + projectId: ID, + id: ID, +): Promise<{ data: TrustedJobGroup }> { + return request(`/v2/projects/${projectId}/trusted_job_groups/${id}`); +} + +export function createTrustedJobGroup(projectId: ID, payload: TrustedJobGroupPayload) { + return request.post(`/v2/projects/${projectId}/trusted_job_groups`, payload); +} + +export function updateTrustedJobGroup(projectId: ID, id: ID, payload: TrustedJobGroupPayload) { + return request.put(`/v2/projects/${projectId}/trusted_job_groups/${id}`, payload); +} + +export function deleteTrustedJobGroup(projectId: ID, id: ID) { + return request.delete(`/v2/projects/${projectId}/trusted_job_groups/${id}`); +} + +export function launchTrustedJobGroup(projectId: ID, id: ID, payload: { comment: string }) { + return request.post(`/v2/projects/${projectId}/trusted_job_groups/${id}:launch`, payload); +} + +export function fetchTrustedJobList( + projectId: ID, + params: { + trusted_job_group_id: ID; + type?: TrustedJobParamType; + }, +): APIResponse<TrustedJobListItem[]> { + return request(`/v2/projects/${projectId}/trusted_jobs`, { params }); +} + +export function fetchTrustedJob(projectId: ID, id: ID): Promise<{ data: TrustedJob }> { + return request(`/v2/projects/${projectId}/trusted_jobs/${id}`); +} + +export function updateTrustedJob( + projectId: ID, + id: ID, + payload: { comment: string; auth_status?: 
AuthStatus }, +) { + return request.put(`/v2/projects/${projectId}/trusted_jobs/${id}`, payload); +} + +export function exportTrustedJobResult(projectId: ID, id: ID) { + return request.post(`/v2/projects/${projectId}/trusted_jobs/${id}:export`); +} + +export function stopTrustedJob(projectId: ID, id: ID) { + return request.post(`/v2/projects/${projectId}/trusted_jobs/${id}:stop`); +} + +export function fetchTrustedNotifications(projectId: ID): APIResponse<NotificationItem[]> { + return request(`/v2/projects/${projectId}/trusted_notifications`); +} diff --git a/web_console_v2/client/src/services/user.ts b/web_console_v2/client/src/services/user.ts index 33b7a3a18..585fe6991 100644 --- a/web_console_v2/client/src/services/user.ts +++ b/web_console_v2/client/src/services/user.ts @@ -1,18 +1,19 @@ import request from 'libs/request'; -import { FedLoginFormData, FedUserInfo } from 'typings/auth'; +import { FedLoginFormData, FedUserInfo, FedLoginWay, FedLoginQueryParamsData } from 'typings/auth'; -export function fetchUserInfo(id: string): Promise<{ data: FedUserInfo }> { +export function fetchUserInfo(id: ID): Promise<{ data: FedUserInfo }> { return request(`/v2/auth/users/${id}`); } export function login( payload: FedLoginFormData, + queryParam: FedLoginQueryParamsData = {}, ): Promise<{ data: { user: FedUserInfo; access_token: string } }> { - return request.post('/v2/auth/signin', payload); -} - -export function addNewUser(payload: FedUserInfo): Promise<{ data: { user: FedUserInfo } }> { - return request.post('/v2/auth/users/', payload); + return request.post('/v2/auth/signin', payload, { + params: { + ...queryParam, + }, + }); } export function getAllUsers(): Promise<{ data: FedUserInfo[] }> { @@ -34,3 +35,11 @@ export function createNewUser(payload: FedUserInfo): Promise<{ data: FedUserInfo export function logout() { return request.delete('/v2/auth/signin'); } + +export function fetchLoginWayList(): Promise<{ data: FedLoginWay[] }> { + return request(`/v2/auth/sso_infos`); +} + +export function getMyUserInfo(): Promise<{ data: FedUserInfo }> { + return request(`/v2/auth/self`); +} diff --git a/web_console_v2/client/src/services/workflow.ts b/web_console_v2/client/src/services/workflow.ts index 4ff8653e6..1041d2669 100644 --- a/web_console_v2/client/src/services/workflow.ts +++ b/web_console_v2/client/src/services/workflow.ts @@ -1,20 +1,30 @@ import { KibanaQueryParams, KiabanaMetrics } from './../typings/kibana'; import request from 'libs/request'; +import { APIResponse } from 'typings/app'; import { WorkflowForkPayload, WorkflowInitiatePayload, WorkflowTemplate, WorkflowAcceptPayload, Workflow, - WorkflowState, WorkflowExecutionDetails, WorkflowTemplatePayload, + WorkflowStateFilterParamType, + TemplateRevision, + RevisionDetail, + RevisionPayload, } from 'typings/workflow'; +import { JobExecutionDetalis } from '../typings/job'; +import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; +import { Job } from 'typings/job'; + +const specialProjectId = 0; export function fetchWorkflowTemplateList(params?: { - isLeft?: boolean; - groupAlias?: string; -}): Promise<{ data: WorkflowTemplate[] }> { + page?: number; + pageSize?: number; + filter?: string; +}): APIResponse<WorkflowTemplate[]> { return request('/v2/workflow_templates', { params, removeFalsy: true, @@ -22,10 +32,41 @@ export function fetchWorkflowTemplateList(params?: { }); } +export function fetchDataJoinTemplates(): Promise<{ data: WorkflowTemplate[] }> { + return request('/v2/workflow_templates?from=preset_datajoin'); 
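+  // `from=preset_datajoin` presumably asks the backend for its built-in
+  // data-join presets; the query value is taken as-is from the endpoint.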
+} + export function fetchTemplateById(id: ID): Promise<{ data: WorkflowTemplate }> { return request(`/v2/workflow_templates/${id}`); } +export function fetchRevisionList(id: ID): APIResponse<TemplateRevision[]> { + return request(`/v2/workflow_templates/${id}/workflow_template_revisions`); +} + +export function patchRevisionComment(id: ID, payload: RevisionPayload) { + return request.patch(`/v2/workflow_template_revisions/${id}`, payload); +} + +export function fetchWorkflowListByRevisionId( + project: ID, + params?: { + template_revision_id?: number; + }, +): APIResponse<Workflow[]> { + return request(`/v2/projects/${project}/workflows`, { + params, + }); +} + +export function fetchRevisionDetail(revision_id: ID): Promise<{ data: RevisionDetail }> { + return request(`/v2/workflow_template_revisions/${revision_id}`); +} + +export function deleteRevision(revision_id: ID) { + return request.delete(`/v2/workflow_template_revisions/${revision_id}`); +} + export function getTemplateDownloadHref(id: ID): string { return `/v2/workflow_templates/${id}?download=true`; } @@ -38,6 +79,10 @@ export function updateWorkflowTemplate(id: ID, payload: WorkflowTemplatePayload) return request.put(`/v2/workflow_templates/${id}`, payload); } +export function createTemplateRevision(id: ID) { + return request.post(`/v2/workflow_templates/${id}:create_revision`); +} + export function deleteTemplate(id: ID) { return request.delete(`/v2/workflow_templates/${id}`); } @@ -45,58 +90,84 @@ export function deleteTemplate(id: ID) { export function fetchWorkflowList(params?: { project?: ID; keyword?: string; -}): Promise<{ data: Workflow[] }> { - return request('/v2/workflows', { + states?: WorkflowStateFilterParamType[]; + state?: 'prepare'; + name?: string; + uuid?: ID; + page?: number; + pageSize?: number; + filter?: string; +}): APIResponse<Workflow[]> { + return request(`/v2/projects/${params?.project ?? specialProjectId}/workflows`, { params, removeFalsy: true, snake_case: true, }); } +export const PEER_WORKFLOW_DETAIL_QUERY_KEY = 'getPeerWorkflow'; export function getPeerWorkflowsConfig( - id: string | number, + id: ID, + projectId?: ID, ): Promise<{ data: Record<string, WorkflowExecutionDetails> }> { - return request(`/v2/workflows/${id}/peer_workflows`); + return request(`/v2/projects/${projectId ?? specialProjectId}/workflows/${id}/peer_workflows`); +} +export async function getPeerWorkflow(id: ID, projectId?: ID) { + const res = await getPeerWorkflowsConfig(id, projectId); + const anyPeerWorkflow = Object.values(res.data).find((item) => !!item.uuid)!; + + return anyPeerWorkflow; } export function getWorkflowDetailById( - id: string | number, + id: ID, + projectId?: ID, ): Promise<{ data: WorkflowExecutionDetails }> { - return request(`/v2/workflows/${id}`); + return request(`/v2/projects/${projectId ?? specialProjectId}/workflows/${id}`); +} +export function getWorkflowDownloadHref(id: ID, projectId?: ID): string { + return `/v2/projects/${projectId ?? 
specialProjectId}/workflows/${id}?download=true`; } -export function initiateAWorkflow(payload: WorkflowInitiatePayload) { - return request.post('/v2/workflows', payload); +export function initiateAWorkflow( + payload: WorkflowInitiatePayload<Job | JobNodeRawData>, + projectId: ID, +): any { + return request.post(`/v2/projects/${projectId}/workflows`, payload); } -export function acceptNFillTheWorkflowConfig(id: ID, payload: WorkflowAcceptPayload) { - return request.put(`/v2/workflows/${id}`, payload); +export function acceptNFillTheWorkflowConfig( + id: ID, + payload: WorkflowAcceptPayload<any>, + projectId: ID, +) { + return request.put(`/v2/projects/${projectId}/workflows/${id}`, payload); } -export function patchWorkflow(id: ID, payload: Partial<Workflow>) { - return request.patch(`/v2/workflows/${id}`, payload); +export function patchWorkflow(id: ID, payload: Partial<Workflow>, projectId: ID) { + return request.patch(`/v2/projects/${projectId}/workflows/${id}`, payload); } -export function runTheWorkflow(id: ID) { - return request.patch(`/v2/workflows/${id}`, { - target_state: WorkflowState.RUNNING, - }); +export function runTheWorkflow(id: ID, projectId: ID) { + return request.post(`/v2/projects/${projectId}/workflows/${id}:start`); } -export function stopTheWorkflow(id: ID) { - return request.patch(`/v2/workflows/${id}`, { - target_state: WorkflowState.STOPPED, - }); +export function stopTheWorkflow(id: ID, projectId: ID) { + return request.post(`/v2/projects/${projectId}/workflows/${id}:stop`); } -export function invalidTheWorkflow(id: ID) { - return request.patch(`/v2/workflows/${id}`, { - state: WorkflowState.INVALID, - }); +export function invalidTheWorkflow(id: ID, projectId: ID) { + return request.post(`/v2/projects/${projectId}/workflows/${id}:invalidate`); } -export function forkTheWorkflow(payload: WorkflowForkPayload) { - return request.post(`/v2/workflows`, payload); +export function forkTheWorkflow(payload: WorkflowForkPayload, projectId: ID) { + return request.post(`/v2/projects/${projectId}/workflows`, payload); +} + +export function favourTheWorkFlow(projectId: ID, workflowId: ID, favour: boolean) { + return request.patch(`/v2/projects/${projectId}/workflows/${workflowId}`, { + favour: favour ? 
1 : 0, + }); } export function fetchJobLogs( @@ -120,12 +191,11 @@ export function fetchJobEvents( export function fetchPeerJobEvents( workflowUuid: string, k8sJobName: string, + participantId: ID, params?: { startTime?: DateTime; maxLines: number }, ): Promise<{ data: string[] }> { return request( - `/v2/workflows/${workflowUuid}/peer_workflows/${ - 0 /** peerId, fix to 0 so far */ - }/jobs/${k8sJobName}/events`, + `/v2/workflows/${workflowUuid}/peer_workflows/${participantId}/jobs/${k8sJobName}/events`, { params, snake_case: true, @@ -141,14 +211,18 @@ export function fetchPodLogs( return request(`/v2/jobs/${jobId}/pods/${podName}/log`, { params, snake_case: true }); } -export function toggleWofklowForkable(id: ID, forkable: boolean) { - return request.patch(`/v2/workflows/${id}`, { +export function fetchJobById(id: ID | undefined): Promise<{ data: JobExecutionDetalis }> { + return request(`/v2/jobs/${id}`); +} + +export function toggleWofklowForkable(id: ID, forkable: boolean, projectId: ID) { + return request.patch(`/v2/projects/${projectId}/workflows/${id}`, { forkable, }); } -export function toggleMetricsPublic(id: ID, metric_is_public: boolean) { - return request.patch(`/v2/workflows/${id}`, { +export function toggleMetricsPublic(id: ID, metric_is_public: boolean, projectId: ID) { + return request.patch(`/v2/projects/${projectId}/workflows/${id}`, { metric_is_public, }); } @@ -160,11 +234,10 @@ export function fetchJobMpld3Metrics(id: ID): Promise<{ data: any[] }> { export function fetchPeerJobMpld3Metrics( workflowUuid: string, jobName: string, + participantId: ID, ): Promise<{ data: any[] }> { return request( - `/v2/workflows/${workflowUuid}/peer_workflows/${ - 0 /** peerId, fix to 0 so far */ - }/jobs/${jobName}/metrics`, + `/v2/workflows/${workflowUuid}/peer_workflows/${participantId}/jobs/${jobName}/metrics`, ); } @@ -178,14 +251,27 @@ export function fetchJobEmbedKibanaSrc( export function fetchPeerKibanaMetrics( workflowUuid: string, k8sJobName: string, + participantId: ID, params: KibanaQueryParams, ): Promise<{ data: KiabanaMetrics }> { return request( - `/v2/workflows/${workflowUuid}/peer_workflows/${ - 0 /** peerId, fix to 0 so far */ - }/jobs/${k8sJobName}/kibana_metrics`, + `/v2/workflows/${workflowUuid}/peer_workflows/${participantId}/jobs/${k8sJobName}/kibana_metrics`, { params, }, ); } + +export function patchPeerWorkflow(id: ID, payload: Omit<Workflow, 'config'>, projectId: ID) { + return request.patch(`/v2/projects/${projectId}/workflows/${id}/peer_workflows`, payload); +} + +export function getTemplateRevisionDownloadHref(revision_id?: ID): string { + return `/v2/workflow_template_revisions/${revision_id}?download=true`; +} + +export function sendTemplateRevision(revision_id: ID, participant_id: ID) { + return request.post( + `/v2/workflow_template_revisions/${revision_id}:send?participant_id=${participant_id}`, + ); +} diff --git a/web_console_v2/client/src/shared/base64.test.ts b/web_console_v2/client/src/shared/base64.test.ts index 1d43feb62..4ebad02ea 100644 --- a/web_console_v2/client/src/shared/base64.test.ts +++ b/web_console_v2/client/src/shared/base64.test.ts @@ -19,9 +19,22 @@ describe('decode base64', () => { i: 'JUU0JUJEJUEwJUU1JUE1JUJEJTIwd29ybGQh', o: '你好 world!', }, + { + i: '', + o: '', + }, ]; cases.forEach(({ i, o }) => { expect(decodeBase64(i)).toBe(o); }); + + // Mock fake error + const decodeURIComponentSpy = jest + .spyOn(window, 'decodeURIComponent') + .mockImplementation(() => { + throw new Error('fake error'); + }); + 
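+    // With decodeURIComponent forced to throw, decodeBase64 is expected to
+    // swallow the error and fall back to the empty string: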
expect(decodeBase64('aGVsbG8gd29ybGQh')).toBe(''); + decodeURIComponentSpy.mockRestore(); }); }); diff --git a/web_console_v2/client/src/shared/constants.ts b/web_console_v2/client/src/shared/constants.ts new file mode 100644 index 000000000..68afe5624 --- /dev/null +++ b/web_console_v2/client/src/shared/constants.ts @@ -0,0 +1,32 @@ +/* istanbul ignore file */ + +export const TIME_INTERVAL = { + /** 1.5 min */ + LIST: 90 * 1000, + /** 10 min */ + FLAG: 10 * 60 * 1000, + /** 10 min */ + CONNECTION_CHECK: 10 * 60 * 1000, + /** 10s */ + EXPORT_STATE_CHECK: 10 * 1000, +}; + +export const CONSTANTS = { + TIME_INTERVAL, + DELETED_DATASET_NAME: 'deleted', + TEMPLATE_LIGHT_CLIENT_DATA_JOIN: 'sys-preset-light-psi-data-join', + EMPTY_PLACEHOLDER: '-', +}; + +export const TABLE_COL_WIDTH = { + NAME: 200, + ID: 100, + COORDINATOR: 200, + TIME: 150, + OPERATION: 200, + THIN: 100, + NORMAL: 150, + BIG_WIDTH: 200, +}; + +export default CONSTANTS; diff --git a/web_console_v2/client/src/shared/dataset.test.ts b/web_console_v2/client/src/shared/dataset.test.ts index 6c544e2e7..05120ce11 100644 --- a/web_console_v2/client/src/shared/dataset.test.ts +++ b/web_console_v2/client/src/shared/dataset.test.ts @@ -1,72 +1,195 @@ import { - isAvailable, - isImportFailed, - isImportSuccess, - isImporting, getTotalDataSize, - getImportedProportion, getImportStage, + getIntersectionRate, + getTransactionStatus, + isFrontendPending, + isFrontendSucceeded, + isFrontendFailed, + isFrontendProcessing, + isFrontendDeleting, + isJobRunning, + isJobSucceed, + isJobFailed, + isJobStopped, + getDatasetJobState, } from './dataset'; import { successfullyImport, importFailed, unfinishedImporting, + deleting, + frontendSucceeded, + frontendFailed, + frontendProcessing, + transactionFailed, + transactionSucceeded, + transactionProcessing, + datasetStateFrontFailed, + datasetStateFrontPending, + datasetStateFrontProcess, + datasetStateFrontSuccess, + datasetStateFrontDelete, + datasetJobPendingState, + datasetJobFailedState, + datasetJobRunningState, + datasetJobStopedState, + datasetJobSuccessState, } from 'services/mocks/v2/datasets/examples'; +import { DatasetStateFront } from 'typings/dataset'; -describe('Datasets state judgement', () => { - it('Is import successfully', () => { - expect(isImportSuccess(successfullyImport)).toBe(true); - expect(isImportSuccess(importFailed)).toBe(false); - expect(isImportSuccess(unfinishedImporting)).toBe(false); +describe('Datasets state', () => { + it('state_frontend failed', () => { + expect(isFrontendFailed(datasetStateFrontFailed)).toBe(true); + expect(isFrontendFailed(datasetStateFrontPending)).toBe(false); + expect(isFrontendFailed(datasetStateFrontProcess)).toBe(false); + expect(isFrontendFailed(datasetStateFrontSuccess)).toBe(false); + expect(isFrontendFailed(datasetStateFrontDelete)).toBe(false); }); - it('Is available', () => { - expect(isAvailable(successfullyImport)).toBe(true); - expect(isAvailable(importFailed)).toBe(false); - expect(isAvailable(unfinishedImporting)).toBe(false); + it('state_frontend success', () => { + expect(isFrontendSucceeded(datasetStateFrontSuccess)).toBe(true); + expect(isFrontendSucceeded(datasetStateFrontPending)).toBe(false); + expect(isFrontendSucceeded(datasetStateFrontProcess)).toBe(false); + expect(isFrontendSucceeded(datasetStateFrontFailed)).toBe(false); + expect(isFrontendSucceeded(datasetStateFrontDelete)).toBe(false); }); - - it('Is failed', () => { - expect(isImportFailed(successfullyImport)).toBe(false); - 
expect(isImportFailed(importFailed)).toBe(true); - expect(isImportFailed(unfinishedImporting)).toBe(false); + it('state_frontend pending', () => { + expect(isFrontendPending(datasetStateFrontPending)).toBe(true); + expect(isFrontendPending(datasetStateFrontFailed)).toBe(false); + expect(isFrontendPending(datasetStateFrontProcess)).toBe(false); + expect(isFrontendPending(datasetStateFrontSuccess)).toBe(false); + expect(isFrontendPending(datasetStateFrontDelete)).toBe(false); + }); + it('state_frontend process', () => { + expect(isFrontendProcessing(datasetStateFrontProcess)).toBe(true); + expect(isFrontendProcessing(datasetStateFrontPending)).toBe(false); + expect(isFrontendProcessing(datasetStateFrontFailed)).toBe(false); + expect(isFrontendProcessing(datasetStateFrontSuccess)).toBe(false); + expect(isFrontendProcessing(datasetStateFrontDelete)).toBe(false); + }); + it('state_frontend deleting', () => { + expect(isFrontendDeleting(datasetStateFrontDelete)).toBe(true); + expect(isFrontendDeleting(datasetStateFrontPending)).toBe(false); + expect(isFrontendDeleting(datasetStateFrontFailed)).toBe(false); + expect(isFrontendDeleting(datasetStateFrontSuccess)).toBe(false); + expect(isFrontendDeleting(datasetStateFrontProcess)).toBe(false); }); - it('Is importing', () => { - expect(isImporting(successfullyImport)).toBe(false); - expect(isImporting(importFailed)).toBe(false); - expect(isImporting(unfinishedImporting)).toBe(true); +}); + +describe('Datasets job state', () => { + it('Datasets job failed', () => { + expect(isJobFailed(datasetJobFailedState)).toBe(true); + expect(isJobFailed(datasetJobPendingState)).toBe(false); + expect(isJobFailed(datasetJobRunningState)).toBe(false); + expect(isJobFailed(datasetJobStopedState)).toBe(false); + expect(isJobFailed(datasetJobSuccessState)).toBe(false); + }); + it('Datasets job success', () => { + expect(isJobSucceed(datasetJobSuccessState)).toBe(true); + expect(isJobSucceed(datasetJobPendingState)).toBe(false); + expect(isJobSucceed(datasetJobRunningState)).toBe(false); + expect(isJobSucceed(datasetJobStopedState)).toBe(false); + expect(isJobSucceed(datasetJobFailedState)).toBe(false); + }); + it('Datasets job stoped', () => { + expect(isJobStopped(datasetJobStopedState)).toBe(true); + expect(isJobStopped(datasetJobPendingState)).toBe(false); + expect(isJobStopped(datasetJobRunningState)).toBe(false); + expect(isJobStopped(datasetJobSuccessState)).toBe(false); + expect(isJobStopped(datasetJobFailedState)).toBe(false); + }); + it('Datasets job running', () => { + expect(isJobRunning(datasetJobStopedState)).toBe(false); + expect(isJobRunning(datasetJobPendingState)).toBe(true); + expect(isJobRunning(datasetJobRunningState)).toBe(true); + expect(isJobRunning(datasetJobSuccessState)).toBe(false); + expect(isJobRunning(datasetJobFailedState)).toBe(false); }); }); describe('Datasets helpers', () => { it('getTotalDataSize', () => { - expect(getTotalDataSize(unfinishedImporting)).toBe(22345); - expect(getTotalDataSize(importFailed)).toBe(66666); - expect(getTotalDataSize(successfullyImport)).toBe(12345); + expect(getTotalDataSize(unfinishedImporting)).toBe(unfinishedImporting.file_size); + expect(getTotalDataSize(importFailed)).toBe(importFailed.file_size); + expect(getTotalDataSize(successfullyImport)).toBe(successfullyImport.file_size); }); - it('getImportedProportion', () => { - expect(getImportedProportion(unfinishedImporting)).toEqual({ imported: 7, total: 15 }); - expect(getImportedProportion(successfullyImport)).toEqual({ imported: 5, total: 5 }); + 
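+  // getIntersectionRate (added in shared/dataset.ts below) formats output / input
+  // as a percentage; parseFloat of toFixed(2) trims trailing zeros, so a rate of
+  // 0.25 renders as '25%' rather than '25.00%'.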
it('getIntersectionRate', () => { + expect( + getIntersectionRate({ + input: 0, + output: 0, + }), + ).toEqual('0%'); + expect( + getIntersectionRate({ + input: 1000, + output: 0, + }), + ).toEqual('0%'); + expect( + getIntersectionRate({ + input: 1000, + output: 100, + }), + ).toEqual('10%'); + expect( + getIntersectionRate({ + input: 1000, + output: 250, + }), + ).toEqual('25%'); + expect( + getIntersectionRate({ + input: 1000, + output: 312, + }), + ).toEqual('31.2%'); + expect( + getIntersectionRate({ + input: 10000, + output: 3124, + }), + ).toEqual('31.24%'); }); }); describe('Datasets import stage getter', () => { it('getImportStage', () => { - expect(getImportStage(successfullyImport)).toEqual({ - text: 'dataset.state_available', + expect(getImportStage(frontendSucceeded)).toEqual({ + text: '可用', type: 'success', }); - - expect(getImportStage(unfinishedImporting)).toEqual({ - text: 'dataset.state_importing', + expect(getImportStage(frontendFailed)).toEqual({ + text: '处理失败', + type: 'error', + tip: '', + }); + expect(getImportStage(frontendProcessing)).toEqual({ + text: '处理中', + type: 'processing', + }); + expect(getImportStage(deleting)).toEqual({ + text: '删除中', type: 'processing', }); + }); +}); - expect(getImportStage(importFailed)).toEqual({ - text: 'dataset.state_error', +describe('Datasets Transactions status getter', () => { + it('getTransactionStatus', () => { + expect(getTransactionStatus(transactionFailed)).toEqual({ type: 'error', - tip: 'Failed due to disk space is full', + text: 'dataset.state_transaction_failed', + }); + expect(getTransactionStatus(transactionProcessing)).toEqual({ + type: 'processing', + text: 'dataset.state_transaction_processing', + }); + expect(getTransactionStatus(transactionSucceeded)).toEqual({ + type: 'success', + text: 'dataset.state_transaction_success', }); }); }); diff --git a/web_console_v2/client/src/shared/dataset.ts b/web_console_v2/client/src/shared/dataset.ts index e108219fd..b012b3cb2 100644 --- a/web_console_v2/client/src/shared/dataset.ts +++ b/web_console_v2/client/src/shared/dataset.ts @@ -1,69 +1,99 @@ import { StateTypes } from 'components/StateIndicator'; import i18n from 'i18n'; -import { memoize } from 'lodash'; -import { BatchState, Dataset } from 'typings/dataset'; +import { + DataJobBackEndType, + Dataset, + DatasetJobListItem, + DatasetJobState, + DatasetStateFront, + DatasetTransactionStatus, + IntersectionDataset, + DatasetJobStage, +} from 'typings/dataset'; -// --------- State judgement ------------ +export function isFrontendPending(data: Dataset) { + return data.state_frontend === DatasetStateFront.PENDING; +} -export function isImportSuccess(data: Dataset) { - return data.data_batches.every((item) => item.state === BatchState.SUCCESS); +export function isFrontendProcessing(data: Dataset) { + return data.state_frontend === DatasetStateFront.PROCESSING; +} +export function isFrontendDeleting(data: Dataset) { + return data.state_frontend === DatasetStateFront.DELETING; +} +export function isFrontendSucceeded(data: Dataset) { + return data.state_frontend === DatasetStateFront.SUCCEEDED; } +export function isFrontendFailed(data: Dataset) { + return data.state_frontend === DatasetStateFront.FAILED; +} + +// ------- dataset job states judgement ------- -export function isAvailable(data: Dataset) { - // TODO: how to determinte that the dataset is available? 
- return isImportSuccess(data); +export function isJobRunning(data: DatasetJobListItem | DatasetJobStage) { + if (!data || !data.state) { + return false; + } + return [DatasetJobState.PENDING, DatasetJobState.RUNNING].includes(data.state); } -export function isImportFailed(data: Dataset) { - return data.data_batches.some((item) => item.state === BatchState.FAILED); +export function isJobSucceed(data: DatasetJobListItem | DatasetJobStage) { + if (!data || !data.state) { + return false; + } + return [DatasetJobState.SUCCEEDED].includes(data.state); } -export function isImporting(data: Dataset) { - return data.data_batches.some((item) => item.state === BatchState.IMPORTING); +export function isJobFailed(data: DatasetJobListItem | DatasetJobStage) { + if (!data || !data.state) { + return false; + } + return [DatasetJobState.FAILED].includes(data.state); } -export function hasAppendingDataBatch(data: Dataset) { - return false; +export function isJobStopped(data: DatasetJobListItem | DatasetJobStage) { + if (!data || !data.state) { + return false; + } + return [DatasetJobState.STOPPED].includes(data.state); } // --------- Helpers ------------ -export function getTotalDataSize(data: Dataset) { - return _sumUp(data, 'file_size'); +export function getTotalDataSize(data: Dataset | IntersectionDataset) { + return data.file_size || 0; } -export const getImportedProportion = memoize((data: Dataset) => { - const total = _sumUp(data, 'num_file'); - const imported = _sumUp(data, 'num_imported_file'); - return { - total, - imported, - }; -}); - export function getImportStage(data: Dataset): { type: StateTypes; text: string; tip?: string } { - if (isImporting(data)) { + if (isFrontendPending(data)) { return { type: 'processing', - text: i18n.t('dataset.state_importing', getImportedProportion(data)), + text: '待处理', }; } - - if (isImportSuccess(data)) { + if (isFrontendDeleting(data)) { + return { + type: 'processing', + text: '删除中', + }; + } + if (isFrontendProcessing(data)) { + return { + type: 'processing', + text: '处理中', + }; + } + if (isFrontendSucceeded(data)) { return { type: 'success', - text: i18n.t('dataset.state_available'), + text: '可用', }; } - - if (isImportFailed(data)) { + if (isFrontendFailed(data)) { return { type: 'error', - text: i18n.t('dataset.state_error'), - tip: - data.data_batches - .find((item) => item.state === BatchState.FAILED)! 
- .details.files.find((item) => item.error_message)?.error_message || '', + text: '处理失败', + tip: '', }; } @@ -74,10 +104,91 @@ export function getImportStage(data: Dataset): { type: StateTypes; text: string; } as never; } -// -------- Private helpers ------ +/* istanbul ignore next */ +export function getDatasetJobState( + data: DatasetJobListItem | DatasetJobStage, +): { type: StateTypes; text: string; tip?: string } { + let type: StateTypes = 'default'; + let text = i18n.t('dataset.state_unknown'); + if (isJobRunning(data)) { + type = 'processing'; + text = i18n.t('dataset.state_dataset_job_running'); + } + if (isJobSucceed(data)) { + type = 'success'; + text = i18n.t('dataset.state_dataset_job_succeeded'); + } + if (isJobFailed(data)) { + type = 'error'; + text = i18n.t('dataset.state_dataset_job_failed'); + } + if (isJobStopped(data)) { + type = 'error'; + text = i18n.t('dataset.state_dataset_job_stopped'); + } + return { + type, + text, + }; +} -function _sumUp(data: Dataset, key: 'num_file' | 'num_imported_file' | 'file_size') { - return data.data_batches.reduce((result, current) => { - return result + current[key]; - }, 0); +/* istanbul ignore next */ +export function getDatasetJobType(kind: DataJobBackEndType) { + switch (kind) { + case DataJobBackEndType.DATA_JOIN: + case DataJobBackEndType.RSA_PSI_DATA_JOIN: + case DataJobBackEndType.OT_PSI_DATA_JOIN: + case DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN: + case DataJobBackEndType.LIGHT_CLIENT_OT_PSI_DATA_JOIN: + case DataJobBackEndType.HASH_DATA_JOIN: + return i18n.t('dataset.label_data_job_type_create'); + case DataJobBackEndType.DATA_ALIGNMENT: + return i18n.t('dataset.label_data_job_type_alignment'); + case DataJobBackEndType.IMPORT_SOURCE: + return i18n.t('dataset.label_data_job_type_import'); + case DataJobBackEndType.EXPORT: + return i18n.t('dataset.label_data_job_type_export'); + case DataJobBackEndType.ANALYZER: + return '探查'; + default: + return 'unknown'; + } +} + +export function getIntersectionRate(data: { input: number; output: number }) { + let rate = 0; + + if (data.input && data.output) { + rate = data.output / data.input; + } + + return `${parseFloat((rate * 100).toFixed(2))}%`; +} + +export function getTransactionStatus( + status: DatasetTransactionStatus, +): { type: StateTypes; text: string } { + switch (status) { + case DatasetTransactionStatus.FAILED: + return { + type: 'error', + text: i18n.t('dataset.state_transaction_failed'), + }; + case DatasetTransactionStatus.PROCESSING: + return { + type: 'processing', + text: i18n.t('dataset.state_transaction_processing'), + }; + case DatasetTransactionStatus.SUCCEEDED: + return { + type: 'success', + text: i18n.t('dataset.state_transaction_success'), + }; + /* istanbul ignore next */ + default: + return { + type: 'default', + text: i18n.t('dataset.state_unknown'), + }; + } } diff --git a/web_console_v2/client/src/shared/date.test.ts b/web_console_v2/client/src/shared/date.test.ts index e078db2a1..7c97db94f 100644 --- a/web_console_v2/client/src/shared/date.test.ts +++ b/web_console_v2/client/src/shared/date.test.ts @@ -1,4 +1,5 @@ -import { formatTimestamp, fomatTimeCount } from './date'; +import dayjs from 'dayjs'; +import { formatTimestamp, formatTimeCount, disableFuture } from './date'; // Beijing time const DATE_20201_01_21_12_58_23 = 1611205103; @@ -9,12 +10,27 @@ describe('Date formatters', () => { expect(formatTimestamp(DATE_20201_01_21_12_58_23, 'MM/DD/YYYY')).toBe('01/21/2021'); expect(formatTimestamp(DATE_20201_01_21_12_58_23 * 1000)).toBe('2021-01-21 
12:58:23'); expect(formatTimestamp(DATE_20201_01_21_12_58_23, 'MM/DD/YYYY')).toBe('01/21/2021'); + expect(formatTimestamp(10012341231241241412)).toBe('Invalid Date'); + + // Mock fake error + const unixSpy = jest.spyOn(dayjs, 'unix').mockImplementation(() => { + throw new Error('fake error'); + }); + expect(formatTimestamp(DATE_20201_01_21_12_58_23)).toBe('[formatTimestamp]: Input error'); + unixSpy.mockRestore(); }); - it('fomatTimeCount', () => { - expect(fomatTimeCount(3600)).toBe('01:00:00'); - expect(fomatTimeCount(60)).toBe('00:01:00'); - expect(fomatTimeCount(10000)).toBe('02:46:40'); - expect(fomatTimeCount(1)).toBe('00:00:01'); + it('formatTimeCount', () => { + expect(formatTimeCount(3600)).toBe('01:00:00'); + expect(formatTimeCount(60)).toBe('00:01:00'); + expect(formatTimeCount(10000)).toBe('02:46:40'); + expect(formatTimeCount(1)).toBe('00:00:01'); + }); + it('disableFuture', () => { + const dateNowSpy = jest.spyOn(Date, 'now').mockImplementation(() => 1631527486469); + expect(disableFuture(1631527486469 - 1000)).toBe(false); + expect(disableFuture(1631527486469)).toBe(false); + expect(disableFuture(1631527486469 + 1000)).toBe(true); + dateNowSpy.mockRestore(); }); }); diff --git a/web_console_v2/client/src/shared/date.ts b/web_console_v2/client/src/shared/date.ts index 79b668194..99d115879 100644 --- a/web_console_v2/client/src/shared/date.ts +++ b/web_console_v2/client/src/shared/date.ts @@ -24,7 +24,7 @@ export function formatTimestamp(input: number, format = 'YYYY-MM-DD HH:mm:ss') { * @param input a number accurate to seconds * @returns HH:mm:ss */ -export function fomatTimeCount(input: number): string { +export function formatTimeCount(input: number): string { const hours = Math.floor(input / 3600).toString(); const minutes = Math.floor((input % 3600) / 60).toString(); const seconds = ((input % 3600) % 60).toString(); diff --git a/web_console_v2/client/src/shared/file.test.ts b/web_console_v2/client/src/shared/file.test.ts new file mode 100644 index 000000000..a356cf107 --- /dev/null +++ b/web_console_v2/client/src/shared/file.test.ts @@ -0,0 +1,149 @@ +import { + readAsJSONFromFile, + readAsBinaryStringFromFile, + readAsTextFromFile, + humanFileSize, + getFileInfoByFilePath, + buildRelativePath, +} from './file'; + +const testImageFile = new File(['xyz'], 'test.png', { type: 'image/png' }); +const testJSONFile = new File([JSON.stringify({ a: 1, b: 2 })], 'test.json', { + type: 'application/json', +}); + +describe('readAsJSONFromFile', () => { + it('image file', async () => { + return expect(readAsJSONFromFile(testImageFile)).rejects.toThrow('Unexpected token x in JSON'); + }); + it('json file', async () => { + const value = await readAsJSONFromFile(testJSONFile); + expect(value).toEqual({ + a: 1, + b: 2, + }); + }); +}); + +describe('readAsBinaryStringFromFile', () => { + it('image file', async () => { + const value = await readAsBinaryStringFromFile(testImageFile); + expect(value).toBe(btoa('xyz')); + }); + it('json file', async () => { + const value = await readAsBinaryStringFromFile(testJSONFile); + expect(value).toBe(btoa(JSON.stringify({ a: 1, b: 2 }))); + }); +}); + +describe('readAsTextFromFile', () => { + it('image file', async () => { + const value = await readAsTextFromFile(testImageFile); + expect(value).toBe('xyz'); + }); + it('json file', async () => { + const value = await readAsTextFromFile(testJSONFile); + expect(value).toBe(JSON.stringify({ a: 1, b: 2 })); + }); +}); + +it('humanFileSize', async () => { + expect(humanFileSize(100)).toBe('100 B'); + 
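+  // si = true divides by 1000 (SI units), so 1024 B -> 1.024 -> '1.0 KB' below;
+  // si = false divides by 1024 and reports IEC units (KiB, MiB, ...).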
expect(humanFileSize(1024)).toBe('1.0 KB'); + expect(humanFileSize(1024 * 1000)).toBe('1.0 MB'); + expect(humanFileSize(1024 * 1000 * 1000)).toBe('1.0 GB'); + expect(humanFileSize(1024 * 1000 * 1000 * 1000)).toBe('1.0 TB'); + expect(humanFileSize(1024 * 1000 * 1000 * 1000 * 1000)).toBe('1.0 PB'); + + expect(humanFileSize(100, false)).toBe('100 B'); + expect(humanFileSize(1024, false)).toBe('1.0 KiB'); + expect(humanFileSize(1024 * 1000, false)).toBe('1000.0 KiB'); + expect(humanFileSize(1024 * 1000 * 1000, false)).toBe('976.6 MiB'); + expect(humanFileSize(1024 * 1000 * 1000 * 1000, false)).toBe('953.7 GiB'); + expect(humanFileSize(1024 * 1000 * 1000 * 1000 * 1000, false)).toBe('931.3 TiB'); +}); + +it('getFileInfoByFilePath', () => { + expect(getFileInfoByFilePath('main.js')).toEqual({ + parentPath: '', + fileName: 'main.js', + fileExt: 'js', + }); + expect(getFileInfoByFilePath('leader/main.js')).toEqual({ + parentPath: 'leader', + fileName: 'main.js', + fileExt: 'js', + }); + expect(getFileInfoByFilePath('leader/folder/main.js')).toEqual({ + parentPath: 'leader/folder', + fileName: 'main.js', + fileExt: 'js', + }); + expect(getFileInfoByFilePath('main')).toEqual({ + parentPath: '', + fileName: 'main', + fileExt: '', + }); + expect(getFileInfoByFilePath('')).toEqual({ + parentPath: '', + fileName: '', + fileExt: '', + }); +}); +it('buildRelativePath', () => { + expect( + buildRelativePath({ + path: 'folder', + filename: 'a.js', + }), + ).toBe('folder/a.js'); + expect( + buildRelativePath({ + path: '', + filename: 'a.js', + }), + ).toBe('a.js'); + expect( + buildRelativePath({ + path: '.', + filename: 'a.js', + }), + ).toBe('a.js'); + expect( + buildRelativePath({ + path: 'a/b/c/d', + filename: 'e.js', + }), + ).toBe('a/b/c/d/e.js'); + + expect( + buildRelativePath({ + path: 'a/b/c/d/', + filename: 'e.js', + }), + ).toBe('a/b/c/d/e.js'); + expect( + buildRelativePath({ + path: 'a/b/c/d//////', + filename: 'e.js', + }), + ).toBe('a/b/c/d/e.js'); + expect( + buildRelativePath({ + path: '//////', + filename: 'e.js', + }), + ).toBe('e.js'); + expect( + buildRelativePath({ + path: './/////', + filename: 'e.js', + }), + ).toBe('e.js'); + expect( + buildRelativePath({ + path: 'a//////', + filename: 'e.js', + }), + ).toBe('a/e.js'); +}); diff --git a/web_console_v2/client/src/shared/file.ts b/web_console_v2/client/src/shared/file.ts index a1abacf51..2044d6ae0 100644 --- a/web_console_v2/client/src/shared/file.ts +++ b/web_console_v2/client/src/shared/file.ts @@ -1,5 +1,6 @@ export function readAsJSONFromFile<T = object>(file: File): Promise<T> { return new Promise((resolve, reject) => { + /* istanbul ignore if */ if (!window.FileReader) { return reject( new Error( @@ -10,7 +11,7 @@ export function readAsJSONFromFile<T = object>(file: File): Promise<T> { const reader = new FileReader(); reader.onload = function () { try { - let result = JSON.parse(this.result?.toString()!); + const result = JSON.parse(this.result?.toString() || ''); resolve(result); } catch (error) { reject(error); @@ -25,8 +26,17 @@ export function readAsJSONFromFile<T = object>(file: File): Promise<T> { export function readAsBinaryStringFromFile(file: File): Promise<string> { return new Promise((resolve, reject) => { + /* istanbul ignore if */ + if (!window.FileReader) { + return reject( + new Error( + "Detect that Environment doesn't support FileReader yet, please using lastest Chrome", + ), + ); + } const reader = new FileReader(); reader.onload = function () { + /* istanbul ignore else */ if (typeof reader.result === 'string') 
{ resolve(btoa(reader.result)); } @@ -36,3 +46,89 @@ export function readAsBinaryStringFromFile(file: File): Promise<string> { reader.readAsBinaryString(file); }); } + +export function readAsTextFromFile(file: File): Promise<string> { + return new Promise((resolve, reject) => { + /* istanbul ignore if */ + if (!window.FileReader) { + return reject( + new Error( + "Detect that Environment doesn't support FileReader yet, please using lastest Chrome", + ), + ); + } + const reader = new FileReader(); + reader.onload = function () { + /* istanbul ignore else */ + if (typeof reader.result === 'string') { + resolve(reader.result); + } + }; + reader.onerror = reject; + reader.onabort = reject; + reader.readAsText(file); + }); +} + +/** + * Format bytes as human-readable text + * + * @param bytes Number of bytes. + * @param si True to use metric (SI) units, aka powers of 1000. False to use + * binary (IEC), aka powers of 1024. + * @param dp Number of decimal places to display. + * + * @return Formatted string. + */ +export function humanFileSize(bytes: number, si = true, dp = 1) { + const thresh = si ? 1000 : 1024; + + if (Math.abs(bytes) < thresh) { + return bytes + ' B'; + } + + const units = si + ? ['KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'] + : ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']; + let u = -1; + const r = 10 ** dp; + + do { + bytes /= thresh; + ++u; + } while (Math.round(Math.abs(bytes) * r) / r >= thresh && u < units.length - 1); + + return bytes.toFixed(dp) + ' ' + units[u]; +} + +/** + * Get file info by relative file path + * @param path file path + * @returns file info { fileName, fileExt, parentPath } + * @example getFileInfoByFilePath(leader/folder/main.js) => { parentPath: 'leader/folder', fileName: 'main.js', fileExt: 'js' } + */ +export function getFileInfoByFilePath(path: string) { + const pathList = path.split('/'); + const parentPath = pathList.slice(0, pathList.length - 1).join('/'); + const fileName = pathList[pathList.length - 1]; + + const extList = fileName.indexOf('.') > -1 ? fileName.split('.') : []; + const fileExt = extList && extList.length > 0 ? extList[extList.length - 1] : ''; + + return { + parentPath, + fileName, + fileExt, + }; +} +/** + * Build relative file path by file info + * @param fileInfo \{ path: string; filename: string} + * @returns relativePath string + * @example buildRelativePath({ path:'folder', filename: 'a.js'}) => 'folder/a.js' + */ +export function buildRelativePath(fileInfo: { path: string; filename: string }) { + const path = fileInfo?.path.replace(/\/+$/, ''); + + return path && path !== '.' ? 
`${path}/${fileInfo.filename}` : fileInfo.filename;
+}
diff --git a/web_console_v2/client/src/shared/filter.test.ts b/web_console_v2/client/src/shared/filter.test.ts
new file mode 100644
index 000000000..1a2162bca
--- /dev/null
+++ b/web_console_v2/client/src/shared/filter.test.ts
@@ -0,0 +1,116 @@
+import {
+  constructExpressionTree,
+  serializeFilterExpression,
+  confirmValue,
+  operationMap,
+  expression2Filter,
+} from './filter';
+import { FilterExpressionKind, FilterOp } from '../typings/filter';
+
+describe('expression serialization', () => {
+  it('construct expression tree', () => {
+    const nodes = [
+      {
+        field: 'state',
+        op: FilterOp.EQUAL,
+        string_value: 'RUNNING',
+      },
+      {
+        field: 'favour',
+        op: FilterOp.EQUAL,
+        bool_value: true,
+      },
+    ];
+    constructExpressionTree(nodes);
+  });
+
+  it('serialization', () => {
+    const expression = {
+      kind: FilterExpressionKind.AND,
+      exps: [
+        {
+          kind: FilterExpressionKind.BASIC,
+          simple_exp: {
+            field: 'state',
+            op: FilterOp.EQUAL,
+            string_value: 'RUNNING',
+          },
+        },
+        {
+          kind: FilterExpressionKind.BASIC,
+          simple_exp: {
+            field: 'favour',
+            op: FilterOp.EQUAL,
+            bool_value: true,
+          },
+        },
+        {
+          kind: FilterExpressionKind.AND,
+          exps: [
+            {
+              kind: FilterExpressionKind.BASIC,
+              simple_exp: {
+                field: 'state',
+                op: FilterOp.EQUAL,
+                string_value: 'RUNNING',
+              },
+            },
+            {
+              kind: FilterExpressionKind.BASIC,
+              simple_exp: {
+                field: 'count',
+                op: FilterOp.EQUAL,
+                num_value: 10,
+              },
+            },
+          ],
+        },
+      ],
+    };
+    serializeFilterExpression(expression);
+  });
+
+  const expressionList = [
+    {
+      kind: FilterExpressionKind.BASIC,
+      simple_exp: {
+        field: 'state',
+        op: FilterOp.EQUAL,
+        string_value: 'RUNNING',
+      },
+    },
+    {
+      kind: FilterExpressionKind.BASIC,
+      simple_exp: {
+        field: 'count',
+        op: FilterOp.EQUAL,
+        num_value: 10,
+      },
+    },
+    {
+      kind: FilterExpressionKind.BASIC,
+      simple_exp: {
+        field: 'favour',
+        op: FilterOp.EQUAL,
+        bool_value: true,
+      },
+    },
+  ];
+
+  it('confirm value', () => {
+    expressionList.forEach((item) => {
+      return confirmValue(item.simple_exp);
+    });
+  });
+
+  it('operationMap', () => {
+    expressionList.forEach((item) => {
+      return operationMap(item.simple_exp.op);
+    });
+  });
+
+  it('expression2Filter', () => {
+    const expression = '(and((state=["SUCCESS","FAILED"])(is_publish=true))';
+    expression2Filter(expression);
+  });
+});
diff --git a/web_console_v2/client/src/shared/filter.ts b/web_console_v2/client/src/shared/filter.ts
new file mode 100644
index 000000000..e5e67229e
--- /dev/null
+++ b/web_console_v2/client/src/shared/filter.ts
@@ -0,0 +1,120 @@
+import {
+  FilterExpression,
+  FilterExpressionKind,
+  FilterOp,
+  SimpleExpression,
+} from '../typings/filter';
+
+export let serializedExpression = '';
+
+export function constructExpressionTree(simpleNodes: SimpleExpression[]) {
+  if (simpleNodes.length === 0) return;
+  const expressionNodes = simpleNodes.map((item) => {
+    return {
+      kind: FilterExpressionKind.BASIC,
+      simple_exp: item,
+    };
+  });
+  if (simpleNodes.length === 1) {
+    return serializeFilterExpression(expressionNodes[0]);
+  }
+  const expressionTree = {
+    kind: FilterExpressionKind.AND,
+    exps: expressionNodes,
+  };
+
+  return serializeFilterExpression(expressionTree);
+}
+
+/**
+ * preorder traversal to build serialized expressions
+ * @param expression
+ */
+export function serializeFilterExpression(expression: FilterExpression) {
+  expressionSerialize(expression);
+  // clear serializedExpression
+  const tempSerializedExpression = serializedExpression;
+  serializedExpression = '';
+
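+  // e.g. two EQUAL leaves AND-ed by constructExpressionTree serialize to
+  // '(and(state="RUNNING")(favour=true))', which expression2Filter parses back
+  // into { state: 'RUNNING', favour: true }.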
return tempSerializedExpression; +} + +export function expressionSerialize(expression: FilterExpression) { + if (expression.kind === FilterExpressionKind.BASIC) { + serializedExpression += `(${expression.simple_exp?.field}${operationMap( + expression.simple_exp?.op!, + )}${confirmValue(expression.simple_exp!)})`; + return; + } + if (expression.kind === FilterExpressionKind.AND || expression.kind === FilterExpressionKind.OR) { + serializedExpression += `(${expression.kind}`; + expression.exps?.forEach((item, index) => { + expressionSerialize(item); + if (expression.exps && expression.exps.length - 1 === index) { + serializedExpression += ')'; + } + }); + } +} + +export function confirmValue(simpleExp: SimpleExpression) { + if (simpleExp.string_value !== undefined) { + return `"${simpleExp.string_value}"`; + } + if (simpleExp.number_value !== undefined) { + return simpleExp.number_value; + } + if (simpleExp.bool_value !== undefined) { + return simpleExp.bool_value; + } + return; +} + +export function operationMap(op: string) { + const map = new Map<string, string>(); + map.set(FilterOp.EQUAL, '='); + map.set(FilterOp.IN, ':'); + map.set(FilterOp.CONTAIN, '~='); + map.set(FilterOp.GREATER_THAN, '>'); + map.set(FilterOp.LESS_THAN, '<'); + + return map.get(op); +} + +/** + * expression string -> filter object eg: + * (and(state=["SUCCESS","FAILED"])(is_publish=true)) --> + * { + * state: ["SUCCESS","FAILED"], + * is_publish: true, + * } + * @param expressionString + */ +export function expression2Filter(expressionString: string) { + const res: { [key: string]: any } = {}; + if (!expressionString || !expressionString.length) { + return res; + } + const pureFilterExpression = expressionString.startsWith('(and') + ? expressionString.substring(4, expressionString.length - 1) + : expressionString.substring(0, expressionString.length); + const filterRegex = /(?<=\()(.+?)(?=\))/g; + const operationRegex = /(:|=|~=|>|<)/; + const valRegex = /("\[")/; + const filterPairArray = pureFilterExpression.match(filterRegex); + if (Array.isArray(filterPairArray)) { + filterPairArray.forEach((pair) => { + const index = pair.search(operationRegex); + const [filterKey, filterVal] = [ + pair.substring(0, index).trim(), + pair.indexOf('~=') === -1 + ? pair.substring(index + 1).trim() + : pair.substring(index + 2).trim(), + ]; + // "["SUCCESS", "PENDING"]" needs to delete " " + res[filterKey] = valRegex.test(filterVal) + ? 
JSON.parse(filterVal.slice(1, filterVal.length - 1)) + : JSON.parse(filterVal); + }); + } + return res; +} diff --git a/web_console_v2/client/src/shared/formSchema.test.ts b/web_console_v2/client/src/shared/formSchema.test.ts index 375f7aa4d..8d041c27a 100644 --- a/web_console_v2/client/src/shared/formSchema.test.ts +++ b/web_console_v2/client/src/shared/formSchema.test.ts @@ -1,12 +1,26 @@ import buildFormSchemaFromJobDef, { stringifyComplexDictField, parseComplexDictField, + parseVariableToFormValue, + stringifyVariableValue, + parseVariableValue, + JOB_NAME_PREFIX, + processTypedValue, + processVariableTypedValue, } from './formSchema'; import { Job, JobType } from 'typings/job'; -import { VariableComponent } from 'typings/variable'; +import { VariableComponent, VariableValueType } from 'typings/variable'; import { render, cleanup, screen } from '@testing-library/react'; -import { normalTemplate } from 'services/mocks/v2/workflow_templates/examples'; -import { withExecutionDetail } from 'services/mocks/v2/workflows/examples'; +import { + normalTemplate, + noTypedValueTemplate, +} from 'services/mocks/v2/workflow_templates/examples'; +import { + withExecutionDetail, + pendingAcceptAndConfig, + newlyCreated, + completed, +} from 'services/mocks/v2/workflows/examples'; import { unassignedComponent, nameInput, @@ -14,20 +28,48 @@ import { asyncSwitch, cpuLimit, commentTextArea, + codeEditor, + datasetSelect, + datasetPathSelect, + featureSelect, + envsInput, + stringInput, + objectInput, + listInput, + forceObjectInput, + forceListInput, } from 'services/mocks/v2/variables/examples'; import { WorkflowTemplatePayload } from 'typings/workflow'; +import { cloneDeep } from 'lodash-es'; const testJobDef: Job = { - name: 'Test job', + name: 'Test-job', job_type: JobType.RAW_DATA, is_federated: false, dependencies: [], - variables: [unassignedComponent, nameInput, memSelect, asyncSwitch, cpuLimit, commentTextArea], + variables: [ + unassignedComponent, + nameInput, + memSelect, + asyncSwitch, + cpuLimit, + commentTextArea, + codeEditor, + datasetSelect, + datasetPathSelect, + featureSelect, + envsInput, + stringInput, + objectInput, + listInput, + forceObjectInput, + forceListInput, + ], }; describe('Build a form schema with various components (without permissions)', () => { const schema = buildFormSchemaFromJobDef(testJobDef); - const fields = schema.properties!; + const fields = schema.properties![JOB_NAME_PREFIX + testJobDef.name].properties!; afterEach(cleanup); @@ -83,11 +125,25 @@ describe('Build a form schema with various components (without permissions)', () const label = screen.getByRole('label'); expect(label.innerHTML).toContain('some hints'); }); + + describe('Render correct default value', () => { + it('Input component with string value_type', () => { + expect(fields.string_input.default).toBe(stringInput.value); + }); + it('Input component with object value_type', () => { + expect(fields.object_input.default).toBe(objectInput.value); + expect(fields.force_object_input.default).toBe(JSON.stringify(forceObjectInput.typed_value)); + }); + it('Input component with list value_type', () => { + expect(fields.list_input.default).toBe(listInput.value); + expect(fields.force_list_input.default).toBe(JSON.stringify(forceListInput.typed_value)); + }); + }); }); describe('Build a form schema with permissions', () => { const schema = buildFormSchemaFromJobDef(testJobDef, { withPermissions: true }); - const fields = schema.properties!; + const fields = schema.properties![JOB_NAME_PREFIX + 
testJobDef.name].properties!; it('Permission check', () => { expect(fields.some_name.readOnly).toBeTruthy(); @@ -100,10 +156,39 @@ describe('Build a form schema with permissions', () => { }); }); +describe('Build a form schema with readonly', () => { + const schema = buildFormSchemaFromJobDef(testJobDef, { readonly: true }); + const fields = schema.properties![JOB_NAME_PREFIX + testJobDef.name].properties!; + + it('Readonly check', () => { + expect( + Object.keys(fields).every((key) => { + const field = fields[key]; + return field.readOnly === true; + }), + ).toBeTruthy(); + }); +}); + +describe('Build a form schema with variablePrefix', () => { + const variablePrefix = 'prefixName__'; + const schema = buildFormSchemaFromJobDef(testJobDef, { variablePrefix: variablePrefix }); + const fields = schema.properties![JOB_NAME_PREFIX + testJobDef.name].properties!; + + const regx = new RegExp(`^${variablePrefix}`); + + it('VariablePrefix check', () => { + expect( + Object.keys(fields).every((key) => { + return regx.test(key); + }), + ).toBeTruthy(); + }); +}); + describe('Stringify all Widget schemas inside a workflow config before send to server', () => { it('stringifyComplexDictField should works fine', () => { const stringified = stringifyComplexDictField(normalTemplate as WorkflowTemplatePayload); - expect( stringified.config.variables.every((item) => typeof item.widget_schema === 'string'), ).toBeTruthy(); @@ -141,3 +226,222 @@ describe('Parse all Widget schemas inside a workflow config from server side', ( ).toBeTruthy(); }); }); + +it('parseVariableToFormValue should works fine', () => { + expect(parseVariableToFormValue({} as any)).toEqual({}); + expect(parseVariableToFormValue(pendingAcceptAndConfig)).toEqual({}); + expect(parseVariableToFormValue(newlyCreated)).toEqual({ + comment2: '3', + image_version: 'v1.5-rc3', + job_name: '1', + job_name2: '4', + }); + expect(parseVariableToFormValue(withExecutionDetail)).toEqual({ + comment2: '3', + image_version: 'v1.5-rc3', + job_name: '1', + job_name2: '4', + }); + expect(parseVariableToFormValue(completed)).toEqual({ + comment: '', + comment2: '', + cpu_limit: '10', + image_version: 'v1.5-rc3', + is_pair: '', + job_name: '', + job_type: '1', + num_partitions: '4', + participant: '', + worker_cpu: 1, + }); + + expect( + parseVariableToFormValue({ + config: { + variables: [{ name: 'image', value: undefined }], + job_definitions: [ + { + variables: [{ name: 'image', value: '123' }], + }, + ], + }, + } as any), + ).toEqual({ + image: '123', + }); + + expect( + parseVariableToFormValue({ + config: { + variables: [{ name: 'image', value: '123' }], + job_definitions: [ + { + variables: undefined, + }, + { + variables: [{ name: 'v1', value: 'v1' }], + }, + ], + }, + } as any), + ).toEqual({ + image: '123', + v1: 'v1', + }); +}); + +it('stringifyVariableValue should works fine', () => { + const test1 = { ...codeEditor, value_type: VariableValueType.CODE }; + expect(typeof test1.value).toBe('object'); + stringifyVariableValue(test1); + expect(typeof test1.value).toBe('string'); + const test2 = { + ...codeEditor, + value_type: VariableValueType.CODE, + value: JSON.stringify({ 'main.js': 'var a = 1;' }), + }; + expect(typeof test2.value).toBe('string'); + stringifyVariableValue(test2); + expect(typeof test2.value).toBe('string'); + + const test3 = { ...nameInput, value_type: VariableValueType.STRING, value: 1 }; + expect(typeof test3.value).toBe('number'); + stringifyVariableValue(test3); + expect(test3.value).toBe('1'); + const test4 = { ...nameInput, 
value_type: VariableValueType.STRING, value: '1' }; + stringifyVariableValue(test4); + expect(test4.value).toBe('1'); + + const test5 = { ...nameInput, value_type: VariableValueType.BOOLEAN, value: 'true' }; + expect(typeof test5.value).toBe('string'); + stringifyVariableValue(test5); + expect(test5.value).toBe('true'); + const test6 = { ...nameInput, value_type: VariableValueType.BOOLEAN, value: true }; + expect(typeof test6.value).toBe('boolean'); + stringifyVariableValue(test6); + expect(test6.value).toBe('true'); + + const test7 = { ...envsInput }; + expect(typeof test7.value).toBe('object'); + expect(Array.isArray(test7.value)).toBe(true); + stringifyVariableValue(test7); + expect(typeof test1.value).toBe('string'); +}); + +it('parseVariableValue should works fine', () => { + const test1 = { ...codeEditor, value_type: VariableValueType.CODE }; + expect(typeof test1.value).toBe('object'); + parseVariableValue(test1); + expect(typeof test1.value).toBe('object'); + const test2 = { + ...codeEditor, + value_type: VariableValueType.CODE, + value: JSON.stringify({ 'main.js': 'var a = 1;' }), + }; + expect(typeof test2.value).toBe('string'); + parseVariableValue(test2); + expect(typeof test2.value).toBe('object'); + + const test3 = { ...envsInput }; + expect(typeof test3.value).toBe('object'); + parseVariableValue(test3); + expect(typeof test3.value).toBe('object'); + expect(Array.isArray(test3.value)).toBe(true); + const test4 = { + ...envsInput, + value: JSON.stringify([ + { name: 'n1', value: 'v1' }, + { name: 'n2', value: 'v2' }, + ]), + }; + expect(typeof test4.value).toBe('string'); + parseVariableValue(test4); + expect(typeof test4.value).toBe('object'); + expect(Array.isArray(test4.value)).toBe(true); + + const test5 = { + ...codeEditor, + value_type: VariableValueType.NUMBER, + value: '123', + }; + expect(typeof test5.value).toBe('string'); + parseVariableValue(test5); + expect(typeof test5.value).toBe('number'); + const test6 = { + ...codeEditor, + value_type: VariableValueType.NUMBER, + value: '', + }; + expect(typeof test6.value).toBe('string'); + parseVariableValue(test6); + expect(typeof test6.value).toBe('undefined'); +}); + +it('processVariableTypedValue should works fine', () => { + const test1 = { ...stringInput, typed_value: undefined, value_type: VariableValueType.STRING }; + + expect(typeof test1.value).toBe('string'); + expect(typeof test1.typed_value).toBe('undefined'); + expect(test1.typed_value).not.toBe(test1.value); + processVariableTypedValue(test1); + expect(typeof test1.value).toBe('string'); + expect(typeof test1.typed_value).toBe('string'); + expect(test1.typed_value).toBe(test1.value); + + const test2 = { + ...objectInput, + typed_value: undefined, + value_type: VariableValueType.OBJECT, + }; + expect(typeof test2.value).toBe('string'); + expect(typeof test2.typed_value).toBe('undefined'); + expect(test2.typed_value).not.toBe(test2.value); + processVariableTypedValue(test2); + expect(typeof test2.value).toBe('string'); + + expect(typeof test2.typed_value).toBe('object'); + expect(test2.typed_value).toEqual(JSON.parse(test2.value)); + + const test3 = { + ...listInput, + typed_value: undefined, + value_type: VariableValueType.LIST, + }; + expect(typeof test3.value).toBe('string'); + expect(typeof test3.typed_value).toBe('undefined'); + expect(test3.typed_value).not.toBe(test3.value); + processVariableTypedValue(test3); + expect(typeof test3.value).toBe('string'); + expect(typeof test3.typed_value).toBe('object'); + 
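+  // For LIST-typed variables, value stays a raw JSON string while typed_value
+  // holds the parsed structure, hence the deep-equality against JSON.parse below.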
expect(test3.typed_value).toEqual(JSON.parse(test3.value)); +}); + +it('processTypedValue should works fine', () => { + let template = cloneDeep(noTypedValueTemplate) as WorkflowTemplatePayload; + + expect( + template.config.job_definitions.every((job) => { + return job.variables.every( + (item) => typeof item.value === 'string' && typeof item.typed_value === 'undefined', + ); + }), + ).toBeTruthy(); + + template = processTypedValue(template); + + expect( + template.config.job_definitions.every((job) => { + return job.variables.every((item) => { + if ( + item.widget_schema.component === VariableComponent.Input && + (item.value_type === VariableValueType.OBJECT || + item.value_type === VariableValueType.LIST) + ) { + return typeof item.value === 'string' && typeof item.typed_value === 'object'; + } + + return typeof item.typed_value !== 'undefined'; + }); + }), + ).toBeTruthy(); +}); diff --git a/web_console_v2/client/src/shared/formSchema.tsx b/web_console_v2/client/src/shared/formSchema.tsx index 636adce79..a18783b28 100644 --- a/web_console_v2/client/src/shared/formSchema.tsx +++ b/web_console_v2/client/src/shared/formSchema.tsx @@ -1,5 +1,7 @@ import React from 'react'; import { + RevisionDetail, + Workflow, WorkflowAcceptPayload, WorkflowExecutionDetails, WorkflowForkPayload, @@ -14,9 +16,15 @@ import { VariableValueType, } from 'typings/variable'; import { FormilySchema } from 'typings/formily'; -import { cloneDeep, merge } from 'lodash'; +import { cloneDeep, merge } from 'lodash-es'; import variablePresets, { VariablePresets } from './variablePresets'; import { FC } from 'react'; +import { + JobDefinitionForm, + VariableDefinitionForm, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import { Job } from 'typings/job'; +import { parseValueFromString } from 'shared/helpers'; const __IS_JEST__ = typeof jest !== 'undefined'; @@ -30,30 +38,39 @@ const FakeVariableLabel: FC<any> = ({ label, tooltip }: any) => { ); }; -const VariableLabel = __IS_JEST__ ? FakeVariableLabel : require('components/VariableLabel').default; +const VariableLabel = __IS_JEST__ + ? 
FakeVariableLabel + : /* istanbul ignore next */ require('components/VariableLabel').default; // ------- Build form Formily schema -------- type BuildOptions = { withPermissions?: boolean; readonly?: boolean; + labelAlign?: Position; + variablePrefix?: string; }; // Make option variables name end with __OPTION // for better recognition let withPermissions__OPTION = false; let readonly__OPTION = false; +let variablePrefix__OPTION = ''; function _enableOptions(options?: BuildOptions) { if (!options) return; withPermissions__OPTION = !!options.withPermissions; readonly__OPTION = !!options.readonly; + variablePrefix__OPTION = options.variablePrefix || ''; } function _resetOptions() { withPermissions__OPTION = false; readonly__OPTION = false; + variablePrefix__OPTION = ''; } +export const JOB_NAME_PREFIX = '___'; + /** * Give a job definition with varables inside, return a formily form-schema, * during progress we will merge client side variable presets with inputs @@ -64,15 +81,31 @@ export default function buildFormSchemaFromJobDef( options?: BuildOptions, ): FormilySchema { const { variables, name } = cloneDeep(job); + + const jobName = `${JOB_NAME_PREFIX}${name}`; // Avoid duplicate names with job variables + const schema: FormilySchema = { type: 'object', - title: name, - properties: {}, + title: jobName, + properties: { + [jobName]: { + type: 'void', + 'x-component': 'FormLayout', + 'x-component-props': { + labelAlign: options?.labelAlign ?? 'left', + labelCol: 8, + wrapperCol: 16, + // wrapperWidth: 'max-content', + }, + properties: {}, + }, + }, }; return variables.reduce((schema, current, index) => { const worker = componentToWorkersMap[current.widget_schema?.component || VariableComponent.Input] || + /* istanbul ignore next */ createInput; current.widget_schema = _mergeVariableSchemaWithPresets(current, variablePresets); @@ -80,7 +113,7 @@ export default function buildFormSchemaFromJobDef( _enableOptions(options); - Object.assign(schema.properties, worker(current)); + Object.assign(schema.properties![jobName].properties, worker(current)); _resetOptions(); @@ -90,19 +123,26 @@ export default function buildFormSchemaFromJobDef( //---- Variable to Schema private helpers -------- -function _getPermissions({ access_mode }: Variable) { +function _getPermissions(variable: Variable) { + const { access_mode, widget_schema } = variable; + const display = !widget_schema.hidden; + const readOnly = widget_schema.readOnly ?? false; + const permissionControl = withPermissions__OPTION; + return { - readOnly: - (withPermissions__OPTION && access_mode === VariableAccessMode.PEER_READABLE) || - readonly__OPTION, - display: withPermissions__OPTION === false ? true : access_mode !== VariableAccessMode.PRIVATE, + readOnly: readonly__OPTION + ? true + : permissionControl + ? access_mode === VariableAccessMode.PEER_READABLE + : readOnly, + display: permissionControl ? access_mode !== VariableAccessMode.PRIVATE : display, }; } -function _getDatas({ value, widget_schema: { type, enum: enums } }: Variable) { +function _getDatas({ value, widget_schema: { type, enum: enums } }: Variable, forceValue?: any) { return { type, - default: value, + default: forceValue ?? 
value, enum: enums, }; } @@ -118,6 +158,11 @@ function _getUIs({ : VariableLabel({ label: name, tooltip, accessMode: access_mode }), description, 'x-index': index, + 'x-decorator': 'FormItem', + 'x-decorator-props': { + colon: true, + tooltip: null, + }, 'x-component-props': { size, placeholder: placeholder || tooltip || `请输入 ${name}`, @@ -129,7 +174,7 @@ function _getValidations({ widget_schema: { pattern, rules, required } }: Variab return { required, pattern, - 'x-rules': rules, + 'x-validator': rules, }; } @@ -138,19 +183,31 @@ export function createInput(variable: Variable): FormilySchema { const { name, widget_schema: { prefix, suffix, maxLength }, + value_type, + value, } = variable; + let forceValue = value; + if ( + value_type && + [VariableValueType.CODE, VariableValueType.LIST, VariableValueType.OBJECT].includes( + value_type, + ) && + typeof variable.value === 'object' + ) { + forceValue = JSON.stringify(value); + } + return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), - _getDatas(variable), + _getDatas(variable, forceValue), _getPermissions(variable), + // TODO: JSON Check _getValidations(variable), { - 'x-component': 'Input', + 'x-component': VariableComponent.Input, 'x-component-props': { - // check here for more Input props: - // https://ant.design/components/input/#Input prefix, suffix, maxLength, @@ -168,13 +225,13 @@ export function createTextArea(variable: Variable): FormilySchema { } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { - 'x-component': 'TextArea', + 'x-component': VariableComponent.TextArea, 'x-component-props': { rows, showCount, @@ -192,17 +249,15 @@ export function createSelect(variable: Variable): FormilySchema { } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { enum: options?.source || /* istanbul ignore next: no need to test empty array */ [], - 'x-component': 'Select', + 'x-component': VariableComponent.Select, 'x-component-props': { - // check here for more Select props: - // https://ant.design/components/select allowClear: true, filterOption: filterOption, mode: multiple ? 
/* istanbul ignore next */ 'multiple' : null, @@ -219,13 +274,13 @@ export function createSwitch(variable: Variable): FormilySchema { } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { - 'x-component': 'Switch', + 'x-component': VariableComponent.Switch, 'x-component-props': { checkedChildren, unCheckedChildren, @@ -243,14 +298,14 @@ export function createCheckbox(variable: Variable): FormilySchema { } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { enum: options?.source || [], - 'x-component': 'Checkbox', + 'x-component': VariableComponent.Checkbox, }, ), }; @@ -264,14 +319,14 @@ export function createRadio(variable: Variable): FormilySchema { } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { enum: options?.source || [], - 'x-component': 'Radio', + 'x-component': VariableComponent.Radio, }, ), }; @@ -284,7 +339,7 @@ export function createNumberPicker(variable: Variable): FormilySchema { } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), @@ -292,12 +347,65 @@ export function createNumberPicker(variable: Variable): FormilySchema { { minimum: min, maximum: max, - 'x-component': 'NumberPicker', + 'x-component': VariableComponent.NumberPicker, 'x-component-props': { min, max, - parser: (v: string) => v, - formatter: (value: number) => `${value}`, + parser: /* istanbul ignore next */ (v: string) => v, + formatter: /* istanbul ignore next */ (value: number) => `${value}`, + }, + }, + ), + }; +} + +export function createCpuInput(variable: Variable): FormilySchema { + const { + name, + widget_schema: { min, max }, + } = variable; + const minVal = min || 1000; + const maxVal = max || Number.MAX_SAFE_INTEGER; + + return { + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), + { + // minimum: minVal, + // maximum: maxVal, + 'x-component': VariableComponent.CPU, + 'x-component-props': { + min: minVal, + max: maxVal, + }, + }, + ), + }; +} +export function createMemInput(variable: Variable): FormilySchema { + const { + name, + widget_schema: { min, max }, + } = variable; + const minVal = min || 1; + const maxVal = max || 100; + + return { + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), + { + minimum: minVal, + maximum: maxVal, + 'x-component': VariableComponent.MEM, + 'x-component-props': { + min: minVal, + max: maxVal, }, }, ), @@ -308,13 +416,31 @@ export function createModelCodesEditor(variable: Variable): FormilySchema { const { name } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { - 'x-component': 'Code', + 'x-component': VariableComponent.Code, + }, + ), + }; +} +export function createJSONEditor(variable: Variable): FormilySchema { + const { name } = variable; + + return { + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), 
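+      // Like every field factory here, the JSON editor merges four layers (UI
+      // metadata, data/defaults, permission flags, validation rules) before
+      // binding the concrete component below.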
+ { + 'x-component': VariableComponent.JSON, + 'x-component-props': { + language: 'json', + }, }, ), }; @@ -324,13 +450,122 @@ export function createDatasetSelect(variable: Variable): FormilySchema { const { name } = variable; return { - [name]: merge( + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), + { + 'x-component': VariableComponent.Dataset, + }, + ), + }; +} + +export function createDatasePathSelect(variable: Variable): FormilySchema { + const { name } = variable; + + return { + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), + { + 'x-component': VariableComponent.DatasetPath, + }, + ), + }; +} + +export function createFeatureSelect(variable: Variable): FormilySchema { + const { name } = variable; + + return { + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), + { + 'x-component': VariableComponent.FeatureSelect, + }, + ), + }; +} + +export function createEnvsInput(variable: Variable): FormilySchema { + const { + name, + widget_schema: { required }, + } = variable; + return { + [variablePrefix__OPTION + name]: merge( + _getUIs(variable), + _getDatas(variable), + _getPermissions(variable), + _getValidations(variable), + { + type: 'array', + 'x-component': 'ArrayItems', + items: { + type: 'object', + properties: { + NO_NAME_FIELD_$0: { + type: 'void', + 'x-component': 'Space', + properties: { + name: { + key: 'name', + type: 'string', + title: 'name', + 'x-component': 'Input', + 'x-decorator': 'FormItem', + required, + }, + value: { + key: 'value', + type: 'string', + title: 'value', + 'x-component': 'Input', + 'x-decorator': 'FormItem', + required, + }, + remove: { + type: 'void', + 'x-decorator': 'FormItem', + 'x-component': 'ArrayItems.Remove', + }, + }, + }, + }, + }, + properties: { + add: { + type: 'void', + title: '添加参数', + 'x-component': 'ArrayItems.Addition', + }, + }, + }, + ), + }; +} + +export function createAlgorithmSelect(variable: Variable): FormilySchema { + const { name } = variable; + + return { + [variablePrefix__OPTION + name]: merge( _getUIs(variable), _getDatas(variable), _getPermissions(variable), _getValidations(variable), { - 'x-component': 'Dataset', + 'x-component': VariableComponent.AlgorithmSelect, + 'x-component-props': { + containerStyle: { width: '100%' }, + }, }, ), }; @@ -345,25 +580,62 @@ const componentToWorkersMap: { [key: string]: (v: Variable) => FormilySchema } = [VariableComponent.Select]: createSelect, [VariableComponent.Radio]: createRadio, [VariableComponent.NumberPicker]: createNumberPicker, + [VariableComponent.CPU]: createCpuInput, + [VariableComponent.MEM]: createMemInput, [VariableComponent.Code]: createModelCodesEditor, + [VariableComponent.JSON]: createJSONEditor, + [VariableComponent.DatasetPath]: createDatasePathSelect, [VariableComponent.Dataset]: createDatasetSelect, + [VariableComponent.FeatureSelect]: createFeatureSelect, + [VariableComponent.EnvsInput]: createEnvsInput, + [VariableComponent.AlgorithmSelect]: createAlgorithmSelect, }; // ---------- Widget schemas stringify, parse ----------- export function stringifyVariableValue(variable: Variable) { - if (variable.value_type === VariableValueType.CODE && typeof variable.value === 'object') { + if ( + [VariableValueType.CODE, VariableValueType.LIST, VariableValueType.OBJECT].includes( + 
variable.value_type!, + ) && + typeof variable.value === 'object' + ) { variable.value = JSON.stringify(variable.value); } + // Otherwise, type is STRING/NUMBER/BOOLEAN + if (typeof variable.value !== 'string') { + variable.value = String(variable.value); + } +} + +export function parseVariableValue(variable: Variable) { + if ( + [VariableValueType.CODE, VariableValueType.LIST, VariableValueType.OBJECT].includes( + variable.value_type!, + ) && + typeof variable.value !== 'object' + ) { + try { + variable.value = JSON.parse(variable.value); + } catch (error) {} + } if (variable.value_type === VariableValueType.STRING && typeof variable.value !== 'string') { variable.value = String(variable.value); } + if (variable.value_type === VariableValueType.NUMBER && typeof variable.value !== 'number') { + variable.value = variable.value ? Number(variable.value) : undefined; + } + if (variable.value_type === VariableValueType.BOOLEAN && typeof variable.value !== 'boolean') { + variable.value = Boolean(variable.value); + } } -export function parseVariableValue(variable: Variable) { - if (variable.value_type === VariableValueType.CODE && typeof variable.value === 'string') { - variable.value = JSON.parse(variable.value); +export function processVariableTypedValue(variable: Variable) { + if (variable.value_type) { + variable.typed_value = parseValueFromString(variable.value, variable.value_type as any); + } else { + variable.typed_value = variable.value; } } @@ -373,19 +645,20 @@ export function parseVariableValue(variable: Variable) { export function stringifyComplexDictField< T extends | WorkflowInitiatePayload + | WorkflowInitiatePayload<Job> | WorkflowTemplatePayload + | WorkflowTemplatePayload<JobDefinitionForm, VariableDefinitionForm> | WorkflowAcceptPayload | WorkflowForkPayload >(input: T): T { - const ret = cloneDeep(input); - + let ret = cloneDeep(input); ret.config?.job_definitions.forEach((job: any) => { job.variables.forEach(_stringify); }); ret.config.variables?.forEach(_stringify); - let ifIsForking = (ret as WorkflowForkPayload).fork_proposal_config; + const ifIsForking = (ret as WorkflowForkPayload).fork_proposal_config; /* istanbul ignore if */ if (ifIsForking) { @@ -396,6 +669,9 @@ export function stringifyComplexDictField< ifIsForking.variables?.forEach(_stringify); } + // process typed_value + ret = processTypedValue(ret); + return ret; function _stringify(variable: any) { @@ -412,7 +688,7 @@ export function stringifyComplexDictField< * Parse each variable's widget schema & codes value */ export function parseComplexDictField< - T extends WorkflowExecutionDetails | WorkflowTemplate | WorkflowForkPayload + T extends WorkflowExecutionDetails | WorkflowTemplate | WorkflowForkPayload | RevisionDetail >(input: T): T { const ret = cloneDeep(input); @@ -422,7 +698,7 @@ export function parseComplexDictField< ret.config?.variables?.forEach(_parse); - let ifIsForking = (ret as WorkflowForkPayload).fork_proposal_config; + const ifIsForking = (ret as WorkflowForkPayload).fork_proposal_config; /* istanbul ignore if: logic is same as above */ if (ifIsForking) { @@ -432,21 +708,85 @@ export function parseComplexDictField< ifIsForking.variables?.forEach(_parse); } - return ret; function _parse(variable: Variable): any { /* istanbul ignore next: needless to test */ if (typeof variable.widget_schema === 'string') { - variable.widget_schema = variable.widget_schema - ? JSON.parse(variable.widget_schema) - : /* istanbul ignore next */ {}; + try { + variable.widget_schema = variable.widget_schema + ? 
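+          // widget_schema arrives JSON-encoded from the back end; the try/catch
+          // added here degrades a malformed string to {} instead of throwing.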
JSON.parse(variable.widget_schema) + : /* istanbul ignore next */ {}; + } catch (error) { + variable.widget_schema = {}; + } } parseVariableValue(variable); } } +export function processTypedValue< + T extends + | WorkflowInitiatePayload + | WorkflowInitiatePayload<Job> + | WorkflowTemplatePayload + | WorkflowTemplatePayload<JobDefinitionForm, VariableDefinitionForm> + | WorkflowAcceptPayload + | WorkflowForkPayload +>(input: T): T { + const ret = cloneDeep(input); + ret.config?.job_definitions.forEach((job: any) => { + job.variables.forEach(processVariableTypedValue); + }); + + ret.config.variables?.forEach(processVariableTypedValue); + + const ifIsForking = (ret as WorkflowForkPayload).fork_proposal_config; + + /* istanbul ignore if */ + if (ifIsForking) { + ifIsForking.job_definitions.forEach((job: any) => { + job.variables.forEach(processVariableTypedValue); + }); + + ifIsForking.variables?.forEach(processVariableTypedValue); + } + + return ret; +} + +export function parseVariableToFormValue<T extends Workflow>( + input: T, +): { + [key: string]: any; +} { + const formValue: any = {}; + const config = input.config; + + let variablesList: Variable[] = []; + + // global + if (config && config.variables && config.variables.length > 0) { + variablesList = variablesList.concat(config.variables); + } + + // job + if (config && config.job_definitions && config.job_definitions.length > 0) { + config.job_definitions.forEach((item) => { + variablesList = variablesList.concat(item.variables || []); + }); + } + + if (variablesList.length > 0) { + variablesList.forEach((item) => { + formValue[item.name] = item.value; + }); + } + + return formValue; +} + // -------------- Private helpers --------------- /** diff --git a/web_console_v2/client/src/shared/helpers.ts b/web_console_v2/client/src/shared/helpers.ts deleted file mode 100644 index 9df1153b9..000000000 --- a/web_console_v2/client/src/shared/helpers.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { isNil } from 'lodash'; - -/** - * @param time time in ms - */ -export function sleep(time: number): Promise<null> { - return new Promise((resolve) => { - setTimeout(resolve, time); - }); -} - -/** - * Convert value to css acceptable stuffs - * @param val e.g. 10, '10%', '1.2em' - * @param unit e.g. px, %, em... 
- */ -export function convertToUnit(val: any, unit = 'px'): string { - if (isNil(val) || val === '') { - return '0'; - } else if (isNaN(val)) { - return String(val); - } else { - return `${Number(val)}${unit}`; - } -} - -/** - * Resolve promise inline - */ -export async function to<T, E = Error>(promise: Promise<T>): Promise<[T, E]> { - try { - const ret = await promise; - return [ret, (null as unknown) as E]; - } catch (e) { - return [(null as unknown) as T, e]; - } -} - -/** - * Give a random string base on Math.random - */ -export function giveWeakRandomKey() { - return Math.random().toString(16).slice(2); -} - -type ScriptStatus = 'loading' | 'idle' | 'ready' | 'error'; -export function loadScript(src: string): Promise<{ status: ScriptStatus; error?: Error }> { - return new Promise((resolve, reject) => { - if (!src) { - resolve({ status: 'idle' }); - return; - } - - // Fetch existing script element by src - // It may have been added by another intance of this util - let script = document.querySelector(`script[src="${src}"]`) as HTMLScriptElement; - - if (!script) { - // Create script - script = document.createElement('script'); - script.src = src; - script.async = true; - script.setAttribute('data-status', 'loading'); - // Add script to document body - document.body.appendChild(script); - - // Store status in attribute on script - // This can be read by other instances of this hook - const setAttributeFromEvent = (event: Event) => { - const status = event.type === 'load' ? 'ready' : 'error'; - script.setAttribute('data-status', status); - }; - - script.addEventListener('load', setAttributeFromEvent); - script.addEventListener('error', setAttributeFromEvent); - } else { - // Grab existing script status from attribute and set to state. - resolve({ status: script.getAttribute('data-status') as any }); - } - - // Script event handler to update status in state - // Note: Even if the script already exists we still need to add - // event handlers to update the state for *this* hook instance. - const setStateFromEvent = (event: Event) => { - const status = event.type === 'load' ? 'ready' : 'error'; - if (event.type === 'load') { - resolve({ status }); - } else { - reject({ status, error: event }); - } - }; - - // Add event listeners - script.addEventListener('load', setStateFromEvent); - script.addEventListener('error', setStateFromEvent); - }); -} - -/** - * Copy to the clipboard (only for PC, no mobile adaptation processing has been done yet) \nstr \nThe string to be copied\nIs the copy successful? - * - * @param {String} str need copied - * @return {Boolean} is success? 
- */
-/* istanbul ignore next */
-export function copyToClipboard(str: string) {
-  str = str.toString();
-
-  const inputEl = document.createElement('textArea') as HTMLTextAreaElement;
-  let copyOk = false;
-
-  inputEl.value = str;
-  document.body.append(inputEl);
-  inputEl.select();
-
-  try {
-    copyOk = document.execCommand('Copy');
-  } catch (e) {
-    copyOk = false;
-  }
-
-  document.body.removeChild(inputEl);
-
-  return copyOk;
-}
-
-export function saveBlob(blob: Blob, fileName: string) {
-  const a = document.createElement('a');
-  a.href = window.URL.createObjectURL(blob);
-  a.download = fileName;
-  a.dispatchEvent(new MouseEvent('click'));
-}
diff --git a/web_console_v2/client/src/shared/helpers.tsx b/web_console_v2/client/src/shared/helpers.tsx
new file mode 100644
index 000000000..7a30325fa
--- /dev/null
+++ b/web_console_v2/client/src/shared/helpers.tsx
@@ -0,0 +1,765 @@
+import React from 'react';
+import { isNil } from 'lodash-es';
+import store from 'store2';
+
+import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys';
+
+import {
+  Python,
+  Javascript,
+  Default,
+  Config,
+  Yaml,
+  Json,
+  GitIgnore,
+  Markdown,
+} from 'components/IconPark';
+import { FileData, FileDataNode } from 'components/FileExplorer';
+
+import { ValueType } from 'typings/settings';
+import { FileTreeNode } from 'typings/algorithm';
+
+/**
+ * @param time time in ms
+ */
+export function sleep(time: number): Promise<null> {
+  return new Promise((resolve) => {
+    setTimeout(resolve, time);
+  });
+}
+
+/**
+ * Run a callback on the next event loop tick
+ */
+export function nextTick(cb: Function) {
+  return setTimeout(() => {
+    cb();
+  }, 0);
+}
+
+/**
+ * Convert a value to a CSS-acceptable string
+ * @param val e.g. 10, '10%', '1.2em'
+ * @param unit e.g. px, %, em...
+ */
+export function convertToUnit(val: any, unit = 'px'): string {
+  if (isNil(val) || val === '') {
+    return '0';
+  } else if (isNaN(val)) {
+    return String(val);
+  } else {
+    return `${Number(val)}${unit}`;
+  }
+}
+
+/**
+ * Resolve promise inline
+ */
+export async function to<T, E = Error>(promise: Promise<T>): Promise<[T, E]> {
+  try {
+    const ret = await promise;
+    return [ret, (null as unknown) as E];
+  } catch (e) {
+    return [(null as unknown) as T, e];
+  }
+}
+
+export const weakRandomKeyCache = new Map<Symbol, string>();
+/**
+ * Give a random string based on Math.random;
+ * the result is cached when a symbol is provided
+ */
+export function giveWeakRandomKey(symbol?: Symbol): string {
+  if (symbol) {
+    if (weakRandomKeyCache.has(symbol)) {
+      return weakRandomKeyCache.get(symbol)!;
+    }
+
+    const ret = giveWeakRandomKey();
+    weakRandomKeyCache.set(symbol, ret);
+    return ret;
+  }
+
+  return Math.random().toString(16).slice(2);
+}
+
+type ScriptStatus = 'loading' | 'idle' | 'ready' | 'error';
+/* istanbul ignore next */
+export function loadScript(src: string): Promise<{ status: ScriptStatus; error?: Error }> {
+  return new Promise((resolve, reject) => {
+    if (!src) {
+      resolve({ status: 'idle' });
+      return;
+    }
+
+    // Fetch existing script element by src
+    // It may have been added by another instance of this util
+    let script = document.querySelector(`script[src="${src}"]`) as HTMLScriptElement;
+
+    if (!script) {
+      // Create script
+      script = document.createElement('script');
+      script.src = src;
+      script.async = true;
+      script.setAttribute('data-status', 'loading');
+      // Add script to document body
+      document.body.appendChild(script);
+
+      // Store status in attribute on script
+      // This can be read by other instances of this hook
+      const setAttributeFromEvent = (event: Event) => {
+        const status = event.type === 'load' ? 'ready' : 'error';
+        script.setAttribute('data-status', status);
+      };
+
+      script.addEventListener('load', setAttributeFromEvent);
+      script.addEventListener('error', setAttributeFromEvent);
+    } else {
+      // Grab existing script status from attribute and set to state.
+      resolve({ status: script.getAttribute('data-status') as any });
+    }
+
+    // Script event handler to update status in state
+    // Note: Even if the script already exists we still need to add
+    // event handlers to update the state for *this* hook instance.
+    const setStateFromEvent = (event: Event) => {
+      const status = event.type === 'load' ? 'ready' : 'error';
+      if (event.type === 'load') {
+        resolve({ status });
+      } else {
+        reject({ status, error: event });
+      }
+    };
+
+    // Add event listeners
+    script.addEventListener('load', setStateFromEvent);
+    script.addEventListener('error', setStateFromEvent);
+  });
+}
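+
+/**
+ * Usage sketch (illustrative; the URL is a placeholder):
+ *
+ *   const { status } = await loadScript('https://example.com/sdk.js');
+ *   // resolves with status 'ready' once the <script> tag fires its load event
+ */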
+
+/**
+ * Copy a string to the clipboard (desktop only; no mobile adaptation yet).
+ *
+ * @param {String} str the string to copy
+ * @return {Boolean} whether the copy succeeded
+ */
+/* istanbul ignore next */
+export async function copyToClipboard(str: string) {
+  str = str.toString();
+
+  const inputEl = document.createElement('textArea') as HTMLTextAreaElement;
+  let copyOk = false;
+
+  inputEl.value = str;
+  document.body.append(inputEl);
+  inputEl.select();
+
+  try {
+    copyOk = document.execCommand('Copy');
+  } catch (e) {
+    return Promise.reject(e);
+  }
+
+  document.body.removeChild(inputEl);
+  return Promise.resolve(copyOk);
+}
+
+/* istanbul ignore next */
+export async function newCopyToClipboard(str: string) {
+  if (!navigator.clipboard) {
+    return await copyToClipboard(str);
+  }
+  return navigator.clipboard.writeText(str);
+}
+
+export function saveBlob(blob: Blob, fileName: string) {
+  const a = document.createElement('a');
+  a.href = window.URL.createObjectURL(blob);
+  a.download = fileName;
+  a.dispatchEvent(new MouseEvent('click'));
+}
+
+/**
+ * Escape special regex characters by prefixing each with a backslash
+ */
+export const transformRegexSpecChar = (str: string) => {
+  const specCharList = ['\\', '*', '.', '?', '+', '$', '^', '[', ']', '(', ')', '{', '}', '|', '/'];
+  let resultString = str;
+  specCharList.forEach((char) => {
+    resultString = resultString.replace(char, `\\${char}`);
+  });
+  return resultString;
+};
+
+/**
+ * Format an object into an array of { label, value } pairs, supporting a custom key order
+ * @example
+ * from
+ * {
+ *  'key1': 1,
+ *  'key2': 2,
+ * }
+ *
+ * to
+ * [
+ *  {
+ *    label: 'key1',
+ *    value: 1,
+ *  }
+ *  {
+ *    label: 'key2',
+ *    value: 2,
+ *  }
+ * ]
+ */
+export function formatObjectToArray<T = any>(
+  map: {
+    [key: string]: T;
+  },
+  orderKeyList?: string[],
+): Array<{
+  label: string;
+  value: T | null;
+}> {
+  const tempMap = { ...map };
+
+  const result: {
+    label: string;
+    value: T | null;
+  }[] = [];
+
+  if (orderKeyList && orderKeyList.length > 0) {
+    orderKeyList.forEach((key) => {
+      if (!Object.prototype.hasOwnProperty.call(tempMap, key)) return;
+      const value = tempMap[key];
+      delete tempMap[key];
+      result.push({
+        label: key,
+        value,
+      });
+    });
+  }
+
+  Object.keys(tempMap).forEach((key) => {
+    const value = map[key];
+    result.push({
+      label: key,
+      value,
+    });
+  });
+
+  return result;
+}
+
+/**
+ * Pretty-print a JSON string with the given indentation
+ */
+export function formatJSONValue(str: string, space: string | number | undefined = 2) {
+  try {
+    const value = JSON.stringify(JSON.parse(str), null, space);
+    return value;
+  } catch (error) {
+    return str;
+  }
+}
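+
+/**
+ * Usage sketch (values are illustrative):
+ *
+ *   formatJSONValue('{"a":1}')  // => '{\n  "a": 1\n}'
+ *   formatJSONValue('not json') // => 'not json' (returned unchanged)
+ */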
+
+/**
+ * Format value to string
+ */
+export const formatValueToString = (value: any, valueType?: ValueType) => {
+  if (isNil(value)) {
+    return '';
+  }
+
+  if (valueType === 'OBJECT' || valueType === 'LIST') {
+    if (value) {
+      return JSON.stringify(value);
+    }
+  }
+
+  return String(value);
+};
+
+/**
+ * Parse value from string
+ */
+export const parseValueFromString = (value: string, valueType?: ValueType) => {
+  if (valueType === 'OBJECT' || valueType === 'LIST' || valueType === 'CODE') {
+    try {
+      let tempValue = value;
+      // Unwrap a (possibly repeatedly) JSON-stringified string, e.g. the value of <FeatureSelect />
+      while (typeof tempValue === 'string') {
+        tempValue = JSON.parse(tempValue);
+      }
+      return tempValue;
+    } catch (error) {
+      throw error;
+    }
+  }
+
+  if (valueType === 'INT' || valueType === 'NUMBER') {
+    if (value === '') {
+      return null;
+    }
+    const res = Number(value);
+    if (isNaN(res)) {
+      throw new Error('数字格式不正确');
+    }
+    return res;
+  }
+
+  if (valueType === 'BOOL' || valueType === 'BOOLEAN') {
+    if (['True', 'true', '1'].includes(value)) {
+      return true;
+    }
+    if (['False', 'false', '0'].includes(value)) {
+      return false;
+    }
+    throw new Error('BOOL格式不正确');
+  }
+
+  return value;
+};
+
+/**
+ * Get a random integer between min and max
+ */
+export function getRandomInt(min = 1, max = 10) {
+  if (min > max) {
+    throw new Error('Min is not allowed to be greater than max!!!');
+  }
+
+  min = Math.ceil(min);
+  max = Math.floor(max);
+  return Math.floor(Math.random() * (max - min + 1)) + min;
+}
+
+/**
+ * Whether a string can be parsed as JSON
+ */
+export const isStringCanBeParsed = (value: string) => {
+  try {
+    JSON.parse(value);
+    return true;
+  } catch (error) {
+    return false;
+  }
+};
+/**
+ * Flatten array
+ * @param {array} array
+ */
+export function flatten<T extends Array<any>>(array: T): T {
+  return array.reduce(
+    (acc, cur) => (Array.isArray(cur) ? [...acc, ...flatten(cur)] : [...acc, cur]),
+    [],
+  );
+}
+
+/**
+ * Depth First Search
+ * @param {object} node node
+ * @param {function} filter filter function
+ * @param {string} childFieldName children field name
+ */
+export function dfs<
+  T extends {
+    [key: string]: any;
+  }
+>(
+  node: T,
+  filter = (node: T, currentLevel: number) => true,
+  childFieldName = 'children',
+  currentLevel = 1,
+): T[] {
+  let resultList = [];
+
+  if (node[childFieldName] && node[childFieldName].length > 0) {
+    resultList = node[childFieldName].map((item: any) =>
+      dfs(item, filter, childFieldName, currentLevel + 1),
+    );
+  }
+
+  if (filter(node, currentLevel)) {
+    resultList.unshift(node);
+  }
+
+  return flatten(resultList);
+}
+
+export const fileExtToIconMap: {
+  [ext: string]: React.ReactNode;
+} = {
+  py: <Python />,
+  python: <Python />,
+  js: <Javascript />,
+  javascript: <Javascript />,
+  default: <Default />,
+  yml: <Yaml />,
+  yaml: <Yaml />,
+  json: <Json />,
+  config: <Config />,
+  md: <Markdown />,
+  gitignore: <GitIgnore />,
+};
+
+/**
+ * Format a flat (non-nested) path map into nested tree style
+ * @example
+ * from
+ * {
+ *  'main.py':'code...',
+ *  'leader/main.py':'code...',
+ *  'leader/test.py':'code...'
+ * } + * + * to + * [ + * { + * title:'main.py', + * key:'main.py', + * parentKey: '', + * isLeaf:true, + * code:'code...', + * icon:<Python />, + * fileExt:'py', + * children:[], + * isFolder:false + * } + * { + * title:'leader', + * key:'leader/', + * isFolder:true + * children:[ + * { + * title:'main.py', + * key:'leader/main.py', + * parentKey:'leader', + * isLeaf:true, + * code:'code...', + * icon:<Python />, + * fileExt:'py', + * children:[], + * isFolder:false + * }, + * ... + * ] + * } + * ] + */ +export function formatTreeData(filePathToCodeMap: FileData): FileDataNode[] { + const result: any = []; + const level = { result }; + + Object.keys(filePathToCodeMap).forEach((filePath) => { + const code = filePathToCodeMap[filePath]; + const filePathList = filePath.split('/'); + + let tempKey = ''; + filePathList.reduce((sum: any, fileNameOrFolderName: string, index: number) => { + const parentKey = tempKey; + tempKey += `${fileNameOrFolderName}/`; + if (!sum[fileNameOrFolderName]) { + sum[fileNameOrFolderName] = { result: [] }; + const isLastIndex = index === filePathList.length - 1; + const tempData: FileDataNode = { + title: fileNameOrFolderName, + key: tempKey.slice(0, -1), + children: sum[fileNameOrFolderName].result, + parentKey: parentKey.slice(0, -1), + isFolder: true, + isLeaf: isLastIndex + ? !Object.keys(filePathToCodeMap).some((innerPath) => { + // Case: filePathToCodeMap = { main: null, 'main/test.py': '1' } + // Because it is a file under 'main' folder, so 'main' folder isn't leaf node + return innerPath.startsWith(tempKey); + }) + : false, + }; + + // If code !== null, it will be treated as file + if (isLastIndex && code !== null) { + const extList = + fileNameOrFolderName.indexOf('.') > -1 ? fileNameOrFolderName.split('.') : []; + const fileExt = extList && extList.length > 0 ? extList[extList.length - 1] : ''; + tempData.icon = fileExtToIconMap[fileExt] || fileExtToIconMap['default']; + tempData.code = code; + tempData.isLeaf = true; + tempData.fileExt = fileExt; + tempData.isFolder = false; + } + sum.result.push(tempData); + } + + return sum[fileNameOrFolderName]; + }, level); + + tempKey = ''; + }); + return result; +} + +/** + * Get first file node,it will ignore folder + * @param {FileDataNode[]} formattedTreeData + */ +export function getFirstFileNode(formattedTreeData: FileDataNode[]): FileDataNode | null { + let node = null; + + for (let index = 0; index < formattedTreeData.length; index++) { + const currentNode = formattedTreeData[index]; + + if (!currentNode.isFolder) { + node = currentNode; + break; + } + + // Recursion, children + node = getFirstFileNode(currentNode.children ?? []); + + // If find target node,then stop recursion + if (node) { + break; + } + } + + return node; +} + +/** + * Format file tree node(Back-end format in algorithms api) to file data node(Front-end format in <FileExplorer/>) + * @example + * from + * { + filename: 'main.py', + path: 'main.py', + size: 96, + mtime: 1637141275, + is_directory: false, + files: [] + * } + * + * to + * { + title: 'main.py', + key: 'main.py', + parentKey: '', + isLeaf: true, + code: '', + icon: fileExtToIconMap['py'], + fileExt: 'py', + children: [], + isFolder: false + * } + */ +export function formatFileTreeNodeToFileDataNode(fileTreeNode: FileTreeNode): FileDataNode { + const pathList = fileTreeNode.path.split('/').filter((item) => item); + const extList = fileTreeNode.filename.indexOf('.') > -1 ? fileTreeNode.filename.split('.') : []; + const fileExt = extList.length > 0 ? 
+
+  return {
+    title: fileTreeNode.filename,
+    key: fileTreeNode.path,
+    parentKey: pathList.length > 1 ? pathList.slice(0, -1).join('/') : '',
+    isFolder: fileTreeNode.is_directory,
+    isLeaf: fileTreeNode.files.length === 0,
+    code: fileTreeNode.is_directory ? null : '',
+    fileExt,
+    icon: fileExtToIconMap[fileExt] || fileExtToIconMap['default'],
+    children: fileTreeNode.files.map((item) => formatFileTreeNodeToFileDataNode(item)),
+  };
+}
+export function formatFileTreeNodeListToFileDataNodeList(
+  fileTreeNodeList: FileTreeNode[],
+): FileDataNode[] {
+  return fileTreeNodeList.map((item) => formatFileTreeNodeToFileDataNode(item));
+}
+
+/**
+ * Format a file tree node (back-end format from the algorithms API) into file data (front-end format for <FileExplorer/>)
+ * @example
+ * from
+ * {
+    title: 'follower',
+    key: 'follower',
+    parentKey: '',
+    isFolder: true,
+    isLeaf: false,
+    code: null,
+    icon: fileExtToIconMap['default'],
+    fileExt: '',
+    children: [
+      {
+        title: 'main.py',
+        key: 'follower/main.py',
+        parentKey: 'follower',
+        isLeaf: true,
+        code: '',
+        icon: fileExtToIconMap['py'],
+        fileExt: 'py',
+        children: [],
+        isFolder: false,
+      },
+      {
+        title: 'subfolder',
+        key: 'follower/subfolder',
+        parentKey: 'follower',
+        isLeaf: false,
+        code: null,
+        icon: fileExtToIconMap['default'],
+        fileExt: '',
+        isFolder: true,
+        children: [
+          {
+            title: 'test.js',
+            key: 'follower/subfolder/test.js',
+            parentKey: 'follower/subfolder',
+            isLeaf: true,
+            code: '',
+            icon: fileExtToIconMap['js'],
+            fileExt: 'js',
+            children: [],
+            isFolder: false,
+          },
+        ],
+      },
+    ],
+ * }
+ *
+ * to
+ * {
+   'follower/main.py': '',
+   'follower/subfolder/test.js': '',
+ * }
+ */
+export function formatFileTreeNodeToFileData(fileTreeNode: FileTreeNode): FileData {
+  const finalFileData: FileData = {};
+  dfs(
+    fileTreeNode,
+    (node) => {
+      if (node.files.length === 0) {
+        finalFileData[node.path] = node.is_directory ? null : '';
+      }
+      return false;
+    },
+    'files',
+  );
+  return finalFileData;
+}
+export function formatFileTreeNodeListToFileData(fileTreeNodeList: FileTreeNode[]): FileData {
+  const finalFileData: FileData = {};
+  fileTreeNodeList.forEach((fileTreeNode) => {
+    dfs(
+      fileTreeNode,
+      (node) => {
+        // Store every path key, so that deleting several levels of empty folders at once still works
+        // if (node.files.length === 0) {
+        //   finalFileData[node.path] = node.is_directory ? null : '';
+        // }
+        finalFileData[node.path] = node.is_directory ? null : '';
+        return false;
+      },
+      'files',
+    );
+  });
+
+  return finalFileData;
+}
+
+/**
+ * Map a file extension to the full language id that the Monaco Editor expects
+ * @param language
+ * @returns formatted language
+ */
+export function formatLanguage(language: string) {
+  if (language === 'py') {
+    return 'python';
+  }
+  if (language === 'js') {
+    return 'javascript';
+  }
+  if (language === 'yml') {
+    return 'yaml';
+  }
+
+  if (language === 'sh') {
+    return 'shell';
+  }
+
+  return language;
+}
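+
+/**
+ * Usage sketch (illustrative): formatLanguage('py') === 'python', while an
+ * unmapped extension such as 'ts' passes through unchanged.
+ */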
+
+/**
+ * Get JWT headers
+ * @returns {object} headers { 'Authorization': string, 'x-pc-auth': string }
+ */
+export function getJWTHeaders() {
+  const accessToken = store.get(LOCAL_STORAGE_KEYS.current_user)?.access_token;
+  const tempAccessToken = store.get(LOCAL_STORAGE_KEYS.temp_access_token);
+  const { ssoName, ssoType } = store.get(LOCAL_STORAGE_KEYS.sso_info) ?? {};
+
+  let finalAccessToken = accessToken;
+
+  // 1. If tempAccessToken exists, it overrides accessToken
+  // 2. tempAccessToken is assigned in views/TokenCallback/index.tsx
+  if (tempAccessToken) {
+    finalAccessToken = tempAccessToken;
+  }
+
+  const headers: {
+    Authorization?: string;
+    'x-pc-auth'?: string;
+  } = {};
+
+  if (finalAccessToken) {
+    headers.Authorization = `Bearer ${finalAccessToken}`;
+  }
+
+  if (ssoName && ssoType && finalAccessToken) {
+    // Custom HTTP header, format like x-pc-auth: <sso_name> <type> <credentials>
+    headers['x-pc-auth'] = `${ssoName} ${ssoType} ${finalAccessToken}`;
+  }
+  return headers;
+}
+
+/**
+ * Convert CPU unit, Core to M
+ * @example convertCpuCoreToM(1, false) => 1000, convertCpuCoreToM("2Core", true) => "2000m"
+ */
+export function convertCpuCoreToM(core: number | string, withUnit: true): string;
+export function convertCpuCoreToM(core: number | string, withUnit?: false): number;
+export function convertCpuCoreToM(core: number | string, withUnit = false): number | string {
+  let numberCore = 0;
+
+  if (typeof core === 'number') {
+    numberCore = core;
+  }
+  if (typeof core === 'string') {
+    numberCore = parseFloat(core);
+  }
+
+  const result = numberCore * 1000;
+
+  return withUnit ? result + 'm' : result;
+}
+
+/**
+ * Convert CPU unit, M to Core
+ * @example convertCpuMToCore(1000, false) => 1, convertCpuMToCore("2000m", true) => "2Core"
+ */
+export function convertCpuMToCore(core: number | string, withUnit: true): string;
+export function convertCpuMToCore(core: number | string, withUnit?: false): number;
+export function convertCpuMToCore(core: number | string, withUnit = false): number | string {
+  let number = 0;
+
+  if (typeof core === 'number') {
+    number = core;
+  }
+  if (typeof core === 'string') {
+    number = parseFloat(core);
+  }
+
+  const result = number / 1000;
+
+  return withUnit ? result + 'Core' : result;
+}
diff --git a/web_console_v2/client/src/shared/localStorageKeys.ts b/web_console_v2/client/src/shared/localStorageKeys.ts
index 3fe49f1c6..d9adab15d 100644
--- a/web_console_v2/client/src/shared/localStorageKeys.ts
+++ b/web_console_v2/client/src/shared/localStorageKeys.ts
@@ -1,3 +1,5 @@
+/* istanbul ignore file */
+
 const LOCAL_STORAGE_KEYS = {
   language: 'language',
   current_user: 'current_user',
@@ -5,6 +7,11 @@ const LOCAL_STORAGE_KEYS = {
   sidebar_folded: 'sidebar_folded',
   projects_display: 'projects_display',
   current_project: 'current_project',
+  sys_email_group: 'sys_email_group',
+  app_flags: 'app_flags',
+  app_login_way_list: 'app_login_way_list',
+  sso_info: 'sso_info',
+  temp_access_token: 'temp_access_token',
 };
 
 export default LOCAL_STORAGE_KEYS;
diff --git a/web_console_v2/client/src/shared/modelCenter.test.ts b/web_console_v2/client/src/shared/modelCenter.test.ts
new file mode 100644
index 000000000..c037541f5
--- /dev/null
+++ b/web_console_v2/client/src/shared/modelCenter.test.ts
@@ -0,0 +1,514 @@
+import {
+  formatExtra,
+  formatListWithExtra,
+  formatMetrics,
+  formatIntersectionDatasetName,
+  getDefaultVariableValue,
+} from './modelCenter';
+
+import {
+  readyToRun,
+  invalid,
+  running,
+  completed,
+  stopped,
+  failed,
+} from 'services/mocks/v2/intersection_datasets/examples';
+import { completed as workflowCompletedItem } from 'services/mocks/v2/workflows/examples';
+import { modelJobMetric, modelJobMetric2 } from 'services/mocks/v2/model_jobs/examples';
+import { CONSTANTS } from 'shared/constants';
+
+const testItem1 = {
+  name: 'item1',
+  extra: JSON.stringify({ e1: 1, e2: 2, e3: 3 }),
+};
+const testItem2 = {
+  name: 'item2',
+  extra: JSON.stringify({ name: 'extraName2' }),
+};
+
+const testItem3 = {
...testItem1, + name: 'item3', + local_extra: JSON.stringify({ le1: 1, le2: 2, le3: 3 }), +}; +const testItem4 = { + ...testItem2, + name: 'item4', + local_extra: JSON.stringify({ name: 'localExtraName4' }), +}; + +const testItem5 = { + name: 'name5', + extra: JSON.stringify({ name: 'extraName5', e1: 1, e2: 2 }), + local_extra: JSON.stringify({ name: 'localExtraName5', e1: 3 }), +}; +describe('Model center helpers', () => { + describe('FormatExtra', () => { + it('Empty input', () => { + expect(formatExtra({})).toEqual({}); + }); + + it('Own extra field, but no local_extra field', () => { + expect(formatExtra(testItem1)).toEqual({ + ...testItem1, + e1: 1, + e2: 2, + e3: 3, + }); + expect(formatExtra(testItem2)).toEqual({ + ...testItem2, + name: 'extraName2', + }); + }); + it('Own extra field and local_extra field', () => { + expect(formatExtra(testItem3)).toEqual({ + ...testItem3, + e1: 1, + e2: 2, + e3: 3, + le1: 1, + le2: 2, + le3: 3, + }); + }); + it('Own extra field and local_extra field, local_extra will override extra', () => { + expect(formatExtra(testItem4)).toEqual({ + ...testItem4, + name: 'localExtraName4', + }); + expect(formatExtra(testItem5)).toEqual({ + ...testItem5, + name: 'localExtraName5', + e1: 3, + e2: 2, + }); + }); + it('Override extra field', () => { + expect(formatExtra(testItem4, true)).toEqual({ + ...testItem4, + }); + expect(formatExtra(testItem5, true)).toEqual({ + e1: 3, + e2: 2, + ...testItem5, + }); + }); + }); + + describe('FormatListWithExtra', () => { + it('Empty input', () => { + expect(formatListWithExtra([])).toEqual([]); + }); + it('Own extra field, but no local_extra field', () => { + expect(formatListWithExtra([testItem1, testItem2])).toEqual([ + { + ...testItem1, + e1: 1, + e2: 2, + e3: 3, + }, + { + ...testItem2, + name: 'extraName2', + }, + ]); + }); + it('Own extra field and local_extra field', () => { + expect(formatListWithExtra([testItem3])).toEqual([ + { + ...testItem3, + e1: 1, + e2: 2, + e3: 3, + le1: 1, + le2: 2, + le3: 3, + }, + ]); + }); + it('Own extra field and local_extra field, local_extra will override extra', () => { + expect(formatListWithExtra([testItem4, testItem5])).toEqual([ + { + ...testItem4, + name: 'localExtraName4', + }, + { + ...testItem5, + name: 'localExtraName5', + e1: 3, + e2: 2, + }, + ]); + }); + + it('Override extra field', () => { + expect(formatListWithExtra([testItem4, testItem5], true)).toEqual([ + { + ...testItem4, + }, + { + e1: 3, + e2: 2, + ...testItem5, + }, + ]); + }); + }); + + it('FormatIntersectionDatasetName', () => { + expect(formatIntersectionDatasetName(running)).toBe(running.name); + expect(formatIntersectionDatasetName(completed)).toBe(completed.name); + expect(formatIntersectionDatasetName(stopped)).toBe(stopped.name); + expect(formatIntersectionDatasetName(invalid)).toBe(invalid.name); + expect(formatIntersectionDatasetName(failed)).toBe(failed.name); + expect(formatIntersectionDatasetName(readyToRun)).toBe(readyToRun.name); + expect(formatIntersectionDatasetName({} as any)).toBe(CONSTANTS.EMPTY_PLACEHOLDER); + }); + + it('FormatMetrics', () => { + expect(formatMetrics({} as any)).toEqual({ + confusion_matrix: [], + eval: [], + evalMaxValue: 0, + featureImportanceMaxValue: 0, + feature_importance: [], + train: [], + trainMaxValue: 0, + }); + + expect( + formatMetrics({ + train: { + acc: null, + }, + eval: { + acc: null, + }, + } as any), + ).toEqual({ + confusion_matrix: [], + eval: [{ label: 'acc', value: null }], + evalMaxValue: 0, + featureImportanceMaxValue: 0, + feature_importance: [], + 
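+        // (formatMetrics reports only the last value of each metric series)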
train: [{ label: 'acc', value: null }], + trainMaxValue: 0, + }); + + expect( + formatMetrics({ + train: { + acc: { + steps: [1, 2, 3], + values: [4, 5, 6], + }, + auc: { + steps: [1, 2, 3], + values: [7, 8, 9], + }, + }, + eval: { + acc: { + steps: [1, 2, 3], + values: [14, 15, 16], + }, + auc: { + steps: [1, 2, 3], + values: [17, 18, 19], + }, + }, + } as any), + ).toEqual({ + confusion_matrix: [], + eval: [ + { label: 'acc', value: 16 }, + { label: 'auc', value: 19 }, + ], + evalMaxValue: 19, + featureImportanceMaxValue: 0, + feature_importance: [], + train: [ + { label: 'acc', value: 6 }, + { label: 'auc', value: 9 }, + ], + trainMaxValue: 9, + }); + + expect( + formatMetrics({ + train: { + acc: { + steps: [1], + values: [4], + }, + auc: { + steps: [1], + values: [7], + }, + }, + eval: { + acc: { + steps: [1], + values: [14], + }, + auc: { + steps: [1], + values: [17], + }, + }, + } as any), + ).toEqual({ + confusion_matrix: [], + eval: [ + { label: 'acc', value: 14 }, + { label: 'auc', value: 17 }, + ], + evalMaxValue: 17, + featureImportanceMaxValue: 0, + feature_importance: [], + train: [ + { label: 'acc', value: 4 }, + { label: 'auc', value: 7 }, + ], + trainMaxValue: 7, + }); + + expect( + formatMetrics({ + confusion_matrix: { + fp: null, + fn: undefined, + tn: 40, + tp: 0, + }, + } as any), + ).toEqual({ + confusion_matrix: [ + { label: 'tp', value: 0, percentValue: '0%' }, + { label: 'fn', value: 0, percentValue: '0%' }, + { label: 'fp', value: 0, percentValue: '0%' }, + { label: 'tn', value: 40, percentValue: '100%' }, + ], + eval: [], + evalMaxValue: 0, + featureImportanceMaxValue: 0, + feature_importance: [], + train: [], + trainMaxValue: 0, + }); + + expect( + formatMetrics({ + confusion_matrix: { + fp: null, + fn: undefined, + tn: 0, + tp: 0, + }, + } as any), + ).toEqual({ + confusion_matrix: [ + { label: 'tp', value: 0, percentValue: CONSTANTS.EMPTY_PLACEHOLDER }, + { label: 'fn', value: 0, percentValue: CONSTANTS.EMPTY_PLACEHOLDER }, + { label: 'fp', value: 0, percentValue: CONSTANTS.EMPTY_PLACEHOLDER }, + { label: 'tn', value: 0, percentValue: CONSTANTS.EMPTY_PLACEHOLDER }, + ], + eval: [], + evalMaxValue: 0, + featureImportanceMaxValue: 0, + feature_importance: [], + train: [], + trainMaxValue: 0, + }); + + expect(formatMetrics(modelJobMetric)).toEqual({ + confusion_matrix: [ + { label: 'tp', value: 30, percentValue: '30%' }, + { label: 'fn', value: 22, percentValue: '22%' }, + { label: 'fp', value: 8, percentValue: '8%' }, + { label: 'tn', value: 40, percentValue: '40%' }, + ], + eval: [ + { label: 'acc', value: 0.9 }, + { label: 'auc', value: 0.8 }, + { label: 'precision', value: 0.7 }, + { label: 'recall', value: 0.2 }, + { label: 'f1', value: 0.1 }, + { label: 'ks', value: 0.7 }, + ], + evalMaxValue: 0.9, + featureImportanceMaxValue: 0.7, + feature_importance: [ + { label: 'test_13', value: 0.7 }, + { label: 'test_14', value: 0.6 }, + { label: 'test_15', value: 0.5 }, + { label: 'test_16', value: 0.4 }, + { label: 'peer-1', value: 0.3 }, + { label: 'peer-2', value: 0.3 }, + { label: 'age', value: 0.3 }, + { label: 'overall_score', value: 0.3 }, + { label: 'test_17', value: 0.3 }, + { label: 'salary', value: 0.2 }, + { label: 'test_19', value: 0.2 }, + { label: 'peer-3', value: 0.1 }, + { label: 'education', value: 0.1 }, + { label: 'height', value: 0.1 }, + { label: 'peer-0', value: 0.08 }, + ], + train: [ + { label: 'acc', value: 0.9 }, + { label: 'auc', value: 0.8 }, + { label: 'precision', value: 0.7 }, + { label: 'recall', value: 0.2 }, + { label: 'f1', value: 
0.1 }, + { label: 'ks', value: 0.7 }, + ], + trainMaxValue: 0.9, + }); + + expect(formatMetrics(modelJobMetric2)).toEqual({ + confusion_matrix: [ + { label: 'tp', value: 22, percentValue: '21.56%' }, + { label: 'fn', value: 22, percentValue: '21.56%' }, + { label: 'fp', value: 8, percentValue: '7.84%' }, + { label: 'tn', value: 50, percentValue: '49.01%' }, + ], + eval: [ + { label: 'acc', value: 0.1 }, + { label: 'auc', value: 0.2 }, + { label: 'precision', value: 0.3 }, + { label: 'recall', value: 0.4 }, + { label: 'f1', value: 0.5 }, + { label: 'ks', value: 0.4 }, + ], + evalMaxValue: 0.5, + featureImportanceMaxValue: 0.7, + feature_importance: [ + { label: 'test_13', value: 0.7 }, + { label: 'test_14', value: 0.6 }, + { label: 'test_15', value: 0.5 }, + { label: 'test_16', value: 0.4 }, + { label: 'peer-1', value: 0.3 }, + { label: 'peer-2', value: 0.3 }, + { label: 'age', value: 0.3 }, + { label: 'overall_score', value: 0.3 }, + { label: 'test_17', value: 0.3 }, + { label: 'salary', value: 0.2 }, + { label: 'test_19', value: 0.2 }, + { label: 'peer-3', value: 0.1 }, + { label: 'education', value: 0.1 }, + { label: 'height', value: 0.1 }, + { label: 'peer-0', value: 0.08 }, + ], + train: [ + { label: 'acc', value: 0.1 }, + { label: 'auc', value: 0.2 }, + { label: 'precision', value: 0.3 }, + { label: 'recall', value: 0.4 }, + { label: 'f1', value: 0.5 }, + { label: 'ks', value: 0.4 }, + ], + trainMaxValue: 0.5, + }); + }); + + it('getDefaultVariableValue', () => { + expect(getDefaultVariableValue(workflowCompletedItem)).toBe(undefined); + expect(getDefaultVariableValue(workflowCompletedItem, 'image_version')).toBe('v1.5-rc3'); + expect(getDefaultVariableValue({} as any)).toBe(undefined); + expect( + getDefaultVariableValue({ + config: { + variables: [], + job_definitions: [], + }, + } as any), + ).toBe(undefined); + expect( + getDefaultVariableValue({ + config: { + variables: [], + job_definitions: [ + { + variables: [{ name: 'image', value: '123' }], + }, + ], + }, + } as any), + ).toBe('123'); + expect( + getDefaultVariableValue({ + config: { + variables: [{ name: 'image', value: '456' }], + job_definitions: [ + { + variables: [{ name: 'image', value: '123' }], + }, + ], + }, + } as any), + ).toBe('456'); + expect( + getDefaultVariableValue({ + config: { + variables: [], + job_definitions: [ + { + variables: [ + { name: 'image', value: '123' }, + { name: 'image', value: '456' }, + ], + }, + ], + }, + } as any), + ).toBe('123'); + expect( + getDefaultVariableValue( + { + config: { + variables: [{ name: 'image', value: '456' }], + job_definitions: [ + { + variables: [ + { name: 'image', value: '123' }, + { name: 'image_version', value: '789' }, + ], + }, + ], + }, + } as any, + 'image_version', + ), + ).toBe('789'); + + expect( + getDefaultVariableValue({ + config: { + variables: [{ name: 'image', value: undefined }], + job_definitions: [ + { + variables: [{ name: 'image', value: '123' }], + }, + ], + }, + } as any), + ).toBe(undefined); + expect( + getDefaultVariableValue({ + config: { + variables: [], + job_definitions: [ + { + variables: [ + { name: 'image', value: undefined }, + { name: 'image', value: '456' }, + ], + }, + ], + }, + } as any), + ).toBe(undefined); + }); +}); diff --git a/web_console_v2/client/src/shared/modelCenter.ts b/web_console_v2/client/src/shared/modelCenter.ts new file mode 100644 index 000000000..2d28ed10f --- /dev/null +++ b/web_console_v2/client/src/shared/modelCenter.ts @@ -0,0 +1,177 @@ +import { floor, isPlainObject } from 'lodash-es'; +import { 
formatObjectToArray } from 'shared/helpers'; +import { ModelJobMetrics } from 'typings/modelCenter'; +import { IntersectionDataset } from 'typings/dataset'; +import { WorkflowExecutionDetails } from 'typings/workflow'; +import { CONSTANTS } from 'shared/constants'; + +export type WithExtra = { + extra?: any; + local_extra?: any; +}; + +type Item = { + label: string; + value: number | null; +}; + +export function formatExtra<T extends WithExtra>( + originModelSet: T, + isOverrideExtraField = false, +): T { + let tempExtra = {} as T; + let tempLocalExtra = {} as T; + try { + tempExtra = JSON.parse(originModelSet.extra); + } catch (error) {} + + try { + tempLocalExtra = JSON.parse(originModelSet.local_extra); + } catch (error) {} + + if (isOverrideExtraField) return { ...tempExtra, ...tempLocalExtra, ...originModelSet }; + + return { ...originModelSet, ...tempExtra, ...tempLocalExtra }; +} + +export function formatListWithExtra<T extends WithExtra>( + originModelSetList: T[], + isOverrideExtraField = false, +): T[] { + return originModelSetList.map((item) => formatExtra(item, isOverrideExtraField)); +} + +export function formatMetrics<T extends ModelJobMetrics>(metrics: T, maxFeatureCount = 15) { + let trainMaxValue = 0; + let evalMaxValue = 0; + + // train + const trainList = Object.keys(metrics.train ?? {}).reduce((sum, key) => { + const value = metrics.train[key]; + + let finalValue = null; + if (Array.isArray(value)) { + finalValue = + (value && value.length > 1 ? floor(value[1][value[1].length - 1], 3) : null) ?? null; // get last value from array + } + if (isPlainObject(value)) { + const values = value.values as number[]; + + finalValue = + (values && values.length > 0 ? floor(values[values.length - 1], 3) : null) ?? null; // get last value from array + } + + trainMaxValue = Math.max(Number(finalValue), trainMaxValue); + + sum.push({ + label: key, + value: finalValue, + }); + + return sum; + }, [] as Item[]); + + // eval + const evalList = Object.keys(metrics.eval ?? {}).reduce((sum, key) => { + const value = metrics.eval[key] || []; + + let finalValue = null; + if (Array.isArray(value)) { + finalValue = + (value && value.length > 1 ? floor(value[1][value[1].length - 1], 3) : null) ?? null; // get last value from array + } + if (isPlainObject(value)) { + const values = value.values as number[]; + + finalValue = + (values && values.length > 0 ? floor(values[values.length - 1], 3) : null) ?? null; // get last value from array + } + + evalMaxValue = Math.max(Number(finalValue), evalMaxValue); + + sum.push({ + label: key, + value: finalValue, + }); + + return sum; + }, [] as Item[]); + + // confusion_matrix + let confusion_matrix = formatObjectToArray(metrics.confusion_matrix ?? {}, [ + 'tp', + 'fn', + 'fp', + 'tn', + ]) as Array<{ + label: string; + value: number | null; + percentValue: string; + }>; + + const total = + Number(metrics?.confusion_matrix?.tp ?? 0) + + Number(metrics?.confusion_matrix?.fn ?? 0) + + Number(metrics?.confusion_matrix?.fp ?? 0) + + Number(metrics?.confusion_matrix?.tn ?? 0); + + // calc each confusion_matrix item percent + confusion_matrix = confusion_matrix.map((item) => { + return { + ...item, + value: item.value || 0, + percentValue: + total <= 0 + ? CONSTANTS.EMPTY_PLACEHOLDER + : `${floor(((item.value || 0) / total) * 100, 2)}%`, + }; + }); + + // feature_importance + let feature_importance = formatObjectToArray(metrics.feature_importance ?? 
{}); + + // order by value + feature_importance.sort((a, b) => Number(b.value) - Number(a.value)); + + // slice feature_importance, default Top 15 + feature_importance = feature_importance.slice(0, maxFeatureCount); + + return { + train: trainList, + eval: evalList, + confusion_matrix, + feature_importance, + trainMaxValue, + evalMaxValue, + featureImportanceMaxValue: feature_importance?.[0]?.value ?? 0, + }; +} + +export function formatIntersectionDatasetName(dataset: IntersectionDataset) { + return dataset.name || CONSTANTS.EMPTY_PLACEHOLDER; +} + +export function getDefaultVariableValue( + workflow: WorkflowExecutionDetails, + imageField = 'image', +): any { + // variables + if (workflow.config && workflow.config.variables) { + const imageVariable = workflow.config.variables.find((item) => item.name === imageField); + if (imageVariable) { + return imageVariable.value; + } + } + // job_definitions + if (workflow.config && workflow.config.job_definitions && workflow.config.job_definitions) { + for (let i = 0; i < workflow.config.job_definitions.length; i++) { + for (let j = 0; j < workflow.config.job_definitions[i].variables.length; j++) { + if (workflow.config.job_definitions[i].variables[j].name === imageField) { + return workflow.config.job_definitions[i].variables[j].value; + } + } + } + } + + return undefined; +} diff --git a/web_console_v2/client/src/shared/object.test.ts b/web_console_v2/client/src/shared/object.test.ts index 746febfb0..af712f73c 100644 --- a/web_console_v2/client/src/shared/object.test.ts +++ b/web_console_v2/client/src/shared/object.test.ts @@ -4,6 +4,7 @@ import { removePrivate, transformKeysToSnakeCase, binarizeBoolean, + record, } from './object'; describe('Object helpers', () => { @@ -87,4 +88,50 @@ describe('Object helpers', () => { expect(binarizeBoolean(i)).toEqual(o); }); }); + it('Record', () => { + expect( + record( + { + a: 1, + b: 2, + c: 3, + }, + 1, + ), + ).toEqual({ + a: 1, + b: 1, + c: 1, + }); + + expect( + record( + { + a: 1, + b: 2, + c: 3, + }, + false, + ), + ).toEqual({ + a: false, + b: false, + c: false, + }); + + expect( + record( + { + a: 1, + b: 2, + c: 3, + }, + undefined, + ), + ).toEqual({ + a: undefined, + b: undefined, + c: undefined, + }); + }); }); diff --git a/web_console_v2/client/src/shared/object.ts b/web_console_v2/client/src/shared/object.ts index 4dadebb0c..17f143b42 100644 --- a/web_console_v2/client/src/shared/object.ts +++ b/web_console_v2/client/src/shared/object.ts @@ -1,4 +1,4 @@ -import { isNil, isUndefined, omitBy, snakeCase } from 'lodash'; +import { isNil, isUndefined, omitBy, snakeCase } from 'lodash-es'; /* istanbul ignore next */ export function removeUndefined(values: object) { @@ -39,3 +39,10 @@ export function binarizeBoolean(values: object) { return ret; }, {} as Record<string, any>); } + +export function record<O = Object, V = any>(obj: O, targetValue: V): Record<keyof O, V> { + return Object.entries(obj).reduce((ret, [key, val]) => { + ret[key] = targetValue; + return ret; + }, {} as any); +} diff --git a/web_console_v2/client/src/shared/operationRecord.test.ts b/web_console_v2/client/src/shared/operationRecord.test.ts new file mode 100644 index 000000000..34f414910 --- /dev/null +++ b/web_console_v2/client/src/shared/operationRecord.test.ts @@ -0,0 +1,531 @@ +import { + getOperationRecordList, + addOperationRecord, + deleteOperationRecord, + editOperationRecord, + renameOperationRecord, +} from './operationRecord'; + +import { OperationRecord, OperationType } from 'typings/algorithm'; + +const 
testNameList = ['main.py', 'folder1', 'folder1/f1.py', 'folder2']; +const testContent = '# coding: utf-8\n'; + +const addRecordList: OperationRecord[] = []; +const deleteRecordList: OperationRecord[] = []; +const editRecordList: OperationRecord[] = []; +const renameRecordList: OperationRecord[] = []; + +testNameList.forEach((name) => { + const isFolder = name.indexOf('.') === -1; + + addRecordList.push({ + path: name, + content: '', + type: OperationType.ADD, + isFolder, + }); + deleteRecordList.push({ + path: name, + content: testContent, + type: OperationType.DELETE, + isFolder, + }); + editRecordList.push({ + path: name, + content: testContent, + type: OperationType.EDIT, + isFolder, + }); + renameRecordList.push({ + path: name, + content: '', + type: OperationType.RENAME, + isFolder, + newPath: 'no-main.py', + }); +}); + +describe('AddOperationRecord', () => { + it('Empty operation record list', () => { + expect(addOperationRecord([], addRecordList[0], false)).toEqual([addRecordList[0]]); + expect(addOperationRecord([], addRecordList[0], true)).toEqual([ + { ...addRecordList[0], type: OperationType.EDIT }, + ]); + }); + + it('Operation record list with same path record that type is OperationType.DELETE', () => { + expect(addOperationRecord([deleteRecordList[0]], addRecordList[0], false)).toEqual([ + { ...addRecordList[0], type: OperationType.EDIT }, + ]); + expect(addOperationRecord([deleteRecordList[0]], addRecordList[0], true)).toEqual([ + { ...addRecordList[0], type: OperationType.EDIT }, + ]); + }); + + it('Operation record list with same path record but different isFolder type', () => { + expect( + addOperationRecord([{ ...addRecordList[0], isFolder: true }], addRecordList[0], false), + ).toEqual([{ ...addRecordList[0], isFolder: true }, addRecordList[0]]); + + expect( + addOperationRecord([{ ...addRecordList[0], isFolder: true }], addRecordList[0], true), + ).toEqual([ + { ...addRecordList[0], isFolder: true }, + { ...addRecordList[0], type: OperationType.EDIT }, + ]); + }); + + it('Operation record list with no same path record', () => { + expect( + addOperationRecord( + [addRecordList[1], addRecordList[2], addRecordList[3], editRecordList[1]], + addRecordList[0], + false, + ), + ).toEqual([ + addRecordList[1], + addRecordList[2], + addRecordList[3], + editRecordList[1], + addRecordList[0], + ]); + }); + + it('Add existed folder', () => { + expect(addOperationRecord([], addRecordList[1], true)).toEqual([]); + expect(addOperationRecord([deleteRecordList[1]], addRecordList[1], true)).toEqual([]); + }); +}); + +describe('DeleteOperationRecord', () => { + it('Empty operation record list', () => { + expect(deleteOperationRecord([], deleteRecordList[0], false)).toEqual([]); + expect(deleteOperationRecord([], deleteRecordList[0], true)).toEqual([deleteRecordList[0]]); + }); + + it('Operation record list with same path record', () => { + expect( + deleteOperationRecord([addRecordList[0], editRecordList[0]], deleteRecordList[0], false), + ).toEqual([]); + expect( + deleteOperationRecord([addRecordList[0], editRecordList[0]], deleteRecordList[0], true), + ).toEqual([deleteRecordList[0]]); + }); + + it('Delete folder', () => { + expect( + deleteOperationRecord([addRecordList[1], addRecordList[2]], deleteRecordList[1], false), + ).toEqual([]); + expect( + deleteOperationRecord([addRecordList[1], addRecordList[2]], deleteRecordList[1], true), + ).toEqual([deleteRecordList[1]]); + }); +}); + +describe('EditOperationRecord', () => { + it('Empty operation record list', () => { + 
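+    // Editing a file with no prior record materializes one: as ADD when the
+    // node never existed on the back end, otherwise as EDIT.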
expect(editOperationRecord([], editRecordList[0], false)).toEqual([ + { ...editRecordList[0], type: OperationType.ADD }, + ]); + expect(editOperationRecord([], editRecordList[0], true)).toEqual([editRecordList[0]]); + }); + + it('Operation record list with same path record that type is OperationType.ADD', () => { + expect(editOperationRecord([addRecordList[0]], editRecordList[0], false)).toEqual([ + { ...editRecordList[0], type: OperationType.ADD }, + ]); + expect(editOperationRecord([addRecordList[0]], editRecordList[0], true)).toEqual([ + editRecordList[0], + ]); + }); + + it('Operation record list with no same path record', () => { + expect( + editOperationRecord( + [addRecordList[1], addRecordList[2], addRecordList[3], editRecordList[3]], + editRecordList[0], + false, + ), + ).toEqual([ + addRecordList[1], + addRecordList[2], + addRecordList[3], + editRecordList[3], + { ...editRecordList[0], type: OperationType.ADD }, + ]); + + expect( + editOperationRecord( + [addRecordList[1], addRecordList[2], addRecordList[3], editRecordList[3]], + editRecordList[0], + true, + ), + ).toEqual([ + addRecordList[1], + addRecordList[2], + addRecordList[3], + editRecordList[3], + editRecordList[0], + ]); + }); + + it('Edit folder', () => { + expect(editOperationRecord([], editRecordList[1], false)).toEqual([]); + expect(editOperationRecord([], editRecordList[1], true)).toEqual([]); + expect(editOperationRecord([addRecordList[1]], editRecordList[1], false)).toEqual([ + addRecordList[1], + ]); + expect(editOperationRecord([addRecordList[1]], editRecordList[1], true)).toEqual([ + addRecordList[1], + ]); + }); +}); + +describe('RenameOperationRecord', () => { + it('Empty operation record list', () => { + try { + renameOperationRecord([], renameRecordList[0], false); + } catch (error) { + expect(error.message).toMatch( + 'Required type === OperationType.ADD record in prev operation record list', + ); + } + expect(renameOperationRecord([], renameRecordList[0], true)).toEqual([renameRecordList[0]]); + }); + + it('Rename existed node in Back-end', () => { + expect(renameOperationRecord([], renameRecordList[0], true)).toEqual([renameRecordList[0]]); + expect(renameOperationRecord([], renameRecordList[1], true)).toEqual([renameRecordList[1]]); + expect(renameOperationRecord([editRecordList[0]], renameRecordList[0], true)).toEqual([ + editRecordList[0], + renameRecordList[0], + ]); + expect( + renameOperationRecord( + [addRecordList[1], addRecordList[2], addRecordList[3], editRecordList[3]], + renameRecordList[0], + true, + ), + ).toEqual([ + addRecordList[1], + addRecordList[2], + addRecordList[3], + editRecordList[3], + renameRecordList[0], + ]); + }); + + it('Rename file when isExistedNode = false', () => { + expect(renameOperationRecord([addRecordList[0]], renameRecordList[0], false)).toEqual([ + { + ...addRecordList[0], + path: renameRecordList[0].newPath, + type: OperationType.ADD, + }, + ]); + }); + it('Rename folder when isExistedNode = false', () => { + expect( + renameOperationRecord([addRecordList[1], addRecordList[2]], renameRecordList[1], false), + ).toEqual( + [addRecordList[1], addRecordList[2]].map((item) => { + const oldPathReg = new RegExp(`^${renameRecordList[1].path}`); + return { + ...item, + path: item.path.replace(oldPathReg, renameRecordList[1].newPath!), + }; + }), + ); + }); +}); + +describe('GetOperationRecordList', () => { + it('When isExistedNode = false', () => { + let prevOperationRecordList: OperationRecord[] = []; + let resultOperationRecordList: OperationRecord[] = []; + + // Add 
file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + addRecordList[0], + false, + ); + resultOperationRecordList = [addRecordList[0]]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Then add empty folder + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + addRecordList[1], + false, + ); + resultOperationRecordList = [resultOperationRecordList[0], addRecordList[1]]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Then add file in the folder + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + addRecordList[2], + false, + ); + resultOperationRecordList = [ + resultOperationRecordList[0], + resultOperationRecordList[1], + addRecordList[2], + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Edit first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + editRecordList[0], + false, + ); + resultOperationRecordList = [ + { ...editRecordList[0], type: OperationType.ADD }, + resultOperationRecordList[1], + resultOperationRecordList[2], + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Edit second file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + editRecordList[2], + false, + ); + resultOperationRecordList = [ + resultOperationRecordList[0], + resultOperationRecordList[1], + { ...editRecordList[2], type: OperationType.ADD }, + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Rename first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + renameRecordList[0], + false, + ); + resultOperationRecordList = [ + { + ...renameRecordList[0], + type: OperationType.ADD, + path: renameRecordList[0].newPath!, + content: editRecordList[0].content, + newPath: undefined, + }, + resultOperationRecordList[1], + resultOperationRecordList[2], + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Edit first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + path: renameRecordList[0].newPath!, + content: '#123', + type: OperationType.EDIT, + isFolder: false, + }, + false, + ); + resultOperationRecordList = [ + { + path: renameRecordList[0].newPath!, + content: '#123', + type: OperationType.ADD, + isFolder: false, + }, + resultOperationRecordList[1], + resultOperationRecordList[2], + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Delete first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + path: renameRecordList[0].newPath!, + content: '', + type: OperationType.DELETE, + isFolder: false, + }, + false, + ); + resultOperationRecordList = [resultOperationRecordList[1], resultOperationRecordList[2]]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Rename folder + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + ...renameRecordList[1], + newPath: 'newFolderName', + }, + false, + ); + const oldPathReg = new RegExp(`^${renameRecordList[1].path}`); + resultOperationRecordList = [ + { ...resultOperationRecordList[0], path: 'newFolderName' }, + { + ...resultOperationRecordList[1], + path: editRecordList[2].path.replace(oldPathReg, 'newFolderName'), + }, + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Delete folder + prevOperationRecordList = 
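+    // With isExistedNode = false the folder never existed on the back end, so
+    // deleting it simply drops every pending record under it (empty list below).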
getOperationRecordList( + prevOperationRecordList, + { + ...deleteRecordList[1], + path: 'newFolderName', + }, + false, + ); + resultOperationRecordList = []; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + }); + + it('When isExistedNode = true', () => { + let prevOperationRecordList: OperationRecord[] = []; + let resultOperationRecordList: OperationRecord[] = []; + + // Add file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + addRecordList[0], + true, + ); + resultOperationRecordList = [{ ...addRecordList[0], type: OperationType.EDIT }]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Then add empty folder + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + addRecordList[1], + true, + ); + expect(prevOperationRecordList).toEqual(resultOperationRecordList); // If it is existed same folder node in Back-end, don't add record + + // Then add file in the folder + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + addRecordList[2], + true, + ); + resultOperationRecordList = [ + resultOperationRecordList[0], + { ...addRecordList[2], type: OperationType.EDIT }, + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Edit first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + editRecordList[0], + true, + ); + resultOperationRecordList = [resultOperationRecordList[1], editRecordList[0]]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Edit second file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + editRecordList[2], + true, + ); + resultOperationRecordList = [resultOperationRecordList[1], editRecordList[2]]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Rename first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + renameRecordList[0], + true, + ); + resultOperationRecordList = [ + resultOperationRecordList[0], + resultOperationRecordList[1], + renameRecordList[0], + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Edit first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + path: renameRecordList[0].newPath!, + content: '#123', + type: OperationType.EDIT, + isFolder: false, + }, + true, + ); + resultOperationRecordList.push({ + path: renameRecordList[0].newPath!, + content: '#123', + type: OperationType.EDIT, + isFolder: false, + }); + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Delete first file + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + path: renameRecordList[0].newPath!, + content: '', + type: OperationType.DELETE, + isFolder: false, + }, + true, + ); + resultOperationRecordList = [ + resultOperationRecordList[0], + resultOperationRecordList[1], + resultOperationRecordList[2], + { + path: renameRecordList[0].newPath!, + content: '', + type: OperationType.DELETE, + isFolder: false, + }, + ]; + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Rename folder + prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + ...renameRecordList[1], + newPath: 'newFolderName', + }, + true, + ); + resultOperationRecordList.push({ + ...renameRecordList[1], + newPath: 'newFolderName', + }); + + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + + // Delete folder + 
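// [Editor's note] The walkthroughs here feed operations in one at a time; a whole
// editing session can equivalently be folded with reduce. A sketch, assuming
// `session` holds the user's operations in chronological order:
const session: OperationRecord[] = [];
const payload = session.reduce<OperationRecord[]>(
  (prev, cur) => getOperationRecordList(prev, cur, false),
  [],
);
// `payload` is the minimal record list left to submit to the back end.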
prevOperationRecordList = getOperationRecordList( + prevOperationRecordList, + { + ...deleteRecordList[1], + path: 'newFolderName', + }, + true, + ); + resultOperationRecordList.push({ + ...deleteRecordList[1], + path: 'newFolderName', + }); + expect(prevOperationRecordList).toEqual(resultOperationRecordList); + }); +}); diff --git a/web_console_v2/client/src/shared/operationRecord.ts b/web_console_v2/client/src/shared/operationRecord.ts new file mode 100644 index 000000000..aa4e71fcd --- /dev/null +++ b/web_console_v2/client/src/shared/operationRecord.ts @@ -0,0 +1,182 @@ +import { OperationRecord, OperationType } from 'typings/algorithm'; + +function _removePrevNode(arr: OperationRecord[], obj: OperationRecord) { + return arr.filter((item) => { + return item.path !== obj.path || item.isFolder !== obj.isFolder; + }); +} + +function _removeSubNode(arr: OperationRecord[], obj: OperationRecord) { + const pathReg = new RegExp(`^${obj.path}/`); + return arr.filter((item) => { + return !pathReg.test(item.path); + }); +} + +export function getOperationRecordList( + prev: OperationRecord[], + cur: OperationRecord, + isExistedNode = false, +): OperationRecord[] { + let finalOperationRecordList: OperationRecord[] = []; + + switch (cur.type) { + case OperationType.ADD: + finalOperationRecordList = addOperationRecord(prev, cur, isExistedNode); + break; + case OperationType.DELETE: + finalOperationRecordList = deleteOperationRecord(prev, cur, isExistedNode); + break; + case OperationType.EDIT: + finalOperationRecordList = editOperationRecord(prev, cur, isExistedNode); + break; + case OperationType.RENAME: + finalOperationRecordList = renameOperationRecord(prev, cur, isExistedNode); + break; + default: + break; + } + + return finalOperationRecordList; +} + +export function addOperationRecord( + prev: OperationRecord[], + cur: OperationRecord, + isExistedNode = false, +): OperationRecord[] { + // If the same folder node already exists in the back end, don't add a record + if (isExistedNode && cur.isFolder) { + return _removePrevNode(prev, cur); + } + + const prevDeleteFileRecord = prev.find( + (item) => + item.path === cur.path && + item.type === OperationType.DELETE && + item.isFolder === cur.isFolder && + !item.isFolder, // file node + ); + + /** + * Note: change type to OperationType.EDIT + * + * case 1: + * 1. delete a file + * 2. add a file with the same name + * + * case 2: + * the same file node already exists in the back end + */ + if (prevDeleteFileRecord || (isExistedNode && !cur.isFolder)) { + return _removePrevNode(prev, cur).concat([ + { + ...cur, + type: OperationType.EDIT, + content: '', // clear file content + }, + ]); + } + + // Just append the new record + return [...prev, cur]; +} + +export function deleteOperationRecord( + prev: OperationRecord[], + cur: OperationRecord, + isExistedNode = false, +): OperationRecord[] { + // TODO: how should renamed nodes be handled here? + + if (cur.isFolder) { + // Remove every record inside the folder and every same-path record, then append a new record if the node exists in the back end + return _removeSubNode(_removePrevNode(prev, cur), cur).concat(isExistedNode ? [cur] : []); + } + + // Remove every same-path record, then append a new record if the node exists in the back end + return _removePrevNode(prev, cur).concat(isExistedNode ? 
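// [Editor's note] A sketch of the folder-delete behaviour implemented above; the
// records are illustrative, with shapes assumed from 'typings/algorithm'.
const folderRecords: OperationRecord[] = [
  { path: 'folder', content: '', type: OperationType.ADD, isFolder: true },
  { path: 'folder/a.py', content: '1', type: OperationType.ADD, isFolder: false },
];
const delFolder: OperationRecord = { path: 'folder', content: '', type: OperationType.DELETE, isFolder: true };

deleteOperationRecord(folderRecords, delFolder, false); // => [] (a local-only folder simply vanishes)
deleteOperationRecord(folderRecords, delFolder, true); // => [delFolder] (one DELETE reaches the back end)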
[cur] : []); +} + +export function editOperationRecord( + prev: OperationRecord[], + cur: OperationRecord, + isExistedNode = false, +): OperationRecord[] { + // Can't edit folder content + if (cur.isFolder) { + return prev; + } + + const prevAddRecordIndex = prev.findIndex( + (item) => + item.path === cur.path && item.type === OperationType.ADD && item.isFolder === cur.isFolder, + ); + + /** + * Note: fold the edit into the previous OperationType.ADD record + * 1. add one file + * 2. re-edit this file many times + */ + if (prevAddRecordIndex > -1) { + return [ + ...prev.slice(0, prevAddRecordIndex), + { + ...prev[prevAddRecordIndex], + content: cur.content, // update content to cur.content + type: isExistedNode ? OperationType.EDIT : OperationType.ADD, + }, + ...prev.slice(prevAddRecordIndex + 1), + ]; + } + + // TODO: is it necessary to call _removePrevNode? + return _removePrevNode(prev, cur).concat([ + { ...cur, type: isExistedNode ? OperationType.EDIT : OperationType.ADD }, + ]); +} +export function renameOperationRecord( + prev: OperationRecord[], + cur: OperationRecord, + isExistedNode = false, +): OperationRecord[] { + if (isExistedNode) { + // Just append the new record + return [...prev, cur]; + } + + if (!Object.prototype.hasOwnProperty.call(cur, 'newPath')) { + throw new Error('Required newPath!!!'); + } + + if (cur.isFolder) { + const oldPathReg = new RegExp(`^${cur.path}`); + + // Rewrite every old path under the folder to the new path + return prev.map((item) => { + return { ...item, path: item.path.replace(oldPathReg, cur.newPath!) }; + }); + } + + const prevAddRecordIndex = prev.findIndex( + (item) => + item.path === cur.path && + item.type === OperationType.ADD && + item.isFolder === cur.isFolder && + !cur.isFolder, + ); + + if (prevAddRecordIndex === -1) { + throw new Error('Required type === OperationType.ADD record in prev operation record list!!!'); + } + + return [ + ...prev.slice(0, prevAddRecordIndex), + { + ...prev[prevAddRecordIndex], + path: cur.newPath!, // replace the old path with the new path + type: OperationType.ADD, + }, + ...prev.slice(prevAddRecordIndex + 1), + ]; +} diff --git a/web_console_v2/client/src/shared/queryClient.ts b/web_console_v2/client/src/shared/queryClient.ts index 97ef042a9..9f64990f3 100644 --- a/web_console_v2/client/src/shared/queryClient.ts +++ b/web_console_v2/client/src/shared/queryClient.ts @@ -2,13 +2,19 @@ import { QueryClient } from 'react-query'; -const queryClient = new QueryClient(); +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: 2, + }, + }, +}); /** * Force to trigger one or multiple query's refetch * by invalid there's result */ -export function forceToRefreshQuery(queryKey: string | string[]) { +export function forceToRefreshQuery(queryKey: string | any[]) { return queryClient.invalidateQueries(queryKey); } diff --git a/web_console_v2/client/src/shared/router.ts b/web_console_v2/client/src/shared/router.ts new file mode 100644 index 000000000..ea404dc37 --- /dev/null +++ b/web_console_v2/client/src/shared/router.ts @@ -0,0 +1,44 @@ +import { ProjectBaseAbilitiesType, ProjectTaskType } from 'typings/project'; + +type routePathName = + | 'projects' + | 'datasets' + | 'modelCenter' + | 'workflowCenter' + | 'trustedCenter' + | 'algorithmManagement' + | 'modelServing'; +// When no workspace is selected, keep only workspace management and the workflow center +export const ABILITIES_SIDEBAR_MENU_MAPPER: Record< + routePathName, + (ProjectBaseAbilitiesType | ProjectTaskType)[] +> = { + projects: [ + ProjectBaseAbilitiesType.BASE, + ProjectTaskType.ALIGN, + ProjectTaskType.HORIZONTAL, + 
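// [Editor's note] A sketch of the local folder-rename path rewrite defined above.
// Note that `cur.path` is interpolated into a RegExp unescaped, so paths containing
// regex metacharacters would need escaping before this is safe.
const toRename: OperationRecord[] = [
  { path: 'folder', content: '', type: OperationType.ADD, isFolder: true },
  { path: 'folder/a.py', content: '1', type: OperationType.ADD, isFolder: false },
];
renameOperationRecord(
  toRename,
  { path: 'folder', newPath: 'renamed', content: '', type: OperationType.RENAME, isFolder: true },
  false,
);
// => paths become 'renamed' and 'renamed/a.py'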
ProjectTaskType.TRUSTED, + ProjectTaskType.VERTICAL, + ], + datasets: [ + ProjectTaskType.ALIGN, + ProjectTaskType.HORIZONTAL, + ProjectTaskType.TRUSTED, + ProjectTaskType.VERTICAL, + ], + modelCenter: [ProjectTaskType.HORIZONTAL, ProjectTaskType.VERTICAL], + workflowCenter: [ + ProjectBaseAbilitiesType.BASE, + ProjectTaskType.ALIGN, + ProjectTaskType.HORIZONTAL, + ProjectTaskType.TRUSTED, + ProjectTaskType.VERTICAL, + ], + trustedCenter: [ProjectTaskType.TRUSTED], + algorithmManagement: [ + ProjectTaskType.HORIZONTAL, + ProjectTaskType.TRUSTED, + ProjectTaskType.VERTICAL, + ], + modelServing: [ProjectTaskType.HORIZONTAL, ProjectTaskType.TRUSTED, ProjectTaskType.VERTICAL], +}; diff --git a/web_console_v2/client/src/shared/testUtils.ts b/web_console_v2/client/src/shared/testUtils.ts new file mode 100644 index 000000000..16903f7ba --- /dev/null +++ b/web_console_v2/client/src/shared/testUtils.ts @@ -0,0 +1,19 @@ +/* istanbul ignore file */ + +import { waitFor, RenderResult, fireEvent } from '@testing-library/react'; + +export async function waitForLoadingEnd(wrapper: RenderResult) { + // Start fetch async data, so loading should be displayed + await waitFor(() => expect(wrapper.container.querySelector('.arco-spin-icon')).toBeVisible()); + + // End fetch async data, so loading should be removed + await waitFor(() => + expect(wrapper.container.querySelector('.arco-spin-icon')).not.toBeInTheDocument(), + ); +} + +export function typeInput(input: HTMLElement, value: string | number) { + fireEvent.change(input, { + target: { value }, + }); +} diff --git a/web_console_v2/client/src/shared/trustedCenter.test.ts b/web_console_v2/client/src/shared/trustedCenter.test.ts new file mode 100644 index 000000000..f0ffb9d5e --- /dev/null +++ b/web_console_v2/client/src/shared/trustedCenter.test.ts @@ -0,0 +1,346 @@ +import { + AuthStatus, + TicketAuthStatus, + TicketStatus, + TrustedJobStatus, +} from 'typings/trustedCenter'; +import { + getLatestJobStatus, + getTicketAuthStatus, + getTrustedJobGroupAuthStatus, + getTrustedJobStatus, +} from './trustedCenter'; + +describe('trusted center shared function', () => { + const groupData: any = { + id: 1, + name: 'Trusted Computing', + created_at: Date.now(), + is_creator: true, + creator_id: 323, + auth_status: AuthStatus.AUTHORIZED, + ticket_status: TicketStatus.PENDING, + latest_job_status: undefined, + }; + + const jobData: any = { + id: 1, + name: 'Trusted Job', + job_id: 12, + comment: 'useful', + started_at: Date.now(), + finished_at: Date.now(), + status: undefined, + }; + + const tempJobData: any = undefined; + + /** getTrustedJobStatus */ + + it('get job no data status', () => { + expect(getTrustedJobStatus(tempJobData)).toEqual({ + type: 'default', + text: 'trusted_center.state_trusted_job_unknown', + tip: '', + }); + }); + + it('get job data undefined status', () => { + expect(getTrustedJobStatus(jobData)).toEqual({ + type: 'default', + text: 'trusted_center.state_trusted_job_unknown', + tip: '', + }); + }); + + it('get trusted job new status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.NEW })).toEqual({ + type: 'default', + text: '发起方创建成功', + tip: '', + }); + }); + + it('get trusted job created status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.CREATED })).toEqual({ + type: 'default', + text: '多方创建成功', + tip: '', + }); + }); + + it('get trusted job pending status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.PENDING })).toEqual({ + type: 'default', + 
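// [Editor's note] A minimal sketch of how ABILITIES_SIDEBAR_MENU_MAPPER above can gate
// sidebar entries; `isMenuVisible` is an illustrative assumption, not part of the patch.
function isMenuVisible(
  menu: keyof typeof ABILITIES_SIDEBAR_MENU_MAPPER,
  abilities: Array<ProjectBaseAbilitiesType | ProjectTaskType>,
): boolean {
  // Show the entry when the project has at least one matching ability.
  return ABILITIES_SIDEBAR_MENU_MAPPER[menu].some((ability) => abilities.includes(ability));
}
isMenuVisible('trustedCenter', [ProjectTaskType.TRUSTED]); // true
isMenuVisible('trustedCenter', [ProjectTaskType.HORIZONTAL]); // false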
text: 'trusted_center.state_trusted_job_pending', + tip: '', + }); + }); + + it('get trusted job running status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.RUNNING })).toEqual({ + type: 'processing', + text: 'trusted_center.state_trusted_job_running', + tip: '', + }); + }); + + it('get trusted job succeeded status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.SUCCEEDED })).toEqual({ + type: 'success', + text: 'trusted_center.state_trusted_job_succeeded', + tip: '', + }); + }); + + it('get trusted job failed status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.FAILED })).toEqual({ + type: 'error', + text: 'trusted_center.state_trusted_job_failed', + tip: '', + }); + }); + + it('get trusted job stopped status', () => { + expect(getTrustedJobStatus({ ...jobData, status: TrustedJobStatus.STOPPED })).toEqual({ + type: 'error', + text: 'trusted_center.state_trusted_job_stopped', + tip: '', + }); + }); + + it('get trusted job undefined status', () => { + expect( + getTrustedJobGroupAuthStatus({ + ...groupData, + auth_status: undefined, + }), + ).toEqual({ + type: 'default', + text: 'trusted_center.state_trusted_job_unknown', + tip: '', + }); + }); + + it('get trusted job authorization status', () => { + expect(getTrustedJobGroupAuthStatus(groupData)).toEqual({ + type: 'success', + text: 'trusted_center.state_auth_status_authorized', + tip: '', + }); + }); + + it('get trusted job unauthorization status', () => { + expect( + getTrustedJobGroupAuthStatus({ + ...groupData, + auth_status: AuthStatus.PENDING, + }), + ).toEqual({ + type: 'error', + text: 'trusted_center.state_auth_status_unauthorized', + tip: '', + }); + }); + + const exportJobData: any = { + id: 1, + name: 'Trusted Job', + job_id: 12, + comment: 'useful', + started_at: Date.now(), + finished_at: Date.now(), + status: TrustedJobStatus.PENDING, + ticket_auth_status: undefined, + }; + + const tempExportJobData: any = undefined; + /** getTicketAuthStatus */ + + it('get export job no data status', () => { + expect(getTicketAuthStatus(tempExportJobData)).toEqual({ + type: 'normal', + text: '未知', + percent: 0, + tip: '', + }); + }); + + it('get export job undefined status', () => { + expect(getTicketAuthStatus(exportJobData)).toEqual({ + type: 'normal', + text: '未知', + percent: 0, + tip: '', + }); + }); + + it('get export job create pending status', () => { + expect( + getTicketAuthStatus({ + ...exportJobData, + ticket_auth_status: TicketAuthStatus.CREATE_PENDING, + }), + ).toEqual({ + type: 'normal', + text: '创建中', + percent: 10, + tip: '', + }); + }); + + it('get export job create failed status', () => { + expect( + getTicketAuthStatus({ + ...exportJobData, + ticket_auth_status: TicketAuthStatus.CREATE_FAILED, + }), + ).toEqual({ + type: 'error', + text: '创建失败', + percent: 100, + tip: '', + }); + }); + + it('get export job ticket pending status', () => { + expect( + getTicketAuthStatus({ + ...exportJobData, + ticket_auth_status: TicketAuthStatus.TICKET_PENDING, + }), + ).toEqual({ + type: 'normal', + text: '待审批', + percent: 20, + tip: '', + }); + }); + + it('get export job ticket declined status', () => { + expect( + getTicketAuthStatus({ + ...exportJobData, + ticket_auth_status: TicketAuthStatus.TICKET_DECLINED, + }), + ).toEqual({ + type: 'error', + text: '审批失败', + percent: 100, + tip: '', + }); + }); + + it('get export job ticket auth pending status', () => { + expect( + getTicketAuthStatus({ + ...exportJobData, + ticket_auth_status: 
TicketAuthStatus.AUTH_PENDING, + }), + ).toEqual({ + type: 'normal', + text: '待授权', + percent: 80, + tip: '', + }); + }); + + it('get export job ticket authorized status', () => { + expect( + getTicketAuthStatus({ + ...exportJobData, + ticket_auth_status: TicketAuthStatus.AUTHORIZED, + }), + ).toEqual({ + type: 'normal', + text: '已授权', + percent: 100, + tip: '', + }); + }); + + /** getLatestJobStatus */ + it('get job no data status', () => { + expect(getLatestJobStatus(tempJobData)).toEqual({ + type: 'default', + text: '未知', + tip: '', + }); + }); + + it('get job data undefined status', () => { + expect(getLatestJobStatus(groupData)).toEqual({ + type: 'default', + text: '未知', + tip: '', + }); + }); + + it('get trusted job new status', () => { + expect(getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.NEW })).toEqual({ + type: 'success', + text: '发起方创建成功', + tip: '', + }); + }); + + it('get trusted job created status', () => { + expect( + getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.CREATED }), + ).toEqual({ + type: 'success', + text: '多方创建成功', + tip: '', + }); + }); + + it('get trusted job pending status', () => { + expect( + getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.PENDING }), + ).toEqual({ + type: 'default', + text: '待执行', + tip: '', + }); + }); + + it('get trusted job running status', () => { + expect( + getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.RUNNING }), + ).toEqual({ + type: 'processing', + text: '执行中', + tip: '', + }); + }); + + it('get trusted job succeeded status', () => { + expect( + getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.SUCCEEDED }), + ).toEqual({ + type: 'success', + text: '已成功', + tip: '', + }); + }); + + it('get trusted job failed status', () => { + expect( + getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.FAILED }), + ).toEqual({ + type: 'error', + text: '已失败', + tip: '', + }); + }); + + it('get trusted job stopped status', () => { + expect( + getLatestJobStatus({ ...groupData, latest_job_status: TrustedJobStatus.STOPPED }), + ).toEqual({ + type: 'error', + text: '已终止', + tip: '', + }); + }); +}); diff --git a/web_console_v2/client/src/shared/trustedCenter.ts b/web_console_v2/client/src/shared/trustedCenter.ts new file mode 100644 index 000000000..e8c587521 --- /dev/null +++ b/web_console_v2/client/src/shared/trustedCenter.ts @@ -0,0 +1,208 @@ +import { ProgressType, StateTypes } from 'components/StateIndicator'; +import i18n from 'i18n'; +import { + TrustedJobGroupItem, + TrustedJobStatus, + AuthStatus, + TrustedJobListItem, + TrustedJobGroup, + TicketAuthStatus, + TrustedJob, +} from 'typings/trustedCenter'; + +// ------- judge trusted job status ------ +export function getTrustedJobStatus( + data: TrustedJobListItem | TrustedJob, +): { type: StateTypes; text: string; tip?: string } { + let type: StateTypes = 'default'; + let text = i18n.t('trusted_center.state_trusted_job_unknown'); + const tip = ''; + + if (!data) { + return { + type, + text, + tip, + }; + } + + switch (data.status) { + case TrustedJobStatus.NEW: + type = 'default'; + text = '发起方创建成功'; + break; + case TrustedJobStatus.CREATED: + type = 'default'; + text = '多方创建成功'; + break; + case TrustedJobStatus.PENDING: + type = 'default'; + text = i18n.t('trusted_center.state_trusted_job_pending'); + break; + case TrustedJobStatus.RUNNING: + type = 'processing'; + text = i18n.t('trusted_center.state_trusted_job_running'); + break; + case 
TrustedJobStatus.SUCCEEDED: + type = 'success'; + text = i18n.t('trusted_center.state_trusted_job_succeeded'); + break; + case TrustedJobStatus.FAILED: + type = 'error'; + text = i18n.t('trusted_center.state_trusted_job_failed'); + break; + case TrustedJobStatus.STOPPED: + type = 'error'; + text = i18n.t('trusted_center.state_trusted_job_stopped'); + break; + default: + break; + } + return { + type, + text, + tip, + }; +} + +// ------ judge trusted job authorization status ------ +export function getTrustedJobGroupAuthStatus( + data: TrustedJobGroupItem, +): { type: StateTypes; text: string; tip?: string } { + let type: StateTypes = 'default'; + let text = i18n.t('trusted_center.state_trusted_job_unknown'); + const tip = ''; + switch (data.auth_status) { + case AuthStatus.AUTHORIZED: + type = 'success'; + text = i18n.t('trusted_center.state_auth_status_authorized'); + break; + case AuthStatus.PENDING: + type = 'error'; + text = i18n.t('trusted_center.state_auth_status_unauthorized'); + break; + default: + break; + } + + return { + type, + text, + tip, + }; +} + +// ------ judge ticket auth authorization status ------ +export function getTicketAuthStatus( + data: TrustedJobListItem | TrustedJob | TrustedJobGroupItem | TrustedJobGroup, +): { type: ProgressType; text: string; tip?: string; percent: number } { + let type: ProgressType = 'normal'; + let text = '未知'; + const tip = ''; + let percent = 0; + + if (!data) { + return { + type, + text, + tip, + percent, + }; + } + + switch (data.ticket_auth_status) { + case TicketAuthStatus.CREATE_PENDING: + type = 'normal'; + text = '创建中'; + percent = 10; + break; + case TicketAuthStatus.CREATE_FAILED: + type = 'error'; + text = '创建失败'; + percent = 100; + break; + case TicketAuthStatus.TICKET_PENDING: + type = 'normal'; + text = '待审批'; + percent = 20; + break; + case TicketAuthStatus.TICKET_DECLINED: + type = 'error'; + text = '审批失败'; + percent = 100; + break; + case TicketAuthStatus.AUTH_PENDING: + type = 'normal'; + text = '待授权'; + percent = 80; + break; + case TicketAuthStatus.AUTHORIZED: + type = 'normal'; + text = '已授权'; + percent = 100; + break; + default: + break; + } + return { + type, + text, + tip, + percent, + }; +} + +// ------ judge latest job status ------ +export function getLatestJobStatus( + data: TrustedJobGroupItem | TrustedJobGroup, +): { type: StateTypes; text: string; tip?: string } { + let type: StateTypes = 'default'; + let text = '未知'; + const tip = ''; + + if (!data) { + return { + type, + text, + tip, + }; + } + + switch (data.latest_job_status) { + case TrustedJobStatus.NEW: + type = 'success'; + text = '发起方创建成功'; + break; + case TrustedJobStatus.CREATED: + type = 'success'; + text = '多方创建成功'; + break; + case TrustedJobStatus.PENDING: + type = 'default'; + text = '待执行'; + break; + case TrustedJobStatus.RUNNING: + type = 'processing'; + text = '执行中'; + break; + case TrustedJobStatus.SUCCEEDED: + type = 'success'; + text = '已成功'; + break; + case TrustedJobStatus.FAILED: + type = 'error'; + text = '已失败'; + break; + case TrustedJobStatus.STOPPED: + type = 'error'; + text = '已终止'; + break; + default: + break; + } + return { + type, + text, + tip, + }; +} diff --git a/web_console_v2/client/src/shared/url.ts b/web_console_v2/client/src/shared/url.ts new file mode 100644 index 000000000..b231a2016 --- /dev/null +++ b/web_console_v2/client/src/shared/url.ts @@ -0,0 +1,7 @@ +/* istanbul ignore file */ + +import History from 'history'; + +export function parseSearch(location: History.Location | Location) { + return new 
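// [Editor's note] A sketch of consuming the status getters above; the StateIndicator
// props are assumed from 'components/StateIndicator', and `group` stands for an item
// returned by the trusted-job-group list API.
declare const group: TrustedJobGroupItem;
const stage = getLatestJobStatus(group); // { type, text, tip } for a <StateIndicator />
const ticket = getTicketAuthStatus(group); // same shape, plus `percent` for a progress bar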
URLSearchParams(location.search); +} diff --git a/web_console_v2/client/src/shared/validator.test.ts b/web_console_v2/client/src/shared/validator.test.ts index 1961d48c0..cbacf9252 100644 --- a/web_console_v2/client/src/shared/validator.test.ts +++ b/web_console_v2/client/src/shared/validator.test.ts @@ -1,8 +1,97 @@ -import { message } from 'antd'; -import { validatePassword } from './validator'; +import { + validatePassword, + validNamePattern, + validEmailPattern, + isValidJobName, + isValidCpu, + isValidMemory, + isWorkflowNameUniq, + isValidName, + isValidEmail, + isStringCanBeParsed, +} from './validator'; +import * as api from 'services/workflow'; +import { newlyCreated } from 'services/mocks/v2/workflows/examples'; + +jest.mock('services/workflow'); + +const mockApi = api as jest.Mocked<typeof api>; + +const validNameCases = [ + { + // Empty + i: '', + o: false, + }, + { + // Single lowercase char + i: 'a', + o: true, + }, + { + // Single uppercase char + i: 'A', + o: true, + }, + { + // Single number char + i: '0', + o: true, + }, + { + // Single Chinese char + i: '中', + o: true, + }, + { + // Single _ char + i: '_', + o: false, + }, + { + i: 'a_', + o: false, + }, + { + i: 'A_', + o: false, + }, + { + i: '0_', + o: false, + }, + { + i: '中_', + o: false, + }, + { + i: '__', + o: false, + }, + { + i: 'a_A', + o: true, + }, + { + i: 'A_a', + o: true, + }, + { + i: '0_a', + o: true, + }, + { + i: '中_a', + o: true, + }, + { + i: '__a', + o: false, + }, +]; describe('Password validation', () => { - it('validatePassword', () => { + it('validatePassword', async () => { const cases = [ { // Empty @@ -33,5 +122,171 @@ }), ).toBe(o); }); + + await expect(validatePassword('')).rejects.toThrow('users.placeholder_password_message'); + }); +}); + +describe('Name validation', () => { + it('validNamePattern', () => { + validNameCases.forEach(({ i, o }) => { + expect(validNamePattern.test(i)).toBe(o); + }); }); + + it('isValidName', () => { + validNameCases.forEach(({ i, o }) => { + expect(isValidName(i)).toBe(o); + }); + }); + + it('isValidJobName', () => { + expect(isValidJobName('')).toBe(false); + expect(isValidJobName('嗷嗷嗷')).toBe(false); + expect(isValidJobName('jobjobjobjobjobjobjobjob')).toBe(true); // 24-char limit + expect( + isValidJobName( + 'jobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjobjob', + ), + ).toBe(false); + expect(isValidJobName('-_aaa112399AAd')).toBe(false); + expect(isValidJobName('-aaa112399AAd')).toBe(false); + expect(isValidJobName('uuid-jobname')).toBe(true); + expect(isValidJobName('-jobname')).toBe(false); + expect(isValidJobName('_aaa112399AAd')).toBe(false); + expect(isValidJobName('A')).toBe(false); + expect(isValidJobName('aaaA')).toBe(false); + expect(isValidJobName('aaa.test')).toBe(true); + }); + + it('isWorkflowNameUniq', async () => { + try { + const callback = (error?: string) => { + return error; + }; + // An empty input string resolves to undefined + await expect(isWorkflowNameUniq('', callback)).resolves.toBeUndefined(); + // Because the API has no mocked resolved value, the call will throw + await expect(isWorkflowNameUniq('1', callback)).rejects.toMatch('TypeError'); + + mockApi.fetchWorkflowList.mockResolvedValueOnce({ data: [] }); + const resolvedValue = await isWorkflowNameUniq('workflowName1', callback); + expect(resolvedValue).toBeUndefined(); + + mockApi.fetchWorkflowList.mockResolvedValueOnce({ data: [newlyCreated] }); + await isWorkflowNameUniq('workflowName1', 
callback).catch((error) => { + expect(error).toMatch('workflow.msg_workflow_name_existed'); + }); + } catch (error) { + return Promise.reject(error); + } + }); +}); + +describe('Resource config validation', () => { + it('isValidCpu', () => { + expect(isValidCpu('')).toBe(false); + expect(isValidCpu(' ')).toBe(false); + expect(isValidCpu('100m')).toBe(true); + expect(isValidCpu('100M')).toBe(false); + expect(isValidCpu('100mm')).toBe(false); + expect(isValidCpu('a100m')).toBe(false); + expect(isValidCpu('1a00m')).toBe(false); + expect(isValidCpu('100ma')).toBe(false); + expect(isValidCpu('m')).toBe(false); + expect(isValidCpu('M')).toBe(false); + expect(isValidCpu('100')).toBe(false); + }); + it('isValidMemory', () => { + expect(isValidMemory('')).toBe(false); + expect(isValidMemory(' ')).toBe(false); + + // Gi + expect(isValidMemory('Gi')).toBe(false); + expect(isValidMemory('gi')).toBe(false); + expect(isValidMemory('100')).toBe(false); + expect(isValidMemory('100Gi')).toBe(true); + expect(isValidMemory('100gi')).toBe(false); + expect(isValidMemory('100Gii')).toBe(false); + expect(isValidMemory('100gii')).toBe(false); + expect(isValidMemory('a100Gi')).toBe(false); + expect(isValidMemory('1a00Gi')).toBe(false); + expect(isValidMemory('100aGi')).toBe(false); + expect(isValidMemory('100Gai')).toBe(false); + + // Mi + expect(isValidMemory('Mi')).toBe(false); + expect(isValidMemory('mi')).toBe(false); + expect(isValidMemory('100')).toBe(false); + expect(isValidMemory('100Mi')).toBe(true); + expect(isValidMemory('100mi')).toBe(false); + expect(isValidMemory('100Mii')).toBe(false); + expect(isValidMemory('100mii')).toBe(false); + expect(isValidMemory('a100Mi')).toBe(false); + expect(isValidMemory('1a00Mi')).toBe(false); + expect(isValidMemory('100aMi')).toBe(false); + expect(isValidMemory('100Mai')).toBe(false); + }); +}); + +describe('Email validation', () => { + it('validEmailPattern', () => { + expect(validEmailPattern.test('')).toBe(false); + expect(validEmailPattern.test('a')).toBe(false); + expect(validEmailPattern.test('a@qq.com')).toBe(true); + expect(validEmailPattern.test('a_?s@qq.com')).toBe(true); + expect(validEmailPattern.test('a@@qq.com')).toBe(false); + expect(validEmailPattern.test('a@qqcom')).toBe(false); + expect(validEmailPattern.test('a@qq.')).toBe(false); + expect(validEmailPattern.test('aqq.com')).toBe(false); + }); + + it('isValidEmail', () => { + expect(isValidEmail('')).toBe(false); + expect(isValidEmail('a')).toBe(false); + expect(isValidEmail('a@qq.com')).toBe(true); + expect(isValidEmail('a_?s@qq.com')).toBe(true); + expect(isValidEmail('a@@qq.com')).toBe(false); + expect(isValidEmail('a@qqcom')).toBe(false); + expect(isValidEmail('a@qq.')).toBe(false); + expect(isValidEmail('aqq.com')).toBe(false); + }); +}); + +it('isStringCanBeParsed', async () => { + await expect(isStringCanBeParsed('', 'OBJECT')).rejects.toMatch('msg_wrong_format'); + await expect(isStringCanBeParsed('', 'LIST')).rejects.toMatch('msg_wrong_format'); + + await expect(isStringCanBeParsed('cc', 'OBJECT')).rejects.toMatch('msg_wrong_format'); + await expect(isStringCanBeParsed('cc', 'LIST')).rejects.toMatch('msg_wrong_format'); + + await expect(isStringCanBeParsed('{}', 'OBJECT')).resolves.toBeUndefined(); + await expect(isStringCanBeParsed('{}', 'LIST')).resolves.toBeUndefined(); + + await expect(isStringCanBeParsed('[]', 'OBJECT')).resolves.toBeUndefined(); + await expect(isStringCanBeParsed('[]', 'LIST')).resolves.toBeUndefined(); + + const testObjectString = JSON.stringify({ a: 1, b: 2 }); + 
await expect(isStringCanBeParsed(testObjectString, 'OBJECT')).resolves.toBeUndefined(); + await expect(isStringCanBeParsed(testObjectString, 'LIST')).resolves.toBeUndefined(); + + const testArrayString = JSON.stringify([{ a: 1, b: 2 }]); + await expect(isStringCanBeParsed(testArrayString, 'OBJECT')).resolves.toBeUndefined(); + await expect(isStringCanBeParsed(testArrayString, 'LIST')).resolves.toBeUndefined(); + + const testFakeObjectString = '{a:1}'; + await expect(isStringCanBeParsed(testFakeObjectString, 'OBJECT')).rejects.toMatch( + 'msg_wrong_format', + ); + await expect(isStringCanBeParsed(testFakeObjectString, 'LIST')).rejects.toMatch( + 'msg_wrong_format', + ); + + const testFakeArrayString = '[1],'; + await expect(isStringCanBeParsed(testFakeArrayString, 'OBJECT')).rejects.toMatch( + 'msg_wrong_format', + ); + await expect(isStringCanBeParsed(testFakeArrayString, 'LIST')).rejects.toMatch( + 'msg_wrong_format', + ); }); diff --git a/web_console_v2/client/src/shared/validator.ts b/web_console_v2/client/src/shared/validator.ts index 596c7c6ac..6b6f88856 100644 --- a/web_console_v2/client/src/shared/validator.ts +++ b/web_console_v2/client/src/shared/validator.ts @@ -1,13 +1,154 @@ +import debounce from 'debounce-promise'; import i18n from 'i18n'; -const PASSWORD_REGX = /^(?=.*[A-Za-z])(?=.*\d)(?=.*[!@#$%^&*()_=+|{};:'",<.>/?~])[A-Za-z\d!@#$%^&*()_=+|{};:'",<.>/?~]{8,20}$/i; +import { fetchWorkflowList } from 'services/workflow'; + +export const validPasswordPattern = /^(?=.*[A-Za-z])(?=.*\d)(?=.*[!@#$%^&*()_=+|{};:'",<.>/?~])[A-Za-z\d!@#$%^&*()_=+|{};:'",<.>/?~]{8,20}$/i; + export async function validatePassword( value: string, options: { message: string } = { message: i18n.t('users.placeholder_password_message') }, ) { - if (PASSWORD_REGX.test(value)) { + if (validPasswordPattern.test(value)) { return true; } throw new Error(options.message); } + +function generateInputPattern(maxSize: number) { + return new RegExp( + `^[a-zA-Z0-9\u4e00-\u9fa5][a-zA-Z0-9\u4e00-\u9fa5-_@.]{0,${ + maxSize - 2 + }}[a-zA-Z0-9\u4e00-\u9fa5]$|^[a-zA-Z0-9\u4e00-\u9fa5]{1}$`, + ); +} + +function generateParticipantValidName(maxSize: number) { + return new RegExp( + `^[a-zA-Z0-9\u4e00-\u9fa5][a-zA-Z0-9\u4e00-\u9fa5-_]{0,${ + maxSize - 2 + }}[a-zA-Z0-9\u4e00-\u9fa5]$|^[a-zA-Z0-9\u4e00-\u9fa5]{1}$`, + ); +} + +/** + * Support uppercase and lowercase letters, numbers, Chinese, "_" and "-" characters, + * can only start/end with uppercase and lowercase letters, numbers or Chinese, + * and no more than 63 characters + */ +export const validNamePattern = generateInputPattern(63); + +export const validParticipantNamePattern = generateParticipantValidName(63); + +export function isValidName(str: string) { + return validNamePattern.test(str); +} + +export const validCommentPattern = generateInputPattern(100); + +export const MAX_COMMENT_LENGTH = 200; + +/** + * limit from k8s + */ +export const jobNamePattern = /^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$/; +export function isValidJobName(str: string) { + return str.length <= 24 && jobNamePattern.test(str); +} + +/** + * valid format: xxxm, x is number + * + * @example + * 100m + */ +export const validCpuPattern = /^\d+m$/; + +/** + * valid format: xxxMi or xxxGi, x is number + * + * @example + * 100Mi,100Gi + */ +export const validMemoryPattern = /^\d+(Mi|Gi)$/; + +/** + * valid format: xxxm, x is number + * + * @param cpu string + * @returns boolean + * @example + * 100m + */ +export function isValidCpu(cpu: string) { + return 
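// [Editor's note] A few concrete cases for the 63-character name pattern built above,
// following the rules in its doc comment; a sketch, not part of the patch.
validNamePattern.test('my_model-01'); // true
validNamePattern.test('_model'); // false: must start with a letter, digit, or Chinese character
validNamePattern.test('数据集.v2'); // true: Chinese chars plus '.' in the middle are allowed
validNamePattern.test('a'.repeat(63)); // true: exactly at the limit
validNamePattern.test('a'.repeat(64)); // false: one character over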
validCpuPattern.test(cpu); +} +/** + * valid format: xxxMi or xxxGi + * + * @param memory string + * @returns boolean + * @example + * 100Mi, 100Gi + */ +export function isValidMemory(memory: string) { + return validMemoryPattern.test(memory); +} + +/** + * Validate that the workflow name is unique + * @param workflowName string + * @returns Promise<void> (duplicate names are reported through the callback) + */ +export async function isWorkflowNameUniq(workflowName: string, callback: (error?: string) => void) { + if (!workflowName) { + return; + } + + try { + // filter by workflowName + const resp = await fetchWorkflowList({ + name: workflowName, + }); + const { data } = resp; + if (data && data.length > 0) { + callback('系统中存在同名的工作流,请更换名称'); + } + return; + } catch (error) { + return Promise.reject(String(error)); + } +} + +/* istanbul ignore next */ +export const isWorkflowNameUniqWithDebounce = debounce( + async (value: any, callback: (error?: string) => void) => { + return isWorkflowNameUniq(value, callback); + }, + 500, +); + +// https://github.com/any86/any-rule#email%E9%82%AE%E7%AE%B1 +export const validEmailPattern = /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; +export function isValidEmail(email: string) { + return validEmailPattern.test(email); +} + +/** + * Validate that a string can be parsed as JSON + * @param value string + * @returns Promise<void> (rejects with a format message when parsing fails) + */ +export function isStringCanBeParsed(value: string, valueType: 'LIST' | 'OBJECT') { + try { + JSON.parse(value); + return Promise.resolve(); + } catch (error) { + return Promise.reject( + i18n.t('settings.msg_wrong_format', { + type: valueType, + }), + ); + } +} diff --git a/web_console_v2/client/src/shared/variablePresets.ts b/web_console_v2/client/src/shared/variablePresets.ts index b58ec7b5a..34a2ea0bf 100644 --- a/web_console_v2/client/src/shared/variablePresets.ts +++ b/web_console_v2/client/src/shared/variablePresets.ts @@ -1,3 +1,5 @@ +/* istanbul ignore file */ + /** * Variable additional schema dictionary for Workflow * NOTE: each variable schema here should only maintain UI related stuffs diff --git a/web_console_v2/client/src/shared/workflow.test.ts b/web_console_v2/client/src/shared/workflow.test.ts index 0606908d0..b7f8fe36b 100644 --- a/web_console_v2/client/src/shared/workflow.test.ts +++ b/web_console_v2/client/src/shared/workflow.test.ts @@ -6,16 +6,141 @@ } from 'services/mocks/v2/workflows/examples'; import { findJobExeInfoByJobDef, - isAwaitParticipantConfig, - isCompleted, - isPendingAccpet, + getWorkflowStage, + isOperable, + isForkable, + isEditable, } from './workflow'; +import { WorkflowState } from 'typings/workflow'; + +const { + RUNNING, + STOPPED, + INVALID, + COMPLETED, + FAILED, + PREPARE_RUN, + PREPARE_STOP, + WARMUP_UNDERHOOD, + PENDING_ACCEPT, + READY_TO_RUN, + PARTICIPANT_CONFIGURING, + UNKNOWN, +} = WorkflowState; describe('Workflow state judgement', () => { it('All should works fine', () => { - expect(isAwaitParticipantConfig(newlyCreated)).toBeTruthy(); - expect(isCompleted(completed)).toBeTruthy(); - expect(isPendingAccpet(pendingAcceptAndConfig)).toBeTruthy(); + expect(getWorkflowStage(pendingAcceptAndConfig)).toEqual({ + text: 'workflow.state_pending_accept', + type: 'warning', + }); + expect(getWorkflowStage(newlyCreated)).toEqual({ + text: 'workflow.state_configuring', + type: 'gold', + }); + expect(getWorkflowStage(completed)).toEqual({ + text: 'workflow.state_success', + type: 'success', + }); + + expect(getWorkflowStage({ 
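// [Editor's note] A sketch of wiring the debounced uniqueness check above into a form
// rule; the `validator` shape is an assumption (Arco/antd-style), not part of the patch.
const workflowNameRule = {
  validator: (value: string, callback: (error?: string) => void) =>
    isWorkflowNameUniqWithDebounce(value, callback),
};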
...pendingAcceptAndConfig, state: READY_TO_RUN })).toEqual({ + text: 'workflow.state_ready_to_run', + type: 'lime', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: RUNNING })).toEqual({ + text: 'workflow.state_running', + type: 'processing', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: STOPPED })).toEqual({ + text: 'workflow.state_stopped', + type: 'error', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: COMPLETED })).toEqual({ + text: 'workflow.state_success', + type: 'success', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: FAILED })).toEqual({ + text: 'workflow.state_failed', + type: 'error', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: INVALID })).toEqual({ + text: 'workflow.state_invalid', + type: 'default', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: PREPARE_RUN })).toEqual({ + text: 'workflow.state_prepare_run', + type: 'warning', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: PREPARE_STOP })).toEqual({ + text: 'workflow.state_prepare_stop', + type: 'error', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: WARMUP_UNDERHOOD })).toEqual({ + text: 'workflow.state_warmup_underhood', + type: 'warning', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: PENDING_ACCEPT })).toEqual({ + text: 'workflow.state_pending_accept', + type: 'warning', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: PARTICIPANT_CONFIGURING })).toEqual( + { + text: 'workflow.state_configuring', + type: 'gold', + }, + ); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: UNKNOWN })).toEqual({ + text: 'workflow.state_unknown', + type: 'default', + }); + expect(getWorkflowStage({ ...pendingAcceptAndConfig, state: UNKNOWN })).toEqual({ + text: 'workflow.state_unknown', + type: 'default', + }); + }); + + it('IsOperable', () => { + expect(isOperable(pendingAcceptAndConfig)).toBe(false); + expect(isOperable(newlyCreated)).toBe(false); + expect(isOperable(completed)).toBe(true); + expect(isOperable({ ...pendingAcceptAndConfig, state: READY_TO_RUN })).toBe(true); + expect(isOperable({ ...pendingAcceptAndConfig, state: RUNNING })).toBe(true); + expect(isOperable({ ...pendingAcceptAndConfig, state: STOPPED })).toBe(true); + expect(isOperable({ ...pendingAcceptAndConfig, state: COMPLETED })).toBe(true); + expect(isOperable({ ...pendingAcceptAndConfig, state: FAILED })).toBe(true); + expect(isOperable({ ...pendingAcceptAndConfig, state: INVALID })).toBe(false); + expect(isOperable({ ...pendingAcceptAndConfig, state: PREPARE_RUN })).toBe(false); + expect(isOperable({ ...pendingAcceptAndConfig, state: PREPARE_STOP })).toBe(false); + expect(isOperable({ ...pendingAcceptAndConfig, state: WARMUP_UNDERHOOD })).toBe(false); + expect(isOperable({ ...pendingAcceptAndConfig, state: PENDING_ACCEPT })).toBe(false); + expect(isOperable({ ...pendingAcceptAndConfig, state: PARTICIPANT_CONFIGURING })).toBe(false); + expect(isOperable({ ...pendingAcceptAndConfig, state: UNKNOWN })).toBe(false); + }); + + it('IsForkable', () => { + expect(isForkable(pendingAcceptAndConfig)).toBe(true); + expect(isForkable(newlyCreated)).toBe(true); + expect(isForkable(completed)).toBe(true); + expect(isForkable({ ...completed, forkable: true })).toBe(true); + expect(isForkable({ ...completed, forkable: false })).toBe(false); + }); + + it('IsEditable', () => { + expect(isEditable(pendingAcceptAndConfig)).toBe(false); + 
expect(isEditable(newlyCreated)).toBe(true); + expect(isEditable(completed)).toBe(true); + expect(isEditable({ ...pendingAcceptAndConfig, state: READY_TO_RUN })).toBe(true); + expect(isEditable({ ...pendingAcceptAndConfig, state: RUNNING })).toBe(false); + expect(isEditable({ ...pendingAcceptAndConfig, state: STOPPED })).toBe(true); + expect(isEditable({ ...pendingAcceptAndConfig, state: COMPLETED })).toBe(true); + expect(isEditable({ ...pendingAcceptAndConfig, state: FAILED })).toBe(true); + expect(isEditable({ ...pendingAcceptAndConfig, state: INVALID })).toBe(false); + expect(isEditable({ ...pendingAcceptAndConfig, state: PREPARE_RUN })).toBe(false); + expect(isEditable({ ...pendingAcceptAndConfig, state: PREPARE_STOP })).toBe(false); + expect(isEditable({ ...pendingAcceptAndConfig, state: WARMUP_UNDERHOOD })).toBe(false); + expect(isEditable({ ...pendingAcceptAndConfig, state: PENDING_ACCEPT })).toBe(false); + expect(isEditable({ ...pendingAcceptAndConfig, state: PARTICIPANT_CONFIGURING })).toBe(true); + expect(isEditable({ ...pendingAcceptAndConfig, state: UNKNOWN })).toBe(false); }); }); diff --git a/web_console_v2/client/src/shared/workflow.ts b/web_console_v2/client/src/shared/workflow.ts index 02066b29f..fdcef893c 100644 --- a/web_console_v2/client/src/shared/workflow.ts +++ b/web_console_v2/client/src/shared/workflow.ts @@ -1,105 +1,67 @@ import { Workflow, WorkflowState, - TransactionState, WorkflowExecutionDetails, + WorkflowStateFilterParam, + WorkflowStateFilterParamType, } from 'typings/workflow'; import i18n from 'i18n'; import { StateTypes } from 'components/StateIndicator'; import { Job } from 'typings/job'; -const { NEW, READY: W_READY, RUNNING, STOPPED, INVALID, COMPLETED, FAILED } = WorkflowState; const { - READY: T_READY, - COORDINATOR_PREPARE, - COORDINATOR_COMMITTABLE, - PARTICIPANT_PREPARE, - PARTICIPANT_COMMITTABLE, -} = TransactionState; - -// --------------- State judgement ---------------- - -export function isAwaitParticipantConfig(workflow: Workflow) { - const { state, target_state, transaction_state } = workflow; - - return ( - state === NEW && - target_state === W_READY && - [T_READY, COORDINATOR_COMMITTABLE, COORDINATOR_PREPARE].includes(transaction_state) - ); -} - -export function isPendingAccpet(workflow: Workflow) { - const { state, target_state, transaction_state } = workflow; - - return state === NEW && target_state === W_READY && transaction_state === PARTICIPANT_PREPARE; -} - -export function isWarmUpUnderTheHood(workflow: Workflow) { - const { state, target_state, transaction_state } = workflow; - return ( - state === NEW && - target_state === W_READY && - [PARTICIPANT_COMMITTABLE].includes(transaction_state) - ); -} - -export function isReadyToRun(workflow: Workflow) { - const { state, target_state, transaction_state } = workflow; - - return state === W_READY && target_state === INVALID && transaction_state === T_READY; -} - -export function isPreparingRun(workflow: Workflow) { - const { state, target_state } = workflow; - - return target_state === RUNNING && [W_READY, STOPPED].includes(state); -} - -export function isRunning(workflow: Workflow) { - const { state, target_state } = workflow; - - return state === RUNNING && target_state === INVALID; -} - -export function isPreparingStop(workflow: Workflow) { - const { state, target_state } = workflow; - - return target_state === STOPPED && [RUNNING, COMPLETED, FAILED].includes(state); -} - -export function isStopped(workflow: Workflow) { - const { state, target_state } = workflow; - - return 
target_state === STOPPED || (state === STOPPED && target_state === INVALID); -} - -export function isCompleted(workflow: Workflow) { - const { state } = workflow; - - return state === COMPLETED; -} - -export function isFailed(workflow: Workflow) { - const { state } = workflow; - return state === FAILED; -} - -export function isInvalid(workflow: Workflow) { - const { state } = workflow; - return state === INVALID; -} + RUNNING, + STOPPED, + INVALID, + COMPLETED, + FAILED, + PREPARE_RUN, + PREPARE_STOP, + WARMUP_UNDERHOOD, + PENDING_ACCEPT, + READY_TO_RUN, + PARTICIPANT_CONFIGURING, + UNKNOWN, +} = WorkflowState; + +export const workflowStateFilterParamToStateTextMap: Record< + WorkflowStateFilterParamType, + string +> = { + [WorkflowStateFilterParam.RUNNING]: i18n.t('workflow.state_running'), + [WorkflowStateFilterParam.STOPPED]: i18n.t('workflow.state_stopped'), + [WorkflowStateFilterParam.INVALID]: i18n.t('workflow.state_invalid'), + [WorkflowStateFilterParam.COMPLETED]: i18n.t('workflow.state_success'), + [WorkflowStateFilterParam.FAILED]: i18n.t('workflow.state_failed'), + [WorkflowStateFilterParam.PREPARE_RUN]: i18n.t('workflow.state_prepare_run'), + [WorkflowStateFilterParam.PREPARE_STOP]: i18n.t('workflow.state_prepare_stop'), + [WorkflowStateFilterParam.WARMUP_UNDERHOOD]: i18n.t('workflow.state_warmup_underhood'), + [WorkflowStateFilterParam.PENDING_ACCEPT]: i18n.t('workflow.state_pending_accept'), + [WorkflowStateFilterParam.READY_TO_RUN]: i18n.t('workflow.state_ready_to_run'), + [WorkflowStateFilterParam.PARTICIPANT_CONFIGURING]: i18n.t('workflow.state_configuring'), + [WorkflowStateFilterParam.UNKNOWN]: i18n.t('workflow.state_unknown'), +}; + +const workflowStateFilterOrder = [ + WorkflowStateFilterParam.PENDING_ACCEPT, + WorkflowStateFilterParam.READY_TO_RUN, + WorkflowStateFilterParam.COMPLETED, + WorkflowStateFilterParam.FAILED, + WorkflowStateFilterParam.RUNNING, + WorkflowStateFilterParam.STOPPED, + WorkflowStateFilterParam.INVALID, + WorkflowStateFilterParam.PARTICIPANT_CONFIGURING, + WorkflowStateFilterParam.WARMUP_UNDERHOOD, +]; +export const workflowStateOptionList = workflowStateFilterOrder.map((item) => ({ + label: workflowStateFilterParamToStateTextMap[item], + value: item, +})); // --------------- Xable judgement ---------------- -/** - * When target_state is not INVALID, - * means underlying service of both sides are communicating - * during which user cannot perform any action to this workflow - * server would response 'bad request' - */ export function isOperable(workflow: Workflow) { - return workflow.target_state === INVALID; + return [READY_TO_RUN, RUNNING, STOPPED, COMPLETED, FAILED].includes(workflow.state); } export function isForkable(workflow: Workflow) { @@ -107,98 +69,99 @@ export function isForkable(workflow: Workflow) { return forkable; } +export function isEditable(workflow: Workflow) { + const { state } = workflow; + return [PARTICIPANT_CONFIGURING, READY_TO_RUN, STOPPED, COMPLETED, FAILED].includes(state); +} + // --------------- General stage getter ---------------- export function getWorkflowStage(workflow: Workflow): { type: StateTypes; text: string } { - if (isAwaitParticipantConfig(workflow)) { - return { - text: i18n.t('workflow.state_configuring'), - type: 'gold', - }; - } - - if (isPendingAccpet(workflow)) { - return { - text: i18n.t('workflow.state_pending_accept'), - type: 'warning', - }; - } - - if (isWarmUpUnderTheHood(workflow)) { - return { - text: i18n.t('workflow.state_warmup_underhood'), - type: 'warning', - }; - } - - if 
(isPreparingRun(workflow)) { - return { - text: i18n.t('workflow.state_prepare_run'), - type: 'warning', - }; - } - - if (isReadyToRun(workflow)) { - return { - text: i18n.t('workflow.state_ready_to_run'), - type: 'lime', - }; - } - - if (isRunning(workflow)) { - return { - text: i18n.t('workflow.state_running'), - type: 'processing', - }; - } - - if (isPreparingStop(workflow)) { - return { - text: i18n.t('workflow.state_prepare_stop'), - type: 'error', - }; - } - - if (isStopped(workflow)) { - return { - text: i18n.t('workflow.state_stopped'), - type: 'error', - }; - } - - if (isCompleted(workflow)) { - return { - text: i18n.t('workflow.state_success'), - type: 'success', - }; - } - - if (isFailed(workflow)) { - return { - text: i18n.t('workflow.state_failed'), - type: 'error', - }; - } + const { state } = workflow; - if (isInvalid(workflow)) { - return { - text: i18n.t('workflow.state_invalid'), - type: 'default', - }; + switch (state) { + case PARTICIPANT_CONFIGURING: + return { + text: i18n.t('workflow.state_configuring'), + type: 'gold', + }; + + case PENDING_ACCEPT: + return { + text: i18n.t('workflow.state_pending_accept'), + type: 'warning', + }; + + case WARMUP_UNDERHOOD: + return { + text: i18n.t('workflow.state_warmup_underhood'), + type: 'warning', + }; + + case PREPARE_RUN: + return { + text: i18n.t('workflow.state_prepare_run'), + type: 'warning', + }; + + case READY_TO_RUN: + return { + text: i18n.t('workflow.state_ready_to_run'), + type: 'lime', + }; + + case RUNNING: + return { + text: i18n.t('workflow.state_running'), + type: 'processing', + }; + + case PREPARE_STOP: + return { + text: i18n.t('workflow.state_prepare_stop'), + type: 'error', + }; + + case STOPPED: + return { + text: i18n.t('workflow.state_stopped'), + type: 'error', + }; + + case COMPLETED: + return { + text: i18n.t('workflow.state_success'), + type: 'success', + }; + + case FAILED: + return { + text: i18n.t('workflow.state_failed'), + type: 'error', + }; + + case INVALID: + return { + text: i18n.t('workflow.state_invalid'), + type: 'default', + }; + case UNKNOWN: + default: + return { + text: i18n.t('workflow.state_unknown'), + type: 'default', + }; } - - return { - text: i18n.t('workflow.state_unknown'), - type: 'default', - }; } // --------------- Misc ---------------- export function findJobExeInfoByJobDef(jobDef: Job, workflow: WorkflowExecutionDetails) { - return workflow.jobs.find((exeInfo) => { + return workflow.jobs?.find((exeInfo) => { return ( exeInfo.name === `${workflow.uuid}-${jobDef.name}` || + /* istanbul ignore next */ exeInfo.name === `${workflow.name}-${jobDef.name}` || + /* istanbul ignore next */ exeInfo.name.endsWith(jobDef.name) ); }); diff --git a/web_console_v2/client/src/stores/algorithm.ts b/web_console_v2/client/src/stores/algorithm.ts new file mode 100644 index 000000000..811c297a9 --- /dev/null +++ b/web_console_v2/client/src/stores/algorithm.ts @@ -0,0 +1,31 @@ +import { atom } from 'recoil'; +import { + AlgorithmProject, + AlgorithmReleaseStatus, + AlgorithmStatus, + EnumAlgorithmProjectSource, + EnumAlgorithmProjectType, +} from 'typings/algorithm'; + +export const AlgorithmProjectDetail = atom<AlgorithmProject>({ + key: 'AlgorithmProjectDetail', + default: { + id: '', + name: '', + project_id: '', + type: EnumAlgorithmProjectType.NN_LOCAL, + source: EnumAlgorithmProjectSource.PRESET, + creator_id: null, + username: '', + participant_id: null, + path: '', + publish_status: AlgorithmStatus.PUBLISHED, + release_status: AlgorithmReleaseStatus.RELEASED, + parameter: null, + 
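// [Editor's note] A sketch of how the refactored helpers above combine in list pages:
// a single `state` field now drives everything the old target_state /
// transaction_state probing used to decide. `workflow` is illustrative.
declare const workflow: Workflow;
const stage = getWorkflowStage(workflow); // i18n text + StateIndicator type
const showRunStopButtons = isOperable(workflow);
const showConfigureButton = isEditable(workflow);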
comment: null, + latest_version: 1, + created_at: Date.now(), + updated_at: Date.now(), + deleted_at: Date.now(), + }, +}); diff --git a/web_console_v2/client/src/stores/app.ts b/web_console_v2/client/src/stores/app.ts new file mode 100644 index 000000000..a38a3a5a8 --- /dev/null +++ b/web_console_v2/client/src/stores/app.ts @@ -0,0 +1,113 @@ +import { atom, DefaultValue, selector } from 'recoil'; +import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; +import store from 'store2'; +import { DisplayType } from 'typings/component'; +import { Flag } from 'typings/flag'; +import { FedLoginWay } from 'typings/auth'; +import { fetchSysInfo } from 'services/settings'; +import { SystemInfo } from 'typings/settings'; + +export const appPreference = atom({ + key: 'AppPreference', + default: { + language: store.get(LOCAL_STORAGE_KEYS.language) as string, + sidebarFolded: store.get(LOCAL_STORAGE_KEYS.sidebar_folded) as boolean, + projectsDisplay: + (store.get(LOCAL_STORAGE_KEYS.projects_display) as DisplayType) || DisplayType.Card, + sysEmailGroup: store.get(LOCAL_STORAGE_KEYS.sys_email_group) as string, + }, + effects_UNSTABLE: [ + // LocalStorage persistence + ({ onSet }) => { + onSet((newValue) => { + if (newValue instanceof DefaultValue) { + // Do nothing + } else { + store.set(LOCAL_STORAGE_KEYS.sidebar_folded, newValue.sidebarFolded); + store.set(LOCAL_STORAGE_KEYS.language, newValue.language); + store.set(LOCAL_STORAGE_KEYS.projects_display, newValue.projectsDisplay); + store.set(LOCAL_STORAGE_KEYS.sys_email_group, newValue.sysEmailGroup); + } + }); + }, + ], +}); + +export const appState = atom({ + key: 'AppState', + default: { + hideSidebar: false, + }, +}); + +export const appGetters = selector({ + key: 'AppGetters', + get({ get }) { + const isSideBarHidden = get(appState).hideSidebar; + + return { + sidebarWidth: isSideBarHidden ? 0 : get(appPreference).sidebarFolded ? 48 : 200, + }; + }, +}); + +export const appEmailGetters = selector({ + key: 'AppEmailGetters', + get({ get }) { + return get(appPreference).sysEmailGroup; + }, +}); + +export const appFlag = atom<Flag>({ + key: 'AppFlag', + default: store.get(LOCAL_STORAGE_KEYS.app_flags) ?? {}, + effects_UNSTABLE: [ + // LocalStorage persistence + ({ onSet }) => { + onSet((newValue) => { + if (newValue instanceof DefaultValue) { + // Do nothing + } else { + store.set(LOCAL_STORAGE_KEYS.app_flags, newValue ?? {}); + } + }); + }, + ], +}); + +export const appLoginWayList = atom<FedLoginWay[]>({ + key: 'AppLoginWayList', + default: store.get(LOCAL_STORAGE_KEYS.app_login_way_list) ?? 
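// [Editor's note] A sketch of updating the appPreference atom above from a component;
// `useSidebarFold` is illustrative. The atom's effect persists every change to
// localStorage, so no extra write is needed.
import { useRecoilState } from 'recoil';
function useSidebarFold() {
  const [preference, setPreference] = useRecoilState(appPreference);
  const toggle = () => setPreference({ ...preference, sidebarFolded: !preference.sidebarFolded });
  return [preference.sidebarFolded, toggle] as const;
}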
[], + effects_UNSTABLE: [ + // LocalStorage persistence + ({ onSet }) => { + onSet((newValue) => { + if (newValue instanceof DefaultValue) { + // Do nothing + } else { + store.set(LOCAL_STORAGE_KEYS.app_login_way_list, newValue); + } + }); + }, + ], +}); + +export const systemInfoState = atom<{ current?: SystemInfo }>({ + key: 'SystemInfoState', + default: { + current: undefined, + }, +}); + +export const systemInfoQuery = selector({ + key: 'FetchSystemInfoState', + get: async ({ get }) => { + try { + const res = await fetchSysInfo(); + + return res.data; + } catch (error) { + throw error; + } + }, +}); diff --git a/web_console_v2/client/src/stores/dataset.ts b/web_console_v2/client/src/stores/dataset.ts index 904ff6c78..9e51403a0 100644 --- a/web_console_v2/client/src/stores/dataset.ts +++ b/web_console_v2/client/src/stores/dataset.ts @@ -1,12 +1,67 @@ -import { atom } from 'recoil'; -import { DatasetCreatePayload, DatasetType } from 'typings/dataset'; +import { atom, selector } from 'recoil'; +import { fetchIntersectionDatasetList } from 'services/dataset'; +import { Dataset, DatasetCreatePayload, DatasetType__archived } from 'typings/dataset'; +import { WorkflowTemplate, WorkflowInitiatePayload, WorkflowConfig } from 'typings/workflow'; +import { Job } from 'typings/job'; +import { projectState } from 'stores/project'; export const datasetBasicForm = atom<DatasetCreatePayload>({ key: 'DatasetBasicForm', default: { name: '', project_id: (undefined as unknown) as ID, - dataset_type: DatasetType.PSI, + dataset_type: DatasetType__archived.PSI, comment: '', }, }); + +export const forceReloadDatasetList = atom({ + key: 'ForceReloadDatasetList', + default: 0, +}); + +export const intersectionDatasetListQuery = selector({ + key: 'FetchIntersectionDatasetList', + + get: async ({ get }) => { + get(forceReloadDatasetList); + const selectedProject = get(projectState); + const projectId = selectedProject?.current?.id ?? 0; + try { + const res = await fetchIntersectionDatasetList({ + projectId, + }); + + return res?.data ?? 
[]; + } catch (error) { + throw error; + } + }, +}); + +export const datasetState = atom<{ current?: Dataset }>({ + key: 'DatasetState', + default: { + current: undefined, + }, +}); + +export const dataJoinTemplate = atom<WorkflowTemplate | undefined>({ + key: 'DataJoinTemplate', + default: undefined, +}); + +export const dataJoinWorkflowForm = atom<WorkflowInitiatePayload<Job>>({ + key: 'DataJoinWorkflowForm', + default: { + project_id: '', + template_id: undefined, + name: '', + forkable: false, + config: { + group_alias: '', + variables: [], + job_definitions: [], + } as WorkflowConfig, + }, +}); diff --git a/web_console_v2/client/src/stores/modelCenter.ts b/web_console_v2/client/src/stores/modelCenter.ts new file mode 100644 index 000000000..65cee376f --- /dev/null +++ b/web_console_v2/client/src/stores/modelCenter.ts @@ -0,0 +1,508 @@ +import { atom, selector } from 'recoil'; +import i18n from 'i18n'; +import { formatExtra } from 'shared/modelCenter'; + +import { fetchWorkflowTemplateList, fetchTemplateById } from 'services/workflow'; + +import { + ResourceTemplateType, + FederationType, + UploadType, + LossType, + Role, + RoleUppercase, + ModelSet, +} from 'typings/modelCenter'; +import { WorkflowConfig, WorkflowExecutionDetails, WorkflowTemplate } from 'typings/workflow'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; + +export const TREE_TRAIN_MODEL_TEMPLATE_NAME = 'sys-preset-tree-model'; +export const NN_TRAIN_MODEL_TEMPLATE_NAME = 'sys-preset-nn-model'; +export const NN_TRAIN_HORIZONTAL_MODEL_TEMPLATE_NAME = 'sys-preset-nn-horizontal-model'; +export const NN_EVAL_HORIZONTAL_MODEL_TEMPLATE_NAME = 'sys-preset-nn-horizontal-eval-model'; + +export const treeTemplateId = atom<number | null>({ + key: 'TreeTemplateId', + default: null, +}); +export const treeTemplateIdQuery = selector<number | null>({ + key: 'TreeTemplateIdQuery', + get: async ({ get }) => { + try { + const prevTemplateId = get(treeTemplateId); + if (prevTemplateId !== null) { + return prevTemplateId; + } + const { data } = await fetchWorkflowTemplateList(); + const templateItem = + data?.find((item) => item.name === TREE_TRAIN_MODEL_TEMPLATE_NAME) ?? null; + + if (!templateItem) { + throw new Error(i18n.t('error.no_tree_train_model_template')); + } + + return templateItem?.id ?? null; + } catch (error) { + throw error; + } + }, + set: ({ set }, newValue: any) => { + set(treeTemplateId, newValue); + }, +}); +export const treeTemplateDetailQuery = selector<WorkflowTemplate | null>({ + key: 'TreeTemplateDetailQuery', + get: async ({ get }) => { + try { + const templateId = get(treeTemplateIdQuery); + + if (!templateId) { + return null; + } + const { data } = await fetchTemplateById(templateId); + return data; + } catch (error) { + throw error; + } + }, +}); + +export const nnTemplateId = atom<number | null>({ + key: 'NNTemplateId', + default: null, +}); +export const nnTemplateIdQuery = selector<number | null>({ + key: 'NNTemplateIdQuery', + get: async ({ get }) => { + try { + const prevTemplateId = get(nnTemplateId); + if (prevTemplateId !== null) { + return prevTemplateId; + } + const { data } = await fetchWorkflowTemplateList(); + + const templateItem = data?.find((item) => item.name === NN_TRAIN_MODEL_TEMPLATE_NAME) ?? null; + if (!templateItem) { + throw new Error(i18n.t('error.no_nn_train_model_template')); + } + return templateItem?.id ?? 
null; + } catch (error) { + throw error; + } + }, + set: ({ set }, newValue: any) => { + set(nnTemplateId, newValue); + }, +}); +export const nnTemplateDetailQuery = selector<WorkflowTemplate | null>({ + key: 'NNTemplateDetailQuery', + get: async ({ get }) => { + try { + const templateId = get(nnTemplateIdQuery); + + if (!templateId) { + return null; + } + const { data } = await fetchTemplateById(templateId); + return data; + } catch (error) { + throw error; + } + }, +}); + +export const nnHorizontalTemplateId = atom<number | null>({ + key: 'NNHorizontalTemplateId', + default: null, +}); +export const nnHorizontalTemplateIdQuery = selector<number | null>({ + key: 'NNHorizontalTemplateIdQuery', + get: async ({ get }) => { + try { + const prevTemplateId = get(nnHorizontalTemplateId); + if (prevTemplateId !== null) { + return prevTemplateId; + } + const { data } = await fetchWorkflowTemplateList(); + + const templateItem = + data?.find((item) => item.name === NN_TRAIN_HORIZONTAL_MODEL_TEMPLATE_NAME) ?? null; + if (!templateItem) { + throw new Error(i18n.t('error.no_nn_horizontal_train_model_template')); + } + return templateItem?.id ?? null; + } catch (error) { + throw error; + } + }, + set: ({ set }, newValue: any) => { + set(nnHorizontalTemplateId, newValue); + }, +}); +export const nnHorizontalTemplateDetailQuery = selector<WorkflowTemplate | null>({ + key: 'NNHorizontalTemplateDetailQuery', + get: async ({ get }) => { + try { + const templateId = get(nnHorizontalTemplateIdQuery); + + if (!templateId) { + return null; + } + const { data } = await fetchTemplateById(templateId); + return data; + } catch (error) { + throw error; + } + }, +}); + +export const nnHorizontalEvalTemplateId = atom<number | null>({ + key: 'NNHorizontalEvalTemplateId', + default: null, +}); +export const nnHorizontalEvalTemplateIdQuery = selector<number | null>({ + key: 'NNHorizontalEvalTemplateIdQuery', + get: async ({ get }) => { + try { + const prevTemplateId = get(nnHorizontalEvalTemplateId); + if (prevTemplateId !== null) { + return prevTemplateId; + } + const { data } = await fetchWorkflowTemplateList(); + + const templateItem = + data?.find((item) => item.name === NN_EVAL_HORIZONTAL_MODEL_TEMPLATE_NAME) ?? null; + if (!templateItem) { + throw new Error(i18n.t('error.no_nn_horizontal_eval_model_template')); + } + return templateItem?.id ?? 
null; + } catch (error) { + throw error; + } + }, + set: ({ set }, newValue: any) => { + set(nnHorizontalEvalTemplateId, newValue); + }, +}); +export const nnHorizontalEvalTemplateDetailQuery = selector<WorkflowTemplate | null>({ + key: 'NNHorizontalEvalTemplateDetailQuery', + get: async ({ get }) => { + try { + const templateId = get(nnHorizontalEvalTemplateIdQuery); + + if (!templateId) { + return null; + } + const { data } = await fetchTemplateById(templateId); + return data; + } catch (error) { + throw error; + } + }, +}); + +export const existedPeerModelSet = atom<ModelSet | null>({ + key: 'ExistedPeerModelSet', + default: null, +}); + +export const currentWorkflow = atom<WorkflowExecutionDetails | null>({ + key: 'CurrentWorkflow', + default: null, +}); +export const formattedExtraCurrentWorkflow = selector<WorkflowExecutionDetails | null>({ + key: 'FormattedExtraCurrentWorkflow', + get: ({ get }) => { + const baseCurrentWorkflow = get(currentWorkflow); + if (baseCurrentWorkflow) { + return formatExtra(baseCurrentWorkflow); + } + return null; + }, +}); + +export const peerWorkflow = atom<WorkflowExecutionDetails | null>({ + key: 'PeerWorkflow', + default: null, +}); +export const formattedExtraPeerWorkflow = selector<WorkflowExecutionDetails | null>({ + key: 'FormattedExtraPeerWorkflow', + get: ({ get }) => { + const baseCurrentWorkflow = get(peerWorkflow); + if (baseCurrentWorkflow) { + return formatExtra(baseCurrentWorkflow); + } + return null; + }, +}); + +export const currentEnvWorkflow = atom<WorkflowExecutionDetails | null>({ + key: 'CurrentEnvWorkflow', + default: null, +}); +export const formattedExtraCurrentEnvWorkflow = selector<WorkflowExecutionDetails | null>({ + key: 'FormattedExtraCurrentEnvWorkflow', + get: ({ get }) => { + const baseCurrentEnvWorkflow = get(currentEnvWorkflow); + if (baseCurrentEnvWorkflow) { + return formatExtra(baseCurrentEnvWorkflow); + } + return null; + }, +}); +export const currentEnvWorkflowConfig = selector<WorkflowConfig | null>({ + key: 'CurrentEnvWorkflowConfig', + get: ({ get }) => { + const baseCurrentEnvWorkflow = get(currentEnvWorkflow); + if (baseCurrentEnvWorkflow) { + return baseCurrentEnvWorkflow.config; + } + return null; + }, +}); + +export const trainModelForm = atom({ + key: 'TrainModelForm', + default: { + project_id: undefined, + model_name: '', + train_comment: '', + dataset_id: undefined, + dataset_name: '', + + modelset_name: undefined, + modelset_comment: undefined, + + mode: 'train', + image: '', + num_partitions: '', + namespace: 'default', + send_metrics_to_follower: false, + send_scores_to_follower: false, + is_allow_coordinator_parameter_tuning: false, + is_share_model_evaluation_index: false, + algorithm_type: EnumAlgorithmProjectType.TREE_VERTICAL, + resource_template_type: ResourceTemplateType.LOW, + worker_role: Role.LEADER, + role: RoleUppercase.LEADER, + peer_role: RoleUppercase.FOLLOWER, + + // Tree model + loss_type: LossType.LOGISTIC, + learning_rate: 0.3, + max_iters: 10, + max_depth: 5, + l2_regularization: 1, + max_bins: 33, + num_parallel: 5, + validation_data_path: '', + + // Train info + label: 'label', + // ignore_fields: '', + + // NN model + algorithm: { algorithmId: undefined, algorithmProjectId: undefined, config: [], path: [] }, + save_checkpoint_steps: 1000, + save_checkpoint_secs: 600, + epoch_num: 1, + code_tar: {}, + code_key: '', + ps_replicas: '1', + master_replicas: '1', + worker_replicas: '1', + batch_size: '', + shuffle_data_block: '', + load_checkpoint_filename: '', + 
load_checkpoint_filename_with_path: '', + checkpoint_path: '', + sparse_estimator: '', + load_checkpoint_from: '', + + // resource + master_cpu: '', + master_mem: '', + ps_cpu: '', + ps_mem: '', + worker_cpu: '16m', + worker_mem: '64m', + ps_num: 1, + worker_num: 1, + + // temp + data_source: '', + data_path: '', + file_ext: '.data', + file_type: 'tfrecord', + load_model_name: '', + enable_packing: '1', + ignore_fields: '', + cat_fields: '', + verify_example_ids: '', + use_streaming: '', + no_data: '', + verbosity: '1', + }, +}); + +export const evaluationModelForm = atom({ + key: 'EvaluationModelForm', + default: { + project_id: undefined, + report_name: '', + comment: '', + dataset_id: undefined, + dataset_name: '', + targetList: [ + { + model_set_id: undefined, + model_id: undefined, + }, + ], + mode: 'eval', + is_share: false, + image: '', + num_partitions: '', + resource_template_type: ResourceTemplateType.LOW, + worker_role: Role.LEADER, + role: RoleUppercase.LEADER, + peer_role: RoleUppercase.FOLLOWER, + + // Tree model + loss_type: LossType.LOGISTIC, + learning_rate: 0.3, + max_iters: 10, + max_depth: 5, + l2_regularization: 1, + max_bins: 33, + num_parallel: 5, + validation_data_path: '', + + // Train info + label: 'label', + // ignore_fields: '', + + // NN model + save_checkpoint_steps: 1000, + save_checkpoint_secs: 600, + epoch_num: 1, + code_tar: {}, + code_key: '', + ps_replicas: '1', + master_replicas: '1', + worker_replicas: '1', + batch_size: '', + shuffle_data_block: '', + load_checkpoint_filename: '', + load_checkpoint_filename_with_path: '', + checkpoint_path: '', + sparse_estimator: '', + load_checkpoint_from: '', + + // resource + master_cpu: '', + master_mem: '', + ps_cpu: '', + ps_mem: '', + worker_cpu: '16m', + worker_mem: '64m', + ps_num: 1, + worker_num: 1, + + // temp + data_source: '', + data_path: '', + file_ext: '.data', + file_type: 'tfrecord', + load_model_name: '', + enable_packing: '1', + ignore_fields: '', + cat_fields: '', + verify_example_ids: '', + use_streaming: '', + no_data: '', + verbosity: '1', + }, +}); + +export const offlinePredictionModelForm = atom({ + key: 'OfflinePredictionModelForm', + default: { + project_id: undefined, + name: '', + comment: '', + dataset_id: undefined, + dataset_name: '', + model_set: { + model_set_id: undefined, + model_id: undefined, + }, + mode: 'eval', + image: '', + num_partitions: '', + resource_template_type: ResourceTemplateType.LOW, + worker_role: Role.LEADER, + role: RoleUppercase.LEADER, + peer_role: RoleUppercase.FOLLOWER, + + // Train info + label: 'label', + // ignore_fields: '', + + // NN model + save_checkpoint_steps: 1000, + save_checkpoint_secs: 600, + epoch_num: 1, + code_tar: {}, + code_key: '', + ps_replicas: '1', + master_replicas: '1', + worker_replicas: '1', + batch_size: '', + shuffle_data_block: '', + load_checkpoint_filename: '', + load_checkpoint_filename_with_path: '', + checkpoint_path: '', + sparse_estimator: '', + load_checkpoint_from: '', + + // resource + master_cpu: '', + master_mem: '', + ps_cpu: '', + ps_mem: '', + worker_cpu: '16m', + worker_mem: '64m', + ps_num: 1, + worker_num: 1, + + // temp + data_source: '', + data_path: '', + file_ext: '.data', + file_type: 'tfrecord', + load_model_name: '', + enable_packing: '1', + ignore_fields: '', + cat_fields: '', + verify_example_ids: '', + use_streaming: '', + no_data: '', + verbosity: '1', + }, +}); + +export const algorithmlForm = atom({ + key: 'AlgorithmlForm', + default: { + project_id: undefined, + name: '', + comment: '', + 
algorithm_type: undefined, + federation_type: FederationType.CROSS_SAMPLE, + + import_type: UploadType.PATH, + import_type_label: UploadType.PATH, + import_type_no_label: UploadType.PATH, + }, +}); diff --git a/web_console_v2/client/src/stores/operation.ts b/web_console_v2/client/src/stores/operation.ts new file mode 100644 index 000000000..0421ee3a7 --- /dev/null +++ b/web_console_v2/client/src/stores/operation.ts @@ -0,0 +1,22 @@ +import { atom, selector } from 'recoil'; +import { fetchDashboardList } from '../services/operation'; +import { Message } from '@arco-design/web-react'; + +export const forceReloadDashboard = atom({ + key: 'ForceReloadDashboard', + default: 0, +}); + +export const DashboardListQuery = selector({ + key: 'FetchDashboardList', + + get: async ({ get }) => { + get(forceReloadDashboard); + try { + const res = await fetchDashboardList(); + return res?.data ?? []; + } catch (error) { + Message.error(error.message); + } + }, +}); diff --git a/web_console_v2/client/src/stores/participant.ts b/web_console_v2/client/src/stores/participant.ts new file mode 100644 index 000000000..56dd74051 --- /dev/null +++ b/web_console_v2/client/src/stores/participant.ts @@ -0,0 +1,29 @@ +import { Message } from '@arco-design/web-react'; +import { atom, selector, atomFamily } from 'recoil'; +import { fetchParticipants } from 'services/participant'; +import { ConnectionStatus, ConnectionStatusType } from 'typings/participant'; + +export const forceReloadParticipantList = atom({ + key: 'ForceReloadParticipantList', + default: 0, +}); + +export const participantListQuery = selector({ + key: 'FetchParticipantList', + get: async ({ get }) => { + get(forceReloadParticipantList); + + try { + const res = await fetchParticipants(); + + return res.data; + } catch (error: any) { + Message.info(error.message); + } + }, +}); + +export const participantConnectionState = atomFamily<ConnectionStatus, ID>({ + key: 'ParticipantConnectionState', + default: { success: ConnectionStatusType.Fail, message: '', application_version: {} }, +}); diff --git a/web_console_v2/client/src/stores/project.ts b/web_console_v2/client/src/stores/project.ts index 072734aa3..90ed512e1 100644 --- a/web_console_v2/client/src/stores/project.ts +++ b/web_console_v2/client/src/stores/project.ts @@ -1,18 +1,43 @@ import { atom, selector } from 'recoil'; import { fetchProjectList } from 'services/project'; -import { Project } from 'typings/project'; +import { Project, ProjectTaskType, ProjectAbilityType, ProjectActionType } from 'typings/project'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; import store from 'store2'; +import { userInfoState } from './user'; export const forceReloadProjectList = atom({ key: 'ForceReloadProjectList', default: 0, }); +const mockProject: any = { + id: 31, + name: 'test', + participant_type: 'PLATFORM', + created_at: 1623319258, + num_workflow: 8013, + participants: [ + { + id: 1, + name: 'aliyun-test1', + domain_name: 'fl-aliyun-test.com', + host: '101.200.236.203', + port: 32443, + type: 'PLATFORM', + comment: 'migrate from projectbytedance-test-hl', + created_at: 1631007123, + extra: { is_manual_configured: false, grpc_ssl_server_host: '' }, + updated_at: 1631007123, + num_project: 0, + last_connected_at: 0, + }, + ], + creator: '', +}; export const projectState = atom<{ current?: Project }>({ key: 'ProjectState', default: { - current: store.get(LOCAL_STORAGE_KEYS.current_project), + current: process.env.IS_DUMI_ENV ?
mockProject : store.get(LOCAL_STORAGE_KEYS.current_project), }, }); @@ -20,6 +45,8 @@ export const projectListQuery = selector({ key: 'FetchProjectList', get: async ({ get }) => { get(forceReloadProjectList); + get(userInfoState); + try { const res = await fetchProjectList(); @@ -38,3 +65,57 @@ export const projectListGetters = selector({ }; }, }); + +export type ProjectCreateForm = { + name: string; + comment: string; + config: { + variables: never[]; + abilities: ProjectTaskType[]; + action_rules: Record<ProjectActionType, ProjectAbilityType>; + support_blockchain: boolean; + }; +}; +export type ProjectJoinForm = { + id: string; + comment: string; + config: { + variables: never[]; + }; +}; + +export const initialActionRules = { + [ProjectActionType.ID_ALIGNMENT]: ProjectAbilityType.ALWAYS_ALLOW, + [ProjectActionType.DATA_ALIGNMENT]: ProjectAbilityType.ALWAYS_ALLOW, + [ProjectActionType.HORIZONTAL_TRAIN]: ProjectAbilityType.MANUAL, + [ProjectActionType.WORKFLOW]: ProjectAbilityType.MANUAL, + [ProjectActionType.VERTICAL_TRAIN]: ProjectAbilityType.MANUAL, + [ProjectActionType.VERTICAL_EVAL]: ProjectAbilityType.MANUAL, + [ProjectActionType.VERTICAL_PRED]: ProjectAbilityType.MANUAL, + [ProjectActionType.VERTICAL_SERVING]: ProjectAbilityType.MANUAL, + [ProjectActionType.TEE_SERVICE]: ProjectAbilityType.MANUAL, + [ProjectActionType.TEE_RESULT_EXPORT]: ProjectAbilityType.MANUAL, +}; +export const projectCreateForm = atom<ProjectCreateForm>({ + key: 'ProjectCreateForm', + default: { + name: '', + comment: '', + config: { + variables: [], + abilities: [ProjectTaskType.ALIGN], + action_rules: initialActionRules, + support_blockchain: true, + }, + }, +}); +export const projectJoinForm = atom<ProjectJoinForm>({ + key: 'ProjectJoinForm', + default: { + id: '', + comment: '', + config: { + variables: [], + }, + }, +}); diff --git a/web_console_v2/client/src/stores/template.ts b/web_console_v2/client/src/stores/template.ts index 3e35f44be..edeac951a 100644 --- a/web_console_v2/client/src/stores/template.ts +++ b/web_console_v2/client/src/stores/template.ts @@ -1,7 +1,9 @@ +import { Perspective } from './../views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index'; import { JobColorsMark } from 'components/WorkflowJobsCanvas/types'; import { atom } from 'recoil'; import { giveWeakRandomKey } from 'shared/helpers'; import { JobDependency } from 'typings/job'; +import { WorkflowTemplateType } from 'typings/workflow'; export type JobNodeRawDataSlim = { uuid: string; @@ -10,11 +12,15 @@ export type JobNodeRawDataSlim = { }; export type WorkflowTemplateForm = { - id?: ID; + id: number; + revision_id?: number; name: string; - is_left: boolean; + is_local?: boolean; group_alias: string; comment?: string; + creator_username?: string; + updated_at?: number; + created_at?: number; /** * The values of config is actually inside ../store.ts (with Map<uuid, values> struct) not on recoil, * here we only keep datas minimum-required for rendering template flow chart: @@ -25,23 +31,83 @@ export type WorkflowTemplateForm = { variables: never[]; job_definitions: JobNodeRawDataSlim[]; }; + kind: WorkflowTemplateType; +}; + +export const jobComposeDrawerState = atom({ + key: 'JobComposeDrawerState', + default: { + isGLobal: false, + perspective: Perspective.Slots, + }, +}); + +export const defaultTemplateForm: WorkflowTemplateForm = { + id: 0, + revision_id: 0, + name: '', + is_local: false, + group_alias: '', + config: { + variables: [], + job_definitions: [ + // Give an initial empty job node + { + uuid: 
giveWeakRandomKey(), + dependencies: [], + }, + ], + }, + creator_username: '', + updated_at: 0, + created_at: 0, + comment: '', + kind: WorkflowTemplateType.MY, }; export const templateForm = atom<WorkflowTemplateForm>({ key: 'WorkflowTemplateForm', default: { + id: 0, + revision_id: 0, name: '', - is_left: true, + is_local: false, group_alias: '', config: { variables: [], job_definitions: [ + // Give an initial empty job node { uuid: giveWeakRandomKey(), dependencies: [], }, ], }, + creator_username: '', + updated_at: 0, + created_at: 0, + comment: '', + kind: WorkflowTemplateType.MY, + }, +}); + +export type baseInfoForm = { + name: string; + group_alias: string; + comment?: string; +}; + +export const defaultBaseInfoForm: baseInfoForm = { + name: '', + group_alias: '', + comment: '', +}; + +export const templateBaseInfoForm = atom<baseInfoForm>({ + key: 'templateBaseInfoForm', + default: { + name: '', + group_alias: '', comment: '', }, }); diff --git a/web_console_v2/client/src/stores/trustedCenter.ts b/web_console_v2/client/src/stores/trustedCenter.ts new file mode 100644 index 000000000..37d8c43ce --- /dev/null +++ b/web_console_v2/client/src/stores/trustedCenter.ts @@ -0,0 +1,26 @@ +import { atom } from 'recoil'; +import { ParticipantDataset } from 'typings/dataset'; +import { AuthStatus, TrustedJobResource } from 'typings/trustedCenter'; + +export type TrustedJbGroupForm = { + name: string; + comment: string; + algorithm_id?: ID; + dataset_id?: ID; + participant_datasets?: ParticipantDataset[]; + auth_status?: AuthStatus; + resource?: TrustedJobResource; +}; + +export const trustedJobGroupForm = atom<TrustedJbGroupForm>({ + key: 'TrustedJbGroupForm', + default: { + name: '', + comment: '', + algorithm_id: undefined, + dataset_id: undefined, + participant_datasets: undefined, + auth_status: undefined, + resource: undefined, + }, +}); diff --git a/web_console_v2/client/src/stores/user.ts b/web_console_v2/client/src/stores/user.ts index bed5885df..771d6aabd 100644 --- a/web_console_v2/client/src/stores/user.ts +++ b/web_console_v2/client/src/stores/user.ts @@ -3,7 +3,7 @@ import store from 'store2'; import { atom, selector } from 'recoil'; import { fetchUserInfo } from 'services/user'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; -import { isNil } from 'lodash'; +import { isNil } from 'lodash-es'; import { FedRoles, FedUserInfo } from 'typings/auth'; export const userInfoState = atom<FedUserInfo>({ diff --git a/web_console_v2/client/src/stores/workflow.ts b/web_console_v2/client/src/stores/workflow.ts index 66a99e91a..5b5215f56 100644 --- a/web_console_v2/client/src/stores/workflow.ts +++ b/web_console_v2/client/src/stores/workflow.ts @@ -12,10 +12,9 @@ import { export type CreateWorkflowBasicForm = { _templateSelected?: string; + _revisionSelected?: string; _keepUsingOriginalTemplate?: boolean; -} & Partial< - Pick<WorkflowInitiatePayload, 'name' | 'forkable' | 'project_id' | 'batch_update_interval'> ->; +} & Partial<Pick<WorkflowInitiatePayload, 'name' | 'forkable' | 'project_id' | 'cron_config'>>; export type CreateTemplateForm = WorkflowTemplatePayload; @@ -25,12 +24,13 @@ export const workflowBasicForm = atom<CreateWorkflowBasicForm>({ // Fields start with underscore are solely UI releated things, // will not pass to backend on submit _templateSelected: undefined, + _revisionSelected: undefined, _keepUsingOriginalTemplate: true, name: '', project_id: undefined, forkable: true, - batch_update_interval: -1, + cron_config: '', }, }); @@ -81,6 +81,7 @@ export const 
forkWorkflowForm = atom<WorkflowForkPayload>({ default: { name: '', project_id: '', + is_local: false, forkable: true, config: null as any, fork_proposal_config: null as any, @@ -88,5 +89,6 @@ export const forkWorkflowForm = atom<WorkflowForkPayload>({ forked_from: '', create_job_flags: [] as CreateJobFlag[], peer_create_job_flags: [] as CreateJobFlag[], + template_id: undefined, }, }); diff --git a/web_console_v2/client/src/styles/_theme.ts b/web_console_v2/client/src/styles/_theme.ts deleted file mode 100644 index 5f8f765f7..000000000 --- a/web_console_v2/client/src/styles/_theme.ts +++ /dev/null @@ -1,316 +0,0 @@ -/* eslint-disable */ -/** - * WARNING: This file is auto-generated - * DO NOT modify it directly, ./variables.less is the file you should go - */ -const defaultTheme = { - red1: '#ffece8', - red2: '#fdcdc5', - red3: '#fbaca3', - red4: '#f98981', - red5: '#f76560', - red6: '#f53f3f', - red7: '#cb272d', - red8: '#a1151e', - red9: '#770813', - red10: '#4d000a', - orangered1: '#fff3e8', - orangered2: '#fdddc3', - orangered3: '#fcc59f', - orangered4: '#faac7b', - orangered5: '#f99057', - orangered6: '#f77234', - orangered7: '#cc5120', - orangered8: '#a23511', - orangered9: '#771f06', - orangered10: '#4d0e00', - orange1: '#fff7e8', - orange2: '#ffe4ba', - orange3: '#ffcf8b', - orange4: '#ffb65d', - orange5: '#ff9a2e', - orange6: '#ff7d00', - orange7: '#d25f00', - orange8: '#a64500', - orange9: '#792e00', - orange10: '#4d1b00', - gold1: '#fffce8', - gold2: '#fdf4bf', - gold3: '#fce996', - gold4: '#fadc6d', - gold5: '#f9cc45', - gold6: '#f7ba1e', - gold7: '#cc9213', - gold8: '#a26d0a', - gold9: '#774b04', - gold10: '#4d2d00', - yellow1: '#feffe8', - yellow2: '#fefebe', - yellow3: '#fdfa94', - yellow4: '#fcf26b', - yellow5: '#fbe842', - yellow6: '#fadc19', - yellow7: '#cfaf0f', - yellow8: '#a38408', - yellow9: '#785d03', - yellow10: '#4d3800', - lime1: '#fcffe8', - lime2: '#edf8bb', - lime3: '#dcf190', - lime4: '#c9e968', - lime5: '#b5e241', - lime6: '#9fdb1d', - lime7: '#7eb712', - lime8: '#5f940a', - lime9: '#437004', - lime10: '#2a4d00', - green1: '#e8ffea', - green2: '#aff0b5', - green3: '#7be188', - green4: '#4cd263', - green5: '#23c343', - green6: '#00b42a', - green7: '#009a29', - green8: '#008026', - green9: '#006622', - green10: '#004d1c', - cyan1: '#e8fffb', - cyan2: '#b7f4ec', - cyan3: '#89e9e0', - cyan4: '#5edfd6', - cyan5: '#37d4cf', - cyan6: '#14c9c9', - cyan7: '#0da5aa', - cyan8: '#07828b', - cyan9: '#03616c', - cyan10: '#00424d', - blue1: '#e8f7ff', - blue2: '#c3e7fe', - blue3: '#9fd4fd', - blue4: '#7bc0fc', - blue5: '#57a9fb', - blue6: '#3491fa', - blue7: '#206ccf', - blue8: '#114ba3', - blue9: '#063078', - blue10: '#001a4d', - arcoblue1: '#e8f3ff', - arcoblue2: '#bedaff', - arcoblue3: '#94bfff', - arcoblue4: '#6aa1ff', - arcoblue5: '#4080ff', - arcoblue6: '#165dff', - arcoblue7: '#0e42d2', - arcoblue8: '#072ca6', - arcoblue9: '#031a79', - arcoblue10: '#000d4d', - purple1: '#f5e8ff', - purple2: '#ddbef6', - purple3: '#c396ed', - purple4: '#a871e3', - purple5: '#8d4eda', - purple6: '#722ed1', - purple7: '#551db0', - purple8: '#3c108f', - purple9: '#27066e', - purple10: '#16004d', - pinkpurple1: '#ffe8fb', - pinkpurple2: '#f7baef', - pinkpurple3: '#f08ee6', - pinkpurple4: '#e865df', - pinkpurple5: '#e13edb', - pinkpurple6: '#d91ad9', - pinkpurple7: '#b010b6', - pinkpurple8: '#8a0993', - pinkpurple9: '#650370', - pinkpurple10: '#42004d', - magenta1: '#ffe8f1', - magenta2: '#fdc2db', - magenta3: '#fb9dc7', - magenta4: '#f979b7', - magenta5: '#f754a8', - magenta6: '#f5319d', 
- magenta7: '#cb1e83', - magenta8: '#a11069', - magenta9: '#77064f', - magenta10: '#4d0034', - darkRed1: '#4d000a', - darkRed2: '#770611', - darkRed3: '#a1161f', - darkRed4: '#cb2e34', - darkRed5: '#f54e4e', - darkRed6: '#f76965', - darkRed7: '#f98d86', - darkRed8: '#fbb0a7', - darkRed9: '#fdd1ca', - darkRed10: '#fff0ec', - darkOrangered1: '#4d0e00', - darkOrangered2: '#771e05', - darkOrangered3: '#a23714', - darkOrangered4: '#cc5729', - darkOrangered5: '#f77e45', - darkOrangered6: '#f9925a', - darkOrangered7: '#faad7d', - darkOrangered8: '#fcc6a1', - darkOrangered9: '#fddec5', - darkOrangered10: '#fff4eb', - darkOrange1: '#4d1b00', - darkOrange2: '#793004', - darkOrange3: '#a64b0a', - darkOrange4: '#d26913', - darkOrange5: '#ff8b1f', - darkOrange6: '#ff9626', - darkOrange7: '#ffb357', - darkOrange8: '#ffcd87', - darkOrange9: '#ffe3b8', - darkOrange10: '#fff7e8', - darkGold1: '#4d2d00', - darkGold2: '#774b04', - darkGold3: '#a26f0f', - darkGold4: '#cc961f', - darkGold5: '#f7c034', - darkGold6: '#f9cc44', - darkGold7: '#fadc6c', - darkGold8: '#fce995', - darkGold9: '#fdf4be', - darkGold10: '#fffce8', - darkYellow1: '#4d3800', - darkYellow2: '#785e07', - darkYellow3: '#a38614', - darkYellow4: '#cfb325', - darkYellow5: '#fae13c', - darkYellow6: '#fbe94b', - darkYellow7: '#fcf374', - darkYellow8: '#fdfa9d', - darkYellow9: '#fefec6', - darkYellow10: '#fefff0', - darkLime1: '#2a4d00', - darkLime2: '#447006', - darkLime3: '#629412', - darkLime4: '#84b723', - darkLime5: '#a8db39', - darkLime6: '#b8e24b', - darkLime7: '#cbe970', - darkLime8: '#def198', - darkLime9: '#eef8c2', - darkLime10: '#fdffee', - darkGreen1: '#004d1c', - darkGreen2: '#046625', - darkGreen3: '#0a802d', - darkGreen4: '#129a37', - darkGreen5: '#1db440', - darkGreen6: '#27c346', - darkGreen7: '#50d266', - darkGreen8: '#7ee18b', - darkGreen9: '#b2f0b7', - darkGreen10: '#ebffec', - darkCyan1: '#00424d', - darkCyan2: '#06616c', - darkCyan3: '#11838b', - darkCyan4: '#1fa6aa', - darkCyan5: '#30c9c9', - darkCyan6: '#3fd4cf', - darkCyan7: '#66dfd7', - darkCyan8: '#90e9e1', - darkCyan9: '#bef4ed', - darkCyan10: '#f0fffc', - darkBlue1: '#001a4d', - darkBlue2: '#052f78', - darkBlue3: '#134ca3', - darkBlue4: '#2971cf', - darkBlue5: '#4699fa', - darkBlue6: '#5aaafb', - darkBlue7: '#7dc1fc', - darkBlue8: '#a1d5fd', - darkBlue9: '#c6e8fe', - darkBlue10: '#eaf8ff', - darkArcoblue1: '#000d4d', - darkArcoblue2: '#041b79', - darkArcoblue3: '#0e32a6', - darkArcoblue4: '#1d4dd2', - darkArcoblue5: '#306eff', - darkArcoblue6: '#3c7eff', - darkArcoblue7: '#689fff', - darkArcoblue8: '#93beff', - darkArcoblue9: '#bedaff', - darkArcoblue10: '#eaf4ff', - darkPurple1: '#16004d', - darkPurple2: '#27066e', - darkPurple3: '#3e138f', - darkPurple4: '#5a25b0', - darkPurple5: '#7b3dd1', - darkPurple6: '#8e51da', - darkPurple7: '#a974e3', - darkPurple8: '#c59aed', - darkPurple9: '#dfc2f6', - darkPurple10: '#f7edff', - darkPinkpurple1: '#42004d', - darkPinkpurple2: '#650370', - darkPinkpurple3: '#8a0d93', - darkPinkpurple4: '#b01bb6', - darkPinkpurple5: '#d92ed9', - darkPinkpurple6: '#e13ddb', - darkPinkpurple7: '#e866df', - darkPinkpurple8: '#f092e6', - darkPinkpurple9: '#f7c1f0', - darkPinkpurple10: '#fff2fd', - darkMagenta1: '#4d0034', - darkMagenta2: '#770850', - darkMagenta3: '#a1176c', - darkMagenta4: '#cb2b88', - darkMagenta5: '#f545a6', - darkMagenta6: '#f756a9', - darkMagenta7: '#f97ab8', - darkMagenta8: '#fb9ec8', - darkMagenta9: '#fdc3db', - darkMagenta10: '#ffe8f1', - gray1: '#f7f8fa', - gray2: '#f2f3f5', - gray3: '#e5e6eb', - gray4: '#c9cdd4', - gray5: 
'#a9aeb8', - gray6: '#86909c', - gray7: '#6b7785', - gray8: '#4e5969', - gray9: '#272e3b', - gray10: '#1d2129', - darkGray1: '#17171a', - darkGray2: '#2e2e30', - darkGray3: '#484849', - darkGray4: '#5f5f60', - darkGray5: '#78787a', - darkGray6: '#929293', - darkGray7: '#ababac', - darkGray8: '#c5c5c5', - darkGray9: '#dfdfdf', - darkGray10: '#f6f6f6', - primaryColor: '#286af4', - infoColor: '#286af4', - successColor: '#00b42a', - processingColor: '#3491fa', - errorColor: '#f53f3f', - errorColorLight: '#ffece8', - highlightColor: '#f76560', - warningColor: '#ff7d00', - normalColor: '#dfdfdf', - textColor: '#4e5969', - textColorStrong: '#1d2129', - textColorSecondary: '#86909c', - textColorDisabled: '#c9cdd4', - textColorInverse: 'white', - componentBackgroundColorGray: '#f2f3f5', - backgroundColor: '#f7f8fa', - backgroundColorGray: '#e5e6eb', - backgroundColorError: '#ffece8', - backgroundColorErrorHover: '#fdcdc5', - lineColor: '#e5e8ef', - headerBackground: '#1d2129', - commonTiming: 'cubic-bezier(0.4, 0, 0.2, 1)', - fontFamily: "'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', 'Microsoft YaHei', '微软雅黑', Arial, sans-serif", - headerHeight: '60px', - pageHeaderHeight: '44px', - contentOuterPadding: '16px', - contentMinHeight: 'calc(100vh - @header-height - @content-outer-padding * 2)', -} - -export default defaultTheme diff --git a/web_console_v2/client/src/styles/_variables.css b/web_console_v2/client/src/styles/_variables.css deleted file mode 100644 index e7911f960..000000000 --- a/web_console_v2/client/src/styles/_variables.css +++ /dev/null @@ -1,313 +0,0 @@ -/** - * WARNING: This file is auto-generated - * DO NOT modify it directly, ./variables.less is the file you should go - */ -:root { - --red1: #ffece8; - --red2: #fdcdc5; - --red3: #fbaca3; - --red4: #f98981; - --red5: #f76560; - --red6: #f53f3f; - --red7: #cb272d; - --red8: #a1151e; - --red9: #770813; - --red10: #4d000a; - --orangered1: #fff3e8; - --orangered2: #fdddc3; - --orangered3: #fcc59f; - --orangered4: #faac7b; - --orangered5: #f99057; - --orangered6: #f77234; - --orangered7: #cc5120; - --orangered8: #a23511; - --orangered9: #771f06; - --orangered10: #4d0e00; - --orange1: #fff7e8; - --orange2: #ffe4ba; - --orange3: #ffcf8b; - --orange4: #ffb65d; - --orange5: #ff9a2e; - --orange6: #ff7d00; - --orange7: #d25f00; - --orange8: #a64500; - --orange9: #792e00; - --orange10: #4d1b00; - --gold1: #fffce8; - --gold2: #fdf4bf; - --gold3: #fce996; - --gold4: #fadc6d; - --gold5: #f9cc45; - --gold6: #f7ba1e; - --gold7: #cc9213; - --gold8: #a26d0a; - --gold9: #774b04; - --gold10: #4d2d00; - --yellow1: #feffe8; - --yellow2: #fefebe; - --yellow3: #fdfa94; - --yellow4: #fcf26b; - --yellow5: #fbe842; - --yellow6: #fadc19; - --yellow7: #cfaf0f; - --yellow8: #a38408; - --yellow9: #785d03; - --yellow10: #4d3800; - --lime1: #fcffe8; - --lime2: #edf8bb; - --lime3: #dcf190; - --lime4: #c9e968; - --lime5: #b5e241; - --lime6: #9fdb1d; - --lime7: #7eb712; - --lime8: #5f940a; - --lime9: #437004; - --lime10: #2a4d00; - --green1: #e8ffea; - --green2: #aff0b5; - --green3: #7be188; - --green4: #4cd263; - --green5: #23c343; - --green6: #00b42a; - --green7: #009a29; - --green8: #008026; - --green9: #006622; - --green10: #004d1c; - --cyan1: #e8fffb; - --cyan2: #b7f4ec; - --cyan3: #89e9e0; - --cyan4: #5edfd6; - --cyan5: #37d4cf; - --cyan6: #14c9c9; - --cyan7: #0da5aa; - --cyan8: #07828b; - --cyan9: #03616c; - --cyan10: #00424d; - --blue1: #e8f7ff; - --blue2: #c3e7fe; - --blue3: #9fd4fd; - --blue4: #7bc0fc; - --blue5: #57a9fb; 
- --blue6: #3491fa; - --blue7: #206ccf; - --blue8: #114ba3; - --blue9: #063078; - --blue10: #001a4d; - --arcoblue1: #e8f3ff; - --arcoblue2: #bedaff; - --arcoblue3: #94bfff; - --arcoblue4: #6aa1ff; - --arcoblue5: #4080ff; - --arcoblue6: #165dff; - --arcoblue7: #0e42d2; - --arcoblue8: #072ca6; - --arcoblue9: #031a79; - --arcoblue10: #000d4d; - --purple1: #f5e8ff; - --purple2: #ddbef6; - --purple3: #c396ed; - --purple4: #a871e3; - --purple5: #8d4eda; - --purple6: #722ed1; - --purple7: #551db0; - --purple8: #3c108f; - --purple9: #27066e; - --purple10: #16004d; - --pinkpurple1: #ffe8fb; - --pinkpurple2: #f7baef; - --pinkpurple3: #f08ee6; - --pinkpurple4: #e865df; - --pinkpurple5: #e13edb; - --pinkpurple6: #d91ad9; - --pinkpurple7: #b010b6; - --pinkpurple8: #8a0993; - --pinkpurple9: #650370; - --pinkpurple10: #42004d; - --magenta1: #ffe8f1; - --magenta2: #fdc2db; - --magenta3: #fb9dc7; - --magenta4: #f979b7; - --magenta5: #f754a8; - --magenta6: #f5319d; - --magenta7: #cb1e83; - --magenta8: #a11069; - --magenta9: #77064f; - --magenta10: #4d0034; - --darkRed1: #4d000a; - --darkRed2: #770611; - --darkRed3: #a1161f; - --darkRed4: #cb2e34; - --darkRed5: #f54e4e; - --darkRed6: #f76965; - --darkRed7: #f98d86; - --darkRed8: #fbb0a7; - --darkRed9: #fdd1ca; - --darkRed10: #fff0ec; - --darkOrangered1: #4d0e00; - --darkOrangered2: #771e05; - --darkOrangered3: #a23714; - --darkOrangered4: #cc5729; - --darkOrangered5: #f77e45; - --darkOrangered6: #f9925a; - --darkOrangered7: #faad7d; - --darkOrangered8: #fcc6a1; - --darkOrangered9: #fddec5; - --darkOrangered10: #fff4eb; - --darkOrange1: #4d1b00; - --darkOrange2: #793004; - --darkOrange3: #a64b0a; - --darkOrange4: #d26913; - --darkOrange5: #ff8b1f; - --darkOrange6: #ff9626; - --darkOrange7: #ffb357; - --darkOrange8: #ffcd87; - --darkOrange9: #ffe3b8; - --darkOrange10: #fff7e8; - --darkGold1: #4d2d00; - --darkGold2: #774b04; - --darkGold3: #a26f0f; - --darkGold4: #cc961f; - --darkGold5: #f7c034; - --darkGold6: #f9cc44; - --darkGold7: #fadc6c; - --darkGold8: #fce995; - --darkGold9: #fdf4be; - --darkGold10: #fffce8; - --darkYellow1: #4d3800; - --darkYellow2: #785e07; - --darkYellow3: #a38614; - --darkYellow4: #cfb325; - --darkYellow5: #fae13c; - --darkYellow6: #fbe94b; - --darkYellow7: #fcf374; - --darkYellow8: #fdfa9d; - --darkYellow9: #fefec6; - --darkYellow10: #fefff0; - --darkLime1: #2a4d00; - --darkLime2: #447006; - --darkLime3: #629412; - --darkLime4: #84b723; - --darkLime5: #a8db39; - --darkLime6: #b8e24b; - --darkLime7: #cbe970; - --darkLime8: #def198; - --darkLime9: #eef8c2; - --darkLime10: #fdffee; - --darkGreen1: #004d1c; - --darkGreen2: #046625; - --darkGreen3: #0a802d; - --darkGreen4: #129a37; - --darkGreen5: #1db440; - --darkGreen6: #27c346; - --darkGreen7: #50d266; - --darkGreen8: #7ee18b; - --darkGreen9: #b2f0b7; - --darkGreen10: #ebffec; - --darkCyan1: #00424d; - --darkCyan2: #06616c; - --darkCyan3: #11838b; - --darkCyan4: #1fa6aa; - --darkCyan5: #30c9c9; - --darkCyan6: #3fd4cf; - --darkCyan7: #66dfd7; - --darkCyan8: #90e9e1; - --darkCyan9: #bef4ed; - --darkCyan10: #f0fffc; - --darkBlue1: #001a4d; - --darkBlue2: #052f78; - --darkBlue3: #134ca3; - --darkBlue4: #2971cf; - --darkBlue5: #4699fa; - --darkBlue6: #5aaafb; - --darkBlue7: #7dc1fc; - --darkBlue8: #a1d5fd; - --darkBlue9: #c6e8fe; - --darkBlue10: #eaf8ff; - --darkArcoblue1: #000d4d; - --darkArcoblue2: #041b79; - --darkArcoblue3: #0e32a6; - --darkArcoblue4: #1d4dd2; - --darkArcoblue5: #306eff; - --darkArcoblue6: #3c7eff; - --darkArcoblue7: #689fff; - --darkArcoblue8: #93beff; - 
--darkArcoblue9: #bedaff; - --darkArcoblue10: #eaf4ff; - --darkPurple1: #16004d; - --darkPurple2: #27066e; - --darkPurple3: #3e138f; - --darkPurple4: #5a25b0; - --darkPurple5: #7b3dd1; - --darkPurple6: #8e51da; - --darkPurple7: #a974e3; - --darkPurple8: #c59aed; - --darkPurple9: #dfc2f6; - --darkPurple10: #f7edff; - --darkPinkpurple1: #42004d; - --darkPinkpurple2: #650370; - --darkPinkpurple3: #8a0d93; - --darkPinkpurple4: #b01bb6; - --darkPinkpurple5: #d92ed9; - --darkPinkpurple6: #e13ddb; - --darkPinkpurple7: #e866df; - --darkPinkpurple8: #f092e6; - --darkPinkpurple9: #f7c1f0; - --darkPinkpurple10: #fff2fd; - --darkMagenta1: #4d0034; - --darkMagenta2: #770850; - --darkMagenta3: #a1176c; - --darkMagenta4: #cb2b88; - --darkMagenta5: #f545a6; - --darkMagenta6: #f756a9; - --darkMagenta7: #f97ab8; - --darkMagenta8: #fb9ec8; - --darkMagenta9: #fdc3db; - --darkMagenta10: #ffe8f1; - --gray1: #f7f8fa; - --gray2: #f2f3f5; - --gray3: #e5e6eb; - --gray4: #c9cdd4; - --gray5: #a9aeb8; - --gray6: #86909c; - --gray7: #6b7785; - --gray8: #4e5969; - --gray9: #272e3b; - --gray10: #1d2129; - --darkGray1: #17171a; - --darkGray2: #2e2e30; - --darkGray3: #484849; - --darkGray4: #5f5f60; - --darkGray5: #78787a; - --darkGray6: #929293; - --darkGray7: #ababac; - --darkGray8: #c5c5c5; - --darkGray9: #dfdfdf; - --darkGray10: #f6f6f6; - --primaryColor: #286af4; - --infoColor: #286af4; - --successColor: #00b42a; - --processingColor: #3491fa; - --errorColor: #f53f3f; - --errorColorLight: #ffece8; - --highlightColor: #f76560; - --warningColor: #ff7d00; - --normalColor: #dfdfdf; - --textColor: #4e5969; - --textColorStrong: #1d2129; - --textColorSecondary: #86909c; - --textColorDisabled: #c9cdd4; - --textColorInverse: white; - --componentBackgroundColorGray: #f2f3f5; - --backgroundColor: #f7f8fa; - --backgroundColorGray: #e5e6eb; - --backgroundColorError: #ffece8; - --backgroundColorErrorHover: #fdcdc5; - --lineColor: #e5e8ef; - --headerBackground: #1d2129; - --commonTiming: cubic-bezier(0.4, 0, 0.2, 1); - --fontFamily: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; - --headerHeight: 60px; - --pageHeaderHeight: 44px; - --contentOuterPadding: 16px; - --contentMinHeight: calc(100vh - 60px - 16px * 2); -} diff --git a/web_console_v2/client/src/styles/animations.ts b/web_console_v2/client/src/styles/animations.ts index 3c952f9ea..d03747588 100644 --- a/web_console_v2/client/src/styles/animations.ts +++ b/web_console_v2/client/src/styles/animations.ts @@ -1,6 +1,8 @@ +/* istanbul ignore file */ + import { keyframes } from 'styled-components'; -export const ScrollDown = keyframes` +export const Suspense = keyframes` 0% { transform: translateY(0); } @@ -14,3 +16,18 @@ export const ScrollDown = keyframes` transform: translateY(0); } `; + +export const HighlightedWave = keyframes` + 0% { + box-shadow: 0 0 0 2px var(--primaryColor); + } + 33% { + box-shadow: 0 0 0 2px var(--primaryColor), 0 0 0 5px var(--blue2); + } + 66% { + box-shadow: 0 0 0 2px var(--primaryColor), 0 0 0 10px transparent; + } + 100% { + box-shadow: 0 0 0 2px var(--primaryColor); + } +`; diff --git a/web_console_v2/client/src/styles/antd-overrides.less b/web_console_v2/client/src/styles/antd-overrides.less deleted file mode 100644 index 1e15456cc..000000000 --- a/web_console_v2/client/src/styles/antd-overrides.less +++ /dev/null @@ -1,358 +0,0 @@ -@import '~antd/lib/style/themes/default.less'; -@import '~antd/dist/antd.less'; -@import './variables.less'; - -// Variables 
-@body-background: #eff2f7; - -// Button -// -- -@btn-default-color: @gray-8; -@btn-default-bg: @gray-2; -@btn-default-border: @gray-2; - -.ant-btn.ant-btn { - border: none; - - > .anticon { - // Remove sturt - display: inline-flex; - justify-content: center; - line-height: 0; - height: 1em; - } - - > .ant-btn-loading-icon .anticon { - position: relative; - z-index: 1; - padding-right: 0; - margin-right: 8px; - } - - &:hover { - color: @btn-default-color; - border-color: @gray-2; - background-color: @gray-3; - } - &-sm { - height: 28px; - line-height: 28px; - } - &-lg { - height: 36px; - padding: 0 20px; - } - &-primary { - box-shadow: none; - - &:hover { - background-color: @arcoblue-5; - color: white; - } - &:active { - background-color: @arcoblue-7; - color: white; - } - } -} - -// Icon -// -- -.anticon { - > svg { - width: 1em; - fill: currentColor; - } -} - -// Menu -// -- -@menu-item-active-bg: @gray-2; -@menu-item-active-border-width: 0; - -.ant-menu { - &-inline, - &-vertical { - border-right: none; - } - - &-item { - transition: color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), - background 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); - } - - &-item-selected { - border-radius: 2px; - font-weight: bold; - } -} - -// Form -// --- -.ant-form-item { - --margin-bottom: 20px; - - &:not(.ant-form-item-with-help) { - margin-bottom: var(--margin-bottom); - } - - &-label { - > label { - font-size: 13px; - &::after { - margin-right: 14px; - } - } - } - - &-explain { - min-height: var(--margin-bottom); - line-height: var(--margin-bottom); - font-size: 12px; - } -} - -// Input -// --- -.ant-input { - &::placeholder { - color: @gray-7; - } -} - -.ant-input, -.ant-input-affix-wrapper { - border: 1px solid transparent; - transition: color 0.1s linear, border-color 0.1s linear, background-color 0.1s linear; - box-shadow: none !important; - background-color: var(--gray2); - color: var(--gray10); - - &:hover { - background-color: var(--backgroundColorGray); - border-color: transparent; - } - &:focus { - border-color: var(--arcoblue6); - background-color: white; - } - &-focused { - border-color: var(--arcoblue6) !important; - background-color: white !important; - } - - .ant-input { - background-color: transparent; - color: var(--gray10); - } -} - -.ant-form-item-has-error { - .ant-input, - .ant-input-affix-wrapper { - border-color: transparent; - background-color: var(--backgroundColorError); - - &:hover { - border-color: transparent; - background-color: var(--backgroundColorErrorHover); - } - &:focus { - border-color: @error-color; - background-color: white; - } - &-focused { - border-color: @error-color !important; - background-color: white !important; - } - } - .ant-input-group-addon { - background-color: var(--backgroundColorError); - } -} - -.ant-input-group-addon { - --border-color: var(--backgroundColorGray); - - background-color: var(--componentBackgroundColorGray); - border-width: 0; - - &:first-child { - border-right: 1px solid var(--border-color); - } - &:last-child { - border-left: 1px solid var(--border-color); - } -} - -// Select -// --- -@select-background: var(--componentBackgroundColorGray); -@select-border-color: transparent; -@input-placeholder-color: var(--gray7); - -.ant-select { - &:not(.ant-select-disabled) { - &:hover .ant-select-selector { - border-color: transparent; - background-color: var(--backgroundColorGray); - } - - &.ant-select-focused .ant-select-selector.ant-select-selector.ant-select-selector { - border-color: var(--primaryColor); - box-shadow: none; - 
background-color: transparent; - } - } - - &.ant-select-disabled { - opacity: 0.5; - } -} - -// Date Picker -// -- -@picker-bg:var (--componentBackgroundColorGray); -@picker-border-color: transparent; - -// Drawer -// -- -.ant-picker { - background-color: var(--componentBackgroundColorGray); - - &:hover { - border-color: transparent; - background-color: var(--backgroundColorGray); - } - - &.ant-picker-focused { - background-color: transparent; - } -} - -// Upload -// -- -.ant-upload { - color: @dark-gray-1; - overflow: hidden; - - &.ant-upload-drag.ant-upload-drag { - border-color: transparent; - background-color: @gray-2; - background-clip: content-box; - - &:not(.ant-upload-disabled):hover { - background-color: @arcoblue-1; - - .anticon-plus { - color: @primary-color; - } - } - - .anticon-plus { - display: block; - padding: 20px 0 30px; - font-size: 16px; - color: @text-color; - } - } -} - -.ant-form-item-has-error { - .ant-upload.ant-upload-drag.ant-upload-drag:not(:hover) { - border-color: transparent; - background-color: var(--red1); - } -} - -// Breadcrumb -// --- -.ant-breadcrumb { - > span:last-child { - color: @text-color-strong; - font-weight: 500; - } -} - -// Table -// --- -.ant-table-wrapper { - width: 100%; - max-width: 100%; -} - -// Radio -// --- -.ant-radio-button-wrapper { - display: inline-flex; - align-items: center; - justify-content: center; - min-width: 70px; - border: none !important; - - > .ant-radio-button { - padding: 3px; - border-radius: 6px; - background-color: transparent; - background-clip: content-box; - transition: 0.3s ease-out var(--commonTiming); - } - - &:focus-within { - box-shadow: none; - } - &:hover { - z-index: 1; - color: var(--textColor); - - > .ant-radio-button { - background-color: white; - } - } - &-checked:not(.ant-radio-button-wrapper-disabled) { - // Emphasize but not casue size change - text-shadow: 0 0 var(--primaryColor); - - &:focus-within { - box-shadow: none; - } - - > .ant-radio-button { - z-index: auto; - background-color: white; - - & + * { - position: relative; - } - } - } - &:not(:first-child)::before { - content: none; - } - - > .ant-radio-button { - padding: 3px; - border-radius: 6px; - background-color: white; - background-clip: content-box; - } -} - -// Picker -// --- -.ant-picker-footer { - padding: 5px 15px; -} - -// Switch -// --- -@keyframes spinWith50Translate { - 100% { - transform: translate(-50%, -50%) rotate(360deg); - } -} -.ant-switch { - &-loading-icon { - animation-name: spinWith50Translate; - } -} diff --git a/web_console_v2/client/src/styles/arco-overrides.less b/web_console_v2/client/src/styles/arco-overrides.less new file mode 100644 index 000000000..7da3e6a1a --- /dev/null +++ b/web_console_v2/client/src/styles/arco-overrides.less @@ -0,0 +1,41 @@ +@body-background: #eff2f7; +@btn-default-color: rgb(var(--gray-8)); +@btn-default-bg: rgb(var(--gray-2)); +@btn-default-border: rgb(var(--gray-2)); +@btn-height-lg: 36px; +@btn-padding-horizontal-lg: 20px; +@menu-item-active-bg: rgb(var(--gray-2)); +@menu-item-active-border-width: 0; +@select-background: var(--componentBackgroundColorGray); +@select-border-color: transparent; +@input-placeholder-color: rgb(var(--gray-7)); +@picker-bg: var(--componentBackgroundColorGray); +@picker-border-color: transparent; +@keyframes spinWith50Translate { + 100% { + transform: translate(-50%, -50%) rotate(360deg); + } +} + +.anticon { + > svg { + width: 1em; + fill: currentColor; + } +} + +.arco-spin { + display: block; +} + +.arco-btn { + border: none; + + > .anticon { + // 
Remove strut + display: inline-flex; + justify-content: center; + height: 1em; + vertical-align: middle; + } +} diff --git a/web_console_v2/client/src/styles/elements.ts b/web_console_v2/client/src/styles/elements.ts index bb6370ea9..5d6e9f1e8 100644 --- a/web_console_v2/client/src/styles/elements.ts +++ b/web_console_v2/client/src/styles/elements.ts @@ -1,5 +1,7 @@ +/* istanbul ignore file */ + import styled from 'styled-components'; -import { MixinFlexAlignCenter, MixinSquare } from './mixins'; +import { MixinFlexAlignCenter, MixinSquare, MixinBaseFontInfo } from './mixins'; export const ControlButton = styled.div` ${MixinFlexAlignCenter()} @@ -25,3 +27,58 @@ export const ControlButton = styled.div` margin-top: 8px; } `; + +export type LabelProps = { + /** + * margin-right + * @default 0 + */ + marginRight?: number; + /** + * when isBlock = true, display: block, otherwise display: inline-block + * @default false + */ + isBlock?: boolean; + /** + * font-size + * @default 12 + */ + fontSize?: number; + /** + * color + * @default var(--textColor) + */ + fontColor?: string; + /** + * font-weight + * @default 400 + */ + fontWeight?: number; +}; + +export const Label = styled.span<LabelProps>` + ${(props) => { + const { fontSize = 12, fontColor = 'var(--textColor)', fontWeight = 400 } = props; + return MixinBaseFontInfo(fontSize, fontColor, fontWeight); + }} + display: ${(props) => (props.isBlock ? 'block' : 'inline-block')}; + margin-right: ${(props) => props.marginRight || 0}px; +`; + +export const LabelStrong = styled(Label).attrs((props: any) => ({ + fontSize: props.fontSize || 12, + fontColor: props.fontColor || 'var(--textColorStrong)', + fontWeight: props.fontWeight || 500, +}))``; + +export const LabelTint = styled(Label).attrs((props: any) => ({ + fontSize: props.fontSize || 12, + fontColor: props.fontColor || 'var(--textColorSecondary)', + fontWeight: props.fontWeight || 400, +}))``; + +export const LabelForm = styled(Label).attrs((props: any) => ({ + fontSize: props.fontSize || 13, + fontColor: props.fontColor || 'rgba(0, 0, 0, 0.85)', + fontWeight: props.fontWeight || 400, +}))``; diff --git a/web_console_v2/client/src/styles/global.less b/web_console_v2/client/src/styles/global.less new file mode 100644 index 000000000..c8058c41a --- /dev/null +++ b/web_console_v2/client/src/styles/global.less @@ -0,0 +1,320 @@ +@custom-header-font: 12px; + +body, +html { + width: 100%; + height: 100%; +} + +body { + margin: 0; + color: #4e5969; + font-size: 14px; + font-family: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', + 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; + font-variant: tabular-nums; + line-height: 1.5715; + background-color: #eff2f7; + font-feature-settings: 'tnum', 'tnum'; + overflow: hidden; +} + +input::-ms-clear, +input::-ms-reveal { + display: none; +} + +*, +*::before, +*::after { + box-sizing: border-box; // 1 +} + +hr { + box-sizing: content-box; // 1 + height: 0; // 1 + overflow: visible; // 2 +} + +h1, +h2, +h3, +h4, +h5, +h6 { + margin-top: 0; + margin-bottom: 0.5em; + color: rgba(0, 0, 0, 0.85); + font-weight: 500; +} + +p { + margin-top: 0; + margin-bottom: 1em; +} + +address { + margin-bottom: 1em; + font-style: normal; + line-height: inherit; +} + +input[type='text'], +input[type='password'], +input[type='number'], +textarea { + -webkit-appearance: none; +} + +ol, +ul, +dl { + margin-top: 0; + margin-bottom: 1em; +} + +ol ol, +ul ul, +ol ul, +ul ol { + margin-bottom: 0; +} + +dt { + font-weight: 500; +} + +dd { + margin-bottom:
0.5em; + margin-left: 0; // Undo browser default +} + +blockquote { + margin: 0 0 1em; +} + +dfn { + font-style: italic; // Add the correct font style in Android 4.3- +} + +b, +strong { + font-weight: bolder; // Add the correct font weight in Chrome, Edge, and Safari +} + +small { + font-size: 80%; // Add the correct font size in all browsers +} + +a { + color: #1664ff; + text-decoration: none; + background-color: initial; + outline: none; + cursor: pointer; + transition: color 0.3s; +} + +pre { + // remove browser default top margin + margin-top: 0; + // Reset browser default of `1em` to use `em`s + margin-bottom: 1em; + // Don't allow content to break outside + overflow: auto; +} + +img { + vertical-align: middle; + border-style: none; // remove the border on images inside links in IE 10-. +} + +output { + display: inline-block; +} + +summary { + display: list-item; // Add the correct display in all browsers +} + +template { + display: none; // Add the correct display in IE +} + +// Always hide an element with the `hidden` HTML attribute (from PureCSS). +// Needed for proper display in IE 10-. +[hidden] { + display: none !important; +} + +// Fix the arco drawer being covered by the platform header +.arco-drawer-wrapper { + top: var(--headerHeight); + z-index: 1001 !important; +} +// Arco Modal +// --- +.custom-modal { + .arco-modal-content { + text-align: center; + } + .arco-modal-title { + word-break: break-all; + } +} + +.arco-modal-simple:not(.custom-modal) { + padding: 0 !important; + .arco-modal-header, + .arco-modal-content, + .arco-modal-footer { + padding: 16px 20px !important; + } + + .arco-modal-header { + border-bottom: 1px solid var(--color-border-2) !important; + padding: 12px 20px !important; + margin-bottom: 20px; + .arco-modal-title { + line-height: 22px; + } + } + .arco-modal-content { + padding-top: 0 !important; + } + .arco-modal-footer { + border-top: 1px solid var(--color-border-2) !important; + text-align: right !important; + .arco-btn { + min-width: 72px !important; + } + } +} + +// Arco Input +// --- + +// keep the clear icon always visible, otherwise it would make the e2e test fail +.arco-input-clear-wrapper { + .arco-input-clear-icon { + display: inline-block; + margin-right: 5px; + } +} + +// button +.custom-text-button { + padding: 0; + border: none; + font-size: 12px; + background: transparent; + color: var(--primaryColor); + cursor: pointer; + &:hover { + color: var(--newPrimaryHover); + background-color: transparent !important; + } + &:disabled { + cursor: not-allowed; + color: var(--color-text-4); + } +} + +.custom-operation-button { + font-size: @custom-header-font; + .arco-btn-size-default { + font-size: @custom-header-font; + } +} + +// table + +.custom-table { + .arco-table-cell-wrap-value, + .arco-table-th-item-title { + font-size: var(--textFontSizePrimary); + } + .arco-pagination { + width: 100%; + display: flex; + justify-content: space-between; + } +} + +// put the filter icon on the left side +.custom-table-left-side-filter { + --custom-table-th-height: 40px; + .arco-table-col-has-filter { + padding-top: 0; + padding-bottom: 0; + height: var(--custom-table-th-height); + line-height: var(--custom-table-th-height); + } + .arco-table-filters { + position: unset; + display: inline-block; + margin-left: 2px; + height: var(--custom-table-th-height); + line-height: var(--custom-table-th-height); + text-align: center; + vertical-align: top; + & > svg { + vertical-align: -4px; + } + } +} + +// arco tabs with textFontSizePrimary +.custom-tabs { + .arco-tabs-header-title-text { +
font-size: var(--textFontSizePrimary); + } +} + +// typography +.custom-typography { + &.arco-typography { + font-size: var(--textFontSizePrimary); + } +} + +// popover +.custom-popover { + &.arco-popover { + max-width: 400px; + width: 400px; + } + .arco-popover-content { + max-height: 600px; + border: 1px solid #e5e6e8; + box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.1); + border-radius: 4px; + overflow: auto; + padding: 0; + .arco-popover-inner { + border-radius: 0; + } + } + + .arco-popover-arrow { + display: none !important; + } +} + +// select + +.custom-select { + font-size: 12px; +} + +// input + +.custom-input { + font-size: 12px; + input::-webkit-input-placeholder { + font-size: 12px; + } + input::-moz-placeholder { + font-size: 12px; + } +} diff --git a/web_console_v2/client/src/styles/index.ts b/web_console_v2/client/src/styles/index.ts new file mode 100644 index 000000000..c2514a02c --- /dev/null +++ b/web_console_v2/client/src/styles/index.ts @@ -0,0 +1,8 @@ +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/.env.development or src/.env.production is the file you should go + */ +import defaultTheme from './themes/normal'; + +export { defaultTheme }; diff --git a/web_console_v2/client/src/styles/mixins.less b/web_console_v2/client/src/styles/mixins.less new file mode 100644 index 000000000..79391113b --- /dev/null +++ b/web_console_v2/client/src/styles/mixins.less @@ -0,0 +1,25 @@ +.MixinSquare(@size: 0px) { + width: @size; + height: @size; +} + +.MixinCircle(@size: 0px) { + .MixinSquare(@size); + border-radius: 50%; +} + +.MixinEllipsis(@maxWidth: 'auto') { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: @maxWidth; +} + +.MixinFontClarity() { + font-family: 'ClarityMono', sans-serif; +} + +.MixinFlexAlignCenter() { + justify-content: center; + align-items: center; +} diff --git a/web_console_v2/client/src/styles/mixins.test.ts b/web_console_v2/client/src/styles/mixins.test.ts index 14f9e1dfb..6e6c16bc0 100644 --- a/web_console_v2/client/src/styles/mixins.test.ts +++ b/web_console_v2/client/src/styles/mixins.test.ts @@ -1,5 +1,5 @@ import { MixinCommonTransition, MixinSquare, MixinCircle } from './mixins'; -import defaultTheme from './_theme'; +import defaultTheme from './theme'; describe('Square and Circle', () => { it('Should works fine', () => { diff --git a/web_console_v2/client/src/styles/mixins.ts b/web_console_v2/client/src/styles/mixins.ts index cae705dc7..30e20cf60 100644 --- a/web_console_v2/client/src/styles/mixins.ts +++ b/web_console_v2/client/src/styles/mixins.ts @@ -1,5 +1,5 @@ import { convertToUnit } from 'shared/helpers'; -import defaultTheme from './_theme'; +import defaultTheme from './theme'; /* istanbul ignore next */ export function MixinFontClarity() { @@ -32,11 +32,12 @@ export function MixinFlexAlignCenter() { } /* istanbul ignore next */ -export function MixinEllipsis() { +export function MixinEllipsis(maxWidth?: any, unit?: string) { return ` overflow: hidden; text-overflow: ellipsis; white-space: nowrap; + max-width: ${convertToUnit(maxWidth ??
'auto', unit)}; `; } @@ -58,3 +59,40 @@ export function MixinCommonTransition( transition: ${arr.map((i) => `${i} ${duration}s ${defaultTheme.commonTiming}`).join(',')}; `; } + +export function MixinWritableShape() { + return ` + width: 13px; + height: 11px; + background-color: var(--primaryColor); + clip-path: polygon(50% 0, 100% 100%, 0 100%, 50% 0); + transform: translateY(-0.5px); + `; +} + +export function MixinReadableShape() { + return ` + ${MixinSquare(11)}; + + background-color: var(--successColor); + `; +} +export function MixinPrivateShape() { + return ` + ${MixinCircle(12)}; + + background-color: var(--warningColor); + `; +} + +export function MixinBaseFontInfo( + fontSize: number = 12, + color: any = 'var(--textColor)', + fontWeight: any = 400, +) { + return ` + font-size: ${fontSize}px; + color: ${color}; + font-weight: ${fontWeight}; + `; +} diff --git a/web_console_v2/client/src/styles/theme.ts b/web_console_v2/client/src/styles/theme.ts new file mode 100644 index 000000000..d9fa1e94b --- /dev/null +++ b/web_console_v2/client/src/styles/theme.ts @@ -0,0 +1,8 @@ +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/.env.development or src/.env.production is the file you should go + */ +import defaultTheme from './themes/normal/normal'; + +export default defaultTheme; diff --git a/web_console_v2/client/src/styles/themes/bioland/bioland.css b/web_console_v2/client/src/styles/themes/bioland/bioland.css new file mode 100644 index 000000000..bf2729245 --- /dev/null +++ b/web_console_v2/client/src/styles/themes/bioland/bioland.css @@ -0,0 +1,62 @@ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/bioland.less is the file you should go + */ +:root { + --red1: #ffece8; + --red2: #fdcdc5; + --red5: #f76560; + --red6: #f53f3f; + --orange6: #ff7d00; + --green6: #00b42a; + --blue4: #7bc0fc; + --blue6: #3491fa; + --gray1: #f7f8fa; + --gray2: #f2f3f5; + --gray3: #e5e6eb; + --gray4: #c9cdd4; + --gray6: #86909c; + --gray8: #4e5969; + --gray9: #272e3b; + --gray10: #1d2129; + --newPrimaryDefault: #4450dc; + --newPrimaryHover: #6171e3; + --newPrimaryActive: #2b32b8; + --newPrimaryDisable: #a1b0f1; + --newPrimaryHoverGray: #c4cff8; + --primaryColor: #4450dc; + --infoColor: #4450dc; + --successColor: #00b42a; + --processingColor: #3491fa; + --errorColor: #f53f3f; + --errorColorLight: #ffece8; + --highlightColor: #f76560; + --warningColor: #ff7d00; + --normalColor: #272e3b; + --textColor: #4e5969; + --textColorStrong: #1d2129; + --textColorStrongSecondary: #4e5969; + --textColorSecondary: #86909c; + --textColorDisabled: #c9cdd4; + --textColorInverse: white; + --componentBackgroundColorGray: #f2f3f5; + --backgroundColor: #f7f8fa; + --backgroundColorGray: #e5e6eb; + --backgroundColorError: #ffece8; + --backgroundColorErrorHover: #fdcdc5; + --lineColor: #e5e8ef; + --headerBackground: #4450dc; + --headerBorderBottomWidth: 0; + --headerLogoHeight: 42px; + --headerProjectColor: #fff; + --headerPaddingLeft: 0; + --headerPaddingRight: 30px; + --commonTiming: cubic-bezier(0.4, 0, 0.2, 1); + --fontFamily: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; + --headerHeight: 60px; + --pageHeaderHeight: 44px; + --contentOuterPadding: 16px; + --contentMinHeight: calc(100vh - 60px - 16px * 2); + --textFontSizePrimary: 12px; + --zIndexLessThanModal: 999; +} diff --git a/web_console_v2/client/src/styles/themes/bioland/bioland.less 
b/web_console_v2/client/src/styles/themes/bioland/bioland.less new file mode 100644 index 000000000..ba64e5c0c --- /dev/null +++ b/web_console_v2/client/src/styles/themes/bioland/bioland.less @@ -0,0 +1,11 @@ +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/bioland.less is the file you should go + */ +@import 'assets/fonts/ClarityMono/index.less'; +@import './bioland.css'; + +@import 'styles/variables/bioland.less'; +@import 'styles/arco-overrides.less'; +@import 'styles/global.less'; diff --git a/web_console_v2/client/src/styles/themes/bioland/bioland.ts b/web_console_v2/client/src/styles/themes/bioland/bioland.ts new file mode 100644 index 000000000..d0bc9c200 --- /dev/null +++ b/web_console_v2/client/src/styles/themes/bioland/bioland.ts @@ -0,0 +1,67 @@ +/* istanbul ignore file */ + +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/bioland.less is the file you should go + */ +const defaultTheme = { + red1: '#ffece8', + red2: '#fdcdc5', + red5: '#f76560', + red6: '#f53f3f', + orange6: '#ff7d00', + green6: '#00b42a', + blue4: '#7bc0fc', + blue6: '#3491fa', + gray1: '#f7f8fa', + gray2: '#f2f3f5', + gray3: '#e5e6eb', + gray4: '#c9cdd4', + gray6: '#86909c', + gray8: '#4e5969', + gray9: '#272e3b', + gray10: '#1d2129', + newPrimaryDefault: '#4450dc', + newPrimaryHover: '#6171e3', + newPrimaryActive: '#2b32b8', + newPrimaryDisable: '#a1b0f1', + newPrimaryHoverGray: '#c4cff8', + primaryColor: '#4450dc', + infoColor: '#4450dc', + successColor: '#00b42a', + processingColor: '#3491fa', + errorColor: '#f53f3f', + errorColorLight: '#ffece8', + highlightColor: '#f76560', + warningColor: '#ff7d00', + normalColor: '#272e3b', + textColor: '#4e5969', + textColorStrong: '#1d2129', + textColorStrongSecondary: '#4e5969', + textColorSecondary: '#86909c', + textColorDisabled: '#c9cdd4', + textColorInverse: 'white', + componentBackgroundColorGray: '#f2f3f5', + backgroundColor: '#f7f8fa', + backgroundColorGray: '#e5e6eb', + backgroundColorError: '#ffece8', + backgroundColorErrorHover: '#fdcdc5', + lineColor: '#e5e8ef', + headerBackground: '#4450dc', + headerBorderBottomWidth: '0', + headerLogoHeight: '42px', + headerProjectColor: '#fff', + headerPaddingLeft: '0', + headerPaddingRight: '30px', + commonTiming: 'cubic-bezier(0.4, 0, 0.2, 1)', + fontFamily: "'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', 'Microsoft YaHei', '微软雅黑', Arial, sans-serif", + headerHeight: '60px', + pageHeaderHeight: '44px', + contentOuterPadding: '16px', + contentMinHeight: 'calc(100vh - 60px - 16px * 2)', + textFontSizePrimary: '12px', + zIndexLessThanModal: '999', +} + +export default defaultTheme diff --git a/web_console_v2/client/src/styles/themes/bioland/index.ts b/web_console_v2/client/src/styles/themes/bioland/index.ts new file mode 100644 index 000000000..25185f23e --- /dev/null +++ b/web_console_v2/client/src/styles/themes/bioland/index.ts @@ -0,0 +1,9 @@ +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/bioland.less is the file you should go + */ +import './bioland.less'; +import defaultTheme from './bioland'; + +export default defaultTheme; diff --git a/web_console_v2/client/src/styles/themes/normal/index.ts b/web_console_v2/client/src/styles/themes/normal/index.ts new file mode 100644 index 000000000..1f82d3bdd --- /dev/null +++ 
b/web_console_v2/client/src/styles/themes/normal/index.ts @@ -0,0 +1,9 @@ +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/normal.less is the file you should go + */ +import './normal.less'; +import defaultTheme from './normal'; + +export default defaultTheme; diff --git a/web_console_v2/client/src/styles/themes/normal/normal.css b/web_console_v2/client/src/styles/themes/normal/normal.css new file mode 100644 index 000000000..cd3d17a33 --- /dev/null +++ b/web_console_v2/client/src/styles/themes/normal/normal.css @@ -0,0 +1,62 @@ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/normal.less is the file you should go + */ +:root { + --red1: #ffece8; + --red2: #fdcdc5; + --red5: #f76560; + --red6: #f53f3f; + --orange6: #ff7d00; + --green6: #00b42a; + --blue4: #7bc0fc; + --blue6: #3491fa; + --gray1: #f7f8fa; + --gray2: #f2f3f5; + --gray3: #e5e6eb; + --gray4: #c9cdd4; + --gray6: #86909c; + --gray8: #4e5969; + --gray9: #272e3b; + --gray10: #1d2129; + --newPrimaryDefault: #1664ff; + --newPrimaryHover: #4086ff; + --newPrimaryActive: #0e49d2; + --newPrimaryDisable: #94c2ff; + --newPrimaryHoverGray: #bedcff; + --primaryColor: #1664ff; + --infoColor: #1664ff; + --successColor: #00b42a; + --processingColor: #3491fa; + --errorColor: #f53f3f; + --errorColorLight: #ffece8; + --highlightColor: #f76560; + --warningColor: #ff7d00; + --normalColor: #272e3b; + --textColor: #4e5969; + --textColorStrong: #1d2129; + --textColorStrongSecondary: #4e5969; + --textColorSecondary: #86909c; + --textColorDisabled: #c9cdd4; + --textColorInverse: white; + --componentBackgroundColorGray: #f2f3f8; + --backgroundColor: #f7f8fa; + --backgroundColorGray: #e5e6eb; + --backgroundColorError: #ffece8; + --backgroundColorErrorHover: #fdcdc5; + --lineColor: #e5e8ef; + --headerBackground: #1d2129; + --headerBorderBottomWidth: 0; + --headerLogoHeight: 28px; + --headerProjectColor: #fff; + --headerPaddingLeft: 30px; + --headerPaddingRight: 30px; + --commonTiming: cubic-bezier(0.4, 0, 0.2, 1); + --fontFamily: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; + --headerHeight: 60px; + --pageHeaderHeight: 44px; + --contentOuterPadding: 16px; + --contentMinHeight: calc(100vh - 60px - 16px * 2); + --textFontSizePrimary: 12px; + --zIndexLessThanModal: 999; +} diff --git a/web_console_v2/client/src/styles/themes/normal/normal.less b/web_console_v2/client/src/styles/themes/normal/normal.less new file mode 100644 index 000000000..c592d00e2 --- /dev/null +++ b/web_console_v2/client/src/styles/themes/normal/normal.less @@ -0,0 +1,11 @@ +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/normal.less is the file you should go + */ +@import 'assets/fonts/ClarityMono/index.less'; +@import './normal.css'; + +@import 'styles/variables/normal.less'; +@import 'styles/arco-overrides.less'; +@import 'styles/global.less'; diff --git a/web_console_v2/client/src/styles/themes/normal/normal.ts b/web_console_v2/client/src/styles/themes/normal/normal.ts new file mode 100644 index 000000000..81c7fcc12 --- /dev/null +++ b/web_console_v2/client/src/styles/themes/normal/normal.ts @@ -0,0 +1,67 @@ +/* istanbul ignore file */ + +/* eslint-disable */ +/** + * WARNING: This file is auto-generated + * DO NOT modify it directly, src/styles/variables/normal.less is the file you should go + */ +const 
defaultTheme = { + red1: '#ffece8', + red2: '#fdcdc5', + red5: '#f76560', + red6: '#f53f3f', + orange6: '#ff7d00', + green6: '#00b42a', + blue4: '#7bc0fc', + blue6: '#3491fa', + gray1: '#f7f8fa', + gray2: '#f2f3f5', + gray3: '#e5e6eb', + gray4: '#c9cdd4', + gray6: '#86909c', + gray8: '#4e5969', + gray9: '#272e3b', + gray10: '#1d2129', + newPrimaryDefault: '#1664ff', + newPrimaryHover: '#4086ff', + newPrimaryActive: '#0e49d2', + newPrimaryDisable: '#94c2ff', + newPrimaryHoverGray: '#bedcff', + primaryColor: '#1664ff', + infoColor: '#1664ff', + successColor: '#00b42a', + processingColor: '#3491fa', + errorColor: '#f53f3f', + errorColorLight: '#ffece8', + highlightColor: '#f76560', + warningColor: '#ff7d00', + normalColor: '#272e3b', + textColor: '#4e5969', + textColorStrong: '#1d2129', + textColorStrongSecondary: '#4e5969', + textColorSecondary: '#86909c', + textColorDisabled: '#c9cdd4', + textColorInverse: 'white', + componentBackgroundColorGray: '#f2f3f8', + backgroundColor: '#f7f8fa', + backgroundColorGray: '#e5e6eb', + backgroundColorError: '#ffece8', + backgroundColorErrorHover: '#fdcdc5', + lineColor: '#e5e8ef', + headerBackground: '#1d2129', + headerBorderBottomWidth: '0', + headerLogoHeight: '28px', + headerProjectColor: '#fff', + headerPaddingLeft: '30px', + headerPaddingRight: '30px', + commonTiming: 'cubic-bezier(0.4, 0, 0.2, 1)', + fontFamily: "'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', 'Microsoft YaHei', '微软雅黑', Arial, sans-serif", + headerHeight: '60px', + pageHeaderHeight: '44px', + contentOuterPadding: '16px', + contentMinHeight: 'calc(100vh - 60px - 16px * 2)', + textFontSizePrimary: '12px', + zIndexLessThanModal: '999', +} + +export default defaultTheme diff --git a/web_console_v2/client/src/styles/variables.less b/web_console_v2/client/src/styles/variables.less deleted file mode 100644 index 06fb8c12b..000000000 --- a/web_console_v2/client/src/styles/variables.less +++ /dev/null @@ -1,322 +0,0 @@ -// -------- Colors palette ----------- -@red-1: #ffece8; -@red-2: #fdcdc5; -@red-3: #fbaca3; -@red-4: #f98981; -@red-5: #f76560; -@red-6: #f53f3f; -@red-7: #cb272d; -@red-8: #a1151e; -@red-9: #770813; -@red-10: #4d000a; -@orangered-1: #fff3e8; -@orangered-2: #fdddc3; -@orangered-3: #fcc59f; -@orangered-4: #faac7b; -@orangered-5: #f99057; -@orangered-6: #f77234; -@orangered-7: #cc5120; -@orangered-8: #a23511; -@orangered-9: #771f06; -@orangered-10: #4d0e00; -@orange-1: #fff7e8; -@orange-2: #ffe4ba; -@orange-3: #ffcf8b; -@orange-4: #ffb65d; -@orange-5: #ff9a2e; -@orange-6: #ff7d00; -@orange-7: #d25f00; -@orange-8: #a64500; -@orange-9: #792e00; -@orange-10: #4d1b00; -@gold-1: #fffce8; -@gold-2: #fdf4bf; -@gold-3: #fce996; -@gold-4: #fadc6d; -@gold-5: #f9cc45; -@gold-6: #f7ba1e; -@gold-7: #cc9213; -@gold-8: #a26d0a; -@gold-9: #774b04; -@gold-10: #4d2d00; -@yellow-1: #feffe8; -@yellow-2: #fefebe; -@yellow-3: #fdfa94; -@yellow-4: #fcf26b; -@yellow-5: #fbe842; -@yellow-6: #fadc19; -@yellow-7: #cfaf0f; -@yellow-8: #a38408; -@yellow-9: #785d03; -@yellow-10: #4d3800; -@lime-1: #fcffe8; -@lime-2: #edf8bb; -@lime-3: #dcf190; -@lime-4: #c9e968; -@lime-5: #b5e241; -@lime-6: #9fdb1d; -@lime-7: #7eb712; -@lime-8: #5f940a; -@lime-9: #437004; -@lime-10: #2a4d00; -@green-1: #e8ffea; -@green-2: #aff0b5; -@green-3: #7be188; -@green-4: #4cd263; -@green-5: #23c343; -@green-6: #00b42a; -@green-7: #009a29; -@green-8: #008026; -@green-9: #006622; -@green-10: #004d1c; -@cyan-1: #e8fffb; -@cyan-2: #b7f4ec; -@cyan-3: #89e9e0; -@cyan-4: #5edfd6; -@cyan-5: #37d4cf; 
-@cyan-6: #14c9c9; -@cyan-7: #0da5aa; -@cyan-8: #07828b; -@cyan-9: #03616c; -@cyan-10: #00424d; -@blue-1: #e8f7ff; -@blue-2: #c3e7fe; -@blue-3: #9fd4fd; -@blue-4: #7bc0fc; -@blue-5: #57a9fb; -@blue-6: #3491fa; -@blue-7: #206ccf; -@blue-8: #114ba3; -@blue-9: #063078; -@blue-10: #001a4d; -@arcoblue-1: #e8f3ff; -@arcoblue-2: #bedaff; -@arcoblue-3: #94bfff; -@arcoblue-4: #6aa1ff; -@arcoblue-5: #4080ff; -@arcoblue-6: #165dff; -@arcoblue-7: #0e42d2; -@arcoblue-8: #072ca6; -@arcoblue-9: #031a79; -@arcoblue-10: #000d4d; -@purple-1: #f5e8ff; -@purple-2: #ddbef6; -@purple-3: #c396ed; -@purple-4: #a871e3; -@purple-5: #8d4eda; -@purple-6: #722ed1; -@purple-7: #551db0; -@purple-8: #3c108f; -@purple-9: #27066e; -@purple-10: #16004d; -@pinkpurple-1: #ffe8fb; -@pinkpurple-2: #f7baef; -@pinkpurple-3: #f08ee6; -@pinkpurple-4: #e865df; -@pinkpurple-5: #e13edb; -@pinkpurple-6: #d91ad9; -@pinkpurple-7: #b010b6; -@pinkpurple-8: #8a0993; -@pinkpurple-9: #650370; -@pinkpurple-10: #42004d; -@magenta-1: #ffe8f1; -@magenta-2: #fdc2db; -@magenta-3: #fb9dc7; -@magenta-4: #f979b7; -@magenta-5: #f754a8; -@magenta-6: #f5319d; -@magenta-7: #cb1e83; -@magenta-8: #a11069; -@magenta-9: #77064f; -@magenta-10: #4d0034; -@dark-red-1: #4d000a; -@dark-red-2: #770611; -@dark-red-3: #a1161f; -@dark-red-4: #cb2e34; -@dark-red-5: #f54e4e; -@dark-red-6: #f76965; -@dark-red-7: #f98d86; -@dark-red-8: #fbb0a7; -@dark-red-9: #fdd1ca; -@dark-red-10: #fff0ec; -@dark-orangered-1: #4d0e00; -@dark-orangered-2: #771e05; -@dark-orangered-3: #a23714; -@dark-orangered-4: #cc5729; -@dark-orangered-5: #f77e45; -@dark-orangered-6: #f9925a; -@dark-orangered-7: #faad7d; -@dark-orangered-8: #fcc6a1; -@dark-orangered-9: #fddec5; -@dark-orangered-10: #fff4eb; -@dark-orange-1: #4d1b00; -@dark-orange-2: #793004; -@dark-orange-3: #a64b0a; -@dark-orange-4: #d26913; -@dark-orange-5: #ff8b1f; -@dark-orange-6: #ff9626; -@dark-orange-7: #ffb357; -@dark-orange-8: #ffcd87; -@dark-orange-9: #ffe3b8; -@dark-orange-10: #fff7e8; -@dark-gold-1: #4d2d00; -@dark-gold-2: #774b04; -@dark-gold-3: #a26f0f; -@dark-gold-4: #cc961f; -@dark-gold-5: #f7c034; -@dark-gold-6: #f9cc44; -@dark-gold-7: #fadc6c; -@dark-gold-8: #fce995; -@dark-gold-9: #fdf4be; -@dark-gold-10: #fffce8; -@dark-yellow-1: #4d3800; -@dark-yellow-2: #785e07; -@dark-yellow-3: #a38614; -@dark-yellow-4: #cfb325; -@dark-yellow-5: #fae13c; -@dark-yellow-6: #fbe94b; -@dark-yellow-7: #fcf374; -@dark-yellow-8: #fdfa9d; -@dark-yellow-9: #fefec6; -@dark-yellow-10: #fefff0; -@dark-lime-1: #2a4d00; -@dark-lime-2: #447006; -@dark-lime-3: #629412; -@dark-lime-4: #84b723; -@dark-lime-5: #a8db39; -@dark-lime-6: #b8e24b; -@dark-lime-7: #cbe970; -@dark-lime-8: #def198; -@dark-lime-9: #eef8c2; -@dark-lime-10: #fdffee; -@dark-green-1: #004d1c; -@dark-green-2: #046625; -@dark-green-3: #0a802d; -@dark-green-4: #129a37; -@dark-green-5: #1db440; -@dark-green-6: #27c346; -@dark-green-7: #50d266; -@dark-green-8: #7ee18b; -@dark-green-9: #b2f0b7; -@dark-green-10: #ebffec; -@dark-cyan-1: #00424d; -@dark-cyan-2: #06616c; -@dark-cyan-3: #11838b; -@dark-cyan-4: #1fa6aa; -@dark-cyan-5: #30c9c9; -@dark-cyan-6: #3fd4cf; -@dark-cyan-7: #66dfd7; -@dark-cyan-8: #90e9e1; -@dark-cyan-9: #bef4ed; -@dark-cyan-10: #f0fffc; -@dark-blue-1: #001a4d; -@dark-blue-2: #052f78; -@dark-blue-3: #134ca3; -@dark-blue-4: #2971cf; -@dark-blue-5: #4699fa; -@dark-blue-6: #5aaafb; -@dark-blue-7: #7dc1fc; -@dark-blue-8: #a1d5fd; -@dark-blue-9: #c6e8fe; -@dark-blue-10: #eaf8ff; -@dark-arcoblue-1: #000d4d; -@dark-arcoblue-2: #041b79; -@dark-arcoblue-3: #0e32a6; 
-@dark-arcoblue-4: #1d4dd2; -@dark-arcoblue-5: #306eff; -@dark-arcoblue-6: #3c7eff; -@dark-arcoblue-7: #689fff; -@dark-arcoblue-8: #93beff; -@dark-arcoblue-9: #bedaff; -@dark-arcoblue-10: #eaf4ff; -@dark-purple-1: #16004d; -@dark-purple-2: #27066e; -@dark-purple-3: #3e138f; -@dark-purple-4: #5a25b0; -@dark-purple-5: #7b3dd1; -@dark-purple-6: #8e51da; -@dark-purple-7: #a974e3; -@dark-purple-8: #c59aed; -@dark-purple-9: #dfc2f6; -@dark-purple-10: #f7edff; -@dark-pinkpurple-1: #42004d; -@dark-pinkpurple-2: #650370; -@dark-pinkpurple-3: #8a0d93; -@dark-pinkpurple-4: #b01bb6; -@dark-pinkpurple-5: #d92ed9; -@dark-pinkpurple-6: #e13ddb; -@dark-pinkpurple-7: #e866df; -@dark-pinkpurple-8: #f092e6; -@dark-pinkpurple-9: #f7c1f0; -@dark-pinkpurple-10: #fff2fd; -@dark-magenta-1: #4d0034; -@dark-magenta-2: #770850; -@dark-magenta-3: #a1176c; -@dark-magenta-4: #cb2b88; -@dark-magenta-5: #f545a6; -@dark-magenta-6: #f756a9; -@dark-magenta-7: #f97ab8; -@dark-magenta-8: #fb9ec8; -@dark-magenta-9: #fdc3db; -@dark-magenta-10: #ffe8f1; -@gray-1: #f7f8fa; -@gray-2: #f2f3f5; -@gray-3: #e5e6eb; -@gray-4: #c9cdd4; -@gray-5: #a9aeb8; -@gray-6: #86909c; -@gray-7: #6b7785; -@gray-8: #4e5969; -@gray-9: #272e3b; -@gray-10: #1d2129; -@dark-gray-1: #17171a; -@dark-gray-2: #2e2e30; -@dark-gray-3: #484849; -@dark-gray-4: #5f5f60; -@dark-gray-5: #78787a; -@dark-gray-6: #929293; -@dark-gray-7: #ababac; -@dark-gray-8: #c5c5c5; -@dark-gray-9: #dfdfdf; -@dark-gray-10: #f6f6f6; - -// -------- Colors ----------- -@primary-color: #286af4; -@info-color: @primary-color; -@success-color: @green-6; -@processing-color: @blue-6; -@error-color: @red-6; -@error-color-light: @red-1; -@highlight-color: @red-5; -@warning-color: @orange-6; -@normal-color: @dark-gray-9; - -@text-color: @gray-8; -@text-color-strong: @gray-10; -@text-color-secondary: @gray-6; -@text-color-disabled: @gray-4; -@text-color-inverse: white; - -@component-background-color-gray: @gray-2; - -@background-color: @gray-1; -@background-color-gray: @gray-3; -@background-color-error: @error-color-light; -@background-color-error-hover: @red-2; - -@line-color: #e5e8ef; - -@header-background: @gray-10; - -// ----------- Transitions ---------------------- -@common-timing: cubic-bezier(0.4, 0, 0.2, 1); - -// ----------- Fonts ----------------------- -@font-family: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', - 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; - -// ---------- Spacing ---------------------- -@header-height: 60px; -@page-header-height: 44px; -@content-outer-padding: 16px; // padding between real content and header&sidebar -@content-min-height: calc(100vh - @header-height - @content-outer-padding * 2); diff --git a/web_console_v2/client/src/styles/variables/bioland.less b/web_console_v2/client/src/styles/variables/bioland.less new file mode 100644 index 000000000..f10d3c7a2 --- /dev/null +++ b/web_console_v2/client/src/styles/variables/bioland.less @@ -0,0 +1,75 @@ +// -------- Colors palette ----------- +@red-1: #ffece8; +@red-2: #fdcdc5; +@red-5: #f76560; +@red-6: #f53f3f; +@orange-6: #ff7d00; +@green-6: #00b42a; +@blue-4: #7bc0fc; +@blue-6: #3491fa; +@gray-1: #f7f8fa; +@gray-2: #f2f3f5; +@gray-3: #e5e6eb; +@gray-4: #c9cdd4; +@gray-6: #86909c; +@gray-8: #4e5969; +@gray-9: #272e3b; +@gray-10: #1d2129; + +// -------- New Colors ----------- +@new-primary-default: #4450dc; +@new-primary-hover: #6171e3; +@new-primary-active: #2b32b8; +@new-primary-disable: #a1b0f1; +@new-primary-hover-gray: #c4cff8; + +// -------- Colors ----------- 
+@primary-color: @new-primary-default; +@info-color: @primary-color; +@success-color: @green-6; +@processing-color: @blue-6; +@error-color: @red-6; +@error-color-light: @red-1; +@highlight-color: @red-5; +@warning-color: @orange-6; +@normal-color: @gray-9; + +@text-color: @gray-8; +@text-color-strong: @gray-10; +@text-color-strong-secondary: #4e5969; +@text-color-secondary: @gray-6; +@text-color-disabled: @gray-4; +@text-color-inverse: white; + +@component-background-color-gray: @gray-2; + +@background-color: @gray-1; +@background-color-gray: @gray-3; +@background-color-error: @error-color-light; +@background-color-error-hover: @red-2; + +@line-color: #e5e8ef; + +@header-background: @primary-color; +@header-border-bottom-width: 0; +@header-logo-height: 42px; +@header-project-color: #fff; +@header-padding-left: 0; +@header-padding-right: 30px; + +// ----------- Transitions ---------------------- +@common-timing: cubic-bezier(0.4, 0, 0.2, 1); + +// ----------- Fonts ----------------------- +@font-family: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', + 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; + +// ---------- Spacing ---------------------- +@header-height: 60px; +@page-header-height: 44px; +@content-outer-padding: 16px; // padding between real content and header&sidebar +@content-min-height: calc(100vh - @header-height - @content-outer-padding * 2); +@text-font-size-primary: 12px; + +// ---------- ZIndex ---------------------- +@z-index-less-than-modal: 999; diff --git a/web_console_v2/client/src/styles/variables/normal.less b/web_console_v2/client/src/styles/variables/normal.less new file mode 100644 index 000000000..668797422 --- /dev/null +++ b/web_console_v2/client/src/styles/variables/normal.less @@ -0,0 +1,75 @@ +// -------- Colors palette ----------- +@red-1: #ffece8; +@red-2: #fdcdc5; +@red-5: #f76560; +@red-6: #f53f3f; +@orange-6: #ff7d00; +@green-6: #00b42a; +@blue-4: #7bc0fc; +@blue-6: #3491fa; +@gray-1: #f7f8fa; +@gray-2: #f2f3f5; +@gray-3: #e5e6eb; +@gray-4: #c9cdd4; +@gray-6: #86909c; +@gray-8: #4e5969; +@gray-9: #272e3b; +@gray-10: #1d2129; + +// -------- New Colors ----------- +@new-primary-default: #1664ff; +@new-primary-hover: #4086ff; +@new-primary-active: #0e49d2; +@new-primary-disable: #94c2ff; +@new-primary-hover-gray: #bedcff; + +// -------- Colors ----------- +@primary-color: @new-primary-default; +@info-color: @primary-color; +@success-color: @green-6; +@processing-color: @blue-6; +@error-color: @red-6; +@error-color-light: @red-1; +@highlight-color: @red-5; +@warning-color: @orange-6; +@normal-color: @gray-9; + +@text-color: @gray-8; +@text-color-strong: @gray-10; +@text-color-strong-secondary: #4e5969; +@text-color-secondary: @gray-6; +@text-color-disabled: @gray-4; +@text-color-inverse: white; + +@component-background-color-gray: #f2f3f8; + +@background-color: @gray-1; +@background-color-gray: @gray-3; +@background-color-error: @error-color-light; +@background-color-error-hover: @red-2; + +@line-color: #e5e8ef; + +@header-background: @gray-10; +@header-border-bottom-width: 0; +@header-logo-height: 28px; +@header-project-color: #fff; +@header-padding-left: 30px; +@header-padding-right: 30px; + +// ----------- Transitions ---------------------- +@common-timing: cubic-bezier(0.4, 0, 0.2, 1); + +// ----------- Fonts ----------------------- +@font-family: 'nunito_for_arco', 'Helvetica Neue', Helvetica, 'PingFang SC', 'Hiragino Sans GB', + 'Microsoft YaHei', '微软雅黑', Arial, sans-serif; + +// ---------- Spacing 
---------------------- +@header-height: 60px; +@page-header-height: 44px; +@content-outer-padding: 16px; // padding between real content and header&sidebar +@content-min-height: calc(100vh - @header-height - @content-outer-padding * 2); +@text-font-size-primary: 12px; + +// ---------- ZIndex ---------------------- +@z-index-less-than-modal: 999; diff --git a/web_console_v2/client/src/typings/algorithm.ts b/web_console_v2/client/src/typings/algorithm.ts new file mode 100644 index 000000000..0d6a9eb6c --- /dev/null +++ b/web_console_v2/client/src/typings/algorithm.ts @@ -0,0 +1,147 @@ +import { DateTimeInfo } from 'typings/app'; +import { ValueType } from 'typings/settings'; + +export interface FileQueryParams { + path: string; +} +export interface FileTreeNode { + filename: string; + path: string; + /** File size */ + size: number; + /** Last Time Modified */ + mtime: number; + is_directory: boolean; + files: FileTreeNode[]; +} + +export enum EnumAlgorithmProjectType { + UNSPECIFIED = 'UNSPECIFIED', + TREE_VERTICAL = 'TREE_VERTICAL', + TREE_HORIZONTAL = 'TREE_HORIZONTAL', + NN_VERTICAL = 'NN_VERTICAL', + NN_HORIZONTAL = 'NN_HORIZONTAL', + TRUSTED_COMPUTING = 'TRUSTED_COMPUTING', + NN_LOCAL = 'NN_LOCAL', +} + +export enum EnumAlgorithmProjectSource { + PRESET = 'PRESET', + USER = 'USER', + THIRD_PARTY = 'THIRD_PARTY', +} + +export enum AlgorithmStatus { + UNPUBLISHED = 'UNPUBLISHED', + PUBLISHED = 'PUBLISHED', +} + +export enum AlgorithmReleaseStatus { + UNRELEASED = 'UNRELEASED', + RELEASED = 'RELEASED', +} + +export enum AlgorithmVersionStatus { + UNPUBLISHED = 'UNPUBLISHED', + PUBLISHED = 'PUBLISHED', + PENDING = 'PENDING', + DECLINED = 'DECLINED', + APPROVED = 'APPROVED', +} + +export type AlgorithmParameter = { + name: string; + value: string; + required: boolean; + display_name: string; + comment: string; + value_type: ValueType; +}; + +export type AlgorithmProject = { + id: ID; + name: string; + project_id: ID; + type: EnumAlgorithmProjectType; + source?: EnumAlgorithmProjectSource | `${EnumAlgorithmProjectSource}`; + creator_id: ID | null; + username: string; + participant_id: ID | null; + participant_name?: string; + path: string; + publish_status: AlgorithmStatus | `${AlgorithmStatus}`; + release_status: AlgorithmReleaseStatus | `${AlgorithmReleaseStatus}`; + algorithms?: Algorithm[]; + parameter: { + variables: AlgorithmParameter[]; + } | null; + favorite?: boolean; + comment: string | null; + latest_version: number; + uuid?: ID; +} & DateTimeInfo; + +export type Algorithm = Omit< + AlgorithmProject, + 'publish_status' | 'release_status' | 'latest_version' +> & { + version: number; + algorithm_project_id: ID; + algorithm_project_uuid?: ID; + status: AlgorithmVersionStatus; + uuid?: ID; + participant_id?: ID | null; +}; + +export interface UploadFileQueryParams { + /** Parent path */ + path: string; + /** File name */ + filename: string; + /** File */ + file: File; +} +export interface UpdateFileQueryParams { + /** Parent path */ + path: string; + /** File name */ + filename: string; + is_directory: boolean; + /** File Content */ + file?: string; +} +export interface RenameFileQueryParams { + /** Old full file Path */ + path: string; + /** New full file path */ + dest: string; +} +export interface DeleteFileQueryParams { + /** Full File path */ + path: string; +} + +export interface FileContent { + path: string; + filename: string; + content: string; +} +export enum OperationType { + ADD = 'ADD', + EDIT = 'EDIT', + DELETE = 'DELETE', + RENAME = 'RENAME', +} + +export interface 
OperationRecord { + type: `${OperationType}`; + path: string; + isFolder: boolean; + content?: string; + newPath?: string; +} +export interface AlgorithmProjectQuery { + keyword?: string; + type?: string; + sources?: string; +} diff --git a/web_console_v2/client/src/typings/app.ts b/web_console_v2/client/src/typings/app.ts index 0563c56e6..ede4f67d3 100644 --- a/web_console_v2/client/src/typings/app.ts +++ b/web_console_v2/client/src/typings/app.ts @@ -1,11 +1,21 @@ /** Federation Learner global types */ import { FedRoles } from 'typings/auth'; +import { ProjectTaskType } from './project'; export interface FedRouteConfig { path: string; component: React.FunctionComponent; exact?: boolean; - auth?: boolean; // whether require logged in + /** Whether login is required */ + auth?: boolean; roles?: FedRoles[]; + async?: boolean; + /** + * Whether the route can be shown; every flagKey in the array must be true + * 1. If a flagKey does not exist in appFlag/localstorage, it is treated as true + * 2. If a flagKey exists in appFlag/localstorage, it must be true there + */ + flagKeys?: string[]; + abilitiesSupport?: ProjectTaskType[]; } export enum FedLanguages { @@ -15,6 +25,47 @@ export enum ErrorCodes { TokenExpired = 422, + Unauthorized = 401, } export type Side = 'self' | 'peer'; + +export interface PageMeta { + /** Current page number */ + current_page: number; + /** Items per page */ + page_size: number; + /** Total page count */ + total_pages: number; + /** Total item count */ + total_items: number; +} + +export type ResponseInfo<T = any> = { + /** Response data */ + data: T; + /** Page meta info */ + page_meta?: PageMeta; + /** Error Code */ + code?: number; + /** Error message */ + message?: string; +}; + +export type APIResponse<T = any> = Promise<ResponseInfo<T>>; + +export interface PageQueryParams { + /** current page, default: 1 */ + page?: number; + /** items per page, default: 10 */ + page_size?: number; +} + +export interface DateTimeInfo { + /** Unix timestamps in seconds */ + created_at: DateTime; + /** Unix timestamps in seconds */ + updated_at: DateTime; + /** Unix timestamps in seconds */ + deleted_at: DateTime | null; +} diff --git a/web_console_v2/client/src/typings/audit.ts b/web_console_v2/client/src/typings/audit.ts new file mode 100644 index 000000000..b7ceffcee --- /dev/null +++ b/web_console_v2/client/src/typings/audit.ts @@ -0,0 +1,85 @@ +import { FedUserInfo } from './auth'; + +export interface Audit { + id: number; + uuid: string; + name: string; + user: FedUserInfo; + user_id: number; + resource_type: string; + resource_name: string; + /** operation type */ + op_type: string; + /** event result */ + result: string; + /** event source */ + source: string; + extra: string | 'null' | null; + request_id: string; + access_key_id: string; + error_code: string; + source_ip: string; + created_at: DateTime; + updated_at: DateTime; + deleted_at: DateTime; + coordinator_pure_domain_name?: string; + event_id?: number; + project_id?: number; + result_code?: string; +} + +export interface AuditQueryParams { + username?: string; + /** event name */ + // name?: string; + // resource_type?: string; + // resource_name?: string; + /** operation type */ + filter?: string; + op_type?: string; + // start_time: DateTime; + // end_time: DateTime; + /** current page, default: 1 */ + page?: number; + /** items per page, default: 10 */ + page_size?: number; +} + +export interface AuditDeleteParams { + event_type?: string; +} + +export enum 
EventType { + INNER = 'inner', + CROSS_DOMAIN = 'cross_domain', +} + +export type QueryParams = { + keyword?: string; + selectType?: SelectType; + startTime?: string; + endTime?: string; + radioType?: RadioType; + crossDomainSelectType?: CrossDomainSelectType; + eventType?: EventType; +}; +export enum RadioType { + ALL = 'all', + WEEK = 'week', + ONE_MONTH = 'one_month', + THREE_MONTHS = 'three_month', +} + +export enum SelectType { + EVENT_NAME = 'name', + RESOURCE_TYPE = 'resource_type', + USER_NAME = 'username', + RESOURCE_NAME = 'resource_name', +} +export enum CrossDomainSelectType { + EVENT_NAME = 'name', + RESOURCE_TYPE = 'resource_type', + COORDINATOR_PURE_DOMAIN_NAME = 'coordinator_pure_domain_name', + RESOURCE_NAME = 'resource_name', + OP_TYPE = 'op_type', +} diff --git a/web_console_v2/client/src/typings/auth.ts b/web_console_v2/client/src/typings/auth.ts index 1ef9b09c0..624abceaf 100644 --- a/web_console_v2/client/src/typings/auth.ts +++ b/web_console_v2/client/src/typings/auth.ts @@ -1,17 +1,48 @@ export interface FedUserInfo { - id: string; + id: number; username: string; name?: string; email?: string; role: FedRoles; + sso_name?: string; } export interface FedLoginFormData { - username: string; - password: string; + username?: string; + password?: string; + /** SSO code */ + code?: string; +} +export interface FedLoginQueryParamsData { + /** If sso_name is provided, the back-end will log in with SSO */ + sso_name?: string; } export enum FedRoles { Admin = 'ADMIN', User = 'USER', } + +export interface Cas { + authorize_url: string; + ticket_url: string; + code_key: string; +} +export interface OAuth { + client_id: string; + authorize_url: string; + access_token_url: string; + user_info_url: string; + logout_url: string; + code_key: string; +} + +export interface FedLoginWay { + name: string; + icon_url: string; + protocol_type: string; + display_name: string; + cas?: Cas; + oauth?: OAuth; + [customProtocol: string]: any; +} diff --git a/web_console_v2/client/src/typings/cleanup.ts b/web_console_v2/client/src/typings/cleanup.ts new file mode 100644 index 000000000..f6ec5f901 --- /dev/null +++ b/web_console_v2/client/src/typings/cleanup.ts @@ -0,0 +1,26 @@ +export enum CleanupState { + WAITING = 'WAITING', + RUNNING = 'RUNNING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', + CANCELED = 'CANCELED', +} +export interface Cleanup { + id: ID; + payload: { + paths: string[]; + }; + resource_id: ID; + resource_type: string; + state: CleanupState; + target_start_at: DateTime; + updated_at: DateTime; + completed_at: DateTime; + created_at: DateTime; +} + +export interface CleanupQueryParams { + filter?: string; + page?: number; + pageSize?: number; +} diff --git a/web_console_v2/client/src/typings/component.ts b/web_console_v2/client/src/typings/component.ts index 874e44946..417b68ad3 100644 --- a/web_console_v2/client/src/typings/component.ts +++ b/web_console_v2/client/src/typings/component.ts @@ -14,3 +14,14 @@ export enum DisplayType { Card = 1, Table = 2, } + +export type InternalNamePath = string[]; + +export type ValidateErrorEntity<Values = any> = { + values: Values; + errorFields: { + name: InternalNamePath; + // errors: string[]; + }[]; + outOfDate: boolean; +}; diff --git a/web_console_v2/client/src/typings/composer.ts b/web_console_v2/client/src/typings/composer.ts new file mode 100644 index 000000000..710da6431 --- /dev/null +++ b/web_console_v2/client/src/typings/composer.ts @@ -0,0 +1,44 @@ +export enum ItemStatus { + ON = 'ON', + OFF = 'OFF', +} + +export enum 
RunnerStatus { + INIT = 'INIT', + RUNNING = 'RUNNING', + DONE = 'DONE', + FAILED = 'FAILED', +} + +export interface SchedulerItem { + id: ID; + name: string; + status: ItemStatus; + cron_config?: string; + last_run_at: DateTime; + retry_cnt: number; + created_at: DateTime; + updated_at: DateTime; + deleted_at?: DateTime; + pipeline: Object; +} + +export interface SchedulerRunner { + id: ID; + item_id: ID; + status: RunnerStatus; + start_at: DateTime; + end_at: DateTime; + created_at: DateTime; + updated_at: DateTime; + deleted_at?: DateTime; + pipeline: Object; + context: Object; + output: Object; +} + +export interface SchedulerQueryParams { + filter?: string; + page?: number; + pageSize?: number; +} diff --git a/web_console_v2/client/src/typings/dataset.ts b/web_console_v2/client/src/typings/dataset.ts index 7c505b82a..145894f6a 100644 --- a/web_console_v2/client/src/typings/dataset.ts +++ b/web_console_v2/client/src/typings/dataset.ts @@ -1,8 +1,38 @@ -export enum DatasetType { +import { Variable } from './variable'; +import { WorkflowState } from './workflow'; + +export enum DatasetType__archived { PSI = 'PSI', STREAMING = 'STREAMING', } +export enum DatasetDataType { + STRUCT = 'TABULAR', + PICTURE = 'IMAGE', + NONE_STRUCTURED = 'NONE_STRUCTURED', +} + +export enum DatasetDataTypeText { + STRUCT = 'Structured data', + PICTURE = 'Image', + NONE_STRUCTURED = 'Unstructured data', +} + +export enum DatasetStateFront { + PROCESSING = 'PROCESSING', + DELETING = 'DELETING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', + PENDING = 'PENDING', +} +export enum DatasetJobState { + PENDING = 'PENDING', + RUNNING = 'RUNNING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', + STOPPED = 'STOPPED', +} + export enum BatchState { NEW = 'NEW', SUCCESS = 'SUCCESS', @@ -15,6 +45,45 @@ export enum FileState { COMPLETED = 'COMPLETED', FAILED = 'FAILED', } +export enum DataJobType { + JOIN = 'join', + ALIGNMENT = 'alignment', +} +export enum DataJobBackEndType { + RSA_PSI_DATA_JOIN = 'RSA_PSI_DATA_JOIN', + LIGHT_CLIENT_RSA_PSI_DATA_JOIN = 'LIGHT_CLIENT_RSA_PSI_DATA_JOIN', + LIGHT_CLIENT_OT_PSI_DATA_JOIN = 'LIGHT_CLIENT_OT_PSI_DATA_JOIN', + OT_PSI_DATA_JOIN = 'OT_PSI_DATA_JOIN', + DATA_JOIN = 'DATA_JOIN', + DATA_ALIGNMENT = 'DATA_ALIGNMENT', + IMPORT_SOURCE = 'IMPORT_SOURCE', + EXPORT = 'EXPORT', + HASH_DATA_JOIN = 'HASH_DATA_JOIN', + ANALYZER = 'ANALYZER', +} + +export enum DatasetJobType { + JOIN = 'JOIN', + ALIGNMENT = 'ALIGNMENT', + IMPORT = 'IMPORT', + EXPORT = 'EXPORT', + ANALYZER = 'ANALYZER', +} + +export enum DataJoinType { + NORMAL = 'normal', + PSI = 'psi', + LIGHT_CLIENT = 'light_client', + LIGHT_CLIENT_OT_PSI_DATA_JOIN = 'light_client_ot_psi_data_join', + OT_PSI_DATA_JOIN = 'ot_psi_data_join', + HASH_DATA_JOIN = 'hash_data_join', +} + +export enum DataJoinToolTipText { + PSI = 'RSA-PSI is a PSI approach based on RSA blind signatures: samples are encrypted and aligned with a public/private key pair and the RSA algorithm. RSA-PSI puts high performance demands on the server and low demands on the client, and suits smaller datasets.', + OT_PSI_DATA_JOIN = 'OT-PSI is a PSI approach based on oblivious transfer. It computes intersections efficiently, so it is recommended for large datasets (e.g. >50 million rows), but it demands more memory and compute.', + HASH_DATA_JOIN = 'Hash-based data join compares the outputs of a shared hash function. It is recommended when the samples have a low confidentiality level.', +} export type FileToImport = { path: string; @@ -22,6 +91,83 @@ mtime: DateTime; }; +export enum DatasetKind { + RAW = 0, + PROCESSED = 1, +} + +export enum DatasetKindLabel { + RAW = 'raw', + PROCESSED = 'processed', +} + +export enum DatasetKindV2 { + RAW = 'raw', + PROCESSED = 'processed', + SOURCE = 'source', + EXPORTED = 'exported', +} + +export const DatasetKindLabelCapitalMapper = { + [DatasetKindLabel.RAW]: 'RAW', +
[DatasetKindLabel.PROCESSED]: 'PROCESSED', +}; + +export enum DatasetKindBackEndType { + RAW = 'RAW', + PROCESSED = 'PROCESSED', + INTERNAL_PROCESSED = 'INTERNAL_PROCESSED', + SOURCE = 'SOURCE', + EXPORTED = 'EXPORTED', +} + +export enum DatasetTabType { + MY = 'my', + PARTICIPANT = 'participant', +} + +export interface FileTreeNode { + filename: string; + path: string; + /** File size */ + size: number; + /** Last Time Modified */ + mtime: number; + is_directory: boolean; + files: FileTreeNode[]; +} + +export enum DataSourceType { + /** local update datasource */ + LOCAL = 'local', + /** hdfs datasource path, e.g. hdfs:///home/xxx */ + HDFS = 'hdfs', + /** file datasource path, e.g. file:///data/xxx */ + FILE = 'file', +} + +export enum DataSourceDataType { + STRUCT = 'TABULAR', + NONE_STRUCTURED = 'NONE_STRUCTURED', + PICTURE = 'IMAGE', +} + +export enum DataSourceDataTypeText { + STRUCT = 'Structured data', + UNSTRUCT = 'Unstructured data', + PICTURE = 'Image', +} + +export enum DataSourceStructDataType { + CSV = 'CSV', + TFRECORDS = 'TFRECORDS', +} +// +export enum DatasetType { + PSI = 'PSI', + STREAMING = 'STREAMING', +} export type DataFile = { state: FileState; source_path: string; @@ -33,8 +179,8 @@ export type DataBatch = { id: number; event_time: DateTime; dataset_id: number; + path?: string; state: BatchState; - move: boolean; // Serialized proto of DatasetBatch details: { files: DataFile[]; @@ -49,24 +195,434 @@ files?: DataFile[]; }; +export type DataBatchV2 = { + id: ID; + name: string; + dataset_id: number; + path?: string; + file_size: number; + num_example: number; + comment?: string | null; + created_at: DateTime; + updated_at: DateTime; + state: DatasetStateFront; + latest_parent_dataset_job_stage_id: ID; + latest_analyzer_dataset_job_stage_id: ID; + has_stages: boolean; +}; + +export enum BatchAnalyzerState { + PROCESSING = 'PROCESSING', + DELETING = 'DELETING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', +} + export type Dataset = { - id: number; + id: ID; + uuid: ID; project_id: ID; name: string; - dataset_type: DatasetType; - comment?: string | null; + dataset_type: DatasetType__archived; + data_source?: string; + comment?: string; created_at: DateTime; updated_at: DateTime; - deleted_at?: DateTime; - data_batches: DataBatch[]; + deleted_at?: DateTime | null; + num_feature?: number; + num_example: number; + path: string; + state_frontend: DatasetStateFront; + dataset_format: DatasetDataType; + dataset_kind: DatasetKindBackEndType; + validation_jsonschema?: object; + schema_errors?: { + check_state: string; + check_error: string; + files: any[]; + }; + parent_dataset_job_id?: ID; + /** File size, bytes */ + file_size: number; + is_published?: boolean; + /** Credits price */ + value?: number; + need_publish?: boolean; + schema_checkers?: DATASET_SCHEMA_CHECKER[]; + store_format?: DataSourceStructDataType; + import_type?: DATASET_COPY_CHECKER; + analyzer_dataset_job_id?: ID; + publish_frontend_state: DatasetRawPublishStatus; + auth_frontend_state: DatasetProcessedAuthStatus; + local_auth_status: DatasetProcessedMyAuthStatus; + participants_info: { + participants_map: { + [key in string]: { + auth_status: DatasetProcessedMyAuthStatus; + }; + }; + }; +}; + +export type ParticipantInfo = { + name: string; + auth_status: DatasetProcessedMyAuthStatus; +}; + +export enum DatasetRawPublishStatus { + UNPUBLISHED = 'UNPUBLISHED', + TICKET_PENDING = 'TICKET_PENDING', + TICKET_DECLINED = 'TICKET_DECLINED', + PUBLISHED = 'PUBLISHED', +} + +export enum 
DatasetProcessedAuthStatus { + TICKET_PENDING = 'TICKET_PENDING', + TICKET_DECLINED = 'TICKET_DECLINED', + AUTH_PENDING = 'AUTH_PENDING', + AUTH_APPROVED = 'AUTH_APPROVED', +} + +export enum DatasetProcessedMyAuthStatus { + PENDING = 'PENDING', + AUTHORIZED = 'AUTHORIZED', + WITHDRAW = 'WITHDRAW', +} + +export type ParticipantDataset = { + /** File size, bytes */ + file_size: number; + format: DatasetDataType; + name: string; + participant_id: ID; + project_id: ID; + updated_at: DateTime; + uuid: ID; + dataset_kind?: DatasetKindBackEndType; + value?: number; +}; + +export type IntersectionDataset = { + id: ID; + name: string; + comment?: string; + status: WorkflowState | `${WorkflowState}`; + kind: DatasetKind; + workflow_id: ID; + project_id: ID; + dataset_id: ID; + dataset_name: string; + job_name: string; + peer_name: string; + num_feature?: number; + num_example?: number; + raw_dataset_num_example?: number; + sample_filesize?: number; + feature_num?: number; + created_at: DateTime; + updated_at: DateTime; + deleted_at?: DateTime | null; path: string; + data_source: string; + /** File size, bytes */ + file_size: number; +}; export type DatasetCreatePayload = Pick< Dataset, - 'name' | 'dataset_type' | 'comment' | 'project_id' ->; + | 'name' + | 'dataset_type' + | 'comment' + | 'project_id' + | 'is_published' + | 'value' + | 'need_publish' + | 'schema_checkers' +> & { + time_range?: { + days?: number; + hours?: number; + }; +}; export type DataBatchImportPayload = { - files: FileToImport[]; -} & Pick<DataBatch, 'event_time' | 'move'>; + need_schema_check?: boolean; + json_schema?: string; + data_source_id?: ID; +}; + +export type DatasetEditDisplay = Pick<Dataset, 'name' | 'comment'>; +export type DatasetEditPayload = Pick<DatasetEditDisplay, 'comment'>; + +export type PreviewDataSample = (number | string)[]; + +export type PreviewDataMetric = { + count: string; + mean: string; + stddev: string; + min: string; + max: string; + missing_count: string; +}; + +export type ImageDetail = { + annotation: { caption?: string; label: string }; + created_at: string; + file_name?: string; + height: number; + name: string; + path: string; + width: number; + /** FE custom field, `/api/v2/image?name=${path}` */ + uri: string; +}; + +export type ValueType = 'bigint' | 'float' | 'string' | 'int' | 'double'; + +export type PreviewData = { + dtypes?: { + key: string; + value: ValueType; + }[]; + count?: number; + sample?: PreviewDataSample[]; + metrics?: { + [key: string]: PreviewDataMetric; + }; + images?: ImageDetail[]; +}; + +export type FeatureMetric = { + name: string; + metrics: { [key: string]: string }; + hist: { x: number[]; y: number[] }; +}; + +export enum ExportStatus { + INIT = 'INIT', + RUNNING = 'RUNNING', + DONE = 'DONE', + FAILED = 'FAILED', +} + +export type ExportInfo = { + status: ExportStatus; + start_time: DateTime; + end_time: DateTime | null; + export_path: string; + dataset_id: ID; +}; + +export type DataSource = { + id: ID; + uuid: string; + project_id: ID; + name: string; + comment?: string; + type: string; + url: string; + created_at: DateTime; + is_user_upload?: boolean; + dataset_format: `${DataSourceDataType}`; + store_format: `${DataSourceStructDataType}`; + dataset_type: `${DatasetType}`; +}; + +export type DataSourceCreatePayload = { + project_id: ID; + data_source: { + name: string; + data_source_url: string; + data_source_type?: DataSourceType; + is_user_upload?: boolean; + dataset_format?: DataSourceDataType; + store_format?: DataSourceStructDataType; + 
dataset_type?: DatasetType; + }; +}; +export type ProcessedDatasetCreatePayload = { + dataset_job_parameter: { + dataset_job_kind: DataJobBackEndType; + global_configs: { + [domainName: string]: { + dataset_uuid: ID; + variables: DataJobVariable[]; + }; + }; + }; + processed_dataset: { + name: string; + comment: string; + }; +}; + +export type DatasetJobCreatePayload = { + dataset_job_parameter: { + dataset_job_kind: DataJobBackEndType; + global_configs: { + [domainName: string]: { + dataset_uuid: ID; + variables: DataJobVariable[]; + }; + }; + }; + output_dataset_id: ID; + time_range?: { + days?: number; + hours?: number; + }; +}; + +export type DataSourceEditPayload = DataSourceCreatePayload; + +export type DataSourceCheckConnectionPayload = { + data_source_url: string; + file_num?: number; + dataset_type?: DatasetType; +}; + +export type DataJobVariable = Omit<Variable, 'widget_schema'> & { + widget_schema: string; +}; + +export type GlobalConfigs = { + [domainName: string]: { + dataset_uuid: string; + variables: DataJobVariable[]; + }; +}; + +export type DatasetJob = { + global_configs: { + global_configs: GlobalConfigs; + }; + kind: DataJobBackEndType; + project_id: ID; + result_dataset_name: string; + result_dataset_uuid: string; + uuid: string; + workflow_id: string; + input_data_batch_num_example: number; + output_data_batch_num_example: number; + state: DatasetJobState; + coordinator_id: ID; + created_at: DateTime; + creator_username: string; + finished_at: DateTime; + updated_at: DateTime; + started_at: DateTime; + name: string; + has_stages: boolean; + scheduler_state: DatasetJobSchedulerState; + time_range?: { + days?: number; + hours?: number; + }; + scheduler_message?: string; +}; + +export enum DatasetJobSchedulerState { + PENDING = 'PENDING', + RUNNABLE = 'RUNNABLE', + STOPPED = 'STOPPED', +} + +export type DatasetJobStage = { + id: ID; + name: string; + dataset_job_id: ID; + data_batch_id: ID; + state: DatasetJobState; + created_at: DateTime; + started_at: DateTime; + finished_at: DateTime; + kind: DataJobBackEndType; + output_data_batch_id: ID; + input_data_batch_num_example: number; + output_data_batch_num_example: number; + workflow_id: ID; + global_configs: { + global_configs: GlobalConfigs; + }; +}; + +export type DatasetJobListItem = { + name: string; + uuid: string; + project_id: ID; + result_dataset_id: ID; + result_dataset_name: string; + state: DatasetJobState; + kind: DataJobBackEndType; + created_at: DateTime; + id?: ID; + coordinator_id?: ID; + has_stages: boolean; +}; + +export type DatasetJobStop = { + code: number; + data?: { + id?: ID; + input_data_batch_num_example?: string; + output_data_batch_num_example?: string; + workflow_id?: ID; + coordinator_id?: ID; + created_at?: DateTime; + updated_at?: DateTime; + kind?: string; + state?: string; + finished_at?: DateTime; + global_configs?: any; + message?: string; + }; +}; + +export enum DatasetTransactionStatus { + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', + PROCESSING = 'PROCESSING', +} + +export type TransactionExtraData = { + transaction_info: string; + dataset_uuid: ID; +}; + +export type DatasetTransactionItem = { + trans_hash: string; + block_number: number; + trans_index: number; + sender_name: string; + receiver_name: string; + value: number; + extra_data: TransactionExtraData; + timestamp: number; + status: DatasetTransactionStatus; +}; + +export type DatasetLedger = { + total_value: number; + transactions: DatasetTransactionItem[]; +}; + +export type ExportDataset = { + dataset_job_id: ID; + 
export_dataset_id: ID; +}; + +export enum DatasetForceState { + RUNNING = 'RUNNING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', +} + +export enum DATASET_SCHEMA_CHECKER { + RAW_ID_CHECKER = 'RAW_ID_CHECKER', + NUMERIC_COLUMNS_CHECKER = 'NUMERIC_COLUMNS_CHECKER', +} + +export enum DATASET_COPY_CHECKER { + COPY = 'COPY', + NONE_COPY = 'NO_COPY', +} diff --git a/web_console_v2/client/src/typings/filter.ts b/web_console_v2/client/src/typings/filter.ts new file mode 100644 index 000000000..cd3a0b752 --- /dev/null +++ b/web_console_v2/client/src/typings/filter.ts @@ -0,0 +1,28 @@ +export enum FilterOp { + UNKNOWN = 'UNKNOWN', // 0 + EQUAL = 'EQUAL', // = + IN = 'IN', // : + CONTAIN = 'CONTAIN', // ~= + GREATER_THAN = 'GREATER_THAN', // > + LESS_THAN = 'LESS_THAN', // < +} + +export enum FilterExpressionKind { + BASIC = 'basic', + AND = 'and', + OR = 'or', +} + +export interface SimpleExpression { + field: string; + op: FilterOp; + bool_value?: boolean; + string_value?: string; + number_value?: number; +} + +export interface FilterExpression { + kind: FilterExpressionKind; + simple_exp?: SimpleExpression; + exps?: FilterExpression[]; +} diff --git a/web_console_v2/client/src/typings/flag.ts b/web_console_v2/client/src/typings/flag.ts new file mode 100644 index 000000000..fc942f2f3 --- /dev/null +++ b/web_console_v2/client/src/typings/flag.ts @@ -0,0 +1,28 @@ +export enum FlagKey { + USER_MANAGEMENT_ENABLED = 'user_management_enabled', + LOGO_URL = 'logo_url', + PRESET_TEMPLATE_EDIT_ENABLED = 'preset_template_edit_enabled', + BCS_SUPPORT_ENABLED = 'bcs_support_enabled', + TRUSTED_COMPUTING_ENABLED = 'trusted_computing_enabled', + DASHBOARD_ENABLED = 'dashboard_enabled', + DATASET_STATE_FIX_ENABLED = 'dataset_state_fix_enabled', + HASH_DATA_JOIN_ENABLED = 'hash_data_join_enabled', + HELP_DOC_URL = 'help_doc_url', + REVIEW_CENTER_CONFIGURATION = 'review_center_configuration', + MODEL_JOB_GLOBAL_CONFIG_ENABLED = 'model_job_global_config_enabled', +} + +export interface Flag { + [FlagKey.USER_MANAGEMENT_ENABLED]: boolean; + [FlagKey.LOGO_URL]: string; + [FlagKey.PRESET_TEMPLATE_EDIT_ENABLED]: boolean; + [FlagKey.BCS_SUPPORT_ENABLED]: boolean; + [FlagKey.TRUSTED_COMPUTING_ENABLED]: boolean; + [FlagKey.DASHBOARD_ENABLED]: boolean; + [FlagKey.DATASET_STATE_FIX_ENABLED]: boolean; + [FlagKey.HASH_DATA_JOIN_ENABLED]: boolean; + [FlagKey.MODEL_JOB_GLOBAL_CONFIG_ENABLED]: boolean; + [FlagKey.HELP_DOC_URL]: string; + [FlagKey.REVIEW_CENTER_CONFIGURATION]: string; + [key: string]: boolean | string; +} diff --git a/web_console_v2/client/src/typings/formily.ts b/web_console_v2/client/src/typings/formily.ts index 26ff4a72c..f9dc72d99 100644 --- a/web_console_v2/client/src/typings/formily.ts +++ b/web_console_v2/client/src/typings/formily.ts @@ -1,7 +1,6 @@ export type SchemaMessage = React.ReactNode; export type FormilyProperties = { [key: string]: FormilySchema }; - export interface FormilySchema { title?: SchemaMessage; description?: SchemaMessage; diff --git a/web_console_v2/client/src/typings/global.d.ts b/web_console_v2/client/src/typings/global.d.ts index 6a4e71505..c6b770977 100644 --- a/web_console_v2/client/src/typings/global.d.ts +++ b/web_console_v2/client/src/typings/global.d.ts @@ -1,6 +1,9 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ declare type ID = number | string; +declare type TimeoutID = ReturnType<typeof setTimeout> | undefined; + +declare type Position = 'left' | 'right' | 'top' | 'bottom'; declare type DateTime = number; declare namespace JSX { @@ -8,6 +11,7 @@ declare 
namespace JSX { key?: string | number; style?: React.CSSProperties; role?: string; + id?: any; } } @@ -24,6 +28,9 @@ declare namespace NodeJS { } } +declare module 'mpld3'; +declare module 'mpld3/d3.v5.min.js'; + declare module 'ip-port-regex' { declare const ip: { (options?: { exact?: boolean }): RegExp; @@ -93,6 +100,11 @@ declare module '*.module.sass' { export default classes; } +declare module '*.module.less' { + const classes: { readonly [key: string]: string }; + export default classes; +} + /** * The **ResizeObserver** interface reports changes to the dimensions of an * [Element](https://developer.mozilla.org/en-US/docs/Web/API/Element)'s content diff --git a/web_console_v2/client/src/typings/job.ts b/web_console_v2/client/src/typings/job.ts index 415ab4dd6..e712763c1 100644 --- a/web_console_v2/client/src/typings/job.ts +++ b/web_console_v2/client/src/typings/job.ts @@ -4,7 +4,7 @@ export enum JobState { INVALID = 'INVALID', NEW = 'NEW', WAITING = 'WAITING', - /** @deprecated RUNNING changes to STARTED*/ + /** @deprecated RUNNING changes to STARTED */ RUNNING = 'RUNNING', STARTED = 'STARTED', COMPLETED = 'COMPLETED', @@ -18,9 +18,11 @@ export enum JobType { DATA_JOIN = 'DATA_JOIN', PSI_DATA_JOIN = 'PSI_DATA_JOIN', NN_MODEL_TRANINING = 'NN_MODEL_TRANINING', - TREE_MODEL_TRAINING = 'TREE_MODEL_TRAINING', NN_MODEL_EVALUATION = 'NN_MODEL_EVALUATION', + TREE_MODEL_TRAINING = 'TREE_MODEL_TRAINING', TREE_MODEL_EVALUATION = 'TREE_MODEL_EVALUATION', + TRANSFORMER = 'TRANSFORMER', // debug + ANALYZER = 'ANALYZER', } export interface JobDependency { @@ -34,11 +36,11 @@ export interface Job { is_federated: boolean; variables: Variable[]; dependencies: JobDependency[]; + /** If not under easy_mode, you MUST pass yaml_template manually */ + easy_mode?: boolean; yaml_template?: string; } -export type JobDefinitionForm = Omit<Job, 'dependencies'>; - export enum PodState { RUNNING = 'RUNNING', SUCCEEDED = 'SUCCEEDED', @@ -64,6 +66,7 @@ export interface Pod { name: string; pod_ip: string; state: PodState; + creation_timestamp: DateTime; /** @deprecated */ status?: PodState; pod_type: string; @@ -77,6 +80,11 @@ export enum CreateJobFlag { DISABLED = 3, } +export interface ErrorMessage { + app: string; + pods: any; +} + export interface JobExecutionDetalis { id: number; name: string; @@ -88,7 +96,8 @@ export interface JobExecutionDetalis { created_at: number; updated_at: number; deleted_at: number; - error_message?: string; + error_message?: ErrorMessage; completed_at?: number; yaml_template?: string; + start_at?: number; } diff --git a/web_console_v2/client/src/typings/kibana.ts b/web_console_v2/client/src/typings/kibana.ts index f71ddf34f..dbe56fd73 100644 --- a/web_console_v2/client/src/typings/kibana.ts +++ b/web_console_v2/client/src/typings/kibana.ts @@ -32,6 +32,19 @@ export enum KibanaQueryFields { split = 'split', } -export type KibanaQueryParams = Partial<Record<KibanaQueryFields, any>>; +export type KibanaQueryParams = { + type?: KibanaChartType; + interval?: string; + x_axis_field?: string; + query?: string; + start_time?: number; + end_time?: number; + numerator?: string; + denominator?: string; + aggregator?: KibanaAggregator; + value_field?: string; + timer_names?: string; + split?: boolean; +}; export type KiabanaMetrics = [DateTime, number][]; diff --git a/web_console_v2/client/src/typings/modelCenter.ts b/web_console_v2/client/src/typings/modelCenter.ts new file mode 100644 index 000000000..46badea89 --- /dev/null +++ b/web_console_v2/client/src/typings/modelCenter.ts @@ -0,0 +1,482 
@@ +import { DateTimeInfo } from 'typings/app'; +import { Workflow, WorkflowConfig, WorkflowState } from 'typings/workflow'; +import { JobExecutionDetalis } from 'typings/job'; +import { EnumAlgorithmProjectType } from './algorithm'; +import { Variable, VariableWidgetSchema } from './variable'; + +export enum ModelManagementTabType { + SET = 'model-set', + FAVOURITE = 'model-favourit', +} +export enum ModelEvaluationTabType { + EVALUATION = 'evaluation', + COMPARE = 'compare', +} + +export enum AlgorithmManagementTabType { + MY = 'my', + BUILT_IN = 'built-in', + PARTICIPANT = 'participant', +} + +export enum AlgorithmDetailTabType { + PREVIEW = 'preview', + CHANGE_LOG = 'change-log', +} + +export enum ResourceTemplateType { + HIGH = 'high', + MEDIUM = 'medium', + LOW = 'low', + CUSTOM = 'custom', +} + +export enum FederationType { + CROSS_SAMPLE = 'cross_sample', + CROSS_FEATURE = 'cross_feature', +} +export enum UploadType { + PATH = 'path', + LOCAL = 'local', +} + +export enum LossType { + LOGISTIC = 'logistic', + MSE = 'mse', +} + +export enum AlgorithmType { + TREE = 'tree', + NN = 'nn', +} + +export enum FederalType { + VERTICAL = 'vertical', + HORIZONTAL = 'horizontal', +} + +export enum Role { + LEADER = 'leader', + FOLLOWER = 'follower', +} +export enum RoleUppercase { + LEADER = 'Leader', + FOLLOWER = 'Follower', +} +export enum TrainRoleType { + LABEL = RoleUppercase.LEADER, + FEATURE = RoleUppercase.FOLLOWER, +} + +export { WorkflowState as ModelJobState }; + +export enum ModelJobRole { + COORDINATOR = 'COORDINATOR', + PARTICIPANT = 'PARTICIPANT', +} + +export type ModelJobType = + | 'UNSPECIFIED' + | 'NN_TRAINING' + | 'TREE_TRAINING' + | 'NN_EVALUATION' + | 'TREE_EVALUATION' + | 'NN_PREDICTION' + | 'TREE_PREDICTION' + | 'TRAINING' + | 'EVALUATION' + | 'PREDICTION'; + +export type ModelSet = { + id: number; + name: string; + comment: string | null; + created_at: DateTime; + updated_at: DateTime; + deleted_at: DateTime; + extra: any; + + algorithm?: string; + version?: string; + creator?: string; + project_id?: string; + isCompareReport?: boolean; + selectedModelIdList?: string[]; +}; + +export type ModelSetCreatePayload = { + name: string; + project_id: ID; + comment?: string; + extra?: any; +}; +export type ModelSetUpdatePayload = { + comment?: string; + extra?: any; +}; + +export type ModelJobGroup = { + id: ID; + uuid: ID; + project_id: ID; + name: string; + comment: string | null; + role: ModelJobRole | `${ModelJobRole}`; + authorized: boolean; + intersection_dataset_id: ID; + dataset_id: ID; + algorithm_type: EnumAlgorithmProjectType | `${EnumAlgorithmProjectType}`; + algorithm_project_id: ID; + algorithm_id: ID; + configured: boolean; + config?: WorkflowConfig; + latest_job_state: ModelJobStatus; + latest_version: number; + model_jobs?: ModelJob[]; + creator_username: string; + coordinator_id?: ID; + auth_frontend_status?: ModelGroupStatus; + participants_info?: { participants_map: Record<string, any> }; + auto_update_status?: AutoModelJobStatus; + + // TODO: explicit type + extra: unknown; + + /** TODO: custom field, will be deleted soon */ + creator: string; + cron_config: string; + algorithm_project_uuid_list: + | { algorithm_projects: { [pure_domain_name: string]: ID } } + | undefined; +} & DateTimeInfo; +export type ModelJobGroupCreatePayload = { + name: string; + algorithm_type: EnumAlgorithmProjectType | `${EnumAlgorithmProjectType}`; + dataset_id?: ID; +}; +export type ModelJobGlobalConfig = { + dataset_uuid?: ID; + global_config: { + [pure_domain_name: string]: { 
+ algorithm_uuid?: ID; + // TODO: support param algorithm_project_uuid + algorithm_project_uuid?: ID; + algorithm_parameter?: { ['variables']: AlgorithmParams[] | undefined }; + variables: Variable[]; + }; + }; +}; +export type ModelJobGroupUpdatePayload = { + authorized?: boolean; + dataset_id?: ID; + algorithm_id?: ID; + config?: WorkflowConfig; + comment?: string; + cron_config?: string; + global_config?: ModelJobGlobalConfig; +}; +export type PeerModelJobGroupUpdatePayload = { + config?: WorkflowConfig; + global_config?: ModelJobGlobalConfig; +}; + +export type ModelJobMetrics = { + train: { + [key: string]: + | [number[], number[]] + | { + steps: number[]; + values: number[]; + }; + }; + eval: { + [key: string]: + | [number[], number[]] + | { + steps: number[]; + values: number[]; + }; + }; + feature_importance: { [featureName: string]: number }; + confusion_matrix?: { + /** True Positive, position: top left */ + tp: number; + /** False Positive, position: top right */ + fp: number; + /** False Negative, position: bottom left */ + fn: number; + /** True Negative, position: bottom right */ + tn: number; + }; +}; + +export type Item = { + label: string; + value: number | null; +}; +export type FormattedModelJobMetrics = { + train: Item[]; + eval: Item[]; + feature_importance: Item[]; + confusion_matrix?: Array<Item & { percentValue: string }>; + trainMaxValue: number; + evalMaxValue: number; + featureImportanceMaxValue: number; +}; + +export type ModelJobQueryParams = { + projectId?: ID; + types?: Array<ModelJobType>; +}; + +export interface ModelJobQueryParams_new { + keyword?: string; + project_id?: string; + group_id?: string; + algorithm_types?: AlgorithmType[]; + states?: WorkflowState[]; + page?: number; + page_size?: number; + types?: 'EVALUATION' | 'PREDICTION'; + role?: string; + filter?: string; +} + +export type ModelJob = { + id: number; + uuid: ID; + name: string; + comment: string; + model_job_type: ModelJobType; + model_name?: string; + state: WorkflowState; + status: ModelJobStatus; + job_name: string; + job_id: number; + workflow_id: number; + workflow_uuid: ID; + /** model set id */ + group_id: number; + project_id: number; + code_id: number; + dataset_id: number; + dataset_name?: string; + intersection_dataset_id: number; + intersection_dataset_name?: string; + model_id: number; + params: any; + algorithm_type: EnumAlgorithmProjectType; + algorithm_id: ID; + role: 'COORDINATOR' | 'PARTICIPANT'; + config: WorkflowConfig; + parent?: Model; + metrics: ModelJobMetrics; + version: number; + + extra: any; + local_extra: any; + detail_level: any[]; + + created_at: DateTime; + updated_at: DateTime; + deleted_at: DateTime; + started_at?: DateTime; + stopped_at?: DateTime; + + job: JobExecutionDetalis; + workflow: Workflow; + + /** no source field */ + 'model.name'?: string; + 'model.desc'?: string; + 'model.dataset_id'?: any; + 'model.group_id'?: any; + 'model.parent_job_name'?: string; + + formattedMetrics?: FormattedModelJobMetrics; + + output_models: Model[]; + creator_username: string; + metric_is_public: boolean; + global_config?: ModelJobGlobalConfig; + auto_update?: boolean; + data_batch_id?: ID; + auth_status?: string; +}; + +export type ModelUpdatePayload = { + comment?: string; +}; + +export type ModelType = 'UNSPECIFIED' | 'TREE_MODEL' | 'NN_MODEL'; + +export type Model = { + id: number; + uuid: string; + name: string; + comment: string; + model_type: ModelType; + model_path: string; + // TODO: federated_type + federated_type: string; + version: number | null; + 
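Editor's note — the `confusion_matrix` layout fixed above (tp top-left, fp top-right, fn bottom-left, tn bottom-right) is enough to derive the usual binary-classification scores on the client. A minimal sketch; `deriveBinaryMetrics` is an illustrative helper, not part of this patch:

```typescript
type ConfusionMatrix = { tp: number; fp: number; fn: number; tn: number };

// Derive the standard binary-classification scores, guarding against
// empty denominators so an all-zero matrix does not produce NaN.
function deriveBinaryMetrics({ tp, fp, fn, tn }: ConfusionMatrix) {
  const precision = tp + fp === 0 ? 0 : tp / (tp + fp);
  const recall = tp + fn === 0 ? 0 : tp / (tp + fn);
  const total = tp + fp + fn + tn;
  const accuracy = total === 0 ? 0 : (tp + tn) / total;
  const f1 = precision + recall === 0 ? 0 : (2 * precision * recall) / (precision + recall);
  return { precision, recall, accuracy, f1 };
}

// deriveBinaryMetrics({ tp: 90, fp: 10, fn: 5, tn: 95 })
// => precision 0.9, recall ≈0.947, accuracy 0.925, f1 ≈0.923
```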
favorite: boolean; + /** Model set id */ + group_id: ID | null; + project_id: ID | null; + model_job_id: ID | null; + model_job_name: string | null; + job_id: ID | null; + job_name: string | null; + workflow_id: ID | null; + workflow_name: string | null; +} & DateTimeInfo; + +export type Algorithm = { + id: number; + name: string; + comment: string; + state: string; + type: string; + file: string; + file_owner: string; + file_no_owner: string; + project_id?: ID; + + creator: string; + created_at: DateTime; + updated_at: DateTime; + deleted_at: DateTime; +}; + +export type AlgorithmChangeLog = { + id: number; + comment: string; + creator: string; + created_at: DateTime; + updated_at: DateTime; + deleted_at: DateTime; +}; + +export type FakeAlgorithm = { + id: number; + name: string; + value: string; + comment: string; + type: ModelType; + + created_at: DateTime; + updated_at: DateTime; + deleted_at: DateTime; +}; + +export type AlgorithmParams = { + id?: string; + name: string; + display_name: string; + required: boolean; + comment: string; + value: string; +}; + +export interface ModelJobCreateFormData { + name: string; + model_job_type: ModelJobType; + algorithm_type: AlgorithmType; + algorithm_id: string; + eval_model_job_id?: string; + dataset_id: string; + config: Record<string, any>; + comment?: string; + model_id?: string; +} + +export type ModelJobPatchFormData = Pick< + ModelJobCreateFormData, + 'algorithm_id' | 'dataset_id' | 'config' | 'comment' +>; + +export interface ModelJobGroupCreateForm { + name: string; + comment: string | undefined; + dataset_id: ID; + algorithm_type: EnumAlgorithmProjectType; + algorithm_project_list?: { algorithmProjects: { [pure_domain_name: string]: ID } }; + loss_type?: LossType; +} +export interface ModelJobTrainCreateForm { + name: string; + model_job_type: ModelJobType; + algorithm_type?: EnumAlgorithmProjectType; + dataset_id?: string; + comment?: string; + global_config?: ModelJobGlobalConfig; + group_id?: ID; + data_batch_id?: ID; +} + +export interface ModelJobDefinitionQueryParams { + model_job_type: string; + algorithm_type: string; +} + +export interface ModelJobDefinitionResult { + is_federated: boolean; + variables: Variable[]; +} + +export type ModelJobVariable = Omit<Variable, 'widget_schema'> & { + widget_schema: string | VariableWidgetSchema; +}; +// TODO: 等后端定义 +export enum ModelGroupStatus { + TICKET_PENDING = 'TICKET_PENDING', + TICKET_DECLINE = 'TICKET_DECLINE', + CREATE_PENDING = 'CREATE_PENDING', + CREATE_FAILED = 'CREATE_FAILED', + SELF_AUTH_PENDING = 'SELF_AUTH_PENDING', + PART_AUTH_PENDING = 'PART_AUTH_PENDING', + ALL_AUTHORIZED = 'ALL_AUTHORIZED', +} + +export enum ModelJobAuthStatus { + TICKET_PENDING = 'TICKET_PENDING', + TICKET_DECLINE = 'TICKET_DECLINE', + CREATE_PENDING = 'CREATE_PENDING', + CREATE_FAILED = 'CREATE_FAILED', + SELF_AUTH_PENDING = 'SELF_AUTH_PENDING', + PART_AUTH_PENDING = 'PART_AUTH_PENDING', + ALL_AUTHORIZED = 'ALL_AUTHORIZED', +} + +export enum EnumModelJobType { + TRAINING = 'TRAINING', + EVALUATION = 'EVALUATION', + PREDICTION = 'PREDICTION', +} + +export type TRouteParams = { + id: string; + step: keyof typeof CreateSteps; + type: string; +}; +export enum CreateSteps { + coordinator = 1, + participant = 2, +} + +export enum AutoModelJobStatus { + INITIAL = 'INITIAL', + ACTIVE = 'ACTIVE', + STOPPED = 'STOPPED', +} +export enum ModelJobStatus { + PENDING = 'PENDING', + CONFIGURED = 'CONFIGURED', + ERROR = 'ERROR', + RUNNING = 'RUNNING', + STOPPED = 'STOPPED', + SUCCEEDED = 'SUCCEEDED', + FAILED = 
'FAILED', + UNKNOWN = 'UNKNOWN', +} diff --git a/web_console_v2/client/src/typings/modelServing.ts b/web_console_v2/client/src/typings/modelServing.ts new file mode 100644 index 000000000..bbb8aa84a --- /dev/null +++ b/web_console_v2/client/src/typings/modelServing.ts @@ -0,0 +1,80 @@ +import { PageQueryParams, DateTimeInfo } from 'typings/app'; +import { ModelType } from 'typings/modelCenter'; + +export enum ModelDirectionType { + VERTICAL = 'vertical', + HORIZONTAL = 'horizontal', +} +export enum ModelServingState { + UNKNOWN = 'UNKNOWN', + LOADING = 'LOADING', + AVAILABLE = 'AVAILABLE', + UNLOADING = 'UNLOADING', + DELETED = 'DELETED', + PENDING_ACCEPT = 'PENDING_ACCEPT', + WAITING_CONFIG = 'WAITING_CONFIG', +} +export enum ModelServingInstanceState { + AVAILABLE = 'AVAILABLE', + UNAVAILABLE = 'UNAVAILABLE', +} + +export enum ModelServingDetailTabType { + USER_GUIDE = 'user-guide', + INSTANCE_LIST = 'instance-list', +} + +export interface ModelServingQueryParams extends PageQueryParams { + name?: string; + project_id?: ID; + keyword?: string; + filter?: string; + order_by?: string; +} + +export interface ModelServingInstance extends DateTimeInfo { + name: string; + status: ModelServingInstanceState; + cpu: string; + memory: string; +} + +export interface ModelServingResource { + cpu: string; + memory: string; + replicas: number; +} + +export interface ModelServing extends DateTimeInfo { + id: number; + project_id: number; + name: string; + comment: string; + instances: ModelServingInstance[]; + deployment_id: number; + resource: ModelServingResource; + model_id: number; + is_local: boolean; // horizontal: true, vertical: false + support_inference: boolean; + model_type: ModelType; + + status: ModelServingState; + /** URL */ + endpoint: string; + /** API Input and Output */ + signature: string; + extra: any; + instance_num_status: string; + model_group_id?: number; + psm?: string; + remote_platform?: RemotePlatform; +} + +export interface UserTypeInfo { + type: string; +} + +export interface RemotePlatform { + payload: string; + platform: string; +} diff --git a/web_console_v2/client/src/typings/operation.ts b/web_console_v2/client/src/typings/operation.ts new file mode 100644 index 000000000..be3f3b321 --- /dev/null +++ b/web_console_v2/client/src/typings/operation.ts @@ -0,0 +1,30 @@ +export enum Role { + COORDINATOR = 'coordinator', + PARTICIPANT = 'participant', +} + +export type JobGroupFetchPayload = { + role: string; + name_prefix: string; + project_name: string; + e2e_image_url: string; + fedlearner_image_uri: string; + platform_endpoint?: string; +}; + +export type JobItem = { + job_name: string; + job_type: string; +}; + +export type JobInfo = { + job_name: string; + log: Array<string>; + status: object; +}; + +export type Dashboard = { + name: string; + url: string; + uuid: string; +}; diff --git a/web_console_v2/client/src/typings/participant.ts b/web_console_v2/client/src/typings/participant.ts new file mode 100644 index 000000000..66dce962e --- /dev/null +++ b/web_console_v2/client/src/typings/participant.ts @@ -0,0 +1,71 @@ +export enum ParticipantType { + PLATFORM = 'PLATFORM', + LIGHT_CLIENT = 'LIGHT_CLIENT', +} + +export interface UpdateParticipantPayload { + name?: string; + domain_name?: string; + grpc_ssl_server_host: string; + host?: string; + port?: number; + certificates?: string; + comment?: string; + type: ParticipantType | `${ParticipantType}`; +} + +export interface CreateParticipantPayload { + name: string; + domain_name: string; + is_manual_configured: boolean; + 
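Editor's note — list views typically collapse the `ModelServingState` lifecycle above into a handful of display states. A hedged sketch of such a mapping; the indicator type names mirror the `StateTypes` values used with `StateIndicator` elsewhere in this patch, but the helper itself is ours:

```typescript
import { ModelServingState } from 'typings/modelServing';

// Collapse the serving lifecycle into the few indicator styles a list needs.
function servingStateToIndicator(
  state: ModelServingState,
): { type: 'gold' | 'success' | 'error' | 'processing'; text: string } {
  switch (state) {
    case ModelServingState.AVAILABLE:
      return { type: 'success', text: 'Available' };
    case ModelServingState.LOADING:
    case ModelServingState.UNLOADING:
      return { type: 'processing', text: 'In transition' };
    case ModelServingState.PENDING_ACCEPT:
    case ModelServingState.WAITING_CONFIG:
      return { type: 'gold', text: 'Awaiting configuration' };
    default:
      // UNKNOWN and DELETED both surface as an error state here.
      return { type: 'error', text: 'Unavailable' };
  }
}
```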
grpc_ssl_server_host?: string; + host?: string; + port?: number; + certificates?: string; + comment?: string; + type: ParticipantType | `${ParticipantType}`; +} + +export interface Participant { + id: number; + name: string; + domain_name: string; + pure_domain_name: string; + extra: { + is_manual_configured: boolean; + grpc_ssl_server_host?: string; + }; + host?: string; + port?: number; + certificates?: string; + comment?: string | null; + created_at?: number; + updated_at?: number; + num_project?: number; + type: ParticipantType | `${ParticipantType}`; + last_connected_at?: number; + support_blockchain?: boolean; +} + +export enum ConnectionStatusType { + Success = 'success', + Fail = 'error', + Processing = 'processing', +} + +export interface Version { + pub_date?: string; + revision?: string; + branch_name?: string; + version?: string; +} + +export interface ConnectionStatus { + success: ConnectionStatusType; + message: string; + application_version: Version; +} + +export interface DomainName { + domain_name: string; +} diff --git a/web_console_v2/client/src/typings/project.ts b/web_console_v2/client/src/typings/project.ts index 877ae5704..f3177abf1 100644 --- a/web_console_v2/client/src/typings/project.ts +++ b/web_console_v2/client/src/typings/project.ts @@ -1,4 +1,5 @@ import { PaginationConfig } from './component'; +import { Participant as NewParticipant, ParticipantType } from 'typings/participant'; export enum ConnectionStatus { Success, @@ -20,6 +21,7 @@ export interface ProjectVariable { export interface Participant { name: string; domain_name: string; + pure_domain_name?: string; url: string; certificates?: string | null; } @@ -34,19 +36,63 @@ export interface UpdateProjectPayload { export interface CreateProjectPayload { name: string; config: { - token: string; participants: Participant[]; variables: ProjectVariable[]; + abilities?: ProjectTaskType[]; + action_rules?: Record<ProjectActionType, ProjectAbilityType>; + support_blockchain?: boolean; }; comment: string; } + +export interface CreatePendingProjectPayload { + name: string; + config: { + participants?: Participant[]; + variables?: ProjectVariable[]; + abilities?: ProjectTaskType[]; + action_rules?: Record<ProjectActionType, ProjectAbilityType>; + support_blockchain?: boolean; + }; + participant_id: ID[]; +} + +export interface FetchPendingProjectsPayload { + filter?: string; + page?: number; + page_size?: number; +} + +export interface AbilityAuth {} export interface Project extends CreateProjectPayload { - id: number; + id: ID; + uuid?: ID; token: string; created_at: number; updated_at: number; deleted_at: null; num_workflow: number; + participants: NewParticipant[]; + variables: ProjectVariable[]; + creator: string; + creator_username?: string; + role: RoleType; + participant_type?: ParticipantType; + project_task_type: ProjectTaskType; + participants_info: { ['participants_map']: Record<string, ParticiPantMap> }; + state: ProjectStateType; + ticket_status: ProjectTicketStatus; +} +//export interface Complete +export interface ParticiPantMap { + name: string; + domain_name: string; + role: RoleType; +} + +export enum RoleType { + COORDINATOR = 'COORDINATOR', + PARTICIPANT = 'PARTICIPANT', } export interface ProjectList { @@ -55,13 +101,22 @@ export interface ProjectList { } export interface ProjectFormInitialValues { - certificateConfigType: number; + certificateConfigType?: number; name: string; - participantName: string; - participantUrl: string; - participantDomainName: string; + participantName?: string; + 
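Editor's note — for reference, a well-formed `CreateParticipantPayload` under the participant typings above. Every value is a placeholder; note that `host` is a string while `port` is a number, matching the `Participant` interface:

```typescript
import { CreateParticipantPayload, ParticipantType } from 'typings/participant';

// Placeholder endpoint values for a manually configured platform participant.
const payload: CreateParticipantPayload = {
  name: 'partner-a',
  domain_name: 'fl-partner-a.com',
  is_manual_configured: true,
  grpc_ssl_server_host: 'fl-partner-a.com',
  host: '10.128.0.1',
  port: 32443,
  comment: 'configured manually via the connection wizard',
  type: ParticipantType.PLATFORM,
};
```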
participantUrl?: string; + participantDomainName?: string; comment: string; variables?: ProjectVariable[]; + participants?: NewParticipant[]; + config?: ProjectConfig; +} +export interface ProjectConfig { + participants?: Participant[]; + variables?: ProjectVariable[]; + abilities?: ProjectTaskType[]; + action_rules?: Record<ProjectActionType, ProjectAbilityType>; + support_blockchain?: boolean; } export function getConnectionStatusClassName(status: ConnectionStatus) { @@ -83,16 +138,83 @@ export function getConnectionStatusClassName(status: ConnectionStatus) { export function getConnectionStatusTag(status: ConnectionStatus): string { switch (status) { case ConnectionStatus.Success: - return 'project.connection_status_success'; + return '成功'; case ConnectionStatus.Waiting: - return 'project.connection_status_waiting'; + return '待检查'; case ConnectionStatus.Checking: - return 'project.connection_status_checking'; + return '检查中'; case ConnectionStatus.Failed: - return 'project.connection_status_failed'; + return '失败'; case ConnectionStatus.CheckFailed: - return 'project.connection_status_check_failed'; + return '请重新检查'; default: - return 'project.connection_status_waiting' as never; + return '待检查' as never; } } + +export interface NewCreateProjectPayload { + name: string; + config: { + variables: ProjectVariable[]; + abilities?: ProjectTaskType[]; + action_rules?: Record<string, string>; + support_blockchain?: boolean; + }; + participant_ids: ID[]; + comment: string; +} + +export enum ProjectListType { + COMPLETE = 'complete', + PENDING = 'pending', +} + +export enum ProjectTaskType { + ALIGN = 'ID_ALIGNMENT', + HORIZONTAL = 'HORIZONTAL_FL', + VERTICAL = 'VERTICAL_FL', + TRUSTED = 'TEE', +} + +export enum ProjectAbilityType { + ALWAYS_ALLOW = 'ALWAYS_ALLOW', + ONCE = 'ONCE', + MANUAL = 'MANUAL', + ALWAYS_REFUSE = 'ALWAYS_REFUSE', +} + +export enum ProjectActionType { + ID_ALIGNMENT = 'ID_ALIGNMENT', + DATA_ALIGNMENT = 'DATA_ALIGNMENT', + HORIZONTAL_TRAIN = 'HORIZONTAL_TRAIN', + VERTICAL_TRAIN = 'VERTICAL_TRAIN', + VERTICAL_EVAL = 'VERTICAL_EVAL', + VERTICAL_PRED = 'VERTICAL_PRED', + VERTICAL_SERVING = 'VERTICAL_SERVING', + WORKFLOW = 'WORKFLOW', + TEE_SERVICE = 'TEE_SERVICE', + TEE_RESULT_EXPORT = 'TEE_RESULT_EXPORT', +} + +export enum ProjectStateType { + PENDING = 'PENDING', + ACCEPTED = 'ACCEPTED', + FAILED = 'FAILED', + CLOSED = 'CLOSED', +} + +export enum ProjectTicketStatus { + PENDING = 'PENDING', + APPROVED = 'APPROVED', + DECLINED = 'DECLINED', + FAILED = 'FAILED', +} + +export enum ProjectBlockChainType { + OPEN = '已开启', + CLOSED = '未开启', +} + +export enum ProjectBaseAbilitiesType { + BASE = 'BASE', +} diff --git a/web_console_v2/client/src/typings/settings.ts b/web_console_v2/client/src/typings/settings.ts index 9668035e5..c519c88ff 100644 --- a/web_console_v2/client/src/typings/settings.ts +++ b/web_console_v2/client/src/typings/settings.ts @@ -1,3 +1,38 @@ export type SettingOptions = { - webconsole_image: string; + webconsole_image?: string; + variables?: SystemVariable[]; }; + +export type ValueType = + | 'STRING' + | 'INT' + | 'LIST' + | 'OBJECT' + | 'NUMBER' + | 'BOOL' + | 'BOOLEAN' + | 'CODE'; +export interface SystemVariable { + name: string; + value: any; + /** If fixed = true, can't modify name and delete myself */ + fixed: boolean; + value_type: ValueType; +} + +export interface SettingInfo { + uniq_key: string; + value: string; + created_at?: DateTime; + updated_at?: DateTime; +} + +export interface SettingVariables { + variables?: SystemVariable[]; +} + +export 
interface SystemInfo { + name: string; + domain_name: string; + pure_domain_name?: string; +} diff --git a/web_console_v2/client/src/typings/trustedCenter.ts b/web_console_v2/client/src/typings/trustedCenter.ts new file mode 100644 index 000000000..576c6387c --- /dev/null +++ b/web_console_v2/client/src/typings/trustedCenter.ts @@ -0,0 +1,206 @@ +export enum TrustedJobGroupDisplayStatus { + CREATE_PENDING = 'CREATE_PENDING', + CREATE_FAILED = 'CREATE_FAILED', + TICKET_PENDING = 'TICKET_PENDING', + TICKET_DECLINED = 'TICKET_DECLINED', + SELF_AUTH_PENDING = 'SELF_AUTH_PENDING', + PART_AUTH_PENDING = 'PART_AUTH_PENDING', + JOB_PENDING = 'JOB_PENDING', + JOB_RUNNING = 'JOB_RUNNING', + JOB_SUCCEEDED = 'JOB_SUCCEEDED', + JOB_FAILED = 'JOB_FAILED', + JOB_STOPPED = 'JOB_STOPPED', +} + +export enum TrustedJobGroupStatus { + PENDING = 'PENDING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', +} + +export enum AuthStatus { + PENDING = 'PENDING', + WITHDRAW = 'WITHDRAW', + AUTHORIZED = 'AUTHORIZED', +} + +export enum TrustedJobType { + ANALYZE = 'ANALYZE', + EXPORT = 'EXPORT', +} + +export enum TrustedJobStatus { + NEW = 'NEW', + CREATED = 'CREATED', + PENDING = 'PENDING', + RUNNING = 'RUNNING', + SUCCEEDED = 'SUCCEEDED', + FAILED = 'FAILED', + STOPPED = 'STOPPED', +} + +export enum TicketStatus { + PENDING = 'PENDING', + APPROVED = 'APPROVED', + DECLINED = 'DECLINED', +} + +export enum TrustedJobRole { + COORDINATOR = 'COORDINATOR', + PARTICIPANT = 'PARTICIPANT', +} + +export enum ResourceTemplateType { + HIGH = 'high', + MEDIUM = 'medium', + LOW = 'low', + CUSTOM = 'custom', +} + +export enum NotificationType { + TRUSTED_JOB_GROUP_CREATE = 'TRUSTED_JOB_GROUP_CREATE', + TRUSTED_JOB_EXPORT = 'TRUSTED_JOB_EXPORT', +} + +export enum TrustedJobParamType { + ANALYZE = 'ANALYZE', + EXPORT = 'EXPORT', +} + +export enum TicketAuthStatus { + CREATE_PENDING = 'CREATE_PENDING', + CREATE_FAILED = 'CREATE_FAILED', + TICKET_PENDING = 'TICKET_PENDING', + TICKET_DECLINED = 'TICKET_DECLINED', + AUTH_PENDING = 'AUTH_PENDING', + AUTHORIZED = 'AUTHORIZED', +} + +export interface TrustedJobResource { + cpu: number; + memory: number; + replicas?: number; +} + +export type ParticipantDataset = { + participant_id: ID; + uuid: ID; + name: string; +}; + +export type TrustedJobGroupPayload = { + name?: string; + comment?: string; + algorithm_id?: ID; + dataset_id?: ID; + participant_datasets?: ParticipantDataset[]; + resource?: TrustedJobResource; + auth_status?: AuthStatus; +}; + +export type datasetConstruct = { + items: ParticipantDataset[]; +}; + +export type TrustedJobGroup = { + id: ID; + name: string; + uuid: ID; + latest_version: string; + comment: string; + project_id: ID; + creator_username?: string; + created_at?: DateTime; + updated_at?: DateTime; + analyzer_id?: ID; + coordinator_id?: ID; + ticket_uuid: ID; + ticket_status: TicketStatus; + status?: TrustedJobGroupStatus; + auth_status?: AuthStatus; + latest_job_status?: TrustedJobStatus; + ticket_auth_status: TicketAuthStatus; + unauth_participant_ids?: ID[]; + algorithm_id?: ID; + algorithm_participant_id?: ID; + algorithm_uuid?: string; + algorithm_project_uuid?: string; + resource?: TrustedJobResource; + dataset_id?: ID; + creator_name: string; + participant_datasets: datasetConstruct; +}; + +export type TrustedJobGroupItem = { + id: ID; + name: string; + created_at: DateTime; + is_creator: boolean; + creator_id: ID; + ticket_status: TicketStatus; + is_configured?: boolean; + status?: TrustedJobGroupStatus; + auth_status?: AuthStatus; + latest_job_status?: 
TrustedJobStatus; + ticket_auth_status?: TicketAuthStatus; + participants_info: any; + unauth_participant_ids?: ID[]; +}; + +export type TrustedJob = { + id: ID; + name: string; + coordinator_id?: ID; + type?: TrustedJobType; + job_id: ID; + uuid: ID; + version: number; + comment: string; + project_id: ID; + trusted_job_group_id: ID; + started_at: DateTime; + finished_at: DateTime; + status: TrustedJobStatus; + algorithm_id: ID; + algorithm_uuid: ID; + resource: TrustedJobResource; + auth_status: AuthStatus; + dataset_job_id: ID; + ticket_uuid?: ID; + ticket_status?: string; + ticket_auth_status: TicketAuthStatus; + export_dataset_id?: ID; +}; + +export type TrustedJobListItem = { + id: ID; + name: string; + type?: TrustedJobType; + job_id: ID; + comment: string; + started_at: DateTime; + finished_at: DateTime; + status: TrustedJobStatus; + ticket_auth_status: TicketAuthStatus; + coordinator_id?: ID; +}; + +export type Instance = { + id: ID; + status: TrustedJobStatus; + created_at: DateTime; + resource: TrustedJobResource; +}; + +export type NotificationItem = { + type: NotificationType; + id: ID; + name: string; + created_at: DateTime; + coordinator_id: ID; +}; + +export enum TrustedJobGroupTabType { + COMPUTING = 'computing', + EXPORT = 'export', +} diff --git a/web_console_v2/client/src/typings/utils.ts b/web_console_v2/client/src/typings/utils.ts new file mode 100644 index 000000000..3d4984aae --- /dev/null +++ b/web_console_v2/client/src/typings/utils.ts @@ -0,0 +1,12 @@ +export type Overwrite<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U; + +export type Notification = { + /** workflow id */ + id?: number | null; + kind: string; + workflow_name: string; + peer_name: string; + project_name: string; + created_at: DateTime; + project_id: ID; +}; diff --git a/web_console_v2/client/src/typings/variable.ts b/web_console_v2/client/src/typings/variable.ts index cfaa9c026..a80584c71 100644 --- a/web_console_v2/client/src/typings/variable.ts +++ b/web_console_v2/client/src/typings/variable.ts @@ -40,6 +40,11 @@ export interface SwitchWidgetSchema { unCheckedChildren?: string; } +/** + * ! 
@IMPORTANT: + * If you want to add a new component, + * remember to add a worker in formSchema.tsx > componentToWorkersMap + */ export enum VariableComponent { Input = 'Input', Select = 'Select', @@ -47,21 +52,25 @@ export enum VariableComponent { Checkbox = 'Checkbox', TextArea = 'TextArea', NumberPicker = 'NumberPicker', + CPU = 'CPU', + MEM = 'MEM', Switch = 'Switch', // -------- Custom components ---------- Code = 'Code', + JSON = 'JSON', Dataset = 'Dataset', + DatasetPath = 'DatasetPath', + FeatureSelect = 'FeatureSelect', + EnvsInput = 'EnvsInput', + AlgorithmSelect = 'AlgorithmSelect', // ------- Custom components ---------- // Uncomment it after we have usecase // TimePicker = 'TimePicker', // Upload = 'Upload', } -export type VariableRule = { validator: RegExp | string; message: string }; - export interface VariableWidgetSchema - extends SelectWidgetSchema, - NumberPickerWidgetSchema, + extends NumberPickerWidgetSchema, TextAreaWidgetSchema, SwitchWidgetSchema, SelectWidgetSchema, @@ -78,14 +87,16 @@ export interface VariableWidgetSchema index?: number; // will render a question icon beside the label, hover it to show the tooltip tooltip?: string; + // control variables' visibility, will not affect value + hidden?: boolean; // will render some text below the form item description?: string; placeholder?: string; /** ------ Validations ------ */ // RegExp string '\d' - pattern?: string; - rules?: VariableRule[]; + pattern?: any; + rules?: any[]; required?: boolean; /** ------ Miscs ------ */ @@ -102,6 +113,10 @@ export enum VariableAccessMode { export enum VariableValueType { STRING = 'STRING', CODE = 'CODE', + BOOLEAN = 'BOOLEAN', + NUMBER = 'NUMBER', + LIST = 'LIST', + OBJECT = 'OBJECT', } export interface Variable { @@ -109,6 +124,8 @@ export interface Variable // Due to proto doesn't has more optional types, we fixed to use string as value type, // for boolean/number value, should convert to 'true', '2' directly (but so far, we don't need values like boolean) value: any; + tag?: string; + typed_value?: any; value_type?: VariableValueType; access_mode: VariableAccessMode; widget_schema: VariableWidgetSchema; diff --git a/web_console_v2/client/src/typings/workflow.ts b/web_console_v2/client/src/typings/workflow.ts index ebb811357..537ae5226 100644 --- a/web_console_v2/client/src/typings/workflow.ts +++ b/web_console_v2/client/src/typings/workflow.ts @@ -1,103 +1,236 @@ import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; import { Job, JobExecutionDetalis, CreateJobFlag } from './job'; import { Variable } from './variable'; +import { + JobDefinitionForm, + VariableDefinitionForm, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import { ValueType } from './settings'; -export type WorkflowConfig<J = Job> = { +export type WorkflowConfig<J = Job, V = Variable> = { group_alias: string; - is_left: boolean; - variables: Variable[]; + variables: V[]; job_definitions: J[]; }; export type ChartWorkflowConfig = WorkflowConfig<JobNodeRawData>; -export interface WorkflowTemplate<J = Job> { +export enum WorkflowType { + MY = 'my', + SYSTEM = 'system', +} + +export enum WorkflowTemplateMenuType { + MY = 'my', + BUILT_IN = 'built-in', + PARTICIPANT = 'participant', +} +export enum WorkflowTemplateType { + MY = 0, + BUILT_IN = 1, + PARTICIPANT = 2, +} + +export interface WorkflowTemplate<J = Job, V = Variable> { id: number; name: string; - is_left: boolean; + is_local?: boolean; group_alias: string; comment?: string; - config: WorkflowConfig<J>; + config: WorkflowConfig<J, 
V>; + editor_info?: WorkflowTemplateEditInfo; + kind: WorkflowTemplateType; + creator_username?: string; + created_at?: number; + updated_at?: number; + revision_id?: number; } -export type WorkflowTemplatePayload<J = Job> = { - name: string; - is_left?: boolean; - group_alias?: string; +export interface RevisionDetail<J = Job, V = Variable> { + is_local?: boolean; + name?: string; + id: number; + revision_index: number; comment?: string; - config: WorkflowConfig<J>; + template_id: string; + config: WorkflowConfig<J, V>; + editor_info?: WorkflowTemplateEditInfo; +} + +export type RevisionPayload = { + comment?: string; +}; + +export interface TemplateRevision { + id: number; + revision_index: number; + comment?: string; + template_id: string; + created_at?: number; +} + +export type WorkflowTemplatePayload< + J = Job | JobDefinitionForm, + V = Variable | VariableDefinitionForm +> = Omit<WorkflowTemplate<J, V>, 'id' | 'is_local' | 'kind'>; + +export type WorkflowTemplateEditInfo = { + yaml_editor_infos: { + [jobName: string]: YamlEditorInfo; + }; }; -export type WorkflowInitiatePayload = { +export type YamlEditorInfo = { + slots: { + [slotName: string]: JobSlot; + }; + meta_yaml: string; + variables?: Variable[]; +}; + +export type JobSlot = { + reference: string; + reference_type: JobSlotReferenceType; + value_type: ValueType; + default_value: any; + help?: string; + label: string; +}; + +export enum JobSlotReferenceType { + DEFAULT = 'DEFAULT', + SELF = 'SELF', + OTHER_JOB = 'OTHER_JOB', + WORKFLOW = 'WORKFLOW', + PROJECT = 'PROJECT', + SYSTEM = 'SYSTEM', + JOB_PROPERTY = 'JOB_PROPERTY', +} + +export type WorkflowInitiatePayload<J = JobNodeRawData> = { name: string; project_id: ID; forkable: boolean; - batch_update_interval?: number; create_job_flags?: CreateJobFlag[]; - config: ChartWorkflowConfig; + cron_config?: string; + config: WorkflowConfig<J>; comment?: string; + local_extra?: string; + extra?: string; + template_id?: ID | null; + template_revision_id?: ID | null; }; -export type WorkflowAcceptPayload = { +export type WorkflowAcceptPayload<J = JobNodeRawData> = { forkable: boolean; create_job_flags?: CreateJobFlag[]; - batch_update_interval?: number; - config: ChartWorkflowConfig; + cron_config?: string; + config: WorkflowConfig<J>; comment?: string; + template_id?: ID | null; + template_revision_id?: ID | null; }; export type WorkflowForkPayload = WorkflowInitiatePayload & { forked_from: ID; - batch_update_interval?: number; + is_local?: boolean; + cron_config?: string; create_job_flags: CreateJobFlag[]; // e.g. [raw_data, training...] 
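Editor's note — the fork payload continues just below. Putting the workflow typings together, a minimal coordinator-side initiate payload might look like the following sketch; the ids and names are placeholders, and the job list is left empty for brevity:

```typescript
import { WorkflowInitiatePayload } from 'typings/workflow';

// Placeholder ids/names; a real payload would carry job_definitions
// and variables produced from the chosen template.
const payload: WorkflowInitiatePayload = {
  name: 'nn-train-demo',
  project_id: 1,
  forkable: true,
  config: {
    group_alias: 'nn-train',
    variables: [],
    job_definitions: [],
  },
  template_id: 12,
  template_revision_id: null,
};
```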
peer_create_job_flags: CreateJobFlag[]; fork_proposal_config: ChartWorkflowConfig; }; export enum WorkflowState { + UNKNOWN = 'UNKNOWN', INVALID = 'INVALID', - NEW = 'NEW', - READY = 'READY', RUNNING = 'RUNNING', STOPPED = 'STOPPED', FAILED = 'FAILED', COMPLETED = 'COMPLETED', + PREPARE_RUN = 'PREPARE_RUN', + PREPARE_STOP = 'PREPARE_STOP', + WARMUP_UNDERHOOD = 'WARMUP_UNDERHOOD', + PENDING_ACCEPT = 'PENDING_ACCEPT', + READY_TO_RUN = 'READY_TO_RUN', + PARTICIPANT_CONFIGURING = 'PARTICIPANT_CONFIGURING', } -export enum TransactionState { - READY = 'READY', - ABORTED = 'ABORTED', +export enum WorkflowStateFilterParam { + UNKNOWN = 'unknown', + INVALID = 'invalid', + RUNNING = 'running', + STOPPED = 'stopped', + FAILED = 'failed', + COMPLETED = 'completed', + PREPARE_RUN = 'prepare_run', + PREPARE_STOP = 'prepare_stop', + WARMUP_UNDERHOOD = 'warmup', + PENDING_ACCEPT = 'pending', + READY_TO_RUN = 'ready', + PARTICIPANT_CONFIGURING = 'configuring', +} - COORDINATOR_PREPARE = 'COORDINATOR_PREPARE', - COORDINATOR_COMMITTABLE = 'COORDINATOR_COMMITTABLE', - COORDINATOR_COMMITTING = 'COORDINATOR_COMMITTING', - COORDINATOR_ABORTING = 'COORDINATOR_ABORTING', +export type WorkflowStateType = `${WorkflowState}`; +export type WorkflowStateFilterParamType = `${WorkflowStateFilterParam}`; - PARTICIPANT_PREPARE = 'PARTICIPANT_PREPARE', - PARTICIPANT_COMMITTABLE = 'PARTICIPANT_COMMITTABLE', - PARTICIPANT_COMMITTING = 'PARTICIPANT_COMMITTING', - PARTICIPANT_ABORTING = 'PARTICIPANT_ABORTING', -} +type TemplateInfo = { + id: number; + name?: string | null; + revision_index?: number; + is_modified: boolean; +}; export type Workflow = { id: number; uuid?: string; name: string; - project_id: number; + project_id: ID; config: WorkflowConfig | null; + is_local?: boolean; forkable: boolean; + favour: boolean; metric_is_public?: boolean; forked_from?: number; comment: string | null; state: WorkflowState; - target_state: WorkflowState; - transaction_state: TransactionState; - transaction_err: string | null; created_at: DateTime; updated_at: DateTime; - batch_update_interval?: number; + cron_config?: string; start_at?: DateTime | null; stop_at?: DateTime | null; + template_info?: TemplateInfo; + editor_info?: WorkflowTemplateEditInfo; + creator?: string; + + extra?: any; + local_extra?: any; + + 'model.name'?: string; + 'model.desc'?: string; + 'model.dataset_id'?: any; + 'model.creator'?: any; + 'model.group_id'?: any; + 'model.parent_job_name'?: string; + 'model_group.name'?: string; + 'model_group.desc'?: string; + 'model.algorithm_type'?: string; + is_allow_coordinator_parameter_tuning?: boolean; + is_share_model_evaluation_index?: boolean; + isReceiver?: boolean; + /** created from model-center module(model train) */ + isTrainMode?: boolean; + /** created from model-center module(model offline prediction) */ + isPredictionMode?: boolean; + /** created from model-center module(model evaluation) */ + isEvaluationMode?: boolean; + + 'prediction.name'?: string; + 'prediction.desc'?: string; + 'prediction.dataset_id'?: string; + + 'evaluation.name'?: string; + 'evaluation.desc'?: string; + 'evaluation.dataset_id'?: string; }; export type WorkflowExecutionDetails = { @@ -107,3 +240,12 @@ export type WorkflowExecutionDetails = { jobs: JobExecutionDetalis[]; create_job_flags?: CreateJobFlag[]; peer_create_job_flags?: CreateJobFlag[]; } & Workflow; + +export enum Tag { + RESOURCE_ALLOCATION = 'RESOURCE_ALLOCATION', // Resource allocation + INPUT_PARAM = 'INPUT_PARAM', // Input parameters + INPUT_PATH = 'INPUT_PATH', // Input path + OUTPUT_PATH = 'OUTPUT_PATH', // Output path + OPERATING_PARAM 
= 'OPERATING_PARAM', // Runtime parameters + SYSTEM_PARAM = 'SYSTEM_PARAM', // System variables +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmAcceptance/index.module.less b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmAcceptance/index.module.less new file mode 100644 index 000000000..5e74c24f3 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmAcceptance/index.module.less @@ -0,0 +1,55 @@ +.styled_container { + box-sizing: border-box; + width: 440px; + margin: 80px auto 20px auto; + padding: 40px; + border-radius: 8px; + border: 1px solid var(--color-border-2); +} + +.styled_header { + text-align: center; +} + +.styled_avatar { + display: inline-block; +} + +.styled_form_text { + line-height: 24px; +} + +.styled_form_footer { + text-align: center; +} + +.styled_form_footer_button { + padding-left: 40px; + padding-right: 40px; +} + +.styled_success_icon { + display: block; + width: 70px; + height: 70px; + background-image: url('../../../assets/icons/successful-status-icon.svg'); + background-size: 68px 68px; + background-position: center center; + background-repeat: no-repeat; +} + +.styled_result { + :global(.arco-result-icon) { + margin-bottom: 0; + } + :global(.arco-result-title) { + margin-bottom: 30px; + font-size: 16px; + } + :global(.arco-result-subtitle) { + font-size: 12px; + } + :global(.arco-result-extra) { + margin-top: 10px; + } +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmAcceptance/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmAcceptance/index.tsx new file mode 100644 index 000000000..8367b9fa7 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmAcceptance/index.tsx @@ -0,0 +1,247 @@ +import React, { FC, useState } from 'react'; +import styled from './index.module.less'; +import { useParams, useHistory } from 'react-router'; +import { useQuery, useMutation } from 'react-query'; +import { + Typography, + Divider, + Form, + Input, + Button, + Space, + Result, + Drawer, + RulesProps, + Message, +} from '@arco-design/web-react'; +import { useRecoilValue } from 'recoil'; +import BackButton from 'components/BackButton'; +import { + postAcceptAlgorithm, + fetchProjectPendingList, + fetchProjectDetail, +} from 'services/algorithm'; +import { projectState } from 'stores/project'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { AlgorithmManagementTabType } from 'typings/modelCenter'; +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; +import { useInterval } from 'hooks'; +import AlgorithmType from 'components/AlgorithmType'; +import AlgorithmInfo from 'components/AlgorithmDrawer/AlgorithmInfo'; +import { Avatar } from '../shared'; + +enum FormField { + NAME = 'name', + COMMENT = 'comment', +} + +const rules: Record<string, RulesProps[]> = { + [FormField.NAME]: [ + { required: true, message: '算法名称不能为空' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ], + [FormField.COMMENT]: [ + { + maxLength: MAX_COMMENT_LENGTH, + message: '最多为 200 个字符', + }, + ], +}; + +type TMutationParams = { + projId: ID; + id: ID; + payload: { + name: string; + comment?: string; + }; +}; + +const REDIRECT_COUNTDOWN_DURATION = 3; +const AlgorithmAcceptance: FC = () => { + const [form] = Form.useForm(); + const selectedProject = useRecoilValue(projectState); + const { id } = useParams<{ id: string }>(); + const history = useHistory(); + const [successful, setSuccessful] = 
useState(false); + const [previewVisible, setPreviewVisible] = useState(false); + const [redirectCountdown, setRedirectCountdown] = useState(REDIRECT_COUNTDOWN_DURATION); + const query = useQuery( + ['algorithm_acceptance', id, selectedProject.current?.id], + () => { + return fetchProjectPendingList(selectedProject.current?.id).then((res) => { + const algorithm = res.data.find((item) => item.id.toString() === id); + if (!algorithm) { + throw new Error('算法不存在'); + } + return algorithm; + }); + }, + { + refetchOnWindowFocus: false, + retry: 1, + onSuccess(res) { + form.setFieldsValue(res); + }, + onError(e: any) { + Message.error(e.message); + }, + }, + ); + + // Get algorithm project name + useQuery( + ['fetch_algorithm_project_detail', query?.data?.algorithm_project_id], + () => { + return fetchProjectDetail(query!.data!.algorithm_project_id!); + }, + { + enabled: Boolean(query?.data?.algorithm_project_id), + onSuccess(res) { + if (res?.data?.name) { + form.setFieldsValue({ + name: res.data.name, + }); + } + }, + }, + ); + + const accept = useMutation( + ({ projId, id, payload }: TMutationParams) => { + return postAcceptAlgorithm(projId, id, payload); + }, + { + onSuccess() { + setSuccessful(true); + }, + onError(e: any) { + if (e.code === 409 || /already\s*exist/.test(e.message)) { + Message.error('算法名称已存在'); + } + }, + }, + ); + const detail = query.data; + const layoutTitle = <BackButton onClick={goBackMyAlgorithmList}>{'算法仓库'}</BackButton>; + + useInterval( + () => { + if (redirectCountdown === 0) { + goBackMyAlgorithmList(); + return; + } + setRedirectCountdown(redirectCountdown - 1); + }, + successful ? 1000 : undefined, + { + immediate: false, + }, + ); + + if (successful) { + return ( + <SharedPageLayout title={layoutTitle}> + <Result + className={styled.styled_result} + status={null} + icon={<i className={styled.styled_success_icon} />} + title={`已同意并保存『${form.getFieldValue('name')}-V${detail?.version}』`} + subTitle={`${redirectCountdown}S 后自动前往算法列表`} + extra={[ + <Button key="back" type="primary" onClick={goBackMyAlgorithmList}> + {'前往算法列表'} + </Button>, + ]} + /> + </SharedPageLayout> + ); + } + + return ( + <SharedPageLayout title={layoutTitle}> + <div className={styled.styled_container}> + <header className={styled.styled_header}> + <Avatar /> + <br /> + <Typography.Text type="secondary">{detail?.comment}</Typography.Text> + </header> + <Divider /> + <Form form={form} labelCol={{ span: 5 }} onSubmit={acceptAlgorithm}> + <Form.Item + label={'名称'} + field="name" + rules={rules[FormField.NAME]} + disabled={Boolean(detail?.algorithm_project_id)} + > + <Input placeholder={'请输入算法名称'} /> + </Form.Item> + <Form.Item label={'描述'} field="comment" rules={rules[FormField.COMMENT]}> + <Input.TextArea rows={2} placeholder={'最多为 200 个字符'} /> + </Form.Item> + <Form.Item label={'类型'}> + {detail?.type && <AlgorithmType type={detail.type} />} + </Form.Item> + <Form.Item label={'版本'}> + <div className={styled.styled_form_text}>V{detail?.version}</div> + </Form.Item> + <Form.Item label={'算法'}> + <button + type="button" + className="custom-text-button" + onClick={() => setPreviewVisible(true)} + > + {'查看算法配置'} + </button> + </Form.Item> + <div className={styled.styled_form_footer}> + <Space> + <Button + className={styled.styled_form_footer_button} + type="primary" + htmlType="submit" + loading={accept.isLoading} + > + {'同意并保存'} + </Button> + <Button className={styled.styled_form_footer_button} onClick={goBackMyAlgorithmList}> + {'取消'} + </Button> + </Space> + </div> + </Form> + </div> + <Drawer + 
closable + onCancel={() => setPreviewVisible(false)} + width={1000} + visible={previewVisible} + title={`算法版本 V${detail?.version}`} + > + <AlgorithmInfo type="pending_algorithm" detail={detail} /> + </Drawer> + </SharedPageLayout> + ); + + async function acceptAlgorithm(formValues: any) { + if (selectedProject.current?.id) { + accept.mutate({ + projId: selectedProject.current.id, + id: detail?.id!, + payload: { + name: formValues.name as string, + comment: formValues.comment as string, + }, + }); + } + } + + function goBackMyAlgorithmList() { + history.replace(`/algorithm-management/${AlgorithmManagementTabType.MY}`); + } +}; + +export default AlgorithmAcceptance; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/VersionsTab.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/VersionsTab.tsx new file mode 100644 index 000000000..13d389ff3 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/VersionsTab.tsx @@ -0,0 +1,283 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Table, Button, Link, Drawer, Empty } from '@arco-design/web-react'; +import AlgorithmInfo from 'components/AlgorithmDrawer/AlgorithmInfo'; +import { + AlgorithmProject, + Algorithm, + EnumAlgorithmProjectSource, + AlgorithmVersionStatus, +} from 'typings/algorithm'; +import { formatTimestamp } from 'shared/date'; +import MoreActions from 'components/MoreActions'; +import { CONSTANTS } from 'shared/constants'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantId } from 'hooks'; +import { useQuery } from 'react-query'; +import { fetchAlgorithmList, fetchPeerAlgorithmList } from 'services/algorithm'; +import styled from './index.module.less'; + +type TTableParams = { + onPreviewClick: (algorithm: Algorithm) => void; + onPublishClick: (algorithm: Algorithm) => void; + onUnpublishClick: (algorithm: Algorithm) => void; + onDeleteClick: (algorithm: Algorithm) => void; + onDownloadClick: (algorithm: Algorithm) => void; + algorithmProjectDetail: AlgorithmProject; + isParticipant?: boolean; +}; + +const calcStateIndicatorProps = ( + state: AlgorithmVersionStatus, +): { type: StateTypes; text: string; tip?: string } => { + const tip = ''; + const stateMap = new Map(); + stateMap.set(AlgorithmVersionStatus.UNPUBLISHED, { + text: '未发布', + type: 'gold', + }); + stateMap.set(AlgorithmVersionStatus.PUBLISHED, { + text: '已发布', + type: 'success', + }); + stateMap.set(AlgorithmVersionStatus.PENDING, { + text: '待审批', + type: 'gold', + }); + stateMap.set(AlgorithmVersionStatus.APPROVED, { + text: '已通过', + type: 'processing', + }); + stateMap.set(AlgorithmVersionStatus.DECLINED, { + text: '已拒绝', + type: 'error', + }); + return { + ...stateMap.get(state), + tip, + }; +}; + +const getTableProps = ({ + onPreviewClick, + onPublishClick, + onUnpublishClick, + onDeleteClick, + onDownloadClick, + algorithmProjectDetail, + isParticipant, +}: TTableParams) => { + const cols = [ + { + dataIndex: 'version', + title: '版本号', + render(version: number) { + return `V${version}`; + }, + }, + { + dataIndex: 'id', + title: '版本配置', + render(id: string, record: Algorithm) { + return <Link onClick={() => onPreviewClick(record)}>点击查看</Link>; + }, + }, + !isParticipant && + ({ + title: '状态', + dataIndex: 'status', + name: 'status', + width: 150, + render: (state: AlgorithmVersionStatus, record: any) => { + return <StateIndicator {...calcStateIndicatorProps(state)} />; 
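Editor's note — an alternative to the untyped `Map` in `calcStateIndicatorProps` above is a `Record` keyed by the enum, which the compiler checks for exhaustiveness. A sketch, assuming the five statuses used here are the only members of `AlgorithmVersionStatus`:

```typescript
import { AlgorithmVersionStatus } from 'typings/algorithm';
import { StateTypes } from 'components/StateIndicator';

// Adding a new AlgorithmVersionStatus member becomes a compile error
// until a row is added here (assumes the enum has exactly these members).
const STATE_PROPS: Record<AlgorithmVersionStatus, { type: StateTypes; text: string }> = {
  [AlgorithmVersionStatus.UNPUBLISHED]: { type: 'gold', text: '未发布' },
  [AlgorithmVersionStatus.PUBLISHED]: { type: 'success', text: '已发布' },
  [AlgorithmVersionStatus.PENDING]: { type: 'gold', text: '待审批' },
  [AlgorithmVersionStatus.APPROVED]: { type: 'processing', text: '已通过' },
  [AlgorithmVersionStatus.DECLINED]: { type: 'error', text: '已拒绝' },
};
```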
+ }, + } as any), + !isParticipant && + ({ + dataIndex: 'username', + title: '创建者', + } as any), + { + dataIndex: 'comment', + title: '描述', + render(val: string) { + return val || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + dataIndex: 'created_at', + title: '发版时间', + render(val: number) { + return formatTimestamp(val); + }, + }, + !isParticipant && { + dataIndex: 'operation', + title: '操作', + render(_: any, record: Algorithm) { + return ( + <> + {(record.status === AlgorithmVersionStatus.PUBLISHED || + record.status === AlgorithmVersionStatus.APPROVED) && ( + <Button + className={styled.version_list_button} + type="text" + size="small" + onClick={() => { + onUnpublishClick(record); + }} + disabled={algorithmProjectDetail.source !== EnumAlgorithmProjectSource.USER} + > + {'撤销发布'} + </Button> + )} + {(record.status === AlgorithmVersionStatus.UNPUBLISHED || + record.status === AlgorithmVersionStatus.DECLINED) && ( + <Button + className={styled.version_list_button} + type="text" + size="small" + onClick={() => { + onPublishClick(record); + }} + disabled={algorithmProjectDetail.source !== EnumAlgorithmProjectSource.USER} + > + {'发布'} + </Button> + )} + <MoreActions + actionList={[ + { + label: '下载', + onClick: () => { + onDownloadClick(record); + }, + }, + { + label: '删除', + onClick: () => { + onDeleteClick(record); + }, + danger: true, + disabled: + algorithmProjectDetail.source !== EnumAlgorithmProjectSource.USER || + record.status === AlgorithmVersionStatus.PENDING, + }, + ]} + /> + </> + ); + }, + }, + ].filter(Boolean); + + return cols; +}; + +type Props = { + id?: ID; + detail?: AlgorithmProject; + isParticipant?: boolean; + isBuiltIn?: boolean; + onPublishClick: (algorithm: Algorithm) => void; + onUnpublishClick: (algorithm: Algorithm) => void; + onReleaseClick: () => void; + onDeleteClick: (algorithm: Algorithm) => void; + onDownloadClick: (algorithm: Algorithm) => void; +}; + +const VersionsTab: FC<Props> = ({ + id, + detail, + isParticipant, + isBuiltIn, + onPublishClick, + onUnpublishClick, + onReleaseClick, + onDeleteClick, + onDownloadClick, +}) => { + const projectId = useGetCurrentProjectId(); + const participantId = useGetCurrentProjectParticipantId(); + const listQuery = useQuery( + ['fetchAlgorithmList', projectId, participantId, id], + () => { + if (isParticipant) { + return fetchPeerAlgorithmList(projectId, participantId, { algorithm_project_uuid: id! }); + } + return fetchAlgorithmList(isBuiltIn ? 0 : projectId, { algo_project_id: id! }); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const [previewAlgorithm, setPreviewAlgorithm] = useState<Algorithm>(); + + const formattedAlgorithms = useMemo(() => { + if (!listQuery.data?.data) { + return []; + } + return listQuery.data?.data; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [listQuery.data?.data]); + + if (!detail) { + return null; + } + return ( + <> + <Table + className="custom-table custom-table-left-side-filter" + hover={false} + data={formattedAlgorithms} + noDataElement={ + <Empty + description={ + <> + 暂无已发版的算法版本. 
+ {!isBuiltIn && ( + <Button + className={styled.version_list_button} + size="small" + type="text" + style={{ marginLeft: '0.2em' }} + onClick={() => { + onReleaseClick(); + }} + > + 去发版 + </Button> + )} + </> + } + /> + } + columns={getTableProps({ + onPreviewClick, + onPublishClick, + onUnpublishClick, + onDeleteClick, + onDownloadClick, + algorithmProjectDetail: detail, + isParticipant: isParticipant, + })} + rowKey="uuid" + /> + <Drawer + closable + onCancel={() => setPreviewAlgorithm(undefined)} + width={1200} + visible={Boolean(previewAlgorithm)} + title={`算法版本 V${previewAlgorithm?.version}`} + > + <AlgorithmInfo isParticipant={isParticipant} type="algorithm" detail={previewAlgorithm} /> + </Drawer> + </> + ); + + function onPreviewClick(algorithm: Algorithm) { + setPreviewAlgorithm(algorithm); + } +}; + +export default VersionsTab; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/index.module.less b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/index.module.less new file mode 100644 index 000000000..1caf03a45 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/index.module.less @@ -0,0 +1,43 @@ +.version_list_button { + margin-left: -5px; + padding-left: 5px; + padding-right: 5px; +} + +.padding_container { + padding: 20px 20px 0; +} + +.styled_name { + margin-top: 0; + margin-bottom: -3px; + font-size: 16px; + font-weight: 600; + line-height: 24px; +} + +.comment { + font-size: 12px; + color: var(--textColorSecondary); +} + +.content { + padding: 0 20px; +} + +.header_col { + margin-top: 9px; + text-align: right; +} + +.styled_version_amount_tag { + margin-left: 6px; + border-radius: 10px; + height: 20px; + line-height: 20px; + transform: translateY(-2px); +} + +.styled_avatar { + display: inline-block; +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/index.tsx new file mode 100644 index 000000000..06614f18f --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmDetail/index.tsx @@ -0,0 +1,387 @@ +import React, { FC, useState, useMemo, useEffect } from 'react'; +import { Grid, Button, Space, Tabs, Tag, Message } from '@arco-design/web-react'; +import { useHistory, useParams, Redirect } from 'react-router-dom'; +import { forceToRefreshQuery } from 'shared/queryClient'; +import { useMutation, useQuery } from 'react-query'; + +import VersionsTab from './VersionsTab'; +import AlgorithmInfo from 'components/AlgorithmDrawer/AlgorithmInfo'; + +import BackButton from 'components/BackButton'; +import MoreActions from 'components/MoreActions'; +import PropertyList from 'components/PropertyList'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { + fetchAlgorithmList, + fetchPeerAlgorithmList, + fetchPeerAlgorithmProjectById, + fetchProjectDetail, + getFullAlgorithmDownloadHref, + postPublishAlgorithm, + publishAlgorithm, +} from 'services/algorithm'; +import { formatTimestamp } from 'shared/date'; +import { + Algorithm, + EnumAlgorithmProjectSource, + AlgorithmReleaseStatus, + AlgorithmVersionStatus, +} from 'typings/algorithm'; +import showAlgorithmSendingModal from '../AlgorithmSendModal'; +import AlgorithmType from 'components/AlgorithmType'; + +import styled from './index.module.less'; +import { AlgorithmManagementTabType } from 'typings/modelCenter'; +import { Avatar, deleteConfirm, unpublishConfirm } from '../shared'; +import { CONSTANTS } 
from 'shared/constants'; +import request from 'libs/request'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantId } from 'hooks'; + +const { Row, Col } = Grid; + +enum AlgorithmDetailTabType { + FILES = 'files', + VERSIONS = 'versions', +} + +enum algorithmDetailType { + MY = 'my', + BUILT_IN = 'built-in', + PARTICIPANT = 'participant', +} + +type TRouteParams = { + id: string; + tabType: AlgorithmDetailTabType; + algorithmDetailType: algorithmDetailType; +}; + +const AlgorithmDetail: FC = () => { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const participantId = useGetCurrentProjectParticipantId(); + const params = useParams<TRouteParams>(); + const [activeKey, setActiveKey] = useState<AlgorithmDetailTabType>(); + const [algoNumber, setAlgoNumber] = useState<number>(0); + const isParticipant = params.algorithmDetailType === algorithmDetailType.PARTICIPANT; + const isBuiltIn = params.algorithmDetailType === algorithmDetailType.BUILT_IN; + const queryKeys = ['algorithmDetail', params.id]; + + const detailQuery = useQuery( + queryKeys, + () => { + if (isParticipant) { + return fetchPeerAlgorithmProjectById(projectId, participantId, params.id).then( + (res) => res.data, + ); + } + return fetchProjectDetail(params.id).then((res) => res.data); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const listQuery = useQuery( + ['fetchAlgorithmList', projectId, participantId, params.id], + () => { + if (isParticipant) { + return fetchPeerAlgorithmList(projectId, participantId, { + algorithm_project_uuid: params.id, + }); + } + return fetchAlgorithmList(isBuiltIn ? 0 : projectId, { algo_project_id: params.id }); + }, + { + retry: 2, + refetchOnWindowFocus: false, + onSuccess(res) { + if (res.data) { + setAlgoNumber(res.data.length); + } + }, + }, + ); + + const detail = detailQuery.data; + + const publishMutation = useMutation( + (comment: string) => { + return postPublishAlgorithm(params.id, comment); + }, + { + onSuccess() { + if (activeKey === AlgorithmDetailTabType.FILES) { + onTabChange(AlgorithmDetailTabType.VERSIONS); + } else { + forceToRefreshQuery([...queryKeys] as string[]); + } + }, + }, + ); + + const displayedProps = useMemo( + () => [ + { + value: detail?.type ? ( + <div style={{ marginTop: '-4px' }}> + <AlgorithmType type={detail.type} /> + </div> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + label: '类型', + }, + { + value: detail?.username || CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建者', + hidden: isParticipant, + }, + { + value: detail?.updated_at + ? formatTimestamp(detail.updated_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '更新时间', + }, + { + value: detail?.created_at + ? formatTimestamp(detail.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建时间', + }, + ], + [detail, isParticipant], + ); + + const algorithms = useMemo(() => { + if (!listQuery.data) { + return []; + } + return listQuery.data.data || []; + }, [listQuery.data]); + + useEffect(() => { + setActiveKey(params.tabType); + }, [params.tabType]); + + let tabContent: React.ReactNode; + + switch (params.tabType) { + case AlgorithmDetailTabType.FILES: + tabContent = <AlgorithmInfo type="algorithm_project" detail={detail} />; + break; + case AlgorithmDetailTabType.VERSIONS: + tabContent = detail ? 
( + <VersionsTab + onPublishClick={handleAlgorithmPublish} + onUnpublishClick={handleAlgorithmVersionUnpublish} + onReleaseClick={onRelease} + onDeleteClick={handleAlgorithmVersionDelete} + onDownloadClick={handleAlgorithmVersionDownload} + detail={detail} + id={params.id} + isParticipant={isParticipant} + isBuiltIn={isBuiltIn} + /> + ) : null; + break; + default: + tabContent = null; + break; + } + + return ( + <SharedPageLayout + title={ + <BackButton + onClick={() => history.push(`/algorithm-management/${AlgorithmManagementTabType.MY}`)} + > + {'算法仓库'} + </BackButton> + } + cardPadding={0} + > + <div className={styled.padding_container}> + <Row> + <Col span={12}> + <Space size="medium"> + <Avatar + data-name={detail?.name ? detail.name.slice(0, 1) : CONSTANTS.EMPTY_PLACEHOLDER} + /> + <div> + <h3 className={styled.styled_name}>{detail?.name ?? '....'}</h3> + <Space className={styled.comment}> + {renderAlgorithmStatus(detail)} + {detail?.comment ?? CONSTANTS.EMPTY_PLACEHOLDER} + </Space> + </div> + </Space> + </Col> + <Col className={styled.header_col} span={12}> + {isParticipant ? ( + <></> + ) : ( + <Space> + <Button + type="primary" + disabled={detail?.source !== EnumAlgorithmProjectSource.USER} + onClick={() => { + history.push(`/algorithm-management/edit?id=${detail?.id}`); + }} + > + {'编辑'} + </Button> + {detail?.release_status === AlgorithmReleaseStatus.UNRELEASED && ( + <Button + loading={publishMutation.isLoading} + onClick={onRelease} + disabled={detail?.source !== EnumAlgorithmProjectSource.USER} + > + {'发版'} + </Button> + )} + <MoreActions + actionList={[ + { + label: '发布最新版本', + onClick: () => { + if (algorithms.length > 0) { + onPublishAlgorithm(algorithms[0]); + } + }, + disabled: + detail?.latest_version === 0 || + detail?.source !== EnumAlgorithmProjectSource.USER || + algorithms.length === 0 || + algorithms[0]?.status === AlgorithmVersionStatus.PUBLISHED, + }, + ]} + /> + </Space> + )} + </Col> + </Row> + <PropertyList cols={6} colProportions={[1, 1, 1, 1]} properties={displayedProps} /> + </div> + <Tabs activeTab={activeKey} onChange={onTabChange}> + {isParticipant ? ( + <></> + ) : ( + <Tabs.TabPane title={'算法文件'} key={AlgorithmDetailTabType.FILES} /> + )} + <Tabs.TabPane + title={ + <> + {'版本列表'}{' '} + <Tag + color={activeKey === AlgorithmDetailTabType.VERSIONS ? 'arcoblue' : ''} + className={styled.styled_version_amount_tag} + > + {algoNumber} + </Tag> + </> + } + key={AlgorithmDetailTabType.VERSIONS} + /> + </Tabs> + <div className={styled.content}>{tabContent}</div> + {!params.tabType ? 
( + <Redirect + to={`/algorithm-management/detail/${params.id}/${AlgorithmDetailTabType.FILES}`} + /> + ) : null} + </SharedPageLayout> + ); + + function renderAlgorithmStatus(detail: any) { + if (isParticipant || isBuiltIn) { + return <></>; + } + + if (detail?.release_status === AlgorithmReleaseStatus.RELEASED) { + return ( + <Tag size="small" color="green"> + 已发版 + </Tag> + ); + } + + return ( + <Tag size="small" color="orange"> + 未发版 + </Tag> + ); + } + + function onTabChange(val: string) { + setActiveKey(val as AlgorithmDetailTabType); + detailQuery.refetch(); + history.replace( + `/algorithm-management/detail/${params.id}/${val}/${params.algorithmDetailType}`, + ); + } + + function onRelease() { + showAlgorithmSendingModal( + detail!, + async (comment: string) => { + return publishMutation.mutate(comment); + }, + () => {}, + true, + ); + } + + function refetchProjectDetail() { + listQuery.refetch(); + detailQuery.refetch(); + } + + function onPublishAlgorithm(algorithm: Algorithm) { + handleAlgorithmPublish(algorithm); + } + + function handleAlgorithmPublish(algorithm: Algorithm) { + showAlgorithmSendingModal( + algorithm, + (comment: string) => { + return publishAlgorithm(projectId, algorithm.id, { comment }).then((resp) => { + refetchProjectDetail(); + }); + }, + () => {}, + ); + } + + async function handleAlgorithmVersionUnpublish(algorithm: Algorithm) { + try { + await unpublishConfirm(projectId!, algorithm); + forceToRefreshQuery([...queryKeys] as string[]); + refetchProjectDetail(); + } catch (e) { + Message.error(e.message); + } + } + + async function handleAlgorithmVersionDelete(algorithm: Algorithm) { + try { + await deleteConfirm(algorithm, false); + forceToRefreshQuery([...queryKeys] as string[]); + refetchProjectDetail(); + } catch (e) { + Message.error(e.message); + } + } + + async function handleAlgorithmVersionDownload(algorithm: Algorithm) { + try { + const tip = await request.download(getFullAlgorithmDownloadHref(algorithm.id)); + tip && Message.info(tip); + } catch (error) { + Message.error(error.message); + } + } +}; + +export default AlgorithmDetail; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmForm/index.module.less b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmForm/index.module.less new file mode 100644 index 000000000..910b1efca --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmForm/index.module.less @@ -0,0 +1,69 @@ +.title_with_icon { + margin: -10px 0 10px 10px; +} + +.styled_form_item { + margin-left: 12px; + width: 50%; + + :global(.arco-form-item-symbol) { + margin-left: -16px; + } + + // note: 通过这种方式把 upload 组件上传进度隐藏 + :global(.arco-upload-list-status) { + display: none; + } +} + +.styled_big_text { + display: block; + margin-left: 12px; + margin-bottom: 12px; + font-size: 14px; +} + +.styled_icon_code_square { + font-size: 15px; +} + +.styled_footer_space { + margin-top: 40px; +} + +.styled_code_editor_entry { + position: relative; + width: 519px; + padding-top: 38px; + padding-bottom: 24px; + text-align: center; + cursor: pointer; + background: var(--color-fill-2); + transition: background 0.2s; + &:hover { + background: var(--color-fill-2); + } +} + +.styled_status_row { + margin-top: 4px; + line-height: 2; + font-size: 12px; + color: var(--color-text-3); +} + +.styled_unsaved_tag { + margin-right: 8px; + color: rgb(var(--green-5)); + font-size: 12px; + font-weight: 400; + background-color: #fff; +} + +.styled_saved_tag { + margin-right: 8px; + color: var(--color-text-1); + 
font-size: 12px; + font-weight: 400; + background-color: #fff; +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmForm/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmForm/index.tsx new file mode 100644 index 000000000..9e898018b --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmForm/index.tsx @@ -0,0 +1,433 @@ +import React, { FC, useCallback, useEffect, useState } from 'react'; +import { + Typography, + RulesProps, + Message, + Select, + Upload, + Button, + Space, + Input, + Form, + Tag, +} from '@arco-design/web-react'; +import { useParams, useHistory } from 'react-router-dom'; +import { IconCodeSquare, IconInfoCircle } from '@arco-design/web-react/icon'; +import { useRecoilValue } from 'recoil'; +import { useMutation } from 'react-query'; +import { useGetAppFlagValue, useUrlState } from 'hooks/index'; +import SharedPageLayout, { FormHeader } from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import CodeEditorModal from 'components/CodeEditorModal'; +import { + fetchProjectDetail, + createProject, + patchProject, + postPublishAlgorithm, +} from 'services/algorithm'; +import { FlagKey } from 'typings/flag'; +import showSendModal from '../AlgorithmSendModal'; +import { projectState } from 'stores/project'; +import ParamsInput from '../AlgorithmParamsInput'; +import AlgorithmType from 'components/AlgorithmType'; +import { AlgorithmTypeOptions } from '../shared'; +import { AlgorithmProject, EnumAlgorithmProjectType } from 'typings/algorithm'; +import { AlgorithmManagementTabType } from 'typings/modelCenter'; +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; +import { UploadItem } from '@arco-design/web-react/es/Upload'; +import { useIsFormValueChange } from 'hooks'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import TitleWithIcon from 'components/TitleWithIcon'; +import styled from './index.module.less'; + +enum FormField { + NAME = 'name', + COMMENT = 'comment', + ALGORITHM_TYPE = 'type', + Files = 'file', + PARAMETER = 'parameter', +} + +const RULES: Record<FormField, RulesProps[]> = { + [FormField.NAME]: [ + { required: true, message: '算法名称不能为空' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ], + [FormField.ALGORITHM_TYPE]: [{ required: true }], + [FormField.Files]: [ + { + required: false, + validator(fileList: UploadItem[], callback) { + const file = fileList[0]; + if (!file) { + callback(undefined); + return; + } + + const isOverSize = file.originFile?.size && file.originFile.size > 100 * 1024 * 1024; // 100M + callback(isOverSize ? 
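+          // a non-empty message marks the file invalid; callback(undefined) lets it pass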
'大小超过限制' : undefined); + }, + }, + ], + [FormField.COMMENT]: [ + { + maxLength: MAX_COMMENT_LENGTH, + message: '最多为 200 个字符', + }, + ], + [FormField.PARAMETER]: [{ required: false }], +}; + +const defaultValue: Record<FormField, any> = { + [FormField.NAME]: '', + [FormField.ALGORITHM_TYPE]: EnumAlgorithmProjectType.NN_HORIZONTAL, + [FormField.Files]: [], + [FormField.COMMENT]: '', + [FormField.PARAMETER]: [], +}; + +type TMutationParams<T, K = AlgorithmProject> = { + id: ID; + payload: T; + project?: K; + shouldPublish?: boolean; +}; + +const AlgorithmForm: FC = () => { + const [form] = Form.useForm(); + const history = useHistory(); + const [urlState] = useUrlState(); + const { action } = useParams<{ action: 'edit' | 'create' }>(); + const [project, setProject] = useState<AlgorithmProject>(); + const [codeEditorVisible, setCodeEditorVisible] = useState<boolean>(false); + const [codeEditorTouched, setCodeEditorTouched] = useState<boolean>(false); + const selectedProject = useRecoilValue(projectState); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + const trusted_computing_enabled = useGetAppFlagValue(FlagKey.TRUSTED_COMPUTING_ENABLED); + + const isEdit = action === 'edit'; + if (trusted_computing_enabled) { + AlgorithmTypeOptions.push({ + label: '可信计算', + value: EnumAlgorithmProjectType.TRUSTED_COMPUTING, + }); + } + + const createProjectMutation = useMutation( + async ({ id, payload, project, shouldPublish }: TMutationParams<FormData>) => { + if (shouldPublish) { + await publishAlgorithmWrap(project!, () => + createProject(id, payload as FormData).then((res) => res.data), + ); + Message.success('创建并发版成功'); + } else { + await createProject(id, payload as FormData); + Message.success('创建成功'); + } + return project; + }, + { + onSuccess: () => { + goBackProjectList(); + }, + onError(e: any) { + if (e.code === 409) { + Message.error('算法名称已存在'); + } else { + Message.error(e.message); + } + }, + }, + ); + const updateProjectMutation = useMutation( + async (params: TMutationParams<Partial<AlgorithmProject>>) => { + const { id, payload, shouldPublish } = params; + const action = () => + patchProject(id, { + comment: payload.comment, + parameter: payload.parameter, + } as Partial<AlgorithmProject>).then((res) => res.data); + + if (shouldPublish) { + await publishAlgorithmWrap(project!, action); + Message.success('编辑并发版成功'); + } else { + await action(); + Message.success('编辑成功'); + } + return { ...project, ...payload }; + }, + { + onSuccess: () => { + goBackProjectList(); + }, + }, + ); + const goBackProjectList = useCallback( + (replace = false) => { + const url = `/algorithm-management/${AlgorithmManagementTabType.MY}`; + return replace ? history.replace(url) : history.push(url); + }, + [history], + ); + + useEffect(() => { + if (!isEdit) { + form.setFieldsValue(defaultValue); + return; + } + if (!urlState.id) { + goBackProjectList(true); + return; + } + fetchProjectDetail(urlState.id) + .then(({ data }) => { + setProject(data); + form.setFieldsValue({ + [FormField.NAME]: data.name, + [FormField.ALGORITHM_TYPE]: data.type, + [FormField.COMMENT]: data.comment, + [FormField.PARAMETER]: + (data.parameter?.variables ?? []).length > 0 + ? 
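+              // use the saved variables when the project has any, otherwise the empty default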
data.parameter!.variables + : defaultValue[FormField.PARAMETER], + }); + }) + .catch(() => {}); + }, [form, goBackProjectList, isEdit, urlState.id]); + + // if the code editor is visible, try to prevent user from closing the page + useEffect(() => { + const handler = (e: Event) => { + const msg = 'Are you sure to leave?'; + e.preventDefault(); + // @ts-ignore + e.returnValue = msg; + return msg; + }; + if (codeEditorVisible) { + window.addEventListener('beforeunload', handler); + } + + return () => { + window.removeEventListener('beforeunload', handler); + }; + }, [codeEditorVisible]); + + return ( + <SharedPageLayout + title={ + <BackButton + onClick={goBackProjectList} + isShowConfirmModal={isFormValueChanged || codeEditorTouched} + > + {'算法仓库'} + </BackButton> + } + > + <FormHeader>{isEdit ? '编辑算法' : '创建算法'}</FormHeader> + <Form form={form} layout="vertical" onChange={onFormValueChange}> + <Typography.Text className={styled.styled_big_text} bold> + {'基本信息'} + </Typography.Text> + <Form.Item + className={styled.styled_form_item} + field={FormField.NAME} + label={'算法名称'} + rules={RULES[FormField.NAME]} + > + {isEdit ? ( + <Typography.Text>{project?.name}</Typography.Text> + ) : ( + <Input readOnly={isEdit} placeholder={'请输入算法名称'} /> + )} + </Form.Item> + <Form.Item + className={styled.styled_form_item} + field={FormField.COMMENT} + label={'算法描述'} + rules={RULES[FormField.COMMENT]} + > + <Input.TextArea rows={2} placeholder={'最多为 200 个字符'} /> + </Form.Item> + <Form.Item + className={styled.styled_form_item} + field={FormField.ALGORITHM_TYPE} + label={'算法类型'} + rules={RULES[FormField.ALGORITHM_TYPE]} + > + {isEdit && project?.type ? ( + <AlgorithmType type={project.type} /> + ) : ( + <Select options={AlgorithmTypeOptions} /> + )} + </Form.Item> + <TitleWithIcon + title="选择纵向联邦-NN模型时,代码层级的首层必须为leader和follower两个文件夹" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + className={styled.title_with_icon} + /> + {!isEdit ? ( + <Form.Item + className={styled.styled_form_item} + field={FormField.Files} + label={'算法文件'} + rules={RULES[FormField.Files]} + > + <Upload + drag + multiple={false} + limit={1} + accept=".gz,.tar" + tip={'仅支持上传1个 .tar 或 .gz 格式文件,大小不超过 100 MiB'} + /> + </Form.Item> + ) : ( + <Form.Item className={styled.styled_form_item} label={'算法文件'}> + <div + className={styled.styled_code_editor_entry} + onClick={() => { + setCodeEditorVisible(true); + }} + > + <div> + <IconCodeSquare className={styled.styled_icon_code_square} /> + <br /> + <Typography.Text bold>{'代码编辑器'}</Typography.Text> + </div> + <div className={styled.styled_status_row}> + <Tag + className={ + codeEditorTouched ? styled.styled_unsaved_tag : styled.styled_saved_tag + } + > + {codeEditorTouched ? 
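+                    // touched = the user has opened (and closed) the code editor at least once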
'已保存' : '未编辑'} + </Tag> + {'点击进入代码编辑器'} + </div> + </div> + </Form.Item> + )} + <Typography.Text className={styled.styled_big_text} bold> + {'超参数'} + </Typography.Text> + <Form.Item + className={styled.styled_form_item} + field={FormField.PARAMETER} + style={{ marginLeft: 12, width: '80%', minWidth: 800 }} + > + <ParamsInput /> + </Form.Item> + + <Form.Item className={styled.styled_form_item} label=""> + <Space className={styled.styled_footer_space}> + <Button + loading={createProjectMutation.isLoading || updateProjectMutation.isLoading} + onClick={() => submitForm()} + type="primary" + > + {'提交'} + </Button> + <Button + loading={createProjectMutation.isLoading || updateProjectMutation.isLoading} + onClick={() => submitForm(true)} + type="primary" + > + {'提交并发版'} + </Button> + <ButtonWithModalConfirm + onClick={goBackProjectList} + isShowConfirmModal={isFormValueChanged || codeEditorTouched} + > + {'取消'} + </ButtonWithModalConfirm> + </Space> + </Form.Item> + </Form> + {project ? ( + <CodeEditorModal.AlgorithmProject + isAsyncMode={true} + id={project.id} + visible={codeEditorVisible} + title={project.name} + onClose={() => { + setCodeEditorVisible(false); + setCodeEditorTouched(true); + }} + /> + ) : null} + </SharedPageLayout> + ); + + async function submitForm(shouldPublish = false) { + const projectId = urlState.id; + await form.validate(); + const values = form.getFieldsValue(); + const parameter = { + variables: values[FormField.PARAMETER].filter((item: any) => item.name), + }; + + if (isEdit) { + updateProjectMutation.mutate({ + id: projectId, + shouldPublish, + payload: { + ...values, + [FormField.PARAMETER]: parameter, + } as any, + }); + } else { + if (!selectedProject.current?.id) { + Message.info('请选择工作区'); + return; + } + const file = values[FormField.Files]?.[0]?.originFile; + const formData = new FormData(); + formData.append(FormField.NAME, values[FormField.NAME]); + if (file) { + formData.append(FormField.Files, file); + } + formData.append(FormField.ALGORITHM_TYPE, values[FormField.ALGORITHM_TYPE]); + formData.append(FormField.PARAMETER, JSON.stringify(parameter)); + formData.append(FormField.COMMENT, values[FormField.COMMENT]); + + await createProjectMutation.mutate({ + id: selectedProject.current?.id, + shouldPublish, + project: values as AlgorithmProject, + payload: formData, + }); + } + } + + function publishAlgorithmWrap( + algorithm: AlgorithmProject, + beforePublish: () => Promise<AlgorithmProject>, + ) { + return new Promise((resolve, reject) => { + showSendModal( + algorithm, + async (comment: string) => { + try { + const res = await beforePublish(); + await postPublishAlgorithm(res.id, comment); + resolve(''); + } catch (e) { + reject(e); + } + }, + () => { + reject(null); + }, + true, + ); + }); + } +}; + +export default AlgorithmForm; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/AlgorithmTable/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/AlgorithmTable/index.tsx new file mode 100644 index 000000000..10b547d1a --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/AlgorithmTable/index.tsx @@ -0,0 +1,310 @@ +import React, { FC } from 'react'; +import { Link } from 'react-router-dom'; +import { Table, Empty, PaginationProps, Space } from '@arco-design/web-react'; +import { SorterResult } from '@arco-design/web-react/es/Table/interface'; +import { formatTimestamp } from 'shared/date'; +import { CONSTANTS } from 'shared/constants'; + +import AlgorithmType from 
'components/AlgorithmType'; +import MoreActions from 'components/MoreActions'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import { + AlgorithmProject, + AlgorithmReleaseStatus, + EnumAlgorithmProjectSource, +} from 'typings/algorithm'; +import { Participant } from 'typings/participant'; +import { + algorithmReleaseStatusFilters, + algorithmTypeFilters, + FILTER_ALGORITHM_MY_OPERATOR_MAPPER, +} from 'views/AlgorithmManagement/shared'; +import { expression2Filter } from 'shared/filter'; +import { getSortOrder } from 'views/Datasets/shared'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; + +type ColumnsGetterOptions = { + urlState: UrlState; + participant?: Participant[]; + onReleaseClick?: any; + onDeleteClick?: any; + onChangeClick?: any; + onPublishClick?: any; + onDownloadClick?: any; + withoutActions?: boolean; + isBuiltIn?: boolean; + isParticipant?: boolean; +}; + +interface UrlState { + [key: string]: any; +} + +const calcStateIndicatorProps = ( + state: AlgorithmReleaseStatus, + options: ColumnsGetterOptions, +): { type: StateTypes; text: string; tip?: string } => { + let text = CONSTANTS.EMPTY_PLACEHOLDER; + let type = 'default' as StateTypes; + const tip = ''; + + switch (state) { + case AlgorithmReleaseStatus.UNRELEASED: + text = '未发版'; + type = 'gold'; + break; + case AlgorithmReleaseStatus.RELEASED: + text = '已发版'; + type = 'success'; + break; + default: + break; + } + + return { + text, + type, + tip, + }; +}; + +export const getTableColumns = (options: ColumnsGetterOptions) => { + const cols = [ + { + title: '名称', + dataIndex: 'name', + key: 'name', + width: 200, + ellipsis: true, + render: (name: any, record: any) => { + if (options.isParticipant) { + return ( + <Link to={`/algorithm-management/detail/${record.uuid}/versions/participant`}> + {name} + </Link> + ); + } + if (options.isBuiltIn) { + return ( + <Link to={`/algorithm-management/detail/${record.id}/files/built-in`}>{name}</Link> + ); + } + return <Link to={`/algorithm-management/detail/${record.id}/files/my`}>{name}</Link>; + }, + }, + !options.isBuiltIn && + !options.isParticipant && + ({ + title: '状态', + dataIndex: 'release_status', + name: 'release_status', + width: 150, + ...algorithmReleaseStatusFilters, + filteredValue: expression2Filter(options.urlState.filter).release_status, + render: (state: AlgorithmReleaseStatus, record: any) => { + return <StateIndicator {...calcStateIndicatorProps(state, options)} />; + }, + } as any), + { + title: '类型', + dataIndex: 'type', + name: 'type', + width: 150, + ...algorithmTypeFilters, + filteredValue: expression2Filter(options.urlState.filter).type, + render(_: any, record: any) { + return <AlgorithmType type={record.type} />; + }, + }, + options.isParticipant && { + title: '合作伙伴名称', + dataIndex: 'participant_id', + name: 'participant_id', + width: 200, + render(_: any, record: any) { + let result: any = undefined; + if (Array.isArray(options.participant) && options.participant.length !== 0) { + result = options.participant.find((item) => item.id === record.participant_id); + } + return <span>{result ? 
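+          // show the matching participant's name, or a dash when none matches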
result.name : '-'}</span>; + }, + }, + { + title: '更新时间', + dataIndex: 'updated_at', + name: 'updated_at', + width: 200, + sortOrder: getSortOrder(options.urlState, 'updated_at'), + sorter(a: AlgorithmProject, b: AlgorithmProject) { + return a.updated_at - b.updated_at; + }, + render: (date: number) => <div>{formatTimestamp(date * 1000)}</div>, + }, + ].filter(Boolean); + + if (!options.withoutActions) { + cols.push({ + title: '操作', + dataIndex: 'operation', + name: 'operation', + fixed: 'right', + width: 150, + render: (_: number, record: AlgorithmProject) => ( + <Space> + <button + className="custom-text-button" + onClick={() => { + options?.onReleaseClick?.(record); + }} + disabled={ + record.source !== EnumAlgorithmProjectSource.USER || + record.release_status === AlgorithmReleaseStatus.RELEASED + } + > + {'发版'} + </button> + <button + className="custom-text-button" + onClick={() => { + options?.onChangeClick?.(record); + }} + > + 编辑 + </button> + <MoreActions + actionList={[ + { + label: '发布最新版本', + disabled: + record.latest_version === 0 || + record.source === EnumAlgorithmProjectSource.THIRD_PARTY, + onClick() { + options?.onPublishClick?.(record); + }, + }, + { + label: '删除', + onClick() { + options?.onDeleteClick?.(record); + }, + danger: true, + }, + ]} + /> + </Space> + ), + } as any); + } + + return cols; +}; + +type Props = { + data: AlgorithmProject[]; + loading: boolean; + isBuiltIn?: boolean; + isParticipant?: boolean; + noDataElement?: string; + participant?: Participant[]; + pagination?: PaginationProps | boolean; + urlState?: UrlState; + setUrlState?: (newState: any) => void; + onReleaseClick?: (record: any) => void; + onPublishClick?: (record: any) => void; + onChangeClick?: (record: any) => void; + onDeleteClick?: (record: any) => void; + onDownloadClick?: (record: any) => void; + onShowSizeChange?: (current: number, size: number) => void; + onPageChange?: (page: number, pageSize: number) => void; +}; +const AlgorithmTable: FC<Props> = ({ + data, + urlState = {}, + setUrlState, + loading, + isBuiltIn, + isParticipant, + participant, + noDataElement, + pagination, + onReleaseClick, + onPublishClick, + onChangeClick, + onDeleteClick, + onDownloadClick, + onPageChange, +}) => { + return ( + <Table + className="custom-table custom-table-left-side-filter" + data={data} + rowKey="uuid" + loading={loading} + scroll={{ x: '100%' }} + pagination={pagination} + noDataElement={<Empty description={noDataElement} />} + onChange={handleChange} + columns={getTableColumns({ + urlState, + onReleaseClick, + onPublishClick, + onDeleteClick, + onChangeClick, + onDownloadClick, + isBuiltIn, + isParticipant, + participant, + withoutActions: isBuiltIn || isParticipant, + })} + /> + ); + + function handleChange( + pagination: PaginationProps, + sorter: SorterResult, + filters: any, + extra: any, + ) { + const { action } = extra; + + switch (action) { + case 'paginate': + onPageChange && onPageChange(pagination.current as number, pagination.pageSize as number); + break; + case 'filter': + onFilterChange && onFilterChange(filters); + break; + case 'sort': + onSortChange && onSortChange(sorter); + } + } + + function onFilterChange(filters: any) { + setUrlState && + setUrlState((prevState: any) => ({ + ...prevState, + filter: filterExpressionGenerator( + { + ...filters, + name: expression2Filter(urlState.filter).name, + }, + FILTER_ALGORITHM_MY_OPERATOR_MAPPER, + ), + page: 1, + })); + } + + function onSortChange(sorter: SorterResult) { + let orderValue = ''; + if (sorter.direction) { + 
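+      // map Arco's 'ascend'/'descend' onto the 'asc'/'desc' keywords used in order_by, e.g. "updated_at desc"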
orderValue = sorter.direction === 'ascend' ? 'asc' : 'desc'; + } + setUrlState && + setUrlState((prevState: any) => ({ + ...prevState, + order_by: orderValue ? `${sorter.field} ${orderValue}` : '', + })); + } +}; + +export default AlgorithmTable; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/BuiltInAlgorithmTab/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/BuiltInAlgorithmTab/index.tsx new file mode 100644 index 000000000..b9cacff55 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/BuiltInAlgorithmTab/index.tsx @@ -0,0 +1,129 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useMutation, useQuery } from 'react-query'; + +import { EnumAlgorithmProjectSource } from 'typings/algorithm'; +import { fetchProjectList, updatePresetAlgorithm } from 'services/algorithm'; +import { TIME_INTERVAL } from 'shared/constants'; +import AlgorithmTable from '../AlgorithmTable'; +import { + useGetCurrentProjectParticipantList, + useTablePaginationWithUrlState, + useUrlState, +} from 'hooks'; +import GridRow from 'components/_base/GridRow'; +import { Button, Input, Message, Tooltip } from '@arco-design/web-react'; +import { useIsAdminRole } from 'hooks/user'; +import { expression2Filter } from 'shared/filter'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { FILTER_ALGORITHM_MY_OPERATOR_MAPPER } from 'views/AlgorithmManagement/shared'; + +export const LIST_QUERY_KEY = 'PresetAlgorithmProjects'; + +const BuiltInAlgorithmTab: FC = () => { + const participantList = useGetCurrentProjectParticipantList(); + const isAdminRole = useIsAdminRole(); + + const [total, setTotal] = useState(0); + const [pageTotal, setPageTotal] = useState(0); + const { paginationProps } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: '', + order_by: '', + }); + + const listQuery = useQuery( + [LIST_QUERY_KEY, urlState], + () => + fetchProjectList(0, { + ...urlState, + sources: EnumAlgorithmProjectSource.PRESET, + }), + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + onSuccess: (res) => { + const { page_meta } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + setPageTotal(page_meta?.total_pages ?? 0); + }, + }, + ); + + const updatePresetAlgorithmMutation = useMutation( + (payload: any) => { + return updatePresetAlgorithm(payload); + }, + { + onSuccess() { + Message.success('更新预置算法成功'); + listQuery.refetch(); + }, + onError(e: any) { + Message.error(e.message); + }, + }, + ); + + const pagination = useMemo(() => { + return pageTotal <= 1 + ? false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + return ( + <> + <GridRow justify="space-between" align="center"> + <Tooltip content="只有管理员才能更新预置算法" disabled={isAdminRole}> + <Button + className="custom-operation-button" + type="primary" + onClick={onUpdateClick} + loading={updatePresetAlgorithmMutation.isLoading} + disabled={!isAdminRole} + > + 更新预置算法 + </Button> + </Tooltip> + + <Input.Search + className="custom-input" + allowClear + defaultValue={expression2Filter(urlState.filter).name} + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder="输入算法名称" + /> + </GridRow> + <AlgorithmTable + loading={listQuery.isFetching} + data={listQuery.data?.data ?? 
[]} + urlState={urlState} + setUrlState={setUrlState} + isBuiltIn={true} + pagination={pagination} + noDataElement="暂无算法" + participant={participantList ?? []} + /> + </> + ); + + function onSearch(value: string) { + const filters = expression2Filter(urlState.filter); + filters.name = value; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filters, FILTER_ALGORITHM_MY_OPERATOR_MAPPER), + })); + } + function onUpdateClick() { + updatePresetAlgorithmMutation.mutate({}); + } +}; + +export default BuiltInAlgorithmTab; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/MyAlgorithmTab/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/MyAlgorithmTab/index.tsx new file mode 100644 index 000000000..569af1b5c --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/MyAlgorithmTab/index.tsx @@ -0,0 +1,197 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useHistory } from 'react-router'; +import { useQuery } from 'react-query'; + +import request from 'libs/request'; +import { + fetchProjectDetail, + fetchProjectList, + postPublishAlgorithm, + getFullAlgorithmProjectDownloadHref, + publishAlgorithm, +} from 'services/algorithm'; +import { forceToRefreshQuery } from 'shared/queryClient'; +import { TIME_INTERVAL } from 'shared/constants'; + +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useTablePaginationWithUrlState, + useUrlState, +} from 'hooks'; + +import { Button, Input, Message } from '@arco-design/web-react'; +import { IconPlus } from '@arco-design/web-react/icon'; +import GridRow from 'components/_base/GridRow'; +import AlgorithmTable from '../AlgorithmTable'; +import showSendModal from '../../AlgorithmSendModal'; +import { AlgorithmProject, EnumAlgorithmProjectSource } from 'typings/algorithm'; +import { + deleteConfirm, + FILTER_ALGORITHM_MY_OPERATOR_MAPPER, +} from 'views/AlgorithmManagement/shared'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { expression2Filter } from 'shared/filter'; + +export const LIST_QUERY_KEY = 'my_algorithm_list_query'; + +const MyAlgorithmTab: FC = () => { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const participantList = useGetCurrentProjectParticipantList(); + const [total, setTotal] = useState(0); + const [pageTotal, setPageTotal] = useState(0); + const { paginationProps } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: filterExpressionGenerator( + { + project_id: projectId, + }, + FILTER_ALGORITHM_MY_OPERATOR_MAPPER, + ), + order_by: '', + }); + const listQueryKey = [LIST_QUERY_KEY, projectId, urlState]; + const listQuery = useQuery( + listQueryKey, + () => { + if (!projectId) { + Message.info('请选择工作区'); + } + return fetchProjectList(projectId ?? 0, { + ...urlState, + sources: [EnumAlgorithmProjectSource.USER, EnumAlgorithmProjectSource.THIRD_PARTY], + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + onSuccess: (res) => { + const { page_meta } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + setPageTotal(page_meta?.total_pages ?? 0); + }, + }, + ); + + const pagination = useMemo(() => { + return pageTotal <= 1 + ? 
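+      // a single page of results needs no pager, so Arco's pagination is disabled outright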
false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + return ( + <> + <GridRow justify="space-between" align="center"> + <Button + className="custom-operation-button" + type="primary" + icon={<IconPlus />} + onClick={onCreateClick} + > + 创建算法 + </Button> + + <Input.Search + className="custom-input" + allowClear + defaultValue={expression2Filter(urlState.filter).name} + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder="输入算法名称" + /> + </GridRow> + <AlgorithmTable + data={listQuery.data?.data ?? []} + urlState={urlState} + setUrlState={setUrlState} + noDataElement="暂无算法,去创建" + loading={listQuery.isFetching} + participant={participantList ?? []} + pagination={pagination} + onReleaseClick={onReleaseClick} + onPublishClick={onPublishClick} + onChangeClick={onChangeClick} + onDeleteClick={onDeleteClick} + onDownloadClick={onDownloadClick} + /> + </> + ); + function onSearch(value: string) { + const filters = expression2Filter(urlState.filter); + filters.name = value; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filters, FILTER_ALGORITHM_MY_OPERATOR_MAPPER), + })); + } + + function onCreateClick() { + history.push('/algorithm-management/create'); + } + function onChangeClick(record: any) { + history.push(`/algorithm-management/edit?id=${record.id}`); + } + + function onReleaseClick(record: any) { + showSendModal( + record, + async (comment: string) => { + await postPublishAlgorithm(record.id, comment); + forceToRefreshQuery([...listQueryKey]); + Message.success('发版成功'); + }, + () => {}, + true, + ); + } + + function onPublishClick(record: AlgorithmProject) { + // indicate that there're not any algorithm + if (record.latest_version === 0) { + return; + } + showSendModal( + () => + fetchProjectDetail(record.id).then((res) => { + const latestAlgorithm = res.data?.algorithms?.[0]; + if (!latestAlgorithm) { + Message.error('没有算法'); + throw new Error('no algorithm'); + } + return latestAlgorithm; + }), + async (comment: string, algorithm) => { + await publishAlgorithm(projectId, algorithm.id, { comment }); + Message.success('发布成功'); + }, + () => {}, + false, + ); + } + + async function onDeleteClick(record: AlgorithmProject) { + try { + await deleteConfirm(record, true); + forceToRefreshQuery([...listQueryKey]); + } catch (e) { + Message.error(e.message); + } + } + async function onDownloadClick(record: AlgorithmProject) { + try { + const tip = await request.download(getFullAlgorithmProjectDownloadHref(record.id)); + tip && Message.info(tip); + } catch (error) { + Message.error(error.message); + } + } +}; + +export default MyAlgorithmTab; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/ParticipantAlgorithmTab/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/ParticipantAlgorithmTab/index.tsx new file mode 100644 index 000000000..60ed0d265 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/ParticipantAlgorithmTab/index.tsx @@ -0,0 +1,107 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; + +import { fetchPeerAlgorithmProjectList } from 'services/algorithm'; +import { TIME_INTERVAL } from 'shared/constants'; +import { expression2Filter } from 'shared/filter'; +import { pageSplit } from '../../shared'; +import AlgorithmTable from '../AlgorithmTable'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useTablePaginationWithUrlState, + 
useUrlState, +} from 'hooks'; +import GridRow from 'components/_base/GridRow'; +import { Input } from '@arco-design/web-react'; +import { FILTER_ALGORITHM_MY_OPERATOR_MAPPER } from 'views/AlgorithmManagement/shared'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; + +export const LIST_QUERY_KEY = 'PresetAlgorithmProjects'; + +const ParticipantAlgorithmTab: FC = () => { + const projectId = useGetCurrentProjectId(); + const participantList = useGetCurrentProjectParticipantList(); + const [total, setTotal] = useState(0); + const [pageTotal, setPageTotal] = useState(0); + const { paginationProps } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: filterExpressionGenerator( + { + project_id: projectId, + }, + FILTER_ALGORITHM_MY_OPERATOR_MAPPER, + ), + order_by: '', + }); + + const listQuery = useQuery( + [LIST_QUERY_KEY, projectId, urlState], + () => + fetchPeerAlgorithmProjectList(projectId, 0, { + filter: urlState.filter, + }), + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + onSuccess: (res) => { + setTotal((pre) => res.data?.length || pre); + setPageTotal(Math.ceil(res.data?.length / urlState.pageSize) ?? 0); + }, + }, + ); + + const pagination = useMemo(() => { + return pageTotal <= 1 + ? false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + const list = useMemo(() => { + if (!listQuery.data?.data) return []; + const { page, pageSize } = urlState; + return pageSplit(listQuery.data.data, page, pageSize); + }, [listQuery.data, urlState]); + + return ( + <> + <GridRow justify="end" align="center"> + <Input.Search + className="custom-input" + allowClear + defaultValue={expression2Filter(urlState.filter).name} + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder="输入算法名称" + /> + </GridRow> + <AlgorithmTable + loading={listQuery.isFetching} + data={list} + urlState={urlState} + setUrlState={setUrlState} + noDataElement="暂无算法,去创建" + isParticipant={true} + participant={participantList ?? 
[]} + pagination={pagination} + /> + </> + ); + + function onSearch(value: string) { + const filters = expression2Filter(urlState.filter); + filters.name = value; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filters, FILTER_ALGORITHM_MY_OPERATOR_MAPPER), + })); + } +}; + +export default ParticipantAlgorithmTab; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/index.module.less b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/index.module.less new file mode 100644 index 000000000..208c49ca5 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/index.module.less @@ -0,0 +1,11 @@ +.algorithm_tab{ + :global{ + .arco-tabs-header-nav-horizontal { + padding-left: 4px; + } + .arco-tabs-header-title { + margin-top: 4px; + margin-bottom: 4px; + } + } +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/index.tsx new file mode 100644 index 000000000..c355a496e --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmList/index.tsx @@ -0,0 +1,59 @@ +import React, { FC } from 'react'; +import { useParams, useHistory } from 'react-router'; + +import { AlgorithmManagementTabType } from 'typings/modelCenter'; + +import { Tabs } from '@arco-design/web-react'; +import SharedPageLayout, { RemovePadding } from 'components/SharedPageLayout'; +import MyAlgorithmTab from './MyAlgorithmTab'; +import BuiltInAlgorithmTab from './BuiltInAlgorithmTab'; +import ParticipantAlgorithmTab from './ParticipantAlgorithmTab'; +import { Redirect, Route } from 'react-router'; +import styled from './index.module.less'; + +const AlgorithmManagementList: FC = () => { + const history = useHistory(); + + const { tabType } = useParams<{ tabType: AlgorithmManagementTabType }>(); + if (!tabType) { + return <Redirect to={`/algorithm-management/${AlgorithmManagementTabType.MY}`} />; + } + return ( + <SharedPageLayout title="算法仓库"> + <RemovePadding style={{ height: 46 }}> + <Tabs className={styled.algorithm_tab} defaultActiveTab={tabType} onChange={onTabChange}> + <Tabs.TabPane title="我的算法" key={AlgorithmManagementTabType.MY} /> + <Tabs.TabPane title="预置算法" key={AlgorithmManagementTabType.BUILT_IN} /> + <Tabs.TabPane title="合作伙伴算法" key={AlgorithmManagementTabType.PARTICIPANT} /> + </Tabs> + </RemovePadding> + <Route + path={`/algorithm-management/${AlgorithmManagementTabType.MY}`} + exact + render={(props) => { + return <MyAlgorithmTab />; + }} + /> + <Route + path={`/algorithm-management/${AlgorithmManagementTabType.BUILT_IN}`} + exact + render={(props) => { + return <BuiltInAlgorithmTab />; + }} + /> + <Route + path={`/algorithm-management/${AlgorithmManagementTabType.PARTICIPANT}`} + exact + render={(props) => { + return <ParticipantAlgorithmTab />; + }} + /> + </SharedPageLayout> + ); + + function onTabChange(val: string) { + history.replace(`/algorithm-management/${val}`); + } +}; + +export default AlgorithmManagementList; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmParamsInput/index.module.less b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmParamsInput/index.module.less new file mode 100644 index 000000000..b29d3f0e8 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmParamsInput/index.module.less @@ -0,0 +1,78 @@ +.styled_container { + font-size: 12px; +} + +.styled_button 
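+// text button that appends a new hyperparameter row ("新增超参数")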
{ + margin-left: -5px; + padding-left: 5px; + padding-right: 5px; + font-size: 12px; + &.arco-btn-text:not(.arco-btn-disabled):not(.arco-btn-loading):hover { + background: transparent; + } +} + +.styled_required_text { + --height: 28px; + display: block; + position: relative; + height: var(--height); + line-height: var(--height); +} + +.styled_row_with_border { + --rightSpaceWidth: 30px; + position: relative; + margin-bottom: 7px; + padding-right: var(--rightSpaceWidth); + line-height: 28px; + &::after { + position: absolute; + left: 5px; + right: calc(var(--rightSpaceWidth) + 5px); + bottom: 0; + height: 1px; + background: var(--color-neutral-3); + content: ''; + } +} + +.styled_row_without_border { + --rightSpaceWidth: 30px; + position: relative; + margin-bottom: 6px; + padding-right: var(--rightSpaceWidth); + line-height: 28px; + &::after { + display: none; + position: absolute; + left: 5px; + right: calc(var(--rightSpaceWidth) + 5px); + bottom: 0; + height: 1px; + background: var(--color-neutral-3); + content: ''; + } +} + +.styled_radio_group { + display: flex; + width: 100%; + :global(.arco-radio-button) { + flex: 1; + } + :global(.arco-radio-button-inner) { + width: 100%; + text-align: center; + } +} + +.styled_delete_button { + position: absolute; + top: 0; + right: 0; + width: 30px; + &:not(.arco-btn-disabled) { + color: var(--color-text-1); + } +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmParamsInput/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmParamsInput/index.tsx new file mode 100644 index 000000000..b546ec68c --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmParamsInput/index.tsx @@ -0,0 +1,163 @@ +import React, { FC, useEffect, useState } from 'react'; +import { Grid, Input, Radio, Typography, Button } from '@arco-design/web-react'; +import { IconPlus, IconDelete } from '@arco-design/web-react/icon'; +import styled from './index.module.less'; +import { giveWeakRandomKey } from 'shared/helpers'; +import { AlgorithmParams } from 'typings/modelCenter'; + +type TProps = { + value?: AlgorithmParams[]; + defaultValue?: AlgorithmParams[]; + onChange?: (value: AlgorithmParams[]) => void; +}; + +const { Row, Col } = Grid; +const { Text } = Typography; +const { TextArea } = Input; + +const emptyRow: AlgorithmParams = { + name: '', + value: '', + display_name: '', + comment: '', + required: true, +}; + +type AlgorithmParamsState = { + id: string; + value: AlgorithmParams; +}; + +const AlgorithmParamsInput: FC<TProps> = ({ value, defaultValue, onChange }: TProps) => { + const [curValue, setCurValue] = useState<Array<AlgorithmParamsState>>( + getValue(value || defaultValue || []), + ); + // 用 value 是否有值判断是否受控 + const isControlled = Array.isArray(value); + + useEffect(() => { + if (isControlled) { + setCurValue(getValue(value || [])); + } + }, [value, isControlled]); + + return ( + <div className={styled.styled_container}> + <> + <Row gutter={10} className={styled.styled_row_with_border}> + <Col span={7}> + <Text className={styled.styled_required_text} type="secondary"> + {'名称'} + </Text> + </Col> + <Col span={7}> + <Text type="secondary">{'默认值'}</Text> + </Col> + <Col span={3}> + <Text type="secondary">{'是否必填'}</Text> + </Col> + <Col span={7}> + <Text type="secondary">{'提示语'}</Text> + </Col> + </Row> + {curValue.map(({ id, value: item }, index) => ( + <Row className={styled.styled_row_without_border} gutter={10} key={id}> + <Col span={7}> + <Input + placeholder={'请输入参数名称'} + 
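+                  // uncontrolled on purpose: the value is committed to state on blur via getFormHandler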
defaultValue={item.name} + type="text" + onBlur={getFormHandler(index, 'name')} + /> + </Col> + <Col span={7}> + <TextArea + rows={1} + placeholder={'请输入默认值'} + defaultValue={item.value} + onBlur={getFormHandler(index, 'value')} + /> + </Col> + <Col span={3}> + <Radio.Group + className={styled.styled_radio_group} + defaultValue={item.required} + type="button" + onChange={getFormHandler(index, 'required')} + > + <Radio value={true}>是</Radio> + <Radio value={false}>否</Radio> + </Radio.Group> + </Col> + <Col span={7}> + <TextArea + rows={1} + placeholder={'请输入提示语'} + defaultValue={item.comment} + onBlur={getFormHandler(index, 'comment')} + /> + </Col> + <Button + className={styled.styled_delete_button} + onClick={(e: Event) => { + e.preventDefault(); + e.stopPropagation(); + delRow(index); + }} + type="text" + size="small" + icon={<IconDelete />} + /> + </Row> + ))} + </> + <Button className={styled.styled_button} onClick={addRow} type="text" size="small"> + <IconPlus /> + {'新增超参数'} + </Button> + </div> + ); + + function setValue(value: AlgorithmParamsState[]) { + // 如果受控,内部不处理 value,直接提交给外部处理,通过上方的 useEffect 来更新 curValue + if (isControlled) { + onChange?.(value.map((item) => item.value)); + return; + } + setCurValue(value); + } + + function getFormHandler(index: number, field: keyof AlgorithmParams) { + return (val: any | string) => { + const newValue = [...curValue]; + const { id, value } = newValue[index]; + newValue[index] = { + id, + value: { + ...value, + [field]: typeof val === 'object' ? val?.target?.value : val, + }, + }; + setValue([...newValue]); + }; + } + + function addRow(e: any) { + e.stopPropagation(); + e.preventDefault(); + const newValue = [...curValue, { id: `${Date.now()}`, value: { ...emptyRow } }]; + setValue(newValue); + } + function delRow(index: number) { + setValue(curValue.filter((_, i) => i !== index)); + } +}; + +function getValue(value: AlgorithmParams[]) { + return value.map((item) => ({ + id: item.name || giveWeakRandomKey(), + value: item, + })); +} + +export default AlgorithmParamsInput; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmSendModal/index.module.less b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmSendModal/index.module.less new file mode 100644 index 000000000..6402a609f --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmSendModal/index.module.less @@ -0,0 +1,11 @@ +.styled_container { + font-size: 14px; +} + +.styled_property_list { + margin-top: 6px; + margin-bottom: 20px; + padding: 20px; + border: 1px solid var(--color-border-2); + background: transparent; +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmSendModal/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmSendModal/index.tsx new file mode 100644 index 000000000..051dfce69 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/AlgorithmSendModal/index.tsx @@ -0,0 +1,126 @@ +import React, { FC } from 'react'; +import { Modal, Typography, Input, Message } from '@arco-design/web-react'; +import { IconLoading } from '@arco-design/web-react/icon'; +import PropertyList from 'components/PropertyList'; +import { Algorithm, AlgorithmProject } from 'typings/algorithm'; +import AlgorithmType from 'components/AlgorithmType'; +import { ConfirmProps } from '@arco-design/web-react/es/Modal/confirm'; +import styled from './index.module.less'; + +type Props = { + algorithm: Algorithm | AlgorithmProject; + isPublish?: boolean; + onChange: (comment: string) => void; 
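+  // invoked with the latest version-comment text on every change of the textarea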
+}; + +const { Text } = Typography; +const { TextArea } = Input; + +const AlgorithmSendModalContent: FC<Props> = ({ isPublish = false, algorithm, onChange }) => { + const curVersion = + (algorithm as AlgorithmProject).latest_version || (algorithm as Algorithm).version || 0; + const propertyList = [ + { + label: '名称', + value: algorithm.name, + }, + { + label: '类型', + value: algorithm?.type && <AlgorithmType type={algorithm.type} style={{ marginTop: -4 }} />, + }, + { + label: '版本', + value: `V${isPublish ? curVersion + 1 : curVersion}`, + }, + { + label: '描述', + value: algorithm.comment, + }, + ]; + return ( + <div className={styled.styled_container}> + <Text type="secondary">{'算法'}</Text> + <PropertyList className={styled.styled_property_list} cols={2} properties={propertyList} /> + {isPublish ? ( + <> + <Text type="secondary">{'版本描述'}</Text> + <TextArea style={{ marginTop: 6 }} rows={2} onChange={onChange} /> + </> + ) : ( + <></> + )} + </div> + ); +}; + +function sendModal( + algorithmGetter: Props['algorithm'] | (() => Promise<Props['algorithm']>), + onConfirm: (comment: string, algorithm: Props['algorithm']) => Promise<any>, + onCancel: () => void, + isPublish = false, + showMsg = false, +) { + return new Promise(async (resolve) => { + let algorithm: Props['algorithm'] | undefined = undefined; + const modalProps: ConfirmProps = { + icon: null, + title: <IconLoading />, + closable: true, + style: { + width: '600px', + }, + okButtonProps: { + disabled: true, + }, + okText: isPublish ? '发版' : '发布', + content: null, + cancelText: '取消', + onCancel, + + async onConfirm() { + if (!algorithm) { + return; + } + modalProps.confirmLoading = true; + modal.update({ ...modalProps }); + try { + await onConfirm(curComment, algorithm); + } catch (e) { + Message.error(e.message); + throw e; + } + if (showMsg) { + Message.success(isPublish ? '发版成功' : '发布成功'); + } + + modal.close(); + resolve(''); + }, + }; + const algorithmPromise = + typeof algorithmGetter === 'function' ? algorithmGetter() : Promise.resolve(algorithmGetter); + + const modal = Modal.confirm({ ...modalProps }); + + algorithm = await algorithmPromise; + let curComment = algorithm.comment ?? ''; + + // update the modal content and state with algorithm data + modalProps.title = isPublish ? 
`发版「${algorithm.name}」` : `发布${algorithm.name}」`; + modalProps.content = ( + <AlgorithmSendModalContent + isPublish={isPublish} + algorithm={algorithm} + onChange={(value: string) => { + curComment = value; + }} + /> + ); + modalProps.okButtonProps = { + disabled: false, + }; + modal.update({ ...modalProps }); + }); +} + +export default sendModal; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/index.tsx b/web_console_v2/client/src/views/AlgorithmManagement/index.tsx new file mode 100644 index 000000000..04c4a9865 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/index.tsx @@ -0,0 +1,36 @@ +import ErrorBoundary from 'components/ErrorBoundary'; +import React, { FC } from 'react'; +import { Route, Redirect, useLocation, Switch } from 'react-router-dom'; + +import { AlgorithmManagementTabType } from 'typings/modelCenter'; + +import AlgorithmDetail from './AlgorithmDetail'; +import AlgorithmForm from './AlgorithmForm'; +import AlgorithmList from './AlgorithmList'; + +const AlgorithmManagement: FC = () => { + const location = useLocation(); + + return ( + <ErrorBoundary> + <Switch> + <Route + path={`/algorithm-management/:tabType(${AlgorithmManagementTabType.MY}|${AlgorithmManagementTabType.BUILT_IN}|${AlgorithmManagementTabType.PARTICIPANT})`} + exact + component={AlgorithmList} + /> + <Route path="/algorithm-management/:action(create|edit)" component={AlgorithmForm} /> + <Route + path={`/algorithm-management/detail/:id/:tabType?/:algorithmDetailType?`} + exact + component={AlgorithmDetail} + /> + {location.pathname === '/algorithm-management' && ( + <Redirect to={`/algorithm-management/${AlgorithmManagementTabType.MY}`} /> + )} + </Switch> + </ErrorBoundary> + ); +}; + +export default AlgorithmManagement; diff --git a/web_console_v2/client/src/views/AlgorithmManagement/shared.module.less b/web_console_v2/client/src/views/AlgorithmManagement/shared.module.less new file mode 100644 index 000000000..12b47dfa7 --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/shared.module.less @@ -0,0 +1,22 @@ +@import '~styles/mixins.less'; +.avatar_container { + .MixinSquare(44px); + background-color: var(--primary-1); + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + &::before { + display: inline-block; + width: 100%; + height: 100%; + content: ''; + background-image: url('../../assets/icons/atom-icon-algorithm-management.svg'); + background-repeat: no-repeat; + background-size: contain; + } +} +.plus_icon { + margin-right: 4px; + vertical-align: 0.03em !important; +} diff --git a/web_console_v2/client/src/views/AlgorithmManagement/shared.tsx b/web_console_v2/client/src/views/AlgorithmManagement/shared.tsx new file mode 100644 index 000000000..25b869e7f --- /dev/null +++ b/web_console_v2/client/src/views/AlgorithmManagement/shared.tsx @@ -0,0 +1,159 @@ +import React from 'react'; +import styled from './shared.module.less'; +import { + EnumAlgorithmProjectType, + EnumAlgorithmProjectSource, + AlgorithmProject, + Algorithm, + AlgorithmReleaseStatus, +} from 'typings/algorithm'; +import { deleteAlgorithm, deleteAlgorithmProject, unpublishAlgorithm } from 'services/algorithm'; +import { Modal, TableColumnProps } from '@arco-design/web-react'; +import { FilterOp } from 'typings/filter'; +type TableFilterConfig = Pick<TableColumnProps, 'filters' | 'onFilter'>; + +export const AlgorithmProjectTypeText = { + [EnumAlgorithmProjectType.UNSPECIFIED]: '自定义算法', + [EnumAlgorithmProjectType.NN_VERTICAL]: '纵向联邦-NN模型', + 
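+  // (EnumAlgorithmProjectType.NN_HORIZONTAL has no entry in this map)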
[EnumAlgorithmProjectType.NN_LOCAL]: '本地-NN模型', + [EnumAlgorithmProjectType.TREE_VERTICAL]: '纵向联邦-树模型', + [EnumAlgorithmProjectType.TREE_HORIZONTAL]: '横向联邦-树模型', +}; + +export const AlgorithmTypeOptions = [ + { + label: '自定义算法', + value: EnumAlgorithmProjectType.UNSPECIFIED, + }, + // { + // label: i18n.t('algorithm_management.label_model_type_nn_local'), + // value: EnumAlgorithmProjectType.NN_LOCAL, + // }, + { + label: '横向联邦-NN模型', + value: EnumAlgorithmProjectType.NN_HORIZONTAL, + }, + { + label: '纵向联邦-NN模型', + value: EnumAlgorithmProjectType.NN_VERTICAL, + }, + // { + // label: i18n.t('algorithm_management.label_model_type_tree_vertical'), + // value: EnumAlgorithmProjectType.TREE_VERTICAL, + // }, + // { + // label: i18n.t('algorithm_management.label_model_type_tree_horizontal'), + // value: EnumAlgorithmProjectType.TREE_HORIZONTAL, + // }, +]; + +export const AlgorithmSourceText = { + [EnumAlgorithmProjectSource.USER]: '我方', + [EnumAlgorithmProjectSource.PRESET]: '系统预置', + [EnumAlgorithmProjectSource.THIRD_PARTY]: '第三方', +}; + +export const Avatar: React.FC = () => { + return <div className={styled.avatar_container} />; +}; + +export function deleteConfirm( + algorithm: AlgorithmProject | Algorithm, + isProject = false, +): Promise<void> { + return new Promise((resolve, reject) => { + Modal.confirm({ + className: 'custom-modal', + style: { width: 360 }, + title: isProject + ? `确认删除「${algorithm.name}}」?` + : `确认删除版本「V${(algorithm as Algorithm).version}」?`, + content: isProject + ? '删除后,使用该算法的模型训练将无法发起新任务,请谨慎操作' + : '删除后,使用该算法版本的模型训练将无法发起新任务,请谨慎操作', + cancelText: '取消', + okText: '确认', + okButtonProps: { + status: 'danger', + }, + async onConfirm() { + try { + await (isProject ? deleteAlgorithmProject(algorithm.id) : deleteAlgorithm(algorithm.id)); + resolve(); + } catch (e) { + reject(e); + } + }, + }); + }); +} + +export function unpublishConfirm(projectId: ID, algorithm: Algorithm): Promise<void> { + return new Promise((resolve, reject) => { + Modal.confirm({ + className: 'custom-modal', + style: { width: 360 }, + title: `确认撤销发布 「V${algorithm.version}」?`, + content: '撤销发布后,合作伙伴使用该算法版本的模型训练将无法发起新任务,请谨慎操作', + cancelText: '取消', + okText: '确认', + okButtonProps: { + status: 'danger', + }, + async onConfirm() { + try { + await unpublishAlgorithm(projectId, algorithm.id); + resolve(); + } catch (e) { + reject(e); + } + }, + }); + }); +} + +export function pageSplit(data: any[], page: number, pageSize: number): any[] { + if (data.length === 0) return []; + const offset = (page - 1) * pageSize; + return offset + pageSize >= data.length + ? 
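+    // last page: return everything that remains from the offset onwards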
data.slice(offset, data.length) + : data.slice(offset, offset + pageSize); +} + +export const FILTER_ALGORITHM_MY_OPERATOR_MAPPER = { + release_status: FilterOp.IN, + type: FilterOp.IN, + name: FilterOp.CONTAIN, +}; + +export const algorithmReleaseStatusFilters: TableFilterConfig = { + filters: [ + { + text: '已发布', + value: AlgorithmReleaseStatus.RELEASED, + }, + { + text: '未发布', + value: AlgorithmReleaseStatus.UNRELEASED, + }, + ], + onFilter: (value: string, record: AlgorithmProject) => { + return value === record.release_status; + }, +}; + +export const algorithmTypeFilters: TableFilterConfig = { + filters: [ + { + text: '横向联邦-NN模型', + value: EnumAlgorithmProjectType.NN_HORIZONTAL, + }, + { + text: '纵向联邦-NN模型', + value: EnumAlgorithmProjectType.NN_VERTICAL, + }, + ], + onFilter: (value: string, record: AlgorithmProject) => { + return value === record.type; + }, +}; diff --git a/web_console_v2/client/src/views/Audit/EventList/EventDetailDrawer/index.module.less b/web_console_v2/client/src/views/Audit/EventList/EventDetailDrawer/index.module.less new file mode 100644 index 000000000..2d77f851f --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/EventDetailDrawer/index.module.less @@ -0,0 +1,24 @@ +.content{ + flex: 1; +} + +.gap{ + margin: 20px 0; + height: 1px; + background-color: var(--lineColor); +} + +.header{ + display: flex; + justify-content: space-between; + align-items: center; +} + +.click_text{ + font-size: 12px; + color: #1664ff; + cursor: pointer; +} +.styled_copy_button{ + color: var(--textColor) !important; +} diff --git a/web_console_v2/client/src/views/Audit/EventList/EventDetailDrawer/index.tsx b/web_console_v2/client/src/views/Audit/EventList/EventDetailDrawer/index.tsx new file mode 100644 index 000000000..59855ef61 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/EventDetailDrawer/index.tsx @@ -0,0 +1,274 @@ +import React, { useState, useEffect } from 'react'; + +import { formatTimestamp } from 'shared/date'; +import { copyToClipboard, formatJSONValue } from 'shared/helpers'; +import { CONSTANTS } from 'shared/constants'; +import { systemInfoQuery } from 'stores/app'; +import { useRecoilQuery } from 'hooks/recoil'; + +import { Drawer, Button, Message, Tag } from '@arco-design/web-react'; +import { IconCopy } from '@arco-design/web-react/icon'; +import { LabelStrong } from 'styles/elements'; +import CodeEditor from 'components/CodeEditor'; +import BackButton from 'components/BackButton'; + +import PropList from '../PropList'; + +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; +import { Audit, EventType } from 'typings/audit'; + +import styles from './index.module.less'; +import WhichParticipants from '../WhichParticipants'; + +export interface Props extends DrawerProps { + data?: Audit; + event_type: EventType; +} + +const hideExtraBlockList = ['null', '{}']; + +function EventDetailDrawer({ visible, data, title = '事件详情', event_type, ...restProps }: Props) { + const [isShowCodeEditor, setIsShowCodeEditor] = useState(false); + const { data: systemInfo } = useRecoilQuery(systemInfoQuery); + const { name: myName, domain_name: myDomainName, pure_domain_name: myPureDomainName } = + systemInfo || {}; + + useEffect(() => { + if (!visible) { + // reset isShowCodeEditor + setIsShowCodeEditor((prevState) => false); + } + }, [visible]); + + function renderInfoLayout() { + return ( + <> + <LabelStrong isBlock={true}>基础信息</LabelStrong> + <PropList + list={[ + { + key: '事件ID', + value: data?.uuid ?? 
CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '事件时间', + value: data?.created_at + ? formatTimestamp(data.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '事件名称', + value: data?.name ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '用户名', + value: data?.user?.username ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '操作名称', + value: data?.op_type ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + ]} + /> + <div className={styles.gap} /> + <LabelStrong isBlock={true}>请求信息</LabelStrong> + <PropList + list={[ + { + key: '请求ID', + value: data?.uuid ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: 'AccessKey ID', + value: data?.access_key_id ?? CONSTANTS.EMPTY_PLACEHOLDER, + isCanCopy: true, + }, + { + key: '事件结果', + value: data?.result ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '错误码', + value: data?.error_code ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '资源类型', + value: data?.resource_type ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '资源名称', + value: data?.resource_name ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '源IP地址', + value: data?.source_ip ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '额外信息', + value: + data?.extra && !hideExtraBlockList.includes(data.extra) ? ( + <span className={styles.click_text}>查看</span> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + onClick: () => { + if (data?.extra && !hideExtraBlockList.includes(data.extra)) { + setIsShowCodeEditor(true); + } + }, + }, + ]} + /> + </> + ); + } + function renderCrossDomainInfoLayout() { + return ( + <> + <LabelStrong isBlock={true}>基础信息</LabelStrong> + <PropList + list={[ + { + key: '事件ID', + value: data?.uuid ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '事件时间', + value: data?.created_at + ? formatTimestamp(data.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '事件名称', + value: data?.name ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '操作名称', + value: data?.op_type ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + ]} + /> + <div className={styles.gap} /> + <LabelStrong isBlock={true}>请求信息</LabelStrong> + <PropList + list={[ + { + key: '请求ID', + value: data?.uuid ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '事件结果', + value: data?.result ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '资源类型', + value: data?.resource_type ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + key: '资源名称', + value: data?.resource_name ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + ]} + /> + <div className={styles.gap} /> + <LabelStrong isBlock={true}>跨域信息</LabelStrong> + <PropList + list={[ + { + key: '发起方', + value: + data?.coordinator_pure_domain_name === myPureDomainName ? ( + <span> + {`${myName} | ${myDomainName}`} <Tag color="arcoblue"> 本侧</Tag> + </span> + ) : ( + <WhichParticipants + pureDomainName={data?.coordinator_pure_domain_name} + showCoordinator={true} + showAll={true} + /> + ), + }, + { + key: '协作方', + value: ( + <WhichParticipants + currentDomainName={ + data?.coordinator_pure_domain_name !== myPureDomainName + ? myDomainName + : undefined + } + currentName={ + data?.coordinator_pure_domain_name !== myPureDomainName ? 
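+                    // the peer initiated this event, so list our side among the collaborators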
myName : undefined + } + pureDomainName={data?.coordinator_pure_domain_name} + projectId={data?.project_id} + showAll={true} + /> + ), + }, + ]} + /> + </> + ); + } + function renderCodeEditorLayout() { + return ( + <> + <div className={styles.header}> + <BackButton onClick={onBackClick}> 返回</BackButton> + <Button + className={styles.styled_copy_button} + icon={<IconCopy />} + onClick={onCopyClick} + type="text" + > + 复制 + </Button> + </div> + <CodeEditor + language="json" + isReadOnly={true} + theme="grey" + height="calc(100vh - 119px)" // 55(drawer header height) + 16*2(content padding) + 32(header height) + value={formatJSONValue(data?.extra ?? '')} + /> + </> + ); + } + + return ( + <Drawer + placement="right" + title={title} + closable={true} + width="50%" + visible={visible} + unmountOnExit + {...restProps} + > + <div className={styles.content}> + {isShowCodeEditor + ? renderCodeEditorLayout() + : event_type === EventType.CROSS_DOMAIN + ? renderCrossDomainInfoLayout() + : renderInfoLayout()} + </div> + </Drawer> + ); + + function onBackClick() { + setIsShowCodeEditor(false); + } + async function onCopyClick() { + const isOK = await copyToClipboard(formatJSONValue(data?.extra ?? '')); + + if (isOK) { + Message.success('复制成功'); + } else { + Message.error('复制失败'); + } + } +} + +export default EventDetailDrawer; diff --git a/web_console_v2/client/src/views/Audit/EventList/EventTable/index.module.less b/web_console_v2/client/src/views/Audit/EventList/EventTable/index.module.less new file mode 100644 index 000000000..84e9a1627 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/EventTable/index.module.less @@ -0,0 +1,53 @@ + +.styled_title_icon{ + display: inline-block; + margin-left: 16px; + font-size: 12px; + color: var(--textColor); +} +.left{ + .arco-radio-group { + margin-right: 12px; + } +} +.styled_select{ + display: inline-block; + width: 128px; + .arco-select-view { + border-top-right-radius: 0 !important; + border-bottom-right-radius: 0 !important; + } +} +.styled_search{ + display: inline-block; + width: 230px; + .arco-input-group > :first-child { + border-top-left-radius: 0 !important; + border-bottom-left-radius: 0 !important; + } +} +.styled_button{ + width: 32px; + height: 32px; + margin-left: 8px; +} +.styled_footer_button{ + color: var(--textColor) !important; +} +.click_text{ + display: inline-block; + color: #1664ff; + cursor: pointer; +} +.footer{ + display: flex; + align-items: center; + justify-content: space-between; +} + +.styled_table{ + .arco-table-tr { + cursor: pointer; + } +} + diff --git a/web_console_v2/client/src/views/Audit/EventList/EventTable/index.tsx b/web_console_v2/client/src/views/Audit/EventList/EventTable/index.tsx new file mode 100644 index 000000000..3817fca87 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/EventTable/index.tsx @@ -0,0 +1,198 @@ +import React, { FC, useState } from 'react'; +import { systemInfoQuery } from 'stores/app'; +import { useRecoilQuery } from 'hooks/recoil'; + +import { formatTimestamp } from 'shared/date'; +import WhichParticipants from '../WhichParticipants'; + +import { CONSTANTS } from 'shared/constants'; + +import { Table, Tag } from '@arco-design/web-react'; +import { Label, LabelTint } from 'styles/elements'; +import EventDetailDrawer from '../EventDetailDrawer'; + +import { EventType, Audit } from 'typings/audit'; + +import styles from './index.module.less'; + +interface Props { + event_type: EventType; + tableData: Audit[]; + isLoading: boolean; +} + +const EventTable: 
FC<Props> = ({ event_type, tableData, isLoading }) => {
+ const [isShowEventDetailDrawer, setIsShowEventDetailDrawer] = useState(false);
+ const [selectedAudit, setSelectedAudit] = useState<Audit>();
+
+ const { data: systemInfo } = useRecoilQuery(systemInfoQuery);
+ const { name: myName, domain_name: myDomainName, pure_domain_name: myPureDomainName } =
+ systemInfo || {};
+ const columns = [
+ {
+ title: '事件时间',
+ dataIndex: 'created_at',
+ key: 'created_at',
+ width: 250,
+ fixed: 'left',
+ render: (value: any, record: any) => {
+ return (
+ <span
+ className={styles.click_text}
+ onClick={() => {
+ setSelectedAudit(record);
+ setIsShowEventDetailDrawer(true);
+ }}
+ >
+ {value ? formatTimestamp(value) : CONSTANTS.EMPTY_PLACEHOLDER}
+ </span>
+ );
+ },
+ },
+ {
+ title: '用户名',
+ dataIndex: 'user',
+ key: 'user',
+ render: (_value: any, record: any) => {
+ return (
+ <>
+ <Label marginRight={8} fontSize={14}>
+ {record.user?.username}
+ </Label>
+ <LabelTint fontSize={14}>{record.user?.role}</LabelTint>
+ </>
+ );
+ },
+ },
+ {
+ title: '事件名称',
+ dataIndex: 'name',
+ key: 'name',
+ render: (value: any) => value || CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ {
+ title: '资源类型',
+ dataIndex: 'resource_type',
+ key: 'resource_type',
+ render: (value: any) => value || CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ {
+ title: '资源名称',
+ dataIndex: 'resource_name',
+ key: 'resource_name',
+ render: (value: any) => value || CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ ];
+ const cross_domain_columns = [
+ {
+ title: '事件时间',
+ dataIndex: 'created_at',
+ key: 'created_at',
+ width: 250,
+ fixed: 'left',
+ render: (value: any, record: any) => {
+ return (
+ <span
+ className={styles.click_text}
+ onClick={() => {
+ setSelectedAudit(record);
+ setIsShowEventDetailDrawer(true);
+ }}
+ >
+ {value ? formatTimestamp(value) : CONSTANTS.EMPTY_PLACEHOLDER}
+ </span>
+ );
+ },
+ },
+ {
+ title: '发起方',
+ dataIndex: 'coordinator_pure_domain_name',
+ key: 'coordinator_pure_domain_name',
+ render: (value: any, record: any) => {
+ return (
+ <>
+ {value === myPureDomainName ? (
+ <>
+ <Label marginRight={8} fontSize={14}>
+ {systemInfo?.name}
+ </Label>{' '}
+ <Tag color="arcoblue">本侧</Tag>
+ </>
+ ) : (
+ <Label marginRight={8} fontSize={14}>
+ <WhichParticipants showCoordinator={true} pureDomainName={value} />
+ </Label>
+ )}
+ </>
+ );
+ },
+ },
+ {
+ title: '协作方',
+ dataIndex: 'project_id',
+ key: 'participants',
+ render: (value: any, record: any) => (
+ <WhichParticipants
+ currentDomainName={
+ record.coordinator_pure_domain_name !== myPureDomainName ? myDomainName : undefined
+ }
+ currentName={
+ record.coordinator_pure_domain_name !== myPureDomainName ? myName : undefined
+ }
+ pureDomainName={record.coordinator_pure_domain_name}
+ projectId={value}
+ />
+ ),
+ },
+
+ {
+ title: '事件名称',
+ dataIndex: 'name',
+ key: 'name',
+ render: (value: any) => value || CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ {
+ title: '资源类型',
+ dataIndex: 'resource_type',
+ key: 'resource_type',
+ render: (value: any) => value || CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ {
+ title: '资源名称',
+ dataIndex: 'resource_name',
+ key: 'resource_name',
+ render: (value: any) => value || CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ ];
+
+ return (
+ <>
+ <Table<Audit>
+ className={styles.styled_table}
+ rowKey="uuid"
+ data={tableData}
+ loading={isLoading}
+ columns={(event_type === EventType.CROSS_DOMAIN ?
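+ // Cross-domain events swap in the coordinator and collaborator columns; inner events keep the user column.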
cross_domain_columns : columns) as any}
+ pagination={false}
+ onRow={(record) => ({
+ onClick: () => {
+ setSelectedAudit(record);
+ setIsShowEventDetailDrawer(true);
+ },
+ })}
+ />
+ <EventDetailDrawer
+ visible={isShowEventDetailDrawer}
+ data={selectedAudit}
+ onCancel={onEventDetailDrawerClose}
+ event_type={event_type}
+ />
+ </>
+ );
+ function onEventDetailDrawerClose() {
+ setIsShowEventDetailDrawer(false);
+ setSelectedAudit(undefined);
+ }
+};
+
+export default EventTable; diff --git a/web_console_v2/client/src/views/Audit/EventList/MoreParticipants/index.module.less b/web_console_v2/client/src/views/Audit/EventList/MoreParticipants/index.module.less new file mode 100644 index 000000000..1a1f9564c --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/MoreParticipants/index.module.less @@ -0,0 +1,3 @@
+.tag_content{
+ margin-left: 8px;
+} diff --git a/web_console_v2/client/src/views/Audit/EventList/MoreParticipants/index.tsx b/web_console_v2/client/src/views/Audit/EventList/MoreParticipants/index.tsx new file mode 100644 index 000000000..a52a0ce63 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/MoreParticipants/index.tsx @@ -0,0 +1,44 @@
+import React, { FC } from 'react';
+import { Popover, Tag } from '@arco-design/web-react';
+import { PopoverProps } from '@arco-design/web-react/es/Popover';
+import { TagProps } from '@arco-design/web-react/es/Tag';
+
+import styles from './index.module.less';
+
+export interface MoreParticipantsProps {
+ textList: string[];
+ count: number;
+ popoverProps?: PopoverProps;
+ tagProps?: TagProps;
+}
+
+const MoreParticipants: FC<MoreParticipantsProps> = (props: MoreParticipantsProps) => {
+ const { textList = [], count = 1, popoverProps = {}, tagProps = {} } = props;
+ function renderText(textList: string[], count: number) {
+ const listLength = textList.length;
+ if (listLength === 0) {
+ return <div>-</div>;
+ } else if (listLength <= count) {
+ return <div>{textList.slice(0, count).join(' ')}</div>;
+ }
+ return (
+ <>
+ <span>{textList.slice(0, count).join(' ')}</span>
+ <Popover
+ {...popoverProps}
+ content={textList.map((item) => (
+ <div key={item}>{item}</div>
+ ))}
+ >
+ <Tag className={styles.tag_content} {...tagProps}>
+ +{listLength - count}
+ </Tag>
+ </Popover>
+ </>
+ );
+ }
+
+ return renderText(textList, count);
+};
+
+export default MoreParticipants; diff --git a/web_console_v2/client/src/views/Audit/EventList/PropList/index.module.less b/web_console_v2/client/src/views/Audit/EventList/PropList/index.module.less new file mode 100644 index 000000000..174f18988 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/PropList/index.module.less @@ -0,0 +1,11 @@
+.styled_row{
+ margin-top: 16px;
+}
+.styled_copy_icon{
+ margin-left: 20px;
+ font-size: 14px;
+
+ &:hover {
+ color: #1664ff;
+ }
+} diff --git a/web_console_v2/client/src/views/Audit/EventList/PropList/index.tsx b/web_console_v2/client/src/views/Audit/EventList/PropList/index.tsx new file mode 100644 index 000000000..8c8017d6f --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/PropList/index.tsx @@ -0,0 +1,49 @@
+import React, { FC, ReactNode } from 'react';
+
+import { Grid } from '@arco-design/web-react';
+import { Label, LabelStrong } from 'styles/elements';
+import { Copy } from 'components/IconPark';
+import ClickToCopy from 'components/ClickToCopy';
+
+import styles from './index.module.less';
+
+const { Row, Col } = Grid;
+export interface Item {
+ key: string;
+ value: ReactNode;
+ isCanCopy?: boolean;
+ onClick?:
() => void; +} +export interface Props { + list: Item[]; +} + +const PropList: FC<Props> = ({ list }) => { + return ( + <> + {list.map((item) => { + return ( + <Row className={styles.styled_row} key={item.key}> + <Col span={4}> + <Label>{item.key}</Label> + </Col> + <Col span={20}> + {item.isCanCopy ? ( + <ClickToCopy text={String(item.value)}> + <LabelStrong onClick={item.onClick}> + {item.value} + <Copy className={styles.styled_copy_icon} /> + </LabelStrong> + </ClickToCopy> + ) : ( + <LabelStrong onClick={item.onClick}>{item.value}</LabelStrong> + )} + </Col> + </Row> + ); + })} + </> + ); +}; + +export default PropList; diff --git a/web_console_v2/client/src/views/Audit/EventList/WhichParticipants/index.tsx b/web_console_v2/client/src/views/Audit/EventList/WhichParticipants/index.tsx new file mode 100644 index 000000000..2b396caa1 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/WhichParticipants/index.tsx @@ -0,0 +1,94 @@ +import React, { FC, useMemo } from 'react'; + +import { useRecoilQuery } from 'hooks/recoil'; +import { projectListQuery } from 'stores/project'; +import { participantListQuery } from 'stores/participant'; +import MoreParticipants from '../MoreParticipants'; + +import { Spin } from '@arco-design/web-react'; + +type Props = { + projectId?: ID; + pureDomainName?: string; + currentName?: string; + currentDomainName?: string; + loading?: boolean; + showAll?: boolean; + showCoordinator?: boolean; +}; + +const WhichParticipants: FC<Props> = ({ + projectId, + pureDomainName, + currentName, + currentDomainName, + loading, + showAll = false, + showCoordinator = false, +}) => { + const { isLoading: projectLoading, data: projectList } = useRecoilQuery(projectListQuery); + const { isLoading: participantsLoading, data: participantsList } = useRecoilQuery( + participantListQuery, + ); + + const { currentCoordinatorName, currentCoordinatorDomainName } = useMemo(() => { + const currentCoordinator = participantsList?.find( + (item) => item.pure_domain_name === pureDomainName, + ); + return { + currentCoordinatorName: currentCoordinator?.name || '-', + currentCoordinatorDomainName: currentCoordinator?.domain_name || '-', + }; + }, [participantsList, pureDomainName]); + + const { collaboratorsName, collaboratorsDomain } = useMemo(() => { + const currentParticipants = + projectList?.find((item) => Number(item.id) === Number(projectId))?.participants || []; + const collaborators = currentParticipants.filter( + (item) => item.pure_domain_name !== pureDomainName, + ); + const collaboratorsName = collaborators.map((item) => item.name); + const collaboratorsDomain = collaborators.map((item) => ({ + name: item.name, + domain_name: item.domain_name, + })); + return { + collaboratorsName, + collaboratorsDomain, + }; + }, [projectList, pureDomainName, projectId]); + + if (loading || projectLoading || participantsLoading) { + return <Spin />; + } + + if (showCoordinator) { + return showAll ? ( + <span>{`${currentCoordinatorName} | ${currentCoordinatorDomainName} `}</span> + ) : ( + <span>{currentCoordinatorName}</span> + ); + } + + return ( + <> + {!showAll ? ( + <MoreParticipants + textList={currentName ? [currentName, ...collaboratorsName] : collaboratorsName} + count={1} + /> + ) : collaboratorsDomain.length || currentName ? 
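+ // With showAll, every known participant is rendered as "name | domain_name"; a dash means none were found.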
( + <div> + {currentName && <div> {`${currentName} | ${currentDomainName}`}</div>} + {collaboratorsDomain.map((item) => ( + <div key={item.domain_name}>{`${item.name} | ${item.domain_name}`}</div> + ))} + </div> + ) : ( + '-' + )} + </> + ); +}; + +export default WhichParticipants; diff --git a/web_console_v2/client/src/views/Audit/EventList/index.module.less b/web_console_v2/client/src/views/Audit/EventList/index.module.less new file mode 100644 index 000000000..84e9a1627 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/index.module.less @@ -0,0 +1,53 @@ + +.styled_title_icon{ + display: inline-block; + margin-left: 16px; + font-size: 12px; + color: var(--textColor); +} +.left{ + .arco-radio-group { + margin-right: 12px; + } +} +.styled_select{ + display: inline-block; + width: 128px; + .arco-select-view { + border-top-right-radius: 0 !important; + border-bottom-right-radius: 0 !important; + } +} +.styled_search{ + display: inline-block; + width: 230px; + .arco-input-group > :first-child { + border-top-left-radius: 0 !important; + border-bottom-left-radius: 0 !important; + } +} +.styled_button{ + width: 32px; + height: 32px; + margin-left: 8px; +} +.styled_footer_button{ + color: var(--textColor) !important; +} +.click_text{ + display: inline-block; + color: #1664ff; + cursor: pointer; +} +.footer{ + display: flex; + align-items: center; + justify-content: space-between; +} + +.styled_table{ + .arco-table-tr { + cursor: pointer; + } +} + diff --git a/web_console_v2/client/src/views/Audit/EventList/index.tsx b/web_console_v2/client/src/views/Audit/EventList/index.tsx new file mode 100644 index 000000000..1cdcba489 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/EventList/index.tsx @@ -0,0 +1,383 @@ +import React, { FC, useState, useMemo } from 'react'; + +import { useUrlState, useTablePaginationWithUrlState } from 'hooks'; + +import dayjs, { Dayjs } from 'dayjs'; +import { useQuery } from 'react-query'; +import { useGetUserInfo } from 'hooks/user'; + +import { fetchAuditList, deleteAudit } from 'services/audit'; +import { expression2Filter } from 'shared/filter'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { + FILTER_EVENT_OPERATOR_MAPPER, + EVENT_SOURCE_TYPE_MAPPER, + EVENT_TYPE_DELETE_MAPPER, +} from '../shared'; + +import { + Input, + Button, + Radio, + DatePicker, + Select, + Message, + Pagination, + Tabs, +} from '@arco-design/web-react'; +import { IconRefresh, IconDownload, IconDelete, IconInfoCircle } from '@arco-design/web-react/icon'; +import GridRow from 'components/_base/GridRow'; +import Modal from 'components/Modal'; +import SharedPageLayout from 'components/SharedPageLayout'; +import TitleWithIcon from 'components/TitleWithIcon'; +import EventTable from './EventTable'; + +import { + EventType, + QueryParams, + RadioType, + SelectType, + CrossDomainSelectType, +} from 'typings/audit'; + +import styles from './index.module.less'; + +const { TabPane } = Tabs; +const { Search } = Input; +const { RangePicker } = DatePicker; + +type Props = {}; + +const EventList: FC<Props> = (props) => { + const userInfo = useGetUserInfo(); + const { + urlState: pageInfoState, + reset: resetPageInfoState, + paginationProps, + } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState({ + radioType: RadioType.ALL, + selectType: SelectType.EVENT_NAME, + crossDomainSelectType: CrossDomainSelectType.EVENT_NAME, + eventType: EventType.INNER, + filter: initFilter(EventType.INNER), + }); + + const initFilterParams = 
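+ // Decode the serialized filter expression in the URL back into a plain key/value object.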
expression2Filter(urlState.filter); + const keyword = + urlState.eventType === EventType.CROSS_DOMAIN + ? initFilterParams[urlState.crossDomainSelectType] + : initFilterParams[urlState.selectType]; + const [filterParams, setFilterParams] = useState<QueryParams>({ + keyword: keyword || '', + startTime: '', + endTime: '', + }); + + const [dateList, setDateList] = useState<null | Dayjs[]>(() => { + if (initFilterParams.start_time && initFilterParams.end_time) { + return [dayjs.unix(initFilterParams.start_time), dayjs.unix(initFilterParams.end_time)]; + } + + if (filterParams.startTime && filterParams.endTime) { + return [ + dayjs(filterParams.startTime, 'YYYY-MM-DD HH:mm:ss'), + dayjs(filterParams.endTime, 'YYYY-MM-DD HH:mm:ss'), + ]; + } + return null; + }); + + const auditListQuery = useQuery( + [ + 'fetchAuditList', + dateList, + pageInfoState.page, + pageInfoState.pageSize, + filterParams.keyword, + urlState.filter, + ], + () => + fetchAuditList({ + filter: urlState.filter, + page: pageInfoState.page, + page_size: pageInfoState.pageSize, + }), + { + retry: 2, + refetchOnWindowFocus: false, + enabled: Boolean(userInfo?.id), + }, + ); + + const auditList = useMemo(() => { + return auditListQuery.data?.data ?? []; + }, [auditListQuery]); + + return ( + <SharedPageLayout + title="审计日志" + isNeedHelp={false} + rightTitle={ + <TitleWithIcon + className={styles.styled_title_icon} + title="以下列表最长展示过去9个月的事件记录" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + } + > + <Tabs defaultActiveTab={urlState.eventType ?? EventType.INNER} onChange={onTabChange}> + <TabPane key="inner" title="内部事件" destroyOnHide={false} /> + <TabPane key="cross_domain" title="跨域事件" destroyOnHide={true} /> + </Tabs> + <GridRow justify="space-between" align="center"> + <div className={styles.left}> + <Radio.Group + value={urlState.radioType || null} + onChange={onRadioTypeChange} + type="button" + > + <Radio value={RadioType.ALL}>全部</Radio> + <Radio value={RadioType.WEEK}>近七天</Radio> + <Radio value={RadioType.ONE_MONTH}>近1月</Radio> + <Radio value={RadioType.THREE_MONTHS}>近3月</Radio> + </Radio.Group> + <RangePicker showTime value={dateList as any} onChange={onRangePickerChange} /> + </div> + <div> + {urlState.eventType === EventType.CROSS_DOMAIN ? 
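+ // Each tab searches a different set of fields, so the keyword-type Select is rendered per tab.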
( + <Select + className={styles.styled_select} + value={urlState.crossDomainSelectType} + onChange={onCrossDomainSelectTypeChange} + > + <Select.Option value={CrossDomainSelectType.EVENT_NAME}>事件名称</Select.Option> + <Select.Option value={CrossDomainSelectType.RESOURCE_TYPE}>资源类型</Select.Option> + <Select.Option value={CrossDomainSelectType.RESOURCE_NAME}>资源名称</Select.Option> + <Select.Option value={CrossDomainSelectType.COORDINATOR_PURE_DOMAIN_NAME}> + 发起方 + </Select.Option> + </Select> + ) : ( + <Select + className={styles.styled_select} + value={urlState.selectType} + onChange={onSelectTypeChange} + > + <Select.Option value={SelectType.EVENT_NAME}>事件名称</Select.Option> + <Select.Option value={SelectType.RESOURCE_TYPE}>资源类型</Select.Option> + <Select.Option value={SelectType.RESOURCE_NAME}>资源名称</Select.Option> + <Select.Option value={SelectType.USER_NAME}>用户名</Select.Option> + </Select> + )} + + <Search + className={styles.styled_search} + allowClear + onSearch={onSearchTextChange} + onClear={() => onSearchTextChange('')} + placeholder="搜索关键词" + defaultValue={ + filterParams.keyword || + initFilterParams[urlState.selectType] || + initFilterParams[urlState.crossDomainSelectType] || + '' + } + /> + <Button className={styles.styled_button} icon={<IconRefresh />} onClick={onRefresh} /> + <Button className={styles.styled_button} icon={<IconDownload />} onClick={onDownload} /> + </div> + </GridRow> + + <EventTable + event_type={urlState.eventType || EventType.INNER} + tableData={auditList} + isLoading={auditListQuery.isLoading} + /> + <div className={styles.footer}> + <Button + className={styles.styled_footer_button} + icon={<IconDelete />} + onClick={onDelete} + type="text" + > + 删除6个月前的记录 + </Button> + <Pagination + sizeCanChange={true} + total={auditListQuery.data?.page_meta?.total_items ?? undefined} + {...paginationProps} + /> + </div> + </SharedPageLayout> + ); + function onTabChange(tab: string) { + constructFilterArray({ ...filterParams, ...urlState, eventType: tab as EventType }); + } + + function onRadioTypeChange(value: any) { + const type: RadioType = value; + + let currentDay = null; + let startDay: Dayjs | null = null; + let endDay: Dayjs | null = null; + switch (type) { + case RadioType.WEEK: + currentDay = dayjs(); + startDay = currentDay.subtract(7, 'day'); + endDay = dayjs(); + break; + case RadioType.ONE_MONTH: + currentDay = dayjs(); + startDay = currentDay.subtract(1, 'month'); + endDay = dayjs(); + break; + case RadioType.THREE_MONTHS: + currentDay = dayjs(); + startDay = currentDay.subtract(3, 'month'); + endDay = dayjs(); + break; + case RadioType.ALL: + default: + break; + } + if (startDay && endDay) { + setDateList([startDay, endDay]); + } else { + setDateList(null); + } + + constructFilterArray({ + ...filterParams, + ...urlState, + startTime: startDay?.format('YYYY-MM-DD HH:mm:ss') ?? '', + endTime: endDay?.format('YYYY-MM-DD HH:mm:ss') ?? '', + radioType: type, + }); + } + function onSelectTypeChange(value: any) { + constructFilterArray({ ...filterParams, ...urlState, selectType: value }); + } + function onCrossDomainSelectTypeChange(value: any) { + constructFilterArray({ ...filterParams, ...urlState, crossDomainSelectType: value }); + } + function onRangePickerChange(dateString: string[], date: any[]) { + setDateList(date as any); + if (date) { + // Clear radio type + constructFilterArray({ + ...filterParams, + ...urlState, + startTime: date?.[0]?.format('YYYY-MM-DD HH:mm:ss') ?? '', + endTime: date?.[1]?.format('YYYY-MM-DD HH:mm:ss') ?? 
'',
+ radioType: undefined,
+ });
+ } else {
+ // Reset radio type
+ constructFilterArray({
+ ...filterParams,
+ ...urlState,
+ startTime: '',
+ endTime: '',
+ radioType: RadioType.ALL,
+ });
+ }
+ }
+ function onSearchTextChange(value: string) {
+ constructFilterArray({ ...filterParams, ...urlState, keyword: value });
+ }
+ function onRefresh() {
+ auditListQuery.refetch();
+ }
+ function onDownload() {
+ // TODO: onDownload
+ Message.info('Coming soon');
+ }
+ function onDelete() {
+ Modal.delete({
+ title:
+ urlState.eventType === EventType.CROSS_DOMAIN
+ ? '确定要删除跨域事件吗?'
+ : '确定要删除内部事件吗?',
+ content: '基于安全审核的原因,平台仅支持清理6个月前的事件记录',
+ onOk() {
+ // Delete audit data from 6 months ago
+ deleteAudit({
+ event_type: EVENT_TYPE_DELETE_MAPPER[urlState.eventType as EventType],
+ })
+ .then(() => {
+ if (String(pageInfoState.page) !== '1') {
+ // Reset page info and refresh audit list data
+ resetPageInfoState();
+ } else {
+ // Only refresh audit list data
+ auditListQuery.refetch();
+ }
+
+ Message.success('删除成功');
+ })
+ .catch((error) => {
+ Message.error(error.message);
+ });
+ },
+ });
+ }
+ function constructFilterArray(value: QueryParams) {
+ let start_time = 0;
+ let end_time = 0;
+
+ if (value.startTime && value.endTime) {
+ start_time = dayjs(value.startTime).utc().unix();
+ end_time = dayjs(value.endTime).utc().unix();
+ } else {
+ const currentDay = dayjs();
+ const nineMonthAgoDay = currentDay.subtract(9, 'month');
+ start_time = nineMonthAgoDay.utc().unix();
+ end_time = currentDay.utc().unix();
+ }
+ const keywordType =
+ value.eventType === EventType.CROSS_DOMAIN ? value.crossDomainSelectType : value.selectType;
+ const serialization = filterExpressionGenerator(
+ {
+ [keywordType!]: value.keyword,
+ start_time: start_time,
+ end_time: end_time,
+ source: EVENT_SOURCE_TYPE_MAPPER[value.eventType || EventType.INNER],
+ },
+ FILTER_EVENT_OPERATOR_MAPPER,
+ );
+
+ setFilterParams({
+ keyword: value.keyword,
+ startTime: value.startTime,
+ endTime: value.endTime,
+ });
+ setUrlState((prevState) => ({
+ ...prevState,
+ filter: serialization,
+ radioType: value.radioType,
+ selectType: value.selectType,
+ crossDomainSelectType: value.crossDomainSelectType,
+ eventType: value.eventType,
+ page: 1,
+ }));
+ }
+ function initFilter(event_type: EventType) {
+ const currentDay = dayjs();
+ const nineMonthAgoDay = currentDay.subtract(9, 'month');
+ const start_time = nineMonthAgoDay.utc().unix();
+ const end_time = currentDay.utc().unix();
+ const filter = filterExpressionGenerator(
+ {
+ start_time: start_time,
+ end_time: end_time,
+ source: EVENT_SOURCE_TYPE_MAPPER[event_type ??
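+ // EVENT_SOURCE_TYPE_MAPPER maps the tab to its audit sources (UI/API for inner events, RPC for cross-domain).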
EventType.INNER], + }, + FILTER_EVENT_OPERATOR_MAPPER, + ); + return filter; + } +}; +export default EventList; diff --git a/web_console_v2/client/src/views/Audit/index.tsx b/web_console_v2/client/src/views/Audit/index.tsx new file mode 100644 index 000000000..2ce10f0ec --- /dev/null +++ b/web_console_v2/client/src/views/Audit/index.tsx @@ -0,0 +1,14 @@ +import React, { FC } from 'react'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { Route } from 'react-router-dom'; +import EventList from './EventList'; + +const Audit: FC = () => { + return ( + <ErrorBoundary> + <Route path="/audit/event" exact component={EventList} /> + </ErrorBoundary> + ); +}; + +export default Audit; diff --git a/web_console_v2/client/src/views/Audit/shared.tsx b/web_console_v2/client/src/views/Audit/shared.tsx new file mode 100644 index 000000000..fecec1ec0 --- /dev/null +++ b/web_console_v2/client/src/views/Audit/shared.tsx @@ -0,0 +1,22 @@ +import { FilterOp } from 'typings/filter'; + +export const FILTER_EVENT_OPERATOR_MAPPER = { + source: FilterOp.IN, + start_time: FilterOp.GREATER_THAN, + end_time: FilterOp.LESS_THAN, + name: FilterOp.CONTAIN, + resource_type: FilterOp.CONTAIN, + username: FilterOp.EQUAL, + resource_name: FilterOp.CONTAIN, + coordinator_pure_domain_name: FilterOp.CONTAIN, +}; + +export const EVENT_SOURCE_TYPE_MAPPER = { + inner: ['UI', 'API', 'UNKNOWN_SOURCE'], + cross_domain: ['RPC'], +}; + +export const EVENT_TYPE_DELETE_MAPPER = { + inner: 'USER_ENDPOINT', + cross_domain: 'RPC', +}; diff --git a/web_console_v2/client/src/views/Cleanup/CleanupDetailDrawer/index.module.less b/web_console_v2/client/src/views/Cleanup/CleanupDetailDrawer/index.module.less new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/client/src/views/Cleanup/CleanupDetailDrawer/index.tsx b/web_console_v2/client/src/views/Cleanup/CleanupDetailDrawer/index.tsx new file mode 100644 index 000000000..c2e834380 --- /dev/null +++ b/web_console_v2/client/src/views/Cleanup/CleanupDetailDrawer/index.tsx @@ -0,0 +1,127 @@ +import React, { FC, useMemo } from 'react'; +import { Drawer, Table, Empty } from '@arco-design/web-react'; +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; +import { Cleanup } from 'typings/cleanup'; +import { LabelStrong } from 'styles/elements'; +import PropertyList from 'components/PropertyList'; +import StateIndicator from 'components/StateIndicator'; +import { calcStateIndicatorProps } from '../CleanupList'; +import { formatTimestamp } from 'shared/date'; +import CONSTANTS from 'shared/constants'; + +export interface Props extends DrawerProps { + data?: Cleanup; +} + +export interface TableProps { + file_path?: string; +} + +const CleanupDetailDrawer: FC<Props> = ({ visible, data, title = 'Cleanup详情', ...restProps }) => { + const displayedProps = useMemo( + () => [ + { + value: data?.id, + label: 'ID', + }, + { + value: <StateIndicator {...calcStateIndicatorProps(data?.state)} />, + label: '状态', + }, + { + value: ( + <span> + {data?.target_start_at + ? formatTimestamp(data?.target_start_at) + : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + label: '目标开始时间', + }, + { + value: ( + <span> + {data?.completed_at ? formatTimestamp(data?.completed_at) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + label: '完成时间', + }, + { + value: <span>{data?.resource_id}</span>, + label: 'Resource ID', + }, + { + value: <span>{data?.resource_type}</span>, + label: '资源类型', + }, + { + value: ( + <span> + {data?.created_at ? 
formatTimestamp(data?.created_at) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + label: '创建时间', + }, + { + value: ( + <span> + {data?.updated_at ? formatTimestamp(data?.updated_at) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + label: '更新时间', + }, + ], + [data], + ); + + const filePathList = useMemo(() => { + if (!data || data.payload.paths.length === 0) return []; + const list = data.payload.paths.map((item) => { + return { file_path: item }; + }); + return list; + }, [data]); + + const columns = useMemo( + () => [ + { + title: 'payload文件路径', + dataIndex: 'file_path', + name: 'file_path', + render: (_: any, record: TableProps) => <span>{record.file_path}</span>, + }, + ], + [], + ); + + return ( + <Drawer + placement="right" + visible={visible} + title={title} + closable={true} + width="50%" + unmountOnExit + {...restProps} + > + {renderBaseInfo()} + <Table + data={filePathList} + columns={columns} + rowKey="file_path" + noDataElement={<Empty description="暂无数据" />} + /> + </Drawer> + ); + + function renderBaseInfo() { + return ( + <> + <LabelStrong isBlock={true}>基础信息</LabelStrong> + <PropertyList cols={12} colProportions={[1, 1]} properties={displayedProps} /> + </> + ); + } +}; + +export default CleanupDetailDrawer; diff --git a/web_console_v2/client/src/views/Cleanup/CleanupList/index.module.less b/web_console_v2/client/src/views/Cleanup/CleanupList/index.module.less new file mode 100644 index 000000000..60d1bd361 --- /dev/null +++ b/web_console_v2/client/src/views/Cleanup/CleanupList/index.module.less @@ -0,0 +1,5 @@ +.link_text { + display: inline-block; + color: #1664ff; + cursor: pointer; +} diff --git a/web_console_v2/client/src/views/Cleanup/CleanupList/index.tsx b/web_console_v2/client/src/views/Cleanup/CleanupList/index.tsx new file mode 100644 index 000000000..8d32e1c1b --- /dev/null +++ b/web_console_v2/client/src/views/Cleanup/CleanupList/index.tsx @@ -0,0 +1,348 @@ +import React, { FC, useEffect, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useToggle } from 'react-use'; +import { Button, Input, Table } from '@arco-design/web-react'; +import { useUrlState } from 'hooks'; +import { fetchCleanupList, postCleanupState } from 'services/cleanup'; +import { Cleanup, CleanupState } from 'typings/cleanup'; +import { FilterOp } from 'typings/filter'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import SharedPageLayout from 'components/SharedPageLayout'; +import GridRow from 'components/_base/GridRow'; +import { constructExpressionTree, expression2Filter } from 'shared/filter'; +import { formatTimestamp } from 'shared/date'; +import CONSTANTS from 'shared/constants'; +import styled from './index.module.less'; +import CleanupDetailDrawer from '../CleanupDetailDrawer'; + +export type QueryParams = { + state?: CleanupState; + resource_type?: string; + resource_id?: string; +}; + +export const calcStateIndicatorProps = ( + state?: CleanupState, +): { type: StateTypes; text: string; tip?: string } => { + let text = CONSTANTS.EMPTY_PLACEHOLDER; + let type = 'default' as StateTypes; + const tip = ''; + + switch (state) { + case CleanupState.WAITING: + text = '等待中'; + type = 'gold'; + break; + case CleanupState.CANCELED: + text = '已撤销'; + type = 'default'; + break; + case CleanupState.RUNNING: + text = '运行中'; + type = 'processing'; + break; + case CleanupState.FAILED: + text = '失败'; + type = 'error'; + break; + case CleanupState.SUCCEEDED: + text = '成功'; + type = 'success'; + break; + } + + return { + text, + type, + tip, 
+ }; +}; + +const CleanupList: FC = () => { + const [isDrawerVisible, setIsDrawerVisible] = useToggle(false); + const [selectedCleanup, setSelectedCleanup] = useState<Cleanup>(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: '', + }); + + const initFilterParams = expression2Filter(urlState.filter); + const [filterParams, setFilterParams] = useState<QueryParams>({ + state: initFilterParams.state, + resource_type: initFilterParams.resource_type, + resource_id: initFilterParams.resource_id, + }); + + const listQ = useQuery( + ['CLEANUP_QUERY_KEY', urlState], + () => + fetchCleanupList({ + page: urlState.page, + pageSize: urlState.pageSize, + filter: urlState.filter === '' ? undefined : urlState.filter, + }), + { + refetchOnWindowFocus: false, + keepPreviousData: true, + }, + ); + + // Filter the display list by the search string + const itemListShow = useMemo(() => { + if (!listQ.data?.data) { + return []; + } + + return listQ.data.data || []; + }, [listQ.data]); + + const columns = useMemo( + () => [ + { + title: 'ID', + dataIndex: 'id', + name: 'id', + render: (_: any, record: Cleanup) => ( + <span + className={styled.link_text} + onClick={() => { + setSelectedCleanup(record); + setIsDrawerVisible(true); + }} + > + {record.id} + </span> + ), + }, + { + title: '状态', + dataIndex: 'state', + name: 'state', + filters: [ + { + text: '等待中', + value: CleanupState.WAITING, + }, + { + text: '运行中', + value: CleanupState.RUNNING, + }, + { + text: '已撤销', + value: CleanupState.CANCELED, + }, + { + text: '失败', + value: CleanupState.FAILED, + }, + { + text: '成功', + value: CleanupState.SUCCEEDED, + }, + ], + defaultFilters: filterParams.state ? [filterParams.state as string] : [], + filterMultiple: false, + render: (_: any, record: Cleanup) => { + return <StateIndicator {...calcStateIndicatorProps(record.state)} />; + }, + }, + { + title: '目标开始时间', + dataIndex: 'target_start_at', + name: 'target_start_at', + render: (_: any, record: Cleanup) => ( + <span> + {record.target_start_at + ? formatTimestamp(record.target_start_at) + : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: '完成时间', + dataIndex: 'completed_at', + name: 'completed_at', + render: (_: any, record: Cleanup) => ( + <span> + {record.completed_at + ? formatTimestamp(record.completed_at) + : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: 'Resource ID', + dataIndex: 'resource_id', + name: 'resource_id', + render: (_: any, record: Cleanup) => <span>{record.resource_id}</span>, + }, + { + title: '资源类型', + dataIndex: 'resource_type', + name: 'resource_type', + render: (_: any, record: Cleanup) => <span>{record.resource_type}</span>, + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + render: (_: any, record: Cleanup) => ( + <span> + {record.created_at ? formatTimestamp(record.created_at) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: '更新时间', + dataIndex: 'updated_at', + name: 'updated_at', + render: (_: any, record: Cleanup) => ( + <span> + {record.updated_at ? 
formatTimestamp(record.updated_at) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + width: 200, + render: (_: any, record: Cleanup) => { + return ( + <Button + disabled={record.state !== CleanupState.WAITING} + onClick={() => { + postCleanupState(record.id).then(() => { + listQ.refetch(); + }); + }} + > + 取消 + </Button> + ); + }, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [listQ], + ); + + useEffect(() => { + constructFilterArray(filterParams); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [filterParams]); + + return ( + <SharedPageLayout title="Cleanup"> + <GridRow justify="end" align="center"> + <Input.Search + style={{ paddingRight: 20 }} + allowClear + defaultValue={filterParams.resource_type} + placeholder={'请输入资源类型'} + onSearch={onResourceTypeSearch} + onClear={() => onResourceTypeSearch('')} + /> + <Input.Search + allowClear + defaultValue={filterParams.resource_id} + placeholder={'请输入resource id'} + onSearch={onResourceIdSearch} + onClear={() => onResourceIdSearch('')} + /> + </GridRow> + <Table + loading={listQ.isFetching} + data={itemListShow} + columns={columns} + scroll={{ x: '100%' }} + rowKey="id" + onChange={(_, sorter, filters, extra) => { + if (extra.action === 'filter') { + setFilterParams({ + ...filterParams, + state: (filters.state?.[0] as CleanupState) ?? undefined, + }); + } + }} + pagination={{ + total: listQ.data?.page_meta?.total_items ?? undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + showTotal: (total: number) => `共 ${total} 条记录`, + }} + /> + <CleanupDetailDrawer + visible={isDrawerVisible} + data={selectedCleanup} + onCancel={onCleanupDetailDrawerClose} + /> + </SharedPageLayout> + ); + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + function onResourceTypeSearch(value: any) { + setFilterParams({ + ...filterParams, + resource_type: value, + }); + } + + function onResourceIdSearch(value: any) { + setFilterParams({ + ...filterParams, + resource_id: value, + }); + } + + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + + if (value.state) { + expressionNodes.push({ + field: 'state', + op: FilterOp.EQUAL, + string_value: value.state, + }); + } + if (value.resource_type) { + expressionNodes.push({ + field: 'resource_type', + op: FilterOp.EQUAL, + string_value: value.resource_type, + }); + } + if (value.resource_id) { + expressionNodes.push({ + field: 'resource_id', + op: FilterOp.EQUAL, + string_value: value.resource_id, + }); + } + + const serialization = constructExpressionTree(expressionNodes); + if ((serialization || urlState.filter) && serialization !== urlState.filter) { + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + page: 1, + })); + } + } + + function onCleanupDetailDrawerClose() { + setSelectedCleanup(undefined); + setIsDrawerVisible(false); + } +}; + +export default CleanupList; diff --git a/web_console_v2/client/src/views/Cleanup/index.tsx b/web_console_v2/client/src/views/Cleanup/index.tsx new file mode 100644 index 000000000..13b629045 --- /dev/null +++ b/web_console_v2/client/src/views/Cleanup/index.tsx @@ -0,0 +1,13 @@ +import React from 'react'; +import { Route } from 'react-router-dom'; +import CleanupList from './CleanupList'; + +function Cleanup() { + return ( + <> + <Route path="/cleanup" exact 
component={CleanupList} /> + </> + ); +} + +export default Cleanup; diff --git a/web_console_v2/client/src/views/Composer/SchedulerItemDetail/index.tsx b/web_console_v2/client/src/views/Composer/SchedulerItemDetail/index.tsx new file mode 100644 index 000000000..c0b8febb5 --- /dev/null +++ b/web_console_v2/client/src/views/Composer/SchedulerItemDetail/index.tsx @@ -0,0 +1,227 @@ +import React, { FC, useEffect, useMemo, useState } from 'react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { Table } from '@arco-design/web-react'; +import { useUrlState } from 'hooks'; +import { useQuery } from 'react-query'; +import { FilterOp } from 'typings/filter'; +import { constructExpressionTree, expression2Filter } from 'shared/filter'; +import { RunnerStatus, SchedulerRunner } from 'typings/composer'; +import { fetchRunnersByItemId } from 'services/composer'; +import { formatTimestamp } from 'shared/date'; +import { useParams } from 'react-router'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import CONSTANTS from 'shared/constants'; + +const calcStateIndicatorProps = ( + state: RunnerStatus, +): { type: StateTypes; text: string; tip?: string } => { + let text = CONSTANTS.EMPTY_PLACEHOLDER; + let type = 'default' as StateTypes; + const tip = ''; + + switch (state) { + case RunnerStatus.INIT: + text = '初始化'; + type = 'gold'; + break; + case RunnerStatus.RUNNING: + text = '运行中'; + type = 'processing'; + break; + case RunnerStatus.FAILED: + text = '运行失败'; + type = 'error'; + break; + case RunnerStatus.DONE: + text = '已结束'; + type = 'success'; + break; + default: + break; + } + + return { + text, + type, + tip, + }; +}; + +export type QueryParams = { + status?: RunnerStatus; +}; + +const SchedulerItemDetail: FC = () => { + const params = useParams<{ item_id: string }>(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: '', + }); + + const initFilterParams = expression2Filter(urlState.filter); + const [filterParams, setFilterParams] = useState<QueryParams>({ + status: initFilterParams.status, + }); + + const listQ = useQuery( + ['SCHEDULE_RUNNER_QUERY_KEY', urlState], + () => + fetchRunnersByItemId(params.item_id, { + page: urlState.page, + pageSize: urlState.pageSize, + filter: urlState.filter === '' ? undefined : urlState.filter, + }), + { + refetchOnWindowFocus: false, + }, + ); + + // Filter the display list by the search string + const runnerListShow = useMemo(() => { + if (!listQ.data?.data) { + return []; + } + const templateList = listQ.data.data || []; + return templateList; + }, [listQ.data]); + + const columns = useMemo( + () => [ + { + title: '调度项ID', + dataIndex: 'item_id', + name: 'item_id', + render: (_: any, record: SchedulerRunner) => <span>{record.item_id}</span>, + }, + { + title: '状态', + dataIndex: 'status', + name: 'status', + filters: [ + { + text: '初始化', + value: RunnerStatus.INIT, + }, + { + text: '运行中', + value: RunnerStatus.RUNNING, + }, + { + text: '已结束', + value: RunnerStatus.DONE, + }, + { + text: '运行失败', + value: RunnerStatus.FAILED, + }, + ], + defaultFilters: filterParams.status ? 
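+ // Seed the column filter with the status recovered from the URL filter expression.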
[filterParams.status as string] : [], + filterMultiple: false, + render: (_: any, record: SchedulerRunner) => { + return <StateIndicator {...calcStateIndicatorProps(record.status)} />; + }, + }, + { + title: '开始时间', + dataIndex: 'start_at', + name: 'start_at', + render: (_: any, record: SchedulerRunner) => ( + <span>{formatTimestamp(record.start_at)}</span> + ), + }, + { + title: '结束时间', + dataIndex: 'end_at', + name: 'end_at', + render: (_: any, record: SchedulerRunner) => <span>{formatTimestamp(record.end_at)}</span>, + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + render: (_: any, record: SchedulerRunner) => ( + <span>{formatTimestamp(record.created_at)}</span> + ), + }, + { + title: '更新时间', + dataIndex: 'updated_at', + name: 'updated_at', + render: (_: any, record: SchedulerRunner) => ( + <span>{formatTimestamp(record.updated_at)}</span> + ), + }, + { + title: '删除时间', + dataIndex: 'deleted_at', + name: 'deleted_at', + render: (_: any, record: SchedulerRunner) => ( + <span>{record.deleted_at ? formatTimestamp(record.deleted_at!) : '——'}</span> + ), + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [listQ], + ); + + useEffect(() => { + constructFilterArray(filterParams); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [filterParams]); + + return ( + <SharedPageLayout title="调度程序项详情"> + <Table + loading={listQ.isFetching} + data={runnerListShow} + columns={columns} + scroll={{ x: '100%' }} + rowKey="id" + onChange={(_, sorter, filters, extra) => { + if (extra.action === 'filter') { + setFilterParams({ + status: (filters.status?.[0] as RunnerStatus) ?? undefined, + }); + } + }} + pagination={{ + total: listQ.data?.page_meta?.total_items ?? undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + showTotal: (total: number) => `共 ${total} 条记录`, + }} + /> + </SharedPageLayout> + ); + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + + if (value.status) { + expressionNodes.push({ + field: 'status', + op: FilterOp.EQUAL, + string_value: value.status, + }); + } + + const serialization = constructExpressionTree(expressionNodes); + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + page: 1, + })); + } +}; + +export default SchedulerItemDetail; diff --git a/web_console_v2/client/src/views/Composer/SchedulerItemList/SchedulerItemActions/index.tsx b/web_console_v2/client/src/views/Composer/SchedulerItemList/SchedulerItemActions/index.tsx new file mode 100644 index 000000000..9cb4d799c --- /dev/null +++ b/web_console_v2/client/src/views/Composer/SchedulerItemList/SchedulerItemActions/index.tsx @@ -0,0 +1,39 @@ +import React, { FC } from 'react'; +import GridRow from 'components/_base/GridRow'; +import { SchedulerItem } from 'typings/composer'; +import SchedulerPipelineDrawer from '../../components/SchedulerPipelineDrawer'; +import { useToggle } from 'react-use'; + +type Props = { + scheduler: SchedulerItem; +}; + +const SchedulerItemActions: FC<Props> = ({ scheduler }) => { + const [pipelineVisible, setPipelineVisible] = useToggle(false); + const code = JSON.stringify(scheduler.pipeline, null, 2); + return ( + <> + <GridRow> + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + onClick={setPipelineVisible} + > + pipeline + </button> + {/* 
<MoreActions actionList={actionList} /> */}
+ </GridRow>
+ <SchedulerPipelineDrawer
+ title={<span>pipeline</span>}
+ visible={pipelineVisible}
+ onClose={setPipelineVisible}
+ code={code}
+ />
+ </>
+ );
+};
+
+export default SchedulerItemActions; diff --git a/web_console_v2/client/src/views/Composer/SchedulerItemList/index.tsx b/web_console_v2/client/src/views/Composer/SchedulerItemList/index.tsx new file mode 100644 index 000000000..1063b770b --- /dev/null +++ b/web_console_v2/client/src/views/Composer/SchedulerItemList/index.tsx @@ -0,0 +1,288 @@
+import React, { FC, useEffect, useMemo, useState } from 'react';
+import { useQuery } from 'react-query';
+import { Link } from 'react-router-dom';
+import { Input, Switch, Table } from '@arco-design/web-react';
+import SharedPageLayout from 'components/SharedPageLayout';
+import GridRow from 'components/_base/GridRow';
+import { useUrlState } from 'hooks';
+import { FilterOp } from 'typings/filter';
+import { ItemStatus, SchedulerItem } from 'typings/composer';
+import { fetchSchedulerItemList, patchEditItemState } from 'services/composer';
+import { constructExpressionTree, expression2Filter } from 'shared/filter';
+import { formatTimestamp } from 'shared/date';
+import SchedulerItemActions from './SchedulerItemActions';
+import { ColumnProps } from '@arco-design/web-react/es/Table';
+import { TABLE_COL_WIDTH } from 'shared/constants';
+import CONSTANTS from 'shared/constants';
+
+export type QueryParams = {
+ is_cron?: boolean;
+ status?: ItemStatus;
+ name?: string;
+ id?: string;
+};
+
+const SchedulerItemList: FC = () => {
+ const [urlState, setUrlState] = useUrlState({
+ page: 1,
+ pageSize: 10,
+ filter: '',
+ });
+ const initFilterParams = expression2Filter(urlState.filter);
+ const [filterParams, setFilterParams] = useState<QueryParams>({
+ is_cron: initFilterParams.is_cron || false,
+ status: initFilterParams.status,
+ name: initFilterParams.name || '',
+ id: initFilterParams.id,
+ });
+
+ const listQ = useQuery(
+ ['SCHEDULE_ITEM_QUERY_KEY', urlState],
+ () =>
+ fetchSchedulerItemList({
+ page: urlState.page,
+ pageSize: urlState.pageSize,
+ filter: urlState.filter === '' ? undefined : urlState.filter,
+ }),
+ {
+ refetchOnWindowFocus: false,
+ keepPreviousData: true,
+ },
+ );
+
+ // Extract the item list from the query response
+ const itemListShow = useMemo(() => {
+ if (!listQ.data?.data) {
+ return [];
+ }
+ return listQ.data.data || [];
+ }, [listQ.data]);
+
+ const columns: ColumnProps[] = useMemo(
+ () => [
+ {
+ title: '名称',
+ dataIndex: 'name',
+ name: 'name',
+ width: TABLE_COL_WIDTH.NAME,
+ render: (name: string, record: SchedulerItem) => (
+ <Link
+ to={`/composer/scheduler-item/detail/${record.id}`}
+ rel="noopener"
+ className="col-name-link"
+ >
+ {name}
+ </Link>
+ ),
+ },
+ {
+ title: '状态',
+ dataIndex: 'status',
+ name: 'status',
+ width: TABLE_COL_WIDTH.NORMAL,
+ filters: [
+ {
+ text: '开启',
+ value: ItemStatus.ON,
+ },
+ {
+ text: '关闭',
+ value: ItemStatus.OFF,
+ },
+ ],
+ defaultFilters: filterParams.status ? [filterParams.status as string] : [],
+ filterMultiple: false,
+ render: (_: any, record: SchedulerItem) => {
+ return (
+ <Switch
+ size="small"
+ onChange={(value: boolean) => {
+ patchEditItemState(record.id, value ?
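+ // Toggling the switch persists the new item status and then refetches the list.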
ItemStatus.ON : ItemStatus.OFF).then(() => { + listQ.refetch(); + }); + }} + checked={record.status === ItemStatus.ON} + /> + ); + }, + }, + { + title: 'cron_config', + dataIndex: 'cron_config', + name: 'cron_config', + width: TABLE_COL_WIDTH.NORMAL, + filters: [ + { + text: '展示cron_job_items', + value: true, + }, + { + text: '展示all items', + value: false, + }, + ], + filterMultiple: false, + render: (_: any, record: SchedulerItem) => <span>{record.cron_config}</span>, + }, + { + title: '最近运行时间', + dataIndex: 'last_run_at', + name: 'last_run_at', + width: TABLE_COL_WIDTH.TIME, + render: (_: any, record: SchedulerItem) => ( + <span>{formatTimestamp(record.last_run_at)}</span> + ), + }, + { + title: 'retry_cnt', + dataIndex: 'retry_cnt', + name: 'retry_cnt', + width: TABLE_COL_WIDTH.THIN, + render: (_: any, record: SchedulerItem) => <span>{record.retry_cnt}</span>, + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + width: TABLE_COL_WIDTH.TIME, + render: (_: any, record: SchedulerItem) => ( + <span>{formatTimestamp(record.created_at)}</span> + ), + }, + { + title: '更新时间', + dataIndex: 'updated_at', + name: 'updated_at', + width: TABLE_COL_WIDTH.NAME, + render: (_: any, record: SchedulerItem) => ( + <span>{formatTimestamp(record.updated_at)}</span> + ), + }, + { + title: '删除时间', + dataIndex: 'deleted_at', + name: 'deleted_at', + width: TABLE_COL_WIDTH.TIME, + render: (_: any, record: SchedulerItem) => ( + <span> + {record.deleted_at ? formatTimestamp(record.deleted_at!) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + fixed: 'right', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: number, record: SchedulerItem) => <SchedulerItemActions scheduler={record} />, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [listQ], + ); + + useEffect(() => { + constructFilterArray(filterParams); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [filterParams]); + + return ( + <SharedPageLayout title="调度程序项"> + <GridRow justify="end" align="center"> + <Input.Search + allowClear + defaultValue={filterParams.name} + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder={'请输入名称'} + /> + </GridRow> + <Table + loading={listQ.isFetching} + data={itemListShow} + columns={columns} + scroll={{ x: '100%' }} + rowKey="id" + onChange={(_, sorter, filters, extra) => { + if (extra.action === 'filter') { + setFilterParams({ + ...filterParams, + is_cron: Boolean(filters.cron_config?.[0]), + status: (filters.status?.[0] as ItemStatus) ?? undefined, + }); + } + }} + pagination={{ + total: listQ.data?.page_meta?.total_items ?? 
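+ // Pagination is server-side; the total count comes from page_meta in the response.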
undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + showTotal: (total: number) => `共 ${total} 条记录`, + }} + /> + </SharedPageLayout> + ); + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + function onSearch(value: any) { + setFilterParams({ + ...filterParams, + name: value, + }); + } + + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + if (value.is_cron) { + expressionNodes.push({ + field: 'is_cron', + op: FilterOp.EQUAL, + bool_value: value.is_cron, + }); + } + if (value.name) { + expressionNodes.push({ + field: 'name', + op: FilterOp.CONTAIN, + string_value: value.name, + }); + } + + if (value.status) { + expressionNodes.push({ + field: 'status', + op: FilterOp.EQUAL, + string_value: value.status, + }); + } + + if (value.id) { + expressionNodes.push({ + field: 'id', + op: FilterOp.EQUAL, + number_value: Number(value.id), + }); + } + + const serialization = constructExpressionTree(expressionNodes); + if ((serialization || urlState.filter) && serialization !== urlState.filter) { + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + page: 1, + })); + } + } +}; + +export default SchedulerItemList; diff --git a/web_console_v2/client/src/views/Composer/SchedulerRunnerList/SchedulerRunnerActions/index.tsx b/web_console_v2/client/src/views/Composer/SchedulerRunnerList/SchedulerRunnerActions/index.tsx new file mode 100644 index 000000000..5c8cb7dbf --- /dev/null +++ b/web_console_v2/client/src/views/Composer/SchedulerRunnerList/SchedulerRunnerActions/index.tsx @@ -0,0 +1,51 @@ +import React, { FC, useMemo } from 'react'; +import GridRow from 'components/_base/GridRow'; +import { SchedulerRunner } from 'typings/composer'; +import SchedulerPipelineDrawer from '../../components/SchedulerPipelineDrawer'; +import { useToggle } from 'react-use'; + +type Props = { + scheduler: SchedulerRunner; +}; + +const SchedulerItemActions: FC<Props> = ({ scheduler }) => { + const [pipelineVisible, setPipelineVisible] = useToggle(false); + const codeString = useMemo(() => { + if (!scheduler) return ''; + const { pipeline, output, context } = scheduler; + return JSON.stringify( + { + pipeline, + context, + output, + }, + null, + 2, + ); + }, [scheduler]); + return ( + <> + <GridRow> + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + onClick={setPipelineVisible} + > + 数据详情 + </button> + {/* <MoreActions actionList={actionList} /> */} + </GridRow> + <SchedulerPipelineDrawer + title={<span>数据详情</span>} + visible={pipelineVisible} + onClose={setPipelineVisible} + code={codeString} + /> + </> + ); +}; + +export default SchedulerItemActions; diff --git a/web_console_v2/client/src/views/Composer/SchedulerRunnerList/index.tsx b/web_console_v2/client/src/views/Composer/SchedulerRunnerList/index.tsx new file mode 100644 index 000000000..07c35f8dd --- /dev/null +++ b/web_console_v2/client/src/views/Composer/SchedulerRunnerList/index.tsx @@ -0,0 +1,278 @@ +import React, { FC, useEffect, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import SharedPageLayout from 'components/SharedPageLayout'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import { Table } from '@arco-design/web-react'; +import { useUrlState } from 'hooks'; +import { RunnerStatus, SchedulerRunner } from 'typings/composer'; +import { FilterOp } from 
'typings/filter'; +import { fetchSchedulerRunnerList } from 'services/composer'; +import { constructExpressionTree, expression2Filter } from 'shared/filter'; +import { formatTimestamp } from 'shared/date'; +import { Link } from 'react-router-dom'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { TABLE_COL_WIDTH } from 'shared/constants'; +import SchedulerRunnerActions from './SchedulerRunnerActions'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import CONSTANTS from 'shared/constants'; + +export type QueryParams = { + status?: RunnerStatus; +}; + +const calcStateIndicatorProps = ( + state: RunnerStatus, +): { type: StateTypes; text: string; tip?: string } => { + let text = CONSTANTS.EMPTY_PLACEHOLDER; + let type = 'default' as StateTypes; + const tip = ''; + + switch (state) { + case RunnerStatus.INIT: + text = '初始化'; + type = 'gold'; + break; + case RunnerStatus.RUNNING: + text = '运行中'; + type = 'processing'; + break; + case RunnerStatus.FAILED: + text = '运行失败'; + type = 'error'; + break; + case RunnerStatus.DONE: + text = '已结束'; + type = 'success'; + break; + default: + break; + } + + return { + text, + type, + tip, + }; +}; + +const SchedulerRunnerList: FC = () => { + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: '', + }); + + const initFilterParams = expression2Filter(urlState.filter); + const [filterParams, setFilterParams] = useState<QueryParams>({ + status: initFilterParams.status, + }); + + const listQ = useQuery( + ['SCHEDULE_RUNNER_QUERY_KEY', urlState], + () => + fetchSchedulerRunnerList({ + page: urlState.page, + pageSize: urlState.pageSize, + filter: urlState.filter === '' ? undefined : urlState.filter, + }), + { + refetchOnWindowFocus: false, + keepPreviousData: true, + }, + ); + + // Filter the display list by the search string + const runnerListShow = useMemo(() => { + if (!listQ.data?.data) { + return []; + } + const templateList = listQ.data.data || []; + return templateList; + }, [listQ.data]); + + const columns: ColumnProps[] = useMemo( + () => [ + { + title: '调度项ID', + dataIndex: 'item_id', + name: 'item_id', + width: TABLE_COL_WIDTH.NAME, + render: (_: any, record: SchedulerRunner) => { + const filter = filterExpressionGenerator( + { + id: record.item_id, + }, + { + id: FilterOp.EQUAL, + }, + ); + return ( + <Link + to={(location) => ({ + ...location, + pathname: `/composer/scheduler-item/list`, + search: location.search + ? `${location.search}&filter=${filter}` + : `?filter=${filter}`, + })} + > + {record.item_id} + </Link> + ); + }, + }, + { + title: '状态', + dataIndex: 'status', + name: 'status', + width: TABLE_COL_WIDTH.NORMAL, + filters: [ + { + text: '初始化', + value: RunnerStatus.INIT, + }, + { + text: '运行中', + value: RunnerStatus.RUNNING, + }, + { + text: '已结束', + value: RunnerStatus.DONE, + }, + { + text: '运行失败', + value: RunnerStatus.FAILED, + }, + ], + defaultFilters: filterParams.status ? [filterParams.status as string] : [], + filterMultiple: false, + render: (_: any, record: SchedulerRunner) => { + return <StateIndicator {...calcStateIndicatorProps(record.status)} />; + }, + }, + { + title: '开始时间', + dataIndex: 'start_at', + name: 'start_at', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: any, record: SchedulerRunner) => ( + <span>{formatTimestamp(record.start_at)}</span> + ), + }, + { + title: '结束时间', + dataIndex: 'end_at', + name: 'end_at', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: any, record: SchedulerRunner) => ( + <span> + {record.end_at ? 
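+ // end_at may be unset (e.g. the runner has not finished yet), so fall back to the placeholder.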
formatTimestamp(record.end_at) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: any, record: SchedulerRunner) => ( + <span>{formatTimestamp(record.created_at)}</span> + ), + }, + { + title: '更新时间', + dataIndex: 'updated_at', + name: 'updated_at', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: any, record: SchedulerRunner) => ( + <span>{formatTimestamp(record.updated_at)}</span> + ), + }, + { + title: '删除时间', + dataIndex: 'deleted_at', + name: 'deleted_at', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: any, record: SchedulerRunner) => ( + <span> + {record.deleted_at ? formatTimestamp(record.deleted_at!) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ), + }, + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + fixed: 'right', + width: TABLE_COL_WIDTH.NORMAL, + render: (_: number, record: SchedulerRunner) => ( + <SchedulerRunnerActions scheduler={record} /> + ), + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [listQ], + ); + + useEffect(() => { + constructFilterArray(filterParams); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [filterParams]); + + return ( + <SharedPageLayout title="调度程序运行器"> + <Table + loading={listQ.isFetching} + data={runnerListShow} + columns={columns} + scroll={{ x: '100%' }} + rowKey="id" + onChange={(_, sorter, filters, extra) => { + if (extra.action === 'filter') { + setFilterParams({ + status: (filters.status?.[0] as RunnerStatus) ?? undefined, + }); + } + }} + pagination={{ + total: listQ.data?.page_meta?.total_items ?? undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + showTotal: (total: number) => `共 ${total} 条记录`, + }} + /> + </SharedPageLayout> + ); + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + + if (value.status) { + expressionNodes.push({ + field: 'status', + op: FilterOp.EQUAL, + string_value: value.status, + }); + } + + const serialization = constructExpressionTree(expressionNodes); + if ((serialization || urlState.filter) && serialization !== urlState.filter) { + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + page: 1, + })); + } + } +}; + +export default SchedulerRunnerList; diff --git a/web_console_v2/client/src/views/Composer/components/SchedulerPipelineDrawer/index.tsx b/web_console_v2/client/src/views/Composer/components/SchedulerPipelineDrawer/index.tsx new file mode 100644 index 000000000..74f3915ee --- /dev/null +++ b/web_console_v2/client/src/views/Composer/components/SchedulerPipelineDrawer/index.tsx @@ -0,0 +1,18 @@ +import React, { FC } from 'react'; +import { Drawer } from '@arco-design/web-react'; +import CodeEditor from 'components/CodeEditor'; +type Props = { + title: React.ReactNode; + visible: boolean; + onClose: () => void; + code: string; +}; +const SchedulerPipelineDrawer: FC<Props> = ({ title, visible, onClose, code }) => { + return ( + <Drawer width={500} title={title} visible={visible} onOk={onClose} onCancel={onClose}> + <CodeEditor value={code} language="json" theme="grey" /> + </Drawer> + ); +}; + +export default SchedulerPipelineDrawer; diff --git a/web_console_v2/client/src/views/Composer/index.tsx b/web_console_v2/client/src/views/Composer/index.tsx new file mode 100644 index 
000000000..e5f7eba87 --- /dev/null +++ b/web_console_v2/client/src/views/Composer/index.tsx @@ -0,0 +1,26 @@ +import React from 'react'; +import { Redirect, Route, Switch, useLocation } from 'react-router-dom'; +import SchedulerItemList from './SchedulerItemList'; +import SchedulerItemDetail from './SchedulerItemDetail'; +import SchedulerRunnerList from './SchedulerRunnerList'; +import ErrorBoundary from 'components/ErrorBoundary'; + +function Composer() { + const location = useLocation(); + return ( + <ErrorBoundary> + <Switch> + <Route path={'/composer/scheduler-item/list'} exact component={SchedulerItemList} /> + <Route path={'/composer/scheduler-runner/list'} exact component={SchedulerRunnerList} /> + <Route + path={'/composer/scheduler-item/detail/:item_id'} + exact + component={SchedulerItemDetail} + /> + {location.pathname === '/composer' && <Redirect to="/composer/scheduler-item/list" />} + </Switch> + </ErrorBoundary> + ); +} + +export default Composer; diff --git a/web_console_v2/client/src/views/Dashboard/DashboardDetail/index.module.less b/web_console_v2/client/src/views/Dashboard/DashboardDetail/index.module.less new file mode 100644 index 000000000..557b8c972 --- /dev/null +++ b/web_console_v2/client/src/views/Dashboard/DashboardDetail/index.module.less @@ -0,0 +1,7 @@ +.container { + display: flex; + justify-content: space-between; + flex-wrap: wrap; + width: 100%; + height: 100%; +} diff --git a/web_console_v2/client/src/views/Dashboard/DashboardDetail/index.tsx b/web_console_v2/client/src/views/Dashboard/DashboardDetail/index.tsx new file mode 100644 index 000000000..4349d2894 --- /dev/null +++ b/web_console_v2/client/src/views/Dashboard/DashboardDetail/index.tsx @@ -0,0 +1,92 @@ +import React, { FC, useMemo } from 'react'; +import { useParams } from 'react-router-dom'; +import SharedPageLayout from 'components/SharedPageLayout'; +import styled from './index.module.less'; +import { useQuery } from 'react-query'; +import { fetchDashboardList } from 'services/operation'; +import { Empty, Space, Spin, Tooltip } from '@arco-design/web-react'; +import { IconSend } from '@arco-design/web-react/icon'; +import CodeEditor from 'components/CodeEditor'; + +type Props = {}; + +const Dashboard: FC<Props> = () => { + const { uuid } = useParams<{ uuid: string }>(); + const dashboardQuery = useQuery('fetchDashboardList', () => fetchDashboardList(), {}); + const dashboardURL = useMemo(() => { + if (!dashboardQuery.data) { + return ''; + } + return (dashboardQuery.data?.data || []).find((item) => item.uuid === uuid)?.url; + }, [dashboardQuery.data, uuid]); + if (!uuid) { + return ( + <SharedPageLayout title={'仪表盘'}> + <div className={styled.container}> + <Empty description={renderEmptyTips()} /> + </div> + </SharedPageLayout> + ); + } + return ( + <SharedPageLayout + title={'仪表盘'} + rightTitle={ + <span> + <button + className="custom-text-button" + onClick={() => { + window.open(dashboardURL, '_blank'); + }} + > + <Tooltip position={'left'} trigger="hover" content="新页面打开"> + <IconSend /> + </Tooltip> + </button> + </span> + } + > + <div className={styled.container}> + {dashboardQuery.isFetching ? 
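+ /* Sketch of the lookup above (assumed shape): fetchDashboardList returns entries like { name: 'overview', uuid: '<uuid>', url: '<kibana url>' }, sourced from the KIBANA_DASHBOARD_LIST env var shown in the empty-state tips below; the route's :uuid selects one entry and its url is embedded in the iframe, or opened in a new tab via the right-title button. */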
( + <Spin style={{ display: 'block' }} /> + ) : ( + <iframe + style={{ + width: '100%', + height: '100%', + }} + title={'dashboard'} + src={dashboardURL} + /> + )} + </div> + </SharedPageLayout> + ); + + function renderEmptyTips() { + return ( + <Space direction={'vertical'}> + <h3> + dashboard功能暂未开启,如需开启须确保此FLAG 「dashboard_enabled」 和环境变量 + 「KIBANA_DASHBOARD_LIST」设置正确 + </h3> + <div + style={{ + height: '100px', + }} + > + <CodeEditor + language={'python'} + isReadOnly={true} + value={ + 'FLAGS=\'{"dashboard_enabled": true}\'\n' + + 'KIBANA_DASHBOARD_LIST=\'[{"name": "overview", "uuid": "<uuid-of-kibana-dashboard>"}]\'' + } + /> + </div> + </Space> + ); + } +}; + +export default Dashboard; diff --git a/web_console_v2/client/src/views/Dashboard/index.tsx b/web_console_v2/client/src/views/Dashboard/index.tsx index 3c848bc31..f5ee04b70 100644 --- a/web_console_v2/client/src/views/Dashboard/index.tsx +++ b/web_console_v2/client/src/views/Dashboard/index.tsx @@ -1,7 +1,14 @@ -import React, { ReactElement } from 'react'; +import React from 'react'; +import { Route } from 'react-router-dom'; +import DashboardDetail from './DashboardDetail'; -function DashboardPage(): ReactElement { - return <div className="container">dashboard</div>; +function Dashboard() { + return ( + <> + <Route path="/dashboard" exact component={DashboardDetail} /> + <Route path="/dashboard/:uuid" exact component={DashboardDetail} /> + </> + ); } -export default DashboardPage; +export default Dashboard; diff --git a/web_console_v2/client/src/views/DataFix/DataFixForm/index.less b/web_console_v2/client/src/views/DataFix/DataFixForm/index.less new file mode 100644 index 000000000..5e43c625d --- /dev/null +++ b/web_console_v2/client/src/views/DataFix/DataFixForm/index.less @@ -0,0 +1,11 @@ +.data-fix-container{ + display: flex; + justify-content: space-between; + flex-wrap: wrap; + width: 100%; + height: 100%; + .data-fix-form{ + width: 520px; + margin: 0 auto; + } +} diff --git a/web_console_v2/client/src/views/DataFix/DataFixForm/index.tsx b/web_console_v2/client/src/views/DataFix/DataFixForm/index.tsx new file mode 100644 index 000000000..b8a5892af --- /dev/null +++ b/web_console_v2/client/src/views/DataFix/DataFixForm/index.tsx @@ -0,0 +1,100 @@ +import React from 'react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { Button, Form, Input, Message, Switch, Select } from '@arco-design/web-react'; +import { datasetFix } from 'services/operation'; +import { to } from 'shared/helpers'; +import { DatasetForceState } from 'typings/dataset'; +import { useToggle } from 'react-use'; +import './index.less'; + +const FormItem = Form.Item; + +type formData = { + dataset_id: ID; + open_force: boolean; + force: DatasetForceState; +}; + +const forceOptions = [ + { label: '成功', value: DatasetForceState.SUCCEEDED }, + { label: '运行中', value: DatasetForceState.RUNNING }, + { label: '失败', value: DatasetForceState.FAILED }, +]; + +export default function DataFixForm() { + const [forceToggle, setForceToggle] = useToggle(false); + const [formInstance] = Form.useForm<formData>(); + const initFormData = { + open_force: false, + force: DatasetForceState.SUCCEEDED, + }; + return ( + <SharedPageLayout title={'数据集修复'}> + <div className="data-fix-container"> + <Form className="data-fix-form" initialValues={initFormData} form={formInstance}> + <FormItem + label="数据集ID" + field="dataset_id" + required + rules={[ + { + type: 'string', + required: true, + message: '请输入数据集ID', + }, + ]} + > + <Input placeholder="输入需要修复的数据集ID" 
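+ /* Note on the submit flow below (assumed service shape): datasetFix from services/operation takes { datasetId, force }, where force is left undefined when the force-state switch is off, e.g. datasetFix({ datasetId: '1', force: DatasetForceState.SUCCEEDED }) to force a dataset into the succeeded state. */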
allowClear /> + </FormItem> + <FormItem label="强制转换状态" field="open_force"> + <Switch onChange={(val) => setForceToggle(val)} /> + </FormItem> + {forceToggle && forceSelectRender()} + <FormItem + wrapperCol={{ + offset: 10, + }} + > + <Button type="primary" style={{ marginRight: 24 }} onClick={submitForm}> + {'提交'} + </Button> + <Button + onClick={() => { + formInstance.resetFields(); + }} + > + {'重置'} + </Button> + </FormItem> + </Form> + </div> + </SharedPageLayout> + ); + + function forceSelectRender() { + return ( + <FormItem label="数据集状态" field="force"> + <Select options={forceOptions} /> + </FormItem> + ); + } + + async function submitForm() { + const params = formInstance.getFieldsValue(); + const datasetId = params.dataset_id; + const openForce = params.open_force; + const force = params.force; + if (datasetId) { + const [, err] = await to( + datasetFix({ + datasetId, + force: openForce ? force : undefined, + }), + ); + if (err) { + return Message.error(err.message); + } + return Message.success('修复成功'); + } + } +} diff --git a/web_console_v2/client/src/views/DataFix/index.tsx b/web_console_v2/client/src/views/DataFix/index.tsx new file mode 100644 index 000000000..eba272285 --- /dev/null +++ b/web_console_v2/client/src/views/DataFix/index.tsx @@ -0,0 +1,13 @@ +import React from 'react'; +import { Route } from 'react-router-dom'; +import DataFixForm from './DataFixForm'; + +function DataFix() { + return ( + <> + <Route path="/data_fix" exact component={DataFixForm} /> + </> + ); +} + +export default DataFix; diff --git a/web_console_v2/client/src/views/Datasets/AddBatchForm/FileToImportList.tsx b/web_console_v2/client/src/views/Datasets/AddBatchForm/FileToImportList.tsx deleted file mode 100644 index 42bce6f21..000000000 --- a/web_console_v2/client/src/views/Datasets/AddBatchForm/FileToImportList.tsx +++ /dev/null @@ -1,164 +0,0 @@ -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Table, Row, Col, DatePicker, Input, Button } from 'antd'; -import i18n from 'i18n'; -import { disableFuture, formatTimestamp } from 'shared/date'; -import { useTranslation } from 'react-i18next'; -import dayjs, { Dayjs } from 'dayjs'; -import { FileToImport } from 'typings/dataset'; -import { useQuery } from 'react-query'; -import { fetchFileList } from 'services/dataset'; -import { Search } from 'components/IconPark'; -import { isEmpty } from 'lodash'; - -const Container = styled.div``; -const FiltersRow = styled(Row)` - margin-bottom: 10px; -`; -const TableContainer = styled.div` - .ant-table-thead { - position: sticky; - top: 0; - background-color: #fafafa; - } -`; -const FileName = styled.small` - font-size: 0.8em; -`; - -const columns = [ - { - title: i18n.t('dataset.col_file_name'), - dataIndex: 'path', - key: 'path', - ellipsis: true, - render: (path: string) => { - return <FileName>{path}</FileName>; - }, - }, - { - title: i18n.t('dataset.col_files_size'), - dataIndex: 'size', - key: 'size', - width: 140, - render: (path: number) => { - return <>{path.toLocaleString('en')} KB</>; - }, - }, - { - title: i18n.t('dataset.col_modification_time'), - dataIndex: 'mtime', - name: 'modification_time', - width: 130, - render: (time: number) => <div>{time ? 
formatTimestamp(time, 'YYYY/MM/DD HH:mm') : '-'}</div>, - }, -]; - -type Value = string[]; -type Props = { - value?: Value; - onChange?: (val: Value) => any; -}; - -const FileToImportList: FC<Props> = ({ value, onChange }) => { - const { t } = useTranslation(); - const [query, setLocalQuery] = useState({ - dateRange: [] as Dayjs[], - name: '', - }); - const [directory, setDirectory] = useState(''); - - const listQuery = useQuery(['getFileList'], () => fetchFileList({ directory }), { - refetchOnWindowFocus: false, - }); - - const listData = (listQuery.data?.data || []) - .map((item) => ({ ...item, key: item.path })) - .filter(filesFilter); - - return ( - <Container> - <FiltersRow> - <Input.Search - value={directory} - placeholder={t('dataset.placeholder_directory_filter')} - onChange={(event) => setDirectory(event.target.value as string)} - onSearch={() => listQuery.refetch()} - allowClear - /> - </FiltersRow> - - <FiltersRow align="middle" gutter={5}> - <Col span={3}> - <small>{t('dataset.selected_items', { count: value?.length || 0 })}</small> - </Col> - <Col> - <DatePicker.RangePicker - value={query.dateRange as any} - onChange={onDateChange as any} - disabledDate={disableFuture} - /> - </Col> - <Col span={9}> - <Input - value={query.name} - suffix={<Search />} - placeholder={t('dataset.placeholder_filename_filter')} - onChange={onKeywordChange} - /> - </Col> - <Col span={2}> - <Button type="link" size="small" onClick={onResetClick}> - {t('reset')} - </Button> - </Col> - </FiltersRow> - - <TableContainer> - <Table - loading={listQuery.isFetching} - size="small" - scroll={{ y: 350 }} - dataSource={listData} - pagination={false} - columns={columns} - rowSelection={{ - onChange(_: any, selected) { - onChange && onChange(selected.map((item) => item.path)); - }, - }} - /> - </TableContainer> - </Container> - ); - - function onDateChange(val: Dayjs[]) { - setLocalQuery({ - ...query, - dateRange: val, - }); - } - function onKeywordChange(event: any) { - setLocalQuery({ - ...query, - name: event.target.value, - }); - } - function onResetClick() { - setLocalQuery({ - dateRange: [] as Dayjs[], - name: '', - }); - } - function filesFilter(item: FileToImport): boolean { - const nameMatched = item.path.includes(query.name.trim()); - const timeMatched = - isEmpty(query.dateRange) || - (query.dateRange[0].isBefore(dayjs.unix(item.mtime)) && - query.dateRange[1].isAfter(dayjs.unix(item.mtime))); - - return nameMatched && timeMatched; - } -}; - -export default FileToImportList; diff --git a/web_console_v2/client/src/views/Datasets/AddBatchForm/index.tsx b/web_console_v2/client/src/views/Datasets/AddBatchForm/index.tsx deleted file mode 100644 index c0d6d6fd3..000000000 --- a/web_console_v2/client/src/views/Datasets/AddBatchForm/index.tsx +++ /dev/null @@ -1,155 +0,0 @@ -import React, { - forwardRef, - ForwardRefRenderFunction, - ReactElement, - useImperativeHandle, -} from 'react'; -import styled from 'styled-components'; -import { Form, message, Row, DatePicker, Checkbox } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { DataBatchImportPayload, DatasetType } from 'typings/dataset'; - -import { useToggle } from 'react-use'; -import { to } from 'shared/helpers'; -import { startToImportDataBatch } from 'services/dataset'; -import FileToImportList from './FileToImportList'; -import { isEmpty } from 'lodash'; -import dayjs from 'dayjs'; - -const Container = styled.div` - .ant-form-item:not(.ant-form-item-with-help) { - margin-bottom: 16px; - } -`; -const FooterRow = styled(Row)` - 
padding-top: 15px; - border-top: 1px solid var(--backgroundColorGray); -`; - -type Props = { - datasetType?: DatasetType; - datasetId?: ID; - onSuccess?: any; - onError?: (err: any) => void; - renderButtons: (scope: { submitting: boolean }) => ReactElement; -}; - -export type AddBatchExposedRef = { - validate: () => Promise<boolean>; - submit: Function; - toggleSubmit(v: boolean): void; -}; - -const AddBatchForm: ForwardRefRenderFunction<AddBatchExposedRef, Props> = ( - { datasetType, datasetId, renderButtons }, - parentRef, -) => { - const { t } = useTranslation(); - const [formInstance] = Form.useForm<DataBatchImportPayload>(); - const [submitting, toggleSubmit] = useToggle(false); - - useImperativeHandle(parentRef, () => { - return { validate, submit, toggleSubmit }; - }); - - return ( - <Container> - <Form - initialValues={{ move: false }} - labelCol={{ span: 4 }} - wrapperCol={{ span: 20 }} - form={formInstance} - style={{ width: '1000px' }} - labelAlign="left" - > - {datasetType === DatasetType.STREAMING && ( - <Form.Item - name="event_time" - label={t('dataset.label_event_time')} - rules={[{ required: true, message: t('dataset.msg_event_time_required') }]} - > - <DatePicker - format="YYYY-MM-DD HH:mm" - showTime={{ format: 'HH:mm' }} - placeholder={t('dataset.placeholder_event_time')} - /> - </Form.Item> - )} - - <Form.Item name="files" labelCol={{ span: 0 }} wrapperCol={{ span: 24 }}> - <FileToImportList /> - </Form.Item> - - <Form.Item name="move" noStyle valuePropName="checked"> - <Checkbox style={{ position: 'absolute', bottom: '19px' }}> - {t('dataset.label_move_file')} - </Checkbox> - </Form.Item> - - <Form.Item wrapperCol={{ span: 24 }} style={{ marginBottom: 0 }}> - <FooterRow justify="end">{renderButtons({ submitting })}</FooterRow> - </Form.Item> - </Form> - </Container> - ); - - function checkFilesSelection(): boolean { - const files = formInstance.getFieldValue('files'); - return !isEmpty(files); - } - - async function validate() { - try { - await formInstance.validateFields(); - } catch { - return false; - } - - const isFilesValid = checkFilesSelection(); - - if (!isFilesValid) { - // Please select file firstly - message.warning(t('dataset.msg_file_required')); - return false; - } - - return true; - } - - /** - * @param datasetIdBackup prevent that datasetId from props is undefined - * (most happens when invoke submit right after setDatasetId) - */ - async function submit(datasetIdBackup?: ID) { - const isValid = await validate(); - - if (!isValid) throw new Error('FORM_VALIDATION_ERROR'); - - const id = datasetId || datasetIdBackup; - - if (!id) { - throw new Error(t('dataset.msg_id_required')); - } - - toggleSubmit(true); - - const values = formInstance.getFieldsValue(); - - values.event_time = dayjs(values.event_time).unix(); - - const [res, error] = await to(startToImportDataBatch(id, values)); - - if (error) { - toggleSubmit(false); - message.error(error.message); - - throw error; - } - - toggleSubmit(false); - - return res; - } -}; - -export default forwardRef(AddBatchForm); diff --git a/web_console_v2/client/src/views/Datasets/CreateDataSource/FormModel/index.module.less b/web_console_v2/client/src/views/Datasets/CreateDataSource/FormModel/index.module.less new file mode 100644 index 000000000..5383ffd47 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataSource/FormModel/index.module.less @@ -0,0 +1,42 @@ +.datas_source_create_form{ + max-width: 480px; + margin: 0 auto; +} +.data_source_create_section{ + margin-bottom: 20px; + overflow: 
hidden; // bfc + > h3 { + margin-bottom: 20px; + font-weight: 500; + font-size: 14px; + color: #1d252f; + } + .title-tag{ + margin: 0 12px 0 12px; + } +} + +.data_source_url{ + display: inline-block; +} + +.data_source_form_label{ + display: inline-block; + font-size: 14px; + font-weight: 400; + color: #4e5969; +} + +.data_source_is_update_text{ + color: #1664FF; + cursor: pointer; +} + +.data_source_is_update_rule{ + color: #4E5969; + font-size: 12px; + line-height: 18px; + & p{ + margin-bottom: 0; + } +} diff --git a/web_console_v2/client/src/views/Datasets/CreateDataSource/FormModel/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataSource/FormModel/index.tsx new file mode 100644 index 000000000..da391fcb0 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataSource/FormModel/index.tsx @@ -0,0 +1,336 @@ +import React, { useState, useMemo } from 'react'; +import { Form, Input, Button, Space, Switch, Tag, Popover } from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import { Image, Struct, UnStruct } from 'components/IconPark'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import TitleWithIcon from 'components/TitleWithIcon'; +import GridRow from 'components/_base/GridRow'; +import BlockRadio from 'components/_base/BlockRadio'; +import { DataSourceDataType, DataSourceStructDataType, DatasetType } from 'typings/dataset'; +import debounce from 'debounce-promise'; +import { checkDataSourceConnection } from 'services/dataset'; +import { useGetAppFlagValue, useIsFormValueChange } from 'hooks'; +import { FlagKey } from 'typings/flag'; + +import styled from './index.module.less'; + +export interface Props<T = any> { + isEdit: boolean; + onCancel?: () => void; + onChange?: (values: T) => void; + onOk?: (values: T) => Promise<void>; +} + +export interface FormData { + name: string; + data_source_url: string; + dataset_format: DataSourceDataType; + store_format?: DataSourceStructDataType; + dataset_type: DatasetType; +} + +const dataSourceDataTypeAssets = { + [DataSourceDataType.STRUCT]: { explain: '支持 csv、tfrecords', icon: <Struct /> }, + [DataSourceDataType.NONE_STRUCTURED]: { + explain: '支持 fastq、bam、vcf、rsa等', + icon: <UnStruct />, + }, + [DataSourceDataType.PICTURE]: { explain: '支持 JPEG、PNG、BMP、GIF', icon: <Image /> }, +}; + +const structDataOptions = [ + { + value: DataSourceStructDataType.CSV, + label: 'csv', + }, + { + value: DataSourceStructDataType.TFRECORDS, + label: 'tfrecords', + }, +]; + +const FormModal: React.FC<Props<FormData>> = function ({ onCancel, onChange, onOk, isEdit }) { + const [formInstance] = Form.useForm<any>(); + const [formData, setFormData] = useState<Partial<FormData>>({ + dataset_format: DataSourceDataType.STRUCT, + store_format: DataSourceStructDataType.CSV, + dataset_type: DatasetType.PSI, + }); + const trusted_computing_enabled = useGetAppFlagValue(FlagKey.TRUSTED_COMPUTING_ENABLED); + const [isCreating, setIsCreating] = useState(false); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); + const [connectionState, setConnectionState] = useState<'connecting' | 'success' | 'fail'>(); + const [fileNameList, setFileNameList] = useState<string[]>([]); + const [extraFileCount, setExtraFileCount] = useState<number>(0); + + const dataSourceDataTypeOptions = useMemo(() => { + const options = [ 
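+ /* The two base options below are always offered; the unstructured option is appended only when the TRUSTED_COMPUTING_ENABLED flag is on, keeping flag-gated entries at the end of the list. */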
+ { + value: DataSourceDataType.STRUCT, + label: '结构化数据', + }, + { + value: DataSourceDataType.PICTURE, + label: '图片', + }, + ]; + if (trusted_computing_enabled) { + options.push({ + value: DataSourceDataType.NONE_STRUCTURED, + label: '非结构化数据', + }); + } + return options; + }, [trusted_computing_enabled]); + + const stateIndicatorProps = useMemo(() => { + let type: StateTypes = 'processing'; + let text = 'processing'; + switch (connectionState) { + case 'connecting': + type = 'processing'; + text = '连接中'; + break; + case 'success': + type = 'success'; + text = '连接成功'; + break; + case 'fail': + type = 'error'; + text = '连接失败'; + break; + default: + break; + } + + return { + type, + text, + }; + }, [connectionState]); + + const handleCheckDataSource = debounce(async function (value: any, cb) { + if (isEdit || !formInstance.getFieldValue('data_source_url')) { + setConnectionState(undefined); + setFileNameList([]); + setExtraFileCount(0); + return; + } + setConnectionState('connecting'); + setFileNameList([]); + setExtraFileCount(0); + try { + const resp = await checkDataSourceConnection({ + dataset_type: formInstance.getFieldValue('dataset_type'), + data_source_url: formInstance.getFieldValue('data_source_url'), + file_num: 3, + }); + setFileNameList(resp?.data?.file_names ?? []); + setExtraFileCount(resp?.data?.extra_nums ?? 0); + setConnectionState('success'); + typeof cb === 'function' && cb(undefined); + } catch (error) { + setConnectionState('fail'); + typeof cb === 'function' && cb(' '); // one space string, validate error but don't show any message + } + }, 300); + + return ( + <Form + className={styled.datas_source_create_form} + initialValues={formData} + layout="vertical" + form={formInstance} + onSubmit={onSubmit} + onValuesChange={onFormValueChange} + scrollToFirstError + > + {renderBaseConfigLayout()} + {renderDataImport()} + {renderFooterButton()} + </Form> + ); + + function onSubmit(values: FormData) { + if (connectionState === 'connecting' || connectionState === 'fail') { + return; + } + setIsCreating(true); + values.dataset_type = formInstance.getFieldValue('dataset_type'); + + if (values.dataset_format !== DataSourceDataType.STRUCT) { + delete values.store_format; + } + onOk?.(values).finally(() => { + setIsCreating(false); + }); + } + function renderBaseConfigLayout() { + return ( + <section className={styled.data_source_create_section}> + <h3>基本配置</h3> + <Form.Item + field="name" + label="数据源名称" + hasFeedback + rules={[ + { required: true, message: '请输入' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input placeholder="请输入" maxLength={60} /> + </Form.Item> + <Form.Item + field="comment" + label="描述" + rules={[{ maxLength: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }]} + > + <Input.TextArea rows={3} placeholder="最多为 200 个字符" showWordLimit /> + </Form.Item> + <Form.Item field="dataset_format" label="数据类型" rules={[{ required: true }]}> + <BlockRadio + options={dataSourceDataTypeOptions} + isOneHalfMode={false} + flexGrow={0} + blockItemWidth={232} + renderBlockInner={(item, { label, isActive }) => ( + <GridRow + style={{ + height: '55px', + }} + gap="10" + > + <div className="dataset-type-indicator" data-is-active={isActive}> + {dataSourceDataTypeAssets[item.value as DataSourceDataType].icon} + </div> + + <div> + {label} + <div className="dataset-type-explain"> + {dataSourceDataTypeAssets[item.value as DataSourceDataType].explain} + </div> + </div> + </GridRow> + )} + /> + </Form.Item> + {formData.dataset_format === 
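+ /* handleCheckDataSource (above) doubles as the async validator for data_source_url: debounce-promise collapses rapid edits into one checkDataSourceConnection({ dataset_type, data_source_url, file_num: 3 }) call; on success it previews up to three file names plus an extra_nums overflow tag, and on failure it resolves the validator callback with a single space so the field is marked invalid without printing a message. */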
DataSourceDataType.STRUCT ? ( + <Form.Item + field="store_format" + label="数据格式" + rules={[{ required: true, message: '请选择数据集类型' }]} + > + <BlockRadio options={structDataOptions} isOneHalfMode={true} isCenter={true} /> + </Form.Item> + ) : null} + </section> + ); + } + + function renderDataImport() { + return ( + <section className={styled.data_source_create_section}> + <h3>数据源导入</h3> + {formData.dataset_format === DataSourceDataType.STRUCT && ( + <Form.Item field="is_update" label="增量更新"> + <Space> + <Switch onChange={handleIsUpdateChange} /> + <TitleWithIcon + title="开启后,将校验数据源路径下子目录数据格式的文件," + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + <Popover + trigger="click" + title="数据源格式要求" + content={ + <span className={styled.data_source_is_update_rule}> + <p> + 1. + 请确认将当前数据路径下包含子文件夹,并且子文件夹以YYYYMMDD(如20201231)或YYYYMMDD-HH(如20201231-12)命名 + </p> + <p>2. 请确认包含 raw_id 列</p> + </span> + } + > + <span className={styled.data_source_is_update_text}>查看格式要求</span> + </Popover> + </Space> + </Form.Item> + )} + <Form.Item + style={{ position: 'relative' }} + hasFeedback + field="data_source_url" + label={ + <div className={styled.data_source_url}> + <span>数据来源</span> + <StateIndicator + containerStyle={{ + position: 'absolute', + right: 0, + top: 0, + visibility: connectionState ? 'visible' : 'hidden', + }} + {...stateIndicatorProps} + /> + </div> + } + rules={[ + { required: true, message: '请输入' }, + { + validator: handleCheckDataSource, + }, + ]} + > + <Input + placeholder="请填写有效文件目录地址,非文件,如 hdfs:///home/folder" + onClear={onDataSourceUrlClear} + allowClear + /> + </Form.Item> + <Form.Item field="__file_name_list" label="文件名预览"> + <Space> + <span className={styled.data_source_form_label}> + {fileNameList.length > 0 ? fileNameList.join('、') : '暂无数据'} + </span> + {Boolean(extraFileCount) && <Tag>+{extraFileCount}</Tag>} + </Space> + </Form.Item> + </section> + ); + } + function renderFooterButton() { + return ( + <Space> + <Button type="primary" htmlType="submit" loading={isCreating}> + 确认创建 + </Button> + <ButtonWithModalConfirm onClick={onCancel} isShowConfirmModal={isFormValueChanged}> + 取消 + </ButtonWithModalConfirm> + </Space> + ); + } + function handleIsUpdateChange(value: boolean) { + formInstance.setFieldValue('dataset_type', value ? 
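+ /* The incremental-update switch is encoded as dataset_type: on maps to STREAMING (periodic sub-folder imports named YYYYMMDD or YYYYMMDD-HH, per the format tips above), off maps back to PSI; the connection check is re-run because dataset_type is part of the checkDataSourceConnection payload. */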
DatasetType.STREAMING : DatasetType.PSI); + handleCheckDataSource(value, () => {}); + } + function onFormChange(_: Partial<FormData>, values: FormData) { + onChange?.(values); + setFormData(values); + } + function onDataSourceUrlClear() { + setConnectionState(undefined); + setFileNameList([]); + setExtraFileCount(0); + } +}; + +export default FormModal; diff --git a/web_console_v2/client/src/views/Datasets/CreateDataSource/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataSource/index.tsx new file mode 100644 index 000000000..2dd9e2fd3 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataSource/index.tsx @@ -0,0 +1,74 @@ +import React from 'react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import { useHistory, useParams } from 'react-router'; +import { useIsFormValueChange, useGetCurrentProjectId } from 'hooks'; +import { Spin, Message } from '@arco-design/web-react'; +import FormModal, { FormData } from './FormModel/index'; +import { createDataSource } from 'services/dataset'; +import { DataSourceCreatePayload } from 'typings/dataset'; +import { useMutation } from 'react-query'; + +const NewCreateDataSource: React.FC = function () { + const history = useHistory(); + const { action } = useParams<{ + action: 'create' | 'edit'; + }>(); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + const projectId = useGetCurrentProjectId(); + + const isLoading = false; + const isEdit = action === 'edit'; + + const createMutation = useMutation( + (payload: DataSourceCreatePayload) => { + return createDataSource(payload); + }, + { + onSuccess() { + Message.success('创建成功'); + goBackToListPage(); + }, + onError(e: any) { + Message.error(e.message); + }, + }, + ); + + return ( + <SharedPageLayout + title={ + <BackButton isShowConfirmModal={isFormValueChanged} onClick={goBackToListPage}> + 数据源 + </BackButton> + } + centerTitle={isEdit ? 
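+ /* Assumed routing: "action" comes from the route param (create vs edit); on this page edit mode only switches the titles and FormModal's isEdit behavior (which skips the connection check), while submission always goes through createMutation. */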
'编辑数据源' : '创建数据源'} + > + <Spin loading={isLoading}> + <FormModal + isEdit={isEdit} + onCancel={backToList} + onChange={onFormValueChange} + onOk={onFormModalSubmit} + /> + </Spin> + </SharedPageLayout> + ); + + function goBackToListPage() { + history.push('/datasets/data_source'); + } + + function backToList() { + history.goBack(); + } + + async function onFormModalSubmit(values: FormData) { + createMutation.mutate({ + project_id: projectId!, + data_source: values, + }); + } +}; + +export default NewCreateDataSource; diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/DatasetChecker/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataset/DatasetChecker/index.tsx new file mode 100644 index 000000000..97ba4087e --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataset/DatasetChecker/index.tsx @@ -0,0 +1,74 @@ +import React, { useEffect, useMemo, useState } from 'react'; +import { DATASET_SCHEMA_CHECKER } from 'typings/dataset'; +import { Checkbox, Space } from '@arco-design/web-react'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; + +interface IDatasetChecker { + value?: DATASET_SCHEMA_CHECKER[]; + onChange?: (val: DATASET_SCHEMA_CHECKER[]) => void; +} + +export default function DatasetChecker(props: IDatasetChecker) { + const { value, onChange } = props; + const [checkState, setCheckState] = useState({ + join: true, + numeric: false, + }); + useEffect(() => { + const newValue: DATASET_SCHEMA_CHECKER[] = []; + !!checkState.join && newValue.push(DATASET_SCHEMA_CHECKER.RAW_ID_CHECKER); + !!checkState.numeric && newValue.push(DATASET_SCHEMA_CHECKER.NUMERIC_COLUMNS_CHECKER); + onChange?.(newValue); + }, [checkState, onChange]); + + const updateState = (key: 'join' | 'numeric') => { + setCheckState((pre) => ({ + ...pre, + [key]: !pre[key], + })); + }; + + const isJoinChecked = useMemo(() => { + return Array.isArray(value) && value.includes(DATASET_SCHEMA_CHECKER.RAW_ID_CHECKER); + }, [value]); + const isNumericChecked = useMemo(() => { + return Array.isArray(value) && value.includes(DATASET_SCHEMA_CHECKER.NUMERIC_COLUMNS_CHECKER); + }, [value]); + return ( + <Space size="large"> + <Checkbox + checked={isJoinChecked} + onChange={() => { + updateState('join'); + }} + > + { + <TitleWithIcon + isShowIcon={true} + isBlock={false} + title="求交数据校验" + icon={IconInfoCircle} + tip="当数据集需用于求交时,需勾选该选项,将要求数据集必须有raw_id 列且没有重复值" + /> + } + </Checkbox> + <Checkbox + checked={isNumericChecked} + onChange={() => { + updateState('numeric'); + }} + > + { + <TitleWithIcon + isShowIcon={true} + isBlock={false} + title="全数值特征校验" + icon={IconInfoCircle} + tip="当数据集需用于树模型训练时,需勾选该选项,将要求数据集特征必须为全数值" + /> + } + </Checkbox> + </Space> + ); +} diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/PublishChecker/index.less b/web_console_v2/client/src/views/Datasets/CreateDataset/PublishChecker/index.less new file mode 100644 index 000000000..faed6f4c8 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataset/PublishChecker/index.less @@ -0,0 +1,38 @@ +.publish-checker-container{ + height: 64px; + background-image: url('../../../../assets/images/dataset-publish-bg.png'); + background-size: cover; + position: relative; + display: flex; + flex-direction: row; + justify-content: flex-start; + align-items: center; + .publish-text{ + display: flex; + flex-direction: column; + align-items: flex-start; + margin-left: 12px; + font-family: 'PingFang SC'; + font-style: normal; + font-weight: 
500; + font-size: 12px; + line-height: 20px; + color: #1d2129; + span:nth-child(2) { + color: #4e5969; + font-weight: 400; + } + } + .credit-card{ + position: absolute; + right: 12px; + top: 12px; + background: #ffffff; + opacity: 0.7; + border-radius: 40px; + padding: 0 8px; + } + .credit-icon{ + display: inline-block; + } +} diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/PublishChecker/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataset/PublishChecker/index.tsx new file mode 100644 index 000000000..309b4aa07 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataset/PublishChecker/index.tsx @@ -0,0 +1,34 @@ +import React from 'react'; +import { Checkbox } from '@arco-design/web-react'; +import creditsIcon from 'assets/icons/credits-icon.svg'; +import { useGetAppFlagValue } from 'hooks'; +import { FlagKey } from 'typings/flag'; +import './index.less'; + +type TPublishChecker = { + value?: boolean; + onChange?: (val: boolean) => void; +}; + +export default function PublishChecker(prop: TPublishChecker) { + const { value, onChange } = prop; + const handleOnChange = (val: boolean) => { + onChange?.(val); + }; + const bcs_support_enabled = useGetAppFlagValue(FlagKey.BCS_SUPPORT_ENABLED); + return ( + <div className="publish-checker-container"> + <Checkbox checked={value} onChange={handleOnChange} /> + <div className="publish-text"> + <span>发布至工作区</span> + <span>发布后,工作区中合作伙伴可使用该数据集</span> + </div> + {!!bcs_support_enabled && ( + <div className="credit-card"> + <img className="credit-icon" src={creditsIcon} alt="credit-icon" /> + 100积分 + </div> + )} + </div> + ); +} diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/StepOneBasic/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataset/StepOneBasic/index.tsx deleted file mode 100644 index 0712f3f28..000000000 --- a/web_console_v2/client/src/views/Datasets/CreateDataset/StepOneBasic/index.tsx +++ /dev/null @@ -1,106 +0,0 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Form, Input, Radio, message, Button, Popconfirm, Select } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { DatasetCreatePayload, DatasetType } from 'typings/dataset'; -import GridRow from 'components/_base/GridRow'; -import { useRecoilState } from 'recoil'; -import { datasetBasicForm } from 'stores/dataset'; -import { useRecoilQuery } from 'hooks/recoil'; -import { projectListQuery } from 'stores/project'; - -const FooterRow = styled(GridRow)` - padding-top: 15px; - border-top: 1px solid var(--backgroundColorGray); -`; - -type Props = { - onCancel: any; - onSuccess: any; -}; - -const StepOneBasic: FC<Props> = ({ onSuccess, onCancel }) => { - const { t } = useTranslation(); - const [formInstance] = Form.useForm<DatasetCreatePayload>(); - - const [formValues, saveToRecoil] = useRecoilState(datasetBasicForm); - const { data: projectList } = useRecoilQuery(projectListQuery); - - return ( - <Form - initialValues={{ ...formValues }} - labelCol={{ span: 6 }} - wrapperCol={{ span: 18 }} - style={{ width: '500px' }} - form={formInstance} - onFinish={submit} - > - <Form.Item - name="name" - label={t('dataset.label_name')} - rules={[{ required: true, message: t('dataset.msg_name_required') }]} - > - <Input placeholder={t('dataset.placeholder_name')} /> - </Form.Item> - - <Form.Item - name="project_id" - label={t('workflow.label_project')} - hasFeedback - rules={[{ required: true, message: t('workflow.msg_project_required') }]} - > - <Select 
placeholder={t('workflow.placeholder_project')}> - {projectList && - projectList.map((pj) => ( - <Select.Option key={pj.id} value={pj.id}> - {pj.name} - </Select.Option> - ))} - </Select> - </Form.Item> - - <Form.Item - name="dataset_type" - label={t('dataset.label_type')} - rules={[{ required: true, message: t('dataset.msg_type_required') }]} - > - <Radio.Group> - <Radio.Button value={DatasetType.PSI}>PSI</Radio.Button> - <Radio.Button value={DatasetType.STREAMING}>Streaming</Radio.Button> - </Radio.Group> - </Form.Item> - - <Form.Item name="comment" label={t('dataset.label_comment')}> - <Input.TextArea rows={4} placeholder={t('dataset.placeholder_comment')} /> - </Form.Item> - - <Form.Item wrapperCol={{ span: 24 }} style={{ marginBottom: 0 }}> - <FooterRow justify="end" gap="12"> - <Popconfirm - title={t('dataset.msg_quit_warning')} - cancelText={t('cancel')} - okText={t('submit')} - onConfirm={onCancel} - > - <Button>{t('cancel')}</Button> - </Popconfirm> - - <Button type="primary" htmlType="submit"> - {t('next_step')} - </Button> - </FooterRow> - </Form.Item> - </Form> - ); - - async function submit(value: DatasetCreatePayload) { - try { - saveToRecoil({ ...value }); - onSuccess(); - } catch (error) { - message.error(error.message); - } - } -}; - -export default StepOneBasic; diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/StepTwoAddBatch/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataset/StepTwoAddBatch/index.tsx deleted file mode 100644 index 1b79f96e4..000000000 --- a/web_console_v2/client/src/views/Datasets/CreateDataset/StepTwoAddBatch/index.tsx +++ /dev/null @@ -1,107 +0,0 @@ -import React, { FC, useRef, useState } from 'react'; -import styled from 'styled-components'; -import { message, Button, Popconfirm } from 'antd'; -import { useTranslation } from 'react-i18next'; -import GridRow from 'components/_base/GridRow'; -import { to } from 'shared/helpers'; -import { createDataset, deleteDataset } from 'services/dataset'; -import AddBatchForm, { AddBatchExposedRef } from '../../AddBatchForm'; -import { useRecoilValue } from 'recoil'; -import { datasetBasicForm } from 'stores/dataset'; - -const Container = styled.div``; - -type Props = { - onCancel: any; - onSuccess: any; - onPrevious: any; -}; - -const StepTwoAddBatches: FC<Props> = ({ onSuccess, onPrevious, onCancel }) => { - const { t } = useTranslation(); - const [datasetId, setDatasetId] = useState<ID>(null as any); - const formRef = useRef<AddBatchExposedRef>(); - - const basicForm = useRecoilValue(datasetBasicForm); - - return ( - <Container> - <AddBatchForm - ref={formRef as any} - datasetId={datasetId} - datasetType={basicForm.dataset_type} - renderButtons={({ submitting }) => { - return ( - <GridRow gap="12"> - <Popconfirm - title={t('dataset.msg_quit_warning')} - cancelText={t('cancel')} - okText={t('submit')} - onConfirm={onCancel} - > - <Button disabled={submitting}>{t('cancel')}</Button> - </Popconfirm> - - <Button disabled={submitting} onClick={onPrevious}> - {t('previous_step')} - </Button> - - <Button type="primary" onClick={submitDatasetNInitiateImporting} loading={submitting}> - {t('dataset.btn_finish_n_import')} - </Button> - </GridRow> - ); - }} - /> - </Container> - ); - - /** - * Q: Why do we send create dataset requet at step2 not step1? 
- * A: In the case of user quit the flow at step2, the dataset shouldn't be created - */ - async function submitDatasetNInitiateImporting() { - if (!formRef.current) return; - - const { submit: submitAddBatchForm, toggleSubmit, validate } = formRef.current; - - const isValid = await validate(); - - if (!isValid) return; - - toggleSubmit(true); - - const [res, error] = await to(createDataset({ ...basicForm })); - const datasetId = res.data.id; - - // NOTE: it's async !!!!!! - setDatasetId(datasetId); - - if (error) { - toggleSubmit(false); - return message.error(error.message); - } - - // Trigger submit add-batch form - const [, addBatchError] = await to(submitAddBatchForm(datasetId)); - - if (addBatchError) { - message.error(addBatchError.message); - // TODO: what if delete request also failed? - try { - deleteDataset(datasetId); - } catch { - /** ignore error */ - } - toggleSubmit(false); - return; - } - - message.success(t('dataset.msg_start_importing')); - toggleSubmit(false); - // Tell parent the happy news - onSuccess(); - } -}; - -export default StepTwoAddBatches; diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/index.less b/web_console_v2/client/src/views/Datasets/CreateDataset/index.less new file mode 100644 index 000000000..2e05fee21 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateDataset/index.less @@ -0,0 +1,35 @@ +.dataset-type-indicator{ + width: 28px; + height: 28px; + border-radius: 50%; + justify-content: center; + align-items: center; + display: flex; + background-color: rgb(var(--gray-2)); +} + +.dataset-type-explain{ + font-size: 12px; + height: 16px; + color: var(--textColorSecondary); + transform: scale(0.9); + transform-origin: 0% 50%; +} + +.dataset-raw-create-form{ + width: 480px; + margin: 0 auto; + .form-section{ + overflow: hidden; // bfc + > h3 { + margin-bottom: 20px; + font-weight: 500; + font-size: 14px; + color: #1d252f; + } + } +} + +.dataset-raw-input-number{ + width: 112px; +} diff --git a/web_console_v2/client/src/views/Datasets/CreateDataset/index.tsx b/web_console_v2/client/src/views/Datasets/CreateDataset/index.tsx index f8c9e5e91..acbf77930 100644 --- a/web_console_v2/client/src/views/Datasets/CreateDataset/index.tsx +++ b/web_console_v2/client/src/views/Datasets/CreateDataset/index.tsx @@ -1,88 +1,727 @@ -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Modal } from 'antd'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import { useHistory } from 'react-router-dom'; -import { useTranslation } from 'react-i18next'; +import { + Button, + Form, + Input, + InputNumber, + Message, + Space, + Alert, + Checkbox, +} from '@arco-design/web-react'; +import BackButton from 'components/BackButton'; +import FileUpload, { UploadFile, UploadFileType } from 'components/FileUpload'; +import { Image, Struct, UnStruct } from 'components/IconPark'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BlockRadio from 'components/_base/BlockRadio'; +import GridRow from 'components/_base/GridRow'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import DataSourceSelect from 'components/DataSourceSelect'; +import ConfigForm, { ExposedRef, ItemProps } from 'components/ConfigForm'; +import { useQuery } from 'react-query'; +import { isEmpty } from 'lodash-es'; +import React, { FC, useState, useMemo, useRef } from 'react'; +import { useHistory } from 'react-router'; +import { + createDataset, + createDataSource, + deleteDataset, + 
createDatasetJobs, + fetchDataJobVariableDetail, +} from 'services/dataset'; +import { to, isStringCanBeParsed } from 'shared/helpers'; +import { useGetAppFlagValue, useGetCurrentProjectId, useIsFormValueChange } from 'hooks'; +import { MAX_COMMENT_LENGTH, validNamePattern } from 'shared/validator'; +import { fetchSysInfo } from 'services/settings'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { + DataBatchImportPayload, + DATASET_SCHEMA_CHECKER, + DatasetCreatePayload, + DatasetDataType, + DatasetType__archived, + DataJobBackEndType, + DataJobVariable, + DatasetKindV2, + DataSourceStructDataType, + DATASET_COPY_CHECKER, + DatasetType, + DataSourceDataType, + DatasetJobCreatePayload, +} from 'typings/dataset'; +import PublishChecker from './PublishChecker'; +import { FlagKey } from 'typings/flag'; +import DatasetChecker from './DatasetChecker'; +import { + Variable, + VariableComponent, + VariableValueType, + VariableWidgetSchema, +} from 'typings/variable'; +import { + NO_CATEGORY, + TAG_MAPPER, + VARIABLE_TIPS_MAPPER, + CREDITS_LIMITS, + CronType, + cronTypeOptions, +} from '../shared'; import { useToggle } from 'react-use'; -import { Steps, Row } from 'antd'; -import StepOneBasic from './StepOneBasic'; -import StepTwoAddBatch from './StepTwoAddBatch'; -import { useResetCreateForm } from 'hooks/dataset'; -import { forceToRefreshQuery } from 'shared/queryClient'; -import { DATASET_LIST_QUERY_KEY } from '../DatasetList'; - -const ContainerModal = styled(Modal)` - .ant-modal-body { - padding-bottom: 14px; - } - .ant-modal-footer { - display: none; - } -`; -const StepRow = styled(Row)` - width: 340px; - margin: 10px auto 35px; -`; +import { Tag as TagEnum } from 'typings/workflow'; +import { hydrate } from 'views/Workflows/shared'; +import './index.less'; + +type Props = Record<string, unknown>; + +enum DataImportWay { + Remote = 'remote', + Local = 'local', +} +type Params = { + [key: string]: any; +}; +type FormData = DatasetCreatePayload & + DataBatchImportPayload & { + _import_from: DataImportWay; + data_format: DatasetDataType; + files?: UploadFile[]; + params: Params; + data_source_uuid: ID; + store_format: DataSourceStructDataType; + import_type: DATASET_COPY_CHECKER; + cron_type: CronType; + }; + +const newDatasetTypeOptions = [ + { + value: DatasetDataType.STRUCT, + label: '结构化数据', + }, + { + value: DatasetDataType.PICTURE, + label: '图片', + }, + // { + // value: DatasetDataType.UNSTRUCT, + // label: '非结构化数据', + // }, +]; + +const importWayOptions = [ + { + value: DataImportWay.Remote, + label: '数据源导入', + }, + { + value: DataImportWay.Local, + label: '本地导入', + }, +]; + +const structDataOptions = [ + { + value: DataSourceStructDataType.CSV, + label: 'csv', + }, + { + value: DataSourceStructDataType.TFRECORDS, + label: 'tfrecords', + }, +]; -const CreateDataset: FC = () => { +const datasetTypeAssets = { + [DatasetDataType.STRUCT]: { explain: '支持csv、tfrecord', icon: <Struct /> }, + [DatasetDataType.PICTURE]: { explain: '支持JPEG、PNG、BMP、GIF', icon: <Image /> }, + [DatasetDataType.NONE_STRUCTURED]: { explain: '支持 fastq、bam、vcf、rsa等', icon: <UnStruct /> }, +}; + +const CreateDataset: FC<Props> = () => { + const [formInstance] = Form.useForm<FormData>(); const history = useHistory(); - const { t } = useTranslation(); - const [step, setStep] = useState(0); - const [visible, toggleVisible] = useToggle(true); - const resetForm = useResetCreateForm(); + const currentProjectId = useGetCurrentProjectId(); + + const 
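+ /* Note on the query declared just below (assumed payload shape): fetchDataJobVariableDetail(IMPORT_SOURCE) returns the backend's variable definitions for the import job, each widget_schema arriving as a JSON string such as '{"component":"Input","required":true}'; it is parsed defensively (malformed JSON degrades to {}) and then drives the dynamic ConfigForm items. */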
dataJobVariableDetailQuery = useQuery( + ['getDataJobVariableDetail', DataJobBackEndType.IMPORT_SOURCE], + () => fetchDataJobVariableDetail(DataJobBackEndType.IMPORT_SOURCE), + { + enabled: true, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const [formData, setFormData] = useState<Partial<FormData>>({ + project_id: currentProjectId, + dataset_type: DatasetType__archived.PSI, + store_format: DataSourceStructDataType.CSV, + data_format: DatasetDataType.STRUCT, + _import_from: DataImportWay.Remote, + need_publish: false, + value: 100, + schema_checkers: [DATASET_SCHEMA_CHECKER.RAW_ID_CHECKER], + cron_type: CronType.DAY, + }); + + const [isCreating, setIsCreating] = useState(false); + const [needPublish, setNeedPublish] = useState(false); + const [copyState, setCopyState] = useState({ + isShow: true, + isDisabled: false, + isCopy: true, + }); + const [isShowCron, toggleShowCron] = useToggle(false); + const [cronType, setCornType] = useState<CronType>(CronType.DAY); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); + const bcs_support_enabled = useGetAppFlagValue(FlagKey.BCS_SUPPORT_ENABLED); + const configFormRefList = useRef<Array<ExposedRef | null>>([]); + + const sysInfoQuery = useQuery(['getSysInfo'], () => fetchSysInfo(), { + retry: 2, + refetchOnWindowFocus: false, + }); + + const myDomainName = useMemo<string>(() => { + return sysInfoQuery.data?.data?.domain_name ?? ''; + }, [sysInfoQuery.data]); + + const dataJobVariableList = useMemo<Variable[]>(() => { + if (!dataJobVariableDetailQuery.data?.data?.variables) { + return []; + } + + return dataJobVariableDetailQuery.data.data.variables.map((item) => { + let widget_schema: VariableWidgetSchema = {}; + + try { + widget_schema = JSON.parse(item.widget_schema); + } catch (error) {} + + return { + ...item, + widget_schema, + }; + }); + }, [dataJobVariableDetailQuery.data]); + + const paramsList = useMemo<ItemProps[]>(() => { + const list: ItemProps[] = []; + dataJobVariableList + .filter((item) => !item.widget_schema.hidden) + .forEach((item) => { + const baseRuleList = item.widget_schema.required + ? [ + { + required: true, + message: '必填项', + }, + ] + : []; + + list.push({ + tip: VARIABLE_TIPS_MAPPER[item.name], + label: item.name, + tag: TAG_MAPPER[item.tag as TagEnum] || NO_CATEGORY, + field: item.name, + initialValue: + item.widget_schema.component === VariableComponent.Input + ? item.value + : item.typed_value, + componentType: item.widget_schema.component, + rules: + item.widget_schema.component === VariableComponent.Input && + [VariableValueType.LIST, VariableValueType.OBJECT].includes(item.value_type!) + ? 
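+ /* For Input-rendered variables whose value_type is LIST or OBJECT, the extra rule below accepts either an actual object or a string that parses as JSON (isStringCanBeParsed); anything else reports a format error, so '{"a": 1}' passes while '{a: 1}' fails. */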
[ + ...baseRuleList, + { + validator: (value, callback) => { + if ((value && typeof value === 'object') || isStringCanBeParsed(value)) { + callback(); + return; + } + callback(`JSON ${item.value_type!} 格式错误`); + }, + }, + ] + : baseRuleList, + }); + }); + return list; + }, [dataJobVariableList]); return ( - <ContainerModal - title={t('dataset.title_create')} - visible={visible} - style={{ top: '20%' }} - width="fit-content" - closable={false} - maskClosable={false} - maskStyle={{ backdropFilter: 'blur(4px)' }} - keyboard={false} - afterClose={afterClose} - getContainer="body" - zIndex={Z_INDEX_GREATER_THAN_HEADER} - onCancel={() => toggleVisible(false)} + <SharedPageLayout + title={ + <BackButton onClick={backToList} isShowConfirmModal={isFormValueChanged}> + 原始数据集 + </BackButton> + } + centerTitle="创建数据集" > - <StepRow justify="center"> - <Steps current={step} size="small"> - <Steps.Step title={t('dataset.step_basic')} /> - <Steps.Step title={t('dataset.step_add_batch')} /> - </Steps> - </StepRow> - - {step === 0 && <StepOneBasic onSuccess={goAddBatch} onCancel={closeModal} />} - - {step === 1 && ( - <StepTwoAddBatch - onSuccess={onCreateNStartImportSuccess} - onPrevious={backToStepBasic} - onCancel={closeModal} - /> - )} - </ContainerModal> + <Form + className="dataset-raw-create-form" + initialValues={formData} + layout="vertical" + form={formInstance} + onSubmit={onSubmit} + onValuesChange={onFormValueChange} + scrollToFirstError + > + {renderBaseConfigLayout()} + {formData._import_from === DataImportWay.Remote && renderDataSourceImportLayout()} + {formData._import_from === DataImportWay.Local && renderLocalImportLayout()} + {formData.data_format === DatasetDataType.STRUCT && renderDatasetChecker()} + {renderParamsConfig()} + {renderPublishChecker()} + {needPublish && bcs_support_enabled && renderCreditsInput()} + {renderFooterButton()} + </Form> + </SharedPageLayout> ); - function afterClose() { - history.push('/datasets'); + function renderBaseConfigLayout() { + return ( + <section className="form-section"> + <h3>基本配置</h3> + <Form.Item + field="name" + label="数据集名称" + hasFeedback + rules={[ + { required: true, message: '请输入' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input placeholder="请输入" maxLength={60} /> + </Form.Item> + <Form.Item + field="comment" + label="描述" + rules={[{ maxLength: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }]} + > + <Input.TextArea rows={2} placeholder="最多为 200 个字符" showWordLimit /> + </Form.Item> + <Form.Item + field="_import_from" + label="导入方式" + rules={[{ required: true, message: '请选择数据集类型' }]} + > + <BlockRadio + options={importWayOptions} + isOneHalfMode={true} + isCenter={true} + onChange={handleImportTypeChange} + /> + </Form.Item> + </section> + ); + } + + function renderDataSourceImportLayout() { + return ( + <section className="form-section"> + <h3>数据源导入</h3> + <Form.Item + field="data_source_uuid" + label="数据源" + rules={[{ required: true, message: '请选择' }]} + > + <DataSourceSelect valueKey="uuid" onChange={handleDataSourceChange} /> + </Form.Item> + {isShowCron && ( + <Form.Item field="cron_type" label="导入周期"> + <BlockRadio + flexGrow={0} + isCenter={false} + isWarnTip={true} + gap={10} + isOneHalfMode={false} + options={cronTypeOptions} + onChange={(val: CronType) => { + setCornType(val); + }} + /> + </Form.Item> + )} + + {copyState.isShow && ( + <Form.Item field="import_type" label="数据配置"> + <Space size="large"> + <Checkbox + checked={copyState.isCopy} + 
disabled={copyState.isDisabled} + onChange={handleChangeIsCopy} + > + { + <TitleWithIcon + isShowIcon={true} + isBlock={false} + title="数据拷贝" + icon={IconInfoCircle} + tip="开启后,数据信息将拷贝至平台对应的数据库存储中,以便后续训练。当数据集用于可信计算,或定时求交数据量过大(例如天级求交存量数据大于15天)时,建议关闭该选项。" + /> + } + </Checkbox> + </Space> + </Form.Item> + )} + </section> + ); + } + function renderLocalImportLayout() { + return ( + <section className="form-section"> + <h3>本地导入</h3> + <Form.Item field="data_format" label="数据类型" rules={[{ required: true }]}> + <BlockRadio + options={newDatasetTypeOptions} + isOneHalfMode={false} + flexGrow={0} + blockItemWidth={232} + renderBlockInner={(item, { label, isActive }) => ( + <GridRow + style={{ + height: '52px', + }} + gap="10" + > + <div className="dataset-type-indicator" data-is-active={isActive}> + {datasetTypeAssets[item.value as DatasetDataType].icon} + </div> + + <div> + {label} + <div className="dataset-type-explain"> + {datasetTypeAssets[item.value as DatasetDataType].explain} + </div> + </div> + </GridRow> + )} + /> + </Form.Item> + {formData.data_format === DatasetDataType.STRUCT && ( + <Form.Item + field="store_format" + label="数据格式" + rules={[{ required: true, message: '请选择数据集类型' }]} + > + <BlockRadio options={structDataOptions} isOneHalfMode={true} isCenter={true} /> + </Form.Item> + )} + + <Form.Item + field="files" + label="从本地文件中选择" + rules={[{ required: true, message: '请上传文件' }]} + > + <FileUpload + accept={ + formData.data_format === DatasetDataType.STRUCT ? '.csv,.tfrecords' : '.tar,.gz' + } + data={ + formData.data_format === DatasetDataType.STRUCT + ? { + extract: false, + kind: UploadFileType.Dataset, + } + : { + extract: true, + kind: UploadFileType.Dataset, + } + } + kind={UploadFileType.Dataset} + maxCount={1} + maxSize={1024 * 1024 * 100} // 100MB + dp={0} + /> + </Form.Item> + </section> + ); + } + + function renderDatasetChecker() { + return ( + <section className="form-section"> + <h3>数据校验</h3> + <Form.Item field="schema_checkers"> + <DatasetChecker /> + </Form.Item> + </section> + ); } - function goAddBatch() { - setStep(1); + function renderFooterButton() { + return ( + <Space> + <Button type="primary" htmlType="submit" loading={isCreating}> + 确认创建 + </Button> + <ButtonWithModalConfirm onClick={backToList} isShowConfirmModal={isFormValueChanged}> + 取消 + </ButtonWithModalConfirm> + </Space> + ); } - function backToStepBasic() { - setStep(0); + + function renderParamsConfig() { + return ( + <Space> + <Form.Item field="params"> + {dataJobVariableDetailQuery.isError ? 
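+ /* If the variable-detail query failed there is nothing to build the dynamic params from, so an informational Alert replaces the ConfigForm; on the happy path the collected params are later merged back into the variable list via hydrate(...) in onSubmit before the dataset job is created. */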
( + <Alert type="info" content="暂不支持该类型的数据任务" /> + ) : ( + <ConfigForm + filter={variableTagFilter} + groupBy={'tag'} + hiddenGroupTag={false} + cols={2} + collapseTitle="参数配置" + collapseFormItemList={paramsList} + ref={(ref) => { + configFormRefList.current[0] = ref; + }} + isResetOnFormItemListChange={true} + /> + )} + </Form.Item> + </Space> + ); + } + + function renderPublishChecker() { + return ( + <Form.Item field="need_publish"> + <PublishChecker onChange={setNeedPublish} /> + </Form.Item> + ); + } + + function renderCreditsInput() { + return ( + <Form.Item labelAlign="left" layout="horizontal" label="使用单价" field="value"> + <InputNumber + className="dataset-raw-input-number" + min={CREDITS_LIMITS.MIN} + max={CREDITS_LIMITS.MAX} + suffix="积分" + step={1} + /> + </Form.Item> + ); + } + + function variableTagFilter(item: ItemProps) { + return ( + !!item.tag && + [TAG_MAPPER[TagEnum.INPUT_PARAM], TAG_MAPPER[TagEnum.RESOURCE_ALLOCATION]].includes(item.tag) + ); + } + + function handleDataSourceChange(id: string, options: any) { + const dataSource = options ? options.extra : {}; + const isStreaming = dataSource.dataset_type === DatasetType__archived.STREAMING; + formInstance.setFieldsValue({ + data_format: dataSource.dataset_format, + store_format: dataSource.store_format, + dataset_type: dataSource.dataset_type, + cron_type: CronType.DAY, + }); + toggleShowCron(isStreaming); + setCornType(CronType.DAY); + switch (dataSource.dataset_format) { + case DatasetDataType.STRUCT: + // 结构化数据, 只有TFRECORDS格式支持,非拷贝 + setCopyState({ + isShow: dataSource.store_format === DataSourceStructDataType.TFRECORDS, + isDisabled: false, + isCopy: true, + }); + break; + case DatasetDataType.PICTURE: + setCopyState({ + isShow: false, + isDisabled: false, + isCopy: true, + }); + break; + case DatasetDataType.NONE_STRUCTURED: + setCopyState({ + isShow: true, + isDisabled: true, + isCopy: false, + }); + break; + default: + setCopyState({ + isShow: true, + isDisabled: false, + isCopy: true, + }); + break; + } + } + + function handleImportTypeChange() { + formInstance.resetFields(['data_source_uuid', 'data_format', 'store_format', 'dataset_type']); + setCopyState({ + isShow: true, + isDisabled: false, + isCopy: true, + }); + } + + function backToList() { + history.goBack(); + } + + function handleChangeIsCopy(val: boolean) { + setCopyState({ + ...copyState, + isCopy: val, + }); } - function closeModal() { - resetForm(); - toggleVisible(false); + + async function onSubmit() { + if (!currentProjectId) { + return Message.error('请选择工作区'); + } + + if (!myDomainName) { + return Message.error('获取本系统 domain_name 失败'); + } + + const { _import_from } = formInstance.getFieldsValue(); + const import_type = copyState.isCopy + ? DATASET_COPY_CHECKER.COPY + : DATASET_COPY_CHECKER.NONE_COPY; + // validate params + const files = (formInstance.getFieldValue('files') as any) as UploadFile[]; + + if (_import_from === DataImportWay.Local && isEmpty(files)) { + return Message.error('请选择需要导入的文件'); + } + + setIsCreating(true); + + let dataSourceUuid: ID = formInstance.getFieldValue('data_source_uuid') as ID; + if (_import_from === DataImportWay.Local) { + const data_format = formInstance.getFieldValue('data_format'); + const [dataSourceResp, createDataSourceError] = await to( + createDataSource({ + project_id: currentProjectId!, + data_source: { + // TODO: name + name: files?.[0]?.internal_directory, + data_source_url: files?.[0]?.internal_directory, + is_user_upload: true, + store_format: + data_format === DatasetDataType.STRUCT + ? 
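+                // Local imports first register a data source pointing at the
+                // uploaded file's internal directory; its uuid then feeds the
+                // IMPORT_SOURCE job payload assembled further down in onSubmit.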
(formInstance.getFieldValue('store_format') as DataSourceStructDataType) + : undefined, + dataset_type: formInstance.getFieldValue('dataset_type') as DatasetType, + dataset_format: formInstance.getFieldValue('data_format') as DataSourceDataType, + }, + }), + ); + + if (createDataSourceError) { + setIsCreating(false); + Message.error(createDataSourceError.message); + return; + } + + dataSourceUuid = dataSourceResp.data.uuid; + } + + let store_format = formInstance.getFieldValue('store_format'); + // 对于非拷贝及本地导入, 存储格式一定为TFRECORDS + if (import_type === DATASET_COPY_CHECKER.COPY || _import_from === DataImportWay.Local) { + store_format = DataSourceStructDataType.TFRECORDS; + } + + // create dataset + const [res, error] = await to( + createDataset({ + kind: DatasetKindV2.RAW, + project_id: currentProjectId, + name: formInstance.getFieldValue('name'), + comment: formInstance.getFieldValue('comment'), + dataset_type: formInstance.getFieldValue('dataset_type'), + dataset_format: formInstance.getFieldValue('data_format'), + store_format, + import_type, + need_publish: formInstance.getFieldValue('need_publish'), + value: + !!formInstance.getFieldValue('need_publish') && !!bcs_support_enabled + ? formInstance.getFieldValue('value') + : undefined, + schema_checkers: + formData.data_format === DatasetDataType.PICTURE + ? [] + : formInstance.getFieldValue('schema_checkers'), + } as DatasetCreatePayload), + ); + if (error) { + setIsCreating(false); + Message.error(error.message); + return; + } + const datasetId = res.data.id; + + const payload: DatasetJobCreatePayload = { + dataset_job_parameter: { + dataset_job_kind: DataJobBackEndType.IMPORT_SOURCE, + global_configs: { + [myDomainName]: { + dataset_uuid: dataSourceUuid, + variables: hydrate( + dataJobVariableList, + formInstance.getFieldValue('params') as Params, + { + isStringifyVariableValue: true, + isStringifyVariableWidgetSchema: true, + isProcessVariableTypedValue: true, + }, + ) as DataJobVariable[], + }, + }, + }, + output_dataset_id: datasetId, + }; + // 对于增量数据集, 原始数据集需要传入定时周期 + if (isShowCron) { + payload.time_range = {}; + if (cronType === CronType.DAY) { + payload.time_range.days = 1; + } + if (cronType === CronType.HOUR) { + payload.time_range.hours = 1; + } + } + + const [, addDatasetJobError] = await to(createDatasetJobs(currentProjectId, payload)); + if (addDatasetJobError) { + Message.error(addDatasetJobError.message); + // TODO: what if delete request also failed? + try { + deleteDataset(datasetId); + } catch { + /** ignore error */ + } + setIsCreating(false); + return; + } + + setIsCreating(false); + + Message.success(needPublish ? 
'创建成功,数据集可用后将自动发布' : '创建成功'); + + history.goBack(); } - function onCreateNStartImportSuccess() { - forceToRefreshQuery([DATASET_LIST_QUERY_KEY]); - closeModal(); + function onFormChange(_: Partial<FormData>, values: FormData) { + setFormData(values); } }; diff --git a/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/DatasetInfo/index.module.less b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/DatasetInfo/index.module.less new file mode 100644 index 000000000..6cee9875e --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/DatasetInfo/index.module.less @@ -0,0 +1,9 @@ +.dataset_processed_desc{ + font-weight: 500; + color: #1D2129; + font-size: 12px; + line-height: 20px; + .dataset_processed_name{ + margin-right: 8px; + } +} diff --git a/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/DatasetInfo/index.tsx b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/DatasetInfo/index.tsx new file mode 100644 index 000000000..3dbc55f1a --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/DatasetInfo/index.tsx @@ -0,0 +1,90 @@ +/* istanbul ignore file */ + +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { Tag } from '@arco-design/web-react'; +import { DATASET_LIST_QUERY_KEY } from 'views/Datasets/DatasetList'; +import { fetchDatasetList, fetchParticipantDatasetList } from 'services/dataset'; +import { Dataset, ParticipantDataset, DatasetKindBackEndType } from 'typings/dataset'; +import { useRecoilValue } from 'recoil'; +import { projectState } from 'stores/project'; +import { PageMeta } from 'typings/app'; +import { FILTER_OPERATOR_MAPPER, filterExpressionGenerator } from 'views/Datasets/shared'; +import styled from './index.module.less'; + +interface Props { + datasetUuid: string; + participantId?: ID; + isParticipant?: boolean; +} + +const DatasetInfo: FC<Props> = ({ isParticipant = false, datasetUuid, participantId }) => { + const selectedProject = useRecoilValue(projectState); + const [currentDataset, setCurrentDataset] = useState<Dataset | ParticipantDataset>(); + const query = useQuery<{ + data: Array<Dataset | ParticipantDataset>; + page_meta?: PageMeta; + }>( + [ + DATASET_LIST_QUERY_KEY, + selectedProject.current?.id, + datasetUuid, + participantId, + isParticipant, + ], + () => { + const filter = filterExpressionGenerator( + { + project_id: selectedProject.current?.id, + is_published: isParticipant ? undefined : true, + uuid: datasetUuid, + }, + FILTER_OPERATOR_MAPPER, + ); + if (isParticipant) { + return fetchParticipantDatasetList(selectedProject.current?.id!, { + uuid: datasetUuid, + participant_id: participantId, + }); + } + return fetchDatasetList({ + filter, + }); + }, + { + enabled: Boolean(selectedProject.current), + retry: 2, + refetchOnWindowFocus: false, + onSuccess: (res) => { + setCurrentDataset(res.data[0]); + }, + }, + ); + // Empty only if there is no keyword, and the 1st page is requested, and there is no data + const isEmpty = !query.isFetching && !currentDataset; + const tagText = useMemo(() => { + let tagText = ''; + if (!currentDataset) return tagText; + if (currentDataset.dataset_kind === DatasetKindBackEndType.RAW) { + tagText = '原始'; + } + if (currentDataset.dataset_kind === DatasetKindBackEndType.PROCESSED) { + tagText = '结果'; + } + return tagText; + }, [currentDataset]); + return ( + <> + {isEmpty ? 
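+      // isEmpty only settles once the uuid lookup finishes; while the query is
+      // still fetching we fall through to the (still blank) detail row rather
+      // than showing the "no dataset" placeholder.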
( + <div>暂无数据集</div> + ) : ( + <div className={styled.dataset_processed_desc}> + <span className={styled.dataset_processed_name}>{currentDataset?.name}</span> + {tagText ? <Tag>{tagText}</Tag> : <></>} + </div> + )} + </> + ); +}; + +export default DatasetInfo; diff --git a/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/TitleWithRecommendedParam/index.less b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/TitleWithRecommendedParam/index.less new file mode 100644 index 000000000..e67d5584a --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/TitleWithRecommendedParam/index.less @@ -0,0 +1,17 @@ +.params-title-tag{ + margin-bottom: 4px; + width: 50px; +} +.recommend-param-drawer{ + width: 1000px; + .main-title{ + font-size: 14px; + margin-right: 8px; + } + .params-tips{ + margin-bottom: 12px; + .arco-alert-content{ + font-size: 12px; + } + } +} diff --git a/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/TitleWithRecommendedParam/index.tsx b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/TitleWithRecommendedParam/index.tsx new file mode 100644 index 000000000..4cc08d938 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/TitleWithRecommendedParam/index.tsx @@ -0,0 +1,476 @@ +import React, { useMemo } from 'react'; +import { Alert, Button, Drawer, Table, Tag } from '@arco-design/web-react'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { TABLE_COL_WIDTH } from 'shared/constants'; +import ClickToCopy from 'components/ClickToCopy'; +import { useToggle } from 'react-use'; +import { DataJoinType } from 'typings/dataset'; +import './index.less'; + +type Props = { + joinType: DataJoinType; +}; + +type LevelInfo = { + name: string; + desc: string; +}; +type ParamsInfo = { + sender: { + cpu: string; + mem: string; + }; + receiver: { + cpu: string; + mem: string; + }; +}; + +type ParamsTableData = { + id: number; + level: LevelInfo; + replicas?: string | number; + num_partition?: number | string; + part_num?: number | string; + master?: ParamsInfo | string | number; + raw_data_worker?: ParamsInfo; + psi_join_worker?: ParamsInfo; + totalCost: ParamsInfo | string[]; +}; + +/** + * TODO: Temporary table component, which will be removed later + */ +const recommendedParams: ParamsTableData[] = [ + { + id: 1, + level: { + name: '微型数据集', + desc: '(数据量 ≤ 1万且数据大小 ≤ 1g)', + }, + num_partition: 1, + master: { + sender: { cpu: '2000m', mem: '4Gi' }, + receiver: { cpu: '2000m', mem: '4Gi' }, + }, + raw_data_worker: { + sender: { cpu: '4000m', mem: '8Gi' }, + receiver: { cpu: '1000m', mem: '2Gi' }, + }, + psi_join_worker: { + sender: { cpu: '4000m', mem: '8Gi' }, + receiver: { cpu: '1000m', mem: '2Gi' }, + }, + totalCost: { + sender: { cpu: '6000m', mem: '12Gi' }, + receiver: { cpu: '3000m', mem: '6Gi' }, + }, + }, + { + id: 2, + level: { + name: '小型数据集', + desc: '(1万 < 数据集样本量 ≤ 100万或1g ≤ 数据大小 ≤ 10g)', + }, + num_partition: 4, + master: { + sender: { cpu: '2000m', mem: '4Gi' }, + receiver: { cpu: '2000m', mem: '4Gi' }, + }, + raw_data_worker: { + sender: { cpu: '4000m', mem: '8Gi' }, + receiver: { cpu: '1000m', mem: '2Gi' }, + }, + psi_join_worker: { + sender: { cpu: '4000m', mem: '8Gi' }, + receiver: { cpu: '1000m', mem: '2Gi' }, + }, + totalCost: { + sender: { cpu: '18000m', mem: '36Gi' }, + receiver: { cpu: '6000m', mem: '12Gi' }, + }, + }, + { + id: 3, + level: { + name: '中型数据集', + desc: '(100万 < 数据集样本量 ≤ 1亿或10g < 数据大小 ≤ 100g)', + }, + num_partition: 16, + 
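+    // Note: the totals in this table match master + num_partition x one
+    // worker's spec per side; presumably the raw-data and PSI join stages run
+    // back to back, so only one worker pool is counted at a time.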
master: { + sender: { cpu: '2000m', mem: '4Gi' }, + receiver: { cpu: '2000m', mem: '4Gi' }, + }, + raw_data_worker: { + sender: { cpu: '8000m', mem: '16Gi' }, + receiver: { cpu: '1000m', mem: '2Gi' }, + }, + psi_join_worker: { + sender: { cpu: '8000m', mem: '16Gi' }, + receiver: { cpu: '1000m', mem: '2Gi' }, + }, + totalCost: { + sender: { cpu: '130000m', mem: '260Gi' }, + receiver: { cpu: '18000m', mem: '36Gi' }, + }, + }, + { + id: 4, + level: { + name: '大型数据集', + desc: '(数据集样本量 > 1亿或数据大小 > 100g)', + }, + num_partition: 32, + master: { + sender: { cpu: '2000m', mem: '4Gi' }, + receiver: { cpu: '2000m', mem: '4Gi' }, + }, + raw_data_worker: { + sender: { cpu: '16000m', mem: '32Gi' }, + receiver: { cpu: '2000m', mem: '4Gi' }, + }, + psi_join_worker: { + sender: { cpu: '16000m', mem: '32Gi' }, + receiver: { cpu: '2000m', mem: '4Gi' }, + }, + totalCost: { + sender: { cpu: '514000m', mem: '1028Gi' }, + receiver: { cpu: '66000m', mem: '132Gi' }, + }, + }, +]; + +const recommendedOTPSIParams: ParamsTableData[] = [ + { + id: 1, + level: { + name: '小型数据集', + desc: '(0万 < 数据集样本量 ≤ 400万)', + }, + num_partition: 1, + replicas: 1, + totalCost: ['spark任务动态分配资源', 'OtPsi:2c4g'], + }, + { + id: 2, + level: { + name: '中型数据集', + desc: '(数据集样本量 > 400w)', + }, + num_partition: 'max(发起方样本量,合作方样本量)/400万', + replicas: '5或10', + totalCost: ['spark任务动态分配资源', 'OtPsi:10c20g或20c40g'], + }, +]; + +const recommendedHASHParams: ParamsTableData[] = [ + { + id: 1, + level: { + name: '小型数据集', + desc: '(0万 < 数据集样本量 ≤ 400万)', + }, + num_partition: 1, + replicas: 1, + totalCost: ['spark任务动态分配资源', 'OtPsi:2c4g'], + }, + { + id: 2, + level: { + name: '中型数据集', + desc: '(数据集样本量 > 400w)', + }, + num_partition: 'max(发起方样本量,合作方样本量)/400万', + replicas: '5或10', + totalCost: ['spark任务动态分配资源', 'OtPsi:10c20g或20c40g'], + }, +]; + +const recommendedLightRSAPSIParams: ParamsTableData[] = [ + { + id: 1, + level: { + name: '小型数据集', + desc: '(0万 < 数据集样本量 ≤ 200万)', + }, + part_num: 1, + totalCost: ['spark任务动态分配资源', 'rsa求交:16c20g'], + }, + { + id: 2, + level: { + name: '中型数据集', + desc: '(数据集样本量 > 200w)', + }, + part_num: 'max(发起方样本量,合作方样本量)/200万', + totalCost: ['spark任务动态分配资源', 'rsa求交:16c20g'], + }, +]; + +const RECOMMENDED_PARAMS = { + [DataJoinType.PSI]: recommendedParams, + [DataJoinType.OT_PSI_DATA_JOIN]: recommendedOTPSIParams, + [DataJoinType.HASH_DATA_JOIN]: recommendedHASHParams, + [DataJoinType.NORMAL]: [], + [DataJoinType.LIGHT_CLIENT]: recommendedLightRSAPSIParams, + [DataJoinType.LIGHT_CLIENT_OT_PSI_DATA_JOIN]: [], +}; + +const TIPS_MAPPER = { + [DataJoinType.PSI]: + '请根据各方可用资源情况,选择数据集配置。当样本量和数据大小命中不同的规则时,请选择更大的资源规格。', + [DataJoinType.OT_PSI_DATA_JOIN]: '请根据各方可用资源情况,选择数据集配置。', + [DataJoinType.HASH_DATA_JOIN]: '请根据各方可用资源情况,选择数据集配置。', + [DataJoinType.NORMAL]: '', + [DataJoinType.LIGHT_CLIENT]: '请根据各方可用资源情况,选择数据集配置。', + [DataJoinType.LIGHT_CLIENT_OT_PSI_DATA_JOIN]: '', +}; + +const Title: React.FC<Props> = ({ joinType }) => { + const [drawerVisible, setDrawerVisible] = useToggle(false); + const columns: ColumnProps<any>[] = useMemo(() => { + switch (joinType) { + case DataJoinType.PSI: + return [ + { + title: '所有参与方最大数据量级', + dataIndex: 'level', + fixed: 'left', + width: TABLE_COL_WIDTH.NORMAL, + render: (_) => { + return renderLevel(_); + }, + }, + { + title: 'num_partition', + dataIndex: 'num_partition', + width: TABLE_COL_WIDTH.THIN, + }, + { + title: 'master', + dataIndex: 'master', + width: TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderParams(_); + }, + }, + { + title: 'raw_worker', + dataIndex: 'raw_data_worker', + width: 
TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderParams(_); + }, + }, + { + title: 'psi_worker', + dataIndex: 'psi_join_worker', + width: TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderParams(_); + }, + }, + { + title: '总资源消耗', + dataIndex: 'totalCost', + width: TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderParams(_); + }, + }, + ]; + case DataJoinType.OT_PSI_DATA_JOIN: + return [ + { + title: '所有参与方最大数据量级', + dataIndex: 'level', + fixed: 'left', + width: TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderLevel(_); + }, + }, + { + title: 'num_partition', + dataIndex: 'num_partition', + width: TABLE_COL_WIDTH.THIN, + }, + { + title: 'replicas', + dataIndex: 'replicas', + width: TABLE_COL_WIDTH.THIN / 2, + }, + { + title: '总资源消耗', + dataIndex: 'totalCost', + width: TABLE_COL_WIDTH.THIN, + render: (_: string[]) => { + return ( + <ul> + {_.map((item) => ( + <li key={item}>{item}</li> + ))} + </ul> + ); + }, + }, + ]; + case DataJoinType.LIGHT_CLIENT: + return [ + { + title: '所有参与方最大数据量级', + dataIndex: 'level', + fixed: 'left', + width: TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderLevel(_); + }, + }, + { + title: 'part_num', + dataIndex: 'part_num', + width: TABLE_COL_WIDTH.THIN, + }, + { + title: '总资源消耗', + dataIndex: 'totalCost', + width: TABLE_COL_WIDTH.THIN, + render: (_: string[]) => { + return ( + <ul> + {_.map((item) => ( + <li key={item}>{item}</li> + ))} + </ul> + ); + }, + }, + ]; + case DataJoinType.HASH_DATA_JOIN: + return [ + { + title: '所有参与方最大数据量级', + dataIndex: 'level', + fixed: 'left', + width: TABLE_COL_WIDTH.THIN, + render: (_) => { + return renderLevel(_); + }, + }, + { + title: 'num_partition', + dataIndex: 'num_partition', + width: TABLE_COL_WIDTH.THIN, + }, + { + title: 'replicas', + dataIndex: 'replicas', + width: TABLE_COL_WIDTH.THIN / 2, + }, + { + title: '总资源消耗', + dataIndex: 'totalCost', + width: TABLE_COL_WIDTH.THIN, + render: (_: string[]) => { + return ( + <ul> + {_.map((item) => ( + <li key={item}>{item}</li> + ))} + </ul> + ); + }, + }, + ]; + default: + return []; + } + }, [joinType]); + const dataSource = useMemo(() => { + if (!joinType) { + return []; + } + return RECOMMENDED_PARAMS[joinType] || []; + }, [joinType]); + return ( + <div className="recommended-param-table"> + <Drawer + title={<span className="main-title">推荐配置</span>} + className="recommend-param-drawer" + style={{ + width: 800, + }} + visible={drawerVisible} + onCancel={() => setDrawerVisible(false)} + > + <Alert className={'params-tips'} content={TIPS_MAPPER[joinType]} /> + <Table + pagination={false} + scroll={{ x: 1000, y: 600 }} + border={{ wrapper: true, cell: true }} + className="custom-table custom-table-left-side-filter" + rowKey={'id'} + columns={columns} + data={dataSource} + /> + </Drawer> + <Button + onClick={() => { + setDrawerVisible(true); + }} + size={'mini'} + type="text" + > + 推荐配置参数 + </Button> + </div> + ); + + function renderLevel(levelInfo: LevelInfo) { + return ( + <div> + <h5>{levelInfo.name}</h5> + <span>{levelInfo.desc}</span> + </div> + ); + } + + function renderParams(paramInfo: ParamsInfo) { + return ( + <div> + <h5>发起方:</h5> + <span> + <ClickToCopy text={paramInfo?.sender?.cpu}> + <Tag className="params-title-tag" color={'arcoblue'}> + {'CPU:'} + </Tag> + {paramInfo?.sender?.cpu} + </ClickToCopy> + <ClickToCopy text={paramInfo?.sender?.mem}> + <Tag className="params-title-tag" color={'green'}> + {'MEM:'} + </Tag> + {paramInfo?.sender?.mem} + </ClickToCopy> + </span> + <h5>合作伙伴:</h5> + <span> + <ClickToCopy 
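+          // Every figure is wrapped in ClickToCopy so it can be copied and
+          // pasted straight into the matching resource field of the params form.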
text={paramInfo?.receiver?.cpu}> + <Tag className="params-title-tag" color={'arcoblue'}> + {'CPU:'} + </Tag> + {paramInfo?.receiver?.cpu} + </ClickToCopy> + <ClickToCopy text={paramInfo?.receiver?.mem}> + <Tag className="params-title-tag" color={'green'}> + {'MEM:'} + </Tag> + {paramInfo?.receiver?.mem} + </ClickToCopy> + </span> + </div> + ); + } +}; + +export default Title; diff --git a/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/index.module.less b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/index.module.less new file mode 100644 index 000000000..57a16d197 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/index.module.less @@ -0,0 +1,61 @@ +@import '~styles/mixins.less'; + +.dataset_processed_create_form{ + max-width: 600px; + margin: 0 auto; +} +.dataset_processed_create_section{ + margin-bottom: 20px; + overflow: hidden; // bfc + > h3 { + margin-bottom: 20px; + font-weight: 500; + font-size: 14px; + color: #1d252f; + } + .title-tag{ + margin: 0 12px 0 12px; + } +} +.dataset_processed_avatar{ + .MixinSquare(44px); + background-color: var(--primary-1); + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + + &::before { + display: inline-block; + width: 100%; + height: 100%; + content: ''; + background: url('~assets/icons/atom-icon-algorithm-management.svg') no-repeat; + background-size: contain; + } +} + +.dataset_processed_card{ + :global(.arco-card-body){ + padding: 32px 40px; + } +} + +.dataset_processed_desc{ + font-weight: 500; + color: #1D2129; + font-size: 12px; + line-height: 20px; +} + +.dataset_processed_form_label{ + height: 32px; + display: flex; + justify-content: flex-end !important; + align-items: center; +} + +.dataset_processed_footer_button{ + padding-left: 130px; +} + diff --git a/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/index.tsx b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/index.tsx new file mode 100644 index 000000000..1ea93650e --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/CreateProcessedDataset/index.tsx @@ -0,0 +1,1010 @@ +import { + Alert, + Button, + Form, + Input, + Message, + Space, + Spin, + Tag, + Switch, + Card, +} from '@arco-design/web-react'; +import BackButton from 'components/BackButton'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BlockRadio from 'components/_base/BlockRadio'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import DatasetSelect from 'components/DatasetSelect'; +import DatasetInfo from './DatasetInfo'; +import ConfigForm, { ExposedRef, ItemProps } from 'components/ConfigForm'; +import { Tag as TagEnum } from 'typings/workflow'; +import React, { FC, useEffect, useMemo, useRef, useState } from 'react'; +import { useHistory, useParams } from 'react-router'; +import { + createDatasetJobs, + createDataset, + fetchDataJobVariableDetail, + fetchDatasetDetail, + fetchDatasetJobDetail, + authorizeDataset, +} from 'services/dataset'; +import { fetchSysInfo } from 'services/settings'; +import { isStringCanBeParsed, to } from 'shared/helpers'; +import { + useGetAppFlagValue, + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useIsFormValueChange, + useGetCurrentProjectAbilityConfig, +} from 'hooks'; +import { MAX_COMMENT_LENGTH, validNamePattern } from 'shared/validator'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { useQuery } from 
'react-query'; +import FormLabel from 'components/FormLabel'; +import { LabelStrong } from 'styles/elements'; +import { + DataJobBackEndType, + DataJobType, + DataJobVariable, + DataJoinType, + Dataset, + DatasetJobCreatePayload, + DatasetCreatePayload, + DatasetKindV2, + DATASET_COPY_CHECKER, + DataSourceStructDataType, + DataJoinToolTipText, + DatasetType, + DatasetDataType, + DatasetType__archived, + DatasetKindBackEndType, +} from 'typings/dataset'; +import { + Variable, + VariableComponent, + VariableValueType, + VariableWidgetSchema, +} from 'typings/variable'; +import { Participant, ParticipantType } from 'typings/participant'; +import { hydrate } from 'views/Workflows/shared'; +import { + NO_CATEGORY, + SYNCHRONIZATION_VARIABLE, + TAG_MAPPER, + VARIABLE_TIPS_MAPPER, + isDataAlignment, + isDataLightClient, + isDataOtPsiJoin, + isDataHashJoin, + isHoursCronJoin, + CronType, + cronTypeOptions, +} from '../shared'; +import Title from './TitleWithRecommendedParam'; +import { FlagKey } from 'typings/flag'; +import styled from './index.module.less'; + +type Props = Record<string, unknown>; +type Params = { + [key: string]: any; +}; + +type FormData = { + name: string; + comment: string; + data_job_type: DataJobType; + data_join_type: DataJoinType; + dataset_info: Dataset; + params: Params; + cron_type: CronType; + participant: { + [participantName: string]: { + dataset_info: Dataset; + params: Params; + }; + }; +}; + +type DataJoinTypeOption = { + value: `${DataJoinType}`; + label: string; + tooltip?: string; +}; + +const dataJoinTypeOptionLightClient = [ + { + value: DataJoinType.LIGHT_CLIENT, + label: 'RSA-PSI 求交', + }, + { + value: DataJoinType.LIGHT_CLIENT_OT_PSI_DATA_JOIN, + label: 'OT-PSI 求交', + }, +]; + +const initialFormValues: Partial<FormData> = { + name: '', + data_join_type: DataJoinType.PSI, + cron_type: CronType.DAY, +}; + +const CreateDataset: FC<Props> = () => { + const { id, action } = useParams<{ + action: 'create' | 'edit' | 'authorize'; + id: string; + }>(); + const isAuthorize = action === 'authorize'; + const [formInstance] = Form.useForm<FormData>(); + const history = useHistory(); + const hash_data_join_enabled = useGetAppFlagValue(FlagKey.HASH_DATA_JOIN_ENABLED); + const participantList = useGetCurrentProjectParticipantList(); + const { hasIdAlign, hasVertical, hasHorizontal } = useGetCurrentProjectAbilityConfig(); + + const [dataJobType, setDataJobType] = useState<DataJobType>( + initialFormValues?.data_job_type ?? DataJobType.JOIN, + ); + const [dataJoinType, setDataJoinType] = useState<DataJoinType>( + initialFormValues?.data_join_type ?? DataJoinType.PSI, + ); + const [isDoing, setIsDoing] = useState(false); + const [isCron, setIsCron] = useState(false); + const [cronType, setCornType] = useState<CronType>(initialFormValues?.cron_type ?? 
CronType.DAY); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + const currentProjectId = useGetCurrentProjectId(); + const configFormRefList = useRef<Array<ExposedRef | null>>([]); + const [globalConfigMap, setGlobalConfigMap] = useState<any>(); + + const finalDataJobType = useMemo<DataJobBackEndType>(() => { + if (dataJobType === DataJobType.ALIGNMENT) { + return DataJobBackEndType.DATA_ALIGNMENT; + } + if (dataJoinType === DataJoinType.LIGHT_CLIENT) { + return DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN; + } + if (dataJoinType === DataJoinType.LIGHT_CLIENT_OT_PSI_DATA_JOIN) { + return DataJobBackEndType.LIGHT_CLIENT_OT_PSI_DATA_JOIN; + } + if (dataJoinType === DataJoinType.OT_PSI_DATA_JOIN) { + return DataJobBackEndType.OT_PSI_DATA_JOIN; + } + if (dataJoinType === DataJoinType.HASH_DATA_JOIN) { + return DataJobBackEndType.HASH_DATA_JOIN; + } + return dataJoinType === DataJoinType.PSI + ? DataJobBackEndType.RSA_PSI_DATA_JOIN + : DataJobBackEndType.DATA_JOIN; + }, [dataJobType, dataJoinType]); + + // ======= Dataset query ============ + const datasetQuery = useQuery(['fetchDatasetDetail', id], () => fetchDatasetDetail(id), { + refetchOnWindowFocus: false, + retry: 2, + enabled: isAuthorize && Boolean(id), + }); + + // 获取当前数据任务信息, 包括本方和合作伙伴方 + const datasetJobDetailQuery = useQuery( + ['fetchDatasetJobDetail', currentProjectId, datasetQuery.data?.data.parent_dataset_job_id], + () => fetchDatasetJobDetail(currentProjectId!, datasetQuery.data?.data.parent_dataset_job_id!), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: + isAuthorize && Boolean(currentProjectId && datasetQuery.data?.data.parent_dataset_job_id), + onSuccess(res) { + const { + name, + kind, + global_configs: { global_configs }, + } = res.data; + const { comment, dataset_type } = datasetQuery.data?.data!; + if (!isAuthorize) return; + // 设置参数信息, 支持多方 + Object.keys(global_configs).forEach((key) => { + const globalConfig = global_configs[key]; + (globalConfig as any).variables = handleParseToConfigFrom( + handleParseDefinition(globalConfig.variables), + isAuthorize, + ); + }); + setGlobalConfigMap(global_configs); + + setDataJobType(isDataAlignment(kind) ? DataJobType.ALIGNMENT : DataJobType.JOIN); + setDataJoinType( + isDataLightClient(kind) + ? DataJoinType.LIGHT_CLIENT + : isDataOtPsiJoin(kind) + ? DataJoinType.OT_PSI_DATA_JOIN + : isDataHashJoin(kind) + ? DataJoinType.HASH_DATA_JOIN + : DataJoinType.PSI, + ); + setIsCron(dataset_type === DatasetType__archived.STREAMING); + isHoursCronJoin(res.data) ? setCornType(CronType.HOUR) : setCornType(CronType.DAY); + formInstance.setFieldsValue({ + name, + comment, + data_job_type: isDataAlignment(kind) ? DataJobType.ALIGNMENT : DataJobType.JOIN, + data_join_type: isDataLightClient(kind) + ? DataJoinType.LIGHT_CLIENT + : isDataOtPsiJoin(kind) + ? DataJoinType.OT_PSI_DATA_JOIN + : isDataHashJoin(kind) + ? DataJoinType.HASH_DATA_JOIN + : DataJoinType.PSI, + }); + }, + }, + ); + + const dataJobVariableDetailQuery = useQuery( + ['fetchDataJobVariableDetail', finalDataJobType], + () => fetchDataJobVariableDetail(finalDataJobType), + { + enabled: !isAuthorize && Boolean(finalDataJobType), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const sysInfoQuery = useQuery(['fetchSysInfo'], () => fetchSysInfo(), { + retry: 2, + refetchOnWindowFocus: false, + }); + + const myDomainName = useMemo<string>(() => { + return sysInfoQuery.data?.data?.domain_name ?? 
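+      // The full domain name keys global_configs in the create payload below,
+      // while the pure (short-form) name keys the authorize-side config map.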
''; + }, [sysInfoQuery.data]); + + const myPureDomainName = useMemo<string>(() => { + return sysInfoQuery.data?.data?.pure_domain_name ?? ''; + }, [sysInfoQuery.data]); + + const participantName = useMemo<string>(() => { + if (!datasetJobDetailQuery.data?.data.coordinator_id) return ''; + const participant = participantList.find( + (item) => item.id === datasetJobDetailQuery.data?.data.coordinator_id, + ); + return participant?.pure_domain_name ?? ''; + }, [datasetJobDetailQuery.data, participantList]); + + const dataJobVariableList = useMemo<Variable[]>(() => { + if (!dataJobVariableDetailQuery.data?.data?.variables) { + return []; + } + return handleParseDefinition(dataJobVariableDetailQuery.data.data.variables); + }, [dataJobVariableDetailQuery.data]); + + const datasetSelectFilterOptions = useMemo(() => { + // todo: 目前对齐要求只展示非增量数据集, 后续这块逻辑需要优化 + const options: any = { + dataset_type: DatasetType.PSI, + dataset_format: [DatasetDataType.STRUCT, DatasetDataType.PICTURE], + dataset_kind: [DatasetKindBackEndType.RAW, DatasetKindBackEndType.PROCESSED], + }; + if (isCron) { + options.dataset_type = DatasetType.STREAMING; + options.cron_interval = [cronType === CronType.DAY ? 'DAYS' : 'HOURS']; + } + return options; + }, [isCron, cronType]); + + const [paramsList, collapseParamsList] = useMemo(() => { + return handleParseToConfigFrom(dataJobVariableList, isAuthorize); + }, [dataJobVariableList, isAuthorize]); + + const dataJobTypeOptionsGenerator = useMemo(() => { + const dataJobTypeOptions = []; + if (hasIdAlign || hasVertical) { + dataJobTypeOptions.push({ + value: DataJobType.JOIN, + label: hasIdAlign ? '轻客户端求交' : '求交', + }); + } + if (hasHorizontal) { + dataJobTypeOptions.push({ + value: DataJobType.ALIGNMENT, + label: '对齐', + }); + } + return dataJobTypeOptions; + }, [hasIdAlign, hasVertical, hasHorizontal]); + + const dataJoinTypeOptionsGenerator = useMemo(() => { + const dataJoinTypeOptions: DataJoinTypeOption[] = [ + { + value: DataJoinType.OT_PSI_DATA_JOIN, + label: 'OT-PSI 求交', + tooltip: DataJoinToolTipText.OT_PSI_DATA_JOIN, + }, + { + value: DataJoinType.PSI, + label: 'RSA-PSI 求交', + tooltip: DataJoinToolTipText.PSI, + }, + ]; + if (hash_data_join_enabled) { + dataJoinTypeOptions.push({ + value: DataJoinType.HASH_DATA_JOIN, + label: '哈希求交', + tooltip: DataJoinToolTipText.HASH_DATA_JOIN, + }); + } + if (hasIdAlign) { + return dataJoinTypeOptionLightClient; + } + if (hasVertical) { + return dataJoinTypeOptions; + } + return dataJoinTypeOptions; + }, [hash_data_join_enabled, hasIdAlign, hasVertical]); + + useEffect(() => { + const defaultJoinType = dataJoinTypeOptionsGenerator[0]?.value as DataJoinType; + const defaultJobType = dataJobTypeOptionsGenerator[0]?.value; + formInstance.setFieldsValue({ + data_join_type: defaultJoinType, + data_job_type: defaultJobType, + }); + setDataJoinType(defaultJoinType); + setDataJobType(defaultJobType); + }, [dataJobTypeOptionsGenerator, dataJoinTypeOptionsGenerator, formInstance]); + + const authorizeDataJobType = useMemo(() => { + return dataJobTypeOptionsGenerator.find((item) => item.value === dataJobType)?.label; + }, [dataJobType, dataJobTypeOptionsGenerator]); + + const authorizeDataJoinType = useMemo(() => { + return (dataJoinTypeOptionsGenerator as DataJoinTypeOption[]).find( + (item) => item.value === dataJoinType, + )?.label; + }, [dataJoinType, dataJoinTypeOptionsGenerator]); + + return ( + <SharedPageLayout + title={ + <BackButton onClick={backToList} isShowConfirmModal={isFormValueChanged}> + 结果数据集 + </BackButton> + } + 
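+      // This page serves both the create and authorize flows, keyed by the
+      // :action route param; in authorize mode the form renders read-only and
+      // the banner card above summarizes the pending authorization request.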
contentWrapByCard={false} + centerTitle={isAuthorize ? '授权结果数据集' : '创建数据集'} + > + <Spin loading={dataJobVariableDetailQuery.isFetching || datasetJobDetailQuery.isFetching}> + {isAuthorize && renderBannerCard()} + {renderCardFrom()} + </Spin> + </SharedPageLayout> + ); + + function renderBannerCard() { + const title = `${participantName}向您发起${datasetJobDetailQuery.data?.data.name}的数据集授权申请`; + return ( + <Card className="card" bordered={false} style={{ marginBottom: 20 }}> + <Space size="medium"> + <div className={styled.dataset_processed_avatar} /> + <> + <LabelStrong fontSize={16}>{title}</LabelStrong> + <TitleWithIcon + title="确认授权后,数据任务将自动运行,可在结果数据集页查看详细信息。" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </> + </Space> + </Card> + ); + } + + function renderCardFrom() { + return ( + <Card className={styled.dataset_processed_card} bordered={false}> + <Form + className={styled.dataset_processed_create_form} + disabled={isAuthorize} + initialValues={initialFormValues} + form={formInstance} + onSubmit={onSubmit} + onValuesChange={onFormValueChange} + scrollToFirstError={true} + > + {renderBaseConfigLayout()} + {renderParticipantConfigLayout()} + {renderFooterButton()} + </Form> + </Card> + ); + } + + function renderBaseConfigLayout() { + return ( + <section className={styled.dataset_processed_create_section}> + <h3>基本配置</h3> + <Form.Item + field="name" + label="数据集名称" + hasFeedback + rules={[ + { required: true, message: '请输入' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input placeholder="请输入" maxLength={60} /> + </Form.Item> + <Form.Item + field="comment" + label="数据集描述" + rules={[{ maxLength: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }]} + > + <Input.TextArea rows={3} placeholder="最多为 200 个字符" showWordLimit /> + </Form.Item> + <Form.Item field="data_job_type" label="数据任务" rules={[{ required: true }]}> + {isAuthorize ? ( + <div className={styled.dataset_processed_desc}>{authorizeDataJobType}</div> + ) : ( + <BlockRadio + flexGrow={0} + isCenter={true} + isOneHalfMode={false} + options={dataJobTypeOptionsGenerator} + onChange={(val: DataJobType) => { + setDataJobType(val); + setIsCron(false); + }} + /> + )} + </Form.Item> + {dataJobType === DataJobType.JOIN && ( + <Form.Item field="data_join_type" label="求交方式" rules={[{ required: true }]}> + {isAuthorize ? ( + <div className={styled.dataset_processed_desc}>{authorizeDataJoinType}</div> + ) : ( + <BlockRadio.WithToolTip + flexGrow={0} + isCenter={true} + isOneHalfMode={false} + options={dataJoinTypeOptionsGenerator} + onChange={(val: DataJoinType) => { + setDataJoinType(val); + }} + /> + )} + </Form.Item> + )} + {!hasIdAlign && dataJobType === DataJobType.JOIN && ( + <Form.Item field="is_cron" label="定时求交"> + {isAuthorize ? ( + <div className={styled.dataset_processed_desc}>{isCron ? '已开启' : '未开启'}</div> + ) : ( + <Space> + <Switch onChange={handleCronChange} /> + <TitleWithIcon + title="开启后仅支持选择增量数据集" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </Space> + )} + </Form.Item> + )} + {dataJobType === DataJobType.JOIN && isCron && ( + <Form.Item + field="cron_type" + label={ + <FormLabel + className={styled.dataset_processed_form_label} + label="求交周期" + tooltip="会在提交后立即求交,后续任务将按照设定的时间定期进行" + /> + } + > + {isAuthorize ? ( + <div className={styled.dataset_processed_desc}> + {cronType === CronType.DAY ? 
'每天' : '每小时'} + </div> + ) : ( + <BlockRadio + flexGrow={0} + isCenter={false} + isOneHalfMode={false} + options={cronTypeOptions} + onChange={(val: CronType) => { + handleCronTypeChange(val); + }} + /> + )} + </Form.Item> + )} + {isAuthorize ? ( + <Form.Item + field="dataset_info" + label="我方数据集" + rules={[{ required: true, message: '请选择' }]} + > + <DatasetInfo + isParticipant={false} + datasetUuid={ + isAuthorize && globalConfigMap?.[myPureDomainName]?.dataset_uuid + ? globalConfigMap[myPureDomainName].dataset_uuid + : '' + } + /> + </Form.Item> + ) : ( + <Form.Item + field="dataset_info" + label="我方数据集" + rules={[{ required: true, message: '请选择' }]} + > + <DatasetSelect + lazyLoad={{ + enable: true, + page_size: 10, + }} + filterOptions={datasetSelectFilterOptions} + isParticipant={false} + /> + </Form.Item> + )} + <Form.Item field="params" label="我方参数"> + {dataJobVariableDetailQuery.isError ? ( + <Alert type="info" content="暂不支持该类型的数据任务" /> + ) : ( + <ConfigForm + filter={variableTagFilter} + groupBy={'tag'} + hiddenGroupTag={false} + cols={2} + configFormExtra={ + dataJobType === DataJobType.JOIN ? <Title joinType={dataJoinType} /> : undefined + } + formProps={{ + style: { + marginTop: 7, + }, + }} + formItemList={ + isAuthorize + ? globalConfigMap && globalConfigMap[myPureDomainName] + ? globalConfigMap[myPureDomainName].variables[0] + : [] + : paramsList + } + collapseFormItemList={ + isAuthorize + ? globalConfigMap && globalConfigMap[myPureDomainName] + ? globalConfigMap[myPureDomainName].variables[1] + : [] + : collapseParamsList + } + ref={(ref) => { + configFormRefList.current[0] = ref; + }} + isResetOnFormItemListChange={true} + onChange={(val) => { + syncConfigFormValue( + val, + [ + SYNCHRONIZATION_VARIABLE.NUM_PARTITIONS, + SYNCHRONIZATION_VARIABLE.PART_NUM, + SYNCHRONIZATION_VARIABLE.REPLICAS, + ], + false, + ); + }} + /> + )} + </Form.Item> + </section> + ); + } + + function variableTagFilter(item: ItemProps) { + return ( + !!item.tag && + [TAG_MAPPER[TagEnum.INPUT_PARAM], TAG_MAPPER[TagEnum.RESOURCE_ALLOCATION]].includes(item.tag) + ); + } + + function renderParticipantConfigLayout() { + return participantList?.map((item, index) => { + const { type, pure_domain_name, id } = item; + const isLightClient = type === ParticipantType.LIGHT_CLIENT; + return isLightClient ? ( + renderLightClientInfo(item) + ) : ( + <section key={item.domain_name}> + <h3>{item.name}</h3> + {isAuthorize ? ( + <Form.Item + field={`participant.${item.name}.dataset_info`} + label={`合作伙伴数据集`} + rules={[{ required: true, message: '请选择' }]} + > + <DatasetInfo + isParticipant={true} + participantId={id} + datasetUuid={ + isAuthorize && globalConfigMap?.[pure_domain_name!]?.dataset_uuid + ? globalConfigMap[pure_domain_name!].dataset_uuid + : '' + } + /> + </Form.Item> + ) : ( + <Form.Item + field={`participant.${item.name}.dataset_info`} + label={`合作伙伴数据集`} + rules={[{ required: true, message: '请选择' }]} + > + <DatasetSelect + queryParams={{ + //TODO Temporarily obtain full data and will be removed soon + participant_id: id, + page_size: 0, + }} + filterOptions={datasetSelectFilterOptions} + isParticipant={true} + /> + </Form.Item> + )} + <Form.Item field={`participant.${item.name}.params`} label={`合作伙伴参数`}> + {dataJobVariableDetailQuery.isError ? ( + <Alert type="info" content="暂不支持该类型的数据任务" /> + ) : ( + <ConfigForm + filter={variableTagFilter} + groupBy={'tag'} + hiddenGroupTag={false} + cols={2} + configFormExtra={ + dataJobType === DataJobType.JOIN ? 
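+                    // Join jobs get the recommended-parameter drawer rendered by
+                    // TitleWithRecommendedParam; alignment jobs get no extra.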
<Title joinType={dataJoinType} /> : undefined + } + formProps={{ + style: { + marginTop: 7, + }, + }} + formItemList={ + isAuthorize + ? globalConfigMap && globalConfigMap[myPureDomainName] + ? globalConfigMap[myPureDomainName].variables[0] + : [] + : paramsList + } + collapseFormItemList={ + isAuthorize + ? globalConfigMap && globalConfigMap[myPureDomainName] + ? globalConfigMap[myPureDomainName].variables[1] + : [] + : collapseParamsList + } + ref={(ref) => { + configFormRefList.current[index + 1] = ref; + }} + isResetOnFormItemListChange={true} + onChange={(val) => { + syncConfigFormValue( + val, + [ + SYNCHRONIZATION_VARIABLE.NUM_PARTITIONS, + SYNCHRONIZATION_VARIABLE.PART_NUM, + SYNCHRONIZATION_VARIABLE.REPLICAS, + ], + true, + item.name, + ); + }} + /> + )} + </Form.Item> + </section> + ); + }); + } + + function renderFooterButton() { + return ( + <Space className={styled.dataset_processed_footer_button}> + {isAuthorize ? ( + <Button type="primary" loading={isDoing} onClick={onAuthorize}> + 确认授权 + </Button> + ) : ( + <Button + type="primary" + htmlType="submit" + loading={isDoing} + disabled={dataJobVariableDetailQuery.isError} + > + 确认创建 + </Button> + )} + + <ButtonWithModalConfirm onClick={backToList} isShowConfirmModal={isFormValueChanged}> + 取消 + </ButtonWithModalConfirm> + </Space> + ); + } + + function renderLightClientInfo(lightClientInfo: Participant) { + return ( + <section key={lightClientInfo.domain_name}> + <h3> + {' '} + {lightClientInfo.domain_name} + <Tag className={styled.title_tag} color="arcoblue"> + 轻量 + </Tag> + </h3> + <Form.Item + field={`participant.${lightClientInfo.name}.dataset_info`} + label="合作伙伴数据集" + rules={[{ message: '请选择' }]} + > + <div>由客户侧本地上传</div> + </Form.Item> + </section> + ); + } + + function backToList() { + history.goBack(); + } + + function handleParseDefinition(definitions: DataJobVariable[]) { + return definitions.map((item) => { + let widget_schema: VariableWidgetSchema = {}; + + try { + widget_schema = JSON.parse(item.widget_schema); + } catch (error) {} + return { + ...item, + widget_schema, + }; + }); + } + + function handleParseToConfigFrom(variableList: Variable[], disabled: boolean) { + const formItemList: ItemProps[] = []; + const collapseFormItemList: ItemProps[] = []; + variableList + .filter((item) => !item.widget_schema.hidden) + .forEach((item) => { + const baseRuleList = item.widget_schema.required + ? [ + { + required: true, + message: '必填项', + }, + ] + : []; + const formItemConfig = { + disabled, // 在授权时将参数配置禁用修改 + tip: VARIABLE_TIPS_MAPPER[item.name], + label: item.name, + tag: TAG_MAPPER[item.tag as TagEnum] || NO_CATEGORY, + field: item.name, + initialValue: + item.widget_schema.component === VariableComponent.Input + ? item.value + : item.typed_value, + componentType: item.widget_schema.component, + rules: + item.widget_schema.component === VariableComponent.Input && + [VariableValueType.LIST, VariableValueType.OBJECT].includes(item.value_type!) + ? 
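+              // LIST/OBJECT-typed variables edited through a plain Input must
+              // already be an object or satisfy isStringCanBeParsed (i.e. be
+              // JSON-parsable, per its name) -- e.g. '["a","b"]' passes, while
+              // '[a,b]' trips the JSON format-error message below.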
[ + ...baseRuleList, + { + validator: (value: any, callback: (error?: string | undefined) => void) => { + if ((value && typeof value === 'object') || isStringCanBeParsed(value)) { + callback(); + return; + } + callback(`JSON ${item.value_type!} 格式错误`); + }, + }, + ] + : baseRuleList, + }; + + if (formItemConfig.tag === TAG_MAPPER[TagEnum.INPUT_PARAM]) { + formItemList.push(formItemConfig); + } + if (formItemConfig.tag === TAG_MAPPER[TagEnum.RESOURCE_ALLOCATION]) { + collapseFormItemList.push(formItemConfig); + } + }); + + return [formItemList, collapseFormItemList]; + } + + /** + * This function is used to synchronize advanced parameters of the sender and participants(at least one participant) + * @param value config values; + * @param keyList the variable name needs to be kept same; + * @param isParticipant is called by participant ro not; + * @param currentParticipant current participant name; + */ + function syncConfigFormValue( + value: { [prop: string]: any }, + keyList: string[], + isParticipant: boolean, + currentParticipant?: string, + ) { + if (!keyList || !keyList.length || !value) { + return; + } + const senderParams: any = formInstance.getFieldValue('params') || {}; + const participantParams: any = formInstance.getFieldValue('participant'); + keyList.forEach((key) => { + if (!Object.prototype.hasOwnProperty.call(value, key)) { + return; + } + if (isParticipant) { + senderParams[key] = value[key]; + } + participantList.forEach((item) => { + if (isParticipant && item.name === currentParticipant) { + return; + } + const params = participantParams?.[item.name]?.params || {}; + params[key] = value[key]; + }); + }); + formInstance.setFieldsValue({ + params: { + ...senderParams, + }, + participant: { + ...participantParams, + }, + }); + } + + function handleCronChange(value: boolean) { + // 切换定时求交之后, 需要将之前选中的数据集内容清空掉 + formInstance.setFieldValue('dataset_info', {}); + participantList.forEach((item) => { + formInstance.setFieldValue(`participant.${item.name}.dataset_info` as keyof FormData, {}); + }); + setIsCron(value); + setCornType(CronType.DAY); + } + + function handleCronTypeChange(value: CronType) { + // 切换定时求交之后, 需要将之前选中的数据集内容清空掉 + formInstance.setFieldValue('dataset_info', {}); + participantList.forEach((item) => { + formInstance.setFieldValue(`participant.${item.name}.dataset_info` as keyof FormData, {}); + }); + setCornType(value); + } + + async function onSubmit(values: FormData) { + if (!currentProjectId) { + return Message.error('请选择工作区'); + } + if (!myDomainName) { + return Message.error('获取本系统 domain_name 失败'); + } + + // Validate <ConfigForm/> form + try { + await Promise.all( + configFormRefList.current.filter((i) => i).map((i) => i?.formInstance.validate()), + ); + } catch (error) { + return Message.info('必填项'); + } + + setIsDoing(true); + + const { dataset_type, dataset_format, store_format, import_type } = formInstance.getFieldValue( + 'dataset_info', + ) as Dataset; + + // create dataset + const [res, addDataSetError] = await to( + createDataset({ + kind: DatasetKindV2.PROCESSED, + project_id: currentProjectId, + name: values.name, + comment: values.comment || '', + dataset_type, + dataset_format, + store_format: + import_type === DATASET_COPY_CHECKER.COPY + ? 
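+              // Copied datasets are normalized to TFRECORDS on write; non-copy
+              // datasets keep whatever store_format the source dataset declared.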
DataSourceStructDataType.TFRECORDS + : store_format, + import_type: DATASET_COPY_CHECKER.COPY, + is_published: true, + } as DatasetCreatePayload), + ); + + if (addDataSetError) { + setIsDoing(false); + Message.error(addDataSetError.message); + return; + } + const datasetId = res.data.id; + const payload: DatasetJobCreatePayload = { + dataset_job_parameter: { + dataset_job_kind: finalDataJobType, + global_configs: { + [myDomainName]: { + dataset_uuid: values?.dataset_info?.uuid, + variables: hydrate(dataJobVariableList, values.params, { + isStringifyVariableValue: true, + isStringifyVariableWidgetSchema: true, + isProcessVariableTypedValue: true, + }) as DataJobVariable[], + }, + ...participantList?.reduce( + (acc, item) => { + const participantValues = values.participant[item.name]; + acc[item.domain_name] = { + dataset_uuid: participantValues?.dataset_info?.uuid, + variables: hydrate(dataJobVariableList, participantValues.params, { + isStringifyVariableValue: true, + isStringifyVariableWidgetSchema: true, + isProcessVariableTypedValue: true, + }) as DataJobVariable[], + }; + + return acc; + }, + {} as { + [domainName: string]: { + dataset_uuid: ID; + variables: DataJobVariable[]; + }; + }, + ), + }, + }, + output_dataset_id: datasetId, + }; + + if (isCron) { + payload.time_range = {}; + if (cronType === CronType.DAY) { + payload.time_range.days = 1; + } + if (cronType === CronType.HOUR) { + payload.time_range.hours = 1; + } + } + + const [, error] = await to(createDatasetJobs(currentProjectId, payload)); + if (error) { + Message.error(error.message); + setIsDoing(false); + return; + } + + setIsDoing(false); + backToList(); + } + + async function onAuthorize() { + setIsDoing(true); + const [, error] = await to(authorizeDataset(id)); + if (error) { + Message.error(error.message); + setIsDoing(false); + return; + } + setIsDoing(false); + backToList(); + } +}; + +export default CreateDataset; diff --git a/web_console_v2/client/src/views/Datasets/DataSourceDetail/PreviewFile/index.module.less b/web_console_v2/client/src/views/Datasets/DataSourceDetail/PreviewFile/index.module.less new file mode 100644 index 000000000..9e4d740c4 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DataSourceDetail/PreviewFile/index.module.less @@ -0,0 +1,7 @@ +.preview_update{ + margin-top: 5px; + margin-bottom: 20px; + .preview_update_text{ + margin-left: 7px; + } +} diff --git a/web_console_v2/client/src/views/Datasets/DataSourceDetail/PreviewFile/index.tsx b/web_console_v2/client/src/views/Datasets/DataSourceDetail/PreviewFile/index.tsx new file mode 100644 index 000000000..510027b68 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DataSourceDetail/PreviewFile/index.tsx @@ -0,0 +1,44 @@ +import React, { FC, useState } from 'react'; +import FileExplorer from 'components/FileExplorer'; +import { fetchDataSourceFileTreeList } from 'services/dataset'; +import { useParams } from 'react-router'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { formatTimestamp } from 'shared/date'; +import { CONSTANTS } from 'shared/constants'; +import styled from './index.module.less'; + +type Props = {}; + +const PreviewFile: FC<Props> = () => { + const { id } = useParams<{ + id: string; + subtab: string; + }>(); + const [updateAt, setUpdateAt] = useState(0); + return ( + <div> + <div className={styled.preview_update}> + <IconInfoCircle /> + <span className={styled.preview_update_text}> + 最新更新时间 : {updateAt ? 
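+            // updateAt is the tree mtime captured by getFileTreeList below; its
+            // initial value of 0 falls back to the empty placeholder.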
formatTimestamp(updateAt) : CONSTANTS.EMPTY_PLACEHOLDER} + </span> + </div> + <FileExplorer + isAsyncMode={true} + isReadOnly={true} + isShowNodeTooltip={false} + isAutoSelectFirstFile={false} + isExpandAll={false} + getFileTreeList={getFileTreeList} + /> + </div> + ); + function getFileTreeList() { + return fetchDataSourceFileTreeList(id).then((res) => { + setUpdateAt(res.data.mtime); + return res.data.files; + }); + } +}; + +export default PreviewFile; diff --git a/web_console_v2/client/src/views/Datasets/DataSourceDetail/index.module.less b/web_console_v2/client/src/views/Datasets/DataSourceDetail/index.module.less new file mode 100644 index 000000000..15ddfabd4 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DataSourceDetail/index.module.less @@ -0,0 +1,57 @@ +@import '~styles/mixins.less'; + +.data_source_detail_padding_box{ + padding: 20px; + padding-bottom: 0; + :global(.arco-spin) { + width: 100%; + } +} + +.data_source_detail_avatar{ + .MixinSquare(44px); + background-color: var(--primaryColor); + color: white; + border-radius: 2px; + font-size: 18px; + text-align: center; + + &::before { + content: attr(data-name); + line-height: 44px; + font-weight: bold; + } +} + +.data_source_name_container{ + display: flex; + align-items: center; +} + +.data_source_name{ + .MixinEllipsis('40vw'); + display: inline-block; + margin-bottom: 0; + margin-right: 7px; + font-size: 16px; + height: 24px; + font-weight: 600; +} + +.comment{ + font-size: 12px; + line-height: 18px; + color: var(--textColorSecondary); +} + +.data_source_text{ + .MixinEllipsis(); +} + +.data_source_detail_tab_pane{ + display: grid; +} + +.data_detail_tab{ + margin-bottom: 0 !important; +} diff --git a/web_console_v2/client/src/views/Datasets/DataSourceDetail/index.tsx b/web_console_v2/client/src/views/Datasets/DataSourceDetail/index.tsx new file mode 100644 index 000000000..2bc088fe7 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DataSourceDetail/index.tsx @@ -0,0 +1,217 @@ +import { Grid, Space, Spin, Tabs, Tooltip, Tag, Message } from '@arco-design/web-react'; +import BackButton from 'components/BackButton'; +import PropertyList from 'components/PropertyList'; +import SharedPageLayout from 'components/SharedPageLayout'; +import GridRow from 'components/_base/GridRow'; +import React, { FC, useEffect, useState } from 'react'; +import { useQuery, useMutation } from 'react-query'; +import { Redirect, Route, useHistory, useParams } from 'react-router'; +import { fetchDataSourceDetail, deleteDataSource } from 'services/dataset'; +import { formatTimestamp } from 'shared/date'; +import { DatasetType, DataSource } from 'typings/dataset'; +import { CONSTANTS } from 'shared/constants'; +import ClickToCopy from 'components/ClickToCopy'; +import MoreActions from 'components/MoreActions'; +import Modal from 'components/Modal'; +import PreviewFile from './PreviewFile'; +import styled from './index.module.less'; + +const { Row } = Grid; + +const { TabPane } = Tabs; + +export enum DataSourceDetailSubTabs { + PreviewFile = 'preview', + RawDataset = 'raw_dataset', +} + +const DataSourceDetail: FC<any> = () => { + const history = useHistory(); + + const { id, subtab } = useParams<{ + id: string; + subtab: string; + }>(); + const [activeTab, setActiveTab] = useState(subtab || DataSourceDetailSubTabs.PreviewFile); + + // ======= Data Source query ============ + const query = useQuery(['fetchDataSourceDetail', id], () => fetchDataSourceDetail({ id }), { + refetchOnWindowFocus: false, + }); + + const dataSource = 
query.data?.data; + + const { type, url, created_at, dataset_type } = dataSource ?? {}; + + const isStreaming = dataset_type === DatasetType.STREAMING; + + useEffect(() => { + setActiveTab(subtab || DataSourceDetailSubTabs.PreviewFile); + }, [subtab]); + + const deleteMutation = useMutation( + (dataSourceId: ID) => { + return deleteDataSource(dataSourceId); + }, + { + onSuccess() { + history.push('/datasets/data_source'); + Message.success('删除成功'); + }, + onError(e: any) { + Message.error(e.message); + }, + }, + ); + + /** IF no subtab be set, defaults to preview */ + if (!subtab) { + return <Redirect to={`/datasets/data_source/${id}/${DataSourceDetailSubTabs.PreviewFile}`} />; + } + + const displayedProps = [ + { + value: String(type).toLocaleUpperCase(), + label: '文件系统', + proport: 0.5, + }, + { + value: ( + <ClickToCopy text={url || ''}> + <Tooltip content={url}> + <div className={styled.data_source_text}>{url || CONSTANTS.EMPTY_PLACEHOLDER}</div> + </Tooltip> + </ClickToCopy> + ), + label: '数据来源', + proport: 1.5, + }, + { + value: getDataFormat(dataSource! ?? {}), + label: '数据格式', + proport: 1, + }, + { + value: created_at ? formatTimestamp(created_at) : CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建时间', + proport: 1, + }, + ].filter(Boolean); + + return ( + <SharedPageLayout title={<BackButton onClick={backToList}>数据源</BackButton>} cardPadding={0}> + <div className={styled.data_source_detail_padding_box}> + <Spin loading={query.isFetching}> + <Row align="center" justify="space-between"> + <GridRow gap="12" style={{ maxWidth: '75%' }}> + <div + className={styled.data_source_detail_avatar} + data-name={query.data?.data.name.slice(0, 2)} + /> + <div> + <div className={styled.data_source_name_container}> + <h3 className={styled.data_source_name}>{query.data?.data.name ?? '....'}</h3> + </div> + {(isStreaming || query.data?.data.comment) && ( + <Space> + {isStreaming && <Tag color="blue">增量</Tag>} + {query.data?.data.comment && ( + <small className={styled.comment}>{query.data?.data.comment}</small> + )} + </Space> + )} + </div> + </GridRow> + + <Space> + <MoreActions + actionList={[ + { + label: '删除', + onClick: onDeleteClick, + danger: true, + }, + ]} + /> + </Space> + </Row> + </Spin> + <PropertyList + properties={displayedProps} + cols={displayedProps.length} + minWidth={150} + align="center" + colProportions={displayedProps.map((item) => item.proport)} + /> + </div> + <Tabs activeTab={activeTab} onChange={onSubtabChange} className={styled.data_detail_tab}> + <TabPane + className={styled.data_source_detail_tab_pane} + title="文件预览" + key={DataSourceDetailSubTabs.PreviewFile} + /> + {/* <TabPane + className={styled.data_source_detail_tab_pane} + title="原始数据集" + key={DataSourceDetailSubTabs.RawDataset} + /> */} + </Tabs> + <div className={`${styled.data_source_detail_padding_box}`}> + <Route + path={`/datasets/data_source/:id/${DataSourceDetailSubTabs.PreviewFile}`} + exact + render={() => { + return <PreviewFile />; + }} + /> + {/* <Route + path={`/datasets/data_source/:id/${DataSourceDetailSubTabs.RawDataset}`} + exact + render={() => { + return <div>原始数据集</div>; + }} + /> */} + </div> + </SharedPageLayout> + ); + + function getDataFormat(dataSource: DataSource) { + let dataDescText = ''; + switch (dataSource.dataset_format) { + case 'TABULAR': + dataDescText = `结构化数据${dataSource.store_format ? 
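+          // e.g. a TABULAR source stored as CSV renders as "结构化数据/CSV"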
'/' + dataSource.store_format : ''}`; + break; + case 'NONE_STRUCTURED': + dataDescText = '非结构化数据'; + break; + case 'IMAGE': + dataDescText = '图片'; + break; + default: + dataDescText = '未知'; + break; + } + return dataDescText; + } + + function backToList() { + history.goBack(); + } + + function onDeleteClick() { + Modal.delete({ + title: '确认删除数据源?', + content: '删除后,该数据源将无法恢复,请谨慎操作。', + onOk: async () => { + deleteMutation.mutate(id); + }, + }); + } + + function onSubtabChange(val: string) { + setActiveTab(val as DataSourceDetailSubTabs); + history.replace(`/datasets/data_source/${id}/${val}`); + } +}; + +export default DataSourceDetail; diff --git a/web_console_v2/client/src/views/Datasets/DataSourceList/index.module.less b/web_console_v2/client/src/views/Datasets/DataSourceList/index.module.less new file mode 100644 index 000000000..17a906758 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DataSourceList/index.module.less @@ -0,0 +1,14 @@ +.styled_plus_icon{ + margin-right: 4px; + vertical-align: 0.03em; +} + +.data_source_name{ + display: inline-block; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + vertical-align: middle; + max-width: 120px; + margin-right: 5px; +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/DataSourceList/index.tsx b/web_console_v2/client/src/views/Datasets/DataSourceList/index.tsx new file mode 100644 index 000000000..1d383393a --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DataSourceList/index.tsx @@ -0,0 +1,303 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useQuery, useMutation } from 'react-query'; +import { uniqBy } from 'lodash-es'; + +import { useGetCurrentProjectId, useTablePaginationWithUrlState, useUrlState } from 'hooks'; + +import { TIME_INTERVAL } from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { transformRegexSpecChar } from 'shared/helpers'; +import { CONSTANTS } from 'shared/constants'; +import { useHistory, generatePath } from 'react-router'; +import routes from '../routes'; +import { Link } from 'react-router-dom'; +import { fetchDataSourceList, deleteDataSource } from 'services/dataset'; + +import { Button, Input, Message, Table, Tag, Tooltip, Typography } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout from 'components/SharedPageLayout'; +import MoreActions from 'components/MoreActions'; +import Modal from 'components/Modal'; +import { IconPlus } from '@arco-design/web-react/icon'; + +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { DataSource, DatasetType, DataSourceDataType } from 'typings/dataset'; +import { DataSourceDetailSubTabs } from 'views/Datasets/DataSourceDetail'; +import styled from './index.module.less'; + +const { Text } = Typography; + +type TProps = {}; +const { Search } = Input; + +const List: FC<TProps> = function (props: TProps) { + const history = useHistory(); + // const [isEdit, setIsEdit] = useState(false); + // const [selectedData, setSelectedData] = useState<DataSource>(); + const [pageTotal, setPageTotal] = useState(0); + const [urlState, setUrlState] = useUrlState({ + keyword: '', + types: [], + created_at_sort: '', + }); + const { paginationProps } = useTablePaginationWithUrlState(); + + const projectId = useGetCurrentProjectId(); + + const listQuery = useQuery( + ['fetchDataSourceList', projectId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }); + } +
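// NOTE: the list endpoint returns every data source in the project; keyword filtering and pagination are applied client-side below +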
return fetchDataSourceList({ + projectId, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + refetchOnWindowFocus: false, + }, + ); + + const deleteMutation = useMutation( + (dataSourceId: ID) => { + return deleteDataSource(dataSourceId); + }, + { + onSuccess() { + listQuery.refetch(); + Message.success('删除成功'); + }, + onError(e: any) { + Message.error(e.message); + }, + }, + ); + + const list = useMemo(() => { + if (!listQuery.data?.data) return []; + + let list = listQuery.data.data; + + if (urlState.keyword) { + const regx = new RegExp(`^.*${transformRegexSpecChar(urlState.keyword)}.*$`); // support fuzzy matching + list = list.filter((item) => regx.test(item.name)); + } + setPageTotal(Math.ceil(list.length / paginationProps.pageSize)); + return list; + }, [listQuery.data, urlState.keyword, paginationProps.pageSize]); + + const typeFilters = useMemo(() => { + if (!listQuery.data?.data) return []; + + const list = listQuery.data.data || []; + + return { + filters: uniqBy(list, 'type').map((item) => { + return { text: item.type, value: item.type }; + }), + onFilter: (value: string, record: DataSource) => { + return record?.type === value; + }, + }; + }, [listQuery.data]); + + const columns = useMemo<ColumnProps<DataSource>[]>(() => { + return [ + { + title: '名称', + dataIndex: 'name', + width: 200, + render: (value: any, record: any) => { + const to = `/datasets/data_source/${record.id}/${DataSourceDetailSubTabs.PreviewFile}`; + if (record.dataset_type === DatasetType.STREAMING) { + return ( + <> + <Tooltip + content={ + <Text style={{ color: '#fff' }} copyable> + {value} + </Text> + } + > + <Link to={to} className={styled.data_source_name}> + {value} + </Link> + </Tooltip> + <Tag color="blue" size="small"> + 增量 + </Tag> + </> + ); + } + return <Link to={to}>{value}</Link>; + }, + }, + { + title: '类型', + dataIndex: 'type', + width: 100, + ...typeFilters, + defaultFilters: urlState.types ?? [], + render: (value: any) => value ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + title: '数据来源', + dataIndex: 'url', + width: 200, + }, + { + title: '创建时间', + dataIndex: 'created_at', + width: 150, + sorter(a: DataSource, b: DataSource) { + return a.created_at - b.created_at; + }, + defaultSortOrder: urlState?.created_at_sort, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + { + title: '格式', + dataIndex: 'dataset_format', + width: 150, + render: (value: any, record: any) => { + switch (value) { + case DataSourceDataType.STRUCT: + return record.store_format ? `结构化数据/${record.store_format}` : '结构化数据'; + case DataSourceDataType.NONE_STRUCTURED: + return '非结构化数据'; + case DataSourceDataType.PICTURE: + return '图片'; + default: + return '未知'; + } + }, + }, + { + title: '操作', + dataIndex: 'operation', + fixed: 'right', + width: 100, + render: (_: any, record) => ( + <> + <button + className="custom-text-button" + style={{ marginRight: 10 }} + onClick={() => { + onEditButtonClick(record); + }} + // Editing data sources is not supported yet + disabled={true} + > + 编辑 + </button> + <MoreActions + actionList={[ + { + label: '删除', + danger: true, + onClick() { + Modal.delete({ + title: `确认要删除「${record.name}」?`, + content: '删除后,该数据源将无法恢复,请谨慎操作。', + onOk() { + deleteMutation.mutate(record.id); + }, + }); + }, + }, + ]} + /> + </> + ), + }, + ]; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [urlState, typeFilters]); + + const pagination = useMemo(() => { + return pageTotal <= 1 + ?
false + : { + ...paginationProps, + }; + }, [paginationProps, pageTotal]); + + return ( + <SharedPageLayout + title="数据源" + tip="数据源指数据的来源,创建数据源即定义访问数据存储空间的地址" + > + <GridRow justify="space-between" align="center"> + <Button + className={'custom-operation-button'} + type="primary" + onClick={onCreateButtonClick} + icon={<IconPlus />} + > + 添加数据源 + </Button> + <Search + className={'custom-input'} + allowClear + placeholder="输入数据源名称" + defaultValue={urlState.keyword} + onSearch={onSearch} + onClear={() => onSearch('')} + /> + </GridRow> + <Table + className="custom-table custom-table-left-side-filter" + rowKey="id" + loading={listQuery.isFetching} + data={list} + scroll={{ x: '100%' }} + columns={columns} + pagination={pagination} + onChange={(pagination, sorter, filters, extra) => { + switch (extra.action) { + case 'sort': + setUrlState((prevState) => ({ + ...prevState, + [`${sorter.field}_sort`]: sorter.direction, + })); + break; + case 'filter': + setUrlState((prevState) => ({ + ...prevState, + page: 1, + types: filters?.type ?? [], + })); + break; + default: + } + }} + /> + </SharedPageLayout> + ); + + function onCreateButtonClick() { + history.push( + generatePath(routes.DatasetCreate, { + action: 'create', + }), + ); + } + function onEditButtonClick(selectedDataSource: DataSource) { + // setSelectedData(selectedDataSource); + // setIsEdit(true); + } + + function onSearch(value: string) { + setUrlState((prevState) => ({ + ...prevState, + keyword: value, + page: 1, + })); + } +}; + +export default List; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/DataBatchAnalyzeModal/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/DataBatchAnalyzeModal/index.module.less new file mode 100644 index 000000000..c84e1ec08 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/DataBatchAnalyzeModal/index.module.less @@ -0,0 +1,4 @@ +.footer_row{ + padding-top: 15px; + border-top: 1px solid var(--backgroundColorGray); +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/DataBatchAnalyzeModal/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/DataBatchAnalyzeModal/index.tsx new file mode 100644 index 000000000..297cd85cf --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/DataBatchAnalyzeModal/index.tsx @@ -0,0 +1,198 @@ +import React, { FC, useMemo } from 'react'; +import ConfigForm, { ItemProps } from 'components/ConfigForm'; +import { to, isStringCanBeParsed } from 'shared/helpers'; +import { fetchDataJobVariableDetail, analyzeDataBatch } from 'services/dataset'; +import { Modal, Button, Message, Form } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; +import { useQuery } from 'react-query'; +import { DataBatchV2, DataJobBackEndType, DataJobVariable } from 'typings/dataset'; +import { useParams } from 'react-router'; +import { + Variable, + VariableComponent, + VariableValueType, + VariableWidgetSchema, +} from 'typings/variable'; +import { TAG_MAPPER, VARIABLE_TIPS_MAPPER, NO_CATEGORY } from '../../../shared'; +import { Tag as TagEnum } from 'typings/workflow'; +import { hydrate } from 'views/Workflows/shared'; +import styled from './index.module.less'; + +type Props = { + dataBatch: DataBatchV2; + visible: boolean; + toggleVisible: (v: boolean) => void; + 
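// invoked with the raw analyze response once the request succeeds +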
onSuccess?: (res: any) => void; +} & React.ComponentProps<typeof Modal>; +type Params = { + [key: string]: any; +}; +type FormData = { + params: Params; +}; + +const DataBatchAnalyzeModal: FC<Props> = ({ + dataBatch, + visible, + toggleVisible, + onSuccess, + ...props +}) => { + const [form] = Form.useForm<FormData>(); + const { id: dataBatchId } = dataBatch; + const { id } = useParams<{ + id: string; + }>(); + const dataJobVariableDetailQuery = useQuery( + ['getDataJobVariableDetail', DataJobBackEndType.ANALYZER], + () => fetchDataJobVariableDetail(DataJobBackEndType.ANALYZER), + { + enabled: true, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const dataJobVariableList = useMemo<Variable[]>(() => { + if (!dataJobVariableDetailQuery.data?.data?.variables) { + return []; + } + + return dataJobVariableDetailQuery.data.data.variables.map((item) => { + let widget_schema: VariableWidgetSchema = {}; + + try { + widget_schema = JSON.parse(item.widget_schema); + } catch (error) {} + + return { + ...item, + widget_schema, + }; + }); + }, [dataJobVariableDetailQuery.data]); + + const paramsList = useMemo<ItemProps[]>(() => { + const list: ItemProps[] = []; + dataJobVariableList + .filter((item) => !item.widget_schema.hidden) + .forEach((item) => { + const baseRuleList = item.widget_schema.required + ? [ + { + required: true, + message: '必填项', + }, + ] + : []; + + list.push({ + tip: VARIABLE_TIPS_MAPPER[item.name], + label: item.name, + tag: TAG_MAPPER[item.tag as TagEnum] || NO_CATEGORY, + field: item.name, + initialValue: + item.widget_schema.component === VariableComponent.Input + ? item.value + : item.typed_value, + componentType: item.widget_schema.component, + rules: + item.widget_schema.component === VariableComponent.Input && + [VariableValueType.LIST, VariableValueType.OBJECT].includes(item.value_type!) + ? 
[ + ...baseRuleList, + { + validator: (value, callback) => { + if ((value && typeof value === 'object') || isStringCanBeParsed(value)) { + callback(); + return; + } + callback(`JSON ${item.value_type} 格式错误`); + }, + }, + ] + : baseRuleList, + }); + }); + return list; + }, [dataJobVariableList]); + return ( + <Modal + title="发起数据探查" + visible={visible} + maskClosable={false} + maskStyle={{ backdropFilter: 'blur(4px)' }} + afterClose={afterClose} + onCancel={closeModal} + okText="探查" + footer={null} + {...props} + > + <Form layout="vertical" form={form} onSubmit={submit}> + <Form.Item label="参数配置" field="params"> + <ConfigForm + filter={variableTagFilter} + groupBy={'tag'} + hiddenGroupTag={true} + hiddenCollapse={true} + cols={2} + formItemList={paramsList} + isResetOnFormItemListChange={true} + /> + </Form.Item> + + <Form.Item wrapperCol={{ span: 24 }} style={{ marginBottom: 0 }}> + <GridRow className={styled.footer_row} justify="end" gap="12"> + <ButtonWithPopconfirm buttonText="取消" onConfirm={closeModal} /> + <Button type="primary" htmlType="submit"> + 探查 + </Button> + </GridRow> + </Form.Item> + </Form> + </Modal> + ); + + function variableTagFilter(item: ItemProps) { + return ( + !!item.tag && + [TAG_MAPPER[TagEnum.INPUT_PARAM], TAG_MAPPER[TagEnum.RESOURCE_ALLOCATION]].includes(item.tag) + ); + } + + function closeModal() { + toggleVisible(false); + } + + async function submit(values: { params: Params }) { + if (!form) { + return; + } + const { params } = values; + const [res, error] = await to( + analyzeDataBatch(id, dataBatchId, { + dataset_job_config: { + variables: hydrate(dataJobVariableList, params, { + isStringifyVariableValue: true, + isStringifyVariableWidgetSchema: true, + isProcessVariableTypedValue: true, + }) as DataJobVariable[], + }, + }), + ); + if (error) { + Message.error(error.message); + return; + } + Message.success('数据探查发起成功'); + closeModal(); + onSuccess?.(res); + } + + function afterClose() { + form.resetFields(); + } +}; + +export default DataBatchAnalyzeModal; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/FeatureDrawer.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/FeatureDrawer.tsx new file mode 100644 index 000000000..ab182ddc5 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/FeatureDrawer.tsx @@ -0,0 +1,77 @@ +import React, { FC, useMemo } from 'react'; +import FeatureInfoDrawer, { + formatChartData, + METRIC_KEY_TRANSLATE_MAP, +} from 'components/DataPreview/StructDataTable/FeatureInfoDrawer'; +import { useQuery } from 'react-query'; +import { fetchFeatureInfo } from 'services/dataset'; +import { floor } from 'lodash-es'; + +type Props = { + id: ID; + batchId: ID; + activeKey?: string; + visible: boolean; + onClose: () => void; + toggleDrawerVisible: (val: boolean) => void; +}; + +const StructDataPreview: FC<Props> = ({ + id, + batchId, + activeKey, + visible, + onClose, + toggleDrawerVisible, +}) => { + const featInfoQuery = useQuery( + ['fetchFeatureInfo', activeKey, id, batchId], + () => fetchFeatureInfo(id, batchId, activeKey!), + { + enabled: Boolean(activeKey) && visible, + refetchOnWindowFocus: false, + }, + ); + const featData = useMemo(() => { + if (!activeKey || !featInfoQuery.data) return undefined; + + const data = featInfoQuery.data?.data; + + // Add custom field missing_rate + const metrics = data?.metrics ??
{}; + if ( + Object.prototype.hasOwnProperty.call(metrics, 'count') && + Object.prototype.hasOwnProperty.call(metrics, 'missing_count') && + !Object.prototype.hasOwnProperty.call(metrics, 'missing_rate') + ) { + const missingCount = Number(metrics.missing_count) || 0; + const allCount = missingCount + (Number(metrics.count) || 0); + // Calc missing_rate + metrics['missing_rate'] = String(floor((missingCount / allCount) * 100, 2)); + } + + const table = Object.entries(metrics).map(([key, value]) => { + return { + key: METRIC_KEY_TRANSLATE_MAP[key], + value: key === 'missing_rate' ? value + '%' : floor(Number(value), 3), + }; + }); + + const hist = formatChartData(data.hist.x ?? [], [{ data: data.hist.y ?? [], label: '数据集' }]); + + return { table, hist }; + }, [featInfoQuery.data, activeKey]); + return ( + <FeatureInfoDrawer + data={featData?.table} + histData={featData?.hist} + featureKey={activeKey} + loading={featInfoQuery.isFetching} + visible={visible} + toggleVisible={toggleDrawerVisible} + onClose={onClose} + /> + ); +}; + +export default StructDataPreview; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/index.module.less new file mode 100644 index 000000000..b74ef2fe2 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/index.module.less @@ -0,0 +1,138 @@ +.data_batch_analyze{ + height: 100%; + display: flex; + justify-content: flex-start; +} + +.data_batch_list_wrapper{ + height: 100%; + border-right: 1px solid #e5e8ef; + position: relative; + .data_batch_list{ + overflow: scroll; + height: calc(100% - 40px); + .data_batch_list_header{ + color: #1D2129; + font-weight: 500; + font-size: 14px; + height: 60px; + line-height: 60px; + padding-left: 20px; + } + .data_batch_list_item{ + height: 56px; + border-top: 1px solid #E5E8EF; + padding-top: 8px; + padding-left: 20px; + position: relative; + cursor: pointer; + & .data_batch_list_item_title{ + font-size: 12px; + line-height: 20px; + height: 20px; + } + & .data_batch_list_item_action{ + position: absolute; + top: 3px; + right: 17px; + :global{ + .arco-btn-text:not(.arco-btn-disabled){ + color: #4E5969; + } + } + } + &.active { + background-color: #F2F3F8; + & .data_batch_list_item_title{ + font-size: 12px; + line-height: 20px; + height: 20px; + color: #1664FF; + } + } + } + } + .data_batch_list_count{ + position: absolute; + bottom: 0; + left: 0; + width: 100%; + height: 40px; + padding: 10px 0; + z-index: 10; + text-align: center; + background: #fff; + font-weight: 400; + font-size: 12px; + line-height: 20px; + color: #86909C; + } + .collapse { + transition: 0.1s background-color; + position: absolute; + top: 285px; + left: 200px; + z-index: 10; + display: flex; + justify-content: center; + align-items: center; + width: 24px; + height: 24px; + padding: 2px 0 1px; + border-radius: 50%; + cursor: pointer; + background: #FFFFFF; + border: 0.857143px solid #E5E8EF; + box-shadow: 0px 0px 7px #F2F3F5; + } + .is_reverse { + transition: 0.1s background-color cubic-bezier(0.4, 0, 0.2, 1); + position: absolute; + left: 5px; + top: 285px; + z-index: 10; + display: flex; + justify-content: center; + align-items: center; + width: 24px; + height: 24px; + transform: rotate(180deg); + padding: 1px 0 2px; + border-radius: 50%; + cursor: pointer; + background: #FFFFFF; + border: 0.857143px solid #E5E8EF; + box-shadow: 0px 0px 7px #F2F3F5; + } +} + +.data_batch_content{ + height: 100%; + 
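// fill the width left over by the collapsible batch list on the left +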
flex: 1; + overflow: scroll; + .data_batch_no_success{ + height: 100%; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + .empty{ + width: 200px; + height: 200px; + } + .no_batch_preview{ + display: inline-block; + margin: 8px 0 10px 0; + font-size: 16px; + line-height: 22px; + color: #606A78; + } + .data_batch_loading{ + width: auto; + } + } +} + +.data_batch_preview{ + overflow: scroll; +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/index.tsx new file mode 100644 index 000000000..0f0f25845 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchAnalyze/index.tsx @@ -0,0 +1,313 @@ +import React, { FC, useState, useMemo, useCallback } from 'react'; +import { useQuery } from 'react-query'; +import { + fetchDataBatchs, + fetchDataBatchPreviewData, + fetchDatasetJobStageById, +} from 'services/dataset'; +import { useParams, useHistory } from 'react-router'; +import { Left } from 'components/IconPark'; +import { TIME_INTERVAL } from 'shared/constants'; +import MoreActions from 'components/MoreActions'; +import StructDataPreviewTable from 'components/DataPreview/StructDataTable'; +import { DataBatchV2, DatasetStateFront } from 'typings/dataset'; +import { Progress, Button, Spin, Message } from '@arco-design/web-react'; +import emptyIcon from 'assets/images/empty.png'; +import { useToggle } from 'react-use'; +import DataBatchAnalyzeModal from './DataBatchAnalyzeModal'; +import { formatTimestamp } from 'shared/date'; +import { useGetCurrentProjectId } from 'hooks'; +import { + isDatasetJobStagePending, + isDatasetJobStageFailed, + isDatasetJobStageSuccess, +} from '../../shared'; +import { JobDetailSubTabs } from 'views/Datasets/NewDatasetJobDetail'; +import FeatureDrawer from './FeatureDrawer'; +import { useFeatureDrawerClickOutside } from 'components/DataPreview/StructDataTable/hooks'; + +import styled from './index.module.less'; + +type TProps = { + datasetJobId: ID; + isOldData: boolean; + onAnalyzeBatch: () => void; +}; +const DataBatchAnalyze: FC<TProps> = function (props: TProps) { + const projectId = useGetCurrentProjectId(); + const [collapsed, setCollapsed] = useState(true); + const [total, setTotal] = useState(0); + const [activeBatch, setActiveBatch] = useState<DataBatchV2>(); + const [visible, toggleVisible] = useToggle(false); + const [activeKey, setActiveFeatKey] = useState<string | undefined>(); + const [drawerVisible, toggleDrawerVisible] = useToggle(false); + const history = useHistory(); + const { id } = useParams<{ + id: string; + }>(); + + // generator listQuery + const listQuery = useQuery( + ['fetchDataBatchs', id], + () => { + return fetchDataBatchs(id!); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + onSuccess: (res) => { + const { page_meta, data } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + if (!activeBatch) { + setActiveBatch(data[0]); + } else { + const newActiveBatch = data.find((item) => item.id === activeBatch.id); + setActiveBatch(newActiveBatch || data[0]); + } + }, + }, + ); + + const queryBatchState = useQuery( + [ + 'fetchDatasetJobStageById', + projectId, + props.datasetJobId, + activeBatch?.latest_analyzer_dataset_job_stage_id, + ], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchDatasetJobStageById( + projectId, + props.datasetJobId, + 
activeBatch?.latest_analyzer_dataset_job_stage_id!, + ); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.EXPORT_STATE_CHECK, + enabled: + Boolean(props.datasetJobId) && + Boolean(activeBatch) && + Boolean(activeBatch?.latest_analyzer_dataset_job_stage_id), + }, + ); + + const batchAnalyzeState = queryBatchState.data?.data; + + // ======= Preview data query ============ + const previewDataQuery = useQuery( + ['fetchDataBatchPreviewData', id, activeBatch?.id], + () => fetchDataBatchPreviewData(id, activeBatch!?.id), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: + Boolean(id) && Boolean(activeBatch) && Boolean(isDatasetJobStageSuccess(batchAnalyzeState)), + }, + ); + const list = useMemo(() => { + return listQuery.data?.data || []; + }, [listQuery.data]); + + const onDrawerClose = useCallback(() => { + setActiveFeatKey(undefined); + }, [setActiveFeatKey]); + + // When clicking outside the struct data table | feature drawer, close the drawer + useFeatureDrawerClickOutside({ setActiveFeatKey, toggleDrawerVisible }); + return ( + <div className={styled.data_batch_analyze}> + <div + className={styled.data_batch_list_wrapper} + style={{ + width: collapsed ? '212px' : '0px', + }} + > + {collapsed && ( + <div className={styled.data_batch_list}> + <div className={styled.data_batch_list_header}>批次列表</div> + {list.map((item, index) => ( + <div + key={index} + className={`${styled.data_batch_list_item} ${ + item.id === activeBatch?.id ? styled.active : '' + }`} + onClick={() => { + handleChangeBatch(item); + }} + > + <div className={styled.data_batch_list_item_title}>批次 {item.name}</div> + <div className={styled.data_batch_list_item_time}> + {formatTimestamp(item.updated_at, 'YYYY-MM-DD HH:mm')} + </div> + <MoreActions + className={styled.data_batch_list_item_action} + actionList={[ + { + label: '查看任务详情', + onClick: () => onDetailClick(item), + }, + ]} + /> + </div> + ))} + <div className={styled.data_batch_list_count}>{total}个记录</div> + </div> + )} + <div + onClick={() => setCollapsed(!collapsed)} + className={collapsed ? styled.collapse : styled.is_reverse} + > + <Left /> + </div> + </div> + <div className={styled.data_batch_content}> + {!activeBatch ? renderNoBatch() : renderBatchDetail()} + </div> + {activeBatch && ( + <DataBatchAnalyzeModal + visible={visible} + toggleVisible={toggleVisible} + dataBatch={activeBatch} + onSuccess={handleAnalyzeSuccess} + /> + )} + {activeBatch && ( + <FeatureDrawer + id={id} + batchId={activeBatch.id} + toggleDrawerVisible={toggleDrawerVisible} + visible={drawerVisible} + activeKey={activeKey} + onClose={onDrawerClose} + /> + )} + </div> + ); + + function renderNoBatch() { + return ( + <div className={styled.data_batch_no_success}> + <img alt="" src={emptyIcon} className={styled.empty} /> + <span className={styled.no_batch_preview}>无数据批次</span> + </div> + ); + } + + function renderBatchDetail() { + return ( + <> + {activeBatch?.latest_analyzer_dataset_job_stage_id === 0 ?
( + renderNoDoBatch() + ) : ( + <> + {isDatasetJobStagePending(batchAnalyzeState) && renderProcessBatch()} + {isDatasetJobStageFailed(batchAnalyzeState) && renderFailedBatch()} + {isDatasetJobStageSuccess(batchAnalyzeState) && renderSuccessBatch()} + </> + )} + </> + ); + } + + function renderNoDoBatch() { + const batchActionMap = { + [DatasetStateFront.SUCCEEDED]: () => ( + <Button type="primary" style={{ width: '136px' }} onClick={openAnalyzeModel}> + 发起探查 + </Button> + ), + [DatasetStateFront.PENDING]: () => ( + <span className={styled.no_batch_preview}>当前数据批次待处理, 请稍后探查</span> + ), + [DatasetStateFront.PROCESSING]: () => ( + <span className={styled.no_batch_preview}>当前数据批次正在处理, 请稍后探查</span> + ), + [DatasetStateFront.FAILED]: () => ( + <span className={styled.no_batch_preview}>当前数据批次处理失败, 无法探查</span> + ), + [DatasetStateFront.DELETING]: () => ( + <span className={styled.no_batch_preview}>当前数据批次正在删除, 无法探查</span> + ), + }; + return ( + <div className={styled.data_batch_no_success}> + <img alt="" src={emptyIcon} className={styled.empty} /> + <span className={styled.no_batch_preview}>无探查数据</span> + {activeBatch?.state && batchActionMap[activeBatch.state]()} + </div> + ); + } + + function renderProcessBatch() { + return ( + <div className={styled.data_batch_no_success}> + <Spin size={50} className={styled.data_batch_loading} /> + <span className={styled.no_batch_preview}>数据探查中,可能会耗时较长…</span> + </div> + ); + } + + function renderFailedBatch() { + return ( + <div className={styled.data_batch_no_success}> + <Progress type="circle" size="large" percent={50} status="error" /> + <span className={styled.no_batch_preview}>数据探查失败</span> + <Button type="text" style={{ width: 136 }} onClick={openAnalyzeModel}> + 点击重试 + </Button> + </div> + ); + } + function renderSuccessBatch() { + return ( + <StructDataPreviewTable + data={previewDataQuery.data?.data} + loading={previewDataQuery.isFetching} + isError={previewDataQuery.isError} + onActiveFeatChange={onActiveFeatChange} + {...props} + /> + ); + } + function handleChangeBatch(batch: DataBatchV2) { + setActiveBatch(batch); + } + + function openAnalyzeModel() { + toggleVisible(true); + } + + function handleAnalyzeSuccess() { + // Callback hook: analyze_job_id now lives on the dataset entity, so refresh the dataset data here + props.onAnalyzeBatch(); + listQuery.refetch(); + } + + function onActiveFeatChange(featKey: string) { + setActiveFeatKey(featKey); + toggleDrawerVisible(true); + } + + function onDetailClick(dataBatch: DataBatchV2) { + if (dataBatch.latest_analyzer_dataset_job_stage_id === 0) { + Message.warning('发起数据探查后才可查看详情'); + } else { + if (props.isOldData) { + history.push(`/datasets/job_detail/${props.datasetJobId}`); + } else { + history.push( + `/datasets/${id}/new/job_detail/${queryBatchState.data?.data.dataset_job_id}/${JobDetailSubTabs.TaskList}`, + ); + } + } + } + + // function onDeleteClick() {} +}; + +export default DataBatchAnalyze; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DataBatchActions/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DataBatchActions/index.tsx new file mode 100644 index 000000000..3c202242b --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DataBatchActions/index.tsx @@ -0,0 +1,60 @@ +import React from 'react'; +import { DataBatchV2, DatasetKindLabel, DatasetStateFront } from 'typings/dataset'; +import { Space } from '@arco-design/web-react'; +import MoreActions from 'components/MoreActions'; +interface IProp { + data: DataBatchV2; +
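// optional action callbacks; onDelete/onStop are reserved for now (the delete item below is disabled) +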
onDelete?: () => void; + onStop?: () => void; + onExport?: (batchId: ID) => void; + onRerun?: (batchId: ID, batchName: string) => void; + kindLabel: DatasetKindLabel; +} + +export default function TaskActions(prop: IProp) { + const { data, kindLabel, onExport, onRerun } = prop; + + const isProcessedDataset = kindLabel === DatasetKindLabel.PROCESSED; + const isSuccess = data.state === DatasetStateFront.SUCCEEDED; + const isFailed = data.state === DatasetStateFront.FAILED; + return ( + <Space> + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + key="rerun-batch" + disabled={!isFailed} + onClick={() => onRerun?.(data.id, data.name)} + > + 重新运行 + </button> + {isProcessedDataset && ( + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + key="export-batch" + disabled={!isSuccess} + onClick={() => onExport?.(data.id)} + > + 导出 + </button> + )} + <MoreActions + actionList={[ + { + disabled: true, + label: '删除', + danger: true, + onClick() {}, + }, + ]} + /> + </Space> + ); +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DataBatchRate/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DataBatchRate/index.tsx new file mode 100644 index 000000000..a4a210a23 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DataBatchRate/index.tsx @@ -0,0 +1,42 @@ +import React, { FC, useState } from 'react'; +import { fetchDatasetJobStageById } from 'services/dataset'; +import { useQuery } from 'react-query'; +import { getIntersectionRate } from 'shared/dataset'; +import { useGetCurrentProjectId } from 'hooks'; +import { Message } from '@arco-design/web-react'; + +type Props = { + datasetJobId: ID; + datasetJobStageId: ID; +}; +const DataBatchRate: FC<Props> = function ({ datasetJobId, datasetJobStageId }: Props) { + const projectId = useGetCurrentProjectId(); + const [rate, setRate] = useState('-'); + const queryBatchState = useQuery( + ['fetchDatasetJobStageById', projectId, datasetJobId, datasetJobStageId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchDatasetJobStageById(projectId, datasetJobId, datasetJobStageId); + }, + { + retry: 2, + enabled: Boolean(datasetJobId) && Boolean(datasetJobStageId), + onSuccess(res) { + if (!res) return; + const { input_data_batch_num_example, output_data_batch_num_example } = res.data; + setRate( + getIntersectionRate({ + input: input_data_batch_num_example, + output: output_data_batch_num_example, + }), + ); + }, + }, + ); + return <>{queryBatchState.isFetching ? 
'-' : rate}</>; +}; + +export default DataBatchRate; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DatasetBatchRerunModal/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DatasetBatchRerunModal/index.module.less new file mode 100644 index 000000000..ed9603421 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DatasetBatchRerunModal/index.module.less @@ -0,0 +1,13 @@ +.footer_grid_row{ + padding-top: 15px; + border-top: 1px solid var(--backgroundColorGray); +} +.model{ + width: 700px; + :global{ + .arco-form{ + max-height: 50vh; + overflow: scroll; + } + } +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DatasetBatchRerunModal/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DatasetBatchRerunModal/index.tsx new file mode 100644 index 000000000..b87eecd22 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/DatasetBatchRerunModal/index.tsx @@ -0,0 +1,331 @@ +import React, { FC, useState, useEffect } from 'react'; +import { to, isStringCanBeParsed } from 'shared/helpers'; +import { rerunDatasetBatchById } from 'services/dataset'; +import { Modal, Form, Input, Message } from '@arco-design/web-react'; +import { DataJobVariable, GlobalConfigs, DataJobBackEndType } from 'typings/dataset'; +import { + Variable, + VariableComponent, + VariableValueType, + VariableWidgetSchema, +} from 'typings/variable'; +import ConfigForm, { ItemProps } from 'components/ConfigForm'; +import { + TAG_MAPPER, + VARIABLE_TIPS_MAPPER, + NO_CATEGORY, + SYNCHRONIZATION_VARIABLE, + isSingleParams, +} from '../../../shared'; +import { Tag as TagEnum } from 'typings/workflow'; +import { + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, + useGetCurrentDomainName, +} from 'hooks'; +import { hydrate } from 'views/Workflows/shared'; +import styled from './index.module.less'; + +export interface Props { + visible: boolean; + id: ID; + batchId: ID; + batchName: string; + kind: DataJobBackEndType; + globalConfigs: GlobalConfigs; + onSuccess?: () => void; + onFail?: () => void; + onCancel?: () => void; +} + +type Params = { + [key: string]: any; +}; +type FormData = { + batch_name: string; + params: Params; + participant: { + [participantName: string]: { + params: Params; + }; + }; +}; + +const DatasetBatchRerunModal: FC<Props> = ({ + id, + batchId, + batchName, + kind, + visible, + globalConfigs, + onSuccess, + onFail, + onCancel, +}) => { + const participantList = useGetCurrentProjectParticipantList(); + const myPureDomainName = useGetCurrentPureDomainName(); + const myDomainName = useGetCurrentDomainName(); + const [globalConfigMap, setGlobalConfigMap] = useState<any>({}); + + const [formInstance] = Form.useForm<FormData>(); + const isSingle = isSingleParams(kind); + useEffect(() => { + const globalConfigParseMap: any = {}; + Object.keys(globalConfigs).forEach((key) => { + const globalConfig = globalConfigs[key]; + globalConfigParseMap[key] = handleParseToConfigFrom( + handleParseDefinition(globalConfig.variables), + false, + ); + }); + setGlobalConfigMap(globalConfigParseMap); + }, [globalConfigs, batchId]); + + useEffect(() => { + formInstance.setFieldValue('batch_name', batchName); + }, [batchName, formInstance]); + return ( + <Modal + title="重新运行" + visible={visible} + maskClosable={false} + afterClose={afterClose} + onCancel={onCancel} + onOk={handleOnOk} + // footer={null} + 
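// the default footer's OK button calls handleOnOk, which submits the form below +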
className={styled.model} + > + <Form form={formInstance} onSubmit={onSubmit}> + <Form.Item field="batch_name" label="数据集批次" disabled={true}> + <Input /> + </Form.Item> + <Form.Item label="我方参数" field="params"> + <ConfigForm + filter={variableTagFilter} + groupBy={'tag'} + hiddenGroupTag={true} + hiddenCollapse={true} + cols={2} + formItemList={globalConfigMap[myPureDomainName]} + isResetOnFormItemListChange={true} + onChange={(val) => { + syncConfigFormValue( + val, + [ + SYNCHRONIZATION_VARIABLE.NUM_PARTITIONS, + SYNCHRONIZATION_VARIABLE.PART_NUM, + SYNCHRONIZATION_VARIABLE.REPLICAS, + ], + false, + ); + }} + /> + </Form.Item> + {!isSingle && renderParticipantConfigLayout()} + </Form> + </Modal> + ); + function renderParticipantConfigLayout() { + return participantList?.map((item, index) => { + const { pure_domain_name } = item; + return ( + <Form.Item field={`participant.${item.name}.params`} label={`${item.name}参数`} key={index}> + <ConfigForm + filter={variableTagFilter} + groupBy={'tag'} + hiddenGroupTag={true} + hiddenCollapse={true} + cols={2} + formItemList={globalConfigMap[pure_domain_name!]} + isResetOnFormItemListChange={true} + onChange={(val) => { + syncConfigFormValue( + val, + [ + SYNCHRONIZATION_VARIABLE.NUM_PARTITIONS, + SYNCHRONIZATION_VARIABLE.PART_NUM, + SYNCHRONIZATION_VARIABLE.REPLICAS, + ], + true, + item.name, + ); + }} + /> + </Form.Item> + ); + }); + } + function variableTagFilter(item: ItemProps) { + return !!item.tag && [TAG_MAPPER[TagEnum.RESOURCE_ALLOCATION]].includes(item.tag); + } + + function syncConfigFormValue( + value: { [prop: string]: any }, + keyList: string[], + isParticipant: boolean, + currentParticipant?: string, + ) { + if (!keyList || !keyList.length || !value) { + return; + } + const senderParams: any = formInstance.getFieldValue('params') || {}; + const participantParams: any = formInstance.getFieldValue('participant'); + keyList.forEach((key) => { + if (!Object.prototype.hasOwnProperty.call(value, key)) { + return; + } + if (isParticipant) { + senderParams[key] = value[key]; + } + participantList.forEach((item) => { + if (isParticipant && item.name === currentParticipant) { + return; + } + const params = participantParams?.[item.name]?.params || {}; + params[key] = value[key]; + }); + }); + formInstance.setFieldsValue({ + params: { + ...senderParams, + }, + participant: { + ...participantParams, + }, + }); + } + + function handleParseDefinition(definitions: DataJobVariable[]) { + return definitions.map((item) => { + let widget_schema: VariableWidgetSchema = {}; + + try { + widget_schema = JSON.parse(item.widget_schema); + } catch (error) {} + return { + ...item, + widget_schema, + }; + }); + } + + function handleParseToConfigFrom(variableList: Variable[], disabled: boolean) { + return variableList + .filter((item) => !item.widget_schema.hidden) + .map((item) => { + const baseRuleList = item.widget_schema.required + ? [ + { + required: true, + message: '必填项', + }, + ] + : []; + return { + disabled, // disable parameter editing during authorization + tip: VARIABLE_TIPS_MAPPER[item.name], + label: item.name, + tag: TAG_MAPPER[item.tag as TagEnum] || NO_CATEGORY, + field: item.name, + initialValue: + item.widget_schema.component === VariableComponent.Input + ? item.value + : item.typed_value, + componentType: item.widget_schema.component, + rules: + item.widget_schema.component === VariableComponent.Input && + [VariableValueType.LIST, VariableValueType.OBJECT].includes(item.value_type!) + ?
[ + ...baseRuleList, + { + validator: (value: any, callback: (error?: string | undefined) => void) => { + if ((value && typeof value === 'object') || isStringCanBeParsed(value)) { + callback(); + return; + } + callback(`JSON ${item.value_type!} 格式错误`); + }, + }, + ] + : baseRuleList, + }; + }); + } + + async function onSubmit(values: FormData) { + if (!formInstance) { + return; + } + const participantParams: { + [domainName: string]: { + dataset_uuid: ID; + variables: DataJobVariable[]; + }; + } = {}; + + if (!isSingle) { + participantList?.reduce( + (acc, item) => { + const participantValues = values.participant[item.name]; + acc[item.domain_name] = { + dataset_uuid: globalConfigs[item.pure_domain_name!]?.dataset_uuid, + variables: hydrate( + globalConfigs[item.pure_domain_name!]?.variables, + participantValues.params, + { + isStringifyVariableValue: true, + isStringifyVariableWidgetSchema: true, + isProcessVariableTypedValue: true, + }, + ) as DataJobVariable[], + }; + + return acc; + }, + {} as { + [domainName: string]: { + dataset_uuid: ID; + variables: DataJobVariable[]; + }; + }, + ); + } + + const [, err] = await to( + rerunDatasetBatchById(id!, batchId!, { + dataset_job_parameter: { + global_configs: { + [myDomainName]: { + dataset_uuid: globalConfigs[myPureDomainName].dataset_uuid, + variables: hydrate(globalConfigs[myPureDomainName].variables, values.params, { + isStringifyVariableValue: true, + isStringifyVariableWidgetSchema: true, + isProcessVariableTypedValue: true, + }) as DataJobVariable[], + }, + ...participantParams, + }, + }, + }), + ); + if (err) { + onFail?.(); + Message.error(err.message || '重新运行失败'); + return; + } + + Message.success('重新运行成功'); + onSuccess?.(); + } + + function handleOnOk() { + formInstance.submit(); + } + + function afterClose() { + // Clear all fields + formInstance.resetFields(); + } +}; + +export default DatasetBatchRerunModal; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/index.module.less new file mode 100644 index 000000000..2a473dc37 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/index.module.less @@ -0,0 +1,5 @@ +@import '~styles/mixins.less'; +.data_batch_table_path{ + .MixinEllipsis('200px'); + cursor: pointer; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/index.tsx new file mode 100644 index 000000000..af9199731 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DataBatchTable/index.tsx @@ -0,0 +1,390 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useTablePaginationWithUrlState, useUrlState } from 'hooks'; +import { fetchDataBatchs } from 'services/dataset'; +import { TABLE_COL_WIDTH, TIME_INTERVAL } from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { useParams, useHistory } from 'react-router'; +import { + dataBatchStateFilters, + FILTER_DATA_BATCH_OPERATOR_MAPPER, + filterExpressionGenerator, + getSortOrder, +} from '../../shared'; +import { Table, Tooltip, Typography, Message, Tag } from '@arco-design/web-react'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import ExportModal from 'components/DatasetExportModal'; +import DatasetBatchRerunModal from './DatasetBatchRerunModal'; +import { Link } from 
'react-router-dom'; +import { JobDetailSubTabs } from 'views/Datasets/NewDatasetJobDetail'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { + DataBatchV2, + DatasetStateFront, + DatasetKindLabel, + GlobalConfigs, + DataJobBackEndType, +} from 'typings/dataset'; +import { expression2Filter } from 'shared/filter'; +import DataBatchActions from './DataBatchActions'; +import DataBatchRate from './DataBatchRate'; +import { humanFileSize } from 'shared/file'; +import styled from './index.module.less'; + +const { Text } = Typography; + +type Props = { + datasetJobId: ID; + isOldData: boolean; + isDataJoin: boolean; + isCopy: boolean; + isInternalProcessed: boolean; + kind: DataJobBackEndType; + datasetRate: string; + globalConfigs: GlobalConfigs; +}; +const DataBatchTable: FC<Props> = function ({ + isOldData, + isDataJoin, + isCopy, + isInternalProcessed, + kind, + datasetRate, + datasetJobId, + globalConfigs, +}: Props) { + const { id, kind_label } = useParams<{ + id: string; + kind_label: DatasetKindLabel; + }>(); + const { paginationProps } = useTablePaginationWithUrlState(); + const [total, setTotal] = useState(0); + const [pageTotal, setPageTotal] = useState(0); + const [currentExportBatchId, setCurrentExportBatchId] = useState<ID>(); + const [isShowExportModal, setIsShowExportModal] = useState(false); + const [currentRerunBatchId, setCurrentRerunBatchId] = useState<ID>(); + const [currentRerunBatchName, setCurrentRerunBatchName] = useState<string>(''); + const [isShowRerunModal, setIsShowRerunModal] = useState(false); + const history = useHistory(); + + // store filter status into urlState + const [urlState, setUrlState] = useUrlState({ + filter: '', + order_by: '', + page: 1, + pageSize: 10, + }); + + // generator listQuery + const listQuery = useQuery( + ['fetchDataBatchs', id, urlState], + () => { + return fetchDataBatchs(id!, { + page: urlState.page, + page_size: urlState.pageSize, + order_by: urlState.order_by, + filter: urlState.filter, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + refetchOnWindowFocus: false, + onSuccess: (res) => { + const { page_meta } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + setPageTotal(page_meta?.total_pages ?? 0); + }, + }, + ); + + // generator listData from listQuery and watch listQuery + const list = useMemo(() => { + return listQuery.data?.data || []; + }, [listQuery.data]); + + const columns = useMemo<ColumnProps<DataBatchV2>[]>(() => { + return [ + { + title: '批次名称', + dataIndex: 'name', + key: 'name', + width: TABLE_COL_WIDTH.NAME, + ellipsis: true, + render: (name: string, record: DataBatchV2) => { + const to = isOldData + ? 
`/datasets/job_detail/${datasetJobId}` + : `/datasets/${id}/new/job_detail/${datasetJobId}/${JobDetailSubTabs.TaskList}?stageId=${record.latest_parent_dataset_job_stage_id}`; + return <Link to={to}>{name}</Link>; + }, + }, + { + title: '状态', + dataIndex: 'state', + key: 'state', + width: TABLE_COL_WIDTH.NORMAL, + ...dataBatchStateFilters, + filteredValue: expression2Filter(urlState.filter).state, + render: (_: any, record: DataBatchV2) => { + const { state, file_size } = record; + const isEmptyDataset = (file_size || 0) <= 0 && state === DatasetStateFront.SUCCEEDED; + const isErrorFileSize = record.file_size === -1; + let type: StateTypes; + let text: string; + switch (state) { + case DatasetStateFront.PENDING: + type = 'processing'; + text = '待处理'; + break; + case DatasetStateFront.PROCESSING: + type = 'processing'; + text = '处理中'; + break; + case DatasetStateFront.SUCCEEDED: + type = 'success'; + text = '可用'; + break; + case DatasetStateFront.DELETING: + type = 'processing'; + text = '删除中'; + break; + case DatasetStateFront.FAILED: + type = 'error'; + text = '处理失败'; + break; + + default: + type = 'default'; + text = '状态未知'; + break; + } + return ( + <div className="indicator-with-tip"> + <StateIndicator type={type} text={text} /> + {isEmptyDataset && !isErrorFileSize && !isInternalProcessed && ( + <Tag className={'dataset-empty-tag'} color="purple" size="small"> + 空集 + </Tag> + )} + </div> + ); + }, + }, + { + title: '文件大小', + dataIndex: 'file_size', + key: 'file_size', + width: TABLE_COL_WIDTH.THIN, + render: (_: any, record: DataBatchV2) => { + const isErrorFileSize = record.file_size === -1; + if (isErrorFileSize) { + return '未知'; + } + return <span>{isInternalProcessed ? '-' : humanFileSize(_ || 0)}</span>; + }, + }, + { + title: '样本量', + dataIndex: 'num_example', + key: 'num_example', + width: TABLE_COL_WIDTH.THIN, + render: (num_example: number, record: DataBatchV2) => { + const isErrorFileSize = record.file_size === -1; + if (isErrorFileSize) { + return '未知'; + } + return !isCopy || isInternalProcessed ? 
'-' : num_example; + }, + }, + isDataJoin && + ({ + title: '求交率', + dataIndex: 'latest_parent_dataset_job_stage_id', + key: 'latest_parent_dataset_job_stage_id', + width: TABLE_COL_WIDTH.ID, + render: (latest_parent_dataset_job_stage_id: ID, record: DataBatchV2) => { + const isErrorFileSize = record.file_size === -1; + if (isErrorFileSize) { + return '未知'; + } + if (isOldData) { + return datasetRate; + } else { + return ( + <DataBatchRate + datasetJobId={datasetJobId} + datasetJobStageId={latest_parent_dataset_job_stage_id} + /> + ); + } + }, + } as any), + { + title: '数据批次路径', + dataIndex: 'path', + key: 'path', + width: TABLE_COL_WIDTH.NORMAL, + render: (path: string) => ( + <Tooltip + content={ + <Text style={{ color: '#fff' }} copyable> + {path} + </Text> + } + > + <div className={styled.data_batch_table_path}>{path}</div> + </Tooltip> + ), + }, + { + title: '更新时间', + dataIndex: 'updated_at', + key: 'updated_at', + width: TABLE_COL_WIDTH.TIME, + sorter(a: DataBatchV2, b: DataBatchV2) { + return a.updated_at - b.updated_at; + }, + defaultSortOrder: getSortOrder(urlState, 'updated_at'), + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + { + title: '操作', + dataIndex: 'state', + key: 'operation', + fixed: 'right', + width: TABLE_COL_WIDTH.OPERATION, + render: (state: DatasetStateFront, record: any) => ( + <DataBatchActions + kindLabel={kind_label} + data={record} + onDelete={listQuery.refetch} + onStop={listQuery.refetch} + onExport={onExport} + onRerun={onRerun} + /> + ), + }, + ].filter(Boolean); + }, [ + urlState, + listQuery.refetch, + datasetJobId, + isOldData, + datasetRate, + isDataJoin, + isCopy, + isInternalProcessed, + kind_label, + id, + ]); + + const pagination = useMemo(() => { + return pageTotal <= 0 + ? false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + return ( + <> + <Table + className={'custom-table custom-table-left-side-filter'} + rowKey="id" + loading={listQuery.isFetching} + data={list} + scroll={{ x: '100%' }} + columns={columns} + pagination={pagination} + onChange={( + pagination, + sorter, + filters: Partial<Record<keyof DataBatchV2, any[]>>, + extra, + ) => { + switch (extra.action) { + case 'sort': { + let orderValue: string; + if (sorter.direction) { + orderValue = sorter.direction === 'ascend' ? 'asc' : 'desc'; + } + setUrlState((prevState) => ({ + ...prevState, + order_by: orderValue ? 
`${sorter.field} ${orderValue}` : '', + })); + break; + } + case 'filter': + setUrlState((prevState) => ({ + ...prevState, + filter: filterExpressionGenerator( + { + ...filters, + name: expression2Filter(urlState.filter).name, + }, + FILTER_DATA_BATCH_OPERATOR_MAPPER, + ), + page: 1, + })); + break; + default: + } + }} + /> + <ExportModal + id={id} + batchId={currentExportBatchId} + visible={isShowExportModal} + onCancel={onExportModalClose} + onSuccess={onExportSuccess} + /> + <DatasetBatchRerunModal + id={id} + batchId={currentRerunBatchId!} + batchName={currentRerunBatchName} + kind={kind} + visible={isShowRerunModal} + globalConfigs={globalConfigs} + onCancel={onRerunModalClose} + onSuccess={onRerunSuccess} + onFail={onRerunModalClose} + /> + </> + ); + function onExport(batchId: ID) { + setCurrentExportBatchId(batchId); + setIsShowExportModal(true); + } + function onExportSuccess(datasetId: ID, datasetJobId: ID) { + onExportModalClose(); + if (!datasetJobId && datasetJobId !== 0) { + Message.info('导出任务ID缺失,请手动跳转「任务管理」查看详情'); + } else { + history.push(`/datasets/${datasetId}/new/job_detail/${datasetJobId}`); + } + } + function onExportModalClose() { + setCurrentExportBatchId(undefined); + setIsShowExportModal(false); + } + + function onRerun(batchId: ID, batchName: string) { + setCurrentRerunBatchId(batchId); + setIsShowRerunModal(true); + setCurrentRerunBatchName(batchName); + } + + function onRerunModalClose() { + setCurrentRerunBatchId(undefined); + setIsShowRerunModal(false); + setCurrentRerunBatchName(''); + } + + function onRerunSuccess() { + onRerunModalClose(); + listQuery.refetch(); + } +}; + +export default DataBatchTable; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DatasetJobStageList/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetDetail/DatasetJobStageList/index.module.less new file mode 100644 index 000000000..6599740a7 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DatasetJobStageList/index.module.less @@ -0,0 +1,3 @@ +.table{ + margin: 12px 0px; +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/DatasetJobStageList/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/DatasetJobStageList/index.tsx new file mode 100644 index 000000000..7d5420689 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/DatasetJobStageList/index.tsx @@ -0,0 +1,205 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useTablePaginationWithUrlState, useUrlState, useGetCurrentProjectId } from 'hooks'; +import { fetchDatasetJobStageList } from 'services/dataset'; +import { TABLE_COL_WIDTH, TIME_INTERVAL } from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { + datasetJobStateFilters, + FILTER_DATA_BATCH_OPERATOR_MAPPER, + filterExpressionGenerator, + getSortOrder, + getJobKindByFilter, + getJobStateByFilter, +} from '../../shared'; +import { Table, Message } from '@arco-design/web-react'; +import StateIndicator from 'components/StateIndicator'; +import DatasetJobsType from 'components/DatasetJobsType'; +import { Link } from 'react-router-dom'; +import { getDatasetJobState } from 'shared/dataset'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { DatasetJobStage, DataJobBackEndType } from 'typings/dataset'; +import { expression2Filter } from 'shared/filter'; +import { LabelStrong } from 'styles/elements'; +import { JobDetailSubTabs } from 
'views/Datasets/NewDatasetJobDetail'; +import styled from './index.module.less'; + +type TProps = { + datasetId: ID; + datasetJobId: ID; +}; +const DataBatchTable: FC<TProps> = function (props: TProps) { + const { datasetJobId, datasetId } = props; + const projectId = useGetCurrentProjectId(); + const { paginationProps } = useTablePaginationWithUrlState(); + const [total, setTotal] = useState(0); + const [pageTotal, setPageTotal] = useState(0); + + // store filter status into urlState + const [urlState, setUrlState] = useUrlState({ + filter: '', + order_by: '', + page: 1, + pageSize: 10, + }); + + // generator listQuery + const listQuery = useQuery( + ['fetchDatasetJobStageList', projectId, datasetJobId, urlState], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + const filter = expression2Filter(urlState.filter); + filter.state = getJobStateByFilter(filter.state); + filter.kind = getJobKindByFilter(filter.kind); + return fetchDatasetJobStageList(projectId!, datasetJobId, { + page: urlState.page, + page_size: urlState.pageSize, + order_by: urlState.order_by, + filter: filterExpressionGenerator(filter, FILTER_DATA_BATCH_OPERATOR_MAPPER), + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + refetchOnWindowFocus: false, + onSuccess: (res) => { + const { page_meta } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + setPageTotal(page_meta?.total_pages ?? 0); + }, + }, + ); + + // generator listData from listQuery and watch listQuery + const list = useMemo(() => { + return listQuery.data?.data || []; + }, [listQuery.data]); + + const columns = useMemo<ColumnProps<DatasetJobStage>[]>(() => { + return [ + { + title: '任务名称', + dataIndex: 'name', + key: 'name', + width: TABLE_COL_WIDTH.NAME, + ellipsis: true, + render: (name: string, record) => { + return ( + <Link + to={(location) => ({ + ...location, + pathname: `/datasets/${datasetId}/new/job_detail/${record.dataset_job_id}/${JobDetailSubTabs.TaskList}`, + search: location.search + ? `${location.search}&stageId=${record.id}` + : `?stageId=${record.id}`, + })} + > + {name} + </Link> + ); + }, + }, + { + title: '任务类型', + dataIndex: 'kind', + key: 'kind', + width: TABLE_COL_WIDTH.NORMAL, + // ...datasetJobTypeFilters, + // filteredValue: expression2Filter(urlState.filter).kind, + render: (type) => { + return <DatasetJobsType type={type as DataJobBackEndType} />; + }, + }, + { + title: '状态', + dataIndex: 'state', + key: 'state', + width: TABLE_COL_WIDTH.NORMAL, + ...datasetJobStateFilters, + filteredValue: expression2Filter(urlState.filter).state, + render: (_: any, record: DatasetJobStage) => { + return ( + <div className="indicator-with-tip"> + <StateIndicator {...getDatasetJobState(record)} /> + </div> + ); + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + key: 'created_at', + width: TABLE_COL_WIDTH.TIME, + sorter(a: DatasetJobStage, b: DatasetJobStage) { + return a.created_at - b.created_at; + }, + defaultSortOrder: getSortOrder(urlState, 'created_at'), + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + ]; + }, [urlState, datasetId]); + + const pagination = useMemo(() => { + return pageTotal <= 1 + ? 
false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + return ( + <> + <LabelStrong fontSize={14} isBlock={true}> + 任务列表 + </LabelStrong> + <Table + className={`custom-table custom-table-left-side-filter ${styled.table}`} + rowKey="id" + loading={listQuery.isFetching} + data={list} + scroll={{ x: '100%' }} + columns={columns} + pagination={pagination} + onChange={( + pagination, + sorter, + filters: Partial<Record<keyof DatasetJobStage, any[]>>, + extra, + ) => { + switch (extra.action) { + case 'sort': { + let orderValue: string; + if (sorter.direction) { + orderValue = sorter.direction === 'ascend' ? 'asc' : 'desc'; + } + setUrlState((prevState) => ({ + ...prevState, + order_by: orderValue ? `${sorter.field} ${orderValue}` : '', + })); + break; + } + case 'filter': + setUrlState((prevState) => ({ + ...prevState, + filter: filterExpressionGenerator( + { + ...filters, + }, + FILTER_DATA_BATCH_OPERATOR_MAPPER, + ), + page: 1, + })); + break; + default: + } + }} + /> + </> + ); +}; + +export default DataBatchTable; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/ProcessedDatasetTable.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/ProcessedDatasetTable.tsx new file mode 100644 index 000000000..67ec7e6e6 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/ProcessedDatasetTable.tsx @@ -0,0 +1,102 @@ +import { Table } from '@arco-design/web-react'; +import React, { FC, useMemo } from 'react'; +import { useQuery } from 'react-query'; +import { fetchChildrenDatasetList } from 'services/dataset'; +import { formatTimestamp } from 'shared/date'; +import { DatasetJobListItem, DatasetKindLabel } from 'typings/dataset'; +import { TIME_INTERVAL } from 'shared/constants'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantList } from 'hooks'; +import { Link } from 'react-router-dom'; +import { DatasetDetailSubTabs } from '.'; +import ImportProgress from '../DatasetList/ImportProgress'; + +const getTableColumns = (allParticipantName: string) => { + return [ + { + title: '名称', + dataIndex: 'name', + name: 'name', + render: (id: string, record: DatasetJobListItem) => { + return ( + <Link + to={`/datasets/${DatasetKindLabel.PROCESSED}/detail/${record.id}/${DatasetDetailSubTabs.DatasetJobDetail}`} + > + {record.name} + </Link> + ); + }, + }, + { + title: '数据集状态', + dataIndex: 'state_frontend', + width: 180, + render: (_: any, record: any) => { + return <ImportProgress dataset={record} />; + }, + }, + { + title: '参与方', + dataIndex: '__participant_name__', + name: '__participant_name__', + render: () => allParticipantName, + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + sorter(a: DatasetJobListItem, b: DatasetJobListItem) { + return a.created_at - b.created_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + ]; +}; + +type Props = { + datasetId: ID; +}; + +const ProcessedDatasetTable: FC<Props> = ({ datasetId }) => { + const projectId = useGetCurrentProjectId(); + const participantList = useGetCurrentProjectParticipantList(); + + const listQuery = useQuery( + ['fetchChildrenDatasetList', datasetId], + () => { + return fetchChildrenDatasetList(datasetId!); + }, + { + enabled: Boolean(projectId && datasetId), + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + }, + ); + + const filteredList = useMemo(() => { + if (!listQuery.data) { + return []; + } + const list = listQuery.data.data || []; + + // desc sort + 
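// (newest first; note that sort() mutates the cached query array in place) +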
list.sort((a, b) => b.created_at - a.created_at); + + return list; + }, [listQuery.data]); + + const allParticipantName = useMemo(() => { + return participantList.map((item) => item.name).join('\n'); + }, [participantList]); + + return ( + <Table + loading={listQuery.isFetching} + data={filteredList || []} + scroll={{ x: '100%' }} + columns={getTableColumns(allParticipantName)} + rowKey="uuid" + /> + ); +}; + +export default ProcessedDatasetTable; diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetDetail/index.module.less new file mode 100644 index 000000000..50db3e14f --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/index.module.less @@ -0,0 +1,79 @@ +@import '~styles/mixins.less'; + +.dataset_detail_padding_box{ + padding: 20px; + padding-bottom: 0; + :global(.arco-spin) { + width: 100%; + } +} + +.dataset_detail_box{ + padding: 0px; + flex: 1; +} + +.dataset_detail_batch_box{ + padding-bottom: 20px; +} + +.dataset_detail_avatar{ + .MixinSquare(44px); + background-color: var(--primaryColor); + color: white; + border-radius: 2px; + font-size: 18px; + text-align: center; + + &::before { + content: attr(data-name); + line-height: 44px; + font-weight: bold; + } +} + +.dataset_name_container{ + display: flex; + align-items: center; +} + +.dataset_name{ + .MixinEllipsis('40vw'); + display: inline-block; + margin-bottom: 0; + margin-right: 7px; + font-size: 16px; + height: 24px; + font-weight: 600; +} + +.comment{ + font-size: 12px; + line-height: 18px; + color: var(--textColorSecondary); +} + +.data_source_text{ + .MixinEllipsis(); +} + +.data_detail_tab_pane{ + display: grid; +} + +.data_detail_tab{ + margin-bottom: 0 !important; +} + +.data_detail_icon_question_circle{ + margin: 0 4px; + color: var(--headerBackground); +} + +.dataset_detal_auth_status{ + width: 90px; +} + +.dataset_detail_cron{ + cursor: pointer; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetDetail/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetDetail/index.tsx new file mode 100644 index 000000000..8e61524fe --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetDetail/index.tsx @@ -0,0 +1,712 @@ +import { Button, Grid, Message, Space, Spin, Tabs, Tooltip, Tag } from '@arco-design/web-react'; +import BackButton from 'components/BackButton'; +import PictureDataPreviewTable from 'components/DataPreview/PictureDataTable'; +import PropertyList from 'components/PropertyList'; +import SharedPageLayout from 'components/SharedPageLayout'; +import GridRow from 'components/_base/GridRow'; +import { isNil } from 'lodash-es'; +import React, { FC, useEffect, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { Redirect, Route, useHistory, useParams } from 'react-router'; +import { useToggle } from 'react-use'; +import { + deleteDataset, + fetchDatasetDetail, + fetchDatasetPreviewData, + fetchDataBatchs, + fetchDatasetJobDetail, + fetchDatasetFlushAuthStatus, + authorizeDataset, + cancelAuthorizeDataset, + stopDatasetStreaming, +} from 'services/dataset'; +import { + getImportStage, + getTotalDataSize, + isFrontendDeleting, + isFrontendProcessing, + isFrontendSucceeded, +} from 'shared/dataset'; +import { formatTimestamp } from 'shared/date'; +import { humanFileSize } from 'shared/file'; +import { + Dataset, + DatasetDataType, + DatasetKindLabel, + DATASET_COPY_CHECKER, + DatasetType__archived, + DatasetProcessedAuthStatus, + ParticipantInfo, + 
DatasetProcessedMyAuthStatus, + DatasetKindBackEndType, + DatasetJobSchedulerState, +} from 'typings/dataset'; +import { datasetPageTitles, isDataJoin, RawAuthStatusOptions, isHoursCronJoin } from '../shared'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import { CONSTANTS, TIME_INTERVAL } from 'shared/constants'; +import CodeEditorDrawer from 'components/CodeEditorDrawer'; +import TaskDetail, { NodeType } from '../TaskDetail'; +import ProcessedDatasetTable from './ProcessedDatasetTable'; +import DatasetJobStageList from './DatasetJobStageList'; +import ClickToCopy from 'components/ClickToCopy'; +import { + IconCheckCircleFill, + IconCloseCircleFill, + IconQuestionCircle, +} from '@arco-design/web-react/icon'; +import MoreActions from 'components/MoreActions'; +import Modal from 'components/Modal'; +import DatasetEditModal from '../DatasetList/DatasetEditModal'; +import DatasetPublishAndRevokeModal from 'components/DatasetPublishAndRevokeModal'; +import BlockchainStorageTable from 'components/BlockchainStorageTable'; +import StatusProgress from 'components/StatusProgress'; +import DataBatchTable from './DataBatchTable/index'; +import DataBatchAnalyze from './DataBatchAnalyze/index'; +import { useGetAppFlagValue, useGetCurrentProjectId } from 'hooks'; +import { FlagKey } from 'typings/flag'; +import ImportProgress from '../DatasetList/ImportProgress/index'; +import { getIntersectionRate } from 'shared/dataset'; +import { fetchSysInfo } from 'services/settings'; +import { to } from 'shared/helpers'; +import styled from './index.module.less'; + +const { Row } = Grid; + +const { TabPane } = Tabs; + +export enum DatasetDetailSubTabs { + PreviewData = 'preview', + Schema = 'schema', + Image = 'image', + RelativeDataset = 'relative_dataset', + Databatch = 'data_batch', + DatasetJobDetail = 'dataset_job_detail', + BlockchainStorage = 'blockchain_storage', +} + +const DatasetDetail: FC<any> = () => { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + + const { kind_label, id, subtab } = useParams<{ + kind_label: DatasetKindLabel; + id: string; + subtab: string; + }>(); + const isProcessedDataset = kind_label === DatasetKindLabel.PROCESSED; + const isRaw = kind_label === DatasetKindLabel.RAW; + const [activeTab, setActiveTab] = useState(subtab || DatasetDetailSubTabs.PreviewData); + const [isShowPublishModal, setIsShowPublishModal] = useState(false); + const [selectDataset, setSelectDataset] = useState<Dataset>(); + const [editModalVisible, toggleEditModalVisible] = useToggle(false); + const bcs_support_enabled = useGetAppFlagValue(FlagKey.BCS_SUPPORT_ENABLED); + + const sysInfoQuery = useQuery(['fetchSysInfo'], () => fetchSysInfo(), { + retry: 2, + refetchOnWindowFocus: false, + enabled: Boolean(isProcessedDataset), + }); + + const myPureDomainName = useMemo<string>(() => { + return sysInfoQuery.data?.data?.pure_domain_name ?? 
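/* fall back to an empty string until sysInfo loads; the pure domain name is later compared against participant keys to label our own side */ 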
''; + }, [sysInfoQuery.data]); + + // Authorization fallback: ask the backend to flush the latest auth status, then refetch the detail + useQuery(['fetchDatasetFlushAuthStatus', id], () => fetchDatasetFlushAuthStatus(id), { + refetchOnWindowFocus: false, + enabled: Boolean(isProcessedDataset), + onSuccess() { + query.refetch(); + }, + }); + + // ======= Dataset query ============ + const query = useQuery(['fetchDatasetDetail', id], () => fetchDatasetDetail(id), { + refetchOnWindowFocus: false, + refetchInterval: TIME_INTERVAL.CONNECTION_CHECK, + }); + + const datasetJobQuery = useQuery( + ['fetchDatasetJobDetail', projectId, query.data?.data.parent_dataset_job_id], + () => fetchDatasetJobDetail(projectId!, query.data?.data.parent_dataset_job_id!), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: Boolean(projectId && query.data?.data.parent_dataset_job_id), + }, + ); + // ======= Preview data query ============ + const previewDataQuery = useQuery( + ['fetchDatasetPreviewData', id], + () => fetchDatasetPreviewData(id), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: + Boolean(id) && [DatasetDetailSubTabs.Image].includes(activeTab as DatasetDetailSubTabs), + }, + ); + + const batchListQuery = useQuery( + ['fetchDataBatchs', id], + () => { + return fetchDataBatchs(id!); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + + const isAnalyzeSuccess = useMemo(() => { + const data = batchListQuery.data?.data || []; + return data.some((item) => item.latest_analyzer_dataset_job_stage_id !== 0); + }, [batchListQuery.data]); + const stateInfo = useMemo<{ + text: string; + type: StateTypes; + noResultText: string; + }>(() => { + if (query.data?.data) { + const { text, type } = getImportStage(query.data.data); + let noResultText = ''; + if (type === 'processing') { + noResultText = '数据处理中,请稍后'; + } + return { text, type, noResultText }; + } + + return { + text: '状态未知', + type: 'default', + noResultText: '抱歉,数据暂时无法显示', + }; + }, [query]); + + useEffect(() => { + setActiveTab(subtab || DatasetDetailSubTabs.PreviewData); + }, [subtab]); + + const datasetJobGlobalConfigs = useMemo(() => { + if (!datasetJobQuery.data?.data?.global_configs?.global_configs) { + return {}; + } + return datasetJobQuery.data?.data?.global_configs?.global_configs; + }, [datasetJobQuery]); + + const dataset = query.data?.data; + + const { + dataset_format, + dataset_kind, + updated_at, + num_feature, + num_example, + path, + is_published, + import_type, + dataset_type, + analyzer_dataset_job_id, + parent_dataset_job_id, + auth_frontend_state, + local_auth_status, + participants_info = { participants_map: {} }, + } = dataset ?? {}; + + const datasetJob = datasetJobQuery.data?.data; + const { + has_stages, + input_data_batch_num_example = 0, + output_data_batch_num_example = 0, + kind, + scheduler_state, + scheduler_message, + } = datasetJob ?? 
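/* the parent job may not have loaded yet; destructuring from an empty object keeps the defaults above in effect */ 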
{}; + const isOldData = !Boolean(has_stages); + const isJoin = isDataJoin(kind); + const datasetRate = getIntersectionRate({ + input: input_data_batch_num_example, + output: output_data_batch_num_example, + }); + const isDatasetStructType = dataset_format === DatasetDataType.STRUCT; + const isDatasetPictureType = dataset_format === DatasetDataType.PICTURE; + const isDatasetNoneStructType = dataset_format === DatasetDataType.NONE_STRUCTURED; + const isCopy = !import_type || import_type === DATASET_COPY_CHECKER.COPY; + const isStreaming = dataset_type === DatasetType__archived.STREAMING; + const isStreamRunable = scheduler_state === DatasetJobSchedulerState.RUNNABLE; + const isStreamStopped = scheduler_state === DatasetJobSchedulerState.STOPPED; + const isAuthorized = local_auth_status === DatasetProcessedMyAuthStatus.AUTHORIZED; + const isInternalProcessed = dataset_kind === DatasetKindBackEndType.INTERNAL_PROCESSED; + const isHideAuth = Boolean(Object.keys(participants_info.participants_map).length === 0); + /** IF no subtab be set, defaults to preview */ + if (!subtab) { + return ( + <Redirect to={`/datasets/${kind_label}/detail/${id}/${DatasetDetailSubTabs.PreviewData}`} /> + ); + } + if (isInternalProcessed && subtab === DatasetDetailSubTabs.DatasetJobDetail) { + return ( + <Redirect to={`/datasets/${kind_label}/detail/${id}/${DatasetDetailSubTabs.Databatch}`} /> + ); + } + const isProcessing = dataset ? isFrontendProcessing(dataset) : false; + const isDeleting = dataset ? isFrontendDeleting(dataset) : false; + + const displayedProps = [ + { + value: isNil(dataset_format) + ? CONSTANTS.EMPTY_PLACEHOLDER + : isDatasetStructType + ? '结构化数据' + : isDatasetPictureType + ? '图片' + : '非结构化数据', + label: '数据格式', + proport: 0.5, + }, + { + value: isInternalProcessed ? '-' : dataset ? humanFileSize(getTotalDataSize(dataset)) : '0 B', + label: '数据大小', + proport: 0.5, + }, + { + value: !isCopy || isInternalProcessed ? '-' : num_feature?.toLocaleString('en') || '0', + label: '总列数', + proport: 0.5, + }, + { + value: !isCopy || isInternalProcessed ? '-' : num_example?.toLocaleString('en') || '0', + label: '总行数', + proport: 0.5, + }, + isStreaming && { + value: isStreamStopped ? ( + <StateIndicator type="error" text="已停止" tag={false} /> + ) : ( + <span className={styled.dataset_detail_cron}> + {isHoursCronJoin(datasetJob) ? '每小时' : '每天'} + <Tooltip content={scheduler_message}> + <IconQuestionCircle className={styled.data_detail_icon_question_circle} /> + </Tooltip> + </span> + ), + label: isRaw ? '导入周期' : '求交周期', + proport: 0.5, + }, + { + value: ( + <ClickToCopy text={path || ''}> + <Tooltip content={path}> + <div className={styled.data_source_text}>{path || CONSTANTS.EMPTY_PLACEHOLDER}</div> + </Tooltip> + </ClickToCopy> + ), + label: '数据集路径', + proport: 2, + }, + { + value: updated_at ? 
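/* updated_at is a unix timestamp; show the shared placeholder until it exists */ 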
formatTimestamp(updated_at) : CONSTANTS.EMPTY_PLACEHOLDER, + label: '最近更新', + proport: 1, + }, + dataset?.validation_jsonschema && + Object.keys(dataset.validation_jsonschema).length > 0 && + ({ + value: ( + <CodeEditorDrawer.Button + title="校验规则" + value={JSON.stringify(dataset?.validation_jsonschema, null, 2)} + /> + ), + label: '校验规则', + proport: 0.5, + } as any), + ].filter(Boolean); + + return ( + <SharedPageLayout + title={<BackButton onClick={backToList}>{datasetPageTitles[kind_label]}</BackButton>} + cardPadding={0} + > + <div className={styled.dataset_detail_padding_box}> + <Spin loading={query.isFetching || datasetJobQuery.isFetching}> + <Row align="center" justify="space-between"> + <GridRow gap="12" style={{ maxWidth: '75%' }}> + <div + className={styled.dataset_detail_avatar} + data-name={query.data?.data.name.slice(0, 2)} + /> + <div> + <div className={styled.dataset_name_container}> + <h3 className={styled.dataset_name}>{query.data?.data.name ?? '....'}</h3> + {query.data && <ImportProgress dataset={query.data.data} tag={false} />} + </div> + {(isStreaming || !isCopy || query.data?.data.comment) && ( + <Space> + {isStreaming && <Tag color="blue">增量</Tag>} + {!isCopy && <Tag>{'非拷贝'}</Tag>} + {query.data?.data.comment && ( + <small className={styled.comment}>{query.data?.data.comment}</small> + )} + </Space> + )} + </div> + </GridRow> + + <Space> + {!isProcessedDataset && ( + <Space> + {is_published ? ( + <IconCheckCircleFill style={{ color: 'var(--successColor)' }} /> + ) : ( + <IconCloseCircleFill style={{ color: 'var(--warningColor)' }} /> + )} + <span>{is_published ? '已发布至工作区' : '未发布至工作区'}</span> + <Button + type={is_published ? 'default' : 'primary'} + onClick={onPublishClick} + disabled={dataset ? !isFrontendSucceeded(dataset) : true} + > + {is_published ? '撤销发布' : '发布'} + </Button> + </Space> + )} + {isProcessedDataset && isInternalProcessed && renderAuthStatus()} + {isProcessedDataset && !isHideAuth && ( + <Button type={isAuthorized ? 'default' : 'primary'} onClick={onAuthClick}> + {isAuthorized ? '撤销授权' : '授权'} + </Button> + )} + {isStreaming && isStreamRunable && ( + <Button type="default" onClick={onStopStreaming}> + {isProcessedDataset ? 
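/* one button, two meanings: stop the scheduled join for processed datasets, stop the incremental import for raw ones */ 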
'终止定时求交' : '终止增量导入'} + </Button> + )} + <MoreActions + actionList={[ + { + label: '编辑', + onClick: onEditClick, + disabled: !dataset || isProcessing || isDeleting, + }, + { + label: '删除', + onClick: onDeleteClick, + danger: true, + disabled: !dataset || isProcessing || isDeleting, + }, + ]} + /> + </Space> + </Row> + </Spin> + <PropertyList + properties={displayedProps} + cols={displayedProps.length} + minWidth={150} + align="center" + colProportions={displayedProps.map((item) => item.proport)} + /> + </div> + <Tabs activeTab={activeTab} onChange={onSubtabChange} className={styled.data_detail_tab}> + {!isInternalProcessed && ( + <TabPane + className={styled.data_detail_tab_pane} + title="任务详情" + key={DatasetDetailSubTabs.DatasetJobDetail} + /> + )} + {isDatasetPictureType && isAnalyzeSuccess && ( + <TabPane + className={styled.data_detail_tab_pane} + title="图片预览" + key={DatasetDetailSubTabs.Image} + /> + )} + <TabPane + className={styled.data_detail_tab_pane} + title="数据批次" + key={DatasetDetailSubTabs.Databatch} + /> + {isCopy && !isDatasetNoneStructType && ( + <TabPane + className={styled.data_detail_tab_pane} + title="数据探查" + key={DatasetDetailSubTabs.PreviewData} + /> + )} + <TabPane + className={styled.data_detail_tab_pane} + title={ + <span> + 下游数据集 + <Tooltip content="通过使用本数据集所产生的数据集"> + <IconQuestionCircle className={styled.data_detail_icon_question_circle} /> + </Tooltip> + </span> + } + key={DatasetDetailSubTabs.RelativeDataset} + /> + {isRaw && bcs_support_enabled && ( + <TabPane + className={styled.data_detail_tab_pane} + title="区块链存证" + key={DatasetDetailSubTabs.BlockchainStorage} + /> + )} + </Tabs> + <div + className={`${styled.dataset_detail_padding_box} ${ + activeTab === DatasetDetailSubTabs.PreviewData ? styled.dataset_detail_box : '' + } ${activeTab === DatasetDetailSubTabs.Databatch ? 
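/* the data-batch tab gets extra bottom padding; the preview tab stretches to fill the page */ 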
styled.dataset_detail_batch_box : ''}`} + > + <Route + path={`/datasets/:kind_label/detail/:id/${DatasetDetailSubTabs.Databatch}`} + exact + render={() => { + return ( + <DataBatchTable + datasetJobId={parent_dataset_job_id!} + isOldData={isOldData} + isDataJoin={isJoin} + isCopy={isCopy} + datasetRate={datasetRate} + kind={kind!} + isInternalProcessed={isInternalProcessed} + globalConfigs={datasetJobGlobalConfigs} + /> + ); + }} + /> + {isCopy && ( + <Route + path={`/datasets/:kind_label/detail/:id/${DatasetDetailSubTabs.PreviewData}`} + exact + render={(props) => { + return ( + <DataBatchAnalyze + {...props} + datasetJobId={analyzer_dataset_job_id!} + isOldData={isOldData} + onAnalyzeBatch={() => { + query.refetch(); + }} + /> + ); + }} + /> + )} + {isDatasetPictureType && isAnalyzeSuccess && ( + <Route + path={`/datasets/:kind_label/detail/:id/${DatasetDetailSubTabs.Image}`} + exact + render={(props) => { + return ( + <PictureDataPreviewTable + data={previewDataQuery.data?.data} + loading={previewDataQuery.isFetching} + isError={previewDataQuery.isError} + noResultText={stateInfo.noResultText} + /> + ); + }} + /> + )} + + <Route + path={`/datasets/:kind_label/detail/:id/${DatasetDetailSubTabs.DatasetJobDetail}`} + exact + render={() => { + return ( + <> + <TaskDetail + middleJump={true} + datasetId={id} + datasetJobId={dataset?.parent_dataset_job_id} + isShowRatio={false} + isOldData={isOldData} + onNodeClick={(node) => { + if (node.type === NodeType.DATASET_PROCESSED) { + onSubtabChange(DatasetDetailSubTabs.PreviewData); + } + }} + // TODO: pass error message when state_frontend = DatasetStateFront.FAILED, + errorMessage="" + isProcessedDataset={isProcessedDataset} + /> + {(parent_dataset_job_id || parent_dataset_job_id === 0) && ( + <DatasetJobStageList datasetId={id} datasetJobId={parent_dataset_job_id!} /> + )} + </> + ); + }} + /> + <Route + path={`/datasets/:kind_label/detail/:id/${DatasetDetailSubTabs.RelativeDataset}`} + exact + render={() => { + return <ProcessedDatasetTable datasetId={id} />; + }} + /> + <Route + path={`/datasets/:kind_label/detail/:id/${DatasetDetailSubTabs.BlockchainStorage}`} + exact + render={() => { + return <BlockchainStorageTable datasetId={id} />; + }} + /> + </div> + {dataset && ( + <DatasetEditModal + dataset={dataset} + visible={editModalVisible} + toggleVisible={toggleEditModalVisible} + onSuccess={onEditSuccess} + /> + )} + + <DatasetPublishAndRevokeModal + onCancel={onPublishCancel} + onSuccess={onPublishSuccess} + dataset={selectDataset} + visible={isShowPublishModal} + /> + </SharedPageLayout> + ); + + function renderParticipantAuth(val: ParticipantInfo[]) { + return ( + <> + {val.map((participant, index) => ( + <div key={index}> + {participant.name}{' '} + {participant.auth_status === DatasetProcessedMyAuthStatus.AUTHORIZED + ? '已授权' + : '未授权'} + </div> + ))} + </> + ); + } + + function renderAuthStatus() { + if (auth_frontend_state === DatasetProcessedAuthStatus.AUTH_PENDING) { + const participants_info_map = Object.entries(participants_info.participants_map || {}).map( + ([key, value]) => ({ + name: key === myPureDomainName ? 
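/* participants_map is keyed by pure domain name; replace our own key with the "our side" label for display */ 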
'我方' : key, + auth_status: value['auth_status'], + }), + ); + return ( + <StatusProgress + className={styled.dataset_detal_auth_status} + options={RawAuthStatusOptions} + status={auth_frontend_state || DatasetProcessedAuthStatus.AUTH_PENDING} + isTip={true} + toolTipContent={renderParticipantAuth(participants_info_map)} + /> + ); + } + return ( + <StatusProgress + className={styled.dataset_detal_auth_status} + options={RawAuthStatusOptions} + status={auth_frontend_state || DatasetProcessedAuthStatus.TICKET_PENDING} + /> + ); + } + + function onPublishSuccess() { + setIsShowPublishModal(false); + query.refetch(); + } + function onPublishCancel() { + setIsShowPublishModal(false); + } + + function backToList() { + history.goBack(); + } + + function onPublishClick() { + if (!dataset) { + return; + } + setSelectDataset(dataset); + setIsShowPublishModal(true); + } + async function onAuthClick() { + try { + if (isAuthorized) { + await cancelAuthorizeDataset(id); + } else { + await authorizeDataset(id); + } + query.refetch(); + } catch (err: any) { + Message.error(err.message); + } + } + + function onStopStreaming() { + Modal.delete({ + title: `确定终止${isProcessedDataset ? '定时求交' : '增量导入'}`, + content: '终止后将无法被重启,请谨慎操作', + okText: '终止', + onOk: async () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + if (!parent_dataset_job_id) { + return; + } + const [, err] = await to(stopDatasetStreaming(projectId, parent_dataset_job_id)); + if (err) { + Message.error(err.message || '终止失败'); + return; + } + Message.success('终止成功'); + datasetJobQuery.refetch(); + }, + }); + } + function onEditClick() { + toggleEditModalVisible(true); + } + function onEditSuccess() { + query.refetch(); + } + function onDeleteClick() { + Modal.delete({ + title: '确认删除数据集?', + content: '删除操作无法恢复,请谨慎操作', + onOk: async () => { + if (!dataset) { + return; + } + try { + const resp = await deleteDataset(dataset.id); + // If delete success, HTTP response status code is 204, resp is empty string + const isDeleteSuccess = !resp; + if (isDeleteSuccess) { + Message.success('删除成功'); + history.replace(`/datasets/${kind_label}`); + } else { + const errorMessage = resp?.message ?? 
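/* a 204 with an empty body means success; anything else falls back to a generic failure message */ 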
'删除失败'; + Message.error(errorMessage!); + } + } catch (error) { + Message.error(error.message); + } + }, + }); + } + + function onSubtabChange(val: string) { + setActiveTab(val as DatasetDetailSubTabs); + history.replace(`/datasets/${kind_label}/detail/${id}/${val}`); + } +}; + +export default DatasetDetail; diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobBasicInfo/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobBasicInfo/index.tsx new file mode 100644 index 000000000..e36ebbe3d --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobBasicInfo/index.tsx @@ -0,0 +1,60 @@ +import React, { useMemo } from 'react'; +import { formatTimeCount, formatTimestamp } from 'shared/date'; +import PropertyList from 'components/PropertyList'; +import WhichParticipant from 'components/WhichParticipant'; +import { DatasetJobState } from 'typings/dataset'; +import dayjs from 'dayjs'; +import CountTime from 'components/CountTime'; + +type TJobBasicInfo = { + coordinatorId: ID; + createTime: DateTime; + startTime: DateTime; + finishTime: DateTime; + jobState: DatasetJobState; +}; + +export default function JobBasicInfo(prop: TJobBasicInfo) { + const { coordinatorId, createTime = 0, startTime = 0, finishTime = 0, jobState } = prop; + const isRunning = [DatasetJobState.PENDING, DatasetJobState.RUNNING].includes(jobState); + const basicInfo = useMemo(() => { + function TimeRender(prop: { time: DateTime }) { + const { time } = prop; + return <span>{time <= 0 ? '-' : formatTimestamp(time)}</span>; + } + function RunningTimeRender(prop: { start: DateTime; finish: DateTime; isRunning: boolean }) { + const { start, finish, isRunning } = prop; + if (isRunning) { + return start <= 0 ? ( + <span>待运行</span> + ) : ( + <CountTime time={dayjs().unix() - start} isStatic={false} /> + ); + } + return <span>{finish - start <= 0 ? '-' : formatTimeCount(finish - start)}</span>; + } + return [ + { + label: '任务发起方', + value: coordinatorId === 0 ? 
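/* coordinator_id 0 denotes the local party; any other id is resolved to a participant name */ 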
'本方' : <WhichParticipant id={coordinatorId} />, + }, + { + label: '创建时间', + value: <TimeRender time={createTime} />, + }, + { + label: '开始时间', + value: <TimeRender time={startTime} />, + }, + { + label: '结束时间', + value: <TimeRender time={finishTime} />, + }, + { + label: '运行时长', + value: <RunningTimeRender start={startTime} finish={finishTime} isRunning={isRunning} />, + }, + ]; + }, [coordinatorId, createTime, startTime, finishTime, isRunning]); + return <PropertyList properties={basicInfo} cols={5} />; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobParamsPanel/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobParamsPanel/index.module.less new file mode 100644 index 000000000..deab4d6c0 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobParamsPanel/index.module.less @@ -0,0 +1,39 @@ +.container{ + margin: 10px 0; + padding: 20px 20px 10px 20px; + border-radius: 2px; + background-color: rgb(var(--gray-1)); + width: 240px; + height: 313px; + overflow: auto; +} +.params_header{ + display: flex; + flex-direction: row; + justify-content: space-between; +} +.params_label{ + font-weight: 400; + font-size: 12px; + line-height: 18px; + color: #4e5969; + display: inline-block; + height: 18px; +} +.params_body{ + width: 100%; + border-bottom: 1px solid #e5e6eb; + margin: 8px 0; +} +.params_list_item{ + list-style: none; + display: flex; + margin-bottom: 8px; + width: 100%; +} +.styled_param_span{ + max-width: 100px; + text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobParamsPanel/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobParamsPanel/index.tsx new file mode 100644 index 000000000..3d77f2cda --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobParamsPanel/index.tsx @@ -0,0 +1,114 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Button, Dropdown, Menu, Tooltip } from '@arco-design/web-react'; +import { LabelStrong } from 'styles/elements'; +import { IconDown } from '@arco-design/web-react/icon'; +import { GlobalConfigs } from 'typings/dataset'; +import ClickToCopy from 'components/ClickToCopy'; +import { Tag } from 'typings/workflow'; +import styled from './index.module.less'; + +type TProps = { + globalConfigs: GlobalConfigs; +}; + +const JobParamsPanel: FC<TProps> = (props: TProps) => { + const { globalConfigs = {} } = props; + const [selected, setSelected] = useState(''); + + const selectRole = useMemo(() => { + if (!selected && globalConfigs && Object.keys(globalConfigs).length) { + return Object.keys(globalConfigs)[0]; + } + return selected; + }, [globalConfigs, selected]); + + const resourceAllocations = useMemo(() => { + if (!globalConfigs || !selectRole) { + return []; + } + return globalConfigs[selectRole].variables.filter( + (variable) => variable.tag === Tag.RESOURCE_ALLOCATION, + ); + }, [globalConfigs, selectRole]); + + const inputParams = useMemo(() => { + if (!globalConfigs || !selectRole) { + return []; + } + return globalConfigs[selectRole].variables.filter( + (variable) => variable.tag === Tag.INPUT_PARAM, + ); + }, [globalConfigs, selectRole]); + + const dropList = () => { + return ( + <Menu onClickMenuItem={(key) => setSelected(key)}> + {Object.keys(globalConfigs).map((item) => { + return <Menu.Item key={item}>{item}</Menu.Item>; + })} + </Menu> + ); + }; + return ( + <div> + <div className={styled.params_header}> + 
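{/* panel header: title on the left, a dropdown on the right to switch between the roles found in globalConfigs */}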
<LabelStrong fontSize={14} isBlock={true}> + 任务配置 + </LabelStrong> + <Dropdown droplist={dropList()} position="bl"> + <Button size={'mini'} type="text"> + {selectRole} <IconDown /> + </Button> + </Dropdown> + </div> + <div className={styled.container}> + <span className={styled.params_label}>资源配置</span> + <div className={styled.params_body}> + {resourceAllocations.map((item) => { + return <ParamListItemRender key={item.name} value={item} justifyContent={'space-between'} />; + })} + </div> + <span className={styled.params_label}>输入参数</span> + <div className={styled.params_body}> + {inputParams.map((item) => { + return <ParamListItemRender key={item.name} value={item} justifyContent={'start'} joiner={true} />; + })} + </div> + </div> + </div> + ); +}; + +function ParamListItemRender({ + value = {} as any, + justifyContent = 'space-between', + joiner = false, +}) { + return ( + <li className={styled.params_list_item} style={{ justifyContent: justifyContent }}> + <span className={styled.styled_param_span}> + { + <Tooltip + position="left" + content={<ClickToCopy text={value.name}>{value.name}</ClickToCopy>} + > + <span>{value.name}</span> + </Tooltip> + } + </span> + {joiner ? <span> = </span> : ''} + <span className={styled.styled_param_span}> + { + <Tooltip + position="left" + content={<ClickToCopy text={value.value}>{value.value}</ClickToCopy>} + > + <span>{value.value}</span> + </Tooltip> + } + </span> + </li> + ); +} + +export default JobParamsPanel; diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobTitle/index.less b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobTitle/index.less new file mode 100644 index 000000000..4a984e7de --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobTitle/index.less @@ -0,0 +1,14 @@ +.job-title-icon{ + height: 44px; + width: 44px; + border-radius: 4px; + background: #686a72; + display: flex; + justify-content: center; + align-items: center; + font-size: 16px; + color: #ffffff; +} +.job-title-name{ + margin: 0 12px; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobTitle/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobTitle/index.tsx new file mode 100644 index 000000000..1d1ee08ed --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/JobTitle/index.tsx @@ -0,0 +1,47 @@ +import React from 'react'; +import { DatasetJob, DatasetJobListItem } from 'typings/dataset'; +import StateIndicator from 'components/StateIndicator'; +import GridRow from 'components/_base/GridRow'; +import { getDatasetJobState, getDatasetJobType } from 'shared/dataset'; +import TaskActions from '../../TaskList/TaskActions'; +import { Space } from '@arco-design/web-react'; +import './index.less'; + +type TJobTitleProp = { + data: DatasetJob; + onStop?: () => void; + onDelete?: () => void; + id: ID; +}; + +export default function JobTitle(prop: TJobTitleProp) { + const { data = {} as DatasetJob, onStop, onDelete, id } = prop; + const jobData: DatasetJobListItem = { + name: '', + uuid: '', + project_id: data.project_id, + kind: data.kind, + state: data.state, + result_dataset_id: '', + result_dataset_name: '', + id, + created_at: 0, + coordinator_id: 0, + has_stages: false, + }; + return ( + <GridRow + justify={'space-between'} + style={{ + marginBottom: 0, + }} + > + <Space> + <span className="job-title-icon">{getDatasetJobType(jobData.kind)}</span> + <span className="job-title-name">{data.name}</span> + <StateIndicator {...getDatasetJobState(jobData)} /> + </Space> + <TaskActions 
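/* reuse the task-list row actions so stop/delete behave identically here */ 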
data={jobData} onDelete={onDelete} onStop={onStop} /> + </GridRow> + ); +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/WorkFlowPods/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/WorkFlowPods/index.module.less new file mode 100644 index 000000000..a6fde20b5 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/WorkFlowPods/index.module.less @@ -0,0 +1,60 @@ +.workflow_pods_header{ + display: flex; + flex-direction: row; + justify-content: flex-start; + align-items: center; + :global(.arco-radio-button) { + &:after { + background-color: unset; + } + } +} + +.job_detail_more_title{ + color: #4e5969; + font-size: 12px; + +} +.job_detail_more_content{ + color: #1d2129; + font-size: 12px; +} + +.job_detail_more_link{ + display: inline-block; + font-weight: 400; + font-size: 12px; + line-height: 20px; + margin-bottom: 12px; +} + +.job_detail_more_button{ + margin-left: auto; + font-size: 12px; +} + +.job_detail_state_indicator{ + padding-left: 16px; + position: relative; + .dot { + display: inline-block; + position: absolute; + top: 4px; + left: 2px; + width: 8px; + height: 8px; + border-radius: 50%; + background-color: rgb(245, 63, 63); + } +} + +.job_detail_error_wrapper{ + max-height: 300px; + overflow: scroll; + .job_detail_error_title{ + color: #fff; + } + .job_detail_error_item{ + margin-bottom: 10px; + } +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/WorkFlowPods/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/WorkFlowPods/index.tsx new file mode 100644 index 000000000..a1c91863b --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/WorkFlowPods/index.tsx @@ -0,0 +1,218 @@ +import React, { useMemo, useState } from 'react'; +import { LabelStrong } from 'styles/elements'; +import { Button, Message, Popover, Radio, Tooltip } from '@arco-design/web-react'; +import { useQuery } from 'react-query'; +import { useGetCurrentProjectId, useTablePaginationWithUrlState } from 'hooks'; +import { fetchJobById, getWorkflowDetailById } from 'services/workflow'; +import { TIME_INTERVAL } from 'shared/constants'; +import { get } from 'lodash-es'; +import { Pod, PodState } from 'typings/job'; +import { Table } from '@arco-design/web-react'; +import ClickToCopy from 'components/ClickToCopy'; +import StateIndicator from 'components/StateIndicator'; +import { getPodState, podStateFilters } from 'views/Workflows/shared'; +import { formatTimestamp } from 'shared/date'; +import { Link } from 'react-router-dom'; +import styled from './index.module.less'; +type TWorkFlowPods = { + workFlowId?: ID; +}; + +export default function WorkFlowPods(prop: TWorkFlowPods) { + const { workFlowId } = prop; + const { paginationProps, reset } = useTablePaginationWithUrlState({ + urlStateOption: { navigateMode: 'replace' }, + }); + const projectId = useGetCurrentProjectId(); + const [selectJobId, setSelectJobId] = useState<ID>(); + const workFlowDetail = useQuery( + ['fetch_workflow_detail', projectId, workFlowId], + () => { + if (!workFlowId) { + return Promise.resolve({ data: {} }); + } + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: {} }); + } + return getWorkflowDetailById(workFlowId!, projectId); + }, + { + cacheTime: 1, + refetchInterval: TIME_INTERVAL.CONNECTION_CHECK, + onSuccess: (data) => { + const jobIds = get(data, 'data.job_ids') || []; + if (jobIds.length) { + setSelectJobId((pre) => { + return 
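/* keep the current selection while it remains a valid job id; otherwise fall back to the first job */ 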
jobIds.indexOf(pre) > -1 ? pre : jobIds[0]; + }); + } + }, + }, + ); + + const jobDetail = useQuery( + ['fetchJobById', selectJobId], + () => fetchJobById(Number(selectJobId)), + { + enabled: Boolean(selectJobId), + }, + ); + + const jobList = useMemo(() => { + if (!workFlowDetail.data) { + return []; + } + const jobIds = get(workFlowDetail.data, 'data.job_ids') || []; + const jobNames = get(workFlowDetail.data, 'data.config.job_definitions') || []; + const jobs = get(workFlowDetail.data, 'data.jobs') || []; + return jobIds.map((item: ID, index: number) => { + const { error_message, state } = jobs[index]; + return { + label: jobNames[index].name, + value: item, + hasError: + error_message && + (error_message.app || JSON.stringify(error_message.pods) !== '{}') && + state !== 'COMPLETED', + errorMessage: error_message, + }; + }); + }, [workFlowDetail.data]); + + const jobData = useMemo(() => { + return get(jobDetail, 'data.data.pods') || ([] as Pod[]); + }, [jobDetail]); + + const handleOnChangeJob = (val: ID) => { + setSelectJobId(() => val); + reset(); + }; + + const columns = [ + { + title: 'Pod', + dataIndex: 'name', + key: 'name', + width: 400, + render: (val: string) => { + return <ClickToCopy text={val}>{val}</ClickToCopy>; + }, + }, + { + title: '运行状态', + dataIndex: 'state', + key: 'state', + ...podStateFilters, + width: 200, + render: (_: PodState, record: Pod) => { + return <StateIndicator {...getPodState(record)} />; + }, + }, + { + title: '创建时间', + dataIndex: 'creation_timestamp', + key: 'creation_timestamp', + width: 150, + sorter(a: Pod, b: Pod) { + return a.creation_timestamp - b.creation_timestamp; + }, + render: (val: number) => { + return formatTimestamp(val); + }, + }, + { + title: '操作', + dataIndex: 'actions', + key: 'actions', + width: 150, + render: (_: any, record: Pod) => { + return ( + <Link target={'_blank'} to={`/logs/pod/${selectJobId}/${record.name}`}> + 查看日志 + </Link> + ); + }, + }, + ]; + + return ( + <> + <div className={styled.workflow_pods_header}> + <LabelStrong fontSize={14} isBlock={true}> + 实例信息 + </LabelStrong> + <Radio.Group onChange={handleOnChangeJob} size="small" type="button" value={selectJobId}> + {jobList.map((item: any) => { + return ( + <Radio key={item.value} value={item.value}> + {item.hasError ? ( + <Tooltip content={renderErrorMessage(item.errorMessage)}> + <span className={styled.job_detail_state_indicator}> + <span className={styled.dot} /> + {item.label} + </span> + </Tooltip> + ) : ( + item.label + )} + </Radio> + ); + })} + </Radio.Group> + <Popover + trigger="hover" + position="br" + content={ + <span> + <div className={styled.job_detail_more_title}>工作流</div> + <Link + className={styled.job_detail_more_link} + to={`/workflow-center/workflows/${workFlowId}`} + > + 点击查看工作流 + </Link> + <div className={styled.job_detail_more_title}>工作流 ID</div> + <div className={styled.job_detail_more_content}>{workFlowId}</div> + </span> + } + > + <Button className={styled.job_detail_more_button} type="text"> + 更多信息 + </Button> + </Popover> + </div> + {jobList.length > 0 ? 
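/* only render the pod table once the workflow reports at least one job */ 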
( + <Table + rowKey={'name'} + className={'custom-table custom-table-left-side-filter'} + loading={jobDetail.isFetching} + data={jobData} + columns={columns} + pagination={{ + ...paginationProps, + }} + onChange={(pagination, sorter, filters, extra) => { + if (extra.action === 'filter') { + reset(); + } + }} + /> + ) : null} + </> + ); + function renderErrorMessage(errorMessage: any) { + const { app, pods } = errorMessage; + return ( + <div className={styled.job_detail_error_wrapper}> + <h3 className={styled.job_detail_error_title}>Main Error: {app}</h3> + {Object.entries(pods).map(([pod, error], index) => ( + <div className={styled.job_detail_error_item} key={index}> + <div>Pod: {pod}</div> + <div>Error: {error}</div> + </div> + ))} + </div> + ); + } +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/index.module.less new file mode 100644 index 000000000..0c4fe8704 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/index.module.less @@ -0,0 +1,9 @@ +.flex_container{ + display: flex; + flex-direction: row; + justify-content: space-between; + :global(.arco-spin) { + flex-grow: 1; + margin-right: 12px; + } +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetJobDetail/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/index.tsx new file mode 100644 index 000000000..1050a4f4b --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetJobDetail/index.tsx @@ -0,0 +1,111 @@ +import React, { FC, useMemo, useState } from 'react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import TaskDetail, { NodeType } from '../TaskDetail'; +import JobParamsPanel from './JobParamsPanel'; +import { useParams } from 'react-router-dom'; +import JobTitle from './JobTitle'; +import { useQuery } from 'react-query'; +import { fetchDatasetJobDetail } from 'services/dataset'; +import { useGetCurrentProjectId } from 'hooks'; +import { DatasetJob, DatasetJobState } from 'typings/dataset'; +import { get } from 'lodash-es'; +import JobBasicInfo from './JobBasicInfo'; +import WorkFlowPods from './WorkFlowPods'; +import { DatasetDetailSubTabs } from '../DatasetDetail'; +import { useHistory } from 'react-router'; +import { isDataJoin } from '../shared'; +import BackButton from 'components/BackButton'; +import styled from './index.module.less'; + +type TProps = {}; + +const DatasetJobDetail: FC<TProps> = function (props: TProps) { + const { job_id } = useParams<{ job_id: string }>(); + const projectId = useGetCurrentProjectId(); + const [workFlowId, setWorkFlowId] = useState<ID>(); + const history = useHistory(); + const [jobBasicInfo, setJobBasicInfo] = useState( + {} as { + coordinatorId: ID; + createTime: DateTime; + startTime: DateTime; + finishTime: DateTime; + jobState: DatasetJobState; + }, + ); + const jobDetailQuery = useQuery( + ['fetch_dataset_jobDetail', projectId, job_id], + () => fetchDatasetJobDetail(projectId!, job_id!), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: Boolean(projectId && job_id), + onSuccess: (data) => { + const { workflow_id, coordinator_id, created_at, started_at, finished_at, state } = + data.data || {}; + setJobBasicInfo({ + coordinatorId: coordinator_id, + createTime: created_at, + startTime: started_at, + finishTime: finished_at, + jobState: state, + }); + setWorkFlowId(workflow_id); + }, + }, + ); + const jobDetail = useMemo(() => { + if (!jobDetailQuery.data) { + return {}; + } + return 
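/* unwrap the response payload; the empty-object branch above keeps later destructuring safe */ 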
jobDetailQuery.data.data; + }, [jobDetailQuery.data]); + const isJoin = isDataJoin(jobDetailQuery.data?.data.kind); + const globalConfigs = useMemo(() => { + if (!jobDetailQuery.data) { + return {}; + } + return get(jobDetailQuery.data, 'data.global_configs.global_configs'); + }, [jobDetailQuery.data]); + + const backToList = () => { + history.goBack(); + }; + + return ( + <SharedPageLayout title={<BackButton onClick={backToList}>任务详情</BackButton>}> + <JobTitle + id={job_id} + data={jobDetail as DatasetJob} + onStop={jobDetailQuery.refetch} + onDelete={backToList} + /> + <JobBasicInfo {...jobBasicInfo} /> + <div className={styled.flex_container}> + <TaskDetail + middleJump={false} + datasetJobId={job_id} + isShowRatio={isJoin} + onNodeClick={(node, datasetMapper = {}) => { + if (node.type === NodeType.DATASET_PROCESSED) { + const datasetInfo = datasetMapper[node?.data?.dataset_uuid ?? '']; + datasetInfo && + datasetInfo.id && + history.push( + `/datasets/processed/detail/${datasetInfo.id}/${DatasetDetailSubTabs.PreviewData}`, + ); + } + }} + // TODO: pass error message when state_frontend = DatasetStateFront.FAILED, + errorMessage="" + // TODO: confirm the dataset is processed or not by the state of job + isProcessedDataset={true} + /> + <JobParamsPanel globalConfigs={globalConfigs} /> + </div> + <WorkFlowPods workFlowId={workFlowId} /> + </SharedPageLayout> + ); +}; + +export default DatasetJobDetail; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/AddBatchModal.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/AddBatchModal.tsx deleted file mode 100644 index 300a79f20..000000000 --- a/web_console_v2/client/src/views/Datasets/DatasetList/AddBatchModal.tsx +++ /dev/null @@ -1,106 +0,0 @@ -import React, { FC, useRef } from 'react'; -import { Modal, Button, message } from 'antd'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import IconButton from 'components/IconButton'; -import { useTranslation } from 'react-i18next'; -import { Close } from 'components/IconPark'; -import { DatasetType } from 'typings/dataset'; -import AddBatchForm, { AddBatchExposedRef } from '../AddBatchForm'; -import { to } from 'shared/helpers'; -import GridRow from 'components/_base/GridRow'; -import styled from 'styled-components'; - -const ContainerModal = styled(Modal)` - .ant-modal-body { - padding-bottom: 14px; - } - .ant-modal-footer { - display: none; - } -`; - -type Props = { - datasetId?: ID; - datasetType?: DatasetType; - visible: boolean; - toggleVisible: (v: boolean) => void; - onSuccess: Function; -} & React.ComponentProps<typeof Modal>; - -const AddBatchBatch: FC<Props> = ({ - datasetId, - datasetType, - visible, - toggleVisible, - onSuccess, - ...props -}) => { - const { t } = useTranslation(); - const formRef = useRef<AddBatchExposedRef>(); - - return ( - <ContainerModal - title={t('dataset.title_create')} - visible={visible} - width={900} - style={{ top: '10%' }} - closeIcon={<IconButton icon={<Close />} onClick={closeModal} />} - zIndex={Z_INDEX_GREATER_THAN_HEADER} - onCancel={closeModal} - okText={t('confirm')} - {...props} - > - <AddBatchForm - ref={formRef as any} - datasetId={datasetId} - datasetType={datasetType} - renderButtons={({ submitting }) => { - return ( - <GridRow gap="12"> - <Button disabled={submitting} onClick={closeModal}> - {t('cancel')} - </Button> - - <Button type="primary" onClick={submit} loading={submitting}> - {t('dataset.btn_import')} - </Button> - </GridRow> - ); - }} - /> - </ContainerModal> - ); - - function closeModal() { - 
toggleVisible(false); - } - async function submit() { - if (!formRef.current) return; - - if (!datasetType || !datasetId) { - return message.error('缺少 ID 或 Type'); - } - - const { submit: submitAddBatchForm, toggleSubmit, validate } = formRef.current; - - const isValid = await validate(); - - if (!isValid) return; - - const [res, error] = await to(submitAddBatchForm()); - - if (error) { - message.error(error.message); - toggleSubmit(false); - return; - } - - message.success(t('dataset.msg_start_importing')); - toggleSubmit(false); - closeModal(); - - onSuccess(res); - } -}; - -export default AddBatchBatch; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/BatchImportRecordsModal.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/BatchImportRecordsModal.tsx deleted file mode 100644 index 901230342..000000000 --- a/web_console_v2/client/src/views/Datasets/DatasetList/BatchImportRecordsModal.tsx +++ /dev/null @@ -1,129 +0,0 @@ -import React, { FC } from 'react'; -import { Modal, Table, Button, Tooltip } from 'antd'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import IconButton from 'components/IconButton'; -import { useTranslation } from 'react-i18next'; -import { Close } from 'components/IconPark'; -import { DataBatch } from 'typings/dataset'; -import i18n from 'i18n'; -import { formatTimestamp } from 'shared/date'; -import { DataBatchImportProgress } from './ImportProgress'; -import PropertyList from 'components/PropertyList'; -import { omit } from 'lodash'; -import GridRow from 'components/_base/GridRow'; - -type Props = { - visible: boolean; - records: DataBatch[]; - toggleVisible: (v: boolean) => void; -} & React.ComponentProps<typeof Modal>; - -const BatchImportRecords: FC<Props> = ({ visible, toggleVisible, records, ...props }) => { - const { t } = useTranslation(); - - const columns = [ - { - title: i18n.t('dataset.label_event_time'), - dataIndex: 'event_time', - key: 'event_time', - ellipsis: true, - render: (time: number) => { - return <div>{formatTimestamp(time)}</div>; - }, - }, - { - title: i18n.t('dataset.status'), - dataIndex: 'state', - name: 'state', - render: (_: any, record: DataBatch) => { - return <DataBatchImportProgress batch={record} />; - }, - }, - { - title: i18n.t('dataset.col_files_size'), - dataIndex: 'file_size', - name: 'file_size', - render: (fileSize: number) => { - return <span>{fileSize.toLocaleString('en')} KB</span>; - }, - }, - { - title: i18n.t('created_at'), - dataIndex: 'created_at', - name: 'created_at', - width: 190, - render: (date: number) => <div>{formatTimestamp(date)}</div>, - }, - { - title: i18n.t('operation'), - dataIndex: 'operation', - name: 'operation', - render: (_: number, record: DataBatch) => ( - <Tooltip title="Coming soon"> - <Button - size="small" - disabled - type="link" - style={{ marginLeft: '-10px' }} - onClick={onDeleteClick} - > - {t('delete')} - </Button> - </Tooltip> - ), - }, - ]; - - return ( - <Modal - title={t('dataset.title_create')} - visible={visible} - width={1000} - style={{ top: '20%' }} - closeIcon={<IconButton icon={<Close />} onClick={closeModal} />} - zIndex={Z_INDEX_GREATER_THAN_HEADER} - onCancel={closeModal} - okText={t('dataset.btn_add_batch')} - {...props} - > - <Table - expandable={{ - expandedRowRender: (record: DataBatch) => ( - <div> - {record.details.files.map((item) => { - const restProps = Object.entries( - omit(item, ['source_path', 'state']), - ).map(([label, value]) => ({ label, value })); - - return ( - <details> - <summary> - <GridRow gap="200" 
style={{ display: 'inline-grid' }} justify="space-between"> - <strong> {item.source_path}</strong> - {item.state} - </GridRow> - </summary> - <PropertyList lineHeight={16} properties={restProps} cols={1} /> - </details> - ); - })} - </div> - ), - rowExpandable: (record: DataBatch) => record.details.files.length > 0, - }} - size="small" - dataSource={records} - columns={columns} - /> - </Modal> - ); - - function closeModal() { - toggleVisible(false); - } - function onDeleteClick() { - // TODO: coming soon - } -}; - -export default BatchImportRecords; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions.tsx deleted file mode 100644 index 12e04ae84..000000000 --- a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions.tsx +++ /dev/null @@ -1,71 +0,0 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Dataset, DatasetType } from 'typings/dataset'; -import GridRow from 'components/_base/GridRow'; -import { useTranslation } from 'react-i18next'; -import { Button } from 'antd'; -import { isImportFailed } from 'shared/dataset'; -import { ButtonType } from 'antd/lib/button'; - -const Container = styled(GridRow)` - margin-left: ${(props: any) => (props.type === 'link' ? '-10px !important' : 0)}; - - > .hide-on-bush { - visibility: hidden; - pointer-events: none; - } -`; - -export type DatasetAction = 'add-batch' | 'view-records' | 'delete' | 'copy-path'; -type Props = { - dataset: Dataset; - type: ButtonType; - onPerformAction: (args: { action: DatasetAction; dataset: Dataset }) => void; -}; - -const actions: DatasetAction[] = ['add-batch', 'view-records', 'copy-path', 'delete']; - -const DatasetActions: FC<Props> = ({ dataset, type = 'default', onPerformAction }) => { - const { t } = useTranslation(); - - const disabled: Record<DatasetAction, boolean> = { - 'add-batch': isImportFailed(dataset), - 'view-records': false, - 'copy-path': false, - delete: false, - }; - const visible = { - 'add-batch': dataset.dataset_type === DatasetType.STREAMING, - 'view-records': true, - 'copy-path': true, - delete: true, - }; - const text = { - 'add-batch': t('dataset.btn_add_batch'), - 'view-records': t('dataset.btn_view_records'), - 'copy-path': t('dataset.btn_copy_path'), - delete: t('delete'), - }; - - return ( - <Container {...{ type }}> - {actions.map((action) => { - return ( - <Button - size="small" - type={type} - key={action} - danger={action === 'delete'} - onClick={() => onPerformAction({ action, dataset })} - disabled={disabled[action]} - className={!visible[action] ? 
'hide-on-bush' : ''} - > - {text[action]} - </Button> - ); - })} - </Container> - ); -}; - -export default DatasetActions; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions/index.module.less new file mode 100644 index 000000000..6dc37bfb0 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions/index.module.less @@ -0,0 +1,4 @@ +.disabled{ + cursor: not-allowed !important; + color: var(--color-text-4) !important; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions/index.tsx new file mode 100644 index 000000000..47d645f89 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetActions/index.tsx @@ -0,0 +1,155 @@ +import React, { FC, useMemo } from 'react'; +import { Dataset, DatasetKindLabel } from 'typings/dataset'; +import GridRow from 'components/_base/GridRow'; +import { ButtonProps, Popconfirm } from '@arco-design/web-react'; +import DatasetEditModal from '../DatasetEditModal'; +import { useToggle } from 'react-use'; +import MoreActions from 'components/MoreActions'; +import { isFrontendDeleting, isFrontendProcessing, isFrontendSucceeded } from 'shared/dataset'; +import { isDatasetTicket, isDatasetPublished } from 'views/Datasets/shared'; +import { isFrontendAuthorized } from 'views/Datasets/shared'; +import styled from './index.module.less'; + +export type DatasetAction = + | 'delete' + | 'publish-to-project' + | 'export' + | 'authorize' + | 'cancel-authorize'; +type Props = { + dataset: Dataset; + type: ButtonProps['type']; + onPerformAction: (args: { action: DatasetAction; dataset: Dataset }) => void; + kindLabel: DatasetKindLabel; +}; + +const DatasetActions: FC<Props> = ({ dataset, type = 'default', onPerformAction, kindLabel }) => { + const [editModalVisible, toggleEditModalVisible] = useToggle(false); + + const isProcessing = isFrontendProcessing(dataset); + const isSuccess = isFrontendSucceeded(dataset); + const isDeleting = isFrontendDeleting(dataset); + const isAuthorized = isFrontendAuthorized(dataset); + const isProcessedDataset = kindLabel === DatasetKindLabel.PROCESSED; + // whether the dataset is still in the ticket (approval) flow + const isTicket = isDatasetTicket(dataset); + const isPublished = isDatasetPublished(dataset); + // legacy datasets carry no participants_info, so disable authorize/revoke for them + const isDisabledAuth = useMemo(() => { + const participantsMap = dataset?.participants_info?.participants_map; + if (!participantsMap) return true; + return Object.keys(participantsMap).length === 0; + }, [dataset]); + + const actionList = [ + { + label: '编辑', + onClick: onEditClick, + disabled: isProcessing || isDeleting, + }, + { + label: '删除', + onClick: onDeleteClick, + danger: true, + disabled: isProcessing || isDeleting, + }, + ]; + + return ( + <> + <GridRow {...{ type }}> + {isProcessedDataset && isAuthorized && ( + <Popconfirm + title={`确认撤销对 ${dataset.name} 的授权?`} + disabled={isDisabledAuth} + onOk={onCancelAuthorizeClick} + > + <button + className={`custom-text-button ${isDisabledAuth ? 
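/* grey the button out for legacy datasets that cannot be (de)authorized */ 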
styled.disabled : ''}`} + style={{ + marginRight: 10, + }} + type="button" + > + 撤销授权 + </button> + </Popconfirm> + )} + {isProcessedDataset && !isAuthorized && ( + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + onClick={onAuthorizeClick} + disabled={isDisabledAuth} + > + 授权 + </button> + )} + {!isProcessedDataset && !isTicket && ( + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + disabled={!isSuccess} + onClick={onPublishClick} + > + {isPublished ? '撤销发布' : '发布'} + </button> + )} + {isProcessedDataset && ( + <button + className="custom-text-button" + style={{ + marginRight: 10, + }} + type="button" + key="edit-dataset" + disabled={!isSuccess} // new processed dataset can't be exported + onClick={onExportClick} + > + 导出 + </button> + )} + <MoreActions actionList={actionList} /> + </GridRow> + <DatasetEditModal + dataset={dataset} + visible={editModalVisible} + toggleVisible={toggleEditModalVisible} + onSuccess={onEditSuccess} + /> + </> + ); + + function onEditClick() { + toggleEditModalVisible(true); + } + function onDeleteClick() { + onPerformAction?.({ action: 'delete', dataset }); + } + + function onEditSuccess() {} + + function onPublishClick() { + onPerformAction?.({ action: 'publish-to-project', dataset }); + } + + function onExportClick() { + onPerformAction?.({ action: 'export', dataset }); + } + + function onAuthorizeClick() { + onPerformAction?.({ action: 'authorize', dataset }); + } + + function onCancelAuthorizeClick() { + onPerformAction?.({ action: 'cancel-authorize', dataset }); + } +}; + +export default DatasetActions; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetEditModal/index.module.less b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetEditModal/index.module.less new file mode 100644 index 000000000..c984639e1 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetEditModal/index.module.less @@ -0,0 +1,4 @@ +.footer_row{ + padding-top: 15px; + border-top: 1px solid var(--backgroundColorGray); +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetEditModal/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetEditModal/index.tsx new file mode 100644 index 000000000..32fe35991 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetEditModal/index.tsx @@ -0,0 +1,112 @@ +import React, { FC, useEffect } from 'react'; +import { to } from 'shared/helpers'; +import { MAX_COMMENT_LENGTH, validNamePattern } from 'shared/validator'; +import { forceToRefreshQuery } from 'shared/queryClient'; +import { editDataset } from 'services/dataset'; + +import { Modal, Button, Message, Form, Input } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; + +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; +import { DATASET_LIST_QUERY_KEY } from '../DatasetTable'; + +import { Dataset, DatasetEditDisplay } from 'typings/dataset'; +import styled from './index.module.less'; + +type Props = { + dataset: Dataset; + visible: boolean; + toggleVisible: (v: boolean) => void; + onSuccess: Function; +} & React.ComponentProps<typeof Modal>; + +const DatasetEditModal: FC<Props> = ({ dataset, visible, toggleVisible, onSuccess, ...props }) => { + const [form] = Form.useForm<DatasetEditDisplay>(); + const { id, name, comment } = dataset; + + useEffect(() => { + if (visible && form && dataset) { + form.setFieldsValue({ + name: dataset.name, + 
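/* name is display-only (its input is disabled below); only comment is editable */ 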
comment: dataset.comment, + }); + } + }, [visible, form, dataset]); + + return ( + <Modal + title="编辑数据集" + visible={visible} + maskClosable={false} + maskStyle={{ backdropFilter: 'blur(4px)' }} + afterClose={afterClose} + onCancel={closeModal} + footer={null} + {...props} + > + <Form initialValues={{ name, comment }} layout="vertical" form={form} onSubmit={submit}> + <Form.Item + label="数据集名称" + field="name" + rules={[ + { required: true, message: 'Please input dataset name!' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input placeholder="请输入数据集名称" disabled={true} /> + </Form.Item> + <Form.Item + label="数据集描述" + field="comment" + rules={[{ maxLength: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }]} + > + <Input.TextArea + placeholder="最多为 200 个字符" + maxLength={MAX_COMMENT_LENGTH} + showWordLimit + /> + </Form.Item> + + <Form.Item wrapperCol={{ span: 24 }} style={{ marginBottom: 0 }}> + <GridRow className={styled.footer_row} justify="end" gap="12"> + <ButtonWithPopconfirm buttonText="取消" onConfirm={closeModal} /> + <Button type="primary" htmlType="submit"> + 保存 + </Button> + </GridRow> + </Form.Item> + </Form> + </Modal> + ); + + function closeModal() { + toggleVisible(false); + } + + async function submit(values: { name: string; comment?: string }) { + if (!form) { + return; + } + const { comment } = values; + const [res, error] = await to(editDataset(id, { comment })); + if (error) { + Message.error(error.message); + return; + } + Message.success('数据集编辑成功'); + closeModal(); + // Force to refresh the dataset list + forceToRefreshQuery(DATASET_LIST_QUERY_KEY); + onSuccess(res); + } + + function afterClose() { + // Clear all fields + form.resetFields(); + } +}; + +export default DatasetEditModal; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetTable/index.less b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetTable/index.less new file mode 100644 index 000000000..8f602d201 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetTable/index.less @@ -0,0 +1,15 @@ +.dataset-list-plus-button{ + margin-right: 4px; + vertical-align: 0.03em !important; +} + +.dataset_list_name{ + display: inline-block; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + vertical-align: middle; + max-width: 120px; + margin-right: 5px; + cursor: pointer; +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/DatasetTable/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetTable/index.tsx new file mode 100644 index 000000000..722d2ae88 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/DatasetTable/index.tsx @@ -0,0 +1,924 @@ +import React, { FC, useEffect, useMemo, useState } from 'react'; +import { + Button, + Input, + Message, + Space, + Statistic, + Table, + TableColumnProps, + Tabs, + Tooltip, + Tag, + Typography, +} from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { formatTimestamp } from 'shared/date'; +import { + Dataset, + DatasetDataType, + DatasetDataTypeText, + DatasetKind, + DatasetKindLabel, + DatasetKindLabelCapitalMapper, + DatasetStateFront, + DatasetTabType, + ParticipantDataset, + DatasetRawPublishStatus, + DatasetProcessedAuthStatus, + DatasetProcessedMyAuthStatus, + ParticipantInfo, + DatasetKindBackEndType, + DatasetType__archived, + DATASET_COPY_CHECKER, +} from 'typings/dataset'; +import { useQuery } from 'react-query'; +import { + deleteDataset, + 
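+  // services/dataset supplies both list fetchers (own side and participant side)
+  // plus the authorize / cancel-authorize mutations wired into the row actions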
fetchDatasetList, + fetchParticipantDatasetList, + authorizeDataset, + cancelAuthorizeDataset, +} from 'services/dataset'; +import { getTotalDataSize } from 'shared/dataset'; +import { noop } from 'lodash-es'; +import DatasetActions, { DatasetAction } from '../DatasetActions'; +import ImportProgress from '../ImportProgress'; +import { Link, Redirect } from 'react-router-dom'; +import { + datasetKindLabelValueMap, + FILTER_OPERATOR_MAPPER, + filterExpressionGenerator, + getSortOrder, + RawAuthStatusOptions, + RawPublishStatusOptions, +} from '../../shared'; +import { expression2Filter } from 'shared/filter'; +import GridRow from 'components/_base/GridRow'; +import { generatePath, useHistory, useParams } from 'react-router'; +import { humanFileSize } from 'shared/file'; +import Modal from 'components/Modal'; +import { + useGetAppFlagValue, + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useGetCurrentProjectType, + useTablePaginationWithUrlState, + useUrlState, + useGetCurrentProjectAbilityConfig, +} from 'hooks'; +import { TIME_INTERVAL, CONSTANTS } from 'shared/constants'; +import WhichParticipant from 'components/WhichParticipant'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import ExportModal from 'components/DatasetExportModal'; +import StatusProgress from 'components/StatusProgress'; +import { DatasetDetailSubTabs } from '../../DatasetDetail'; +import { IconPlus } from '@arco-design/web-react/icon'; +import { transformRegexSpecChar } from 'shared/helpers'; +import DatasetPublishAndRevokeModal from 'components/DatasetPublishAndRevokeModal'; +import { ParticipantType } from 'typings/participant'; +import { PageMeta } from 'typings/app'; +import { PaginationProps } from '@arco-design/web-react/es/Pagination/pagination'; +import { SorterResult } from '@arco-design/web-react/es/Table/interface'; +import { FlagKey } from 'typings/flag'; +import { fetchSysInfo } from 'services/settings'; +import { FilterOp } from 'typings/filter'; +import './index.less'; + +const { Text } = Typography; + +type ColumnsGetterOptions = { + onDeleteClick?: any; + onPublishClick: (dataset: Dataset) => void; + onExportClick: (dataset: Dataset) => void; + onAuthorize: (dataset: Dataset) => void; + onCancelAuthorize: (dataset: Dataset) => void; + onSuccess?: any; + withoutActions?: boolean; +}; + +type TableFilterConfig = Pick<TableColumnProps, 'filters' | 'onFilter'>; + +const FILTER_OPERATOR_MAPPER_List = { + ...FILTER_OPERATOR_MAPPER, + dataset_kind: FilterOp.IN, +}; + +/** + * table columns generator + * TODO: there are too many 「if-else」 and need to chore + * @param projectId + * @param tab + * @param kindLabel + * @param options callback of operation + * @param bcsSupportEnabled Whether to access blockchain + */ +export const getDatasetTableColumns = ( + projectId: ID | undefined, + tab: DatasetTabType | undefined, + kindLabel: DatasetKindLabel, + options: ColumnsGetterOptions, + urlState: Partial<{ + page: number; + pageSize: number; + filter: string; + order_by: string; + state_frontend: DatasetStateFront[]; + }>, + datasetParticipantFilters: TableFilterConfig, + myPureDomainName: string, + bcsSupportEnabled: boolean, + reviewCenterConfiguration: string, +) => { + const onPerformAction = (payload: { action: DatasetAction; dataset: Dataset }) => { + return { + delete: options.onDeleteClick, + 'publish-to-project': options.onPublishClick, + export: options.onExportClick, + authorize: options.onAuthorize, + 'cancel-authorize': options.onCancelAuthorize, + 
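+      // dispatch table keyed by DatasetAction: the lookup below picks the matching
+      // callback and invokes it right away, e.g. { action: 'delete', dataset }
+      // resolves to options.onDeleteClick(dataset)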
}[payload.action](payload.dataset); + }; + const renderStatistic = (val: number | string) => { + return typeof val === 'number' ? ( + <Statistic + groupSeparator={true} + styleValue={{ fontSize: '12px', fontWeight: 400 }} + value={val} + /> + ) : ( + '-' + ); + }; + + const renderParticipantAuth = (val: ParticipantInfo[]) => { + return ( + <> + {val.map((participant, index) => ( + <div key={index}> + {participant.name}{' '} + {participant.auth_status === DatasetProcessedMyAuthStatus.AUTHORIZED + ? '已授权' + : '未授权'} + </div> + ))} + </> + ); + }; + + const getPublishStatusFilters = () => { + if (reviewCenterConfiguration === '{}') { + return [ + { + text: '未发布', + value: DatasetRawPublishStatus.UNPUBLISHED, + }, + { + text: '已发布', + value: DatasetRawPublishStatus.PUBLISHED, + }, + ]; + } + return [ + { + text: '未发布', + value: DatasetRawPublishStatus.UNPUBLISHED, + }, + { + text: '待审批', + value: DatasetRawPublishStatus.TICKET_PENDING, + }, + { + text: '审批拒绝', + value: DatasetRawPublishStatus.TICKET_DECLINED, + }, + { + text: '已发布', + value: DatasetRawPublishStatus.PUBLISHED, + }, + ]; + }; + + const cols: ColumnProps[] = [ + { + title: '名称', + dataIndex: 'name', + key: 'name', + width: 240, + render: (name: string, record: Dataset) => { + const to = `/datasets/${kindLabel}/detail/${record.id}/${DatasetDetailSubTabs.DatasetJobDetail}`; + if (record.dataset_type === DatasetType__archived.STREAMING) { + return ( + <> + <Tooltip + content={ + <Text style={{ color: '#fff' }} copyable> + {name} + </Text> + } + > + {tab === DatasetTabType.PARTICIPANT ? ( + <span className="dataset_list_name">{name}</span> + ) : ( + <Link className="dataset_list_name" to={to}> + {name} + </Link> + )} + </Tooltip> + <Tag color="blue" size="small"> + 增量 + </Tag> + </> + ); + } else { + return tab === DatasetTabType.PARTICIPANT ? name : <Link to={to}>{name}</Link>; + } + }, + }, + tab === DatasetTabType.PARTICIPANT + ? { + title: '合作伙伴名称', + dataIndex: 'participant_id', + width: 180, + ...datasetParticipantFilters, + filteredValue: expression2Filter(urlState.filter!).participant_id, + render(id) { + return <WhichParticipant id={id} />; + }, + } + : { + title: '数据集状态', + dataIndex: 'state_frontend', + width: 180, + filters: [ + { + text: '待处理', + value: DatasetStateFront.PENDING, + }, + { + text: '处理中', + value: DatasetStateFront.PROCESSING, + }, + { + text: '可用', + value: DatasetStateFront.SUCCEEDED, + }, + { + text: '处理失败', + value: DatasetStateFront.FAILED, + }, + { + text: '删除中', + value: DatasetStateFront.DELETING, + }, + ], + filteredValue: urlState.state_frontend, + render: (_: any, record: Dataset) => { + return <ImportProgress dataset={record} />; + }, + }, + { + title: '数据格式', + dataIndex: tab === DatasetTabType.PARTICIPANT ? 'format' : 'dataset_format', + width: 150, + filters: [ + { text: DatasetDataTypeText.STRUCT, value: DatasetDataType.STRUCT }, + { text: DatasetDataTypeText.PICTURE, value: DatasetDataType.PICTURE }, + ], + // Return different values depending on whether the tab is PARTICIPANT + onFilter: (value: string, record: any) => { + if (tab === DatasetTabType.PARTICIPANT) { + return ( + record?.[tab === DatasetTabType.PARTICIPANT ? 'format' : 'dataset_format'] === value + ); + } + return true; + }, + filteredValue: expression2Filter(urlState.filter!)[ + tab === DatasetTabType.PARTICIPANT ? 
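+        // the participant list API names this field `format` while the own-side
+        // list names it `dataset_format`; read the filter under the matching key: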
'format' : 'dataset_format' + ], + render(val: DatasetDataType) { + switch (val) { + case DatasetDataType.STRUCT: + return DatasetDataTypeText.STRUCT; + case DatasetDataType.PICTURE: + return DatasetDataTypeText.PICTURE; + case DatasetDataType.NONE_STRUCTURED: + return DatasetDataTypeText.NONE_STRUCTURED; + } + }, + }, + { + title: ( + <Space> + <span>数据大小</span> + <Tooltip content="数据以系统格式存储的大小,较源文件会有一定变化"> + <IconInfoCircle style={{ color: 'var(--color-text-3)', fontSize: 14 }} /> + </Tooltip> + </Space> + ), + dataIndex: 'file_size', + width: 180, + render: (file_size: number, record: Dataset) => { + const isInternalProcessed = + record.dataset_kind === DatasetKindBackEndType.INTERNAL_PROCESSED; + const isErrorFileSize = file_size === -1; + if (isErrorFileSize) { + return '异常'; + } + return <span>{isInternalProcessed ? '-' : humanFileSize(getTotalDataSize(record))}</span>; + }, + }, + ]; + + if (tab === DatasetTabType.MY) { + cols.push({ + title: '数据集样本量', + dataIndex: 'num_example', + width: 180, + render: (num: number, record: Dataset) => { + const isInternalProcessed = + record.dataset_kind === DatasetKindBackEndType.INTERNAL_PROCESSED; + const isNoCopy = record.import_type === DATASET_COPY_CHECKER.NONE_COPY; + const isErrorFileSize = record.file_size === -1; + if (isErrorFileSize) { + return '异常'; + } + return renderStatistic(isInternalProcessed || isNoCopy ? '' : num); + }, + }); + + if (kindLabel === DatasetKindLabel.RAW) { + if (bcsSupportEnabled) { + cols.push({ + title: '数据价值', + dataIndex: 'total_value', + width: 180, + render: renderStatistic, + }); + } + cols.push( + { + title: '创建者', + dataIndex: 'creator_username', + key: 'creator_username', + width: 100, + render(val: string) { + return val || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + title: '发布状态', + dataIndex: 'publish_frontend_state', + width: 150, + filters: getPublishStatusFilters(), + onFilter: (value: string, record: Dataset) => { + return value === record.publish_frontend_state; + }, + filterMultiple: false, + filteredValue: expression2Filter(urlState.filter!).publish_frontend_state + ? [expression2Filter(urlState.filter!).publish_frontend_state] + : [], + render: (publish_frontend_state: DatasetRawPublishStatus) => { + return ( + <StatusProgress options={RawPublishStatusOptions} status={publish_frontend_state} /> + ); + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + width: 180, + sortOrder: getSortOrder(urlState, 'created_at'), + sorter(a: Dataset, b: Dataset) { + return a.created_at - b.created_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + ); + } else { + cols.push( + { + title: '创建者', + dataIndex: 'creator_username', + key: 'creator_username', + width: 100, + render(val: string) { + return val || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + title: '授权状态', + dataIndex: 'auth_frontend_state', + width: 150, + render: (auth_frontend_state: DatasetProcessedAuthStatus, record: Dataset) => { + // 处于待授权状态时展示hover + if (auth_frontend_state === DatasetProcessedAuthStatus.AUTH_PENDING) { + const participants_info = Object.entries( + record.participants_info.participants_map, + ).map(([key, value]) => ({ + name: key === myPureDomainName ? 
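+                  // our own pure domain name renders as the "our side" label;
+                  // every other key is a participant's domain name: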
'我方' : key, + auth_status: value['auth_status'], + })); + return ( + <StatusProgress + options={RawAuthStatusOptions} + status={auth_frontend_state || DatasetProcessedAuthStatus.AUTH_PENDING} + isTip={true} + toolTipContent={renderParticipantAuth(participants_info || [])} + /> + ); + } + return ( + <StatusProgress + options={RawAuthStatusOptions} + status={auth_frontend_state || DatasetProcessedAuthStatus.TICKET_PENDING} + /> + ); + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + width: 180, + sortOrder: getSortOrder(urlState, 'created_at'), + sorter(a: Dataset, b: Dataset) { + return a.created_at - b.created_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + ); + } + } + + if (tab === DatasetTabType.PARTICIPANT) { + if (bcsSupportEnabled) { + cols.push({ + title: '使用单价', + dataIndex: 'value', + width: 180, + sortOrder: getSortOrder(urlState, 'value'), + sorter(a: ParticipantDataset, b: ParticipantDataset) { + return (a.value || 0) - (b.value || 0); + }, + render: renderStatistic, + }); + } + cols.push({ + title: '最近更新', + dataIndex: 'updated_at', + width: 200, + sortOrder: getSortOrder(urlState, 'updated_at'), + sorter(a: ParticipantDataset, b: ParticipantDataset) { + return a.updated_at - b.updated_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }); + } + + if (!options.withoutActions && tab === DatasetTabType.MY) { + cols.push({ + title: '操作', + dataIndex: 'operation', + name: 'operation', + fixed: 'right', + width: 200, + render: (_: number, record: Dataset) => ( + <DatasetActions + onPerformAction={onPerformAction} + dataset={record} + type="text" + kindLabel={kindLabel} + /> + ), + } as any); + } + + return cols; +}; + +export const DATASET_LIST_QUERY_KEY = 'fetchDatasetList'; + +export const GlobalDatasetIdToErrorMessageMapContext = React.createContext<{ + [key: number]: string; +}>({}); + +type Props = { + dataset_kind: DatasetKind; +}; + +const DatasetListTable: FC<Props> = ({ dataset_kind }) => { + const { kind_label, tab } = useParams<{ + kind_label: DatasetKindLabel; + tab?: DatasetTabType; + }>(); + const isProcessedDataset = kind_label === DatasetKindLabel.PROCESSED; + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const projectType = useGetCurrentProjectType(); + const participantList = useGetCurrentProjectParticipantList(); + const { hasTrusted } = useGetCurrentProjectAbilityConfig(); + const [currentExportId, setCurrentExportId] = useState<ID>(); + const [isShowExportModal, setIsShowExportModal] = useState(false); + const [isShowPublishModal, setIsShowPublishModal] = useState(false); + const [total, setTotal] = useState(0); + const [pageTotal, setPageTotal] = useState(0); + const [selectDataset, setSelectDataset] = useState<Dataset>(); + const [datasetIdToErrorMessageMap, setDatasetIdToErrorMessageMap] = useState<{ + [key: number]: string; + }>({}); + const [urlState, setUrlState] = useUrlState<any>({ + page: 1, + pageSize: 10, + filter: filterExpressionGenerator( + { + dataset_kind: + kind_label === DatasetKindLabel.RAW + ? [DatasetKindLabelCapitalMapper[kind_label]] + : [ + DatasetKindLabelCapitalMapper[kind_label], + DatasetKindBackEndType.INTERNAL_PROCESSED, + ], + project_id: projectId, + auth_status: isProcessedDataset + ? 
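+          // the processed-dataset tab starts out filtered to entries we have
+          // authorized or withdrawn; raw datasets take no auth_status filter: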
[DatasetProcessedMyAuthStatus.AUTHORIZED, DatasetProcessedMyAuthStatus.WITHDRAW] + : undefined, + }, + FILTER_OPERATOR_MAPPER_List, + ), + order_by: '', + }); + const { paginationProps } = useTablePaginationWithUrlState(); + const bcs_support_enabled = useGetAppFlagValue(FlagKey.BCS_SUPPORT_ENABLED); + const review_center_configuration = useGetAppFlagValue(FlagKey.REVIEW_CENTER_CONFIGURATION); + const isLightClient = projectType === ParticipantType.LIGHT_CLIENT; + const sysInfoQuery = useQuery(['fetchSysInfo'], () => fetchSysInfo(), { + retry: 2, + refetchOnWindowFocus: false, + enabled: Boolean(isProcessedDataset), + }); + + const myPureDomainName = useMemo<string>(() => { + return sysInfoQuery.data?.data?.pure_domain_name ?? ''; + }, [sysInfoQuery.data]); + const listQuery = useQuery<{ + data: Array<Dataset | ParticipantDataset>; + page_meta?: PageMeta; + }>( + [ + DATASET_LIST_QUERY_KEY, + tab === DatasetTabType.MY ? urlState : null, + projectId, + kind_label, + tab, + ], + () => { + if (!projectId!) { + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }); + } + + if (!tab || tab === DatasetTabType.MY) { + return fetchDatasetList({ + page: urlState.page, + page_size: urlState.pageSize, + filter: urlState.filter, + state_frontend: urlState.state_frontend, + // when order_by is empty and set order_by = 'created_at desc' default + order_by: urlState.order_by || 'created_at desc', + }); + } + return fetchParticipantDatasetList(projectId!, { + page: urlState.page, + page_size: urlState.pageSize, + kind: DatasetKindLabel.RAW, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + keepPreviousData: true, + refetchOnWindowFocus: false, + }, + ); + + const datasetParticipantFilters: TableFilterConfig = useMemo(() => { + let filters: { text: string; value: any }[] = []; + if (Array.isArray(participantList) && participantList.length) { + filters = participantList.map((item) => ({ + text: item.name, + value: '' + item.id, + })); + } + return { + filters, + onFilter: (value: string, record: ParticipantDataset) => { + return value === '' + record.participant_id; + }, + }; + }, [participantList]); + + const list = useMemo(() => { + if (!listQuery.data?.data) return []; + + let list = listQuery.data.data || []; + // Because participant_datasets api don't support search by name, we need to filter by web + if (tab === DatasetTabType.PARTICIPANT) { + const { name, format, participant_id } = expression2Filter(urlState.filter); + if (name) { + const regx = new RegExp(`^.*${transformRegexSpecChar(name)}.*$`); // support fuzzy matching + list = list.filter((item) => regx.test(item.name)); + } + if (format) { + list = list.filter((item: any) => format.includes(item.format)); + } + if (participant_id) { + list = list.filter((item: any) => participant_id.includes(String(item.participant_id))); + } + setTotal(list.length); + setPageTotal(Math.ceil(list.length / paginationProps.pageSize)); + } else { + const { page_meta, data = [] } = listQuery.data; + setTotal(page_meta?.total_items ?? data.length); + setPageTotal(page_meta?.total_pages ?? Math.ceil(data.length / paginationProps.pageSize)); + } + + return list; + }, [listQuery.data, urlState.filter, tab, paginationProps.pageSize]); + + const pagination = useMemo(() => { + return pageTotal <= 1 + ? 
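+      // Arco's <Table> hides the pager entirely when pagination is `false`,
+      // so a single page of results renders without pagination controls: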
false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + const kindLabel = datasetKindLabelValueMap[dataset_kind]; + // 有可信分析能力不展示结果数据创建按钮 + const isHideCreateBtn = hasTrusted && kind_label === DatasetKindLabel.PROCESSED; + + useEffect(() => { + setUrlState({ + ...urlState, + filter: filterExpressionGenerator( + { + ...expression2Filter(urlState.filter), + project_id: projectId, + }, + FILTER_OPERATOR_MAPPER_List, + ), + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [projectId]); + + return ( + <> + <GridRow justify="space-between" align="center"> + <Space> + {!isHideCreateBtn && ( + <Button + className={'custom-operation-button'} + type="primary" + onClick={goCreateDataset} + icon={<IconPlus />} + > + 创建数据集 + </Button> + )} + {kind_label !== DatasetKindLabel.PROCESSED && ( + <Tabs + className="custom-tabs" + type="text" + defaultActiveTab={tab ?? DatasetTabType.MY} + onChange={onTabChange} + > + {Object.entries(DatasetTabType).map((item) => { + const [, value] = item; + const title = value === DatasetTabType.MY ? '我方数据集' : '合作伙伴数据集'; + return ( + <Tabs.TabPane + disabled={isLightClient && value === DatasetTabType.PARTICIPANT} + key={value} + title={title} + /> + ); + })} + </Tabs> + )} + </Space> + <Space> + <Input.Search + className={'custom-input'} + placeholder="输入数据集名称搜索" + defaultValue={expression2Filter(urlState.filter).name} + onSearch={onSearch} + onClear={() => onSearch('')} + allowClear + /> + </Space> + </GridRow> + <GlobalDatasetIdToErrorMessageMapContext.Provider value={datasetIdToErrorMessageMap}> + <Table + className={'custom-table custom-table-left-side-filter'} + loading={listQuery.isFetching} + data={list || []} + scroll={{ x: '100%' }} + columns={getDatasetTableColumns( + projectId, + tab, + kindLabel, + { + onSuccess: noop, + onDeleteClick, + onPublishClick, + onExportClick, + onAuthorize, + onCancelAuthorize, + }, + urlState, + datasetParticipantFilters, + myPureDomainName, + bcs_support_enabled as boolean, + review_center_configuration as string, + )} + rowKey="uuid" + pagination={pagination} + onChange={onTableChange} + /> + </GlobalDatasetIdToErrorMessageMapContext.Provider> + {!tab ? 
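+      // no :tab segment in the URL yet, so redirect to the default "my datasets" tab: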
( + <Redirect + to={generatePath('/datasets/:kind_label/:tab', { + kind_label, + tab: DatasetTabType.MY, + })} + /> + ) : null} + + <ExportModal + id={currentExportId} + visible={isShowExportModal} + onCancel={onExportModalClose} + onSuccess={onExportSuccess} + /> + + <DatasetPublishAndRevokeModal + onCancel={onPublishCancel} + onSuccess={onPublishSuccess} + dataset={selectDataset} + visible={isShowPublishModal} + /> + </> + ); + + function goCreateDataset() { + // 统一在路由区分新旧数据集入口 + history.push(`/datasets/${kindLabel}/create`); + } + + function onSearch(value: string) { + const filters = expression2Filter(urlState.filter); + filters.name = value; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filters, FILTER_OPERATOR_MAPPER_List), + })); + } + + function onTabChange(tab: string) { + history.push( + generatePath('/datasets/:kind_label/:tab', { + kind_label, + tab, + }), + ); + } + + function onDeleteClick(dataset: Dataset) { + Modal.delete({ + title: '确定要删除吗?', + content: '删除操作无法恢复,请谨慎操作', + onOk: async () => { + try { + const resp = await deleteDataset(dataset.id); + // If delete success, HTTP response status code is 204, resp is empty string + const isDeleteSuccess = !resp; + if (isDeleteSuccess) { + Message.success('删除成功'); + listQuery.refetch(); + setDatasetIdToErrorMessageMap((prevState) => { + const copyState = { ...prevState }; + delete copyState[dataset.id as number]; + return copyState; + }); + } else { + const errorMessage = resp?.message ?? '删除失败'; + Message.error(errorMessage!); + setDatasetIdToErrorMessageMap((prevState) => ({ + ...prevState, + [dataset.id]: errorMessage, + })); + } + } catch (error) { + Message.error(error.message); + } + }, + }); + } + + function onTableChange( + pagination: PaginationProps, + sorter: SorterResult, + filters: Partial<Record<keyof Dataset | keyof ParticipantDataset, string[]>>, + extra: { + currentData: Array<Dataset | ParticipantDataset>; + action: 'paginate' | 'sort' | 'filter'; + }, + ) { + switch (extra.action) { + case 'filter': + const filtersCopy = { + ...filters, + name: expression2Filter(urlState.filter).name, + project_id: projectId, + dataset_kind: + kind_label === DatasetKindLabel.RAW + ? [DatasetKindLabelCapitalMapper[kind_label]] + : [ + DatasetKindLabelCapitalMapper[kind_label], + DatasetKindBackEndType.INTERNAL_PROCESSED, + ], // 可信中心的结果数据集也进行展示 + publish_frontend_state: filters.publish_frontend_state?.[0] ?? undefined, + }; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filtersCopy, FILTER_OPERATOR_MAPPER_List), + state_frontend: filtersCopy.state_frontend ?? undefined, + })); + break; + case 'sort': + let orderValue = ''; + if (sorter.direction) { + orderValue = sorter.direction === 'ascend' ? 'asc' : 'desc'; + } + setUrlState((prevState) => ({ + ...prevState, + order_by: orderValue ? 
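+          // serialize the sorter into the `field asc|desc` form the list API
+          // expects (the query falls back to 'created_at desc' when empty):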
`${sorter.field} ${orderValue}` : '', + })); + break; + default: + break; + } + } + + function onPublishClick(dataset: Dataset) { + setSelectDataset(dataset); + setIsShowPublishModal(true); + } + function onPublishSuccess() { + setIsShowPublishModal(false); + listQuery.refetch(); + } + function onPublishCancel() { + setIsShowPublishModal(false); + } + async function onAuthorize(dataset: Dataset) { + try { + await authorizeDataset(dataset.id); + listQuery.refetch(); + } catch (err: any) { + Message.error(err.message); + } + } + + async function onCancelAuthorize(dataset: Dataset) { + try { + await cancelAuthorizeDataset(dataset.id); + listQuery.refetch(); + } catch (err: any) { + Message.error(err.message); + } + } + function onExportClick(dataset: Dataset) { + setCurrentExportId(dataset.id); + setIsShowExportModal(true); + } + function onExportSuccess(datasetId: ID, datasetJobId: ID) { + onExportModalClose(); + if (!datasetJobId && datasetJobId !== 0) { + Message.info('导出任务ID缺失,请手动跳转「任务管理」查看详情'); + } else { + history.push(`/datasets/${datasetId}/new/job_detail/${datasetJobId}`); + } + } + function onExportModalClose() { + setCurrentExportId(undefined); + setIsShowExportModal(false); + } +}; + +export default DatasetListTable; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress.tsx deleted file mode 100644 index 7e90da109..000000000 --- a/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { BatchState, DataBatch, Dataset } from 'typings/dataset'; -import StateIndicator from 'components/StateIndicator'; -import { getImportedProportion, getImportStage, isImporting } from 'shared/dataset'; -import { useTranslation } from 'react-i18next'; - -const ProgressBar = styled.div` - position: relative; - width: 100px; - height: 4px; - margin-top: 5px; - margin-left: 15px; - border-radius: 10px; - background-color: var(--backgroundColorGray); - overflow: hidden; - - &::before { - content: ''; - position: absolute; - width: inherit; - height: inherit; - border-radius: inherit; - background-color: var(--primaryColor); - transform: translateX(var(--progress, -100%)); - } -`; - -const ImportProgress: FC<{ dataset: Dataset }> = ({ dataset }) => { - const { total, imported } = getImportedProportion(dataset); - const proportion = Math.floor((imported / total) * 100); - - return ( - <div data-name="dataset-import-progress"> - <StateIndicator {...getImportStage(dataset)} /> - {isImporting(dataset) && ( - <ProgressBar style={{ '--progress': `${proportion - 100}%` } as any} /> - )} - </div> - ); -}; - -export const DataBatchImportProgress: FC<{ batch: DataBatch }> = ({ batch }) => { - const { t } = useTranslation(); - const { state, num_file, num_imported_file } = batch; - const proportion = Math.floor((num_imported_file / num_file) * 100); - - const isImporting = state === BatchState.IMPORTING; - - const indicatorPorps: React.ComponentProps<typeof StateIndicator> = ({ - [BatchState.IMPORTING]: { - type: 'processing', - text: t('dataset.state_importing', { total: num_file, imported: num_imported_file }), - }, - [BatchState.FAILED]: { type: 'error', text: t('dataset.state_error') }, - [BatchState.SUCCESS]: { type: 'success', text: t('dataset.state_available') }, - [BatchState.NEW]: { type: 'success', text: t('dataset.state_available') }, - } as const)[state]; - - return ( - <div 
data-name="data-batch-import-progress"> - <StateIndicator {...indicatorPorps} /> - - {isImporting && <ProgressBar style={{ '--progress': `${proportion - 100}%` } as any} />} - </div> - ); -}; - -export default ImportProgress; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress/index.less b/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress/index.less new file mode 100644 index 000000000..29d0965d2 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress/index.less @@ -0,0 +1,7 @@ +.import-progress-wrapper{ + display: flex; + justify-content: left; + .dataset-empty-tag{ + margin-left: 4px; + } +} diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress/index.tsx new file mode 100644 index 000000000..8cfd3879a --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/ImportProgress/index.tsx @@ -0,0 +1,48 @@ +import React, { FC, useContext } from 'react'; +import { BatchState, DataBatch, Dataset, DatasetKindBackEndType } from 'typings/dataset'; +import StateIndicator from 'components/StateIndicator'; +import { getImportStage, isFrontendSucceeded } from 'shared/dataset'; +import { GlobalDatasetIdToErrorMessageMapContext } from '../DatasetTable'; +import { Tag } from '@arco-design/web-react'; +import './index.less'; + +const ImportProgress: FC<{ dataset: Dataset; tag?: boolean }> = ({ dataset, tag = false }) => { + const { type, text, tip } = getImportStage(dataset); + const isEmptyDataset = (dataset?.file_size || 0) <= 0 && isFrontendSucceeded(dataset); + const isInternalProcessed = dataset.dataset_kind === DatasetKindBackEndType.INTERNAL_PROCESSED; + const globalDatasetIdToErrorMessageMap = useContext(GlobalDatasetIdToErrorMessageMapContext); + const errorMessage = globalDatasetIdToErrorMessageMap[dataset.id as number] ?? 
''; + + return ( + <div data-name="dataset-import-progress" className={'import-progress-wrapper'}> + <StateIndicator type={type} text={text} tip={errorMessage || tip} tag={tag} /> + {isEmptyDataset && !isInternalProcessed && ( + <Tag className={'dataset-empty-tag'} color="purple" size="small"> + 空集 + </Tag> + )} + </div> + ); +}; + +export const DataBatchImportProgress: FC<{ batch: DataBatch }> = ({ batch }) => { + const { state } = batch; + + const indicatorPorps: React.ComponentProps<typeof StateIndicator> = ({ + [BatchState.IMPORTING]: { + type: 'processing', + text: '导入中', + }, + [BatchState.FAILED]: { type: 'error', text: '导入失败' }, + [BatchState.SUCCESS]: { type: 'success', text: '可用' }, + [BatchState.NEW]: { type: 'success', text: '可用' }, + } as const)[state]; + + return ( + <div data-name="data-batch-import-progress"> + <StateIndicator {...indicatorPorps} /> + </div> + ); +}; + +export default ImportProgress; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/ProcessedDatasetTodoPopover/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/ProcessedDatasetTodoPopover/index.tsx new file mode 100644 index 000000000..180b315e1 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/DatasetList/ProcessedDatasetTodoPopover/index.tsx @@ -0,0 +1,86 @@ +import React, { useMemo } from 'react'; +import { Message } from '@arco-design/web-react'; +import { useQuery } from 'react-query'; +import { Dataset } from 'typings/dataset'; +import { fetchDatasetList } from 'services/dataset'; +import { useHistory } from 'react-router-dom'; +import { useGetCurrentProjectId } from 'hooks'; +import TodoPopover from 'components/TodoPopover'; +import { TIME_INTERVAL } from 'shared/constants'; +import { ApprovalProps } from 'components/TodoPopover'; +import { FILTER_OPERATOR_MAPPER, filterExpressionGenerator } from '../../shared'; +import { DatasetProcessedMyAuthStatus } from 'typings/dataset'; + +type ProcessedDatasetProps<T = any> = Omit<ApprovalProps<T>, 'list'> & {}; + +function ProcessedDatasetTodoPopover({ + dateField = 'created_at', + creatorField = 'creator', + contentField = 'name', + ...restProps +}: ProcessedDatasetProps) { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + + const { isError, data, error, isFetching } = useQuery<{ + data: Array<Dataset>; + }>( + ['fetchDatasetList', projectId], + () => { + if (!projectId!) 
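+      // guard: without a selected project, resolve with an empty list instead of
+      // hitting the API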
{ + Message.info('请选择工作区'); + return Promise.resolve({ data: [] }); + } + return fetchDatasetList({ + filter: filterExpressionGenerator( + { + dataset_kind: 'PROCESSED', + project_id: projectId, + auth_status: [DatasetProcessedMyAuthStatus.PENDING], + }, + FILTER_OPERATOR_MAPPER, + ), + order_by: 'created_at desc', + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + const todoList = useMemo(() => { + if (!data) { + return []; + } + + const list = data.data || []; + + return list; + }, [data]); + + return ( + <TodoPopover.ModelCenter + isLoading={isFetching} + list={todoList} + dateField={dateField} + contentField={contentField} + buttonText={`${todoList.length}条待处理结果数据集`} + title="待处理数据集授权申请" + contentVerb="发起了" + contentSuffix="数据集授权申请" + onClick={onClick} + {...restProps} + /> + ); + + function onClick(item: Dataset) { + history.push(`/datasets/processed/authorize/${item.id}`); + } +} + +export default ProcessedDatasetTodoPopover; diff --git a/web_console_v2/client/src/views/Datasets/DatasetList/index.tsx b/web_console_v2/client/src/views/Datasets/DatasetList/index.tsx index a10df1b6f..c01d69a94 100644 --- a/web_console_v2/client/src/views/Datasets/DatasetList/index.tsx +++ b/web_console_v2/client/src/views/Datasets/DatasetList/index.tsx @@ -1,246 +1,31 @@ -import React, { FC, useState } from 'react'; -import ListPageLayout from 'components/SharedPageLayout'; -import { useTranslation } from 'react-i18next'; -import { Row, Button, Col, Form, Input, Table, message } from 'antd'; -import { useHistory } from 'react-router-dom'; -import i18n from 'i18n'; -import { formatTimestamp } from 'shared/date'; -import { Dataset } from 'typings/dataset'; -import { useQuery } from 'react-query'; -import { fetchDatasetList } from 'services/dataset'; -import styled from 'styled-components'; -import NoResult from 'components/NoResult'; -import ImportProgress from './ImportProgress'; -import { getTotalDataSize, isImporting } from 'shared/dataset'; -import DatasetActions, { DatasetAction } from './DatasetActions'; -import { noop } from 'lodash'; -import BatchImportRecordsModal from './BatchImportRecordsModal'; -import { useToggle } from 'react-use'; -import AddBatchModal from './AddBatchModal'; -import { copyToClipboard } from 'shared/helpers'; -import WhichProject from 'components/WhichProject'; -import { useRecoilValue } from 'recoil'; -import { projectState } from 'stores/project'; +import SharedPageLayout from 'components/SharedPageLayout'; +import React, { FC } from 'react'; +import { Redirect, useParams } from 'react-router'; +import ProcessedDatasetTodoPopover from './ProcessedDatasetTodoPopover'; +import { DatasetKindLabel, DatasetTabType } from 'typings/dataset'; +import { datasetKindLabelValueMap } from '../shared'; +import DatasetTable from './DatasetTable'; -const ListContainer = styled.div` - display: flex; - flex: 1; -`; +export const DATASET_LIST_QUERY_KEY = 'fetchRawDatasetList'; -type ColumnsGetterOptions = { - onViewReordsClick?: any; - onDeleteClick?: any; - onAddDataBatchClick?: any; - onCopyPathClick?: any; - onSuccess?: any; - withoutActions?: boolean; -}; -export const getDatasetTableColumns = (options: ColumnsGetterOptions) => { - const onPerformAction = (payload: { action: DatasetAction; dataset: Dataset }) => { - return { - delete: options.onDeleteClick, - 'add-batch': options.onAddDataBatchClick, - 'view-records': options.onViewReordsClick, - 'copy-path': options.onCopyPathClick, 
- }[payload.action](payload.dataset); - }; +const DatasetList: FC = (props) => { + const { kind_label } = useParams<{ kind_label: DatasetKindLabel; tab?: DatasetTabType }>(); - const cols = [ - { - title: i18n.t('dataset.col_name'), - dataIndex: 'name', - key: 'name', - ellipsis: true, - render: (name: string) => { - return <strong>{name}</strong>; - }, - }, - { - title: i18n.t('dataset.col_type'), - dataIndex: 'dataset_type', - name: 'dataset_type', - width: 150, - }, - { - title: i18n.t('dataset.status'), - dataIndex: 'state', - name: 'state', - width: 210, - render: (_: any, record: Dataset) => { - return <ImportProgress dataset={record} />; - }, - }, - { - title: i18n.t('dataset.col_files_size'), - dataIndex: 'file_size', - name: 'file_size', - width: 130, - render: (_: any, record: Dataset) => { - return <span>{getTotalDataSize(record).toLocaleString('en')} KB</span>; - }, - }, - { - title: i18n.t('workflow.col_project'), - dataIndex: 'project_id', - name: 'project_id', - width: 150, - render: (project_id: number) => <WhichProject id={project_id} />, - }, - { - title: i18n.t('created_at'), - dataIndex: 'created_at', - name: 'created_at', - width: 190, - render: (date: number) => <div>{formatTimestamp(date)}</div>, - }, - ]; - if (!options.withoutActions) { - cols.push({ - title: i18n.t('operation'), - dataIndex: 'operation', - name: 'operation', - fixed: 'right', - render: (_: number, record: Dataset) => ( - <DatasetActions onPerformAction={onPerformAction} dataset={record} type="link" /> - ), - } as any); + if (!kind_label) { + return <Redirect to={`/datasets/${DatasetKindLabel.RAW}`} />; } - return cols; -}; - -export const DATASET_LIST_QUERY_KEY = 'datasetList'; - -const DatasetList: FC = () => { - const { t } = useTranslation(); - const [form] = Form.useForm(); - const history = useHistory(); - const [params, setParams] = useState({ keyword: '' }); - const [recordsVisible, toggleRecordsVisible] = useToggle(false); - const [addBatchVisible, toggleAddBatchVisible] = useToggle(false); - const [curDataset, setCurDataset] = useState<Dataset>(); - const project = useRecoilValue(projectState); - - const listQuery = useQuery( - [DATASET_LIST_QUERY_KEY, params.keyword, project.current?.id], - () => fetchDatasetList({ ...params, project: project.current?.id }), - { - retry: 2, - refetchInterval: 90 * 1000, // auto refresh every 1.5 min - }, - ); - - const isEmpty = !listQuery.isFetching && listQuery.data?.data.length === 0; - + const kind = datasetKindLabelValueMap[kind_label]; + const isProcess = kind_label === DatasetKindLabel.PROCESSED; return ( - <ListPageLayout title={t('menu.label_datasets')}> - <Row gutter={16} justify="space-between" align="middle"> - <Col> - <Button size="large" type="primary" onClick={goCreate}> - {t('dataset.btn_create')} - </Button> - </Col> - <Col> - <Form initialValues={{ ...params }} layout="inline" form={form} onFinish={onSearch}> - <Form.Item name="keyword"> - <Input.Search - placeholder={t('dataset.placeholder_name_searchbox')} - onPressEnter={form.submit} - /> - </Form.Item> - </Form> - </Col> - </Row> - - <ListContainer> - {isEmpty ? 
( - <NoResult text={t('dataset.no_result')} to="/datasets/create" /> - ) : ( - <Table - loading={listQuery.isFetching} - dataSource={listQuery.data?.data || []} - scroll={{ x: '100%' }} - columns={getDatasetTableColumns({ - onSuccess: noop, - onViewReordsClick, - onAddDataBatchClick, - onDeleteClick, - onCopyPathClick, - })} - rowKey="name" - /> - )} - </ListContainer> - - <BatchImportRecordsModal - records={curDataset?.data_batches || []} - visible={recordsVisible} - toggleVisible={toggleRecordsVisible} - onOk={showAddBatchModal} - /> - - <AddBatchModal - datasetType={curDataset?.dataset_type} - datasetId={curDataset?.id} - visible={addBatchVisible} - toggleVisible={toggleAddBatchVisible} - onSuccess={onAddBatchSuccess} - /> - </ListPageLayout> + <SharedPageLayout + title={isProcess ? '结果数据集' : '原始数据集'} + key={kind} + rightTitle={isProcess && <ProcessedDatasetTodoPopover />} + > + <DatasetTable dataset_kind={kind} /> + </SharedPageLayout> ); - - function onSearch(values: any) { - setParams(values); - } - function onViewReordsClick(dataset: Dataset) { - setCurDataset(dataset); - toggleRecordsVisible(true); - } - function onCopyPathClick(dataset: Dataset) { - const okay = copyToClipboard(dataset.path); - if (okay) { - message.success(t('app.copy_success')); - } - } - function onAddDataBatchClick(dataset: Dataset) { - if (!checkIfHasImportingBatches(dataset)) { - return; - } - - setCurDataset(dataset); - toggleAddBatchVisible(true); - } - function onAddBatchSuccess() { - toggleAddBatchVisible(false); - listQuery.refetch(); - } - - function showAddBatchModal() { - if (!curDataset) return; - - if (!checkIfHasImportingBatches(curDataset)) { - return; - } - - toggleRecordsVisible(false); - toggleAddBatchVisible(true); - } - function onDeleteClick() { - // TODO: coming soon - message.info('Coming soon'); - } - function goCreate() { - history.push('/datasets/create'); - } - /** DOESN'T SUPPORT add batches for dataset which has unfinished importing YET */ - function checkIfHasImportingBatches(dataset: Dataset) { - if (isImporting(dataset)) { - message.info(t('dataset.msg_is_importing')); - return false; - } - - return true; - } }; export default DatasetList; diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobBasicInfo/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobBasicInfo/index.module.less new file mode 100644 index 000000000..100f52691 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobBasicInfo/index.module.less @@ -0,0 +1,3 @@ +.job_basic_info{ + margin: 0 20px; +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobBasicInfo/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobBasicInfo/index.tsx new file mode 100644 index 000000000..9a52acda9 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobBasicInfo/index.tsx @@ -0,0 +1,40 @@ +import React, { useMemo } from 'react'; +import { formatTimestamp } from 'shared/date'; +import PropertyList from 'components/PropertyList'; +import WhichParticipant from 'components/WhichParticipant'; +import styled from './index.module.less'; + +type TJobBasicInfo = { + coordinatorId: ID; + createTime: DateTime; + creator_username: string; +}; + +export default function JobBasicInfo(prop: TJobBasicInfo) { + const { coordinatorId, createTime = 0, creator_username } = prop; + const basicInfo = useMemo(() => { + function TimeRender(prop: { time: DateTime }) { + 
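+      // timestamps of 0 (or negative) mean "not set" and render as a dash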
const { time } = prop; + return <span>{time <= 0 ? '-' : formatTimestamp(time)}</span>; + } + return [ + { + label: '任务发起方', + value: coordinatorId === 0 ? '本方' : <WhichParticipant id={coordinatorId} />, + }, + { + label: '创建者', + value: creator_username, + }, + { + label: '创建时间', + value: <TimeRender time={createTime} />, + }, + ]; + }, [coordinatorId, createTime, creator_username]); + return ( + <div className={styled.job_basic_info}> + <PropertyList properties={basicInfo} cols={4} /> + </div> + ); +} diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobParamsPanel/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobParamsPanel/index.module.less new file mode 100644 index 000000000..64847e3a6 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobParamsPanel/index.module.less @@ -0,0 +1,36 @@ +.params_panel_header{ + display: flex; + flex-direction: row; + justify-content: flex-start; + margin: 20px 0; + .title{ + font-size: 14px; + line-height: 22px; + margin-right: 8px; + } + :global(.arco-radio-button) { + &:after { + background-color: unset; + } + } +} +.params_panel_container{ + display: flex; + flex-direction: row; + justify-content: flex-start; + .params_panel_item_wrapper{ + flex: 1; + margin-right: 20px; + &:last-child{ + margin-right: 0; + } + } +} +.params_panel_label{ + font-weight: 400; + font-size: 12px; + line-height: 18px; + color: #4e5969; + display: inline-block; + height: 18px; +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobParamsPanel/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobParamsPanel/index.tsx new file mode 100644 index 000000000..6fa427cd4 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobParamsPanel/index.tsx @@ -0,0 +1,84 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Button, Dropdown, Menu, Tooltip } from '@arco-design/web-react'; +import { IconDown } from '@arco-design/web-react/icon'; +import { GlobalConfigs } from 'typings/dataset'; +import ClickToCopy from 'components/ClickToCopy'; +import { TAG_MAPPER } from '../../../shared'; +import { Tag } from 'typings/workflow'; +import PropertyList from 'components/PropertyList'; +import styled from './index.module.less'; + +type TProps = { + globalConfigs: GlobalConfigs; +}; +// 配置参数信息展示的 +const paramsTypes = [Tag.RESOURCE_ALLOCATION, Tag.INPUT_PARAM]; +const JobParamsPanel: FC<TProps> = (props: TProps) => { + const { globalConfigs = {} } = props; + const [selected, setSelected] = useState(''); + + const selectRole = useMemo(() => { + if (!selected && globalConfigs && Object.keys(globalConfigs).length) { + return Object.keys(globalConfigs)[0]; + } + return selected; + }, [globalConfigs, selected]); + + const panelInfos = useMemo(() => { + return paramsTypes.map((paramsType) => { + const paramInfo: any = { + title: TAG_MAPPER[paramsType], + properties: [], + }; + if (!globalConfigs || !selectRole) { + return []; + } + paramInfo.properties = globalConfigs[selectRole].variables + .filter((variable) => variable.tag === paramsType) + .map((item) => ({ + label: item.name, + value: ( + <Tooltip + position="left" + content={<ClickToCopy text={item.value}>{item.value}</ClickToCopy>} + > + <span>{item.value}</span> + </Tooltip> + ), + })); + return paramInfo; + }); + }, [globalConfigs, selectRole]); + + 
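+  // dropdown menu listing every party present in globalConfigs; picking an item
+  // switches whose variables are rendered in the panels below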
const dropList = () => { + return ( + <Menu onClickMenuItem={(key) => setSelected(key)}> + {Object.keys(globalConfigs).map((item) => { + return <Menu.Item key={item}>{item}</Menu.Item>; + })} + </Menu> + ); + }; + return ( + <div> + <div className={styled.params_panel_header}> + <h3 className={styled.title}>参数信息</h3> + <Dropdown droplist={dropList()} position="bl"> + <Button size={'mini'} type="text"> + {selectRole} <IconDown /> + </Button> + </Dropdown> + </div> + <div className={styled.params_panel_container}> + {panelInfos.map((item, index) => ( + <div className={styled.params_panel_item_wrapper} key={index}> + <span className={styled.params_panel_label}>{item.title}</span> + <PropertyList properties={item.properties} cols={2} /> + </div> + ))} + </div> + </div> + ); +}; + +export default JobParamsPanel; diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobStageItemContent/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobStageItemContent/index.module.less new file mode 100644 index 000000000..841d3e5ae --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobStageItemContent/index.module.less @@ -0,0 +1,13 @@ +@import '~styles/mixins.less'; +.job_stage_item_wrapper{ + padding: 0 20px; + .title{ + font-size: 14px; + line-height: 22px; + margin: 20px 0; + } +} + +.export_path_text{ + .MixinEllipsis(); +} diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobStageItemContent/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobStageItemContent/index.tsx new file mode 100644 index 000000000..30d02c7df --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/JobStageItemContent/index.tsx @@ -0,0 +1,224 @@ +import React, { useMemo, FC, ReactElement } from 'react'; +import { useQuery } from 'react-query'; +import { useGetCurrentProjectId } from 'hooks'; +import { fetchDatasetJobStageById, fetchDataBatchById } from 'services/dataset'; +import { useParams } from 'react-router'; +import { Message, Spin, Tooltip } from '@arco-design/web-react'; +import PropertyList from 'components/PropertyList'; +import WorkFlowPods from '../WorkFlowPods'; +import JobParamsPanel from '../JobParamsPanel'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import { DatasetJobState, DatasetKindLabel } from 'typings/dataset'; +import { getIntersectionRate } from 'shared/dataset'; +import { formatTimeCount, formatTimestamp } from 'shared/date'; +import { Link } from 'react-router-dom'; +import dayjs from 'dayjs'; +import CountTime from 'components/CountTime'; +import { DatasetDetailSubTabs } from 'views/Datasets/DatasetDetail'; +import ClickToCopy from 'components/ClickToCopy'; +import { CONSTANTS } from 'shared/constants'; +import styled from './index.module.less'; + +type Props = { + jobStageId: ID; + batchId: ID; + isProcessed: boolean; + isJoin: boolean; + isExport: boolean; + importDatasetId: ID | null; +}; +const JobStageItemContent: FC<Props> = function (props: Props) { + const { dataset_id, job_id } = useParams<{ dataset_id: string; job_id: string }>(); + const { jobStageId, batchId, isProcessed, isJoin, isExport, importDatasetId } = props; + const projectId = useGetCurrentProjectId(); + const { isFetching, data } = useQuery( + ['fetchDatasetJobStageById', projectId, job_id, jobStageId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + 
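+        // skip fetching until a project is selected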
return; + } + return fetchDatasetJobStageById(projectId, job_id, jobStageId); + }, + { + retry: 2, + enabled: Boolean(job_id) && Boolean(jobStageId), + }, + ); + const { isFetching: isBatchFetch, data: batchData } = useQuery( + ['fetchDataBatchById', dataset_id, batchId], + () => { + return fetchDataBatchById(dataset_id, batchId); + }, + { + retry: 2, + enabled: Boolean(dataset_id) && Boolean(batchId), + }, + ); + const { + state, + started_at = 0, + finished_at = 0, + input_data_batch_num_example = 0, + output_data_batch_num_example = 0, + workflow_id, + global_configs, + } = data?.data || {}; + const { name, path } = batchData?.data || {}; + const isRunning = state + ? [DatasetJobState.PENDING, DatasetJobState.RUNNING].includes(state) + : false; + const basicInfo = useMemo(() => { + function TimeRender(prop: { time: DateTime }) { + const { time } = prop; + return <span>{time <= 0 ? '-' : formatTimestamp(time)}</span>; + } + function RunningTimeRender(prop: { start: DateTime; finish: DateTime; isRunning: boolean }) { + const { start, finish, isRunning } = prop; + if (isRunning) { + return start <= 0 ? ( + <span>待运行</span> + ) : ( + <CountTime time={dayjs().unix() - start} isStatic={false} /> + ); + } + return <span>{finish - start <= 0 ? '-' : formatTimeCount(finish - start)}</span>; + } + const basicInfoOptions: Array<{ + label: string; + value: ReactElement | string | number; + }> = [ + { + label: '任务状态', + value: renderDatasetJobState(state), + }, + { + label: '开始时间', + value: <TimeRender time={started_at} />, + }, + { + label: '结束时间', + value: <TimeRender time={finished_at} />, + }, + { + label: '运行时长', + value: <RunningTimeRender start={started_at} finish={finished_at} isRunning={isRunning} />, + }, + ]; + if (isExport) { + basicInfoOptions.push( + { + label: '导出批次', + value: ( + <Link + to={`/datasets/${ + isProcessed ? DatasetKindLabel.PROCESSED : DatasetKindLabel.RAW + }/detail/${importDatasetId}/${DatasetDetailSubTabs.Databatch}`} + > + {name} + </Link> + ), + }, + { + label: '导出路径', + value: ( + <ClickToCopy text={path || ''}> + <Tooltip content={path}> + <div className={styled.export_path_text}>{path || CONSTANTS.EMPTY_PLACEHOLDER}</div> + </Tooltip> + </ClickToCopy> + ), + }, + ); + } else { + basicInfoOptions.push( + { + label: '处理批次', + value: ( + <Link + to={`/datasets/${ + isProcessed ? 
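+                // the batch link's route segment depends on whether this job
+                // belongs to a processed or a raw dataset: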
DatasetKindLabel.PROCESSED : DatasetKindLabel.RAW + }/detail/${dataset_id}/${DatasetDetailSubTabs.Databatch}`} + > + {name} + </Link> + ), + }, + { + label: '输入样本量', + value: input_data_batch_num_example, + }, + { + label: '输出样本量', + value: output_data_batch_num_example, + }, + ); + } + if (isJoin) { + basicInfoOptions.push({ + label: '求交率', + value: getIntersectionRate({ + input: input_data_batch_num_example, + output: output_data_batch_num_example, + }), + }); + } + return basicInfoOptions; + }, [ + state, + started_at, + finished_at, + input_data_batch_num_example, + output_data_batch_num_example, + isRunning, + name, + path, + dataset_id, + isProcessed, + isJoin, + isExport, + importDatasetId, + ]); + return ( + <Spin loading={isFetching || isBatchFetch}> + <div className={styled.job_stage_item_wrapper}> + <h3 className={styled.title}>基本信息</h3> + <PropertyList properties={basicInfo} cols={4} /> + {global_configs && !isExport && ( + <JobParamsPanel globalConfigs={global_configs.global_configs} /> + )} + <WorkFlowPods workFlowId={workflow_id} /> + </div> + </Spin> + ); + + function renderDatasetJobState(state: DatasetJobState | undefined) { + let text: string; + let type: StateTypes; + switch (state) { + case DatasetJobState.SUCCEEDED: + text = '成功'; + type = 'success'; + break; + case DatasetJobState.PENDING: + case DatasetJobState.RUNNING: + text = '运行中'; + type = 'processing'; + break; + case DatasetJobState.FAILED: + text = '失败'; + type = 'error'; + break; + case DatasetJobState.STOPPED: + text = '已停止'; + type = 'error'; + break; + default: + text = '未知'; + type = 'default'; + } + return <StateIndicator text={text} type={type} />; + } +}; + +export default JobStageItemContent; diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/WorkFlowPods/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/WorkFlowPods/index.module.less new file mode 100644 index 000000000..d8dcc4007 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/WorkFlowPods/index.module.less @@ -0,0 +1,61 @@ +.workflow_pods_header{ + display: flex; + flex-direction: row; + justify-content: flex-start; + align-items: center; + margin: 15px 0; + :global(.arco-radio-button) { + &:after { + background-color: unset; + } + } +} + +.job_detail_more_title{ + color: #4e5969; + font-size: 12px; + +} +.job_detail_more_content{ + color: #1d2129; + font-size: 12px; +} + +.job_detail_more_link{ + display: inline-block; + font-weight: 400; + font-size: 12px; + line-height: 20px; + margin-bottom: 12px; +} + +.job_detail_more_button{ + margin-left: auto; + font-size: 12px; +} + +.job_detail_state_indicator{ + padding-left: 16px; + position: relative; + .dot { + display: inline-block; + position: absolute; + top: 4px; + left: 2px; + width: 8px; + height: 8px; + border-radius: 50%; + background-color: rgb(245, 63, 63); + } +} + +.job_detail_error_wrapper{ + max-height: 300px; + overflow: scroll; + .job_detail_error_title{ + color: #fff; + } + .job_detail_error_item{ + margin-bottom: 10px; + } +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/WorkFlowPods/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/WorkFlowPods/index.tsx new file mode 100644 index 000000000..241471832 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/WorkFlowPods/index.tsx @@ -0,0 +1,218 @@ +import 
React, { useMemo, useState } from 'react'; +import { LabelStrong } from 'styles/elements'; +import { Button, Message, Popover, Radio, Tooltip } from '@arco-design/web-react'; +import { useQuery } from 'react-query'; +import { useGetCurrentProjectId, useTablePaginationWithUrlState } from 'hooks'; +import { fetchJobById, getWorkflowDetailById } from 'services/workflow'; +import { TIME_INTERVAL } from 'shared/constants'; +import { get } from 'lodash-es'; +import { Pod, PodState } from 'typings/job'; +import { Table } from '@arco-design/web-react'; +import ClickToCopy from 'components/ClickToCopy'; +import StateIndicator from 'components/StateIndicator'; +import { getPodState, podStateFilters } from 'views/Workflows/shared'; +import { formatTimestamp } from 'shared/date'; +import { Link } from 'react-router-dom'; +import styled from './index.module.less'; +type TWorkFlowPods = { + workFlowId?: ID; +}; + +export default function WorkFlowPods(prop: TWorkFlowPods) { + const { workFlowId } = prop; + const { paginationProps, reset } = useTablePaginationWithUrlState({ + urlStateOption: { navigateMode: 'replace' }, + }); + const projectId = useGetCurrentProjectId(); + const [selectJobId, setSelectJobId] = useState<ID>(); + const workFlowDetail = useQuery( + ['fetch_workflow_detail', projectId, workFlowId], + () => { + if (!workFlowId) { + return Promise.resolve({ data: {} }); + } + if (!projectId) { + Message.info('请选择工作区'); + return Promise.resolve({ data: {} }); + } + return getWorkflowDetailById(workFlowId!, projectId); + }, + { + cacheTime: 1, + refetchInterval: TIME_INTERVAL.CONNECTION_CHECK, + onSuccess: (data) => { + const jobIds = get(data, 'data.job_ids') || []; + if (jobIds.length) { + setSelectJobId((pre) => { + return jobIds.indexOf(selectJobId) > -1 ? 
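+          // keep the currently selected job if it still exists after a refetch,
+          // otherwise fall back to the first job id: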
pre : jobIds[0]; + }); + } + }, + }, + ); + + const jobDetail = useQuery( + ['fetchJobById', selectJobId], + () => fetchJobById(Number(selectJobId)), + { + enabled: Boolean(selectJobId), + }, + ); + + const jobList = useMemo(() => { + if (!workFlowDetail.data) { + return []; + } + const jobIds = get(workFlowDetail.data, 'data.job_ids') || []; + const jobNames = get(workFlowDetail.data, 'data.config.job_definitions') || []; + const jobs = get(workFlowDetail.data, 'data.jobs') || []; + return jobIds.map((item: ID, index: number) => { + const { error_message, state } = jobs[index]; + return { + label: jobNames[index].name, + value: item, + hasError: + error_message && + (error_message.app || JSON.stringify(error_message.pods) !== '{}') && + state !== 'COMPLETED', + errorMessage: error_message, + }; + }); + }, [workFlowDetail.data]); + + const jobData = useMemo(() => { + return get(jobDetail, 'data.data.pods') || ([] as Pod[]); + }, [jobDetail]); + + const handleOnChangeJob = (val: ID) => { + setSelectJobId(() => val); + reset(); + }; + + const columns = [ + { + title: 'Pod', + dataIndex: 'name', + key: 'name', + width: 400, + render: (val: string) => { + return <ClickToCopy text={val}>{val}</ClickToCopy>; + }, + }, + { + title: '运行状态', + dataIndex: 'state', + key: 'state', + ...podStateFilters, + width: 200, + render: (_: PodState, record: Pod) => { + return <StateIndicator {...getPodState(record)} />; + }, + }, + { + title: '创建时间', + dataIndex: 'creation_timestamp', + key: 'creation_timestamp', + width: 150, + sorter(a: Pod, b: Pod) { + return a.creation_timestamp - b.creation_timestamp; + }, + render: (val: number) => { + return formatTimestamp(val); + }, + }, + { + title: '操作', + dataIndex: 'actions', + key: 'actions', + width: 150, + render: (_: any, record: Pod) => { + return ( + <Link target={'_blank'} to={`/logs/pod/${selectJobId}/${record.name}`}> + 日志 + </Link> + ); + }, + }, + ]; + + return ( + <> + <div className={styled.workflow_pods_header}> + <LabelStrong fontSize={14} isBlock={true}> + 实例信息 + </LabelStrong> + <Radio.Group onChange={handleOnChangeJob} size="small" type="button" value={selectJobId}> + {jobList.map((item: any) => { + return ( + <Radio key={item.value} value={item.value}> + {item.hasError ? ( + <Tooltip content={renderErrorMessage(item.errorMessage)}> + <span className={styled.job_detail_state_indicator}> + <span className={styled.dot} /> + {item.label} + </span> + </Tooltip> + ) : ( + item.label + )} + </Radio> + ); + })} + </Radio.Group> + <Popover + trigger="hover" + position="br" + content={ + <span> + <div className={styled.job_detail_more_title}>工作流</div> + <Link + className={styled.job_detail_more_link} + to={`/workflow-center/workflows/${workFlowId}`} + > + 点击查看工作流 + </Link> + <div className={styled.job_detail_more_title}>工作流 ID</div> + <div className={styled.job_detail_more_content}>{workFlowId}</div> + </span> + } + > + <Button className={styled.job_detail_more_button} type="text"> + 更多信息 + </Button> + </Popover> + </div> + {jobList.length > 0 ? 
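+      // only render the pod table once the workflow has reported its job list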
( + <Table + rowKey={'name'} + className={'custom-table custom-table-left-side-filter'} + loading={jobDetail.isFetching} + data={jobData} + columns={columns} + pagination={{ + ...paginationProps, + }} + onChange={(pagination, sorter, filters, extra) => { + if (extra.action === 'filter') { + reset(); + } + }} + /> + ) : null} + </> + ); + function renderErrorMessage(errorMessage: any) { + const { app, pods } = errorMessage; + return ( + <div className={styled.job_detail_error_wrapper}> + <h3 className={styled.job_detail_error_title}>Main Error: {app}</h3> + {Object.entries(pods).map(([pod, error], index) => ( + <div className={styled.job_detail_error_item} key={index}> + <div>Pod: {pod}</div> + <div>Error: {error}</div> + </div> + ))} + </div> + ); + } +} diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/index.module.less new file mode 100644 index 000000000..df0c7fca4 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/index.module.less @@ -0,0 +1,111 @@ +.dataset_job_stage_wrapper{ + flex: 1; + display: flex; + justify-content: flex-start; +} + +.job_stage_list_wrapper{ + height: 100%; + border-right: 1px solid #e5e8ef; + position: relative; + .job_stage_list{ + overflow: scroll; + height: calc(100% - 40px); + .job_stage_list_header{ + color: #1D2129; + font-weight: 500; + font-size: 14px; + height: 60px; + line-height: 60px; + padding-left: 20px; + } + .job_stage_list_item{ + height: 56px; + border-top: 1px solid #E5E8EF; + padding-top: 8px; + padding-left: 20px; + position: relative; + cursor: pointer; + & .job_stage_list_item_title{ + font-size: 12px; + line-height: 20px; + height: 20px; + } + &.active { + background-color: #F2F3F8; + & .job_stage_list_item_title{ + font-size: 12px; + line-height: 20px; + height: 20px; + color: #1664FF; + } + } + } + } + .job_stage_list_count{ + position: absolute; + bottom: 0; + left: 0; + width: 100%; + height: 40px; + padding: 10px 0; + z-index: 10; + text-align: center; + background: #fff; + font-weight: 400; + font-size: 12px; + line-height: 20px; + color: #86909C; + } + .collapse { + transition: 0.1s background-color; + position: absolute; + top: 285px; + left: 200px; + z-index: 10; + display: flex; + justify-content: center; + align-items: center; + width: 24px; + height: 24px; + padding: 2px 0 1px; + border-radius: 50%; + cursor: pointer; + background: #FFFFFF; + border: 0.857143px solid #E5E8EF; + box-shadow: 0px 0px 7px #F2F3F5; + } + .is_reverse { + transition: 0.1s background-color cubic-bezier(0.4, 0, 0.2, 1); + position: absolute; + left: 5px; + top: 285px; + z-index: 10; + display: flex; + justify-content: center; + align-items: center; + width: 24px; + height: 24px; + transform: rotate(180deg); + padding: 1px 0 2px; + border-radius: 50%; + cursor: pointer; + background: #FFFFFF; + border: 0.857143px solid #E5E8EF; + box-shadow: 0px 0px 7px #F2F3F5; + } +} + +.job_stage_content{ + height: 100%; + max-height: 600px; + flex: 1; + :global(.arco-spin) { + height: 100%; + display: block; + } + :global(.arco-spin-children){ + height: 100%; + overflow: scroll; + } +} \ No newline at end of file diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/index.tsx new file mode 100644 index 000000000..63e343974 --- /dev/null +++ 
b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobStageDetail/index.tsx @@ -0,0 +1,152 @@ +import React, { + useState, + useMemo, + ForwardRefRenderFunction, + forwardRef, + useImperativeHandle, +} from 'react'; +import { useQuery } from 'react-query'; +import { Left } from 'components/IconPark'; +import { DatasetJobStage, DatasetJobState } from 'typings/dataset'; +import { useGetCurrentProjectId } from 'hooks'; +import { fetchDatasetJobStageList } from 'services/dataset'; +import { useParams, useLocation } from 'react-router'; +import { Message } from '@arco-design/web-react'; +import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import JobStageItemContent from './JobStageItemContent'; +import qs from 'qs'; +import styled from './index.module.less'; + +type ExposedRef = { + refetch: () => void; +}; + +type Props = { + isProcessed: boolean; + isJoin: boolean; // 表示是否是求交任务 + isExport: boolean; // 表示是否是导出任务 + importDatasetId: ID | null; +}; +const JobStageDetail: ForwardRefRenderFunction<ExposedRef, Props> = function ( + props: Props, + parentRef, +) { + const { job_id } = useParams<{ job_id: string }>(); + const projectId = useGetCurrentProjectId(); + const [collapsed, setCollapsed] = useState(true); + const [total, setTotal] = useState(0); + const [activeJobStage, setActiveJobStage] = useState<DatasetJobStage>(); + const location = useLocation(); + const query = location.search || ''; + const queryObject = qs.parse(query.slice(1)) || {}; + const listQuery = useQuery( + ['fetchDatasetJobStageList', projectId, job_id], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchDatasetJobStageList(projectId!, job_id); + }, + { + retry: 2, + onSuccess: (res) => { + if (!res) return; + const { page_meta, data } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + if (queryObject.stageId) { + const activeJobStage = data.find((item) => `${item.id}` === queryObject.stageId); + setActiveJobStage(activeJobStage || data[0]); + } else { + setActiveJobStage(data[0]); + } + }, + }, + ); + + const list = useMemo(() => { + return listQuery.data?.data || []; + }, [listQuery.data]); + + useImperativeHandle(parentRef, () => { + return { + refetch: listQuery.refetch, + }; + }); + + return ( + <div className={styled.dataset_job_stage_wrapper}> + <div + className={styled.job_stage_list_wrapper} + style={{ + width: collapsed ? '212px' : '0px', + }} + > + {collapsed && ( + <div className={styled.job_stage_list}> + <div className={styled.job_stage_list_header}>任务</div> + {list.map((item, index) => ( + <div + key={index} + className={`${styled.job_stage_list_item} ${ + item.id === activeJobStage?.id ? styled.active : '' + }`} + onClick={() => { + handleChangeJobStage(item); + }} + > + <div className={styled.job_stage_list_item_title}>{item.name}</div> + {renderDatasetJobState(item)} + </div> + ))} + <div className={styled.job_stage_list_count}>{total}个记录</div> + </div> + )} + <div + onClick={() => setCollapsed(!collapsed)} + className={collapsed ? 
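+            // rotated vs. normal arrow style, depending on whether the stage list panel is open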
styled.collapse : styled.is_reverse}
+        >
+          <Left />
+        </div>
+      </div>
+      <div className={styled.job_stage_content}>
+        {activeJobStage && (
+          <JobStageItemContent
+            {...props}
+            jobStageId={activeJobStage.id}
+            batchId={activeJobStage.output_data_batch_id}
+          />
+        )}
+      </div>
+    </div>
+  );
+  function renderDatasetJobState(stage: DatasetJobStage) {
+    let text: string;
+    let type: StateTypes;
+    switch (stage.state) {
+      case DatasetJobState.SUCCEEDED:
+        text = '成功';
+        type = 'success';
+        break;
+      case DatasetJobState.PENDING:
+      case DatasetJobState.RUNNING:
+        text = '运行中';
+        type = 'processing';
+        break;
+      case DatasetJobState.FAILED:
+        text = '失败';
+        type = 'error';
+        break;
+      case DatasetJobState.STOPPED:
+        text = '已停止';
+        type = 'error';
+        break;
+      // a default branch keeps text/type definitely assigned under strict TS,
+      // mirroring the sibling renderDatasetJobState in JobStageItemContent
+      default:
+        text = '未知';
+        type = 'default';
+    }
+    return <StateIndicator text={text} type={type} />;
+  }
+  function handleChangeJobStage(stage: DatasetJobStage) {
+    setActiveJobStage(stage);
+  }
+};
+
+export default forwardRef(JobStageDetail);
diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobTitle/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobTitle/index.module.less
new file mode 100644
index 000000000..7c2b648ab
--- /dev/null
+++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobTitle/index.module.less
@@ -0,0 +1,14 @@
+.job_title_icon{
+  height: 44px;
+  width: 44px;
+  border-radius: 4px;
+  background: #686a72;
+  display: flex;
+  justify-content: center;
+  align-items: center;
+  font-size: 16px;
+  color: #ffffff;
+}
+.job_title_name{
+  margin: 0 12px 0 12px;
+}
diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobTitle/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobTitle/index.tsx
new file mode 100644
index 000000000..2eec0ac92
--- /dev/null
+++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/JobTitle/index.tsx
@@ -0,0 +1,47 @@
+import React from 'react';
+import { DatasetJob, DatasetJobListItem } from 'typings/dataset';
+import StateIndicator from 'components/StateIndicator';
+import GridRow from 'components/_base/GridRow';
+import { getDatasetJobState, getDatasetJobType } from 'shared/dataset';
+import TaskActions from '../../TaskList/TaskActions';
+import { Space } from '@arco-design/web-react';
+import styled from './index.module.less';
+
+type TJobTitleProp = {
+  data: DatasetJob;
+  onStop?: () => void;
+  onDelete?: () => void;
+  id: ID;
+};
+
+export default function JobTitle(prop: TJobTitleProp) {
+  const { data = {} as DatasetJobListItem, onStop, onDelete, id } = prop;
+  const jobData: DatasetJobListItem = {
+    name: '',
+    uuid: '',
+    project_id: data.project_id,
+    kind: data.kind,
+    state: data.state,
+    result_dataset_id: '',
+    result_dataset_name: '',
+    id,
+    created_at: 0,
+    coordinator_id: 0,
+    has_stages: false,
+  };
+  return (
+    <GridRow
+      justify={'space-between'}
+      style={{
+        margin: `20px 20px 0 20px`,
+      }}
+    >
+      <Space>
+        <span className={styled.job_title_icon}>{getDatasetJobType(jobData.kind)}</span>
+        <span className={styled.job_title_name}>{data.name}</span>
+        <StateIndicator {...getDatasetJobState(jobData)} />
+      </Space>
+      <TaskActions data={jobData} onDelete={onDelete} onStop={onStop} />
+    </GridRow>
+  );
+}
diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/index.module.less b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/index.module.less
new file mode 100644
index 000000000..37b12926a
--- /dev/null
+++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/index.module.less 
@@ -0,0 +1,32 @@
+.dataset_job_detail_container{
+  display: flex;
+  flex-direction: row;
+  justify-content: space-between;
+  :global(.arco-spin) {
+    flex-grow: 1;
+    margin-right: 12px;
+  }
+}
+
+.data_detail_tab_pane{
+  display: grid;
+}
+
+.data_detail_tab{
+  margin-bottom: 0 !important;
+}
+
+.data_job_task_detail{
+  padding: 20px 20px;
+  flex: 1;
+  :global(.arco-spin-children){
+    height: 100%;
+    > div{
+      height: 100%;
+    }
+  }
+  :global(.react-flow-container){
+    height: 100%;
+    margin: 0px;
+  }
+}
\ No newline at end of file
diff --git a/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/index.tsx b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/index.tsx
new file mode 100644
index 000000000..b9da09790
--- /dev/null
+++ b/web_console_v2/client/src/views/Datasets/NewDatasetJobDetail/index.tsx
@@ -0,0 +1,216 @@
+import React, { FC, useMemo, useState, useRef } from 'react';
+import { Tabs, Message } from '@arco-design/web-react';
+import SharedPageLayout from 'components/SharedPageLayout';
+import TaskDetail, { NodeType } from '../TaskDetail';
+import { useParams } from 'react-router-dom';
+import JobTitle from './JobTitle';
+import { useQuery } from 'react-query';
+import { fetchDatasetJobDetail, fetchDatasetDetail, fetchDatasetList } from 'services/dataset';
+import { useGetCurrentProjectId } from 'hooks';
+import { DatasetJob, DatasetKindLabel, DatasetKindLabelCapitalMapper } from 'typings/dataset';
+import JobBasicInfo from './JobBasicInfo';
+import JobStageDetail from './JobStageDetail';
+import { DatasetDetailSubTabs } from '../DatasetDetail';
+import { useHistory, Route, Redirect } from 'react-router';
+import BackButton from 'components/BackButton';
+import {
+  isDataJoin,
+  isDataExport,
+  FILTER_OPERATOR_MAPPER,
+  filterExpressionGenerator,
+} from '../shared';
+import { useGetCurrentDomainName } from 'hooks';
+import styled from './index.module.less';
+
+const { TabPane } = Tabs;
+
+type TProps = {};
+
+export enum JobDetailSubTabs {
+  TaskProcess = 'process',
+  TaskList = 'list',
+}
+
+const DatasetJobDetail: FC<TProps> = function (props: TProps) {
+  const { job_id, subtab, dataset_id } = useParams<{
+    job_id: string;
+    subtab: string;
+    dataset_id: string;
+  }>();
+  const currentDomainName = useGetCurrentDomainName();
+  const projectId = useGetCurrentProjectId();
+  const history = useHistory();
+  const jobStageDetailRef = useRef<any>();
+  const [activeTab, setActiveTab] = useState(subtab || JobDetailSubTabs.TaskProcess);
+  const [jobBasicInfo, setJobBasicInfo] = useState(
+    {} as {
+      coordinatorId: ID;
+      createTime: DateTime;
+      creator_username: string;
+    },
+  );
+  // ======= Dataset query ============
+  const query = useQuery(['fetchDatasetDetail', dataset_id], () => fetchDatasetDetail(dataset_id), {
+    refetchOnWindowFocus: false,
+    enabled: Boolean(dataset_id),
+    onError(e: any) {
+      Message.error(e.message);
+    },
+  });
+  // whether the job's dataset is a processed (result) dataset, used for the later redirect to the detail page
+  const isProcessed = Boolean(
+    DatasetKindLabelCapitalMapper[DatasetKindLabel.PROCESSED] === query.data?.data.dataset_kind,
+  );
+  const jobDetailQuery = useQuery(
+    ['fetch_dataset_jobDetail', projectId, job_id],
+    () => fetchDatasetJobDetail(projectId!, job_id!),
+    {
+      refetchOnWindowFocus: false,
+      retry: 2,
+      enabled: Boolean(projectId && job_id),
+      onSuccess: (data) => {
+        const { coordinator_id, created_at, creator_username } = data.data || {};
+        setJobBasicInfo({
+          coordinatorId: coordinator_id,
+          createTime: created_at,
+          creator_username,
+        });
+      },
+    },
+  );
+
+  const jobDetail = useMemo(() => { 
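+    // unwrap the query payload once so the JSX below can read plain fields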
+    if (!jobDetailQuery.data) {
+      return {};
+    }
+    return jobDetailQuery.data.data;
+  }, [jobDetailQuery.data]);
+
+  const isJoin = isDataJoin(jobDetailQuery.data?.data.kind);
+  const isExport = isDataExport(jobDetailQuery.data?.data.kind);
+  // uuid of the export job's imported dataset; the backend was wary of exposing the
+  // imported dataset id directly, so we resolve it by uuid instead
+  const importDatasetUuid = useMemo(() => {
+    const rawDatasetObject = jobDetailQuery.data?.data?.global_configs?.global_configs ?? {};
+    let importDatasetUuid: string = '';
+    Object.keys(rawDatasetObject).forEach((key) => {
+      const rawDatasetInfo = rawDatasetObject[key];
+      if (currentDomainName.indexOf(key) > -1) {
+        importDatasetUuid = rawDatasetInfo.dataset_uuid;
+      }
+    });
+    return importDatasetUuid;
+  }, [jobDetailQuery, currentDomainName]);
+
+  const datasetListQuery = useQuery(
+    ['fetchDatasetList', importDatasetUuid],
+    () =>
+      fetchDatasetList({
+        filter: filterExpressionGenerator(
+          {
+            uuid: importDatasetUuid,
+          },
+          FILTER_OPERATOR_MAPPER,
+        ),
+      }),
+    {
+      enabled: Boolean(importDatasetUuid) && Boolean(isExport),
+      refetchOnWindowFocus: false,
+      retry: 2,
+    },
+  );
+
+  const importDatasetId = useMemo(() => {
+    if (!datasetListQuery.data?.data) return null;
+    return datasetListQuery.data?.data?.[0]?.id;
+  }, [datasetListQuery]);
+
+  const backToList = () => {
+    history.goBack();
+  };
+  /** If no subtab is set, redirect to the task-process tab */
+  if (!subtab) {
+    return (
+      <Redirect
+        to={`/datasets/${dataset_id}/new/job_detail/${job_id}/${JobDetailSubTabs.TaskProcess}`}
+      />
+    );
+  }
+
+  return (
+    <SharedPageLayout
+      title={<BackButton onClick={backToList}>任务详情</BackButton>}
+      cardPadding={0}
+    >
+      <JobTitle id={job_id} data={jobDetail as DatasetJob} onStop={onStop} onDelete={backToList} />
+      <JobBasicInfo {...jobBasicInfo} />
+      <Tabs activeTab={activeTab} onChange={onSubtabChange} className={styled.data_detail_tab}>
+        <TabPane
+          className={styled.data_detail_tab_pane}
+          title="任务流程"
+          key={JobDetailSubTabs.TaskProcess}
+        />
+        <TabPane
+          className={styled.data_detail_tab_pane}
+          title="任务列表"
+          key={JobDetailSubTabs.TaskList}
+        />
+      </Tabs>
+      <Route
+        path={`/datasets/:dataset_id/new/job_detail/:job_id/${JobDetailSubTabs.TaskProcess}`}
+        exact
+        render={(props) => {
+          return (
+            <TaskDetail
+              className={styled.data_job_task_detail}
+              middleJump={false}
+              datasetJobId={job_id}
+              isShowTitle={false}
+              isShowRatio={!jobDetailQuery.data?.data?.has_stages}
+              onNodeClick={(node, datasetMapper = {}) => {
+                if (node.type === NodeType.DATASET_PROCESSED) {
+                  const datasetInfo = datasetMapper[node?.data?.dataset_uuid ?? 
'']; + datasetInfo && + datasetInfo.id && + history.push( + `/datasets/processed/detail/${datasetInfo.id}/${DatasetDetailSubTabs.PreviewData}`, + ); + } + }} + // TODO: pass error message when state_frontend = DatasetStateFront.FAILED, + errorMessage="" + // TODO: confirm the dataset is processed or not by the state of job + isProcessedDataset={true} + /> + ); + }} + /> + <Route + path={`/datasets/:dataset_id/new/job_detail/:job_id/${JobDetailSubTabs.TaskList}`} + exact + render={(props) => { + return ( + <JobStageDetail + ref={jobStageDetailRef} + isProcessed={isProcessed} + isJoin={isJoin} + isExport={isExport} + importDatasetId={importDatasetId} + /> + ); + }} + /> + </SharedPageLayout> + ); + + function onSubtabChange(val: string) { + setActiveTab(val as JobDetailSubTabs); + history.replace(`/datasets/${dataset_id}/new/job_detail/${job_id}/${val}`); + } + + function onStop() { + jobDetailQuery.refetch(); + jobStageDetailRef.current.refetch(); + } +}; + +export default DatasetJobDetail; diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/DatasetNode/index.module.less b/web_console_v2/client/src/views/Datasets/TaskDetail/DatasetNode/index.module.less new file mode 100644 index 000000000..18b89c64e --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/DatasetNode/index.module.less @@ -0,0 +1,28 @@ +.container{ + width: 260px; + min-height: 56px; + padding: 8px 12px; + background: #fff; + border-radius: 4px; + border: 1px solid #dde2e6; +} + +.active{ + border: 1px dashed var(--primaryColor); +} + +.can_click{ + cursor: pointer; +} + +.can_click_label{ + color: var(--primaryColor) !important; +} + +.handle_dot{ + width: 12px; + height: 12px; + border: 1px solid var(--backgroundColorGray); + border-radius: 50%; + background: #fff; +} diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/DatasetNode/index.tsx b/web_console_v2/client/src/views/Datasets/TaskDetail/DatasetNode/index.tsx new file mode 100644 index 000000000..aa63b6ab2 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/DatasetNode/index.tsx @@ -0,0 +1,92 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Handle, Position, NodeComponentProps } from 'react-flow-renderer'; +import WhichDataset from 'components/WhichDataset'; +import { Label, LabelStrong } from 'styles/elements'; +import { NodeType } from '..'; +import WhichParticipantDataset from 'components/WhichParticipantDataset'; +import { Dataset } from 'typings/dataset'; +import styled from './index.module.less'; + +export type Props = NodeComponentProps<{ + title: string; + dataset_name: string; + dataset_uuid?: string; + onAPISuccess?: (uuid: string, data?: any) => void; + isActive?: boolean; +}>; + +const DatasetNode: FC<Props> = ({ data, targetPosition, sourcePosition, type }) => { + const [myDataset, setMyDataset] = useState<null | Dataset>(); + const nameJsx = useMemo(() => { + let jsx: React.ReactNode = ''; + switch (type) { + case NodeType.DATASET_MY: + case NodeType.DATASET_PROCESSED: + case NodeType.UPLOAD: + case NodeType.DOWNLOAD: + jsx = data?.dataset_uuid ? ( + <WhichDataset.UUID + displayKey={type === NodeType.DOWNLOAD ? 'path' : 'name'} + uuid={data.dataset_uuid} + onAPISuccess={(apiData) => { + data?.onAPISuccess?.(data.dataset_uuid!, apiData); + setMyDataset(apiData); + }} + /> + ) : ( + '' + ); + break; + case NodeType.DATASET_PARTICIPANT: + jsx = data?.dataset_uuid ? 
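+          // resolve the participant dataset by uuid; emptyText covers a publication the peer has revoked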
( + <WhichParticipantDataset + uuid={data.dataset_uuid} + onAPISuccess={(apiData) => { + data?.onAPISuccess?.(data.dataset_uuid!, apiData); + }} + emptyText="对方已撤销发布" + /> + ) : ( + '' + ); + break; + case NodeType.LIGHT_CLIENT: + jsx = '本地上传'; + break; + default: + break; + } + + return jsx; + }, [data, type]); + + const isCanClick = + (type === NodeType.DATASET_MY && Boolean(myDataset)) || type === NodeType.DATASET_PROCESSED; + return ( + <div + className={`${styled.container} ${isCanClick && styled.can_click} ${ + data?.isActive && styled.active + }`} + > + <Label isBlock>{data.title}</Label> + <LabelStrong + isBlock + fontSize={14} + className={isCanClick ? styled.can_click_label : ''} + style={{ + wordBreak: 'break-all', + }} + > + {nameJsx} + </LabelStrong> + {targetPosition && ( + <Handle className={styled.handle_dot} type="target" position={Position.Left} /> + )} + {sourcePosition && ( + <Handle className={styled.handle_dot} type="source" position={Position.Right} /> + )} + </div> + ); +}; + +export default DatasetNode; diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/ImportNode/index.module.less b/web_console_v2/client/src/views/Datasets/TaskDetail/ImportNode/index.module.less new file mode 100644 index 000000000..1e6d55464 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/ImportNode/index.module.less @@ -0,0 +1,20 @@ +.container{ + width: 260px; + min-height: 56px; + padding: 8px 12px; + background: #fff; + border-radius: 4px; + border: 1px solid #dde2e6; +} + +.active{ + border: 1px dashed var(--primaryColor); +} + +.handle_dot{ + width: 12px; + height: 12px; + border: 1px solid var(--backgroundColorGray); + border-radius: 50%; + background: #fff; +} diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/ImportNode/index.tsx b/web_console_v2/client/src/views/Datasets/TaskDetail/ImportNode/index.tsx new file mode 100644 index 000000000..9b23a1d40 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/ImportNode/index.tsx @@ -0,0 +1,97 @@ +import React, { FC, useMemo } from 'react'; +import { Handle, Position, NodeComponentProps } from 'react-flow-renderer'; +import { useQuery } from 'react-query'; +import { fetchDatasetList, fetchDataSourceDetail } from 'services/dataset'; +import { Label, LabelStrong } from 'styles/elements'; +import { Spin } from '@arco-design/web-react'; +import { FILTER_OPERATOR_MAPPER, filterExpressionGenerator } from '../../shared'; +import ClickToCopy from 'components/ClickToCopy'; +import styled from './index.module.less'; + +export type Props = NodeComponentProps<{ + title: string; + dataset_name: string; + dataset_uuid?: string; + onAPISuccess?: (uuid: string, data?: any) => void; + isActive?: boolean; +}>; + +export const ImportNode: FC<Props> = ({ data, targetPosition, sourcePosition, type }) => { + const datasetListQuery = useQuery( + ['fetchDatasetList', data.dataset_uuid], + () => + fetchDatasetList({ + filter: filterExpressionGenerator( + { + uuid: data.dataset_uuid, + }, + FILTER_OPERATOR_MAPPER, + ), + }), + { + enabled: Boolean(data.dataset_uuid), + refetchOnWindowFocus: false, + retry: 2, + onSuccess(res) { + data.dataset_uuid && data?.onAPISuccess?.(data.dataset_uuid, res.data?.[0] ?? 
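+        // hand the first matched dataset back to the parent; undefined signals that nothing matched the uuid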
undefined); + }, + }, + ); + + const dataSourceDetailQuery = useQuery( + ['data_source_detail_query', datasetListQuery], + () => + fetchDataSourceDetail({ + id: datasetListQuery.data?.data?.[0].id!, + }), + { + enabled: Boolean( + datasetListQuery.data?.data?.[0]?.id || datasetListQuery.data?.data?.[0]?.id === 0, + ), + refetchOnWindowFocus: false, + retry: 2, + }, + ); + + const [title, content] = useMemo(() => { + const localTitle = '本地上传'; + const dataSourceTitle = '数据源上传'; + const noTitle = '导入任务'; + const noContent = '数据集信息未找到'; + if (!dataSourceDetailQuery?.data?.data) { + return [noTitle, noContent]; + } + const { name, url, is_user_upload } = dataSourceDetailQuery.data.data; + const title = is_user_upload ? localTitle : `${dataSourceTitle}-${name}`; + const content = is_user_upload ? <ClickToCopy text={url}>{url}</ClickToCopy> : url; + return [title, content]; + }, [dataSourceDetailQuery]); + + return ( + <div className={`${styled.container} ${data?.isActive && styled.active}`}> + <Label + style={{ + wordBreak: 'break-all', + }} + isBlock + > + {dataSourceDetailQuery.isFetching ? <Spin /> : title} + </Label> + <LabelStrong + isBlock + fontSize={14} + style={{ + wordBreak: 'break-all', + }} + > + {dataSourceDetailQuery.isFetching ? <Spin /> : content} + </LabelStrong> + {targetPosition && ( + <Handle type="target" className={styled.handle_dot} position={Position.Left} /> + )} + {sourcePosition && ( + <Handle type="source" className={styled.handle_dot} position={Position.Right} /> + )} + </div> + ); +}; diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/TagNode/index.module.less b/web_console_v2/client/src/views/Datasets/TaskDetail/TagNode/index.module.less new file mode 100644 index 000000000..36de850e9 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/TagNode/index.module.less @@ -0,0 +1,19 @@ +.container{ + min-width: 72px; + height: 24px; + padding: 0 12px; + text-align: center; + background: #ffffff; + border: 1px solid var(--backgroundColorGray); + border-radius: 40px; + cursor: pointer; +} + +.handle_dot{ + visibility: hidden; +} + +.label{ + font-weight: 400; + font-size: 12px; +} diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/TagNode/index.tsx b/web_console_v2/client/src/views/Datasets/TaskDetail/TagNode/index.tsx new file mode 100644 index 000000000..d9463c771 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/TagNode/index.tsx @@ -0,0 +1,34 @@ +import React, { FC } from 'react'; +import { Handle, Position, NodeComponentProps } from 'react-flow-renderer'; +import { DataJobBackEndType } from 'typings/dataset'; +import styled from './index.module.less'; + +export type Props = NodeComponentProps<{ + title: string; + dataset_job_uuid: string; + workflow_uuid: string; + kind: DataJobBackEndType; + job_id: ID; +}>; + +const TagNode: FC<Props> = ({ data, targetPosition, sourcePosition }) => { + return ( + <div className={styled.container}> + <span + className={styled.label} + style={{ color: Boolean(data.job_id) ? 
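+          // tags that carry a job_id are jumpable and use the primary color; the rest stay in the default text color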
'var(--primaryColor)' : 'var(--textColor)' }} + title={data.title} + > + {data.title} + </span> + {targetPosition && ( + <Handle className={styled.handle_dot} type="target" position={Position.Left} /> + )} + {sourcePosition && ( + <Handle className={styled.handle_dot} type="source" position={Position.Right} /> + )} + </div> + ); +}; + +export default TagNode; diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/index.less b/web_console_v2/client/src/views/Datasets/TaskDetail/index.less new file mode 100644 index 000000000..a2b62b221 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/index.less @@ -0,0 +1,53 @@ +.react-flow-container{ + position: relative; + height: 313px; + padding: 16px 20px; + margin: 12px 0 20px; + background-color: var(--backgroundColor); + border-radius: 4px; +} + +.statistic-group-container{ + position: absolute; + left: 20px; + top: 12px; + display: flex; + flex-direction: column; +} + +.statistic-item{ + margin-top: 10px; + .arco-statistic-title { + font-size: 12px; + margin-bottom: 0; + } + .arco-statistic-value { + font-size: 20px; + font-family: 'Byte Number'; + } +} + +.statistic-rate-item{ + .arco-statistic-extra { + font-size: 12px; + margin-top: 0px; + } + .arco-statistic-value { + .arco-statistic-value-int { + font-size: 20px; + } + .arco-statistic-value-decimal { + font-size: 20px; + } + } +} + +.statistic-progress{ + .arco-progress-circle-text { + font-size: 14px; + } +} + +.task-detail-tag{ + margin: 0 12px 0 12px; +} diff --git a/web_console_v2/client/src/views/Datasets/TaskDetail/index.tsx b/web_console_v2/client/src/views/Datasets/TaskDetail/index.tsx new file mode 100644 index 000000000..798d8f676 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskDetail/index.tsx @@ -0,0 +1,537 @@ +import React, { FC, useCallback, useMemo, useRef, useState } from 'react'; +import { useHistory } from 'react-router'; +import { Alert, Message, Spin, Statistic, Tag, Progress } from '@arco-design/web-react'; +import ReactFlow, { + Controls, + Edge, + FlowElement, + isNode, + Node, + OnLoadParams, + Position, +} from 'react-flow-renderer'; +import { LabelStrong } from 'styles/elements'; +import DatasetNode from './DatasetNode'; +import TagNode from './TagNode'; + +import { fetchDatasetJobDetail } from 'services/dataset'; + +import { useQuery } from 'react-query'; +import { + useGetCurrentDomainName, + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, +} from 'hooks'; +import { DataJobBackEndTypeToLabelMap, isDataImport, isDataJoin, isDataAnalyzer } from '../shared'; +import { + DataJobBackEndType, + Dataset, + DatasetKindBackEndType, + DatasetKindLabel, + ParticipantDataset, +} from 'typings/dataset'; + +import { DatasetDetailSubTabs } from '../DatasetDetail'; +import { ParticipantType } from 'typings/participant'; +import { ImportNode } from './ImportNode'; +import { JobDetailSubTabs } from 'views/Datasets/NewDatasetJobDetail'; +import './index.less'; + +export enum NodeType { + DATASET_MY = 'dataset_my', + DATASET_PARTICIPANT = 'dataset_participant', + DATASET_PROCESSED = 'dataset_processed', + Tag = 'tag', + UPLOAD = 'upload', + DOWNLOAD = 'download', + LIGHT_CLIENT = 'light_client', +} + +export type DatasetNodeData = { + title: string; + dataset_name: string; + dataset_uuid: string; + dataset_job_uuid?: string; + workflow_uuid?: string; + isActive?: boolean; + job_id?: ID; +}; + +type Props = { + datasetId?: ID; + datasetJobId?: ID; + onNodeClick?: ( + element: FlowElement<DatasetNodeData>, + datasetMapper?: 
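+    // uuid -> dataset info map collected from the node components' API callbacks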
{ [key: string]: Dataset }, + ) => void; + errorMessage?: string; + isProcessedDataset?: boolean; + middleJump: Boolean; + className?: string; + isOldData?: boolean; // 是否是老数据 + isShowTitle?: boolean; + isShowRatio?: boolean; // 是否展示求交率等信息 +}; + +const nodeTypes = { + [NodeType.DATASET_MY]: DatasetNode, + [NodeType.DATASET_PARTICIPANT]: DatasetNode, + [NodeType.DATASET_PROCESSED]: DatasetNode, + [NodeType.Tag]: TagNode, + [NodeType.UPLOAD]: ImportNode, + [NodeType.DOWNLOAD]: DatasetNode, + [NodeType.LIGHT_CLIENT]: DatasetNode, +}; + +export const BASE_X = 0; +export const BASE_Y = 100; +export const WIDTH_DATASET_NODE = 260; +export const WIDTH_TAG_NODE = 120; +export const WIDTH_GAP = 120; +export const HEIGHT_DATASET_NODE = 56; +export const HEIGHT_TAG_NODE = 24; + +const TaskDetail: FC<Props> = ({ + datasetId, + datasetJobId, + onNodeClick, + errorMessage, + isProcessedDataset = false, + middleJump, + className, + isOldData = false, + isShowTitle = true, + isShowRatio = true, +}) => { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const currentDomainName = useGetCurrentDomainName(); + const participantList = useGetCurrentProjectParticipantList(); + + const [intersectionRate, setIntersectionRate] = useState(0); + const [intersectionNumber, setIntersectionNumber] = useState(0); + const [myAmountOfData, setMyAmountOfData] = useState(0); + const myDatasetUuidToInfoMap = useRef<{ + [uuid: string]: Dataset; + }>({}); + const myParticipantDatasetUuidToDatasetInfoMap = useRef<{ + [uuid: string]: ParticipantDataset; + }>({}); + const myResultDatasetUuidToInfoMap = useRef<{ + [uuid: string]: Dataset; + }>({}); + + const getLeftNodeName = (jobKind: DataJobBackEndType) => { + switch (jobKind) { + case DataJobBackEndType.EXPORT: + return '结果数据集'; + case DataJobBackEndType.IMPORT_SOURCE: + return '本地上传'; + default: + return '我方数据集'; + } + }; + + const getRightNodeName = (jobKind: DataJobBackEndType) => { + switch (jobKind) { + case DataJobBackEndType.EXPORT: + return '导出地址'; + case DataJobBackEndType.IMPORT_SOURCE: + return '原始数据集'; + default: + return '结果数据集'; + } + }; + + const datasetJobDetailQuery = useQuery( + ['fetchDatasetJobDetail', projectId, datasetJobId], + () => fetchDatasetJobDetail(projectId!, datasetJobId!), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: Boolean(projectId && datasetJobId), + onSuccess(res) { + const { input_data_batch_num_example, output_data_batch_num_example, kind } = res.data; + setIntersectionRate((prev) => { + if (input_data_batch_num_example === 0) { + return 0; + } + return parseFloat( + ((output_data_batch_num_example / input_data_batch_num_example) * 100).toFixed(2), + ); + }); + + setIntersectionNumber((prev) => { + return output_data_batch_num_example; + }); + + setMyAmountOfData((prev) => { + const myAmountOfData = isDataImport(kind) + ? 
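+          // an import job counts our rows on the output side; every other kind counts the input side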
output_data_batch_num_example + : input_data_batch_num_example; + return myAmountOfData || 0; + }); + }, + }, + ); + + const leftNodeType = useMemo(() => { + const getLeftNodeType = (jobKind: DataJobBackEndType) => { + switch (jobKind) { + case DataJobBackEndType.EXPORT: + return NodeType.DATASET_PROCESSED; + case DataJobBackEndType.IMPORT_SOURCE: + return NodeType.UPLOAD; + default: + return NodeType.DATASET_MY; + } + }; + // set empty and default value to <import> + if (!datasetJobDetailQuery?.data?.data) { + return NodeType.UPLOAD; + } + return getLeftNodeType(datasetJobDetailQuery.data.data?.kind); + }, [datasetJobDetailQuery.data]); + + const rightNodeType = useMemo(() => { + const getRightNodeType = (jobKind: DataJobBackEndType) => { + switch (jobKind) { + case DataJobBackEndType.EXPORT: + return NodeType.DOWNLOAD; + case DataJobBackEndType.IMPORT_SOURCE: + return NodeType.DATASET_MY; + default: + return NodeType.DATASET_PROCESSED; + } + }; + // set empty and default value to <export> + if (!datasetJobDetailQuery?.data?.data) { + return NodeType.DOWNLOAD; + } + return getRightNodeType(datasetJobDetailQuery.data.data?.kind); + }, [datasetJobDetailQuery.data]); + + const onMyDatasetAPISuccess = useCallback((uuid: ID, dataset: Dataset) => { + myDatasetUuidToInfoMap.current[uuid] = dataset; + }, []); + const onParticipantAPISuccess = useCallback( + (uuid: ID, participantDataset: ParticipantDataset) => { + myParticipantDatasetUuidToDatasetInfoMap.current[uuid] = participantDataset; + }, + [], + ); + const onResultDatasetAPISuccess = useCallback((uuid: ID, dataset: Dataset) => { + myResultDatasetUuidToInfoMap.current[uuid] = dataset; + }, []); + + const elementList = useMemo(() => { + const isLightClient = (curParticipant: string) => { + if (Array.isArray(participantList)) { + const filterParticipant = participantList.filter((item) => + item?.domain_name?.includes(curParticipant), + ); + if (filterParticipant.length) { + return filterParticipant[0].type === ParticipantType.LIGHT_CLIENT; + } + } + return false; + }; + + if (!datasetJobDetailQuery.data || !currentDomainName) { + return []; + } + + const datasetJobDetail = datasetJobDetailQuery.data.data; + const rawDatasetObject = datasetJobDetail?.global_configs?.global_configs ?? {}; + + let myDatasetElementList: Node[] = []; + let participantDatasetElementList: Node[] = []; + const kindElementList: Node[] = []; + const processedDatasetElementList: Node[] = []; + const edgeElementList: Edge[] = []; + + // col 1: raw dataset + Object.keys(rawDatasetObject).forEach((key, index) => { + const rawDatasetInfo = rawDatasetObject[key]; + + if (currentDomainName.indexOf(key) > -1) { + myDatasetElementList.push({ + id: `c1-${rawDatasetInfo.dataset_uuid}`, + sourcePosition: Position.Right, + type: leftNodeType, + data: { + title: getLeftNodeName(datasetJobDetail.kind), + dataset_uuid: rawDatasetInfo.dataset_uuid, + onAPISuccess: onMyDatasetAPISuccess, + isActive: !isProcessedDataset, + }, + position: { x: BASE_X, y: BASE_Y }, + }); + } else { + // check whether the participant is light client + const isCurLightClient = isLightClient(key); + participantDatasetElementList.push({ + id: `c1-${isCurLightClient ? key : rawDatasetInfo.dataset_uuid}`, + sourcePosition: Position.Right, + type: isCurLightClient ? NodeType.LIGHT_CLIENT : NodeType.DATASET_PARTICIPANT, + data: { + title: isCurLightClient ? 
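+            // a light-client peer has no queryable dataset detail, so show a fixed label plus a tag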
( + <> + 合作伙伴数据集{' '} + <Tag className="task-detail-tag" color="arcoblue"> + 轻量 + </Tag> + </> + ) : ( + `合作伙伴数据集 - ${key}` + ), + dataset_uuid: isCurLightClient ? key : rawDatasetInfo.dataset_uuid, + onAPISuccess: isCurLightClient ? null : onParticipantAPISuccess, + }, + position: { x: BASE_X, y: BASE_Y + HEIGHT_DATASET_NODE * 2 }, + }); + } + }); + + myDatasetElementList = myDatasetElementList.map((item, index) => ({ + ...item, + position: { x: BASE_X, y: BASE_Y + HEIGHT_DATASET_NODE * 2 * index }, + })); + + const PARTICIPANT_BASE_Y = + myDatasetElementList.length > 0 + ? myDatasetElementList[myDatasetElementList.length - 1].position.y + 2 * HEIGHT_DATASET_NODE + : BASE_Y; + + participantDatasetElementList = participantDatasetElementList.map((item, index) => ({ + ...item, + position: { + x: BASE_X, + y: PARTICIPANT_BASE_Y + HEIGHT_DATASET_NODE * 2 * index, + }, + })); + + const rawDatasetList = [...myDatasetElementList, ...participantDatasetElementList].map( + (item, index) => ({ + ...item, + id: `c1-${index}`, + }), + ); + + // col 2: kind + kindElementList.push({ + id: 'c2-1', + targetPosition: Position.Left, + sourcePosition: Position.Right, + type: NodeType.Tag, + data: { + title: DataJobBackEndTypeToLabelMap[datasetJobDetail.kind] || 'Unknown', + kind: datasetJobDetail.kind, + dataset_job_uuid: datasetJobDetail.uuid, + workflow_uuid: datasetJobDetail.workflow_id, + job_id: middleJump ? datasetJobId : null, // 中间不可跳转的话,数据不传job_id, 兼容置灰 + }, + position: { + x: WIDTH_DATASET_NODE + WIDTH_GAP, + y: + BASE_Y + + Math.floor((2 * rawDatasetList.length - 1) / 2) * HEIGHT_DATASET_NODE + + (HEIGHT_DATASET_NODE - HEIGHT_TAG_NODE) / 2, + }, + }); + + // col 3: processed dataset + processedDatasetElementList.push({ + id: 'c3-1', + targetPosition: Position.Left, + type: rightNodeType, + data: { + title: getRightNodeName(datasetJobDetail.kind), + dataset_name: datasetJobDetail.result_dataset_name, + dataset_uuid: datasetJobDetail.result_dataset_uuid, + isActive: isProcessedDataset, + onAPISuccess: onResultDatasetAPISuccess, + }, + position: { + x: WIDTH_DATASET_NODE + WIDTH_GAP * 2 + WIDTH_TAG_NODE, + y: BASE_Y + Math.floor((2 * rawDatasetList.length - 1) / 2) * HEIGHT_DATASET_NODE - 1.5, + }, + }); + + // edge + if (kindElementList.length > 0) { + rawDatasetList.forEach((item) => { + edgeElementList.push({ + id: `e|${item.id}_c2-1`, + source: `${item.id}`, + target: 'c2-1', + type: 'smoothstep', + animated: true, + }); + }); + } + + if (kindElementList.length > 0 && processedDatasetElementList.length > 0) { + edgeElementList.push({ + id: `e|c2-1_c3-1`, + source: 'c2-1', + target: 'c3-1', + type: 'smoothstep', + animated: true, + }); + } + + return [ + ...rawDatasetList, + ...kindElementList, + ...processedDatasetElementList, + ...edgeElementList, + ]; + }, [ + participantList, + datasetJobId, + onResultDatasetAPISuccess, + datasetJobDetailQuery.data, + currentDomainName, + isProcessedDataset, + onMyDatasetAPISuccess, + onParticipantAPISuccess, + leftNodeType, + rightNodeType, + middleJump, + ]); + + const isShowErrorMessage = errorMessage; + + return ( + <Spin loading={datasetJobDetailQuery.isFetching} className={className}> + <div> + {isShowTitle && ( + <LabelStrong fontSize={14} isBlock={true}> + 任务流程 + </LabelStrong> + )} + + <div className="react-flow-container"> + {isShowRatio && ( + <div className="statistic-group-container"> + {isDataJoin(datasetJobDetailQuery.data?.data?.kind) && ( + <> + <Progress + className="statistic-progress" + percent={intersectionRate ? 
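+                // fall back to 0 so the progress ring renders sanely before the rate is computed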
intersectionRate : 0} + size="large" + type="circle" + status="normal" + trailColor="var(--color-primary-light-1)" + formatText={(percent: number) => { + return ( + <Statistic + className="statistic-rate-item" + extra="求交率" + value={intersectionRate} + suffix="%" + groupSeparator + /> + ); + }} + /> + + <Statistic + className="statistic-item" + title="交集数" + value={intersectionNumber || 0} + groupSeparator + /> + </> + )} + + {!isDataAnalyzer(datasetJobDetailQuery.data?.data?.kind) && ( + <Statistic + className="statistic-item" + title="我方数据量" + value={myAmountOfData || 0} + groupSeparator + /> + )} + </div> + )} + + {elementList.length > 0 && ( + <ReactFlow + elements={elementList} + onLoad={onReactFlowLoad} + onElementClick={(_, element: FlowElement) => onElementsClick(element)} + nodesDraggable={false} + nodesConnectable={false} + zoomOnScroll={false} + zoomOnPinch={false} + zoomOnDoubleClick={false} + minZoom={1} + maxZoom={1} + defaultZoom={1} + nodeTypes={nodeTypes} + > + <Controls showZoom={false} showInteractive={false} /> + </ReactFlow> + )} + </div> + {isShowErrorMessage && ( + <> + <LabelStrong fontSize={14} isBlock={true} style={{ marginBottom: 12 }}> + 错误信息 + </LabelStrong> + <Alert type="error" showIcon={false} content={errorMessage} /> + </> + )} + </div> + </Spin> + ); + + function onReactFlowLoad(reactFlowInstance: OnLoadParams) { + // Fits the view port so that all nodes are visible + reactFlowInstance!.fitView(); + } + async function onElementsClick(element: FlowElement<DatasetNodeData>) { + const allNodeMapper = { + ...myDatasetUuidToInfoMap.current, + ...myResultDatasetUuidToInfoMap.current, + }; + if (element && isNode(element)) { + onNodeClick?.(element, allNodeMapper); + switch (element?.type) { + case NodeType.DATASET_MY: + const datasetInfo = allNodeMapper[element?.data?.dataset_uuid ?? '']; + if (datasetInfo?.id) { + history.push( + `/datasets/${ + datasetInfo?.dataset_kind === DatasetKindBackEndType.PROCESSED + ? 
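+                      // processed datasets open the result-dataset route, everything else the raw route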
DatasetKindLabel.PROCESSED + : DatasetKindLabel.RAW + }/detail/${datasetInfo.id}/${DatasetDetailSubTabs.DatasetJobDetail}`, + ); + } + break; + case NodeType.Tag: + if (middleJump && element?.data?.job_id) { + try { + if (isOldData) { + history.push(`/datasets/job_detail/${element?.data?.job_id}`); + } else { + history.push( + `/datasets/${datasetId}/new/job_detail/${element?.data?.job_id}/${JobDetailSubTabs.TaskProcess}`, + ); + } + } catch (error) { + Message.error(error.message); + } + } + break; + default: + break; + } + } + } +}; + +export default TaskDetail; diff --git a/web_console_v2/client/src/views/Datasets/TaskList/TaskActions/index.tsx b/web_console_v2/client/src/views/Datasets/TaskList/TaskActions/index.tsx new file mode 100644 index 000000000..6bf83eacb --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskList/TaskActions/index.tsx @@ -0,0 +1,82 @@ +import React from 'react'; +import { DatasetJobListItem } from 'typings/dataset'; +import { isJobRunning } from 'shared/dataset'; +import Modal from 'components/Modal'; +import { Button, Message, Space } from '@arco-design/web-react'; +import MoreActions from 'components/MoreActions'; +import { deleteDatasetJob, stopDatasetJob } from 'services/dataset'; +import { ButtonProps } from '@arco-design/web-react/es/Button/interface'; + +interface IProp { + data: DatasetJobListItem; + onDelete?: () => void; + onStop?: () => void; + buttonProps?: ButtonProps; +} + +export default function TaskActions(prop: IProp) { + const { data, onDelete, onStop, buttonProps } = prop; + + const handleOnJobStop = (projectId: ID, data: DatasetJobListItem) => { + stopDatasetJob(projectId, data.id!).then( + () => { + onStop && onStop(); + }, + (err) => Message.error(`停止失败: ${err?.message}`), + ); + }; + + const handleOnJobDelete = (projectId: ID, data: DatasetJobListItem) => { + deleteDatasetJob(projectId, data.id!).then( + () => { + onDelete && onDelete(); + }, + (err) => Message.error(`删除失败: ${err?.message}`), + ); + }; + return ( + <Space> + <Button + disabled={!isJobRunning(data)} + onClick={() => { + Modal.stop({ + title: `确认要停止「${data.result_dataset_name}」?`, + content: '停止后,该任务不能再重新运行,请谨慎操作', + onOk() { + if (!data.project_id) { + Message.info('请选择工作区'); + return; + } + handleOnJobStop(data.project_id, data); + }, + }); + }} + {...buttonProps} + > + 停止运行 + </Button> + <MoreActions + actionList={[ + { + disabled: isJobRunning(data), + label: '删除', + danger: true, + onClick() { + Modal.delete({ + title: `确认要删除「${data.result_dataset_name}」?`, + content: '删除后,该任务及信息将无法恢复,请谨慎操作', + onOk() { + if (!data.project_id) { + Message.info('请选择工作区'); + return; + } + handleOnJobDelete(data.project_id, data); + }, + }); + }, + }, + ]} + /> + </Space> + ); +} diff --git a/web_console_v2/client/src/views/Datasets/TaskList/index.less b/web_console_v2/client/src/views/Datasets/TaskList/index.less new file mode 100644 index 000000000..c1d8d9801 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/TaskList/index.less @@ -0,0 +1,14 @@ +.indicator-with-tip{ + display: flex; + flex-direction: row; + align-items: center; +} +.task-running-count-wrapper{ + display: inline-block; + background-color: white; + border-radius: 4px; + padding: 0 8px 0 8px; + height: 20px; + line-height: 20px; + margin-left: 8px; +} diff --git a/web_console_v2/client/src/views/Datasets/TaskList/index.tsx b/web_console_v2/client/src/views/Datasets/TaskList/index.tsx new file mode 100644 index 000000000..e68202d8e --- /dev/null +++ 
b/web_console_v2/client/src/views/Datasets/TaskList/index.tsx @@ -0,0 +1,334 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantId, + useGetCurrentProjectParticipantList, + useTablePaginationWithUrlState, + useUrlState, +} from 'hooks'; +import { fetchDatasetJobList } from 'services/dataset'; +import { TABLE_COL_WIDTH, TIME_INTERVAL } from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { getDatasetJobState } from 'shared/dataset'; +import { + datasetJobStateFilters, + datasetJobTypeFilters, + FILTER_DATA_JOB_OPERATOR_MAPPER, + filterExpressionGenerator, + getJobKindByFilter, + getJobStateByFilter, + getSortOrder, +} from '../shared'; + +import { Button, Input, Message, Table, TableColumnProps } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout from 'components/SharedPageLayout'; +import StateIndicator from 'components/StateIndicator'; +import DatasetJobsType from 'components/DatasetJobsType'; +import WhichParticipant from 'components/WhichParticipant'; +import { Link } from 'react-router-dom'; + +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { DataJobBackEndType, DatasetJobListItem, DatasetJobState } from 'typings/dataset'; +import TaskActions from './TaskActions'; +import { expression2Filter } from 'shared/filter'; +import { useToggle } from 'react-use'; +import './index.less'; + +type TProps = {}; +type TableFilterConfig = Pick<TableColumnProps, 'filters' | 'onFilter'>; +const { Search } = Input; + +const List: FC<TProps> = function (props: TProps) { + const { paginationProps } = useTablePaginationWithUrlState(); + const projectId = useGetCurrentProjectId(); + const participantId = useGetCurrentProjectParticipantId(); + const participantList = useGetCurrentProjectParticipantList(); + const [total, setTotal] = useState(0); + const [isViewRunning, setIsViewRunning] = useToggle(false); + const [pageTotal, setPageTotal] = useState(0); + + // Temporarily get the number of running tasks by calling the list-api again; + const runningCountQuery = useQuery( + ['fetchRunningCount', projectId], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchDatasetJobList(projectId!, { + page: 1, + page_size: 1, + filter: filterExpressionGenerator( + { + state: [DatasetJobState.RUNNING, DatasetJobState.PENDING], + }, + FILTER_DATA_JOB_OPERATOR_MAPPER, + ), + }); + }, + { + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + + // get participantFilter from participantList + const datasetJobCoordinatorFilters: TableFilterConfig = useMemo(() => { + let filters: { text: string; value: any }[] = []; + if (Array.isArray(participantList) && participantList.length) { + filters = participantList.map((item) => ({ + text: item.name, + value: item.id, + })); + filters.push({ + text: '本方', + value: 0, + }); + } + return { + filters, + onFilter: (value: number, record: DatasetJobListItem) => { + return value === record.coordinator_id; + }, + }; + }, [participantList]); + + // store filter status into urlState + const [urlState, setUrlState] = useUrlState({ + filter: '', + order_by: '', + page: 1, + pageSize: 10, + }); + + // generator listQuery + const listQuery = useQuery( + ['fetchDatasetJobList', projectId, urlState], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + const filter = expression2Filter(urlState.filter); + filter.state = 
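+      // translate the UI filter selections into the backend's state/kind enums before building the expression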
getJobStateByFilter(filter.state); + filter.kind = getJobKindByFilter(filter.kind); + return fetchDatasetJobList(projectId!, { + page: urlState.page, + page_size: urlState.pageSize, + filter: filterExpressionGenerator(filter, FILTER_DATA_JOB_OPERATOR_MAPPER), + order_by: urlState.order_by || 'created_at desc', + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + refetchOnWindowFocus: false, + onSuccess: (res) => { + const { page_meta } = res || {}; + setTotal((pre) => page_meta?.total_items || pre); + setPageTotal(page_meta?.total_pages ?? 0); + }, + }, + ); + + // generator listData from listQuery and watch listQuery + const list = useMemo(() => { + return listQuery.data?.data || []; + }, [listQuery.data]); + + const runningCount = useMemo(() => { + if (!runningCountQuery.data) { + return 0; + } else { + return runningCountQuery.data?.page_meta?.total_items || 0; + } + }, [runningCountQuery.data]); + + const columns = useMemo<ColumnProps<DatasetJobListItem>[]>(() => { + return [ + { + title: '任务名称', + dataIndex: 'name', + key: 'name', + width: TABLE_COL_WIDTH.NAME, + ellipsis: true, + render: (name: string, record) => { + if (record.has_stages) { + // 新数据跳转新任务详情 + return ( + <Link to={`/datasets/${record.result_dataset_id}/new/job_detail/${record.id}`}> + {name} + </Link> + ); + } + return <Link to={`/datasets/job_detail/${record.id}`}>{name}</Link>; + }, + }, + { + title: '任务类型', + dataIndex: 'kind', + key: 'kind', + width: TABLE_COL_WIDTH.NORMAL, + ...datasetJobTypeFilters, + filteredValue: expression2Filter(urlState.filter).kind, + render: (type) => { + return <DatasetJobsType type={type as DataJobBackEndType} />; + }, + }, + { + title: '任务状态', + dataIndex: 'state', + key: 'state', + width: TABLE_COL_WIDTH.NORMAL, + ...datasetJobStateFilters, + filteredValue: expression2Filter(urlState.filter).state, + render: (_: any, record: DatasetJobListItem) => { + return ( + <div className="indicator-with-tip"> + <StateIndicator {...getDatasetJobState(record)} /> + </div> + ); + }, + }, + { + title: '任务发起方', + dataIndex: 'coordinator_id', + key: 'coordinator_id', + width: TABLE_COL_WIDTH.COORDINATOR, + ...datasetJobCoordinatorFilters, + filteredValue: expression2Filter(urlState.filter).coordinator_id, + render: (value: any) => { + return value === 0 ? '本方' : <WhichParticipant id={value} />; + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + key: 'created_at', + width: TABLE_COL_WIDTH.TIME, + sorter(a: DatasetJobListItem, b: DatasetJobListItem) { + return a.created_at - b.created_at; + }, + defaultSortOrder: getSortOrder(urlState, 'created_at'), + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + { + title: '操作', + dataIndex: 'state', + key: 'operation', + fixed: 'right', + width: TABLE_COL_WIDTH.NORMAL, + render: (state: DatasetJobState, record) => ( + <TaskActions + buttonProps={{ + type: 'text', + className: 'custom-text-button', + }} + data={record} + onDelete={listQuery.refetch} + onStop={handleRefetch} + /> + ), + }, + ]; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [urlState, projectId, participantId]); + + // filter running jobs + const handleOnClick = () => { + const filter = expression2Filter(urlState.filter); + filter.state = isViewRunning ? 
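+    // toggling off clears the state filter; toggling on pins the list to running jobs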
undefined : [DatasetJobState.RUNNING]; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filter, FILTER_DATA_JOB_OPERATOR_MAPPER), + })); + setIsViewRunning((pre: boolean) => !pre); + }; + + // search by keyword + const onSearch = (value: string) => { + const filter = expression2Filter(urlState.filter); + filter.name = value; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(filter, FILTER_DATA_JOB_OPERATOR_MAPPER), + })); + }; + + const pagination = useMemo(() => { + return pageTotal <= 1 + ? false + : { + ...paginationProps, + total, + }; + }, [paginationProps, pageTotal, total]); + + return ( + <SharedPageLayout title="任务管理"> + <GridRow justify="space-between" align="center"> + <Button className={'custom-operation-button'} onClick={handleOnClick}> + 查看运行中任务 + <span className="task-running-count-wrapper">{runningCount}</span> + </Button> + <Search + className={'custom-input'} + allowClear + placeholder="输入任务名称" + defaultValue={expression2Filter(urlState.filter).name} + onSearch={onSearch} + onClear={() => onSearch('')} + /> + </GridRow> + <Table + className={'custom-table custom-table-left-side-filter'} + rowKey="id" + loading={listQuery.isFetching} + data={list} + scroll={{ x: '100%' }} + columns={columns} + pagination={pagination} + onChange={( + pagination, + sorter, + filters: Partial<Record<keyof DatasetJobListItem, any[]>>, + extra, + ) => { + switch (extra.action) { + case 'sort': + let orderValue = ''; + if (sorter.direction) { + orderValue = sorter.direction === 'ascend' ? 'asc' : 'desc'; + } + setUrlState((prevState) => ({ + ...prevState, + order_by: orderValue ? `${sorter.field} ${orderValue}` : '', + })); + break; + case 'filter': + const filterCopy = { + ...filters, + name: expression2Filter(urlState.filter).name, + }; + setUrlState((prevState) => ({ + ...prevState, + filter: filterExpressionGenerator(filterCopy, FILTER_DATA_JOB_OPERATOR_MAPPER), + page: 1, + })); + break; + default: + } + }} + /> + </SharedPageLayout> + ); + function handleRefetch() { + listQuery.refetch(); + runningCountQuery.refetch(); + } +}; + +export default List; diff --git a/web_console_v2/client/src/views/Datasets/index.tsx b/web_console_v2/client/src/views/Datasets/index.tsx index 7f6bbc4b7..6067a5b45 100644 --- a/web_console_v2/client/src/views/Datasets/index.tsx +++ b/web_console_v2/client/src/views/Datasets/index.tsx @@ -1,14 +1,46 @@ -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import ErrorBoundary from 'components/ErrorBoundary'; import React, { FC } from 'react'; import { Route } from 'react-router-dom'; -import CreateDataset from './CreateDataset'; +import WorkflowDetail from 'views/Workflows/WorkflowDetail'; +import DatasetDetail from './DatasetDetail'; import DatasetList from './DatasetList'; +import DataSourceList from './DataSourceList'; +import DataSourceDetail from './DataSourceDetail'; +import TaskList from './TaskList'; +import DatasetJobDetail from './DatasetJobDetail'; +import NewDatasetJobDetail from './NewDatasetJobDetail'; const DatasetsPage: FC = () => { return ( <ErrorBoundary> - <Route path="/datasets" component={DatasetList} /> - <Route path="/datasets/create" exact component={CreateDataset} /> + <Route path="/datasets/data_source" exact component={DataSourceList} /> + <Route path="/datasets/data_source/:id/:subtab?" exact component={DataSourceDetail} /> + <Route + path="/datasets/:kind_label(raw|processed)/:tab(my|participant)?" 
+ exact + render={(props) => { + return <DatasetList {...props} key={(props?.match?.params as any)['kind_label']!} />; // force refresh when change kind_label + }} + /> + <Route + path="/datasets/:kind_label(raw|processed)/detail/:id/:subtab?" + exact + render={(props) => { + return <DatasetDetail {...props} key={(props?.match?.params as any)['kind_label']!} />; // force refresh when change kind_label + }} + /> + <Route path="/datasets/task_list" exact component={TaskList} /> + <Route + path="/datasets/:kind_label(processed)/workflows/:id" + exact + component={WorkflowDetail} + /> + <Route path="/datasets/job_detail/:job_id" exact component={DatasetJobDetail} /> + <Route + path="/datasets/:dataset_id/new/job_detail/:job_id/:subtab?" + exact + component={NewDatasetJobDetail} + /> </ErrorBoundary> ); }; diff --git a/web_console_v2/client/src/views/Datasets/routes.tsx b/web_console_v2/client/src/views/Datasets/routes.tsx new file mode 100644 index 000000000..e8c2a5e69 --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/routes.tsx @@ -0,0 +1,12 @@ +const INDEX_PATH = '/datasets'; + +const routes: Record<string, string> = { + DatasetCreate: `${INDEX_PATH}/:action(create|edit)/source`, +}; + +export default routes; + +export enum DatasetCreateAction { + Create = 'create', + Edit = 'edit', +} diff --git a/web_console_v2/client/src/views/Datasets/shared.ts b/web_console_v2/client/src/views/Datasets/shared.ts new file mode 100644 index 000000000..a047d85ef --- /dev/null +++ b/web_console_v2/client/src/views/Datasets/shared.ts @@ -0,0 +1,507 @@ +/* istanbul ignore file */ +import { + DataJobBackEndType, + DatasetJobListItem, + DatasetJobState, + DatasetJobType, + DatasetKind, + DatasetKindBackEndType, + DatasetKindLabel, + DatasetStateFront, + DataBatchV2, + DatasetJobStage, + DatasetRawPublishStatus, + Dataset, + DatasetProcessedAuthStatus, + DatasetProcessedMyAuthStatus, + DatasetJob, +} from 'typings/dataset'; +import { TableColumnProps } from '@arco-design/web-react'; +import { FilterOp } from 'typings/filter'; +import { expression2Filter, operationMap } from 'shared/filter'; +import { Tag } from 'typings/workflow'; + +type TableFilterConfig = Pick<TableColumnProps, 'filters' | 'onFilter'>; + +export const datasetPageTitles = { + [DatasetKindLabel.RAW]: '原始数据集', + [DatasetKindLabel.PROCESSED]: '结果数据集', + undefined: '未知数据集', +}; + +export const datasetKindLabelValueMap = { + [DatasetKindLabel.RAW]: DatasetKind.RAW, + [DatasetKindLabel.PROCESSED]: DatasetKind.PROCESSED, + [DatasetKind.RAW]: DatasetKindLabel.RAW, + [DatasetKind.PROCESSED]: DatasetKindLabel.PROCESSED, +}; + +export const DataJobBackEndTypeToLabelMap = { + [DataJobBackEndType.RSA_PSI_DATA_JOIN]: 'RSA-PSI 求交', + [DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN]: 'LIGHT_CLIENT_RSA_PSI数据求交', + [DataJobBackEndType.LIGHT_CLIENT_OT_PSI_DATA_JOIN]: 'LIGHT_CLIENT_OT_PSI数据求交', + [DataJobBackEndType.OT_PSI_DATA_JOIN]: 'OT-PSI数据求交', + [DataJobBackEndType.DATA_JOIN]: '数据求交', + [DataJobBackEndType.DATA_ALIGNMENT]: '数据对齐', + [DataJobBackEndType.IMPORT_SOURCE]: '数据导入', + [DataJobBackEndType.EXPORT]: '导出', + [DataJobBackEndType.HASH_DATA_JOIN]: '哈希求交', + [DataJobBackEndType.ANALYZER]: '数据探查', +}; + +export function isDataJoin(kind?: DataJobBackEndType) { + if (!kind) return false; + return [ + DataJobBackEndType.RSA_PSI_DATA_JOIN, + DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN, + DataJobBackEndType.OT_PSI_DATA_JOIN, + DataJobBackEndType.DATA_JOIN, + DataJobBackEndType.HASH_DATA_JOIN, + ].includes(kind); +} +export function 
isDataImport(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.IMPORT_SOURCE].includes(kind); +} +export function isDataExport(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.EXPORT].includes(kind); +} +export function isDataAlignment(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.DATA_ALIGNMENT].includes(kind); +} +export function isDataAnalyzer(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.ANALYZER].includes(kind); +} + +export function isDataLightClient(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN].includes(kind); +} + +export function isDataOtPsiJoin(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.OT_PSI_DATA_JOIN].includes(kind); +} + +export function isDataHashJoin(kind?: DataJobBackEndType) { + if (!kind) return false; + return [DataJobBackEndType.HASH_DATA_JOIN].includes(kind); +} + +export const datasetJobTypeOptions = [ + { + label: '求交', + value: DatasetJobType.JOIN, + }, + { + label: '对齐', + value: DatasetJobType.ALIGNMENT, + }, + { + label: '导入', + value: DatasetJobType.IMPORT, + }, + { + label: '导出', + value: DatasetJobType.EXPORT, + }, + { + label: '数据探查', + value: DatasetJobType.ANALYZER, + }, +]; + +export const datasetJobStateOptions = [ + { + label: '待运行', + value: DatasetJobState.PENDING, + }, + { + label: '运行中', + value: DatasetJobState.RUNNING, + }, + { + label: '成功', + value: DatasetJobState.SUCCEEDED, + }, + { + label: '失败', + value: DatasetJobState.FAILED, + }, + { + label: '已停止', + value: DatasetJobState.STOPPED, + }, +]; + +export const datasetJobTypeFilters: TableFilterConfig = { + filters: datasetJobTypeOptions.map((item) => ({ + text: item.label, + value: item.value, + })), + onFilter: (value: string, record: DatasetJobListItem) => { + switch (value) { + case DatasetJobType.JOIN: + return isDataJoin(record.kind); + case DatasetJobType.ALIGNMENT: + return [DataJobBackEndType.DATA_ALIGNMENT].includes(record.kind); + case DatasetJobType.IMPORT: + return [DataJobBackEndType.IMPORT_SOURCE].includes(record.kind); + case DatasetJobType.EXPORT: + return [DataJobBackEndType.EXPORT].includes(record.kind); + case DatasetJobType.ANALYZER: + return [DataJobBackEndType.ANALYZER].includes(record.kind); + default: + return false; + } + }, +}; + +export const datasetJobStateFilters: TableFilterConfig = { + filters: datasetJobStateOptions + .filter((opt) => opt.value !== DatasetJobState.PENDING) + .map((item) => ({ + text: item.label, + value: item.value, + })), + onFilter: (value: string, record: DatasetJobListItem) => { + if (value === DatasetJobState.RUNNING) { + return [DatasetJobState.PENDING, DatasetJobState.RUNNING].includes(record.state); + } + return value === record.state; + }, +}; + +export const FILTER_DATA_BATCH_OPERATOR_MAPPER = { + state: FilterOp.IN, +}; + +export const dataBatchStateFilters: TableFilterConfig = { + filters: [ + { + text: '待处理', + value: DatasetStateFront.PENDING, + }, + { + text: '处理中', + value: DatasetStateFront.PROCESSING, + }, + { + text: '可用', + value: DatasetStateFront.SUCCEEDED, + }, + { + text: '处理失败', + value: DatasetStateFront.FAILED, + }, + // { + // text: '删除中', + // value: DatasetStateFront.DELETING, + // }, + ], + onFilter: (value: string, record: DataBatchV2) => { + return value === record.state; + }, +}; + +export enum CREDITS_LIMITS { + MIN = 100, + MAX = 10000, +} + +export 
const NO_CATEGORY = '未分类'; + +export const TAG_MAPPER = { + [Tag.RESOURCE_ALLOCATION]: '资源配置', + [Tag.INPUT_PARAM]: '输入参数', + [Tag.INPUT_PATH]: '输入路径', + [Tag.OUTPUT_PATH]: '输出路径', + [Tag.OPERATING_PARAM]: '运行参数', + [Tag.SYSTEM_PARAM]: '系统变量', +}; + +export enum DatasetJobTypeFront { + RAW = 'RAW', + PROCESSED = 'PROCESSED', + IMPORT = 'IMPORT', + EXPORTED = 'EXPORTED', +} + +export const DATA_JOB_TYPE_MAPPER = { + [DatasetJobTypeFront.IMPORT]: [DatasetKindBackEndType.SOURCE], + [DatasetJobTypeFront.RAW]: [DatasetKindBackEndType.RAW], + [DatasetJobTypeFront.EXPORTED]: [DatasetKindBackEndType.EXPORTED], + [DatasetJobTypeFront.PROCESSED]: [DatasetKindBackEndType.PROCESSED], +}; + +/** + * check the originType form back-end is belonged to target type or not + * @param originType + * @param targetType + */ +export function dataJobTypeCheck( + originType: DatasetKindBackEndType, + targetType: DatasetJobTypeFront, +): boolean { + if (!originType || !DATA_JOB_TYPE_MAPPER[targetType]) { + return false; + } + return DATA_JOB_TYPE_MAPPER[targetType].includes(originType); +} + +/** + * generate an expression from filter object + * @param filter + * @param filterOpMapper + */ +export function filterExpressionGenerator( + filter: { [filed: string]: any }, + filterOpMapper: { [filed: string]: FilterOp }, +) { + const filterPairStringArray = []; + const keys = Object.keys(filter); + if (!keys.length) { + return ''; + } + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + const val = filter[key]; + if (typeof val !== 'boolean' && !val && val !== 0) { + continue; + } + const finalVal = JSON.stringify(val); + const op = filterOpMapper[key]; + op && filterPairStringArray.push(`(${key}${operationMap(op)}${finalVal})`); + } + switch (filterPairStringArray.length) { + case 0: + return ''; + case 1: + return filterPairStringArray[0]; + default: + return `(and${filterPairStringArray.join('')})`; + } +} + +/** + * get sortValue form urlState + * @param urlState + * @param key + */ +export function getSortOrder(urlState: any, key: string) { + const order = urlState.order_by?.split(' ') || []; + let res: 'ascend' | 'descend' | undefined = undefined; + const [keyword, value] = order; + if (keyword === key) { + switch (value) { + case 'asc': + res = 'ascend'; + break; + case 'desc': + res = 'descend'; + break; + default: + break; + } + } + return res; +} + +export function getPublishState(filter?: string) { + if (!filter) { + return undefined; + } + return expression2Filter(filter).publish_frontend_state; +} + +export const FILTER_OPERATOR_MAPPER = { + dataset_format: FilterOp.IN, + publish_frontend_state: FilterOp.EQUAL, + auth_status: FilterOp.IN, + name: FilterOp.CONTAIN, + project_id: FilterOp.EQUAL, + dataset_kind: FilterOp.EQUAL, + format: FilterOp.IN, + participant_id: FilterOp.IN, + uuid: FilterOp.EQUAL, + dataset_type: FilterOp.EQUAL, +}; + +export const FILTER_DATA_JOB_OPERATOR_MAPPER = { + coordinator_id: FilterOp.IN, + kind: FilterOp.IN, + state: FilterOp.IN, + name: FilterOp.CONTAIN, +}; + +export const JOB_FRONT_TYPE_TO_BACK_TYPE_MAPPER = { + [DatasetJobType.JOIN]: [ + DataJobBackEndType.RSA_PSI_DATA_JOIN, + DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN, + DataJobBackEndType.OT_PSI_DATA_JOIN, + DataJobBackEndType.DATA_JOIN, + DataJobBackEndType.HASH_DATA_JOIN, + ], + [DatasetJobType.IMPORT]: [DataJobBackEndType.IMPORT_SOURCE], + [DatasetJobType.EXPORT]: [DataJobBackEndType.EXPORT], + [DatasetJobType.ALIGNMENT]: [DataJobBackEndType.DATA_ALIGNMENT], + [DatasetJobType.ANALYZER]: 
[DataJobBackEndType.ANALYZER], +}; + +export function getJobKindByFilter(kindList?: DatasetJobType[]) { + if (!kindList || !kindList.length) { + return; + } + return kindList.reduce((pre, cur) => { + return pre.concat(JOB_FRONT_TYPE_TO_BACK_TYPE_MAPPER[cur]); + }, [] as DataJobBackEndType[]); +} + +export function getJobStateByFilter(stateList?: DatasetJobState[]) { + if (!stateList || !stateList.length) { + return; + } + const runningFlag = stateList.includes(DatasetJobState.RUNNING); + const pendingFlag = stateList.includes(DatasetJobState.PENDING); + const spliceIndex = pendingFlag + ? stateList.findIndex((item) => item === DatasetJobState.PENDING) + : stateList.length; + pendingFlag && !runningFlag && stateList.splice(spliceIndex, 1); + runningFlag && !pendingFlag && stateList.push(DatasetJobState.PENDING); + return stateList; +} + +export const VARIABLE_TIPS_MAPPER: { [prop: string]: string } = { + num_partitions: '数据分片的数量,各方需保持一致', + part_num: '数据分片的数量,各方需保持一致', + replicas: '求交worker数量,各方需保持一致', + part_key: '用来当作求交列的列名', +}; + +export const SYNCHRONIZATION_VARIABLE = { + NUM_PARTITIONS: 'num_partitions', + PART_NUM: 'part_num', + REPLICAS: 'replicas', +}; + +export function isDatasetJobStagePending(datasetJobStage?: DatasetJobStage) { + if (!datasetJobStage) return false; + return [DatasetJobState.PENDING, DatasetJobState.RUNNING].includes(datasetJobStage?.state); +} + +export function isDatasetJobStageFailed(datasetJobStage?: DatasetJobStage) { + if (!datasetJobStage) return false; + return [DatasetJobState.FAILED, DatasetJobState.STOPPED].includes(datasetJobStage?.state); +} + +export function isDatasetJobStageSuccess(datasetJobStage?: DatasetJobStage) { + if (!datasetJobStage) return false; + return [DatasetJobState.SUCCEEDED].includes(datasetJobStage?.state); +} + +export function isDatasetTicket(data: Dataset) { + return [DatasetRawPublishStatus.TICKET_PENDING, DatasetRawPublishStatus.TICKET_DECLINED].includes( + data.publish_frontend_state, + ); +} + +export function isDatasetPublished(data: Dataset) { + return data.publish_frontend_state === DatasetRawPublishStatus.PUBLISHED; +} + +export function isFrontendAuthorized(data: Dataset) { + return data.local_auth_status === DatasetProcessedMyAuthStatus.AUTHORIZED; +} + +export const RawPublishStatusOptions = [ + { + status: DatasetRawPublishStatus.UNPUBLISHED, + text: '未发布', + color: '#165DFF', + percent: 25, + }, + { + status: DatasetRawPublishStatus.TICKET_PENDING, + text: '待审批', + color: '#165DFF', + percent: 70, + }, + { + status: DatasetRawPublishStatus.TICKET_PENDING, + text: '审批拒绝', + color: '#FA9600', + percent: 70, + }, + { + status: DatasetRawPublishStatus.PUBLISHED, + text: '已发布', + color: '#165DFF', + percent: 100, + }, +]; + +export const RawAuthStatusOptions = [ + { + status: DatasetProcessedAuthStatus.TICKET_PENDING, + text: '待审批', + color: '#165DFF', + percent: 25, + }, + { + status: DatasetProcessedAuthStatus.TICKET_PENDING, + text: '审批拒绝', + color: '#FA9600', + percent: 50, + }, + { + status: DatasetProcessedAuthStatus.AUTH_PENDING, + text: '待授权', + color: '#165DFF', + percent: 100, + }, + + { + status: DatasetProcessedAuthStatus.AUTH_APPROVED, + text: '授权通过', + color: '#00B42A', + percent: 100, + }, +]; + +export function isSingleParams(kind?: DataJobBackEndType) { + if (!kind) { + return false; + } + return [ + DataJobBackEndType.EXPORT, + DataJobBackEndType.IMPORT_SOURCE, + DataJobBackEndType.ANALYZER, + DataJobBackEndType.LIGHT_CLIENT_OT_PSI_DATA_JOIN, + DataJobBackEndType.LIGHT_CLIENT_RSA_PSI_DATA_JOIN, + 
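+    /* presumably the kinds whose parameters are filled in by the local
+       party alone: import, export, analyzer and the light-client joins */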
].includes(kind); +} + +export enum CronType { + DAY = 'DAY', + HOUR = 'HOUR', +} + +export const cronTypeOptions = [ + { + value: CronType.DAY, + label: '每天', + warnTip: '天级导入只会读取文件夹格式为 YYYYMMDD (如20220101) 下的数据。', + }, + { + value: CronType.HOUR, + label: '每小时', + warnTip: '小时级导入只会读取文件夹格式为 YYYYMMDD-HH(如20220101-12) 下的数据。', + }, +]; + +export function isHoursCronJoin(datasetJob?: DatasetJob) { + return datasetJob?.time_range?.hours === 1; +} diff --git a/web_console_v2/client/src/views/Login/index.module.less b/web_console_v2/client/src/views/Login/index.module.less new file mode 100644 index 000000000..437391387 --- /dev/null +++ b/web_console_v2/client/src/views/Login/index.module.less @@ -0,0 +1,133 @@ +@import '~styles/mixins.less'; + +.login_layout{ + display: grid; + grid-template-areas: 'left right'; + grid-template-columns: 520px 1fr; + min-width: 500px; + height: 100vh; + min-height: 500px; + background-color: #fff; + + @media screen and (max-width: 1000px) { + grid-template-columns: 0 1fr; + } +} + +.login_left_block{ + position: relative; + height: 100%; + grid-area: left; + display: flex; + justify-content: center; + align-items: center; + flex-direction: column; + background-position: 20px 20px, center; + background-repeat: no-repeat; + background-color: var(--primaryColor); + background-size: 140px auto, 80% auto; + + > * { + transform: translateY(-9vh); + } +} +.login_right_block{ + .MixinFlexAlignCenter(); + position: relative; + height: 100%; + display: flex; + background-color: white; + grid-area: right; + flex-direction: column; + @media screen and (max-width: 1000px) { + background: url('~assets/images/logo-black.png') top 24px left 32px no-repeat; + background-size: 160px; + } +} + +.login_bioland_right_block{ + display: flex; + .MixinFlexAlignCenter(); + position: relative; + height: 100%; + background-color: white; + grid-area: right; + flex-direction: column; + @media screen and (max-width: 1000px) { + background: url('~assets/icons/logo-bioland-colorful.svg') top 24px left 32px no-repeat; + background-size: 160px; + } +} + +.login_form{ + width: 360px !important; + :global{ + .arco-form-item{ + margin-bottom: 32px; + } + .arco-input-inner-wrapper{ + height: 48px; + } + .arco-col{ + flex: 1; + } + .checkboxItem { + margin-bottom: 0; + } + .arco-checkbox-text{ + color: #7a8499; + } + } + .form_title { + margin-bottom: 24px; + font-size: 27px; + line-height: 36px; + } + .login_button{ + width: 100%; + height: 48px; + } +} + +.no_account { + margin-top: 16px; + color: var(--textColorSecondary); + font-size: 12px; + white-space: nowrap; +} + +.login_way_layout{ + display: flex; + justify-content: flex-start; + flex-wrap: wrap; +} + +.login_way_item{ + text-align: center; + cursor: pointer; + flex: 0 0 33%; + word-break: break-all; + margin-bottom: 16px; + img { + display: inline-block; + width: 46px; + height: 46px; + } + div { + font-size: 14px; + color: var(--textColorStrong); + } +} + +.other_login_way_text{ + display: inline-block; + font-size: 14px; + color: var(--primaryColor); + cursor: pointer; + width: 100%; + text-align: left; + position: relative; + top: -20px; +} + + diff --git a/web_console_v2/client/src/views/Login/index.tsx b/web_console_v2/client/src/views/Login/index.tsx index b0a0d2b68..7310d606b 100644 --- a/web_console_v2/client/src/views/Login/index.tsx +++ b/web_console_v2/client/src/views/Login/index.tsx @@ -1,207 +1,282 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Input, Checkbox, Form, Button, 
message } from 'antd'; -import { EyeInvisibleOutlined, EyeTwoTone } from '@ant-design/icons'; +import React, { FC, useEffect, useState } from 'react'; +import { Input, Checkbox, Form, Button, Message } from '@arco-design/web-react'; import loginIllustration from 'assets/images/login-illustration.png'; -import logColorful from 'assets/images/logo-colorful.svg'; -import { MixinFlexAlignCenter } from 'styles/mixins'; -import { login } from 'services/user'; +import { login, fetchLoginWayList } from 'services/user'; import { Redirect, useHistory } from 'react-router-dom'; import { useToggle } from 'react-use'; -import { useTranslation } from 'react-i18next'; import store from 'store2'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; -import { FedLoginFormData } from 'typings/auth'; +import { FedLoginFormData, FedLoginQueryParamsData } from 'typings/auth'; import { useRecoilQuery } from 'hooks/recoil'; +import { useGetLogoSrc, useUrlState, useGetThemeBioland } from 'hooks'; import { userInfoQuery } from 'stores/user'; -import { useSetRecoilState } from 'recoil'; -import i18n from 'i18n'; - -const Layout = styled.main` - display: grid; - grid-template-areas: 'left right'; - grid-template-columns: 520px 1fr; - min-width: 500px; - height: 100vh; - min-height: 500px; - background-color: #fff; - - @media screen and (max-width: 1000px) { - grid-template-columns: 0 1fr; - } -`; -const Block = styled.section` - position: relative; - height: 100%; -`; -const Left = styled(Block)` - grid-area: left; - display: flex; - justify-content: center; - align-items: center; - flex-direction: column; - background-image: url(${logColorful}), url(${loginIllustration}); - background-position: 20px 20px, center; - background-repeat: no-repeat; - background-color: #2b5ccc; - background-size: 121px auto, 80% auto; - - > * { - transform: translateY(-9vh); +import { useSetRecoilState, useRecoilState, useRecoilValue } from 'recoil'; +import { parseSearch } from 'shared/url'; +import { appPreference, appEmailGetters, appLoginWayList } from 'stores/app'; +import { fetchSysEmailGroup } from 'services/settings'; +import { useQuery } from 'react-query'; +import { FedLoginWay } from 'typings/auth'; +import iconSSODefault from 'assets/icons/icon-sso-oauth-default.svg'; // default is oauth icon +import iconOAuthDefault from 'assets/icons/icon-sso-oauth-default.svg'; +import logoCASDefault from 'assets/images/logo-sso-cas-default.jpeg'; +import * as H from 'history'; +import styled from './index.module.less'; + +function getDefaultLoginWayIcon(protocolType: string) { + let icon: string = iconSSODefault; + + switch (protocolType.toLocaleLowerCase()) { + case 'cas': + icon = logoCASDefault; + break; + case 'oauth': + case 'oauth2': + icon = iconOAuthDefault; + break; + default: + icon = iconSSODefault; + break; } -`; -const Right = styled(Block)` - ${MixinFlexAlignCenter()} + return icon; +} - display: flex; - background-color: white; - grid-area: right; +const Login: FC = () => { + const history = useHistory(); + const [submitting, toggleSubmit] = useToggle(false); - @media screen and (max-width: 1000px) { - background: url(${logColorful}) top 24px left 32px no-repeat; - } -`; -const LoginForm = styled(Form)` - width: 360px; - - > .form-title { - margin-bottom: 24px; - font-size: 27px; - line-height: 36px; - } + const query = useRecoilQuery(userInfoQuery); + const setUserInfo = useSetRecoilState(userInfoQuery); + const [preference, setPreference] = useRecoilState(appPreference); + const [loginWayList, setLoginWayList] = 
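+  /* SSO login entries (CAS / OAuth) kept in Recoil; populated below by the
+     fetchLoginWayList query */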
useRecoilState(appLoginWayList); + const email = useRecoilValue(appEmailGetters); + const isBioland = useGetThemeBioland(); - > .ant-space { - display: flex; - } + const [isShowLoginWayList, setIsShowLoginWayList] = useState(false); - .ant-form-item { - margin-bottom: 32px; + const { primaryLogo } = useGetLogoSrc(); - &.ant-form-item-with-help { - margin-bottom: 8px; - } - } + const [urlState] = useUrlState(); - .ant-input-lg { - padding: 5.5px 0 !important; - font-size: 14px; - } + useQuery(['fetchSysEmailGroup'], () => fetchSysEmailGroup(), { + retry: 2, + refetchOnWindowFocus: false, + onSuccess(data) { + const emailValue = data.data.value; + setPreference({ + ...preference, + sysEmailGroup: emailValue, + }); + }, + }); - .no-account { - margin-top: 16px; - color: var(--textColorSecondary); - font-size: 12px; - white-space: nowrap; - } + useQuery(['fetchLoginWayList'], () => fetchLoginWayList(), { + retry: 2, + refetchOnWindowFocus: false, + onSuccess(data) { + setLoginWayList(data.data || []); + }, + }); - > .checkboxItem { - margin-bottom: 0; - } -`; -const LoginFormButton = styled(Button)` - width: 100%; - height: 48px; - background-image: linear-gradient(270deg, #286af4 0%, #3e97fe 100%); -`; -const LoginFormCheckbox = styled(Checkbox)` - color: #7a8499; -`; + useEffect(() => { + const handler = async (event: MessageEvent<any>) => { + if (event.origin !== window.location.origin) return; -const Login: FC = () => { - const { t } = useTranslation(); - const history = useHistory(); - const [submitting, toggleSubmit] = useToggle(false); + if (!event.data || !event.data.ssoInfo) { + return; + } + const { ssoInfo } = event.data; - const query = useRecoilQuery(userInfoQuery); - const setUserInfo = useSetRecoilState(userInfoQuery); + const { ssoName, ssoType, ssoCode, codeKey } = ssoInfo; + + // If ssoName,ssoType,ssoCode existed, then call login api with code and sso_name + if (ssoName && ssoType && ssoCode && codeKey) { + try { + global_login( + { + [codeKey]: ssoCode, + }, + { + sso_name: ssoName, + }, + setUserInfo, + history, + ); + } catch (error) { + Message.error(error.message); + } + } + }; + + window.addEventListener('message', handler, false); + + if (urlState.auto_login) { + triggerSSOAuto(urlState.auto_login); + } + + return () => { + window.removeEventListener('message', handler); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); if (query.data?.id) { return <Redirect to="/projects" />; } return ( - <Layout> - <Left /> - - <Right> - <LoginForm + <main className={styled.login_layout}> + <section + className={styled.login_left_block} + style={{ + backgroundImage: `url(${primaryLogo}), url(${loginIllustration})`, + }} + /> + <section className={isBioland ? styled.login_bioland_right_block : styled.login_right_block}> + <Form + className={styled.login_form} size="large" name="login-form" initialValues={{ remember: true }} - onFinish={onSubmit as any} + onSubmit={onSubmit} > - <h3 className="form-title">{t('login.form_title')}</h3> + <h3 className={styled.form_title}>账号登录</h3> - <Form.Item - name="username" - rules={[{ required: true, message: t('login.username_message') }]} - > - <Input allowClear name="username" placeholder={t('login.username_placeholder')} /> + <Form.Item field="username" rules={[{ required: true, message: '请输入用户名!' 
}]}> + <Input allowClear name="username" placeholder="用户名/邮箱" /> </Form.Item> - <Form.Item - name="password" - rules={[{ required: true, message: t('login.password_message') }]} - > + <Form.Item field="password" rules={[{ required: true, message: '请输入密码!' }]}> <Input.Password allowClear - placeholder={t('login.password_placeholder')} - iconRender={(visible) => (visible ? <EyeTwoTone /> : <EyeInvisibleOutlined />)} + placeholder="密码" + // iconRender={(visible) => (visible ? <EyeTwoTone /> : <EyeInvisibleOutlined />)} /> </Form.Item> - <Form.Item name="remember" valuePropName="checked" className="checkboxItem"> - <LoginFormCheckbox>{t('login.remember')}</LoginFormCheckbox> + <Form.Item field="remember" className="checkboxItem"> + <Checkbox defaultChecked={true}>记住登录状态</Checkbox> </Form.Item> <Form.Item> - <LoginFormButton loading={submitting} size="large" type="primary" htmlType="submit"> - {t('login.button')} - </LoginFormButton> + <Button + className={styled.login_button} + loading={submitting} + size="large" + type="primary" + htmlType="submit" + > + 登陆 + </Button> - <p className="no-account"> - {t('login.no_account_tip', { - email: 'admin@fedlearner.com', - })} + <p className={styled.no_account}> + {`如无账号,请发送申请邮件至管理员邮箱 ${email || 'privacy_computing@bytedance.com'}`} </p> </Form.Item> - </LoginForm> - </Right> - </Layout> + + {loginWayList && loginWayList.length > 0 && ( + <span className={styled.other_login_way_text} onClick={onOtherLoginWayClick}> + {isShowLoginWayList ? '隐藏其他登陆方式' : '其他登陆方式'} + </span> + )} + {isShowLoginWayList && ( + <div className={styled.login_way_layout}> + {loginWayList.map((item) => { + return ( + <div + className={styled.login_way_item} + key={item.display_name} + onClick={() => { + onLoginWayClick(item); + }} + > + <img + src={item.icon_url || getDefaultLoginWayIcon(item.protocol_type)} + alt={item.display_name} + /> + <div>{item.display_name}</div> + </div> + ); + })} + </div> + )} + </Form> + </section> + </main> ); // -------- Handlers ------------ - async function onSubmit(payload: FedLoginFormData) { + function onOtherLoginWayClick() { + setIsShowLoginWayList((prevState) => !prevState); + } + + async function onSubmit(payload: any) { toggleSubmit(true); try { - payload.password = btoa(payload.password); - const { data } = await login(payload as FedLoginFormData); + payload.password = btoa(payload.password!); - store.set(LOCAL_STORAGE_KEYS.current_user, { - ...data.user, - access_token: data.access_token, - date: Date.now(), - }); - - setUserInfo(data.user); + global_login(payload, {}, setUserInfo, history); + } catch (error) { + Message.error(error.message); + } + toggleSubmit(false); + } - message.success(i18n.t('app.login_success')); + function onLoginWayClick(loginWayInfo: FedLoginWay) { + const protocolType = loginWayInfo.protocol_type; + const { authorize_url } = loginWayInfo[protocolType] || {}; + let authorizeUrl = authorize_url; - if (history.location.search) { - const from = new URLSearchParams(history.location.search).get('from'); - if (from) { - return history.push(decodeURIComponent(from) || '/projects'); - } + // If authorize_url is provided, just use authorize_url that had assembled in Back-end. + // Otherwise, assemble it in Front-end. 
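+      // For example, with hypothetical values cas_server_url = 'https://cas.example.com',
+      // login_route = '/cas/login' and service_url = 'https://console.example.com/sso',
+      // the assembled URL would be:
+      // 'https://cas.example.com/cas/login?service=https%3A%2F%2Fconsole.example.com%2Fsso'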
+ if (!authorizeUrl) { + if (protocolType && protocolType.toLowerCase() === 'cas') { + const { cas_server_url, login_route, service_url } = loginWayInfo[protocolType]; + authorizeUrl = `${cas_server_url}${login_route}?service=${encodeURIComponent(service_url)}`; } + } + if (authorizeUrl) { + window.open(authorizeUrl, '_self'); + } else { + Message.error('找不到即将跳转的 url 信息'); + } + } - history.push('/projects'); - } catch (error) { - message.error(error.message); + function triggerSSOAuto(auto_login: string) { + const SSOInfo = loginWayList.find((item) => item.name === auto_login); + if (SSOInfo) { + onLoginWayClick(SSOInfo); } - toggleSubmit(false); } }; +export async function global_login( + payload: FedLoginFormData, + queryParam: FedLoginQueryParamsData = {}, + setUserInfo: (userInfo: any) => void, + history: H.History, +) { + try { + const { data } = await login(payload as FedLoginFormData, queryParam); + store.set(LOCAL_STORAGE_KEYS.current_user, { + ...data.user, + access_token: data.access_token, + date: Date.now(), + }); + setUserInfo(data.user); + Message.success('登录成功'); + //TODO: the from history will be lost when login by SSO and can not jump to the page before login; + const from = parseSearch(history.location).get('from'); + history.push(decodeURIComponent(from || '/projects')); + } catch (error) { + const { sso_name } = queryParam; + if (sso_name) { + Message.error(`${sso_name}登录验证失败!`); + } else { + Message.error(error + ''); + } + } +} + export default Login; diff --git a/web_console_v2/client/src/views/LogsViewer/JobEvents/index.tsx b/web_console_v2/client/src/views/LogsViewer/JobEvents/index.tsx index 818d47c5b..63da7eeb9 100644 --- a/web_console_v2/client/src/views/LogsViewer/JobEvents/index.tsx +++ b/web_console_v2/client/src/views/LogsViewer/JobEvents/index.tsx @@ -3,8 +3,13 @@ import { useParams } from 'react-router-dom'; import { fetchJobEvents, fetchPeerJobEvents } from 'services/workflow'; import PrintLogs from 'components/PrintLogs'; -const PodLogs: FC = () => { - const params = useParams<{ side: string; jobIdOrK8sName: string; uuid?: string }>(); +const JobEvents: FC = () => { + const params = useParams<{ + side: string; + jobIdOrK8sName: string; + uuid?: string; + participantId?: string; + }>(); const isPeerSide = params.side === 'peer'; @@ -16,25 +21,25 @@ const PodLogs: FC = () => { /> ); - async function getLogs() { + async function getLogs(maxLines = 5000) { if (!params.jobIdOrK8sName) { return { data: ['Job ID or Name invalid!'] }; } if (isPeerSide) { - return fetchPeerJobEvents(params.uuid!, params.jobIdOrK8sName, { - maxLines: 500, + return fetchPeerJobEvents(params.uuid!, params.jobIdOrK8sName, params?.participantId ?? 
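+      /* fall back to participant id 0 when the route omits :participantId */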
0, { + maxLines, }).catch((error) => ({ data: [error.message], })); } return fetchJobEvents(params.jobIdOrK8sName, { - maxLines: 500, + maxLines, }).catch((error) => ({ data: [error.message], })); } }; -export default PodLogs; +export default JobEvents; diff --git a/web_console_v2/client/src/views/LogsViewer/JobLogs/index.tsx b/web_console_v2/client/src/views/LogsViewer/JobLogs/index.tsx index 5c6c1c9fa..f8e221ad0 100644 --- a/web_console_v2/client/src/views/LogsViewer/JobLogs/index.tsx +++ b/web_console_v2/client/src/views/LogsViewer/JobLogs/index.tsx @@ -3,24 +3,24 @@ import { useParams } from 'react-router-dom'; import { fetchJobLogs } from 'services/workflow'; import PrintLogs from 'components/PrintLogs'; -const PodLogs: FC = () => { +const JobLogs: FC = () => { const params = useParams<{ jobId: string }>(); return ( <PrintLogs logsFetcher={getLogs} refetchInterval={4000} queryKey={['getJob', params.jobId]} /> ); - async function getLogs() { + async function getLogs(maxLines = 5000) { if (!params.jobId) { return { data: ['Job ID invalid!'] }; } return fetchJobLogs(params.jobId, { - maxLines: 500, + maxLines, }).catch((error) => ({ data: [error.message], })); } }; -export default PodLogs; +export default JobLogs; diff --git a/web_console_v2/client/src/views/LogsViewer/ModelServingInstanceLogs/index.tsx b/web_console_v2/client/src/views/LogsViewer/ModelServingInstanceLogs/index.tsx new file mode 100644 index 000000000..48996a01e --- /dev/null +++ b/web_console_v2/client/src/views/LogsViewer/ModelServingInstanceLogs/index.tsx @@ -0,0 +1,50 @@ +import React, { FC } from 'react'; +import { useParams } from 'react-router-dom'; + +import { fetchModelServingInstanceLog_new } from 'services/modelServing'; + +import { useGetCurrentProjectId } from 'hooks'; + +import PrintLogs from 'components/PrintLogs'; + +const ModelServingInstanceLogs: FC = () => { + const projectId = useGetCurrentProjectId(); + const params = useParams<{ + modelServingId: string; + instanceName: string; + }>(); + + return ( + <PrintLogs + logsFetcher={getLogs} + refetchInterval={4000} + queryKey={['getJob', params.modelServingId, params.instanceName]} + /> + ); + + async function getLogs(tailLines = 5000) { + if (!params.modelServingId) { + return { data: ['Model serving ID invalid!'] }; + } + if (!params.instanceName) { + return { data: ['Instance name invalid!'] }; + } + if (!projectId) { + return { data: ['请选择工作区!'] }; + } + return fetchModelServingInstanceLog_new( + projectId!, + params.modelServingId, + params.instanceName, + { + tail_lines: tailLines, + }, + ).catch((error) => { + return { + data: [error.message], + }; + }); + } +}; + +export default ModelServingInstanceLogs; diff --git a/web_console_v2/client/src/views/LogsViewer/PodLogs/index.tsx b/web_console_v2/client/src/views/LogsViewer/PodLogs/index.tsx index 88c8aaf5e..3350068dd 100644 --- a/web_console_v2/client/src/views/LogsViewer/PodLogs/index.tsx +++ b/web_console_v2/client/src/views/LogsViewer/PodLogs/index.tsx @@ -4,7 +4,7 @@ import { fetchPodLogs } from 'services/workflow'; import PrintLogs from 'components/PrintLogs'; const PodLogs: FC = () => { - const params = useParams<{ jobId: string; podname: string }>(); + const params = useParams<{ jobId: string; podname: string; startTime?: string }>(); return ( <PrintLogs @@ -14,12 +14,15 @@ const PodLogs: FC = () => { /> ); - async function getLogs() { + async function getLogs(maxLines = 900) { if (!params.podname || !params.jobId) { return { data: ['Pod name or Job ID invalid!'] }; } - return 
fetchPodLogs(params.podname, params.jobId, { maxLines: 900 }).catch((error) => { + return fetchPodLogs(params.podname, params.jobId, { + maxLines, + startTime: params.startTime !== undefined ? 0 : undefined, + }).catch((error) => { return { data: [error.message] }; }); } diff --git a/web_console_v2/client/src/views/LogsViewer/SystemLogs/index.tsx b/web_console_v2/client/src/views/LogsViewer/SystemLogs/index.tsx index 2979f0804..3f1f86f98 100644 --- a/web_console_v2/client/src/views/LogsViewer/SystemLogs/index.tsx +++ b/web_console_v2/client/src/views/LogsViewer/SystemLogs/index.tsx @@ -1,7 +1,7 @@ import React, { FC, useEffect, useState } from 'react'; import PrintLogs from 'components/PrintLogs'; import { fetchPodNameList, fetchSystemLogs } from 'services/system'; -import { Tabs } from 'antd'; +import { Tabs } from '@arco-design/web-react'; import { useQuery } from 'react-query'; const { TabPane } = Tabs; @@ -26,7 +26,7 @@ const SystemLogs: FC = () => { > {podNameList?.map((podName) => { return ( - <TabPane tab={podName} key={podName}> + <TabPane title={podName} key={podName}> <PrintLogs logsFetcher={getLogs} refetchInterval={4000} @@ -40,11 +40,11 @@ const SystemLogs: FC = () => { </> ); - async function getLogs() { + async function getLogs(tailLines = 500) { if (!currentPodName) { return { data: ['Current pod name is undefined'] }; } - return fetchSystemLogs(500, currentPodName).catch((error) => { + return fetchSystemLogs(tailLines, currentPodName).catch((error) => { return { data: [error.message] }; }); } diff --git a/web_console_v2/client/src/views/LogsViewer/index.module.less b/web_console_v2/client/src/views/LogsViewer/index.module.less new file mode 100644 index 000000000..4c8c7aac3 --- /dev/null +++ b/web_console_v2/client/src/views/LogsViewer/index.module.less @@ -0,0 +1,5 @@ +.container{ + padding-left: 10px; + height: 100vh; + background-color: #292238; +} diff --git a/web_console_v2/client/src/views/LogsViewer/index.tsx b/web_console_v2/client/src/views/LogsViewer/index.tsx index d61acc30e..79864121b 100644 --- a/web_console_v2/client/src/views/LogsViewer/index.tsx +++ b/web_console_v2/client/src/views/LogsViewer/index.tsx @@ -1,25 +1,27 @@ import React, { FC } from 'react'; import { Route } from 'react-router-dom'; -import styled from 'styled-components'; import PodLogs from './PodLogs'; import JobLogs from './JobLogs'; import JobEvents from './JobEvents'; import SystemLogs from './SystemLogs'; +import ModelServingInstanceLogs from './ModelServingInstanceLogs'; + +import styles from './index.module.less'; -const Container = styled.main` - padding-left: 10px; - height: 100vh; - background-color: #292238; -`; const LogsViewer: FC = () => { return ( - <Container> + <main className={styles.container}> <Route path="/logs/job/:jobId" exact component={JobLogs} /> - <Route path="/logs/pod/:jobId/:podname" exact component={PodLogs} /> + <Route path="/logs/pod/:jobId/:podname/:startTime?" 
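+        /* :startTime is optional; when present in the URL, PodLogs requests
+           the log from the beginning (startTime: 0) */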
exact component={PodLogs} /> <Route path="/logs/job/events/:jobIdOrK8sName" exact component={JobEvents} /> <Route path="/logs/job/events/:side/:jobIdOrK8sName/:uuid" exact component={JobEvents} /> <Route path="/logs/system" exact component={SystemLogs} /> - </Container> + <Route + path="/logs/model-serving/:modelServingId/:instanceName" + exact + component={ModelServingInstanceLogs} + /> + </main> ); }; diff --git a/web_console_v2/client/src/views/ModelCenter/InstanceInfo.tsx b/web_console_v2/client/src/views/ModelCenter/InstanceInfo.tsx new file mode 100644 index 000000000..80bf85eee --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/InstanceInfo.tsx @@ -0,0 +1,107 @@ +import React, { CSSProperties } from 'react'; +import { Table } from '@arco-design/web-react'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { useQuery } from 'react-query'; +import { fetchJobById } from 'services/workflow'; +import { useRecoilValue } from 'recoil'; +import { projectState } from 'stores/project'; +import StateIndicator from 'components/StateIndicator'; +import { formatTimestamp } from 'shared/date'; +import { Pod, PodState } from 'typings/job'; +import { getPodState } from 'views/Workflows/shared'; +import { Link } from 'react-router-dom'; + +type ColumnOptions = { + id: ID; + jobId: ID; +}; + +const getColumns = (options: ColumnOptions): ColumnProps[] => { + return [ + { + dataIndex: 'name', + title: '实例 ID', + width: 400, + }, + { + dataIndex: 'state', + title: '运行状态', + filters: [ + PodState.SUCCEEDED, + PodState.RUNNING, + PodState.FAILED, + PodState.PENDING, + PodState.FAILED_AND_FREED, + PodState.SUCCEEDED_AND_FREED, + PodState.UNKNOWN, + ].map((state) => { + const { text } = getPodState({ state } as Pod); + return { + text, + value: state, + }; + }), + onFilter: (state, record: Pod) => { + return record?.state === state; + }, + render(state, record: Pod) { + return <StateIndicator {...getPodState(record)} />; + }, + }, + { + dataIndex: 'creation_timestamp', + title: '创建时间', + width: 200, + render(value) { + return formatTimestamp(value); + }, + }, + { + key: 'operate', + title: '操作', + render(_, record: Pod) { + return ( + <Link target={'_blank'} to={`/logs/pod/${options.jobId}/${record.name}`}> + 查看日志 + </Link> + ); + }, + }, + ]; +}; + +const PAGE_SIZE = 10; + +const InstanceInfo: React.FC<{ id: ID; jobId: ID; style?: CSSProperties }> = ({ + id, + jobId, + style, +}) => { + const selectedProject = useRecoilValue(projectState); + const projectId = selectedProject.current?.id; + const { data } = useQuery( + ['workflow', '/jobs/:job_id'], + () => fetchJobById(jobId as number).then((res) => res.data.pods), + { + enabled: Boolean(projectId), + }, + ); + + return ( + <> + <Table + rowKey="name" + data={data} + style={{ marginTop: 20, ...style }} + className="custom-table custom-table-left-side-filter" + columns={getColumns({ + id, + jobId, + })} + pagination={!data || data.length <= PAGE_SIZE ? 
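+          /* hide the pager entirely when one page is enough */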
false : { pageSize: PAGE_SIZE }} + /> + </> + ); +}; + +export default InstanceInfo; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/CreateForm/index.module.less b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/CreateForm/index.module.less new file mode 100644 index 000000000..b177dbf8b --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/CreateForm/index.module.less @@ -0,0 +1,36 @@ +.page_section_card { + position: relative; + :global(.arco-card-body) { + min-height: calc( + 100vh - var(--pageHeaderHeight) - var(--headerHeight) - var(--contentOuterPadding) * 2 + ); + background-color: inherit; + } +} +.title_text_large { + .title_text(16px, 0px, 0px); +} +.title_text_small { + .title_text(14px, 40px, 10px); +} +.title_text(@fontSize: 14px,@marginTop: 0px,@marginBottom: 0px) { + margin-top: @marginTop; + margin-bottom: @marginBottom; + padding-right: 16px; + text-align: right; + font-weight: 600; + font-size: @fontSize; + color: var(--color-text-1); +} +.small_text { + font-size: 12px; + color: var(--color-text-2); +} +.form_container { + width: 600px; + margin: 0 auto; + font-size: 12px; +} +.submit_btn_container { + width: 140px; +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/CreateForm/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/CreateForm/index.tsx new file mode 100644 index 000000000..93484cb5d --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/CreateForm/index.tsx @@ -0,0 +1,881 @@ +import React, { useEffect, useMemo, useRef, useState } from 'react'; +import { useQuery, useMutation, useQueries } from 'react-query'; +import { generatePath, useHistory, useParams } from 'react-router'; +import { + Button, + Form, + FormItemProps, + Input, + Space, + Select, + Message, + Card, + Spin, + Typography, + Checkbox, + Tooltip, + Tag, +} from '@arco-design/web-react'; +import { IconInfoCircle, IconQuestionCircle } from '@arco-design/web-react/icon'; +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; +import ResourceConfig from 'components/ResourceConfig'; +import DoubleSelect from 'components/DoubleSelect'; + +import { + fetchModelJob_new, + fetchModelDetail_new, + fetchPeerModelJobDetail_new, + updateModelJob, + createModelJob, +} from 'services/modelCenter'; + +import routes, { ModelEvaluationCreateParams, ModelEvaluationListParams } from '../../routes'; +import { ModelJobType, ResourceTemplateType } from 'typings/modelCenter'; +import { ModelJob } from 'typings/modelCenter'; +import { + ALGORITHM_TYPE_LABEL_MAPPER, + Avatar, + getConfigInitialValues, + getConfigInitialValuesByDefinition, + hydrateModalGlobalConfig, +} from '../../shared'; +import { cloneDeep, omit } from 'lodash-es'; +import { Dataset, DatasetKindLabel } from 'typings/dataset'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import { useRecoilQuery } from 'hooks/recoil'; +import { + nnHorizontalEvalTemplateDetailQuery, + nnTemplateDetailQuery, + treeTemplateDetailQuery, +} from 'stores/modelCenter'; +import { EnumAlgorithmProjectType, Algorithm } from 'typings/algorithm'; +import { WorkflowTemplate } from 'typings/workflow'; +import { stringifyComplexDictField } from 'shared/formSchema'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantName, + useGetCurrentProjectParticipantId, + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, +} from 'hooks'; +import DatasesetSelect from 
'components/NewDatasetSelect'; +import { OptionInfo } from '@arco-design/web-react/es/Select/interface'; +import { fetchDatasetDetail } from 'services/dataset'; + +import styles from './index.module.less'; +import { fetchAlgorithmByUuid } from 'services/algorithm'; +import WhichAlgorithm from 'components/WhichAlgorithm'; + +const formLayout = { + labelCol: { + span: 4, + }, + wrapperCol: { + span: 20, + }, +}; + +enum Fields { + name = 'name', + comment = 'comment', + modelId = 'model_id', + algorithmType = 'algorithm_type', + datasetId = 'dataset_id', + config = 'config', +} + +enum ModelJobFields { + GROUP_KEY = 'model_group_id', + ITEM_KEY = 'dataset_id', +} + +const algorithmTypeOptions = [ + { + label: ALGORITHM_TYPE_LABEL_MAPPER[EnumAlgorithmProjectType.TREE_VERTICAL], + value: 'TREE_VERTICAL', + }, + { + label: ALGORITHM_TYPE_LABEL_MAPPER[EnumAlgorithmProjectType.NN_VERTICAL], + value: 'NN_VERTICAL', + }, + { + label: ALGORITHM_TYPE_LABEL_MAPPER[EnumAlgorithmProjectType.NN_HORIZONTAL], + value: 'NN_HORIZONTAL', + }, +]; + +const MODEL_JOB_TYPE_FIELD = 'model_job_type'; +const ALGORITHM_ID_FIELD = 'algorithm_id'; +const MODEL_JOB_ID_FIELD = 'eval_model_job_id'; +const disabledFieldWhenEdit = [ + Fields.algorithmType, + Fields.modelId, + MODEL_JOB_TYPE_FIELD, + MODEL_JOB_ID_FIELD, + Fields.name, +]; + +const formConfig: Record<Fields, Partial<FormItemProps>> = { + [Fields.name]: { + label: '名称', + field: Fields.name, + rules: [ + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + { required: true, message: '请输入名称' }, + ], + }, + [Fields.comment]: { + label: '描述', + field: Fields.comment, + rules: [{ maxLength: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }], + }, + [Fields.config]: { + label: '资源模板', + field: Fields.config, + rules: [{ required: true }], + }, + [Fields.algorithmType]: { + label: '类型', + field: Fields.algorithmType, + rules: [{ required: true }], + initialValue: algorithmTypeOptions[0].value, + }, + + [Fields.modelId]: { + label: '模型', + field: Fields.modelId, + rules: [ + { + required: true, + message: '请选择模型', + validator: (val, cb) => { + const hasValue = Boolean( + val?.[ModelJobFields.GROUP_KEY] && val?.[ModelJobFields.ITEM_KEY], + ); + cb(!hasValue ? 
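+          /* Arco custom validator contract: call cb(message) to reject,
+             cb(undefined) to pass */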
'请选择模型' : undefined); + }, + }, + ], + }, + [Fields.datasetId]: { + label: '数据集', + field: Fields.datasetId, + rules: [{ required: true, message: '请选择数据集' }], + }, +}; + +const resourceConfigList = [ + 'master_replicas', + 'master_cpu', + 'master_mem', + 'ps_replicas', + 'ps_cpu', + 'ps_mem', + 'worker_replicas', + 'worker_cpu', + 'worker_mem', +]; + +type Props = { + job?: ModelJob; + jobType: ModelJobType; + createReq: (data: any) => Promise<any>; + patchReq: (jobId: ID, data: any) => Promise<any>; +}; + +const CreateForm: React.FC<Props> = ({ job, createReq, patchReq, jobType }) => { + const history = useHistory(); + const params = useParams<ModelEvaluationCreateParams & ModelEvaluationListParams>(); + const [form] = Form.useForm(); + const projectId = useGetCurrentProjectId(); + const myPureDomainName = useGetCurrentPureDomainName(); + const participantName = useGetCurrentProjectParticipantName(); + const participantList = useGetCurrentProjectParticipantList(); + const participantId = useGetCurrentProjectParticipantId(); + const isEdit = (params.action === 'edit' && params.id != null) || params.role === 'receiver'; + const isReceiver = params.role === 'receiver'; + const [selectedModelJob, setSelectedModelJob] = useState<Record<ModelJobFields, ID>>({ + [ModelJobFields.GROUP_KEY]: 0, + [ModelJobFields.ITEM_KEY]: params.id, + }); + const [modelJobIsOldVersion, setModelJobIsOldVersion] = useState<boolean>(false); + + const selectedDatasetRef = useRef<Dataset>({} as Dataset); + + const { data: nnTreeTemplateDetail } = useRecoilQuery(treeTemplateDetailQuery); + const { data: nnHorizontalEvalTemplateDetail } = useRecoilQuery( + nnHorizontalEvalTemplateDetailQuery, + ); + const { data: nnVerticalTemplateDetailQuery } = useRecoilQuery(nnTemplateDetailQuery); + + const relativeModel = useQuery( + ['model-evaluation-relative-model', job?.model_id], + () => fetchModelDetail_new(projectId!, job?.model_id!).then((res) => res.data), + { + enabled: Boolean(projectId && job?.model_id), + onSuccess(res) { + if (!res.group_id) { + return; + } + setSelectedModelJob({ + ...selectedModelJob, + [ModelJobFields.GROUP_KEY]: res.group_id, + }); + const modelData = { + [ModelJobFields.GROUP_KEY]: res.group_id!, + [ModelJobFields.ITEM_KEY]: res.model_job_id!, + }; + form.setFieldValue(Fields.modelId, { ...modelData }); + }, + }, + ); + + const peerModelJobData = useQuery( + ['model-evaluation-peer-model-detail', modelJobIsOldVersion], + () => + fetchPeerModelJobDetail_new(projectId!, params.id, participantId!).then((res) => res.data), + { + enabled: Boolean(projectId && params.id && isEdit && modelJobIsOldVersion), + }, + ); + + const selectedModelJobDetail = useQuery( + ['model-evaluation-new-model-detail', selectedModelJob?.[ModelJobFields.ITEM_KEY]], + async () => { + const modelId = selectedModelJob?.[ModelJobFields.ITEM_KEY]; + if (!projectId || !modelId) { + return; + } + const jobDetail = await fetchModelJob_new(projectId, modelId); + return jobDetail.data; + }, + ); + + const createMutation = useMutation((value: any) => createReq(value), { + onError(err: any) { + Message.error(err.code === 409 ? 
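+      /* the backend answers 409 Conflict when a model job with the same
+         name already exists */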
'名称已存在' : err.message || err); + }, + onSuccess() { + Message.success('创建成功'); + history.replace( + generatePath(routes.ModelEvaluationList, { + module: params.module, + }), + ); + }, + }); + + const patchMutation = useMutation((value: any) => patchReq(params.id, value), { + onError(err: any) { + Message.error(err.message || err); + }, + onSuccess() { + Message.success('已授权模型评估,任务开始运行'); + history.replace( + generatePath(routes.ModelEvaluationList, { + module: params.module, + }), + ); + }, + }); + const algorithmDetailQueries = useQueries( + [...participantList.map((participant) => participant.pure_domain_name), myPureDomainName].map( + (pureDomain) => { + return { + queryKey: [ + 'fetch-algorithm-detail', + projectId, + selectedModelJobDetail?.data?.global_config?.global_config?.[pureDomain] + ?.algorithm_uuid!, + pureDomain, + ], + queryFn: async () => { + const res = await fetchAlgorithmByUuid( + projectId!, + selectedModelJobDetail?.data?.global_config?.global_config?.[pureDomain] + ?.algorithm_uuid!, + ); + return { [pureDomain]: res.data }; + }, + + retry: 2, + enabled: Boolean( + projectId && + selectedModelJobDetail?.data?.global_config?.global_config?.[pureDomain] + ?.algorithm_uuid, + ), + refetchOnWindowFocus: false, + }; + }, + ), + ); + const algorithmDetail = useMemo(() => { + let algorithmMap: Record<string, Algorithm> = {}; + algorithmDetailQueries.forEach((item) => { + const algorithmValue = item.data as { [key: string]: Algorithm }; + algorithmMap = { + ...algorithmMap, + ...algorithmValue, + }; + }); + return algorithmMap; + }, [algorithmDetailQueries]); + + useEffect(() => { + const data = job; + if (!data) { + return; + } + job?.dataset_id && + fetchDatasetDetail(job.dataset_id).then( + (detail) => { + selectedDatasetRef.current = detail.data; + }, + () => { + selectedDatasetRef.current.id = job?.dataset_id as ID; + }, + ); + + form.setFieldsValue({ + [Fields.name]: data.name, + [Fields.comment]: data.comment, + [Fields.datasetId]: data.dataset_id, + [Fields.algorithmType]: data.algorithm_type, + }); + }, [job, form]); + + useEffect(() => { + if (!relativeModel.data) { + return; + } + + const selectedModelJob = { + [ModelJobFields.GROUP_KEY]: relativeModel.data?.group_id!, + [ModelJobFields.ITEM_KEY]: relativeModel.data?.model_job_id!, + }; + setSelectedModelJob(selectedModelJob); + form.setFieldValue(Fields.modelId, { ...selectedModelJob }); + }, [relativeModel.data, form]); + + useEffect(() => { + if (!peerModelJobData.data || !modelJobIsOldVersion) { + return; + } + const resource_config = getConfigInitialValues( + peerModelJobData.data?.config!, + resourceConfigList, + ); + form.setFieldValue(Fields.config, { ...form.getFieldsValue().config, ...resource_config }); + }, [peerModelJobData, form, modelJobIsOldVersion]); + + useEffect(() => { + if (!job?.global_config?.global_config) { + return; + } + const globalConfig = job.global_config.global_config; + const myResourceConfig = getConfigInitialValuesByDefinition( + globalConfig?.[myPureDomainName]?.variables, + resourceConfigList, + ); + const participantResourceConfig: Record<string, any> = {}; + + participantList.forEach((participant) => { + participantResourceConfig[participant.pure_domain_name] = getConfigInitialValuesByDefinition( + globalConfig?.[participant.pure_domain_name]?.variables, + resourceConfigList, + ); + }); + form.setFieldValue(Fields.config, { ...form.getFieldsValue().config, ...myResourceConfig }); + form.setFieldValue('resource_config', { + ...form.getFieldsValue().resource_config, + 
...participantResourceConfig, + }); + }, [form, job?.global_config?.global_config, myPureDomainName, participantList]); + + useEffect(() => { + if (isEdit) { + setModelJobIsOldVersion(!job?.global_config?.global_config); + return; + } + if (!selectedModelJob.dataset_id || selectedModelJobDetail.isFetching) { + setModelJobIsOldVersion(false); + return; + } + setModelJobIsOldVersion(!selectedModelJobDetail.data?.global_config?.global_config); + }, [ + isEdit, + job?.global_config?.global_config, + selectedModelJob.dataset_id, + selectedModelJobDetail.data?.global_config?.global_config, + selectedModelJobDetail.isFetching, + ]); + return ( + <> + {params.role === 'receiver' ? ( + <Card style={{ marginBottom: 20 }} bordered={false}> + <Space> + <Avatar /> + <div> + {!job ? ( + <Spin /> + ) : ( + <p className={styles.title_text_large}> + {participantName} + {params.module === 'model-evaluation' + ? `向您发起「${job.name}」的模型评估授权申请` + : `向您发起「${job.name}」的离线预测授权申请`} + </p> + )} + <small className={styles.small_text}> + <IconInfoCircle style={{ color: 'var(--color-text-3)', fontSize: 14 }} />{' '} + {`合作方均同意授权时,${ + params.module === 'model-evaluation' ? '评估' : '预测' + }任务将自动运行`} + </small> + </div> + </Space> + </Card> + ) : null} + <Card className={styles.page_section_card} bordered={false}> + <Form + className={`${styles.form_container} form-content`} + form={form} + {...formLayout} + onSubmit={modelJobIsOldVersion ? submitWrapper : submitWrapper_new} + disabled={createMutation.isLoading || patchMutation.isLoading} + onValuesChange={(changedValue: any) => { + if (changedValue[Fields.modelId] != null) { + setSelectedModelJob(changedValue[Fields.modelId]); + } + }} + > + <section className="form-section"> + <h3>基本信息</h3> + <Form.Item {...formConfig[Fields.name]}> + {isReceiver ? ( + <Typography.Text bold={true}> {job?.name} </Typography.Text> + ) : ( + <Input placeholder={'请输入名称'} /> + )} + </Form.Item> + <Form.Item {...formConfig[Fields.comment]}> + <Input.TextArea placeholder={'最多为 200 个字符'} /> + </Form.Item> + </section> + <section className="form-section"> + <h3>{params.module === 'model-evaluation' ? '评估配置' : '预测配置'}</h3> + <Form.Item {...formConfig[Fields.algorithmType]}> + {isReceiver ? ( + <Typography.Text bold={true}> + { + ALGORITHM_TYPE_LABEL_MAPPER[ + job?.algorithm_type || EnumAlgorithmProjectType.TREE_VERTICAL + ] + } + </Typography.Text> + ) : ( + <Select + options={algorithmTypeOptions} + showSearch={true} + onChange={() => { + form.setFieldValue(Fields.modelId, { + [ModelJobFields.GROUP_KEY]: undefined, + [ModelJobFields.ITEM_KEY]: undefined, + }); + }} + filterOption={(inputValue, option) => + option.props.children.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0 + } + /> + )} + </Form.Item> + <Form.Item + {...formConfig[Fields.modelId]} + shouldUpdate={shouldModelJobDoubleSelectUpdate} + > + {isReceiver ? 
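+            /* receivers see the read-only model name; senders pick a model
+               group and job through the linked DoubleSelect */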
( + <Typography.Text bold={true}>{selectedModelJobDetail?.data?.name} </Typography.Text> + ) : ( + (values: any) => ( + <DoubleSelect.ModelJobGroupSelect + type={values[Fields.algorithmType]} + leftField={ModelJobFields.GROUP_KEY} + rightField={ModelJobFields.ITEM_KEY} + onLeftOptionsEmpty={() => { + form.setFields({ + [Fields.modelId]: { + error: { + message: '目标模型不存在,请联系合作伙伴重新选择', + }, + }, + }); + }} + isClearRightValueAfterLeftSelectChange={true} + /> + ) + )} + </Form.Item> + {selectedModelJob.dataset_id && + selectedModelJobDetail?.data?.algorithm_type && + selectedModelJobDetail.data.algorithm_type !== + EnumAlgorithmProjectType.TREE_VERTICAL && + !modelJobIsOldVersion && ( + <Form className={'form-content'} labelCol={{ span: 6 }} wrapperCol={{ span: 18 }}> + <Form.Item label={'我方算法'} style={{ marginBottom: 0 }}> + <Typography.Text bold={true}> + <WhichAlgorithm + id={algorithmDetail?.[myPureDomainName]?.id} + uuid={algorithmDetail?.[myPureDomainName]?.uuid} + participantId={algorithmDetail?.[myPureDomainName]?.participant_id as ID} + /> + </Typography.Text> + </Form.Item> + {participantList.map((participant) => { + return ( + <Form.Item + key={participant.pure_domain_name} + label={`「${participant.name}」算法`} + > + <Typography.Text bold={true}> + <WhichAlgorithm + id={algorithmDetail?.[participant.pure_domain_name]?.id} + uuid={algorithmDetail?.[participant.pure_domain_name]?.uuid} + participantId={ + algorithmDetail?.[participant.pure_domain_name]?.participant_id as ID + } + /> + </Typography.Text> + </Form.Item> + ); + })} + </Form> + )} + <Form.Item {...formConfig[Fields.datasetId]}> + {isReceiver && !modelJobIsOldVersion ? ( + <Space> + <Typography.Text bold={true}> + {selectedDatasetRef.current.name || ''} + </Typography.Text> + <Tag color="arcoblue">结果</Tag> + </Space> + ) : ( + <DatasesetSelect + lazyLoad={{ + page_size: 10, + enable: true, + }} + kind={DatasetKindLabel.PROCESSED} + onChange={async (_, option) => { + const dataset = (option as OptionInfo)?.extra; + selectedDatasetRef.current = dataset; + }} + /> + )} + </Form.Item> + </section> + <section className="form-section"> + <h3>{'资源配置'}</h3> + <Form.Item + {...formConfig[Fields.config]} + shouldUpdate={(pre, cur) => pre[Fields.algorithmType] !== cur[Fields.algorithmType]} + disabled={isReceiver && !modelJobIsOldVersion} + > + {(val: any) => ( + <ResourceConfig + key={myPureDomainName} + algorithmType={val[Fields.algorithmType]} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={false} + localDisabledList={['master.replicas']} + collapsedOpen={false} + /> + )} + </Form.Item> + </section> + {!modelJobIsOldVersion && + participantList.map((participant) => { + return ( + <section className="form-section" key={participant.pure_domain_name}> + <h3>{`「${participant.name}」资源配置`}</h3> + <Form.Item + field={`resource_config.${participant.pure_domain_name}`} + label="资源配置" + rules={[{ required: true }]} + shouldUpdate={(pre, cur) => + pre[Fields.algorithmType] !== cur[Fields.algorithmType] + } + disabled={isReceiver && !modelJobIsOldVersion} + > + {(val: any) => ( + <ResourceConfig + algorithmType={val[Fields.algorithmType]} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={false} + localDisabledList={['master.replicas']} + collapsedOpen={false} + /> + )} + </Form.Item> + </section> + ); + })} + <Space size="large"> + <Button + className={styles.submit_btn_container} + onClick={() => form.submit()} + size="large" + type="primary" + loading={createMutation.isLoading || 
patchMutation.isLoading} + > + {params.role === 'receiver' + ? '确认授权' + : form.getFieldValue(Fields.algorithmType) !== + EnumAlgorithmProjectType.NN_HORIZONTAL + ? '提交并发送' + : '提交'} + </Button> + <ButtonWithModalConfirm + disabled={ + createMutation.isLoading || + patchMutation.isLoading || + !selectedModelJobDetail.isFetched + } + isShowConfirmModal={true} + size="large" + onClick={() => { + history.push( + generatePath(routes.ModelEvaluationList, { + module: params.module, + }), + ); + }} + title={params.action === 'edit' ? `确认要退出编辑「${job?.name}」?` : '确认要退出?'} + content={'退出后,当前所填写的信息将被清空。'} + > + 取消 + </ButtonWithModalConfirm> + {!modelJobIsOldVersion && + form.getFieldValue(Fields.algorithmType) !== + EnumAlgorithmProjectType.NN_HORIZONTAL && ( + <Form.Item + field="metric_is_public" + triggerPropName="checked" + style={{ marginBottom: 0 }} + > + <Checkbox style={{ width: 200, fontSize: 12 }}> + 共享模型评估结果 + <Tooltip content="共享后,合作伙伴能够查看本方评估结果"> + <IconQuestionCircle /> + </Tooltip> + </Checkbox> + </Form.Item> + )} + </Space> + </Form> + </Card> + </> + ); + + async function submitWrapper(value: any) { + const algorithmType = selectedModelJobDetail.data?.algorithm_type || job?.algorithm_type; + const selectedModelJob = selectedModelJobDetail.data; + const selectedDataset = selectedDatasetRef.current; + + if (!algorithmType || !selectedDataset || !selectedModelJob) { + return; + } + + let template: WorkflowTemplate | null = null; + + switch (algorithmType) { + case EnumAlgorithmProjectType.NN_HORIZONTAL: + template = nnHorizontalEvalTemplateDetail; + break; + case EnumAlgorithmProjectType.NN_VERTICAL: + template = nnVerticalTemplateDetailQuery; + break; + case EnumAlgorithmProjectType.TREE_VERTICAL: + template = nnTreeTemplateDetail; + break; + } + + const payload = { + ...value, + [Fields.algorithmType]: algorithmType, + [MODEL_JOB_TYPE_FIELD]: jobType, + [ALGORITHM_ID_FIELD]: selectedModelJob?.algorithm_id, + [MODEL_JOB_ID_FIELD]: selectedModelJob?.id, // handle DoubleSelect value + config: createPayloadWithWorkflowTemplate( + value, + selectedModelJob, + selectedDataset, + cloneDeep(template), + ), + }; + + if (isEdit) { + patchMutation.mutate(omit(payload, disabledFieldWhenEdit)); + return; + } + createMutation.mutate(omit(payload, Fields.modelId)); + } + + async function submitWrapper_new(value: any) { + if (!projectId) { + Message.info('请选择工作区!'); + return; + } + const algorithmType = selectedModelJobDetail.data?.algorithm_type || job?.algorithm_type; + const selectedModelJob = selectedModelJobDetail.data; + const selectedDataset = selectedDatasetRef.current; + + if (!algorithmType || !selectedDataset || !selectedModelJob) { + return; + } + + const globalConfig: Record<string, any> = {}; + const coordinatorGlobalConfig = selectedModelJob.global_config?.global_config[myPureDomainName]; + const baseConfig = getConfigInitialValuesByDefinition( + coordinatorGlobalConfig?.variables!, + coordinatorGlobalConfig?.variables.map((item) => item.name), + ); + globalConfig[myPureDomainName] = { + algorithm_uuid: coordinatorGlobalConfig?.algorithm_uuid, + algorithm_parameter: coordinatorGlobalConfig?.algorithm_parameter, + variables: hydrateModalGlobalConfig(coordinatorGlobalConfig?.variables!, { + ...baseConfig, + ...value.config, + }), + }; + participantList.forEach((participant) => { + const pureDomainName = participant.pure_domain_name; + const participantGlobalConfig = selectedModelJob.global_config?.global_config[pureDomainName]; + const participantBaseConfig = 
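+      // For each peer, start from the variable defaults declared in its job
+      // definition, then overlay the resource values entered in this form.
+      // Illustrative shape (the domain name here is hypothetical):
+      //   globalConfig['peer-domain'] = { algorithm_uuid, algorithm_parameter, variables }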
getConfigInitialValuesByDefinition(
+        participantGlobalConfig?.variables!,
+        participantGlobalConfig?.variables.map((item) => item.name),
+      );
+      globalConfig[pureDomainName] = {
+        algorithm_uuid: participantGlobalConfig?.algorithm_uuid,
+        algorithm_parameter: participantGlobalConfig?.algorithm_parameter,
+        variables: hydrateModalGlobalConfig(participantGlobalConfig?.variables!, {
+          ...participantBaseConfig,
+          ...value.resource_config[pureDomainName],
+        }),
+      };
+    });
+    const payload = {
+      name: value.name,
+      comment: value.comment,
+      dataset_id: value.dataset_id,
+      [Fields.algorithmType]: algorithmType,
+      [MODEL_JOB_TYPE_FIELD]: jobType,
+      [MODEL_JOB_ID_FIELD]: selectedModelJob?.id,
+      model_id: selectedModelJob?.output_models[0].id,
+      global_config: {
+        dataset_uuid: selectedDatasetRef.current?.uuid,
+        model_uuid: selectedModelJob?.output_models[0].uuid,
+        global_config: globalConfig,
+      },
+    };
+
+    if (isEdit) {
+      try {
+        // Await the authorization update so failures land in the catch block
+        // instead of showing the success toast prematurely.
+        await updateModelJob(projectId!, params.id, {
+          metric_is_public: value.metric_is_public,
+          comment: value.comment,
+          auth_status: 'AUTHORIZED',
+        });
+        Message.success('授权成功!所有合作伙伴授权完成后任务开始运行');
+        history.replace(
+          generatePath(routes.ModelEvaluationList, {
+            module: params.module,
+          }),
+        );
+      } catch (err: any) {
+        Message.error(err.message);
+      }
+      patchMutation.mutate(
+        omit(
+          { ...payload, metric_is_public: value.metric_is_public },
+          disabledFieldWhenEdit.concat(['global_config', 'dataset_id']),
+        ),
+      );
+      return;
+    }
+    try {
+      const res = await createModelJob(projectId!, payload);
+      if (value.metric_is_public) {
+        // Await the follow-up visibility update before reporting success.
+        await updateModelJob(projectId!, res.data.id, { metric_is_public: true });
+      }
+      Message.success('创建成功');
+      history.replace(
+        generatePath(routes.ModelEvaluationList, {
+          module: params.module,
+        }),
+      );
+    } catch (err: any) {
+      Message.error(err.message);
+    }
+  }
+};
+
+function shouldModelJobDoubleSelectUpdate(prev: any, cur: any) {
+  return (
+    prev[Fields.algorithmType] !== cur[Fields.algorithmType] ||
+    prev[Fields.modelId]?.[ModelJobFields.GROUP_KEY] !==
+      cur[Fields.modelId]?.[ModelJobFields.GROUP_KEY] ||
+    prev[Fields.modelId]?.[ModelJobFields.ITEM_KEY] !==
+      cur[Fields.modelId]?.[ModelJobFields.ITEM_KEY]
+  );
+}
+
+function createPayloadWithWorkflowTemplate(
+  formValue: any,
+  relativeJob: ModelJob,
+  dataset: Dataset,
+  template: WorkflowTemplate | null,
+) {
+  if (!template) {
+    return {};
+  }
+
+  const { variables: tplVariables } = template.config.job_definitions[0];
+  const { variables: jobVariables } = relativeJob.config.job_definitions[0];
+  const varInForm = formValue.config;
+
+  for (let i = 0; i < tplVariables.length; i++) {
+    const variable = tplVariables[i];
+    const { name } = variable;
+
+    if (varInForm.hasOwnProperty(variable.name)) {
+      variable.value = varInForm[variable.name];
+    } else {
+      if (name === 'data_source' && dataset.data_source) {
+        variable.value = dataset.data_source;
+      } else if (name === 'data_path' && dataset.path) {
+        variable.value = dataset.path;
+      } else {
+        const jobVariable = jobVariables.find((v) => v.name === name);
+        switch (name) {
+          case 'mode':
+            variable.value = 'eval';
+            break;
+          case 'load_model_name':
+            variable.value = relativeJob.job_name;
+            break;
+          default:
+            if (jobVariable) {
+              tplVariables[i] = { ...jobVariable };
+            }
+            break;
+        }
+      }
+    }
+  }
+
+  const processedTypedValueConfig = stringifyComplexDictField(template);
+  return processedTypedValueConfig.config;
+}
+
+export default CreateForm; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ListTable/index.tsx 
b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ListTable/index.tsx new file mode 100644 index 000000000..d1a3e39b8 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ListTable/index.tsx @@ -0,0 +1,241 @@ +import React, { useMemo } from 'react'; +import { PaginationProps, Table, TableProps, Space } from '@arco-design/web-react'; +import { generatePath, Link } from 'react-router-dom'; +import StateIndicator from 'components/StateIndicator'; +import { ModelJobAuthStatus, ModelJobState } from 'typings/modelCenter'; +import { ModelJob } from 'typings/modelCenter'; +import { + ColumnsGetterOptions, + algorithmTypeFilters, + roleFilters, + statusFilters, + getModelJobStatus, + MODEL_JOB_STATUS_MAPPER, + resetAuthInfo, + AUTH_STATUS_TEXT_MAP, +} from '../../shared'; +import { formatTimestamp } from 'shared/date'; +import MoreActions from 'components/MoreActions'; +import routeMaps from '../../routes'; +import { CONSTANTS } from 'shared/constants'; +import AlgorithmType from 'components/AlgorithmType'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import ProgressWithText from 'components/ProgressWithText'; +import { useGetCurrentProjectParticipantList, useGetCurrentPureDomainName } from 'hooks'; + +const staticPaginationProps: Partial<PaginationProps> = { + pageSize: 10, + defaultCurrent: 0, + showTotal: true, + sizeCanChange: false, +}; + +export const getTableColumns = (options: ColumnsGetterOptions) => { + const cols = [ + { + title: options.nameFieldText, + dataIndex: 'name', + key: 'name', + ellipsis: true, + render: (_: any, record: ModelJob) => { + const name = record.name ? record.name : CONSTANTS.EMPTY_PLACEHOLDER; + return ( + <Link + to={generatePath(routeMaps.ModelEvaluationDetail, { + id: record.id, + module: options.module, + })} + > + {name} + </Link> + ); + }, + }, + { + title: '类型', + dataIndex: 'algorithm_type', + key: 'algorithm_type', + width: 200, + filteredValue: options.filterDropdownValues?.algorithm_type, + filters: algorithmTypeFilters.filters, + render(value: ModelJob['algorithm_type']) { + return <AlgorithmType type={value as EnumAlgorithmProjectType} />; + }, + }, + { + title: '授权状态', + dataIndex: 'auth_frontend_status', + key: 'auth_frontend_status', + width: 120, + render: (value: ModelJobAuthStatus, record: any) => { + const progressConfig = MODEL_JOB_STATUS_MAPPER?.[value]; + const authInfo = resetAuthInfo( + record.participants_info.participants_map, + options.participantList ?? [], + options.myPureDomainName ?? '', + ); + return ( + <ProgressWithText + status={progressConfig?.status} + statusText={progressConfig?.name} + percent={progressConfig?.status} + toolTipContent={ + [ModelJobAuthStatus.PART_AUTH_PENDING, ModelJobAuthStatus.SELF_AUTH_PENDING].includes( + value, + ) ? 
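+            // Show the per-participant authorization breakdown only while the
+            // job is still waiting on someone's approval.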
( + <> + {authInfo.map((item: any) => ( + <div key={item.name}>{`${item.name}: ${ + AUTH_STATUS_TEXT_MAP?.[item.authStatus] + }`}</div> + ))} + </> + ) : undefined + } + /> + ); + }, + }, + { + title: '运行状态', + dataIndex: 'status', + key: 'status', + name: 'status', + width: 200, + filteredValue: options.filterDropdownValues?.status, + filters: statusFilters.filters, + render: (name: any, record: any) => { + return ( + <StateIndicator + {...getModelJobStatus(record.status, { + ...options, + isHideAllActionList: true, + onLogClick: () => { + options.onLogClick && options.onLogClick(record); + }, + onRestartClick: () => { + options.onRestartClick && options.onRestartClick(record); + }, + })} + /> + ); + }, + }, + { + title: '创建者', + dataIndex: 'creator_username', + key: 'creator_username', + name: 'creator', + width: 200, + render(val: string) { + return val ?? CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + title: '发起方', + dataIndex: 'role', + name: 'role', + key: 'role', + width: 200, + filters: roleFilters.filters, + filterMultiple: false, + filteredValue: options.filterDropdownValues?.role, + render(role: string) { + return role === 'COORDINATOR' ? '本方' : '合作伙伴'; + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + key: 'created_at', + width: 200, + sorter(a: ModelJob, b: ModelJob) { + return a.created_at - b.created_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + ]; + if (!options.withoutActions) { + cols.push({ + title: '操作', + dataIndex: 'state', + key: 'operation', + name: 'operation', + width: 200, + fixed: 'right', + render: (state: ModelJobState, record: ModelJob) => { + const disabledTerminateOperate = state !== ModelJobState.RUNNING; + return ( + <Space> + <button + className="custom-text-button" + disabled={disabledTerminateOperate} + onClick={() => { + if (disabledTerminateOperate) return; + options.onStopClick && options.onStopClick(record); + }} + > + 终止 + </button> + <MoreActions + actionList={[ + { + label: '删除', + onClick: () => { + options.onDeleteClick && options.onDeleteClick(record); + }, + danger: true, + }, + ]} + /> + </Space> + ); + }, + } as any); + } + + return cols; +}; + +interface EvaluationTableProps extends Omit<TableProps, 'columns'>, ColumnsGetterOptions {} +const EvaluationTable: React.FC<EvaluationTableProps> = (props) => { + const { + module, + pagination = false, + onDeleteClick, + onStopClick, + nameFieldText, + filterDropdownValues = {}, + } = props; + const participantList = useGetCurrentProjectParticipantList(); + const myPureDomainName = useGetCurrentPureDomainName(); + const paginationProps = useMemo(() => { + return { + ...staticPaginationProps, + ...(typeof pagination === 'object' ? 
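+      // Merge caller-supplied pagination options over the static defaults.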
pagination : {}), + }; + }, [pagination]); + return ( + <Table + rowKey="uuid" + className="custom-table-left-side-filter" + scroll={{ + x: 1500, + }} + columns={getTableColumns({ + module, + onDeleteClick, + onStopClick, + nameFieldText, + filterDropdownValues, + participantList, + myPureDomainName, + })} + pagination={paginationProps} + {...props} + /> + ); +}; + +export default EvaluationTable; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationCreate/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationCreate/index.tsx new file mode 100644 index 000000000..17f1b6b35 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationCreate/index.tsx @@ -0,0 +1,77 @@ +import React from 'react'; +import { generatePath, useHistory, useParams } from 'react-router'; +import { ModelJobCreateFormData, ModelJobPatchFormData } from 'typings/modelCenter'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import * as service from 'services/modelCenter'; +import CreateForm from '../CreateForm'; +import { useRecoilValue } from 'recoil'; +import { projectState } from 'stores/project'; +import routeMap, { + ModelEvaluationListParams, + ModelEvaluationCreateParams, + ModelEvaluationModuleType, +} from '../../routes'; +import { useQuery } from 'react-query'; + +const Create: React.FC = () => { + const history = useHistory(); + const params = useParams<ModelEvaluationCreateParams & ModelEvaluationListParams>(); + const selectedProject = useRecoilValue(projectState); + const project_id = selectedProject.current?.id; + const { data: job } = useQuery( + [params.id, params.action], + () => service.fetchModelJob_new(project_id!, params.id).then((res) => res.data), + { + enabled: params.action === 'edit' && Boolean(project_id), + }, + ); + + return ( + <SharedPageLayout + contentWrapByCard={false} + title={ + <BackButton + isShowConfirmModal={true} + modalClassName="custom-modal" + title={params.action === 'edit' ? `确认要退出编辑「${job?.name}」?` : '确认要退出?'} + content={'退出后,当前所填写的信息将被清空。'} + onClick={() => { + history.push( + generatePath(routeMap.ModelEvaluationList, { + module: params.module, + }), + ); + }} + > + {params.module === ModelEvaluationModuleType.Evaluation ? '模型评估' : '离线预测'} + </BackButton> + } + centerTitle={`${params.role === 'receiver' ? '授权' : '创建'}${ + params.module === ModelEvaluationModuleType.Evaluation ? '评估' : '预测' + }`} + > + <CreateForm + job={job} + jobType={ + params.module === ModelEvaluationModuleType.Evaluation ? 
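+          // The same create form serves both modules; only the job type sent
+          // to the backend differs.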
'EVALUATION' : 'PREDICTION' + } + createReq={createReqWrapper} + patchReq={patchReqWrapper} + /> + </SharedPageLayout> + ); + + function createReqWrapper(config: ModelJobCreateFormData) { + if (!project_id) { + return Promise.reject('请选择工作区'); + } + return service.createModelJob_new(project_id, config); + } + + function patchReqWrapper(jobId: ID, config: ModelJobPatchFormData) { + return service.updateModelJob_new(project_id!, jobId, config); + } +}; + +export default Create; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationDetail/index.less b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationDetail/index.less new file mode 100644 index 000000000..d1979c549 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationDetail/index.less @@ -0,0 +1,35 @@ +.padding-container-card{ + padding: 20px; + padding-bottom: 0px; + .eval-name{ + margin-bottom: 0px; + font-size: 16px; + height: 24px; + font-weight: 600; + } + .eval-comment{ + font-size: 12px; + line-height: 1.2; + color: var(--textColorSecondary); + } + .eval-section-title{ + display: block; + margin-bottom: 5px; + } +} +.pop-title{ + color: #4e5969; +} +.pop-content{ + color: #1d2129; +} +.styled-link{ + display: inline-block; + font-weight: 400; + font-size: 12px; + line-height: 20px; + margin-bottom: 12px; +} +.padding-top-card{ + padding-top: 0px; +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationDetail/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationDetail/index.tsx new file mode 100644 index 000000000..cf4f3e97a --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationDetail/index.tsx @@ -0,0 +1,308 @@ +import React, { useState } from 'react'; +import { generatePath, Redirect, useHistory, useParams } from 'react-router'; +import { Link } from 'react-router-dom'; +import { Grid, Tabs, Space, Typography, Message, Popover, Button } from '@arco-design/web-react'; +import { IconShareInternal } from '@arco-design/web-react/icon'; +import { useQuery } from 'react-query'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import GridRow from 'components/_base/GridRow'; +import MoreActions from 'components/MoreActions'; +import PropertyList from 'components/PropertyList'; +import StateIndicator from 'components/StateIndicator'; +import AlgorithmType from 'components/AlgorithmType'; +import CountTime from 'components/CountTime'; +import { getFullModelJobDownloadHref } from 'services/modelCenter'; +import { fetchModelDetail_new, fetchModelJob_new } from 'services/modelCenter'; +import { useGetCurrentProjectId } from 'hooks'; + +import { Avatar, deleteEvaluationJob, getModelJobStatus } from '../../shared'; +import routes, { ModelEvaluationDetailParams, ModelEvaluationDetailTab } from '../../routes'; +import ReportResult from '../../ReportResult'; +import InstanceInfo from '../../InstanceInfo'; +import WhichRole from '../WhichRole'; +import { formatTimestamp } from 'shared/date'; +import ResourceConfigTable from 'views/ModelCenter/ResourceConfigTable'; +import ModelJobDetailDrawer from 'views/ModelCenter/ModelJobDetailDrawer'; +import { TIME_INTERVAL, CONSTANTS } from 'shared/constants'; +import { ModelJobState, ModelJobStatus } from 'typings/modelCenter'; +import request from 'libs/request'; +import { isNNAlgorithm } from 'views/ModelCenter/shared'; + +import 
'./index.less'; + +const ModelEvaluation: React.FC = () => { + const params = useParams<ModelEvaluationDetailParams>(); + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const [metricIsPublic, setMetricIsPublic] = useState(false); + const detailQuery = useQuery( + ['model-valuation-detail-page-query', params.id, projectId], + () => fetchModelJob_new(projectId!, params.id).then((res) => res.data), + { + enabled: Boolean(projectId), + refetchInterval: TIME_INTERVAL.LIST, + onSuccess: (res) => { + const { metric_is_public } = res; + setMetricIsPublic(!!metric_is_public); + }, + }, + ); + + const { data: detail } = detailQuery; + const { data: relativeModelData } = useQuery( + ['model_valuation-detail-relative-model', detail?.model_id, projectId], + () => fetchModelDetail_new(projectId!, detail?.model_id!).then((res) => res.data), + { + enabled: Boolean(detail?.model_id && projectId), + }, + ); + + const isModelEvaluation = params.module === 'model-evaluation'; + const isJobRunning = detail?.state === ModelJobState.RUNNING; + + const propertyList = [ + { + label: '运行状态', + value: detail ? ( + <StateIndicator {...getModelJobStatus(detail.status, { isHideAllActionList: true })} /> + ) : null, + }, + { + label: '发起方', + value: <WhichRole job={detail} />, + }, + { + label: '创建者', + value: detail?.creator_username || '-', + }, + { + label: '模型', + value: relativeModelData?.model_job_id ? ( + <ModelJobDetailDrawer.Button + id={relativeModelData?.model_job_id} + text={relativeModelData?.name} + title={ + <Space> + {relativeModelData?.name} + <StateIndicator + tag={true} + {...getModelJobStatus(detail?.status as ModelJobStatus, { + isHideAllActionList: true, + })} + /> + </Space> + } + /> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + }, + { + label: '数据集', + value: + detail?.dataset_name ?? detail?.intersection_dataset_name ? ( + <Link to={`/datasets/processed/detail/${detail?.dataset_id}/dataset_job_detail`}> + {detail?.dataset_name ?? detail?.intersection_dataset_name} + </Link> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + }, + { + label: '资源配置', + value: detail && ( + <ResourceConfigTable.Button + job={detail} + popoverProps={{ position: 'bl', style: { maxWidth: 500, width: 500 } }} + /> + ), + }, + { + label: '创建时间', + value: detail?.created_at ? formatTimestamp(detail?.created_at) : CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + label: '开始时间', + value: detail?.started_at && formatTimestamp(detail.started_at), + }, + { + label: '结束时间', + value: + detail?.state === ModelJobState.COMPLETED && detail?.stopped_at + ? formatTimestamp(detail.stopped_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + label: '运行时长', + value: + detail?.started_at || detail?.stopped_at ? ( + <CountTime + time={ + isJobRunning + ? Math.floor(Date.now() / 1000) - (detail?.started_at ?? 0) + : (detail?.stopped_at ?? 0) - (detail?.started_at ?? 0) + } + isStatic={!isJobRunning} + /> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + }, + ]; + + const refreshModelJobDetail = () => { + detailQuery.refetch(); + }; + + return ( + <SharedPageLayout + cardPadding={0} + title={ + <BackButton onClick={() => goToListPage()}> + {isModelEvaluation ? 
'模型评估' : '离线预测'} + </BackButton> + } + > + <div className="padding-container-card"> + <Grid.Row align="center" justify="space-between"> + <GridRow gap="12" style={{ maxWidth: '85%' }}> + <Avatar data-name={detail?.name} /> + <div> + <Space> + <h3 className="eval-name">{detail?.name}</h3> + {detail && <AlgorithmType type={detail.algorithm_type} />} + </Space> + <small className="eval-comment">{detail?.comment}</small> + </div> + </GridRow> + + <GridRow> + <MoreActions + actionList={[ + { + label: '删除', + disabled: !projectId || !detail, + danger: true, + onClick: async () => { + if (projectId && detail) { + try { + const res = await deleteEvaluationJob( + projectId, + detail, + params.module, + ).then(); + if (res) { + goToListPage(true); + } + } catch (e) {} + } + }, + }, + ]} + /> + </GridRow> + </Grid.Row> + <div> + <PropertyList cols={5} colProportions={[1, 1, 1, 1, 1]} properties={propertyList} /> + </div> + </div> + {!params.tab && <Redirect to={getTabPath(ModelEvaluationDetailTab.Result)} />} + {params.module === 'model-evaluation' && ( + <> + <Tabs + defaultActiveTab={params.tab} + onChange={(tab) => history.push(getTabPath(tab))} + style={{ marginBottom: 0 }} + > + <Tabs.TabPane title={'评估结果'} key={ModelEvaluationDetailTab.Result} /> + <Tabs.TabPane title={'实例信息'} key={ModelEvaluationDetailTab.Info} /> + </Tabs> + <div className="padding-container-card padding-top-card"> + {params.tab === ModelEvaluationDetailTab.Result && ( + <ReportResult + onSwitch={refreshModelJobDetail} + metricIsPublic={metricIsPublic} + id={params.id} + isTraining={false} + isNNAlgorithm={detail ? isNNAlgorithm(detail?.algorithm_type) : false} + algorithmType={detail?.algorithm_type} + /> + )} + {params.tab === ModelEvaluationDetailTab.Info && detail?.job_id && ( + <> + <InstanceInfo id={params.id} jobId={detail?.job_id} /> + <Popover + trigger="hover" + position="br" + content={ + <> + <div className="pop-title">工作流</div> + <Link + className="styled-link" + to={`/workflow-center/workflows/${detail?.workflow_id}`} + > + 点击查看工作流 + </Link> + <div className="pop-title">工作流 ID</div> + <div className="pop-content">{detail?.workflow_id}</div> + </> + } + > + <Button size="mini" type="text"> + 更多信息 + </Button> + </Popover> + </> + )} + </div> + </> + )} + {params.module === 'offline-prediction' && detail?.job_id && ( + <div className="padding-container-card padding-top-card"> + <div> + <Typography.Text bold={true} className="custom-typography eval-section-title"> + 预测结果 + </Typography.Text> + <button className="custom-text-button" onClick={downloadDataset}> + <IconShareInternal /> + 结果数据集 + </button> + </div> + <div style={{ marginTop: 20 }}> + <Typography.Text bold={true} className="custom-typography eval-section-title"> + 实例信息 + </Typography.Text> + <InstanceInfo id={params.id} jobId={detail?.job_id} style={{ marginTop: 0 }} /> + </div> + </div> + )} + </SharedPageLayout> + ); + + function getTabPath(tab: string) { + return generatePath(routes.ModelEvaluationDetail, { + ...params, + tab: tab as ModelEvaluationDetailTab, + }); + } + + async function downloadDataset() { + try { + const tip = await request.download(getFullModelJobDownloadHref(projectId!, detail?.id!)); + tip && Message.info(tip); + } catch (error) { + Message.error(error.message); + } + } + + function goToListPage(isReplace = false) { + history[isReplace ? 
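+    // Use `replace` after a successful delete so the removed job's detail page
+    // cannot be revisited via the browser back button.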
'replace' : 'push']( + generatePath(routes.ModelEvaluationList, { + module: params.module, + }), + ); + } +}; + +export default ModelEvaluation; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationList/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationList/index.tsx new file mode 100644 index 000000000..55d91bbe6 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/ModelEvaluationList/index.tsx @@ -0,0 +1,187 @@ +import React, { FC, useMemo } from 'react'; +import { debounce } from 'lodash-es'; +import { useQuery } from 'react-query'; +import { generatePath, useHistory, useParams } from 'react-router'; +import { Button, Input, Message } from '@arco-design/web-react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import GridRow from 'components/_base/GridRow'; +import TodoPopover from 'components/TodoPopover'; +import { useGetCurrentProjectId, useTablePaginationWithUrlState, useUrlState } from 'hooks'; +import * as service from 'services/modelCenter'; +import { ModelJob, ModelJobQueryParams_new as ModelJobQueryParams } from 'typings/modelCenter'; +import { TIME_INTERVAL } from 'shared/constants'; + +import routesMap, { ModelEvaluationListParams } from '../../routes'; +import EvaluationTable from '../ListTable'; +import { + dangerConfirmWrapper, + deleteEvaluationJob, + FILTER_MODEL_JOB_OPERATOR_MAPPER, +} from '../../shared'; +import { IconPlus } from '@arco-design/web-react/icon'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { expression2Filter } from 'shared/filter'; + +type TProps = {}; +const { Search } = Input; +const List: FC<TProps> = function () { + const history = useHistory(); + const params = useParams<ModelEvaluationListParams>(); + const [urlState, setUrlState] = useUrlState<ModelJobQueryParams>({ + filter: filterExpressionGenerator( + { auth_status: ['AUTHORIZED'] }, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + ), + }); + const { urlState: pageInfo, paginationProps } = useTablePaginationWithUrlState(); + const projectId = useGetCurrentProjectId(); + const isModelEvaluation = params.module === 'model-evaluation'; + const listQuery = useQuery( + [urlState.keyword, pageInfo.page, pageInfo.pageSize, projectId, params.module, urlState.filter], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return service.fetchModelJobList_new(projectId, { + page: pageInfo.page, + page_size: pageInfo.pageSize, + types: params.module === 'offline-prediction' ? 'PREDICTION' : 'EVALUATION', + filter: urlState.filter || undefined, + }); + }, + { + enabled: Boolean(projectId), + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + keepPreviousData: true, + refetchOnWindowFocus: false, + onError(err: any) { + Message.error(err.message || err); + }, + }, + ); + const { isFetching, data } = listQuery; + const pagination = useMemo(() => { + return (data?.page_meta?.total_items || 0) <= paginationProps.pageSize + ? false + : { ...paginationProps, total: data?.page_meta?.total_items }; + }, [data?.page_meta?.total_items, paginationProps]); + return ( + <SharedPageLayout + title={isModelEvaluation ? '模型评估' : '离线预测'} + rightTitle={<TodoPopover.EvaluationModelNew module={params.module} />} + key={params.module} + > + <GridRow justify="space-between" align="center"> + <Button + type="primary" + className={'custom-operation-button'} + onClick={goToCreatePage} + icon={<IconPlus />} + > + {params.module === 'model-evaluation' ? 
'创建评估' : '创建预测'} + </Button> + + <Search + className={'custom-input'} + allowClear + placeholder={isModelEvaluation ? '输入评估任务名称' : '输入预测任务名称'} + defaultValue={urlState.keyword} + onChange={debounce((keyword) => { + const filter = expression2Filter(urlState.filter); + setUrlState((preState) => ({ + ...preState, + page: 1, + keyword, + filter: filterExpressionGenerator( + { ...filter, name: keyword, auth_status: ['AUTHORIZED'] }, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + ), + })); + }, 300)} + /> + </GridRow> + <EvaluationTable + className="custom-table custom-table-left-side-filter" + data={data?.data} + loading={isFetching} + module={params.module} + pagination={pagination} + filterDropdownValues={{ + algorithm_type: expression2Filter(urlState.filter).algorithm_type, + status: expression2Filter(urlState.filter).status, + role: expression2Filter(urlState.filter).role, + }} + nameFieldText={!isModelEvaluation ? '预测任务名称' : '评估任务名称'} + onChange={(_, sorter, filters, extra) => { + if (extra.action === 'filter') { + setUrlState((preState) => ({ + ...preState, + filter: filterExpressionGenerator( + { + algorithm_type: filters.algorithm_type, + status: filters.status, + role: filters.role, + name: urlState.keyword, + auth_status: ['AUTHORIZED'], + }, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + ), + page: 1, + })); + } + }} + onStopClick={(job: ModelJob) => { + return stopJob(job); + }} + onDeleteClick={(job: ModelJob) => { + return deleteJob(job); + }} + /> + </SharedPageLayout> + ); + + function goToCreatePage() { + history.push( + generatePath(routesMap.ModelEvaluationCreate, { + module: params.module, + role: 'sender', + action: 'create', + }), + ); + } + + async function stopJob(job: ModelJob) { + if (!projectId) { + throw new Error('请选择工作区'); + } + + dangerConfirmWrapper( + `确认要终止「${job.name}」?`, + '终止后,该评估任务将无法重新运行,请谨慎操作', + '终止', + async () => { + try { + await service.stopJob_new(projectId!, job.id); + Message.success('停止成功'); + listQuery.refetch(); + } catch (e) { + Message.error(e.message); + } + }, + ); + } + + async function deleteJob(job: ModelJob) { + if (!projectId) { + throw new Error('请选择工作区'); + } + try { + await deleteEvaluationJob(projectId, job, params.module); + listQuery.refetch(); + } catch (error: any) {} + } +}; + +export default List; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/WhichRole/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/WhichRole/index.tsx new file mode 100644 index 000000000..98a8552a1 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/WhichRole/index.tsx @@ -0,0 +1,13 @@ +import React from 'react'; +import { ModelJob } from 'typings/modelCenter'; +import { useGetCurrentProjectParticipantName } from 'hooks'; + +const WhichRole: React.FC<{ job?: ModelJob }> = ({ job }) => { + const participantName = useGetCurrentProjectParticipantName(); + if (!job) { + return null; + } + return <span>{job.role === 'PARTICIPANT' ? 
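+  // `role` is this side's role in the job: PARTICIPANT means the peer
+  // initiated it, anything else means it was created locally.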
participantName : '本方'}</span>; +}; + +export default WhichRole; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/index.tsx new file mode 100644 index 000000000..7958ddd59 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelEvaluation/index.tsx @@ -0,0 +1,26 @@ +import React from 'react'; +import { Route, useRouteMatch, useParams, generatePath, Redirect } from 'react-router'; +import List from './ModelEvaluationList'; +import Detail from './ModelEvaluationDetail'; +import routesMap, { ModelEvaluationListParams } from '../routes'; + +const ModelEvaluation: React.FC = () => { + const matched = useRouteMatch(); + const params = useParams<ModelEvaluationListParams>(); + + return ( + <> + <Route exact path={routesMap.ModelEvaluationList} component={List} /> + <Route exact path={routesMap.ModelEvaluationDetail} component={Detail} /> + {matched.path === routesMap.ModelEvaluation && matched.isExact ? ( + <Redirect + to={generatePath(routesMap.ModelEvaluationList, { + module: params.module, + })} + /> + ) : null} + </> + ); +}; + +export default ModelEvaluation; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelJobDetailDrawer.tsx b/web_console_v2/client/src/views/ModelCenter/ModelJobDetailDrawer.tsx new file mode 100644 index 000000000..bf7a22dcf --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelJobDetailDrawer.tsx @@ -0,0 +1,610 @@ +import React, { useState, useMemo } from 'react'; +import dayjs from 'dayjs'; +import { useQuery } from 'react-query'; +import { Link } from 'react-router-dom'; + +import { fetchModelJobDetail_new } from 'services/modelCenter'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, +} from 'hooks'; + +import { formatJSONValue } from 'shared/helpers'; +import { formatTimestamp } from 'shared/date'; +import { + ALGORITHM_TYPE_LABEL_MAPPER, + isNNAlgorithm, + isTreeAlgorithm, + TRAIN_ROLE, +} from 'views/ModelCenter/shared'; +import { CONSTANTS } from 'shared/constants'; + +import { Drawer, Popover, Table, Button, Space, Tabs, Tag } from '@arco-design/web-react'; +import { LabelStrong } from 'styles/elements'; +import PropertyList from 'components/PropertyList'; +import BackButton from 'components/BackButton'; +import CodeEditor from 'components/CodeEditor'; +import CountTime from 'components/CountTime'; +import StateIndicator from 'components/StateIndicator'; +import ReportResult from './ReportResult'; +import InstanceInfo from './InstanceInfo'; + +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; +import { WorkflowState } from 'typings/workflow'; +import AlgorithmDrawer from 'components/AlgorithmDrawer'; +import { fetchPodLogs, fetchJobById } from 'services/workflow'; +import { Pod, JobState } from 'typings/job'; +import WhichAlgorithm from 'components/WhichAlgorithm'; +import { ModelJob, ModelJobStatus } from 'typings/modelCenter'; +import ResourceConfigTable from './ResourceConfigTable'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { getDefaultVariableValue } from 'shared/modelCenter'; +import { getModelJobStatus, LABEL_MAPPER } from './shared'; + +import './index.less'; +import { fetchAlgorithmByUuid } from 'services/algorithm'; +import { fetchDataBatchById } from 'services/dataset'; + +const { TabPane } = Tabs; +interface Props extends DrawerProps { + /** Model job id */ + id: ID; + pod?: Pod; + showCodeEditorBackBtn?: boolean; + 
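+  /** Batch granularity of an auto-update job's dataset; drives the 小时级/天级 tag in the drawer title. */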
datasetBatchType?: 'day' | 'hour'; +} +type ButtonProps = Omit<Props, 'visible'> & { + text?: string; + btnDisabled?: boolean; +}; + +const getPropertyList = (algorithmType?: EnumAlgorithmProjectType) => { + if (!algorithmType) { + return []; + } + + return [ + { + text: '联邦类型', + key: 'algorithm_type', + render(_: any, job?: ModelJob) { + return ALGORITHM_TYPE_LABEL_MAPPER[job?.algorithm_type || 'NN_VERTICAL']; + }, + }, + isNNAlgorithm(algorithmType) + ? { + text: '算法', + render(value: any) { + const { + algorithmProjectId, + algorithmId, + config = [], + algorithmUuid, + algorithmProjectUuid, + participantId, + } = JSON.parse(value?.algorithm?.value || '{}'); + return ( + <AlgorithmDrawer.Button + algorithmProjectId={algorithmProjectId} + algorithmId={algorithmId} + parameterVariables={config} + algorithmProjectUuid={algorithmProjectUuid} + algorithmUuid={algorithmUuid} + participantId={participantId} + > + <button className="custom-text-button"> + <WhichAlgorithm + id={algorithmId} + uuid={algorithmUuid} + participantId={participantId} + /> + </button> + </AlgorithmDrawer.Button> + ); + }, + } + : { key: 'loss_type', text: '损失函数类型' }, + { + key: 'role', + text: '训练角色', + render(configMap: any) { + const role = configMap?.role?.value; + return role === TRAIN_ROLE.LEADER ? '标签方' : '特征方'; + }, + }, + { + text: '数据集', + render(_: any, job?: ModelJob) { + const datasetName: string | undefined = job?.dataset_name ?? job?.intersection_dataset_name; + return datasetName ? ( + <Link to={`/datasets/processed/detail/${job?.dataset_id}/dataset_job_detail`}> + {datasetName} + </Link> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ); + }, + }, + { + key: 'dataset_batch_name', + text: '数据批次', + render(configMap: any, job?: ModelJob) { + const datasetBatchName = configMap?.dataset_batch_name?.value; + return datasetBatchName && job?.data_batch_id ? ( + <Link to={`/datasets/processed/detail/${job?.dataset_id}/data_batch`}> + {datasetBatchName} + </Link> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ); + }, + }, + { + text: '资源配置', + render(_: any, job?: ModelJob) { + return ( + job && ( + <ResourceConfigTable.Button + job={job} + popoverProps={{ position: 'br', style: { maxWidth: 440, width: 440 } }} + /> + ) + ); + }, + }, + { + text: '参数配置', + render(_: any, job?: ModelJob) { + let keyList: string[] = []; + + switch (algorithmType) { + case EnumAlgorithmProjectType.NN_VERTICAL: + keyList = [ + 'image', + 'data_source', + 'epoch_num', + 'verbosity', + 'shuffle_data_block', + 'save_checkpoint_secs', + 'save_checkpoint_steps', + 'load_checkpoint_filename', + 'load_checkpoint_filename_with_path', + 'sparse_estimator', + 'load_model_name', + ]; + break; + case EnumAlgorithmProjectType.NN_HORIZONTAL: + keyList = ['epoch_num', 'verbosity', 'image', 'steps_per_sync', 'data_path']; + break; + case EnumAlgorithmProjectType.TREE_VERTICAL: + keyList = [ + 'learning_rate', + 'max_iters', + 'max_depth', + 'l2_regularization', + 'max_bins', + 'num_parallel', + // 高级参数 + 'image', + 'data_source', + 'file_ext', + 'file_type', + 'enable_packing', + 'ignore_fields', + 'cat_fields', + 'send_scores_to_follower', + 'send_metrics_to_follower', + 'verify_example_ids', + 'verbosity', + 'no_data', + 'label_field', + 'load_model_name', + 'load_model_path', + ]; + break; + } + + const textList = keyList.map((key) => { + const value = getDefaultVariableValue(job as any, key); + return { + key: LABEL_MAPPER[key], + value: value !== '' ? 
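+          // Variables without a configured value fall back to the shared empty
+          // placeholder so the parameter table never shows blank cells.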
value : CONSTANTS.EMPTY_PLACEHOLDER, + }; + }); + const table = ( + <Table + className="custom-table" + size="small" + showHeader={false} + columns={[ + { dataIndex: 'key', title: '' }, + { dataIndex: 'value', title: '' }, + ]} + scroll={{ + y: 500, + }} + border={false} + borderCell={false} + pagination={false} + data={textList} + /> + ); + return ( + <Popover + popupHoverStay={true} + content={table} + position="br" + className={'params-popover-padding'} + > + <button className="custom-text-button">{'查看'}</button> + </Popover> + ); + }, + }, + { + render(_: any, job?: ModelJob) { + return job?.started_at && formatTimestamp(job.started_at); + }, + key: 'started_at', + text: '开始时间', + }, + { + key: 'stopped_at', + text: '结束时间', + render(_: any, job?: ModelJob) { + return job?.stopped_at && formatTimestamp(job.stopped_at); + }, + }, + { + render(_: any, job?: ModelJob) { + if (!job) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + const { state, stopped_at, started_at } = job; + const { RUNNING, STOPPED, COMPLETED, FAILED } = WorkflowState; + const isRunning = state === RUNNING; + const isStopped = [STOPPED, COMPLETED, FAILED].includes(state); + let runningTime = 0; + + if (isRunning || isStopped) { + runningTime = isStopped ? stopped_at! - started_at! : dayjs().unix() - started_at!; + } + + return job ? <CountTime time={runningTime} isStatic={!isRunning} /> : 0; + }, + text: '运行时长', + }, + { + text: '输出模型', + render(_: any, job?: ModelJob) { + if (!job) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + + return job.output_models[0]?.name; + }, + }, + ]; +}; + +function ModelJobDetailDrawer({ + id, + visible, + pod, + onCancel, + showCodeEditorBackBtn = true, + title = '事件详情', + datasetBatchType, + ...restProps +}: Props) { + const projectId = useGetCurrentProjectId(); + const myPureDomainName = useGetCurrentPureDomainName(); + const participantList = useGetCurrentProjectParticipantList(); + const [selectParticipant, setSelectParticipant] = useState<string>(myPureDomainName); + const [metricIsPublic, setMetricIsPublic] = useState(false); + const modelJobDetailQuery = useQuery( + ['fetchModelJobDetail', id], + () => { + return fetchModelJobDetail_new(projectId!, id); + }, + { + enabled: Boolean(visible && id), + retry: 2, + refetchOnWindowFocus: false, + onSuccess: (res) => { + const { metric_is_public } = res.data; + setMetricIsPublic(!!metric_is_public); + }, + }, + ); + + const modelJobDetail = useMemo(() => { + return modelJobDetailQuery.data?.data; + }, [modelJobDetailQuery.data?.data]); + + const datasetBatchDetailQuery = useQuery( + ['fetchDatasetBatchDetail'], + () => fetchDataBatchById(modelJobDetail?.dataset_id!, modelJobDetail?.data_batch_id!), + { + enabled: Boolean(modelJobDetail?.dataset_id && modelJobDetail?.data_batch_id), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const datasetBatchDetail = useMemo(() => { + if (!modelJobDetail?.data_batch_id) { + return undefined; + } + return datasetBatchDetailQuery.data?.data; + }, [datasetBatchDetailQuery.data?.data, modelJobDetail?.data_batch_id]); + const isOldModelJob = useMemo(() => { + return !modelJobDetailQuery.data?.data.global_config; + }, [modelJobDetailQuery.data?.data.global_config]); + + const algorithmDetailQuery = useQuery( + [ + 'fetchAlgorithmDetail', + projectId, + modelJobDetail?.global_config?.global_config?.[selectParticipant]?.algorithm_uuid, + ], + () => + fetchAlgorithmByUuid( + projectId!, + modelJobDetail?.global_config?.global_config?.[selectParticipant]?.algorithm_uuid!, + ), + { + enabled: Boolean( + 
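+        // Fetch the algorithm only once both the project and the selected
+        // participant's algorithm_uuid are available.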
projectId && + modelJobDetail?.global_config?.global_config?.[selectParticipant]?.algorithm_uuid, + ), + refetchOnWindowFocus: false, + }, + ); + + const algorithmDetail = useMemo(() => { + return algorithmDetailQuery.data?.data; + }, [algorithmDetailQuery.data?.data]); + + const { data: jobInstanceDetail } = useQuery( + ['workflow', modelJobDetail?.job_id], + () => fetchJobById(modelJobDetail?.job_id as number).then((res) => res.data), + { + enabled: Boolean(projectId) && Boolean(modelJobDetail?.job_id), + }, + ); + const errorMessage = useMemo(() => { + return jobInstanceDetail?.state !== JobState.COMPLETED && + jobInstanceDetail?.error_message && + (jobInstanceDetail?.error_message?.app || + JSON.stringify(jobInstanceDetail?.error_message?.pods) !== '{}') + ? JSON.stringify(jobInstanceDetail.error_message) + : ''; + }, [jobInstanceDetail]); + + const { data: podLog } = useQuery( + ['model-detail-drawer-pod-log', pod?.name], + () => { + return fetchPodLogs(pod?.name!, modelJobDetail?.job_id!, { maxLines: 5000 }).then( + (res) => res.data, + ); + }, + { enabled: Boolean(pod?.name && modelJobDetail?.job_id), refetchOnWindowFocus: false }, + ); + const configValueMap: { [key: string]: any } = useMemo(() => { + if (!modelJobDetail) { + return {}; + } + + return modelJobDetail.config?.job_definitions?.[0].variables.reduce((acc, cur) => { + acc[cur.name] = { ...cur }; + return acc; + }, {} as { [key: string]: any }); + }, [modelJobDetail]); + + const displayedProps = useMemo(() => { + if (!modelJobDetail?.config) { + return []; + } + let valueMap: { [key: string]: any } = {}; + if (isOldModelJob) { + valueMap = configValueMap; + } else { + valueMap = + modelJobDetail?.global_config?.global_config?.[selectParticipant]?.variables.reduce( + (acc, cur) => { + acc[cur.name] = { ...cur }; + return acc; + }, + {} as { [key: string]: any }, + ) ?? {}; + const algorithmId = algorithmDetail?.id ? algorithmDetail?.id : null; + const participantId = algorithmDetail?.participant_id ? algorithmDetail?.participant_id : 0; + valueMap['algorithm'] = { + value: JSON.stringify({ + algorithmId: algorithmId, + algorithmUuid: algorithmDetail?.uuid, + algorithmProjectId: algorithmDetail?.algorithm_project_id, + algorithmProjectUuid: algorithmDetail?.algorithm_project_uuid, + participantId: participantId, + }), + }; + valueMap['dataset_batch_name'] = { + value: datasetBatchDetail?.name, + }; + } + + return [ + ...getPropertyList(modelJobDetail?.algorithm_type).map((item) => { + const { text, key, render } = item; + + return { + label: text ?? key ?? CONSTANTS.EMPTY_PLACEHOLDER, + value: + typeof render === 'function' + ? render(valueMap, modelJobDetail) + : key && valueMap[key] + ? valueMap[key].value + : CONSTANTS.EMPTY_PLACEHOLDER, + }; + }), + ]; + }, [ + modelJobDetail, + isOldModelJob, + configValueMap, + selectParticipant, + algorithmDetail?.id, + algorithmDetail?.participant_id, + algorithmDetail?.uuid, + algorithmDetail?.algorithm_project_id, + algorithmDetail?.algorithm_project_uuid, + datasetBatchDetail?.name, + ]); + + const refreshModelJobDetail = () => { + modelJobDetailQuery.refetch(); + }; + + function renderInfoLayout() { + return ( + <> + {isOldModelJob ? 
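+        // Legacy jobs carry no global_config, i.e. a single shared training
+        // config, so the per-participant tab bar is omitted.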
( + <LabelStrong isBlock={true}>训练配置</LabelStrong> + ) : ( + <Space> + <LabelStrong isBlock={true}>训练配置</LabelStrong> + <Tabs + className="custom-tabs" + type="text" + activeTab={selectParticipant} + onChange={setSelectParticipant} + > + <TabPane title={'本方'} key={myPureDomainName} /> + {participantList.map((participant) => { + return <TabPane title={participant.name} key={participant.pure_domain_name} />; + })} + </Tabs> + </Space> + )} + + <PropertyList properties={displayedProps} cols={4} /> + <ReportResult + onSwitch={refreshModelJobDetail} + metricIsPublic={metricIsPublic} + id={id} + title={'训练报告'} + algorithmType={modelJobDetail?.algorithm_type} + isNNAlgorithm={modelJobDetail ? isNNAlgorithm(modelJobDetail?.algorithm_type) : false} + hideConfusionMatrix={ + modelJobDetail && + isTreeAlgorithm(modelJobDetail.algorithm_type) && + configValueMap.loss_type === 'mse' + } + /> + <div className="left-container"> + <LabelStrong isBlock={true}>实例信息</LabelStrong> + <Popover + trigger="hover" + position="br" + content={ + <span> + <div className="pop-title">工作流</div> + <Link + className="styled-link" + to={`/workflow-center/workflows/${modelJobDetail?.workflow_id}`} + > + label_jump_to_workflow + </Link> + <div className="pop-title">工作流 ID</div> + <div className="pop-content">{modelJobDetail?.workflow_id}</div> + </span> + } + > + <Button className="right-button" size="mini" type="text"> + 更多信息 + </Button> + </Popover> + </div> + + {modelJobDetail?.job_id ? <InstanceInfo id={id} jobId={modelJobDetail?.job_id} /> : null} + </> + ); + } + function renderCodeEditorLayout() { + return ( + <> + {showCodeEditorBackBtn && ( + <div className="header"> + <BackButton onClick={onCancel}>返回</BackButton> + </div> + )} + <CodeEditor + language="json" + isReadOnly={true} + theme="grey" + height="calc(100vh - 119px)" // 55(drawer header height) + 16*2(content padding) + 32(header height) + value={formatJSONValue(podLog?.join('\n') ?? CONSTANTS.EMPTY_PLACEHOLDER)} + /> + </> + ); + } + + return ( + <Drawer + unmountOnExit={true} + placement="right" + title={ + <Space> + {modelJobDetail?.name} + <StateIndicator + tag={true} + tip={errorMessage} + position="bottom" + {...getModelJobStatus(modelJobDetail?.status as ModelJobStatus, { + isHideAllActionList: true, + })} + /> + {modelJobDetail?.auto_update && ( + <Tag color={datasetBatchType === 'hour' ? 'arcoblue' : 'purple'}> + {datasetBatchType === 'hour' ? '小时级' : '天级'} + </Tag> + )} + </Space> + } + closable={true} + width="1000px" + visible={visible} + onCancel={onCancel} + {...restProps} + > + <div className="drawer-content">{pod ? renderCodeEditorLayout() : renderInfoLayout()}</div> + </Drawer> + ); +} + +export function _Button({ text = '详情', btnDisabled, children, ...restProps }: ButtonProps) { + const [visible, setVisible] = useState(false); + return ( + <> + {children ? 
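+      // Children act as a custom trigger when provided; otherwise render the
+      // default text button with the given label.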
( + <span onClick={() => setVisible(true)}>{children}</span> + ) : ( + <button + disabled={btnDisabled} + type="button" + className="custom-text-button" + onClick={() => setVisible(true)} + > + {text} + </button> + )} + <ModelJobDetailDrawer visible={visible} {...restProps} onCancel={() => setVisible(false)} /> + </> + ); +} + +ModelJobDetailDrawer.Button = _Button; + +export default ModelJobDetailDrawer; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/Create/index.less b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Create/index.less new file mode 100644 index 000000000..8af4e3945 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Create/index.less @@ -0,0 +1,25 @@ +.steps-content{ + width: 288px; + margin: 0 auto 40px; +} + +.card{ + .arco-card-body{ + padding: 32px 40px; + } +} + +.form-content{ + max-width: 600px; + margin: 0 auto; + .form-section{ + margin-bottom: 20px; + overflow: hidden; + > h3{ + margin-bottom: 20px; + font-weight: 500; + font-size: 14px; + color: #1d252f; + } + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/Create/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Create/index.tsx new file mode 100644 index 000000000..d71d75912 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Create/index.tsx @@ -0,0 +1,1388 @@ +import React, { useEffect, useMemo, useRef, useState } from 'react'; +import { useHistory, useParams } from 'react-router'; +import { useQuery } from 'react-query'; + +import { MAX_COMMENT_LENGTH, validNamePattern } from 'shared/validator'; +import { + checkAlgorithmValueIsEmpty, + isTreeAlgorithm, + isNNAlgorithm, + isVerticalAlgorithm, + lossTypeOptions, +} from 'views/ModelCenter/shared'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantId, + useGetCurrentProjectParticipantName, + useGetCurrentParticipantPureDomainName, + useGetCurrentPureDomainName, + useIsFormValueChange, +} from 'hooks'; +import { to } from 'shared/helpers'; +import { + createModelJobGroup, + updateModelJobGroup, + updatePeerModelJobGroup, + fetchModelJobGroupDetail, + fetchPeerModelJobGroupDetail, + fetchModelJobDefinition, +} from 'services/modelCenter'; +import { + Avatar, + trainRoleTypeOptions, + algorithmTypeOptions, + treeBaseConfigList, + nnBaseConfigList, + getAdvanceConfigListByDefinition, + getTreeBaseConfigInitialValues, + getTreeAdvanceConfigInitialValues, + getConfigInitialValues, + getNNBaseConfigInitialValues, + getNNAdvanceConfigInitialValues, + getTreeBaseConfigInitialValuesByDefinition, + getNNBaseConfigInitialValuesByDefinition, + hydrateModalGlobalConfig, +} from '../../shared'; + +import { + Form, + Button, + Input, + Card, + Spin, + Select, + Message, + Space, + Steps, + Alert, + Switch, +} from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import BlockRadio from 'components/_base/BlockRadio'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import TitleWithIcon from 'components/TitleWithIcon'; +import DatasesetSelect from 'components/NewDatasetSelect'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import ConfigForm from 'components/ConfigForm'; +import ResourceConfig, { + MixedAlgorithmType, + Value as ResourceConfigValue, +} from 'components/ResourceConfig'; +import { LabelStrong } from 'styles/elements'; + +import routes from '../../routes'; + +import { + FederalType, + LossType, + 
ModelJobRole, + ResourceTemplateType, + TrainRoleType, + ModelJobDefinitionResult, +} from 'typings/modelCenter'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { AlgorithmType } from 'typings/modelCenter'; +import { Dataset, DatasetKindLabel, DataJobBackEndType } from 'typings/dataset'; +import { OptionInfo } from '@arco-design/web-react/es/Select/interface'; +import FormLabel from 'components/FormLabel'; +import ScheduleTaskSetter, { scheduleTaskValidator } from 'components/ScheduledTaskSetter'; +import { useToggle } from 'react-use'; +import { fetchDatasetDetail } from 'services/dataset'; +import AlgorithmSelect, { AlgorithmSelectValue } from 'components/AlgorithmSelect'; + +import './index.less'; + +const Step = Steps.Step; + +type TreeConfig = { + learning_rate: number | string; + max_iters: number | string; + max_depth: number | string; + l2_regularization: number | string; + max_bins: number | string; + num_parallel: number | string; + [key: string]: any; +}; +type NNConfig = { + epoch_num?: number | string; + verbosity?: number | string; + sparse_estimator?: string; + [key: string]: any; +}; + +export type BaseFormData = { + name?: string; + comment?: string; + federal_type?: FederalType; + algorithm_type?: AlgorithmType; + type?: EnumAlgorithmProjectType; + algorithm?: AlgorithmSelectValue; + role?: TrainRoleType | null; + loss_type?: LossType; + tree_config?: TreeConfig; + nn_config?: NNConfig; + dataset_id?: ID; + resource_config?: ResourceConfigValue; + cron_config?: string; + data_source_manual?: boolean; + custom_data_source?: string; +}; +export type FormData = { + [ModelJobRole.COORDINATOR]: BaseFormData; + [ModelJobRole.PARTICIPANT]: BaseFormData; +}; + +const baseInitialFormValues = { + role: null, + federal_type: FederalType.VERTICAL, + algorithm_type: AlgorithmType.TREE, + type: EnumAlgorithmProjectType.TREE_VERTICAL, + algorithm: { + algorithmId: undefined, + algorithmProjectId: undefined, + algorithmUuid: undefined, + config: [], + path: '', + }, + loss_type: LossType.LOGISTIC, + tree_config: { + learning_rate: 0.3, + max_iters: 10, + max_depth: 5, + l2_regularization: 1, + max_bins: 33, + num_parallel: 5, + }, + nn_config: { + epoch_num: undefined, + verbosity: undefined, + sparse_estimator: undefined, + }, + cron_config: '', + data_source_manual: false, + custom_data_source: '', +}; +const initialFormValues: FormData = { + [ModelJobRole.COORDINATOR]: baseInitialFormValues, + [ModelJobRole.PARTICIPANT]: baseInitialFormValues, +}; + +function getFieldKey(field: string, isParticipant = false) { + return `${isParticipant ? ModelJobRole.PARTICIPANT : ModelJobRole.COORDINATOR}.${field}`; +} + +function calcMixedAlgorithmType(federalType: FederalType, algorithmType: AlgorithmType) { + if (federalType === FederalType.HORIZONTAL) { + return algorithmType === AlgorithmType.TREE + ? EnumAlgorithmProjectType.TREE_HORIZONTAL + : EnumAlgorithmProjectType.NN_HORIZONTAL; + } else { + return algorithmType === AlgorithmType.TREE + ? 
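+    // Vertical counterpart of the horizontal mapping above:
+    // TREE -> TREE_VERTICAL, NN -> NN_VERTICAL.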
EnumAlgorithmProjectType.TREE_VERTICAL + : EnumAlgorithmProjectType.NN_VERTICAL; + } +} + +const Create: React.FC = () => { + const history = useHistory(); + const { id, action, role } = useParams<{ + role: 'sender' | 'receiver'; + action: 'create' | 'edit'; + id?: string; + }>(); + const [formInstance] = Form.useForm<FormData>(); + + const [datasetId, setDatasetId] = useState<ID>(''); + const [currentStep, setCurrentStep] = useState(1); + const [dataSourceManual, toggleDataSourceManual] = useToggle(false); + const [selectedAlgorithmType, setSelectedAlgorithmType] = useState<AlgorithmType>( + initialFormValues[ModelJobRole.COORDINATOR].algorithm_type || AlgorithmType.TREE, + ); + const [selectedFederalType, setSelectedFederalType] = useState<FederalType>( + initialFormValues[ModelJobRole.COORDINATOR].federal_type || FederalType.VERTICAL, + ); + const [algorithmOwner, setAlgorithmOwner] = useState<string>(''); + + const selectedDatasetRef = useRef<Dataset>(); + + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + const projectId = useGetCurrentProjectId(); + const participantId = useGetCurrentProjectParticipantId(); + const participantName = useGetCurrentProjectParticipantName(); + const myPureDomainName = useGetCurrentPureDomainName(); + const participantPureDomainName = useGetCurrentParticipantPureDomainName(); + + const isReceiver = role === 'receiver'; + const isEdit = action === 'edit'; + + const currentModelJobGroupQuery = useQuery( + ['fetchModelJobGroupDetail', projectId, id], + () => fetchModelJobGroupDetail(projectId!, id!), + { + enabled: (isEdit || (isReceiver && !isEdit)) && Boolean(projectId && id), + retry: 2, + refetchOnWindowFocus: false, + onSuccess(res) { + if (isReceiver && !isEdit) { + // get default dataset for coordinator + setDatasetId(res.data.dataset_id || res.data.intersection_dataset_id); + } + + if (!isEdit) return; + + const currentModelJobGroupData = res.data; + + let algorithmType = AlgorithmType.TREE; + const federalType = isVerticalAlgorithm( + currentModelJobGroupData.algorithm_type as EnumAlgorithmProjectType, + ) + ? FederalType.VERTICAL + : FederalType.HORIZONTAL; + let treeConfig = {} as any; + let nnConfig = {}; + let topLevelConfig: Record<any, any> = {}; + + if (isTreeAlgorithm(currentModelJobGroupData.algorithm_type as EnumAlgorithmProjectType)) { + algorithmType = AlgorithmType.TREE; + + const baseConfigInitialValues = getTreeBaseConfigInitialValues( + currentModelJobGroupData.config!, + ); + const advanceConfigInitialValues = getTreeAdvanceConfigInitialValues( + currentModelJobGroupData.config!, + ); + + treeConfig = { + ...baseConfigInitialValues, + ...advanceConfigInitialValues, + }; + + const topLevelInitialValues = getConfigInitialValues(currentModelJobGroupData.config!, [ + 'role', + 'loss_type', + ]); + + topLevelConfig = topLevelInitialValues; + } else { + algorithmType = AlgorithmType.NN; + const baseConfigInitialValues = getNNBaseConfigInitialValues( + currentModelJobGroupData.config!, + ); + const advanceConfigInitialValues = getNNAdvanceConfigInitialValues( + currentModelJobGroupData.config!, + ); + + nnConfig = { + ...baseConfigInitialValues, + ...advanceConfigInitialValues, + }; + + const topLevelInitialValues = getConfigInitialValues(currentModelJobGroupData.config!, [ + 'role', + 'algorithm', + ]); + + topLevelConfig = { + ...topLevelInitialValues, + algorithm: + isEdit && topLevelInitialValues.algorithm + ? 
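+                  // The algorithm selection is persisted as a JSON string in
+                  // the workflow variables; rehydrate it when entering edit mode.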
JSON.parse(topLevelInitialValues.algorithm) + : {}, + }; + setAlgorithmOwner( + topLevelConfig?.algorithm?.algorithmId === null || + topLevelConfig?.algorithm?.algorithmId === 0 + ? 'peer' + : 'self', + ); + } + setSelectedAlgorithmType(algorithmType); + setSelectedFederalType(federalType); + toggleDataSourceManual( + !currentModelJobGroupData.dataset_id && currentModelJobGroupData.dataset_id !== 0, + ); + formInstance.setFieldsValue({ + [ModelJobRole.COORDINATOR]: { + name: currentModelJobGroupData.name, + comment: currentModelJobGroupData.comment ?? '', + dataset_id: + currentModelJobGroupData.dataset_id || + currentModelJobGroupData.intersection_dataset_id, + federal_type: federalType, + algorithm_type: algorithmType, + type: calcMixedAlgorithmType(federalType, algorithmType), + tree_config: treeConfig, + nn_config: nnConfig, + ...topLevelConfig, + resource_config: getConfigInitialValues(currentModelJobGroupData.config!, [ + 'master_replicas', + 'master_cpu', + 'master_mem', + 'ps_replicas', + 'ps_cpu', + 'ps_mem', + 'worker_replicas', + 'worker_cpu', + 'worker_mem', + ]), + cron_config: currentModelJobGroupData.cron_config, + data_source_manual: !currentModelJobGroupData.dataset_id, + custom_data_source: treeConfig.data_source ?? '', + }, + }); + }, + }, + ); + const peerModelJobGroupQuery = useQuery( + ['fetchPeerModelJobGroupDetail', projectId, id, participantId], + () => fetchPeerModelJobGroupDetail(projectId!, id!, participantId!), + { + enabled: + currentModelJobGroupQuery.isSuccess && + Boolean(projectId && id && participantId) && + ((isReceiver && !isEdit) || (!isReceiver && isEdit)), + retry: 2, + refetchOnWindowFocus: false, + onSuccess: async (res) => { + const peerModelJobGroupData = res.data; + + const isPeerAuthorized = peerModelJobGroupData?.authorized ?? false; + + // If peer is not authorized, then we should not set peer form value. + if (!isReceiver && isEdit && !isPeerAuthorized) { + return; + } + + let algorithmType = AlgorithmType.TREE; + const federalType = isVerticalAlgorithm( + peerModelJobGroupData.algorithm_type as EnumAlgorithmProjectType, + ) + ? FederalType.VERTICAL + : FederalType.HORIZONTAL; + let treeConfig = {}; + let nnConfig = {}; + let topLevelConfig = {}; + + if (isTreeAlgorithm(peerModelJobGroupData.algorithm_type as EnumAlgorithmProjectType)) { + algorithmType = AlgorithmType.TREE; + + const baseConfigInitialValues = getTreeBaseConfigInitialValues( + peerModelJobGroupData.config!, + ); + const advanceConfigInitialValues = getTreeAdvanceConfigInitialValues( + peerModelJobGroupData.config!, + ); + + treeConfig = { + ...baseConfigInitialValues, + ...advanceConfigInitialValues, + }; + + const topLevelInitialValues = getConfigInitialValues(peerModelJobGroupData.config!, [ + 'role', + 'loss_type', + ]); + + topLevelConfig = { + ...topLevelInitialValues, + role: isReceiver + ? topLevelInitialValues.role === TrainRoleType.FEATURE + ? 
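+                  // as receiver, flip the peer's training role: FEATURE <-> LABEL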
TrainRoleType.LABEL + : TrainRoleType.FEATURE + : topLevelInitialValues.role, + }; + } else { + algorithmType = AlgorithmType.NN; + + const baseConfigInitialValues = getNNBaseConfigInitialValues( + peerModelJobGroupData.config!, + ); + const advanceConfigInitialValues = getNNAdvanceConfigInitialValues( + peerModelJobGroupData.config!, + ); + + nnConfig = { + ...baseConfigInitialValues, + ...advanceConfigInitialValues, + }; + + // when role = receiver and action = create: replace data_path and data_source + if (isReceiver && !isEdit && datasetId) { + const [datasetDetail, error] = await to(fetchDatasetDetail(datasetId)); + if (error) { + Message.error(error.message); + } else { + const dataPath = datasetDetail?.data?.path ?? ''; + const dataSource = datasetDetail?.data?.data_source ?? ''; + treeConfig = { + ...treeConfig, + data_path: dataPath, + data_source: dataSource, + }; + nnConfig = { + ...nnConfig, + data_path: dataPath, + data_source: dataSource, + }; + } + } + + const topLevelInitialValues = getConfigInitialValues(peerModelJobGroupData.config!, [ + 'role', + 'algorithm', + ]); + + topLevelConfig = { + ...topLevelInitialValues, + role: isReceiver + ? topLevelInitialValues.role === TrainRoleType.FEATURE + ? TrainRoleType.LABEL + : TrainRoleType.FEATURE + : topLevelInitialValues.role, + algorithm: + isEdit && topLevelInitialValues.algorithm + ? JSON.parse(topLevelInitialValues.algorithm) + : {}, + }; + } + + setSelectedAlgorithmType(algorithmType); + setSelectedFederalType(federalType); + + formInstance.setFieldsValue({ + [isReceiver ? ModelJobRole.COORDINATOR : ModelJobRole.PARTICIPANT]: { + name: peerModelJobGroupData.name, + federal_type: federalType, + algorithm_type: algorithmType, + type: calcMixedAlgorithmType(federalType, algorithmType), + tree_config: treeConfig, + nn_config: nnConfig, + dataset_id: datasetId, + ...topLevelConfig, + resource_config: getConfigInitialValues(peerModelJobGroupData.config!, [ + 'master_replicas', + 'master_cpu', + 'master_mem', + 'ps_replicas', + 'ps_cpu', + 'ps_mem', + 'worker_replicas', + 'worker_cpu', + 'worker_mem', + ]), + }, + }); + }, + }, + ); + + const algorithmProjectType = useMemo<EnumAlgorithmProjectType>(() => { + return calcMixedAlgorithmType(selectedFederalType, selectedAlgorithmType); + }, [selectedAlgorithmType, selectedFederalType]); + + const modelJobDefinitionQuery = useQuery(['fetchModelJobDefinition', algorithmProjectType], () => + fetchModelJobDefinition({ + model_job_type: 'TRAINING', + algorithm_type: algorithmProjectType || EnumAlgorithmProjectType.TREE_VERTICAL, + }), + ); + const modelJobDefinition = useMemo(() => { + return modelJobDefinitionQuery?.data?.data; + }, [modelJobDefinitionQuery]); + + const treeAdvancedFormItemList = useMemo(() => { + if (isNNAlgorithm(algorithmProjectType)) { + return []; + } else return getAdvanceConfigListByDefinition(modelJobDefinition?.variables!); + }, [algorithmProjectType, modelJobDefinition]); + + const nnAdvancedFormItemList = useMemo(() => { + if (isTreeAlgorithm(algorithmProjectType)) { + return []; + } else return getAdvanceConfigListByDefinition(modelJobDefinition?.variables!, true); + }, [algorithmProjectType, modelJobDefinition]); + + // Set tree_config/nn_config initialValues when create model job group + useEffect(() => { + if (isReceiver || isEdit) { + return; + } + + let datasetConfigValues: { + data_path?: string; + data_source?: string; + } = {}; + + if (selectedDatasetRef.current) { + datasetConfigValues = { + data_path: selectedDatasetRef.current?.path ?? 
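+          // a selected dataset may still lack a path; fall back to an empty string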
'', + data_source: selectedDatasetRef.current.data_source ?? '', + }; + } + + if (isTreeAlgorithm(algorithmProjectType)) { + if (!treeAdvancedFormItemList) { + return; + } + + const baseConfigInitialValues = getTreeBaseConfigInitialValuesByDefinition( + modelJobDefinition?.variables!, + ); + const advanceConfigConfigInitialValues = treeAdvancedFormItemList.reduce((acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, {} as any); + + formInstance.setFieldsValue({ + [ModelJobRole.COORDINATOR]: { + ...formInstance.getFieldValue(ModelJobRole.COORDINATOR), + tree_config: { + ...baseConfigInitialValues, + ...advanceConfigConfigInitialValues, + ...datasetConfigValues, + }, + }, + }); + } else { + if (!nnAdvancedFormItemList) { + return; + } + + const baseConfigInitialValues = getNNBaseConfigInitialValuesByDefinition( + modelJobDefinition?.variables!, + ); + const advanceConfigConfigInitialValues = nnAdvancedFormItemList.reduce((acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, {} as any); + + formInstance.setFieldsValue({ + [ModelJobRole.COORDINATOR]: { + ...formInstance.getFieldValue(ModelJobRole.COORDINATOR), + nn_config: { + epoch_num: 1, + verbosity: 1, + ...baseConfigInitialValues, + ...advanceConfigConfigInitialValues, + ...datasetConfigValues, + }, + }, + }); + } + }, [ + isReceiver, + isEdit, + formInstance, + algorithmProjectType, + treeAdvancedFormItemList, + modelJobDefinition, + nnAdvancedFormItemList, + ]); + + const isLoading = + modelJobDefinitionQuery.isFetching || + peerModelJobGroupQuery.isFetching || + currentModelJobGroupQuery.isFetching; + + const isPeerAuthorized = peerModelJobGroupQuery.data?.data?.authorized ?? false; + const isDisabled = isReceiver || (!isReceiver && isEdit); + const isShowStep = !isReceiver && isEdit && isPeerAuthorized; + const isShowCanNotEditPeerConfigAlert = !isReceiver && isEdit && !isPeerAuthorized; + + return ( + <SharedPageLayout + title={ + <BackButton isShowConfirmModal={isFormValueChanged} onClick={goBackToListPage}> + 模型训练 + </BackButton> + } + contentWrapByCard={false} + centerTitle={isEdit ? '编辑训练' : isReceiver ? '授权模型训练' : '创建训练'} + > + <Spin loading={isLoading}> + <div>{isReceiver ? renderReceiverLayout() : renderSenderLayout()}</div> + </Spin> + </SharedPageLayout> + ); + + function renderReceiverLayout() { + return ( + <> + {!isEdit && renderBannerCard()} + {renderContentCard()} + </> + ); + } + function renderSenderLayout() { + return <>{renderContentCard()}</>; + } + function renderBannerCard() { + const title = `${participantName}向您发起「${ + peerModelJobGroupQuery.data?.data?.name ?? '' + }」训练授权申请`; + return ( + <Card className="card" bordered={false} style={{ marginBottom: 20 }}> + <Space size="medium"> + <Avatar /> + <> + <LabelStrong fontSize={16}>{title ?? 
'....'}</LabelStrong> + <TitleWithIcon + title={ + '授权后,发起方可以运行模型训练并修改参与方的训练参数,训练指标将对所有参与方可见' + } + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </> + </Space> + </Card> + ); + } + function renderContentCard() { + return ( + <Card className="card" bordered={false}> + {isShowStep && ( + <Steps className="steps-content" current={currentStep} size="small"> + <Step title={'本侧配置'} /> + <Step title={'合作伙伴配置'} /> + </Steps> + )} + <Form<FormData> + className="form-content" + form={formInstance} + initialValues={initialFormValues} + onSubmit={onSubmit} + scrollToFirstError={true} + onValuesChange={onFormValueChange} + > + {isShowCanNotEditPeerConfigAlert && ( + <Alert content={'合作伙伴未授权,不能编辑合作伙伴配置'} style={{ marginBottom: 20 }} /> + )} + <div style={{ display: isShowStep && currentStep === 2 ? 'none' : 'initial' }}> + {renderBaseInfoConfig()} + {renderTrainConfig()} + {renderResourceConfig()} + </div> + <div style={{ display: isShowStep && currentStep === 2 ? 'initial' : 'none' }}> + {renderTrainConfig(true)} + {renderResourceConfig(true)} + </div> + {renderFooterButton()} + </Form> + </Card> + ); + } + + function renderBaseInfoConfig(isParticipant = false) { + const isHideRule = + (isParticipant && currentStep === 1) || (!isParticipant && currentStep === 2); + return ( + <section className="form-section"> + <h3>基本信息</h3> + <Form.Item + field={getFieldKey('name', isParticipant)} + label={'训练名称'} + rules={ + isHideRule + ? [] + : [ + { required: true, message: '必填项' }, + { + match: validNamePattern, + message: + '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ] + } + disabled={isDisabled} + > + <Input placeholder={'请填写'} /> + </Form.Item> + <Form.Item + field={getFieldKey('comment', isParticipant)} + label={'描述'} + rules={ + isHideRule + ? [] + : [ + { + maxLength: MAX_COMMENT_LENGTH, + message: '最多为 200 个字符', + }, + ] + } + > + <Input.TextArea placeholder={'最多为 200 个字符'} /> + </Form.Item> + <Form.Item + field={getFieldKey('federal_type', isParticipant)} + label={'联邦类型'} + hidden={true} + > + <Input /> + </Form.Item> + </section> + ); + } + function renderTrainConfig(isParticipant = false) { + const isHideRule = + (isParticipant && currentStep === 1) || (!isParticipant && currentStep === 2); + + return ( + <section className="form-section"> + <h3>训练配置</h3> + {!isParticipant && ( + <> + <Form.Item + field={getFieldKey('type', isParticipant)} + label={'类型'} + rules={isHideRule ? [] : [{ required: true, message: '必填项' }]} + disabled={isDisabled} + > + <Select + placeholder={'请选择'} + options={algorithmTypeOptions} + onChange={(value) => { + const [algorithmType, federalType] = value.split('_'); + setSelectedAlgorithmType(algorithmType.toLowerCase()); + setSelectedFederalType(federalType.toLowerCase()); + resetAlgorithmFormValue(isParticipant); + if (federalType.toLowerCase() === FederalType.HORIZONTAL) { + resetRoleFormValue(isParticipant); + } + formInstance.setFieldsValue({ + [ModelJobRole.COORDINATOR]: { + ...formInstance.getFieldValue(ModelJobRole.COORDINATOR), + dataset_id: undefined, + }, + }); + }} + /> + </Form.Item> + <Form.Item + field={getFieldKey('algorithm_type', isParticipant)} + label={'算法类型'} + hidden={true} + > + <Input /> + </Form.Item> + </> + )} + + {isTreeAlgorithm(algorithmProjectType) && renderTreeParams(isParticipant)} + {isNNAlgorithm(algorithmProjectType) && renderNNParams(isParticipant)} + <Form.Item + field={getFieldKey('role', isParticipant)} + label={'训练角色'} + rules={isHideRule ? 
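+            // the hidden step gets no rules, so its fields never block validation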
[] : [{ required: true, message: '必填项' }]} + disabled={isDisabled} + hidden={selectedFederalType === FederalType.HORIZONTAL} + > + <BlockRadio isCenter={true} options={trainRoleTypeOptions} /> + </Form.Item> + {!isParticipant && ( + <Form.Item + field={getFieldKey('data_source_manual', isParticipant)} + label={'手动输入数据源'} + disabled={isEdit} + triggerPropName="checked" + > + <Switch onChange={onDataSourceManual} /> + </Form.Item> + )} + {!isParticipant && renderDatasetSelectConfig(dataSourceManual, isParticipant, isHideRule)} + {!isParticipant && !isReceiver && ( + <Form.Item + field={getFieldKey('cron_config', isParticipant)} + label={ + <FormLabel + label={'启用定时重训'} + tooltip={'启用该功能将间隔性地重跑训练任务,且每次训练都将从最新的可用版本开始'} + /> + } + rules={[ + { + validator: scheduleTaskValidator, + message: '请选择时间', + validateTrigger: 'onSubmit', + }, + ]} + > + <ScheduleTaskSetter /> + </Form.Item> + )} + </section> + ); + } + + function renderDatasetSelectConfig( + dataSourceManual: boolean, + isParticipant: boolean, + isHideRule: boolean, + ) { + return dataSourceManual ? ( + <Form.Item + field={getFieldKey('custom_data_source', isParticipant)} + label={'数据源'} + rules={isHideRule ? [] : [{ required: true, message: '必填项' }]} + disabled={isEdit} + > + <Input + placeholder={'请输入数据源'} + onChange={async (val) => { + const configField = + selectedAlgorithmType === AlgorithmType.TREE + ? getFieldKey('tree_config', isParticipant) + : getFieldKey('nn_config', isParticipant); + + const prevConfig = (formInstance.getFieldValue(configField as any) || {}) as + | TreeConfig + | NNConfig; + formInstance.setFieldsValue({ + [configField]: { + ...prevConfig, + data_source: val, + }, + }); + }} + /> + </Form.Item> + ) : ( + <Form.Item + field={getFieldKey('dataset_id', isParticipant)} + label={'数据集'} + rules={isHideRule ? [] : [{ required: true, message: '必填项' }]} + disabled={isEdit} + > + <DatasesetSelect + lazyLoad={{ + page_size: 10, + enable: true, + }} + kind={DatasetKindLabel.PROCESSED} + //TODO:support filter for vertical + datasetJobKind={ + selectedFederalType === FederalType.HORIZONTAL + ? DataJobBackEndType.DATA_ALIGNMENT + : undefined + } + onChange={async (_, option) => { + const dataset = (option as OptionInfo)?.extra as Dataset; + const dataPath = dataset?.path ?? ''; + const dataSource = dataset?.data_source ?? ''; + + selectedDatasetRef.current = dataset; + + const configField = + selectedAlgorithmType === AlgorithmType.TREE + ? getFieldKey('tree_config', isParticipant) + : getFieldKey('nn_config', isParticipant); + + const prevConfig = (formInstance.getFieldValue(configField as any) || {}) as + | TreeConfig + | NNConfig; + formInstance.setFieldsValue({ + [configField]: { + ...prevConfig, + data_path: dataPath, + data_source: dataSource, + }, + }); + }} + /> + </Form.Item> + ); + } + + function renderResourceConfig(isParticipant = false) { + const isHideRule = + (isParticipant && currentStep === 1) || (!isParticipant && currentStep === 2); + return ( + <section className="form-section"> + <h3>资源配置</h3> + <Form.Item + field={getFieldKey('resource_config', isParticipant)} + label={'资源模板'} + rules={isHideRule ? 
[] : [{ required: true, message: '必填项' }]} + > + <ResourceConfig + algorithmType={algorithmProjectType as MixedAlgorithmType} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={isReceiver} + localDisabledList={['master.replicas']} + /> + </Form.Item> + </section> + ); + } + function renderFooterButton() { + let submitText = '提交并发送'; + if (isReceiver) { + if (isEdit) { + submitText = '保存编辑'; + } else { + submitText = '确认授权'; + } + } else { + if (isEdit) { + submitText = '保存编辑'; + } else { + submitText = '提交并发送'; + } + } + return ( + <Space> + {isShowStep && currentStep === 1 && ( + <Button type="primary" onClick={onNextStepClick}> + 下一步 + </Button> + )} + + {(!isShowStep || (isShowStep && currentStep === 2)) && ( + <Button type="primary" htmlType="submit"> + {submitText} + </Button> + )} + + {isShowStep && currentStep === 2 && <Button onClick={onPrevStepClick}>上一步</Button>} + + {(!isShowStep || (isShowStep && currentStep === 1)) && ( + <ButtonWithModalConfirm + isShowConfirmModal={isFormValueChanged} + onClick={goBackToListPage} + > + 取消 + </ButtonWithModalConfirm> + )} + + {!isReceiver && !isEdit && ( + <TitleWithIcon + title={'训练报告仅自己可见,如需共享报告,请前往训练详情页开启'} + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + )} + </Space> + ); + } + + function renderTreeParams(isParticipant = false) { + const isHideRule = + (isParticipant && currentStep === 1) || (!isParticipant && currentStep === 2); + return ( + <> + {!isParticipant && ( + <Form.Item + field={getFieldKey('loss_type', isParticipant)} + label={'损失函数类型'} + rules={isHideRule ? [] : [{ required: true, message: '必填项' }]} + disabled={isDisabled} + > + <BlockRadio.WithTip options={lossTypeOptions} isOneHalfMode={true} /> + </Form.Item> + )} + <Form.Item + field={getFieldKey('tree_config', isParticipant)} + label={'参数配置'} + rules={isHideRule ? [] : [{ required: true, message: '必填项' }]} + > + <ConfigForm + cols={2} + formItemList={treeBaseConfigList} + collapseFormItemList={treeAdvancedFormItemList} + formProps={{ + style: { + marginTop: 7, + }, + }} + /> + </Form.Item> + </> + ); + } + function renderNNParams(isParticipant = false) { + const isHideRule = + (isParticipant && currentStep === 1) || (!isParticipant && currentStep === 2); + return ( + <> + <Form.Item + field={getFieldKey('algorithm', isParticipant)} + label={isParticipant ? '算法超参数' : '算法'} + rules={ + isHideRule || isParticipant + ? [] + : [ + { required: true, message: '必填项' }, + { + validator: checkAlgorithmValueIsEmpty, + }, + ] + } + > + <AlgorithmSelect + leftDisabled={isEdit} + algorithmType={[algorithmProjectType]} + onAlgorithmOwnerChange={(value: any) => setAlgorithmOwner(value)} + algorithmOwnerType={algorithmOwner} + isParticipant={isParticipant} + /> + </Form.Item> + + <Form.Item + field={getFieldKey('nn_config', isParticipant)} + label={'参数配置'} + rules={isHideRule ? [] : [{ required: true, message: '必填项' }]} + > + <ConfigForm + cols={2} + formItemList={nnBaseConfigList} + collapseFormItemList={nnAdvancedFormItemList} + /> + </Form.Item> + </> + ); + } + + function resetAlgorithmFormValue(isParticipant = false) { + const defaultAlgorithmValue = { + algorithmId: undefined, + algorithmProjectId: undefined, + algorithmUuid: undefined, + config: [], + path: '', + }; + + const roleField = isParticipant ? 
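+      // form values are namespaced by side: COORDINATOR holds our config, PARTICIPANT the peer's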
ModelJobRole.PARTICIPANT : ModelJobRole.COORDINATOR;
+
+    formInstance.setFieldsValue({
+      [roleField]: {
+        ...formInstance.getFieldValue(roleField),
+        algorithm: defaultAlgorithmValue,
+      },
+    });
+  }
+  function resetRoleFormValue(isParticipant = false) {
+    const roleField = isParticipant ? ModelJobRole.PARTICIPANT : ModelJobRole.COORDINATOR;
+    formInstance.setFieldsValue({
+      [roleField]: {
+        ...formInstance.getFieldValue(roleField),
+        role: TrainRoleType.LABEL,
+      },
+    });
+  }
+  function goBackToListPage() {
+    history.push(routes.ModelTrainList);
+  }
+  function onPrevStepClick() {
+    setCurrentStep((currentStep) => currentStep - 1);
+  }
+  function onDataSourceManual(checked: boolean) {
+    toggleDataSourceManual(checked);
+  }
+  async function onNextStepClick() {
+    await formInstance.validate();
+    setCurrentStep((currentStep) => currentStep + 1);
+  }
+
+  function getTemplateDetail() {
+    const isTree = selectedAlgorithmType === AlgorithmType.TREE;
+
+    if (!modelJobDefinition) {
+      if (isTree) {
+        Message.error('找不到训练模型模板(树算法)');
+        return;
+      } else if (selectedFederalType === FederalType.HORIZONTAL) {
+        Message.error('找不到训练模型模板(横向nn算法)');
+        return;
+      } else {
+        Message.error('找不到训练模型模板(纵向nn算法)');
+        return;
+      }
+    }
+    return modelJobDefinition;
+  }
+
+  function onSubmit(formValues: FormData) {
+    if (!projectId) {
+      Message.info('请选择工作区');
+      return;
+    }
+
+    const templateDetail: ModelJobDefinitionResult | undefined = getTemplateDetail();
+
+    if (!templateDetail) {
+      return;
+    }
+
+    if (isReceiver) {
+      if (isEdit) {
+        onReceiverEditSubmit(formValues, templateDetail);
+      } else {
+        onReceiverCreateSubmit(formValues, templateDetail);
+      }
+    } else {
+      if (isEdit) {
+        onSenderEditSubmit(formValues, templateDetail);
+      } else {
+        onSenderCreateSubmit(formValues, templateDetail);
+      }
+    }
+  }
+  async function onSenderCreateSubmit(
+    formValues: FormData,
+    templateDetail: ModelJobDefinitionResult,
+  ) {
+    const coordinatorFormValues = formValues[ModelJobRole.COORDINATOR];
+    const isTree = isTreeAlgorithm(coordinatorFormValues.type!);
+
+    const [res, error] = await to(
+      createModelJobGroup(projectId!, {
+        name: coordinatorFormValues.name!,
+        algorithm_type: algorithmProjectType,
+        dataset_id: coordinatorFormValues.dataset_id!,
+      }),
+    );
+
+    if (error) {
+      Message.error(error.message);
+      return;
+    }
+
+    const { id: modelJobGroupId } = res.data;
+
+    const [, updateError] = await to(
+      updateModelJobGroup(projectId!, modelJobGroupId, {
+        comment: coordinatorFormValues.comment,
+        dataset_id: coordinatorFormValues.dataset_id!,
+        cron_config: coordinatorFormValues.cron_config,
+        global_config: {
+          global_config: {
+            [myPureDomainName]: isTree
+              ?
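+              // Illustrative payload shape (an assumption read off this handler, not the API spec):
+              //   global_config.global_config[<pure domain name>] = {
+              //     algorithm_uuid?: string,                    // nn jobs only
+              //     algorithm_parameter?: { variables: [...] }, // nn jobs only
+              //     variables: [hydrated template variables],   // tree and nn jobs
+              //   }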
{ + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + loss_type: coordinatorFormValues.loss_type, + ...coordinatorFormValues.tree_config, + ...coordinatorFormValues.resource_config, + }), + } + : { + algorithm_uuid: coordinatorFormValues?.algorithm?.algorithmUuid, + //TODO:support param algorithm_project_uuid + algorithm_parameter: { variables: coordinatorFormValues?.algorithm?.config }, + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + ...coordinatorFormValues.nn_config, + ...coordinatorFormValues.resource_config, + }), + }, + }, + }, + }), + ); + + if (updateError) { + Message.error(updateError.message); + return; + } + + Message.success('创建成功,等待合作伙伴授权'); + goBackToListPage(); + } + + async function onSenderEditSubmit( + formValues: FormData, + templateDetail: ModelJobDefinitionResult, + ) { + const coordinatorFormValues = formValues[ModelJobRole.COORDINATOR]; + const participantFormValues = { + ...formValues[ModelJobRole.PARTICIPANT], + loss_type: coordinatorFormValues.loss_type, + }; + const isTree = isTreeAlgorithm(coordinatorFormValues.type!); + + const [, updateError] = await to( + updateModelJobGroup(projectId!, id!, { + comment: coordinatorFormValues.comment, + dataset_id: coordinatorFormValues.dataset_id!, + cron_config: coordinatorFormValues.cron_config, + global_config: { + global_config: { + [myPureDomainName]: isTree + ? { + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + loss_type: coordinatorFormValues.loss_type, + ...coordinatorFormValues.tree_config, + ...coordinatorFormValues.resource_config, + }), + } + : { + algorithm_uuid: coordinatorFormValues?.algorithm?.algorithmUuid, + //TODO:support param algorithm_project_uuid + algorithm_parameter: { variables: coordinatorFormValues?.algorithm?.config }, + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + ...coordinatorFormValues.nn_config, + ...coordinatorFormValues.resource_config, + }), + }, + }, + }, + }), + ); + + if (updateError) { + Message.error(updateError.message); + return; + } + + if (isShowCanNotEditPeerConfigAlert) { + Message.success('保存成功'); + goBackToListPage(); + return; + } + + if (!peerModelJobGroupQuery.data?.data?.config) { + Message.error('找不到对侧训练模型模板'); + return; + } + + const [, updatePeerError] = await to( + updatePeerModelJobGroup(projectId!, id!, participantId!, { + global_config: { + global_config: { + [participantPureDomainName]: isTree + ? 
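+              // the peer's side of the payload is keyed by their pure domain name, mirroring our own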
{ + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: participantFormValues.role, + loss_type: participantFormValues.loss_type, + ...participantFormValues.tree_config, + ...participantFormValues.resource_config, + }), + } + : { + //TODO:support algorithm_uuid + algorithm_parameter: { variables: participantFormValues?.algorithm?.config }, + // algorithm_uuid: participantFormValues?.algorithm?.algorithmUuid, + variables: hydrateModalGlobalConfig( + templateDetail?.variables!, + { + role: participantFormValues.role, + algorithm: participantFormValues.algorithm, + ...participantFormValues.nn_config, + ...participantFormValues.resource_config, + }, + false, + ), + }, + }, + }, + }), + ); + + if (updatePeerError) { + Message.error(updatePeerError.message); + return; + } + + Message.success('保存成功'); + goBackToListPage(); + } + async function onReceiverCreateSubmit( + formValues: FormData, + templateDetail: ModelJobDefinitionResult, + ) { + const coordinatorFormValues = formValues[ModelJobRole.COORDINATOR]; + + const isTree = isTreeAlgorithm(coordinatorFormValues.type!); + + const [, updateError] = await to( + updateModelJobGroup(projectId!, id!, { + comment: coordinatorFormValues.comment, + dataset_id: coordinatorFormValues.dataset_id!, + authorized: true, + global_config: { + global_config: { + [myPureDomainName]: isTree + ? { + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + loss_type: coordinatorFormValues.loss_type, + ...coordinatorFormValues.tree_config, + ...coordinatorFormValues.resource_config, + }), + } + : { + algorithm_uuid: coordinatorFormValues?.algorithm?.algorithmUuid, + algorithm_parameter: { variables: coordinatorFormValues?.algorithm?.config }, + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + ...coordinatorFormValues.nn_config, + ...coordinatorFormValues.resource_config, + }), + }, + }, + }, + }), + ); + + if (updateError) { + Message.error(updateError.message); + return; + } + + Message.success('授权完成,等待合作伙伴运行'); + goBackToListPage(); + } + async function onReceiverEditSubmit( + formValues: FormData, + templateDetail: ModelJobDefinitionResult, + ) { + const coordinatorFormValues = formValues[ModelJobRole.COORDINATOR]; + + const isTree = isTreeAlgorithm(coordinatorFormValues.type!); + + const [, updateError] = await to( + updateModelJobGroup(projectId!, id!, { + comment: coordinatorFormValues.comment, + dataset_id: coordinatorFormValues.dataset_id!, + global_config: { + global_config: { + [myPureDomainName]: isTree + ? 
{ + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + loss_type: coordinatorFormValues.loss_type, + ...coordinatorFormValues.tree_config, + ...coordinatorFormValues.resource_config, + }), + } + : { + algorithm_uuid: coordinatorFormValues?.algorithm?.algorithmUuid, + algorithm_parameter: { variables: coordinatorFormValues?.algorithm?.config }, + variables: hydrateModalGlobalConfig(templateDetail?.variables!, { + role: coordinatorFormValues.role, + ...coordinatorFormValues.nn_config, + ...coordinatorFormValues.resource_config, + }), + }, + }, + }, + }), + ); + + if (updateError) { + Message.error(updateError.message); + return; + } + + Message.success('保存成功'); + goBackToListPage(); + } +}; + +export default Create; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/AlgorithmProjectSelect/index.module.less b/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/AlgorithmProjectSelect/index.module.less new file mode 100644 index 000000000..9c7f10bd5 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/AlgorithmProjectSelect/index.module.less @@ -0,0 +1,15 @@ +li:has(.second_option_container) { + height: 54px; + line-height: 24px; +} +.second_option_container { + > span { + font-weight: 500; + } +} +.second_option_content { + color: var(--color-text-2); +} +.text_content { + font-size: 12px; +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/AlgorithmProjectSelect/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/AlgorithmProjectSelect/index.tsx new file mode 100644 index 000000000..72075b5d1 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/AlgorithmProjectSelect/index.tsx @@ -0,0 +1,198 @@ +import React, { useMemo } from 'react'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantList } from 'hooks'; +import { + AlgorithmStatus, + EnumAlgorithmProjectSource, + EnumAlgorithmProjectType, +} from 'typings/algorithm'; +import { ALGORITHM_TYPE_LABEL_MAPPER } from 'views/ModelCenter/shared'; +import { Cascader, Divider, Space, Spin, Tag, Typography } from '@arco-design/web-react'; + +import styles from './index.module.less'; +import { fetchPeerAlgorithmProjectList, fetchProjectList } from 'services/algorithm'; +import { useQuery } from 'react-query'; + +const ALGORITHM_OWNER_TEXT_MAPPER = { + self: '我方算法', + peer: '合作伙伴算法', + preset: '预置算法', +}; + +const ALGORITHM_OWNER_TAG_COLOR_MAPPER = { + self: 'purple', + peer: 'green', + preset: 'blue', +}; + +interface Props { + value?: ID; + algorithmType?: EnumAlgorithmProjectType[]; + onChange?: (algorithmProjectUuid: ID) => void; + supportEdit?: boolean; +} +export default function AlgorithmProjectSelect({ + algorithmType, + value, + onChange: onChangeFromProps, + supportEdit = true, +}: Props) { + const participantList = useGetCurrentProjectParticipantList(); + const projectId = useGetCurrentProjectId(); + + const algorithmProjectListQuery = useQuery( + ['fetchAllAlgorithmProjectList', algorithmType, projectId], + () => + fetchProjectList(projectId, { + type: algorithmType, + }), + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const preAlgorithmProjectListQuery = useQuery( + ['fetchPreAlgorithmProjectListQuery', algorithmType], + () => + fetchProjectList(0, { + type: algorithmType, + sources: EnumAlgorithmProjectSource.PRESET, + }), + { + retry: 2, + 
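+        // preset algorithm projects are fetched under project id 0 with the PRESET source flag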
refetchOnWindowFocus: false, + }, + ); + const peerAlgorithmProjectListQuery = useQuery( + ['fetchPeerAlgorithmProjectListQuery', projectId, algorithmType], + () => + fetchPeerAlgorithmProjectList(projectId, 0, { + filter: `(type:${JSON.stringify(algorithmType)})`, + }), + { + enabled: Boolean(projectId), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const algorithmProjectList = useMemo(() => { + return [ + ...(algorithmProjectListQuery?.data?.data || []), + ...(preAlgorithmProjectListQuery.data?.data || []), + ].filter( + (algorithmProject) => + algorithmProject.source === 'PRESET' || + algorithmProject.publish_status === AlgorithmStatus.PUBLISHED, + ); + }, [algorithmProjectListQuery, preAlgorithmProjectListQuery]); + const peerAlgorithmProjectList = useMemo(() => { + return peerAlgorithmProjectListQuery.data?.data || []; + }, [peerAlgorithmProjectListQuery]); + + const cascaderOptions = useMemo(() => { + return [ + { + value: 'self', + label: '我方算法', + disabled: algorithmProjectList?.length === 0, + children: algorithmProjectList?.map((item) => ({ + ...item, + value: item.uuid, + label: item.name, + participantName: item.source === EnumAlgorithmProjectSource.PRESET ? '预置' : '我方', + })), + }, + { + value: 'peer', + label: '合作伙伴算法', + disabled: peerAlgorithmProjectList?.length === 0, + children: peerAlgorithmProjectList?.map((item) => ({ + ...item, + value: item.uuid, + label: item.name, + participantName: participantList.find( + (participant) => participant.id === item.participant_id, + )?.name, + })), + }, + ]; + }, [algorithmProjectList, peerAlgorithmProjectList, participantList]); + + const algorithmOwnerType = useMemo(() => { + if (algorithmProjectList?.find((item) => item.uuid === value)) { + return 'self'; + } else if (peerAlgorithmProjectList.find((item) => item.uuid === value)) { + return 'peer'; + } + return undefined; + }, [value, algorithmProjectList, peerAlgorithmProjectList]); + const algorithmProjectName = useMemo(() => { + return [...algorithmProjectList, ...peerAlgorithmProjectList].find( + (item) => item.uuid === value, + )?.name; + }, [value, algorithmProjectList, peerAlgorithmProjectList]); + + const algorithmTagType = useMemo(() => { + const algorithmProject = algorithmProjectList?.find((item) => item.uuid === value); + if (algorithmProject) { + return algorithmProject.source === EnumAlgorithmProjectSource.PRESET ? 'preset' : 'self'; + } else if (peerAlgorithmProjectList.find((item) => item.uuid === value)) { + return 'peer'; + } + return undefined; + }, [algorithmProjectList, peerAlgorithmProjectList, value]); + + const isLoading = useMemo(() => { + return ( + algorithmProjectListQuery.isFetching || + peerAlgorithmProjectListQuery.isFetching || + preAlgorithmProjectListQuery.isFetching + ); + }, [ + algorithmProjectListQuery.isFetching, + peerAlgorithmProjectListQuery.isFetching, + preAlgorithmProjectListQuery.isFetching, + ]); + + return supportEdit ? 
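+    // editable: a two-level cascader (algorithm owner -> algorithm project); read-only: name plus an owner tag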
( + <Cascader + loading={isLoading} + options={cascaderOptions} + placeholder="请选择算法" + showSearch={true} + onChange={(value) => { + handleChange(value?.[1] as ID); + }} + renderOption={(option, level) => { + if (level === 0) { + return <span>{option.label}</span>; + } + return ( + <div className={styles.second_option_container}> + <span style={{ display: 'block' }}>{option.name}</span> + <Space className={styles.second_option_content} split={<Divider type="vertical" />}> + <span>{option.participantName}</span> + <span>{ALGORITHM_TYPE_LABEL_MAPPER?.[option.type as string]}</span> + </Space> + </div> + ); + }} + /> + ) : ( + <Spin loading={isLoading}> + {!algorithmOwnerType || !algorithmProjectName || !algorithmTagType ? ( + <Typography.Text bold={true}> 暂无数据</Typography.Text> + ) : ( + <Space> + <Typography.Text bold={true}>{algorithmProjectName}</Typography.Text> + <Tag color={ALGORITHM_OWNER_TAG_COLOR_MAPPER[algorithmTagType]}> + {ALGORITHM_OWNER_TEXT_MAPPER[algorithmTagType]} + </Tag> + </Space> + )} + </Spin> + ); + function handleChange(algorithmProjectUuid: ID) { + onChangeFromProps?.(algorithmProjectUuid); + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/index.tsx new file mode 100644 index 000000000..726f433d5 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/CreateCentralization/index.tsx @@ -0,0 +1,332 @@ +import React, { useEffect, useMemo, useState } from 'react'; + +import { useHistory, useParams } from 'react-router'; +import { useQuery } from 'react-query'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, + useIsFormValueChange, +} from 'hooks'; +import { + createModeJobGroupV2, + fetchModelJobGroupDetail, + updateModelJobGroup, +} from 'services/modelCenter'; +import { fetchDatasetDetail } from 'services/dataset'; +import { MAX_COMMENT_LENGTH } from 'shared/validator'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { DataJobBackEndType, DatasetKindLabel } from 'typings/dataset'; +import routes from 'views/ModelCenter/routes'; +import { ALGORITHM_TYPE_LABEL_MAPPER } from 'views/ModelCenter/shared'; + +import { + Avatar, + Button, + Card, + Form, + Input, + Message, + Space, + Tag, + Typography, +} from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import BackButton from 'components/BackButton'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BlockRadio from 'components/_base/BlockRadio'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import TitleWithIcon from 'components/TitleWithIcon'; +import DatasetSelect from 'components/NewDatasetSelect'; +import AlgorithmProjectSelect from './AlgorithmProjectSelect'; +import { LabelStrong } from 'styles/elements'; + +const federalTypeOptions = [ + { + value: EnumAlgorithmProjectType.TREE_VERTICAL, + label: ALGORITHM_TYPE_LABEL_MAPPER[EnumAlgorithmProjectType.TREE_VERTICAL], + }, + { + value: EnumAlgorithmProjectType.NN_VERTICAL, + label: ALGORITHM_TYPE_LABEL_MAPPER[EnumAlgorithmProjectType.NN_VERTICAL], + }, + { + value: EnumAlgorithmProjectType.NN_HORIZONTAL, + label: ALGORITHM_TYPE_LABEL_MAPPER[EnumAlgorithmProjectType.NN_HORIZONTAL], + }, +]; + +const defaultRules = [{ required: true, message: '必选项' }]; + +export default function CreateCentralization() { + const history = useHistory(); + const { role, id: 
modelJobGroupId } = useParams<{ role: string; id: string }>(); + const isReceiver = role === 'receiver'; + + const [formInstance] = Form.useForm(); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + + const [selectedAlgorithmType, setSelectedAlgorithmType] = useState<EnumAlgorithmProjectType>( + EnumAlgorithmProjectType.TREE_VERTICAL, + ); + const projectId = useGetCurrentProjectId(); + const myPureDomain = useGetCurrentPureDomainName(); + const participantList = useGetCurrentProjectParticipantList(); + + const modelJobGroupDetailQuery = useQuery( + ['fetchModelJobGroupDetail', projectId, modelJobGroupId], + () => fetchModelJobGroupDetail(projectId!, modelJobGroupId), + { + enabled: !!projectId && !!modelJobGroupId, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const datasetDetailQuery = useQuery( + ['fetchDatasetDetail', modelJobGroupDetailQuery.data?.data.dataset_id], + () => fetchDatasetDetail(modelJobGroupDetailQuery.data?.data.dataset_id), + { + enabled: !!modelJobGroupDetailQuery.data?.data.dataset_id, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const modelJobGroupDetail = useMemo(() => { + return modelJobGroupDetailQuery.data?.data; + }, [modelJobGroupDetailQuery.data?.data]); + const datasetDetail = useMemo(() => { + return datasetDetailQuery.data?.data; + }, [datasetDetailQuery.data?.data]); + + const coordinatorName = useMemo(() => { + return ( + participantList.find((participant) => participant.id === modelJobGroupDetail?.coordinator_id) + ?.name ?? '未知合作伙伴' + ); + }, [modelJobGroupDetail?.coordinator_id, participantList]); + + useEffect(() => { + if (!modelJobGroupDetail) { + return; + } + formInstance.setFieldsValue({ + ...modelJobGroupDetail, + algorithm_project_list: { + algorithmProjects: modelJobGroupDetail.algorithm_project_uuid_list?.algorithm_projects, + }, + }); + }, [formInstance, modelJobGroupDetail]); + + useEffect(() => { + if (!modelJobGroupDetail) { + return; + } + setSelectedAlgorithmType(modelJobGroupDetail.algorithm_type as EnumAlgorithmProjectType); + }, [modelJobGroupDetail]); + return ( + <SharedPageLayout + title={ + <BackButton isShowConfirmModal={isFormValueChanged} onClick={goBackToListPage}> + 模型训练 + </BackButton> + } + contentWrapByCard={false} + centerTitle={'创建模型训练'} + > + {isReceiver && ( + <Card className="card" bordered={false} style={{ marginBottom: 20 }}> + <Space size="medium"> + <Avatar /> + <> + <LabelStrong + fontSize={16} + >{`${coordinatorName}向您发起「${modelJobGroupDetail?.name}」的模型训练作业`}</LabelStrong> + <TitleWithIcon + title={'所有合作伙伴授权完成后,任意合作方均可发起模型训练任务。'} + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </> + </Space> + </Card> + )} + <Card className="card" bordered={false} style={{ height: '100%' }}> + <Form + className="form-content" + form={formInstance} + onChange={onFormValueChange} + onSubmit={handelSubmit} + > + <section className="form-section"> + <h3>基本信息</h3> + <Form.Item + field="name" + label="模型训练名称" + rules={[{ required: true, message: '必填项' }]} + > + {isReceiver ? 
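+                  // the receiver only reviews the proposed name; the sender can edit it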
( + <Typography.Text bold={true}>{modelJobGroupDetail?.name}</Typography.Text> + ) : ( + <Input placeholder="请输入模型训练名称" /> + )} + </Form.Item> + <Form.Item + field={'comment'} + label={'描述'} + rules={[ + { + maxLength: MAX_COMMENT_LENGTH, + message: '最多为 200 个字符', + }, + ]} + > + <Input.TextArea placeholder={'最多为 200 个字符'} /> + </Form.Item> + </section> + <section className="form-section"> + <h3>训练配置</h3> + <Form.Item + field={'algorithm_type'} + label={'联邦类型'} + rules={[{ required: true, message: '必选项' }]} + initialValue={selectedAlgorithmType} + > + {isReceiver ? ( + <Typography.Text bold={true}> + { + ALGORITHM_TYPE_LABEL_MAPPER[ + modelJobGroupDetail?.algorithm_type || EnumAlgorithmProjectType.TREE_VERTICAL + ] + } + </Typography.Text> + ) : ( + <BlockRadio + options={federalTypeOptions} + isCenter={true} + onChange={(value) => { + setSelectedAlgorithmType(value); + formInstance.setFieldValue('dataset_id', undefined); + }} + /> + )} + </Form.Item> + <Form.Item + label={'数据集'} + field={'dataset_id'} + rules={defaultRules} + shouldUpdate={true} + > + {isReceiver ? ( + <Space> + <Typography.Text bold={true}>{datasetDetail?.name}</Typography.Text> + <Tag color="arcoblue">结果</Tag> + </Space> + ) : ( + <DatasetSelect + lazyLoad={{ + enable: true, + page_size: 10, + }} + kind={DatasetKindLabel.PROCESSED} + datasetJobKind={ + selectedAlgorithmType === EnumAlgorithmProjectType.NN_HORIZONTAL + ? DataJobBackEndType.DATA_ALIGNMENT + : undefined + } + /> + )} + </Form.Item> + {[ + EnumAlgorithmProjectType.NN_HORIZONTAL, + EnumAlgorithmProjectType.NN_VERTICAL, + ].includes(selectedAlgorithmType) && ( + <> + <Form.Item + field={resetField(myPureDomain, 'algorithm_project_list.algorithmProjects')} + label="我方算法" + rules={defaultRules} + > + <AlgorithmProjectSelect + algorithmType={[selectedAlgorithmType]} + supportEdit={!isReceiver} + /> + </Form.Item> + {participantList.map((participant) => { + return ( + <Form.Item + key={participant.id} + field={resetField( + participant.pure_domain_name, + 'algorithm_project_list.algorithmProjects', + )} + label={`${participant.name}算法`} + rules={defaultRules} + > + <AlgorithmProjectSelect + algorithmType={[selectedAlgorithmType]} + supportEdit={!isReceiver} + /> + </Form.Item> + ); + })} + </> + )} + </section> + <Space> + <Button type="primary" htmlType="submit"> + {isReceiver ? 
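+              // receivers authorize the incoming job; senders submit and send it to the peers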
'授权' : '提交并发送'} + </Button> + + <ButtonWithModalConfirm + isShowConfirmModal={isFormValueChanged} + onClick={goBackToListPage} + > + 取消 + </ButtonWithModalConfirm> + <TitleWithIcon + title={'所有合作伙伴授权完成后,任意合作方均可发起模型训练任务。'} + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </Space> + </Form> + </Card> + </SharedPageLayout> + ); + function resetField(participantName: string, fieldName: string) { + return `${fieldName}.${participantName}`; + } + function goBackToListPage() { + history.push(routes.ModelTrainList); + } + + async function handelSubmit(value: any) { + if (!projectId) { + Message.info('请选择工作区!'); + return; + } + if (!isReceiver) { + try { + await createModeJobGroupV2(projectId, value); + Message.info('创建成功'); + history.push(routes.ModelTrainList); + } catch (error: any) { + Message.error(error.message); + } + } else { + try { + await updateModelJobGroup(projectId, modelJobGroupDetail?.id!, { + authorized: true, + comment: value?.comment, + }); + Message.info('授权成功'); + history.push(routes.ModelTrainList); + } catch (error: any) { + Message.error(error.message); + } + } + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/Detail/index.module.less b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Detail/index.module.less new file mode 100644 index 000000000..9c54f4373 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Detail/index.module.less @@ -0,0 +1,48 @@ +@import '~styles/mixins.less'; +.detail_container { + padding: 20px 20px 0; + border-bottom: 1px solid var(--lineColor); + h3 { + margin-top: 0; + margin-bottom: -3px; + font-size: 16px; + font-weight: 600; + line-height: 24px; + } + .detail_comment_space { + font-size: 12px; + color: var(--textColorSecondary); + } + .detail_header_col { + margin-top: 9px; + text-align: right; + } +} + +.detail_content { + padding: 0 20px; + .round_tag { + border-radius: 32px; + } + .table_header { + display: flex; + margin-top: 14px; + margin-bottom: 20px; + justify-content: space-between; + } +} +.detail_comment { + .MixinEllipsis(400px); +} + +.model_progress_container { + width: 100px; +} + +.algorithm_popover_padding { + width: 400px; + max-width: 400px !important; + :global(.arco-popover-content) { + padding: 0; + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/Detail/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Detail/index.tsx new file mode 100644 index 000000000..4b769c1dd --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/Detail/index.tsx @@ -0,0 +1,878 @@ +import React, { FC, useMemo, useState } from 'react'; +import { generatePath, useHistory, useParams, Link } from 'react-router-dom'; +import { useMutation, useQuery } from 'react-query'; +import dayjs from 'dayjs'; + +import { + useGetAppFlagValue, + useGetCurrentProjectId, + useGetCurrentProjectParticipantId, + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, + useTablePaginationWithUrlState, + useUrlState, +} from 'hooks'; +import { + authorizeModelJobGroup, + deleteModelJobGroup, + fetchModelJobGroupDetail, + fetchPeerModelJobGroupDetail, + launchModelJobGroup, + stopModelJob, + fetchModelJobList_new, + stopAutoUpdateModelJob, + fetchAutoUpdateModelJobDetail, +} from 'services/modelCenter'; +import { fetchDatasetDetail, fetchDatasetJobDetail } from 'services/dataset'; +import { formatTimestamp } from 'shared/date'; +import { + Avatar, + AUTH_STATUS_TEXT_MAP, + getConfigInitialValues, + getModelJobStatus, + 
isNNAlgorithm, + isVerticalNNAlgorithm, + MODEL_GROUP_STATUS_MAPPER, + resetAuthInfo, + statusFilters, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + isTreeAlgorithm, +} from 'views/ModelCenter/shared'; +import { CONSTANTS } from 'shared/constants'; + +import { + Grid, + Button, + Space, + Tag, + Message, + Table, + Spin, + Tooltip, + Popover, +} from '@arco-design/web-react'; +import BackButton from 'components/BackButton'; +import MoreActions from 'components/MoreActions'; +import PropertyList from 'components/PropertyList'; +import SharedPageLayout from 'components/SharedPageLayout'; +import ModelJobDetailDrawer from '../../ModelJobDetailDrawer'; + +import routes from '../../routes'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { LabelStrong } from 'styles/elements'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { + AutoModelJobStatus, + ModelGroupStatus, + ModelJob, + ModelJobRole, + ModelJobStatus, +} from 'typings/modelCenter'; +import { DatasetKindBackEndType, DatasetType__archived } from 'typings/dataset'; +import StateIndicator from 'components/StateIndicator'; +import { WorkflowState } from 'typings/workflow'; +import CountTime from 'components/CountTime'; +import Modal from 'components/Modal'; +import AlgorithmType from 'components/AlgorithmType'; +import { IconCheckCircleFill, IconExclamationCircleFill } from '@arco-design/web-react/icon'; +import TrainJobCompareModal from '../../TrainJobCompareModal'; +import WhichAlgorithm from 'components/WhichAlgorithm'; + +import styles from './index.module.less'; +import ProgressWithText from 'components/ProgressWithText'; +import { FlagKey } from 'typings/flag'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { expression2Filter } from 'shared/filter'; +import AlgorithmProjectSelect from '../CreateCentralization/AlgorithmProjectSelect'; + +const { Row, Col } = Grid; + +type TRouteParams = { + id: string; +}; +const AUTO_STATUS_TEXT_MAPPER: Record<AutoModelJobStatus, string> = { + [AutoModelJobStatus.INITIAL]: '发起定时续训任务', + [AutoModelJobStatus.ACTIVE]: '停止定时续训任务', + [AutoModelJobStatus.STOPPED]: '配置定时续训任务', +}; + +const Detail: FC = () => { + const history = useHistory(); + const params = useParams<TRouteParams>(); + const [autoBtnLoading, setAutoBtnLoading] = useState<boolean>(false); + const { urlState: pageInfoState, paginationProps } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState<{ + filter?: string; + page?: number; + pageSize?: number; + }>({}); + const projectId = useGetCurrentProjectId(); + const participantId = useGetCurrentProjectParticipantId(); + const participantList = useGetCurrentProjectParticipantList(); + const myPureDomainName = useGetCurrentPureDomainName(); + + const model_job_global_config_enabled = useGetAppFlagValue( + FlagKey.MODEL_JOB_GLOBAL_CONFIG_ENABLED, + ); + + const queryKeys = ['modelJobDetail', params.id, projectId]; + + const detailQuery = useQuery( + queryKeys, + () => { + return fetchModelJobGroupDetail(projectId!, params.id); + }, + { + enabled: Boolean(projectId) && Boolean(params.id), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const modelJobListQuery = useQuery( + [ + 'fetchModelJobListQuery', + projectId, + params.id, + pageInfoState.page, + pageInfoState.pageSize, + urlState.filter, + ], + () => + fetchModelJobList_new(projectId!, { + project_id: projectId as string, + group_id: params.id, + page: pageInfoState.page, + page_size: pageInfoState.pageSize, + filter: urlState.filter 
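+        // an empty filter expression is coerced to undefined so it is dropped from the request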
|| undefined, + }), + { + enabled: Boolean(projectId) && Boolean(params.id), + retry: 2, + refetchOnWindowFocus: false, + keepPreviousData: true, + }, + ); + + const datasetDetailQuery = useQuery( + ['datasetDetailQuery', detailQuery.data?.data?.dataset_id], + () => { + return fetchDatasetDetail(detailQuery.data?.data?.dataset_id); + }, + { + enabled: detailQuery.data?.data?.dataset_id !== undefined, + }, + ); + + const authorizeMutate = useMutation( + (payload: { id: ID; authorized: boolean }) => { + return authorizeModelJobGroup(projectId!, payload.id, payload.authorized); + }, + { + onSuccess(_, { authorized }) { + detailQuery.refetch(); + Message.success(!authorized ? '撤销成功' : '授权成功'); + }, + onError(_, { authorized }) { + detailQuery.refetch(); + Message.error(!authorized ? '撤销失败' : '授权失败'); + }, + }, + ); + + const detail = useMemo(() => detailQuery.data?.data, [detailQuery]); + const modelJobList = useMemo(() => modelJobListQuery.data?.data, [modelJobListQuery]); + const datasetDetail = useMemo(() => datasetDetailQuery.data?.data, [datasetDetailQuery]); + + const datasetJobQuery = useQuery( + [ + 'fetchDatasetJobDetail', + projectId, + datasetDetail?.parent_dataset_job_id, + datasetDetail?.dataset_type, + ], + () => fetchDatasetJobDetail(projectId!, datasetDetail?.parent_dataset_job_id!), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: + Boolean(projectId && datasetDetail?.parent_dataset_job_id) && + datasetDetail?.dataset_type === DatasetType__archived.STREAMING, + }, + ); + + const datasetJob = useMemo(() => datasetJobQuery.data?.data, [datasetJobQuery]); + + const isOldModelGroup = useMemo(() => { + return Boolean(detail?.config?.job_definitions?.length); + }, [detail?.config?.job_definitions?.length]); + const progressConfig = useMemo(() => { + if (!detail?.auth_frontend_status) { + return undefined; + } + return MODEL_GROUP_STATUS_MAPPER?.[detail.auth_frontend_status]; + }, [detail]); + + const displayedProps = useMemo( + () => { + const { loss_type, algorithm } = getConfigInitialValues(detail?.config!, [ + 'loss_type', + 'algorithm', + ]); + let algorithmValue = { + algorithmId: undefined, + algorithmUuid: undefined, + participantId: undefined, + }; + try { + algorithmValue = JSON.parse(algorithm); + } catch (e) {} + const { algorithmId, algorithmUuid, participantId } = algorithmValue; + const { name, status, percent } = progressConfig ?? {}; + const authInfo = resetAuthInfo( + detail?.participants_info?.participants_map, + participantList, + myPureDomainName, + ); + return [ + { + value: detail?.role + ? detail.role === ModelJobRole.COORDINATOR + ? '我方' + : participantList.find((item) => item.id === detail.coordinator_id)?.name || + CONSTANTS.EMPTY_PLACEHOLDER + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '发起方', + }, + { + label: '授权状态', + value: ( + <ProgressWithText + className={styles.model_progress_container} + statusText={name} + status={status} + percent={percent} + toolTipContent={ + detail?.auth_frontend_status && + [ModelGroupStatus.PART_AUTH_PENDING, ModelGroupStatus.SELF_AUTH_PENDING].includes( + detail?.auth_frontend_status, + ) ? ( + <> + {authInfo.map((item: any) => ( + <div key={item.name}>{`${item.name}: ${ + AUTH_STATUS_TEXT_MAP?.[item.authStatus] + }`}</div> + ))} + </> + ) : undefined + } + /> + ), + }, + { + value: detail?.creator_username ?? CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建者', + }, + { + value: + datasetDetail?.dataset_kind === DatasetKindBackEndType.PROCESSED ? 
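+          // only processed datasets have a dataset-job detail page to link to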
( + <Link + to={`/datasets/${DatasetKindBackEndType.PROCESSED.toLowerCase()}/detail/${ + detail?.dataset_id + }/dataset_job_detail`} + > + {datasetDetail?.name} + </Link> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + label: '数据集', + }, + detail?.algorithm_type && isNNAlgorithm(detail?.algorithm_type as EnumAlgorithmProjectType) + ? { + value: ( + <WhichAlgorithm + id={algorithmId!} + uuid={algorithmUuid} + participantId={participantId} + formatter={(algorithm: Algorithm) => algorithm.name} + /> + ), + label: '算法', + } + : { value: loss_type, label: '损失类型' }, + { + value: detail?.updated_at + ? formatTimestamp(detail.updated_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '更新时间', + }, + { + value: detail?.created_at + ? formatTimestamp(detail.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建时间', + }, + ]; + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [detail, datasetDetailQuery, participantList, progressConfig, myPureDomainName], + ); + + const displayedProps_new = useMemo(() => { + const { name, status, percent } = progressConfig ?? {}; + const authInfo = resetAuthInfo( + detail?.participants_info?.participants_map, + participantList, + myPureDomainName, + ); + const keyList = Object.keys(detail?.algorithm_project_uuid_list?.algorithm_projects ?? {}); + const textList = keyList + .map((item) => { + return { + name: item, + algorithmProjectUuid: detail?.algorithm_project_uuid_list?.algorithm_projects?.[item], + }; + }) + .sort((a, b) => (a.name > b.name ? 1 : -1)); + + const table = ( + <Table + className="custom-table" + size="small" + columns={[ + { dataIndex: 'name', title: '参与方', width: 150 }, + { + dataIndex: 'algorithmProjectUuid', + title: '算法', + render: (val) => ( + <AlgorithmProjectSelect + algorithmType={[detail?.algorithm_type as EnumAlgorithmProjectType]} + value={val} + supportEdit={false} + /> + ), + }, + ]} + scroll={{ + y: 300, + }} + border={false} + borderCell={false} + pagination={false} + data={textList} + /> + ); + const propsList = [ + { + value: detail?.role + ? detail.role === ModelJobRole.COORDINATOR + ? '我方' + : participantList.find((item) => item.id === detail.coordinator_id)?.name || + CONSTANTS.EMPTY_PLACEHOLDER + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '发起方', + }, + { + label: '授权状态', + value: ( + <ProgressWithText + className={styles.model_progress_container} + statusText={name} + status={status} + percent={percent} + toolTipContent={ + detail?.auth_frontend_status && + [ModelGroupStatus.PART_AUTH_PENDING, ModelGroupStatus.SELF_AUTH_PENDING].includes( + detail?.auth_frontend_status, + ) ? ( + <> + {authInfo.map((item: any) => ( + <div key={item.name}>{`${item.name}: ${ + AUTH_STATUS_TEXT_MAP?.[item.authStatus] + }`}</div> + ))} + </> + ) : undefined + } + /> + ), + }, + { + value: detail?.creator_username ?? CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建者', + }, + { + value: + datasetDetail?.dataset_kind === DatasetKindBackEndType.PROCESSED ? ( + <Space> + <Link + to={`/datasets/${DatasetKindBackEndType.PROCESSED.toLowerCase()}/detail/${ + detail?.dataset_id + }/dataset_job_detail`} + > + {datasetDetail?.name} + </Link> + {datasetDetail?.dataset_type === DatasetType__archived.STREAMING && + datasetJob?.time_range && ( + <Tag color={datasetJob?.time_range?.hours === 1 ? 'arcoblue' : 'purple'}> + {datasetJob?.time_range?.hours === 1 ? 
'小时级' : '天级'} + </Tag> + )} + </Space> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + label: '数据集', + }, + { + value: ( + <Popover + popupHoverStay={true} + content={table} + position="bl" + className={styles.algorithm_popover_padding} + > + <button className="custom-text-button">{'查看'}</button> + </Popover> + ), + label: '算法', + }, + { + value: detail?.updated_at + ? formatTimestamp(detail.updated_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '更新时间', + }, + { + value: detail?.created_at + ? formatTimestamp(detail.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建时间', + }, + ]; + isTreeAlgorithm(detail?.algorithm_type as EnumAlgorithmProjectType) && propsList.splice(4, 1); + return propsList; + }, [ + datasetDetail?.dataset_kind, + datasetDetail?.dataset_type, + datasetDetail?.name, + detail, + myPureDomainName, + participantList, + progressConfig, + datasetJob?.time_range, + ]); + + const columns = useMemo<ColumnProps<ModelJob>[]>(() => { + return [ + { + title: '名称', + dataIndex: 'version', + name: 'version', + width: 120, + render: (_, record) => { + if (record.auto_update) { + return ( + <Space> + {`V${record.version}`} + <Tag color="blue">定时</Tag> + </Space> + ); + } + return `V${record.version}`; + }, + }, + { + title: '发起方', + dataIndex: 'coordinator_id', + name: 'coordinator_id', + width: 100, + render: (val, record) => { + if (record.role === 'COORDINATOR') { + return '我方'; + } + return participantList.find((item) => item.id === val)?.name || '-'; + }, + }, + { + title: '任务状态', + dataIndex: 'status', + name: 'status', + width: 180, + filteredValue: expression2Filter(urlState.filter).status, + filters: statusFilters.filters, + render: (_, record) => { + return ( + <StateIndicator + {...getModelJobStatus(record.status ?? ModelJobStatus.UNKNOWN, { + isHideAllActionList: true, + })} + /> + ); + }, + }, + { + title: '运行时长', + dataIndex: 'running_time', + name: 'running_time', + width: 150, + render: (_, record) => { + let isRunning = false; + let isStopped = true; + let runningTime = 0; + + const { state } = record; + const { RUNNING, STOPPED, COMPLETED, FAILED } = WorkflowState; + isRunning = state === RUNNING; + isStopped = [STOPPED, COMPLETED, FAILED].includes(state); + + if (isRunning || isStopped) { + const { stopped_at, started_at } = record; + runningTime = isStopped ? stopped_at! - started_at! : dayjs().unix() - started_at!; + } + return <CountTime time={runningTime} isStatic={!isRunning} />; + }, + }, + { + title: '开始时间', + dataIndex: 'started_at', + name: 'started_at', + width: 150, + sorter(a: ModelJob, b: ModelJob) { + return (a?.started_at ?? 0) - (b?.started_at ?? 0); + }, + render: (date: number) => (date ? formatTimestamp(date) : CONSTANTS.EMPTY_PLACEHOLDER), + }, + { + title: '结束时间', + dataIndex: 'stopped_at', + name: 'stopped_at', + width: 150, + sorter(a: ModelJob, b: ModelJob) { + return (a?.stopped_at ?? 0) - (b?.stopped_at ?? 0); + }, + render: (date: number) => (date ? formatTimestamp(date) : CONSTANTS.EMPTY_PLACEHOLDER), + }, + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + fixed: 'right', + width: 120, + render: (_: any, record) => ( + <> + <span> + <ModelJobDetailDrawer.Button + id={record.id} + text={'详情'} + btnDisabled={record.status === ModelJobStatus.PENDING} + datasetBatchType={datasetJob?.time_range?.hours === 1 ? 
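+                  // batch granularity hint for the drawer (hourly vs daily streaming dataset)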
'hour' : 'day'} + /> + </span> + <button + className="custom-text-button" + style={{ marginLeft: 15 }} + disabled={record.status !== ModelJobStatus.RUNNING} + onClick={() => { + stopModelJob(projectId!, record.id) + .then(() => { + Message.success('终止成功'); + modelJobListQuery.refetch(); + }) + .catch((error) => Message.error(error.message)); + }} + > + 终止 + </button> + </> + ), + }, + ]; + }, [ + modelJobListQuery, + participantList, + projectId, + urlState.filter, + datasetJob?.time_range?.hours, + ]); + + return ( + <SharedPageLayout + title={ + <BackButton onClick={() => history.push(routes.ModelTrainList)}>{'模型训练'}</BackButton> + } + cardPadding={0} + > + <Spin loading={detailQuery.isFetching}> + <div className={styles.detail_container}> + <Row> + <Col span={12}> + <Space size="medium"> + <Avatar /> + <div> + <h3>{detail?.name ?? '....'}</h3> + <Space className={styles.detail_comment_space}> + {detail?.algorithm_type && ( + <AlgorithmType + type={detail.algorithm_type as EnumAlgorithmProjectType} + tagProps={{ + size: 'small', + }} + /> + )} + <Tag size="small" style={{ fontWeight: 'normal' }}> + ID: {detail?.id} + </Tag> + <Tooltip content={detail?.comment}> + <div className={styles.detail_comment}> + {detail?.comment ?? CONSTANTS.EMPTY_PLACEHOLDER} + </div> + </Tooltip> + </Space> + </div> + </Space> + </Col> + <Col className={styles.detail_header_col} span={12}> + <Space> + {detail?.role === 'PARTICIPANT' && ( + <Space> + <span> + {detail.authorized ? ( + <> + <IconCheckCircleFill style={{ color: 'rgb(var(--success-6))' }} /> + 我方已授权 + </> + ) : ( + <> + <IconExclamationCircleFill style={{ color: 'rgb(var(--primary-6))' }} /> + 待我方授权 + </> + )} + </span> + <button + className="custom-text-button" + onClick={() => { + Modal.confirm({ + title: detail.authorized ? '确认撤销授权?' : '确认授权?', + content: detail.authorized + ? '撤销授权后,发起方不可运行模型训练,正在运行的任务不受影响' + : '授权后,发起方可以运行模型训练', + okText: '确认', + onOk: () => { + authorizeMutate.mutate({ + id: detail.id, + authorized: !detail.authorized, + }); + }, + }); + }} + > + {detail.authorized ? '撤销授权' : '授权'} + </button> + </Space> + )} + {/* TODO: 中心化后支持每个参与方发起模型训练任务 */} + <Tooltip + content={ + detail?.role === 'COORDINATOR' || !isOldModelGroup + ? undefined + : '旧版模型训练作业非发起方暂不支持发起模型训练任务' + } + > + <Button + type="primary" + onClick={() => { + model_job_global_config_enabled && !isOldModelGroup + ? 
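/* groups with global config (new-style) go through the training-job wizard; old-style groups use the legacy one-shot launch */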
onStartModelTrainJobClick('once') + : onStartNewModelJobButtonClick(); + }} + disabled={!detail?.name || (detail?.role !== 'COORDINATOR' && isOldModelGroup)} + > + 发起新任务 + </Button> + </Tooltip> + {model_job_global_config_enabled && + !isOldModelGroup && + datasetDetail?.dataset_type === DatasetType__archived.STREAMING && + isVerticalNNAlgorithm(detail?.algorithm_type as EnumAlgorithmProjectType) && ( + <Button + type="primary" + loading={autoBtnLoading} + onClick={async () => { + if (detail?.auto_update_status === AutoModelJobStatus.ACTIVE) { + setAutoBtnLoading(true); + Modal.stop({ + title: '确定停止定时续训任务', + content: '请谨慎操作', + okText: '停止', + onOk: async () => { + try { + await stopAutoUpdateModelJob(projectId!, detail?.id!); + Message.success('停止定时续训任务成功'); + detailQuery.refetch(); + } catch (err: any) { + Message.error(err.message); + } + setAutoBtnLoading(false); + }, + onCancel: () => { + setAutoBtnLoading(false); + }, + }); + } else { + onStartModelTrainJobClick('repeat'); + } + }} + disabled={!detail?.name} + > + { + AUTO_STATUS_TEXT_MAPPER?.[ + detail?.auto_update_status || AutoModelJobStatus.INITIAL + ] + } + </Button> + )} + + {isOldModelGroup && ( + <Button onClick={onEditButtonClick} disabled={!detail?.name}> + 编辑 + </Button> + )} + <MoreActions + actionList={[ + { + label: '删除', + danger: true, + onClick: onDeleteButtonClick, + disabled: !detail?.name, + }, + ]} + /> + </Space> + </Col> + </Row> + <PropertyList + cols={4} + properties={isOldModelGroup ? displayedProps : displayedProps_new} + align="center" + /> + </div> + <div className={styles.detail_content}> + <div className={styles.table_header}> + <Space> + <LabelStrong>训练任务</LabelStrong> + <Tag className={styles.round_tag}>{detail?.model_jobs?.length ?? 0}</Tag> + </Space> + {detail?.algorithm_type && ( + <TrainJobCompareModal.Button + algorithmType={detail?.algorithm_type} + list={(detail?.model_jobs ?? []).slice(0, 10) /** NOTE: 只取前十条训练任务来对比 */} + /> + )} + </div> + <Table + className="custom-table custom-table-left-side-filter" + loading={modelJobListQuery.isFetching} + data={modelJobList ?? []} + rowKey="id" + columns={columns} + pagination={{ + ...paginationProps, + total: modelJobListQuery.data?.page_meta?.total_items ?? modelJobList?.length, + }} + onChange={(_, __, filters, extra) => { + if (extra.action === 'filter') { + setUrlState((preState) => ({ + ...preState, + page: 1, + filter: filterExpressionGenerator( + { + status: filters.status, + }, + FILTER_MODEL_JOB_OPERATOR_MAPPER, + ), + })); + } + }} + /> + </div> + </Spin> + </SharedPageLayout> + ); + + async function onStartNewModelJobButtonClick() { + let isPeerAuthorized = false; + + try { + const resp = await fetchPeerModelJobGroupDetail(projectId!, params.id!, participantId!); + isPeerAuthorized = resp?.data?.authorized ?? false; + } catch (error) { + Message.error(error.message); + } + + if (!isPeerAuthorized) { + Message.info('合作伙伴未授权,不能发起新任务'); + return; + } + + launchModelJobGroup(projectId!, params.id) + .then(() => { + Message.success('发起成功'); + setUrlState({ page: 1, filter: undefined }); + modelJobListQuery.refetch(); + }) + .catch((error) => Message.error(error.message)); + } + function onEditButtonClick() { + history.push( + generatePath(routes.ModelTrainCreate, { + role: detail!.role === ModelJobRole.COORDINATOR ? 
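/* the coordinator edits the group as 'sender'; a participant edits as 'receiver' */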
'sender' : 'receiver', + action: 'edit', + id: params.id, + }), + ); + } + async function onStartModelTrainJobClick(type: string) { + if (detail?.auth_frontend_status !== ModelGroupStatus.ALL_AUTHORIZED) { + Message.info('所有合作伙伴授权通过后才可以发起模型训练任务'); + return false; + } + if (detail?.auto_update_status === AutoModelJobStatus.STOPPED && type === 'repeat') { + if (!projectId || !detail?.id) { + Message.info('请选择工作区!'); + return; + } + try { + await fetchAutoUpdateModelJobDetail(projectId, detail?.id); + } catch (err: any) { + if (err.message.indexOf('is running') !== -1) { + Message.info('有定时任务正在运行,请停止后重试!'); + return; + } + } + } + history.push( + generatePath(routes.ModelTrainJobCreate, { + type: type, + id: params.id, + step: 'coordinator', + }), + ); + } + function onDeleteButtonClick() { + Modal.delete({ + title: `确认要删除「${detail?.name}」?`, + content: '删除后,该模型训练下的所有信息无法复原,请谨慎操作', + onOk() { + deleteModelJobGroup(projectId!, params.id) + .then(() => { + Message.success('删除成功'); + history.push(routes.ModelTrainList); + }) + .catch((error) => Message.error(error.message)); + }, + }); + } +}; +export default Detail; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/List/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/List/index.tsx new file mode 100644 index 000000000..1cb86bcfc --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/List/index.tsx @@ -0,0 +1,478 @@ +import React, { FC, useMemo } from 'react'; +import { generatePath, useHistory } from 'react-router'; +import { useMutation, useQueries, useQuery } from 'react-query'; + +import { + useGetAppFlagValue, + useGetCurrentProjectId, + useGetCurrentProjectParticipantId, + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, + useTablePaginationWithUrlState, + useUrlState, +} from 'hooks'; +import { + fetchModelJobGroupList, + deleteModelJobGroup, + authorizeModelJobGroup, + fetchPeerModelJobGroupDetail, + fetchModelJobGroupDetail, +} from 'services/modelCenter'; +import { TIME_INTERVAL, CONSTANTS } from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { expression2Filter } from 'shared/filter'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { + algorithmTypeFilters, + roleFilters, + statusFilters, + FILTER_MODEL_TRAIN_OPERATOR_MAPPER, + MODEL_GROUP_STATUS_MAPPER, + resetAuthInfo, + AUTH_STATUS_TEXT_MAP, + getModelJobStatus, +} from 'views/ModelCenter/shared'; +import { launchModelJobGroup } from 'services/modelCenter'; + +import { Link } from 'react-router-dom'; +import { Button, Input, Message, Space, Table } from '@arco-design/web-react'; +import { IconPlus } from '@arco-design/web-react/icon'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout from 'components/SharedPageLayout'; +import MoreActions from 'components/MoreActions'; +import TodoPopover from 'components/TodoPopover'; +import Modal from 'components/Modal'; + +import { ColumnProps } from '@arco-design/web-react/es/Table'; +import { ModelGroupStatus, ModelJobGroup, ModelJobRole } from 'typings/modelCenter'; + +import routes from '../../routes'; +import StateIndicator from 'components/StateIndicator'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; +import AlgorithmType from 'components/AlgorithmType'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import ProgressWithText from 'components/ProgressWithText'; +import { Flag, FlagKey } from 'typings/flag'; +import { fetchParticipantFlagById } 
from 'services/flag'; + +type TProps = {}; +const { Search } = Input; +const List: FC<TProps> = function (props: TProps) { + const history = useHistory(); + const { urlState: pageInfoState, paginationProps } = useTablePaginationWithUrlState(); + const [urlState, setUrlState] = useUrlState({ + //TODO: BE support states filter & sort + states: [], + updated_at_sort: '', + filter: filterExpressionGenerator( + { + configured: true, + }, + FILTER_MODEL_TRAIN_OPERATOR_MAPPER, + ), + }); + + const projectId = useGetCurrentProjectId(); + const participantId = useGetCurrentProjectParticipantId(); + const participantList = useGetCurrentProjectParticipantList(); + const model_job_global_config_enabled = useGetAppFlagValue( + FlagKey.MODEL_JOB_GLOBAL_CONFIG_ENABLED, + ); + const myPureDomainName = useGetCurrentPureDomainName(); + + const participantsFlagQueries = useQueries( + participantList.map((participant) => { + return { + queryKey: ['fetchParticipantFlag', participant.id], + queryFn: () => fetchParticipantFlagById(participant.id), + retry: 2, + enabled: Boolean(participant.id), + refetchOnWindowFocus: false, + }; + }), + ); + + const listQuery = useQuery( + ['fetchModelJobGroupList', projectId, pageInfoState.page, pageInfoState.pageSize, urlState], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchModelJobGroupList(projectId!, { + page: pageInfoState.page, + pageSize: pageInfoState.pageSize, + filter: urlState.filter, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + keepPreviousData: true, + refetchOnWindowFocus: false, + }, + ); + + const authorizeMutate = useMutation( + (payload: { id: ID; authorized: boolean }) => { + return authorizeModelJobGroup(projectId!, payload.id, payload.authorized); + }, + { + onSuccess(_, { authorized }) { + listQuery.refetch(); + Message.success(!authorized ? '撤销成功' : '授权成功'); + }, + onError(_, { authorized }) { + listQuery.refetch(); + Message.error(!authorized ? '撤销失败' : '授权失败'); + }, + }, + ); + const linkToNewCreatePage = useMemo(() => { + let flag = true; + participantsFlagQueries.forEach((item) => { + const participantFlag = item.data as { data: Flag } | undefined; + if (participantFlag?.data.model_job_global_config_enabled === false) { + flag = false; + } + }); + return flag; + }, [participantsFlagQueries]); + + const list = useMemo(() => { + if (!listQuery.data?.data) return []; + return listQuery.data.data; + }, [listQuery.data]); + + const columns = useMemo<ColumnProps<ModelJobGroup>[]>(() => { + return [ + { + title: '名称', + dataIndex: 'name', + key: 'name', + width: 200, + ellipsis: true, + render: (name: string, record) => { + return ( + <Link + to={generatePath(routes.ModelTrainDetail, { + id: record.id, + })} + > + {name} + </Link> + ); + }, + }, + { + title: '类型', + dataIndex: 'algorithm_type', + name: 'algorithm_type', + width: 150, + filters: algorithmTypeFilters.filters, + filteredValue: expression2Filter(urlState.filter)?.algorithm_type, + render: (type) => { + return <AlgorithmType type={type as EnumAlgorithmProjectType} />; + }, + }, + { + title: '发起方', + dataIndex: 'role', + name: 'role', + width: 120, + filters: roleFilters.filters, + filteredValue: expression2Filter(urlState.filter)?.role, + filterMultiple: false, + render: (role: string, record) => + role + ? role === ModelJobRole.COORDINATOR + ? 
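/* groups we initiated are shown as our side; otherwise look up the coordinator participant's name */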
'我方'
+ : participantList.find((item) => item.id === record.coordinator_id)?.name ||
+ CONSTANTS.EMPTY_PLACEHOLDER
+ : CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ {
+ title: '授权状态',
+ dataIndex: 'auth_frontend_status',
+ name: 'auth_frontend_status',
+ width: 120,
+ render: (value: ModelGroupStatus, record: any) => {
+ const progressConfig = MODEL_GROUP_STATUS_MAPPER?.[value];
+ const authInfo = resetAuthInfo(
+ record.participants_info.participants_map,
+ participantList,
+ myPureDomainName,
+ );
+ return (
+ <ProgressWithText
+ statusText={progressConfig?.name}
+ status={progressConfig?.status}
+ percent={progressConfig?.percent}
+ toolTipContent={
+ [ModelGroupStatus.PART_AUTH_PENDING, ModelGroupStatus.SELF_AUTH_PENDING].includes(
+ value,
+ ) ? (
+ <>
+ {authInfo.map((item: any) => (
+ <div key={item.name}>{`${item.name}: ${
+ AUTH_STATUS_TEXT_MAP?.[item.authStatus]
+ }`}</div>
+ ))}
+ </>
+ ) : undefined
+ }
+ />
+ );
+ },
+ },
+ {
+ title: '任务总数',
+ dataIndex: 'latest_version',
+ name: 'model_jobs',
+ width: 100,
+ align: 'center',
+ render: (value: any) => {
+ return typeof value === 'number' ? value : CONSTANTS.EMPTY_PLACEHOLDER;
+ },
+ },
+ {
+ title: '最新任务状态',
+ dataIndex: 'latest_job_state',
+ name: 'latest_job_state',
+ width: 150,
+ // TODO: 后端筛选
+ ...statusFilters,
+ filteredValue: urlState.states ?? [],
+ render: (value: any, record) => {
+ if (!value) {
+ return CONSTANTS.EMPTY_PLACEHOLDER;
+ }
+ return (
+ <StateIndicator
+ {...getModelJobStatus(record.latest_job_state, {
+ isHideAllActionList: true,
+ })}
+ />
+ );
+ },
+ },
+ {
+ title: '创建者',
+ dataIndex: 'creator_username',
+ name: 'creator',
+ width: 120,
+ render: (value: any) => value ?? CONSTANTS.EMPTY_PLACEHOLDER,
+ },
+ {
+ title: '更新时间',
+ dataIndex: 'updated_at',
+ name: 'updated_at',
+ width: 150,
+ sorter(a: ModelJobGroup, b: ModelJobGroup) {
+ return a.updated_at - b.updated_at;
+ },
+ defaultSortOrder: urlState?.updated_at_sort,
+ render: (date: number) => <div>{formatTimestamp(date)}</div>,
+ },
+ {
+ title: '操作',
+ dataIndex: 'authorized',
+ name: 'operation',
+ fixed: 'right',
+ key: 'operate',
+ width: 200,
+ render: (authorized: boolean, record) => (
+ <Space>
+ <button
+ className="custom-text-button"
+ style={{
+ width: 60,
+ textAlign: 'left',
+ }}
+ onClick={async () => {
+ let isPeerAuthorized = false;
+ let isOldModelGroup = true;
+ try {
+ const res = await fetchModelJobGroupDetail(projectId!, record.id!);
+ const detail = res.data;
+ isOldModelGroup = Boolean(detail?.config?.job_definitions?.length);
+ } catch (error: any) {
+ Message.error(error.message);
+ }
+ if (isOldModelGroup && record.role !== 'COORDINATOR') {
+ Message.info('旧版模型训练作业非发起方暂不支持发起模型训练任务');
+ return;
+ }
+ try {
+ // TODO:能否发起训练任务判定逻辑后续根据 auth_frontend_status 字段判断
+ const resp = await fetchPeerModelJobGroupDetail(
+ projectId!,
+ record.id!,
+ participantId!,
+ );
+ isPeerAuthorized = resp?.data?.authorized ?? false;
+ } catch (error: any) {
+ Message.error(error.message);
+ }
+
+ if (!isPeerAuthorized) {
+ Message.info('合作伙伴未授权,不能发起新任务');
+ return;
+ }
+
+ model_job_global_config_enabled && !isOldModelGroup
+ ? history.push(
+ generatePath(routes.ModelTrainJobCreate, {
+ type: 'once',
+ id: record?.id,
+ step: 'coordinator',
+ }),
+ )
+ : launchModelJobGroup(projectId!, record.id)
+ .then((resp) => {
+ Message.success('发起成功');
+ listQuery.refetch();
+ })
+ .catch((error) => Message.error(error.message));
+ }}
+ >
+ 发起新任务
+ </button>
+
+ <ButtonWithPopconfirm
+ title={
+ record.authorized
+ ?
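/* the confirmation copy flips depending on whether authorization is being granted or revoked */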
'撤销授权后,发起方不可运行模型训练,正在运行的任务不受影响' + : '授权后,发起方可以运行模型训练' + } + buttonProps={{ + type: 'text', + className: 'custom-text-button', + style: { + width: 60, + textAlign: 'left', + }, + }} + buttonText={record.authorized ? '撤销' : '授权'} + onConfirm={() => { + authorizeMutate.mutate({ + id: record.id, + authorized: !record.authorized, + }); + }} + /> + + <MoreActions + actionList={[ + { + label: '删除' as any, + danger: true, + onClick() { + Modal.delete({ + title: `确认要删除「${record.name}」?`, + content: '删除后,该模型训练下的所有信息无法复原,请谨慎操作', + onOk() { + deleteModelJobGroup(projectId!, record.id) + .then((resp) => { + Message.success('删除成功'); + listQuery.refetch(); + }) + .catch((error) => Message.error(error.message)); + }, + }); + }, + }, + ]} + /> + </Space> + ), + }, + ]; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [urlState, projectId, participantId, myPureDomainName, participantList]); + + return ( + <SharedPageLayout title={'模型训练'} rightTitle={<TodoPopover.NewTrainModel />}> + <GridRow justify="space-between" align="center"> + <Button + type="primary" + className={'custom-operation-button'} + icon={<IconPlus />} + onClick={goToCreatePage} + > + 创建训练 + </Button> + <Search + className={'custom-input'} + allowClear + placeholder={'输入模型训练名称'} + defaultValue={expression2Filter(urlState.filter).name} + onSearch={onSearch} + onClear={() => onSearch('')} + /> + </GridRow> + <Table + className="custom-table custom-table-left-side-filter" + rowKey="id" + loading={listQuery.isFetching} + data={list} + scroll={{ x: '100%' }} + columns={columns} + pagination={{ + ...paginationProps, + total: listQuery.data?.page_meta?.total_items ?? undefined, + }} + onChange={(pagination, sorter, filters, extra) => { + switch (extra.action) { + case 'sort': + //TODO: BE support sort + setUrlState((prevState) => ({ + ...prevState, + [`${sorter.field}_sort`]: sorter.direction, + })); + break; + case 'filter': { + const copyFilters = { + ...filters, + name: expression2Filter(urlState.filter).name, + configured: true, + }; + setUrlState((prevState) => ({ + ...prevState, + page: 1, + filter: filterExpressionGenerator(copyFilters, FILTER_MODEL_TRAIN_OPERATOR_MAPPER), + states: filters.latest_job_state, + })); + break; + } + default: + } + }} + /> + </SharedPageLayout> + ); + + function goToCreatePage() { + model_job_global_config_enabled && linkToNewCreatePage + ? 
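/* centralized creation needs the global-config flag on our side and on every peer */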
history.push(generatePath(routes.ModelTrainCreateCentralization, { role: 'sender' })) + : history.push( + generatePath(routes.ModelTrainCreate, { + role: 'sender', + action: 'create', + }), + ); + } + + function onSearch(value: string) { + const filters = expression2Filter(urlState.filter); + filters.name = value; + setUrlState((prevState) => ({ + ...prevState, + keyword: value, + page: 1, + filter: filterExpressionGenerator(filters, FILTER_MODEL_TRAIN_OPERATOR_MAPPER), + })); + } +}; + +export default List; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/AlgorithmVersionSelect/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/AlgorithmVersionSelect/index.tsx new file mode 100644 index 000000000..c3153a95b --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/AlgorithmVersionSelect/index.tsx @@ -0,0 +1,132 @@ +import React, { useMemo } from 'react'; +import { Select, Grid, Input } from '@arco-design/web-react'; +import { AlgorithmParameter, AlgorithmProject, AlgorithmVersionStatus } from 'typings/algorithm'; +import { useQuery } from 'react-query'; +import { fetchAlgorithmList, fetchPeerAlgorithmList } from 'services/algorithm'; +import { useGetCurrentProjectId } from 'hooks'; + +const { Row, Col } = Grid; +type AlgorithmVersionValue = { + algorithmUuid?: ID; + config?: AlgorithmParameter[]; +}; +interface Props { + algorithmProjectList: AlgorithmProject[]; + peerAlgorithmProjectList: AlgorithmProject[]; + algorithmProjectUuid: ID; + onChange?: (value: AlgorithmVersionValue) => void; + value?: AlgorithmVersionValue; +} + +export default function AlgorithmVersionSelect({ + algorithmProjectList, + peerAlgorithmProjectList, + algorithmProjectUuid, + onChange: onSelectedAlgorithmVersionChange, + value, +}: Props) { + const projectId = useGetCurrentProjectId(); + const { algorithmOwner, selectedAlgorithmProject } = useMemo(() => { + if (algorithmProjectList?.find((item) => item.uuid === algorithmProjectUuid)) { + return { + algorithmOwner: 'self', + selectedAlgorithmProject: algorithmProjectList?.find( + (item) => item.uuid === algorithmProjectUuid, + ), + }; + } + if (peerAlgorithmProjectList?.find((item) => item.uuid === algorithmProjectUuid)) { + return { + algorithmOwner: 'peer', + selectedAlgorithmProject: peerAlgorithmProjectList?.find( + (item) => item.uuid === algorithmProjectUuid, + ), + }; + } + return {}; + }, [algorithmProjectList, algorithmProjectUuid, peerAlgorithmProjectList]); + + const configValueList = useMemo(() => { + return value?.config || []; + }, [value?.config]); + + const algorithmVersionListQuery = useQuery( + ['fetchAlgorithmVersionList', selectedAlgorithmProject, algorithmOwner], + () => { + if (algorithmOwner === 'self') { + return fetchAlgorithmList(0, { algo_project_id: selectedAlgorithmProject?.id! 
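/* our own project: list its algorithm versions locally; peer-owned projects are queried via the participant API below */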
}); + } else { + return fetchPeerAlgorithmList(projectId, selectedAlgorithmProject?.participant_id!, { + algorithm_project_uuid: selectedAlgorithmProject?.uuid!, + }); + } + }, + { + enabled: Boolean(projectId && selectedAlgorithmProject && algorithmOwner), + }, + ); + + const algorithmVersionListOptions = useMemo(() => { + return algorithmVersionListQuery.data?.data + .filter( + (item) => item.status === AlgorithmVersionStatus.PUBLISHED || item.source === 'PRESET', + ) + .map((item) => { + return { + label: `V${item.version}`, + value: item.uuid as string, + extra: item, + }; + }); + }, [algorithmVersionListQuery.data?.data]); + + return ( + <> + <Select + value={value?.algorithmUuid || undefined} + options={algorithmVersionListOptions} + onChange={handleSelectChange} + /> + {configValueList.length > 0 && ( + <> + <Row gutter={[12, 12]}> + <Col span={12}>超参数</Col> + </Row> + + {configValueList.map((item, index) => ( + <Row key={`${value?.algorithmUuid}_$${item.name}_${index}`} gutter={[12, 12]}> + <Col span={12}> + <Input + value={item.name} + onChange={(value) => onConfigValueChange(value, 'name', index)} + disabled={true} + /> + </Col> + <Col span={12}> + <Input + value={item.value} + onChange={(value) => onConfigValueChange(value, 'value', index)} + /> + </Col> + </Row> + ))} + </> + )} + </> + ); + function handleSelectChange(val: any) { + const selectedAlgorithm = algorithmVersionListOptions?.find((item) => item.value === val); + onSelectedAlgorithmVersionChange?.({ + algorithmUuid: val, + config: selectedAlgorithm?.extra.parameter?.variables || [], + }); + } + function onConfigValueChange(val: string, key: string, index: number) { + const newConfigValueList = [...configValueList]; + newConfigValueList[index] = { ...newConfigValueList[index], [key]: val }; + onSelectedAlgorithmVersionChange?.({ + ...value, + config: newConfigValueList, + }); + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/StepOneCoordinator/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/StepOneCoordinator/index.tsx new file mode 100644 index 000000000..76256d2ed --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/StepOneCoordinator/index.tsx @@ -0,0 +1,294 @@ +import React, { useEffect, useMemo } from 'react'; +import { generatePath, useHistory } from 'react-router'; +import routes from 'views/ModelCenter/routes'; +import { ModelJobGroup, ResourceTemplateType, TrainRoleType } from 'typings/modelCenter'; +import { AlgorithmProject, EnumAlgorithmProjectType } from 'typings/algorithm'; +import { MAX_COMMENT_LENGTH } from 'shared/validator'; +import { + ALGORITHM_TYPE_LABEL_MAPPER, + isNNAlgorithm, + isTreeAlgorithm, + lossTypeOptions, + nnBaseConfigList, + trainRoleTypeOptions, + treeBaseConfigList, +} from 'views/ModelCenter/shared'; + +import { Form, Input, Space, Button, Spin, Tag, Select, Typography } from '@arco-design/web-react'; +import BlockRadio from 'components/_base/BlockRadio'; +import ConfigForm, { ItemProps } from 'components/ConfigForm'; +import ResourceConfig, { MixedAlgorithmType } from 'components/ResourceConfig'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import { useGetCurrentPureDomainName } from 'hooks'; +import AlgorithmProjectSelect from '../../CreateCentralization/AlgorithmProjectSelect'; +import AlgorithmVersionSelect from '../AlgorithmVersionSelect'; +import { useQuery } from 'react-query'; +import { fetchDataBatchs } from 
'services/dataset'; + +type Props = { + isLoading?: boolean; + modelGroup?: ModelJobGroup; + jobType: string; + datasetName?: string; + isFormValueChanged?: boolean; + stepOneFormConfigValues?: Record<string, any>; + onFirstStepSubmit: (formInfo: Record<string, any>) => void; + onFormValueChange?: (...args: any[]) => void; + formInitialValues?: Record<string, any>; + treeAdvancedFormItemList?: ItemProps[]; + nnAdvancedFormItemList: ItemProps[]; + algorithmProjectList?: AlgorithmProject[]; + peerAlgorithmProjectList?: AlgorithmProject[]; + datasetBatchType?: 'day' | 'hour'; +}; + +export default function StepOneCoordinator({ + isLoading, + modelGroup, + jobType, + datasetName, + isFormValueChanged, + onFirstStepSubmit, + onFormValueChange, + formInitialValues, + nnAdvancedFormItemList, + treeAdvancedFormItemList, + algorithmProjectList, + peerAlgorithmProjectList, + datasetBatchType = 'day', +}: Props) { + const history = useHistory(); + const [formInstance] = Form.useForm(); + const myPureDomainName = useGetCurrentPureDomainName(); + + const dataBatchsQuery = useQuery( + ['fetchDataBatch'], + () => fetchDataBatchs(modelGroup?.dataset_id!), + { + enabled: !!modelGroup?.dataset_id, + }, + ); + const dataBatchsOptions = useMemo(() => { + return dataBatchsQuery.data?.data.map((item) => { + return { + label: item.name, + value: item.id, + }; + }); + }, [dataBatchsQuery.data?.data]); + + useEffect(() => { + if (!formInitialValues) { + return; + } + formInstance.setFieldsValue(formInitialValues); + }, [formInitialValues, formInstance]); + return ( + <Spin loading={isLoading}> + <Form + className="form-content" + form={formInstance} + scrollToFirstError={true} + initialValues={formInitialValues} + onValuesChange={onFormValueChange} + onSubmit={onNextStepClick} + > + <section className="form-section"> + <h3>基本信息</h3> + <Form.Item label="训练名称"> + <Typography.Text bold={true}>{formInitialValues?.name}</Typography.Text> + </Form.Item> + <Form.Item + field={'comment'} + label={'描述'} + rules={[ + { + maxLength: MAX_COMMENT_LENGTH, + message: '最多为 200 个字符', + }, + ]} + > + <Input.TextArea placeholder={'最多为 200 个字符'} /> + </Form.Item> + </section> + <section className="form-section"> + <h3>训练配置</h3> + + <Form.Item label={'联邦类型'}> + <Typography.Text bold={true}> + {ALGORITHM_TYPE_LABEL_MAPPER?.[modelGroup?.algorithm_type!]} + </Typography.Text> + </Form.Item> + + {isTreeAlgorithm(modelGroup?.algorithm_type as EnumAlgorithmProjectType) && + renderTreeParams()} + {isNNAlgorithm(modelGroup?.algorithm_type as EnumAlgorithmProjectType) && + renderNNParams()} + {modelGroup?.algorithm_type !== EnumAlgorithmProjectType.NN_HORIZONTAL && ( + <Form.Item + field={'role'} + label={'训练角色'} + rules={[{ required: true, message: '必须选择训练角色' }]} + > + <BlockRadio isCenter={true} options={trainRoleTypeOptions} /> + </Form.Item> + )} + + {jobType !== 'repeat' && ( + <Form.Item label={'数据集'}> + <Spin loading={false}> + <Space> + <Typography.Text bold={true}>{datasetName || ''}</Typography.Text> + <Tag color="arcoblue">结果</Tag> + </Space> + </Spin> + </Form.Item> + )} + </section> + <section className="form-section"> + <h3>我方资源配置</h3> + <Form.Item + field={'resource_config'} + label={'资源模版'} + rules={[{ required: true, message: '必填项' }]} + > + <ResourceConfig + algorithmType={modelGroup?.algorithm_type as MixedAlgorithmType} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={false} + localDisabledList={['master.replicas']} + /> + </Form.Item> + </section> + {jobType === 'repeat' && ( + <section 
className="form-section"> + <h3>定时续训</h3> + <Form.Item label={'定时'}> + <Typography.Text bold={true}> + {datasetBatchType === 'hour' ? '每小时' : '每天'} + </Typography.Text> + </Form.Item> + <Form.Item + label={'数据集'} + field="data_batch_id" + rules={[{ required: true, message: '必填项' }]} + > + <Select + prefix={ + <Space align="center"> + <Typography.Text bold={true}>{datasetName || ''}</Typography.Text> + <Tag color={datasetBatchType === 'hour' ? 'arcoblue' : 'purple'}> + {datasetBatchType === 'hour' ? '小时级' : '天级'} + </Tag> + </Space> + } + placeholder="请选择数据批次" + options={dataBatchsOptions} + allowClear={true} + loading={dataBatchsQuery.isFetching} + /> + </Form.Item> + </section> + )} + <Space> + <Button type="primary" htmlType="submit"> + 下一步 + </Button> + <ButtonWithModalConfirm + isShowConfirmModal={isFormValueChanged} + onClick={() => { + history.goBack(); + }} + > + 取消 + </ButtonWithModalConfirm> + </Space> + </Form> + </Spin> + ); + function renderTreeParams() { + return ( + <> + { + <Form.Item + field={'loss_type'} + label={'损失函数类型'} + rules={[{ required: true, message: '必须选择损失函数类型' }]} + > + <BlockRadio.WithTip options={lossTypeOptions} isOneHalfMode={true} /> + </Form.Item> + } + <Form.Item + field={'tree_config'} + label={'参数配置'} + rules={[{ required: true, message: '必须填写参数配置' }]} + > + <ConfigForm + cols={2} + formItemList={treeBaseConfigList} + isResetOnFormItemListChange={true} + collapseFormItemList={treeAdvancedFormItemList} + formProps={{ + style: { + marginTop: 7, + }, + }} + /> + </Form.Item> + </> + ); + } + function renderNNParams() { + return ( + <> + <Form.Item label={'算法'} field={`algorithmProjects.${myPureDomainName}`}> + <AlgorithmProjectSelect + algorithmType={[modelGroup?.algorithm_type as EnumAlgorithmProjectType]} + supportEdit={false} + /> + </Form.Item> + <Form.Item + label={'算法版本'} + field={`${myPureDomainName}.algorithm`} + rules={[{ required: true, message: '必填项' }]} + > + <AlgorithmVersionSelect + algorithmProjectList={algorithmProjectList || []} + peerAlgorithmProjectList={peerAlgorithmProjectList || []} + algorithmProjectUuid={formInstance.getFieldValue( + `algorithmProjects.${myPureDomainName}`, + )} + /> + </Form.Item> + <Form.Item + field={'nn_config'} + label={'参数配置'} + rules={[{ required: true, message: '必填项' }]} + > + <ConfigForm + cols={2} + isResetOnFormItemListChange={true} + formItemList={nnBaseConfigList} + collapseFormItemList={nnAdvancedFormItemList} + /> + </Form.Item> + </> + ); + } + function onNextStepClick() { + formInstance.validate(); + const coordinatorConfig = formInstance.getFieldsValue(); + onFirstStepSubmit?.({ + role: TrainRoleType.LABEL, + ...formInitialValues, + ...coordinatorConfig, + }); + history.push( + generatePath(routes.ModelTrainJobCreate, { + id: modelGroup?.id, + type: jobType, + step: 'participant', + }), + ); + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/StepTwoParticipant/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/StepTwoParticipant/index.tsx new file mode 100644 index 000000000..d267196d2 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/StepTwoParticipant/index.tsx @@ -0,0 +1,285 @@ +import React, { useEffect } from 'react'; +import { useHistory } from 'react-router'; +import { useGetCurrentProjectParticipantList } from 'hooks'; +import { AlgorithmProject, EnumAlgorithmProjectType } from 'typings/algorithm'; +import { ModelJobGroup, ResourceTemplateType, TrainRoleType } from 
'typings/modelCenter'; +import { + isNNAlgorithm, + isTreeAlgorithm, + nnBaseConfigList, + treeBaseConfigList, +} from 'views/ModelCenter/shared'; +import { Button, Checkbox, Form, Space, Tooltip, Typography } from '@arco-design/web-react'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import ConfigForm, { ItemProps } from 'components/ConfigForm'; +import ResourceConfig, { MixedAlgorithmType } from 'components/ResourceConfig'; +import AlgorithmProjectSelect from '../../CreateCentralization/AlgorithmProjectSelect'; +import AlgorithmVersionSelect from '../AlgorithmVersionSelect'; +import { IconQuestionCircle } from '@arco-design/web-react/icon'; + +type Props = { + modelGroup?: ModelJobGroup; + stepOneFormConfigValues?: Record<string, any>; + formInitialValues?: Record<string, any>; + isFormValueChanged?: boolean; + onFormValueChange?: (...args: any[]) => void; + onSecondStepSubmit?: (value: any) => void; + saveStepTwoValues?: (formInfo: Record<string, any>) => void; + treeAdvancedFormItemList?: ItemProps[]; + nnAdvancedFormItemList?: ItemProps[]; + algorithmProjectList?: AlgorithmProject[]; + peerAlgorithmProjectList?: AlgorithmProject[]; + submitLoading?: boolean; +}; + +export default function StepTwoParticipant({ + modelGroup, + formInitialValues, + isFormValueChanged, + onFormValueChange, + onSecondStepSubmit, + saveStepTwoValues, + stepOneFormConfigValues, + nnAdvancedFormItemList, + treeAdvancedFormItemList, + algorithmProjectList, + peerAlgorithmProjectList, + submitLoading, +}: Props) { + const history = useHistory(); + const [formInstance] = Form.useForm(); + const participantList = useGetCurrentProjectParticipantList(); + + useEffect(() => { + if (!stepOneFormConfigValues) { + history.goBack(); + } + }, [stepOneFormConfigValues, history]); + + useEffect(() => { + if (!formInitialValues) { + return; + } + participantList.forEach((participant) => { + formInstance.setFieldValue( + resetFiled(participant.pure_domain_name, 'algorithmProjectUuid'), + formInitialValues?.[participant.pure_domain_name].algorithmProjectUuid, + ); + formInstance.setFieldValue( + resetFiled(participant.pure_domain_name, 'nn_config'), + formInitialValues?.[participant.pure_domain_name].nn_config, + ); + formInstance.setFieldValue( + resetFiled(participant.pure_domain_name, 'tree_config'), + formInitialValues?.[participant.pure_domain_name].tree_config, + ); + }); + }, [formInitialValues, formInstance, participantList]); + useEffect(() => { + if (!formInitialValues) { + return; + } + participantList.forEach((participant) => { + if (formInitialValues?.[participant.pure_domain_name].resource_config) { + formInstance.setFieldValue( + resetFiled(participant.pure_domain_name, 'resource_config'), + formInitialValues?.[participant.pure_domain_name].resource_config, + ); + } + }); + }, [formInitialValues, formInstance, participantList]); + return ( + <Form + className="form-content" + form={formInstance} + scrollToFirstError={true} + initialValues={formInitialValues} + onValuesChange={onFormValueChange} + onSubmit={onSecondStepSubmit} + > + {modelGroup?.algorithm_type === EnumAlgorithmProjectType.NN_HORIZONTAL ? 
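/* horizontal NN: every participant configures its own algorithm, parameters and resources; other types reuse the coordinator's step-one choices */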
( + <> + {participantList.map((participant) => { + return ( + <section key={participant.id} className="form-section"> + <h3>{participant.pure_domain_name}配置</h3> + <Form.Item + label={'算法'} + field={resetFiled(participant.pure_domain_name, 'algorithmProjectUuid')} + > + <AlgorithmProjectSelect + algorithmType={[modelGroup?.algorithm_type as EnumAlgorithmProjectType]} + supportEdit={false} + /> + </Form.Item> + <Form.Item + label={'算法版本'} + field={`${participant.pure_domain_name}.algorithm`} + rules={[{ required: true, message: '必填项' }]} + > + <AlgorithmVersionSelect + algorithmProjectList={algorithmProjectList || []} + peerAlgorithmProjectList={peerAlgorithmProjectList || []} + algorithmProjectUuid={formInstance.getFieldValue( + resetFiled(participant.pure_domain_name, 'algorithmProjectUuid'), + )} + /> + </Form.Item> + <Form.Item + field={resetFiled(participant.pure_domain_name, 'nn_config')} + label={'参数配置'} + rules={[{ required: true, message: '必填项' }]} + > + <ConfigForm + cols={2} + formItemList={nnBaseConfigList} + collapseFormItemList={nnAdvancedFormItemList} + isResetOnFormItemListChange={true} + /> + </Form.Item> + <Form.Item + field={resetFiled(participant.pure_domain_name, 'resource_config')} + label={'资源模版'} + rules={[{ required: true, message: '必填项' }]} + > + <ResourceConfig + algorithmType={modelGroup?.algorithm_type as MixedAlgorithmType} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={false} + localDisabledList={['master.replicas']} + collapsedOpen={false} + /> + </Form.Item> + </section> + ); + })} + </> + ) : ( + <> + {participantList.map((participant) => { + return ( + <div key={participant.id}> + <section className="form-section"> + <h3>{participant.name}训练配置</h3> + + {isTreeAlgorithm(modelGroup?.algorithm_type as EnumAlgorithmProjectType) && ( + <> + <Form.Item label={'损失函数类型'}> + <Typography.Text bold={true}> + {stepOneFormConfigValues?.loss_type} + </Typography.Text> + </Form.Item> + + <Form.Item + field={resetFiled(participant.pure_domain_name, 'tree_config')} + label={'参数配置'} + rules={[{ required: true, message: '必须填写参数配置' }]} + > + <ConfigForm + cols={2} + isResetOnFormItemListChange={true} + formItemList={treeBaseConfigList} + collapseFormItemList={treeAdvancedFormItemList} + formProps={{ + style: { + marginTop: 7, + }, + }} + /> + </Form.Item> + </> + )} + {isNNAlgorithm(modelGroup?.algorithm_type as EnumAlgorithmProjectType) && ( + <> + <Form.Item + label={'算法'} + field={resetFiled(participant.pure_domain_name, 'algorithmProjectUuid')} + > + <AlgorithmProjectSelect + algorithmType={[modelGroup?.algorithm_type as EnumAlgorithmProjectType]} + supportEdit={false} + /> + </Form.Item> + <Form.Item + label={'算法版本'} + field={`${participant.pure_domain_name}.algorithm`} + rules={[{ required: true, message: '必填项' }]} + > + <AlgorithmVersionSelect + algorithmProjectList={algorithmProjectList || []} + peerAlgorithmProjectList={peerAlgorithmProjectList || []} + algorithmProjectUuid={formInstance.getFieldValue( + resetFiled(participant.pure_domain_name, 'algorithmProjectUuid'), + )} + /> + </Form.Item> + + <Form.Item + field={resetFiled(participant.pure_domain_name, 'nn_config')} + label={'参数配置'} + rules={[{ required: true, message: '必填项' }]} + > + <ConfigForm + cols={2} + formItemList={nnBaseConfigList} + collapseFormItemList={nnAdvancedFormItemList} + isResetOnFormItemListChange={true} + /> + </Form.Item> + </> + )} + <Form.Item label={'训练角色'}> + <Typography.Text bold={true}> + {stepOneFormConfigValues?.role === TrainRoleType.LABEL ? 
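/* participants take the opposite training role from the one the coordinator picked in step one */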
'特征方' : '标签方'} + </Typography.Text> + </Form.Item> + </section> + <section className="form-section"> + <h3>{participant.name}资源配置</h3> + <Form.Item + field={resetFiled(participant.pure_domain_name, 'resource_config')} + label={'资源模版'} + rules={[{ required: true, message: '必填项' }]} + > + <ResourceConfig + algorithmType={modelGroup?.algorithm_type as MixedAlgorithmType} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={false} + localDisabledList={['master.replicas']} + /> + </Form.Item> + </section> + </div> + ); + })} + </> + )} + <Space align="center"> + <Button loading={submitLoading} type="primary" htmlType="submit"> + 提交 + </Button> + <ButtonWithModalConfirm + isShowConfirmModal={false} + onClick={() => { + saveStepTwoValues?.(formInstance.getFields()); + history.goBack(); + }} + > + 上一步 + </ButtonWithModalConfirm> + <Form.Item field="metric_is_public" triggerPropName="checked" style={{ marginBottom: 0 }}> + <Checkbox style={{ width: 200, fontSize: 12 }}> + 共享训练指标 + <Tooltip content="共享后,合作伙伴能够查看本方训练指标"> + <IconQuestionCircle /> + </Tooltip> + </Checkbox> + </Form.Item> + </Space> + </Form> + ); + + function resetFiled(participantName: string, filedName: string) { + return `${participantName}.${filedName}`; + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/index.module.less b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/index.module.less new file mode 100644 index 000000000..c089349d8 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/index.module.less @@ -0,0 +1,9 @@ +.step_container{ + width: 500px; + max-width: 780px; +} +.form_area{ + flex: 1; + margin: 20px auto 0; + background-color: white; +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/index.tsx new file mode 100644 index 000000000..ea3b7a15e --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/ModelTrainJobCreate/index.tsx @@ -0,0 +1,580 @@ +import React, { useEffect, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useParams, useHistory, Route, generatePath } from 'react-router'; +import routes from '../../routes'; +import { + createModelJob, + fetchModelJobDefinition, + fetchModelJobGroupDetail, + updateModelJob, + fetchAutoUpdateModelJobDetail, +} from 'services/modelCenter'; +import { fetchDatasetDetail, fetchDatasetJobDetail } from 'services/dataset'; +import { fetchPeerAlgorithmProjectList, fetchProjectList } from 'services/algorithm'; +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useGetCurrentPureDomainName, + useIsFormValueChange, +} from 'hooks'; +import { AutoModelJobStatus, EnumModelJobType, LossType, TrainRoleType } from 'typings/modelCenter'; +import { Participant } from 'typings/participant'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { Message, Steps, Grid } from '@arco-design/web-react'; +import StepOneCoordinator from './StepOneCoordinator'; +import BackButton from 'components/BackButton'; +import StepTwoParticipant from './StepTwoParticipant'; +import { EnumAlgorithmProjectSource, EnumAlgorithmProjectType } from 'typings/algorithm'; +import { + getAdvanceConfigListByDefinition, + getConfigInitialValuesByDefinition, + getNNBaseConfigInitialValuesByDefinition, + getTreeBaseConfigInitialValuesByDefinition, + 
hydrateModalGlobalConfig, + isNNAlgorithm, + isTreeAlgorithm, +} from 'views/ModelCenter/shared'; + +import styles from './index.module.less'; + +const { Row } = Grid; +const { Step } = Steps; + +type TRouteParams = { + id: string; + step: keyof typeof CreateSteps; + type: string; +}; +enum CreateSteps { + coordinator = 1, + participant = 2, +} + +function ModelTrainJobCreate() { + const history = useHistory(); + const { id: modelGroupId, step: createStep, type: jobType } = useParams<TRouteParams>(); + const [currentStep, setCurrentStep] = useState(CreateSteps[createStep || 'coordinator']); + const [formConfig, setFormConfig] = useState<Record<string, any>>(); + const [stepTwoFormConfig, setStepTwoFormConfig] = useState<Record<string, any>>(); + const [submitLoading, setSubmitLoading] = useState<boolean>(false); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + const projectId = useGetCurrentProjectId(); + const myPureDomainName = useGetCurrentPureDomainName(); + const participantList = useGetCurrentProjectParticipantList(); + + const modelGroupDetailQuery = useQuery( + ['fetchModelGroupDetail', projectId, modelGroupId], + () => { + if (!projectId) { + Message.info('请选择工作区!'); + return; + } + return fetchModelJobGroupDetail(projectId, modelGroupId); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const modelGroupDetail = useMemo(() => modelGroupDetailQuery.data?.data, [modelGroupDetailQuery]); + + const modelJobDefinitionQuery = useQuery( + ['fetchModelJobDefinition', modelGroupDetail?.algorithm_type], + () => + fetchModelJobDefinition({ + model_job_type: 'TRAINING', + algorithm_type: modelGroupDetail?.algorithm_type || EnumAlgorithmProjectType.TREE_VERTICAL, + }), + { + refetchOnWindowFocus: false, + }, + ); + + const datasetDetailQuery = useQuery( + ['fetchDatasetDetail', modelGroupDetail?.dataset_id], + () => fetchDatasetDetail(modelGroupDetail?.dataset_id), + { + enabled: Boolean(modelGroupDetail?.dataset_id) || modelGroupDetail?.dataset_id === 0, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const datasetJobQuery = useQuery( + ['fetchDatasetJobDetail', projectId, datasetDetailQuery.data?.data.parent_dataset_job_id], + () => fetchDatasetJobDetail(projectId!, datasetDetailQuery.data?.data.parent_dataset_job_id!), + { + refetchOnWindowFocus: false, + retry: 2, + enabled: + Boolean(projectId && datasetDetailQuery.data?.data.parent_dataset_job_id) && + jobType === 'repeat', + }, + ); + + const algorithmProjectListQuery = useQuery( + ['fetchAllAlgorithmProjectList', modelGroupDetail?.algorithm_type, projectId], + () => + fetchProjectList(projectId, { + type: [modelGroupDetail?.algorithm_type], + }), + { + enabled: Boolean(projectId && modelGroupDetail?.algorithm_type), + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const preAlgorithmProjectListQuery = useQuery( + ['fetchPreAlgorithmProjectListQuery', modelGroupDetail?.algorithm_type], + () => + fetchProjectList(0, { + type: [modelGroupDetail?.algorithm_type], + sources: EnumAlgorithmProjectSource.PRESET, + }), + { + enabled: !!modelGroupDetail?.algorithm_type, + retry: 2, + refetchOnWindowFocus: false, + }, + ); + const peerAlgorithmProjectListQuery = useQuery( + ['fetchPeerAlgorithmProjectListQuery', projectId, modelGroupDetail?.algorithm_type], + () => + fetchPeerAlgorithmProjectList(projectId, 0, { + filter: `(type:${JSON.stringify([modelGroupDetail?.algorithm_type])})`, + }), + { + enabled: Boolean(projectId && modelGroupDetail?.algorithm_type), + retry: 2, + 
refetchOnWindowFocus: false, + }, + ); + + const modelJobDefinition = useMemo(() => { + return modelJobDefinitionQuery?.data?.data; + }, [modelJobDefinitionQuery]); + + const treeAdvancedFormItemList = useMemo(() => { + if (isNNAlgorithm(modelGroupDetail?.algorithm_type as EnumAlgorithmProjectType)) { + return []; + } else return getAdvanceConfigListByDefinition(modelJobDefinition?.variables!); + }, [modelGroupDetail?.algorithm_type, modelJobDefinition]); + + const nnAdvancedFormItemList = useMemo(() => { + if (isTreeAlgorithm(modelGroupDetail?.algorithm_type as EnumAlgorithmProjectType)) { + return []; + } else return getAdvanceConfigListByDefinition(modelJobDefinition?.variables!, true); + }, [modelGroupDetail?.algorithm_type, modelJobDefinition]); + + const { treeBaseConfigInitialValues, nnBaseConfigInitialValues } = useMemo(() => { + if (!modelJobDefinition?.variables) { + return {}; + } + return { + treeBaseConfigInitialValues: getTreeBaseConfigInitialValuesByDefinition( + modelJobDefinition.variables, + ), + nnBaseConfigInitialValues: getNNBaseConfigInitialValuesByDefinition( + modelJobDefinition.variables, + ), + }; + }, [modelJobDefinition?.variables]); + + const { treeAdvanceConfigInitialValues, nnAdvanceConfigInitialValues } = useMemo(() => { + return { + treeAdvanceConfigInitialValues: treeAdvancedFormItemList.reduce((acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, {} as any), + nnAdvanceConfigInitialValues: nnAdvancedFormItemList.reduce((acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, {} as any), + }; + }, [treeAdvancedFormItemList, nnAdvancedFormItemList]); + + const baseNN = useMemo(() => { + return { + epoch_num: 1, + verbosity: 1, + ...nnBaseConfigInitialValues, + ...nnAdvanceConfigInitialValues, + }; + }, [nnBaseConfigInitialValues, nnAdvanceConfigInitialValues]); + const baseTree = useMemo(() => { + return { + learning_rate: 0.3, + max_iters: 10, + max_depth: 5, + l2_regularization: 1, + max_bins: 33, + num_parallel: 5, + ...treeBaseConfigInitialValues, + ...treeAdvanceConfigInitialValues, + }; + }, [treeBaseConfigInitialValues, treeAdvanceConfigInitialValues]); + + const datasetDetail = useMemo(() => datasetDetailQuery.data?.data, [datasetDetailQuery]); + const datasetJob = useMemo(() => datasetJobQuery.data?.data, [datasetJobQuery]); + + const algorithmProjectList = useMemo(() => { + return [ + ...(algorithmProjectListQuery?.data?.data || []), + ...(preAlgorithmProjectListQuery.data?.data || []), + ]; + }, [algorithmProjectListQuery, preAlgorithmProjectListQuery]); + + const peerAlgorithmProjectList = useMemo(() => { + return peerAlgorithmProjectListQuery.data?.data || []; + }, [peerAlgorithmProjectListQuery]); + + const stepOneInitialValues = useMemo(() => { + return ( + formConfig ?? { + name: `${modelGroupDetail?.name}-v${modelGroupDetail?.latest_version! 
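/* restored jobs keep the group's naming scheme: group name plus the next version */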
+ 1}`, + algorithmProjects: modelGroupDetail?.algorithm_project_uuid_list?.algorithm_projects, + nn_config: baseNN, + loss_type: LossType.LOGISTIC, + tree_config: baseTree, + } + ); + }, [ + baseNN, + baseTree, + formConfig, + modelGroupDetail?.algorithm_project_uuid_list?.algorithm_projects, + modelGroupDetail?.latest_version, + modelGroupDetail?.name, + ]); + const stepTwoInitialValues = useMemo(() => { + if (stepTwoFormConfig) { + return stepTwoFormConfig; + } + const participantConfigInitialValues: Record<string, any> = {}; + participantList.forEach((participant: Participant) => { + participantConfigInitialValues[participant.pure_domain_name] = { + loss_type: formConfig?.loss_type, + algorithmProjectUuid: + modelGroupDetail?.algorithm_project_uuid_list?.algorithm_projects[ + participant.pure_domain_name + ], + nn_config: baseNN, + tree_config: baseTree, + }; + }); + return participantConfigInitialValues; + }, [ + stepTwoFormConfig, + participantList, + formConfig?.loss_type, + modelGroupDetail?.algorithm_project_uuid_list?.algorithm_projects, + baseNN, + baseTree, + ]); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const lastModelJobQuery = useQuery( + [ + 'fetchLastAutoUpdateModelJob', + projectId, + modelGroupDetail?.id!, + modelGroupDetail?.auto_update_status, + ], + () => fetchAutoUpdateModelJobDetail(projectId!, modelGroupDetail?.id!), + { + enabled: + jobType === 'repeat' && + !!projectId && + !!modelGroupDetail?.id && + modelGroupDetail?.auto_update_status === AutoModelJobStatus.STOPPED, + retry: 1, + refetchOnWindowFocus: false, + onSuccess: (res) => { + const modelJobDetail = res.data; + const variablesList = ['loss_type', 'role']; + const sourceList = [ + 'master_replicas', + 'master_cpu', + 'master_mem', + 'ps_replicas', + 'ps_cpu', + 'ps_mem', + 'worker_replicas', + 'worker_cpu', + 'worker_mem', + ]; + const globalConfig = modelJobDetail.global_config?.global_config; + const coordinatorConfig = globalConfig?.[myPureDomainName]; + const coordinatorMapValue = getConfigInitialValuesByDefinition( + coordinatorConfig?.variables!, + variablesList, + ); + const coordinatorSource = getConfigInitialValuesByDefinition( + coordinatorConfig?.variables!, + sourceList, + ); + + const participantConfig: Record<string, any> = {}; + participantList.forEach((participant: Participant) => { + const curParticipantConfig = globalConfig?.[participant.pure_domain_name]; + const curParticipantSource = getConfigInitialValuesByDefinition( + curParticipantConfig?.variables!, + sourceList, + ); + participantConfig[participant.pure_domain_name] = { + loss_type: coordinatorMapValue?.loss_type, + algorithmProjectUuid: + modelGroupDetail?.algorithm_project_uuid_list?.algorithm_projects[ + participant.pure_domain_name + ], + algorithm: { + algorithmUuid: curParticipantConfig?.algorithm_uuid, + config: curParticipantConfig?.algorithm_parameter?.variables, + }, + nn_config: { + ...baseNN, + ...getNNBaseConfigInitialValuesByDefinition(curParticipantConfig?.variables!), + ...getAdvanceConfigListByDefinition(curParticipantConfig?.variables!, true).reduce( + (acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, + {} as any, + ), + }, + tree_config: { + ...baseTree, + ...getTreeBaseConfigInitialValuesByDefinition(curParticipantConfig?.variables!), + ...getAdvanceConfigListByDefinition(curParticipantConfig?.variables!).reduce( + (acc, cur) => { + acc[cur.field!] 
= cur.initialValue; + return acc; + }, + {} as any, + ), + }, + source_config: curParticipantSource, + }; + }); + + setFormConfig({ + name: `${modelGroupDetail?.name}-v${modelGroupDetail?.latest_version! + 1}`, + algorithmProjects: modelGroupDetail?.algorithm_project_uuid_list?.algorithm_projects, + data_batch_id: modelJobDetail.data_batch_id || undefined, + loss_type: coordinatorMapValue.loss_type, + role: coordinatorMapValue.role, + [myPureDomainName]: { + algorithm: { + algorithmUuid: coordinatorConfig?.algorithm_uuid, + config: coordinatorConfig?.algorithm_parameter?.variables, + }, + }, + nn_config: { + ...baseNN, + ...getNNBaseConfigInitialValuesByDefinition(coordinatorConfig?.variables!), + ...getAdvanceConfigListByDefinition(coordinatorConfig?.variables!, true).reduce( + (acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, + {} as any, + ), + }, + tree_config: { + ...baseTree, + ...getTreeBaseConfigInitialValuesByDefinition(coordinatorConfig?.variables!), + ...getAdvanceConfigListByDefinition(coordinatorConfig?.variables!).reduce( + (acc, cur) => { + acc[cur.field!] = cur.initialValue; + return acc; + }, + {} as any, + ), + }, + resource_config: coordinatorSource, + }); + setStepTwoFormConfig(participantConfig); + }, + onError: () => { + Message.info('获取历史定时续训任务配置失败,请重新填写配置信息!'); + }, + }, + ); + + useEffect(() => { + setCurrentStep(CreateSteps[createStep || 'coordinator']); + }, [createStep]); + + return ( + <SharedPageLayout + title={ + <BackButton onClick={() => history.goBack()} isShowConfirmModal={isFormValueChanged}> + {modelGroupDetail?.name} + </BackButton> + } + centerTitle={ + jobType === 'repeat' + ? modelGroupDetail?.auto_update_status === AutoModelJobStatus.STOPPED + ? '配置定时续训任务' + : '创建定时续训任务' + : '创建新任务' + } + > + <Row justify="center"> + <Steps className={styles.step_container} current={currentStep} size="small"> + <Step title="我方配置" /> + <Step title="合作伙伴配置" /> + </Steps> + </Row> + <section className={styles.form_area}> + <Route + path={generatePath(routes.ModelTrainJobCreate, { + id: modelGroupId, + type: jobType, + step: 'coordinator', + })} + exact + render={() => ( + <StepOneCoordinator + isLoading={modelGroupDetailQuery.isFetching || datasetDetailQuery.isFetching} + onFormValueChange={onFormValueChange} + modelGroup={modelGroupDetail} + jobType={jobType} + datasetName={datasetDetail?.name} + formInitialValues={stepOneInitialValues} + isFormValueChanged={isFormValueChanged} + onFirstStepSubmit={(value) => setFormConfig(value)} + treeAdvancedFormItemList={treeAdvancedFormItemList} + nnAdvancedFormItemList={nnAdvancedFormItemList} + algorithmProjectList={algorithmProjectList} + peerAlgorithmProjectList={peerAlgorithmProjectList} + datasetBatchType={datasetJob?.time_range?.hours === 1 ? 
'hour' : 'day'} + /> + )} + /> + <Route + path={generatePath(routes.ModelTrainJobCreate, { + id: modelGroupId, + type: jobType, + step: 'participant', + })} + exact + render={() => ( + <StepTwoParticipant + modelGroup={modelGroupDetail} + formInitialValues={stepTwoInitialValues} + isFormValueChanged={isFormValueChanged} + stepOneFormConfigValues={formConfig} + onFormValueChange={onFormValueChange} + onSecondStepSubmit={(value) => { + createTrainModelJob(value); + }} + saveStepTwoValues={(value) => setStepTwoFormConfig(value)} + treeAdvancedFormItemList={treeAdvancedFormItemList} + nnAdvancedFormItemList={nnAdvancedFormItemList} + algorithmProjectList={algorithmProjectList} + peerAlgorithmProjectList={peerAlgorithmProjectList} + submitLoading={submitLoading} + /> + )} + /> + </section> + </SharedPageLayout> + ); + + async function createTrainModelJob(value: any) { + setSubmitLoading(true); + if (!projectId) { + return Message.info('请选择工作区!'); + } + + const isTree = isTreeAlgorithm(modelGroupDetail?.algorithm_type as EnumAlgorithmProjectType); + const coordinatorConfigValues = formConfig; + const participantsConfigValues = value; + const metricIsPublic = value.metric_is_public; + const participantsPayload: Record<string, any> = {}; + const participantRole = + modelGroupDetail?.algorithm_type === EnumAlgorithmProjectType.NN_HORIZONTAL + ? TrainRoleType.FEATURE + : coordinatorConfigValues?.role === TrainRoleType.FEATURE + ? TrainRoleType.LABEL + : TrainRoleType.FEATURE; + participantList.forEach((participant: Participant) => { + const curParticipantConfig = participantsConfigValues?.[participant.pure_domain_name]; + participantsPayload[participant.pure_domain_name] = isTree + ? { + variables: hydrateModalGlobalConfig(modelJobDefinition?.variables!, { + role: participantRole, + loss_type: coordinatorConfigValues?.loss_type, + ...curParticipantConfig?.tree_config, + ...curParticipantConfig?.resource_config, + }), + } + : { + algorithm_uuid: curParticipantConfig?.algorithm?.algorithmUuid, + algorithm_parameter: { variables: curParticipantConfig?.algorithm?.config }, + variables: hydrateModalGlobalConfig(modelJobDefinition?.variables!, { + role: participantRole, + ...curParticipantConfig?.nn_config, + ...curParticipantConfig?.resource_config, + }), + }; + }); + try { + const res = await createModelJob(projectId, { + name: `${modelGroupDetail?.name}-v${modelGroupDetail?.latest_version! + 1}`, + comment: coordinatorConfigValues?.comment, + group_id: modelGroupDetail?.id, + model_job_type: EnumModelJobType.TRAINING, + algorithm_type: modelGroupDetail?.algorithm_type as EnumAlgorithmProjectType, + data_batch_id: coordinatorConfigValues?.data_batch_id, + global_config: { + dataset_uuid: datasetDetail?.uuid, + global_config: { + [myPureDomainName]: isTree + ? 
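/* tree jobs submit plain variables; NN jobs additionally pin an algorithm version and its parameters */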
{ + variables: hydrateModalGlobalConfig(modelJobDefinition?.variables!, { + role: coordinatorConfigValues?.role, + loss_type: coordinatorConfigValues?.loss_type, + ...coordinatorConfigValues?.tree_config, + ...coordinatorConfigValues?.resource_config, + }), + } + : { + algorithm_uuid: + coordinatorConfigValues?.[myPureDomainName].algorithm?.algorithmUuid, + algorithm_parameter: { + variables: coordinatorConfigValues?.[myPureDomainName].algorithm?.config, + }, + variables: hydrateModalGlobalConfig(modelJobDefinition?.variables!, { + role: coordinatorConfigValues?.role, + ...coordinatorConfigValues?.nn_config, + ...coordinatorConfigValues?.resource_config, + }), + }, + ...participantsPayload, + }, + }, + }); + metricIsPublic && + (await updateModelJob(projectId, res.data.id, { metric_is_public: metricIsPublic })); + Message.success( + jobType === 'repeat' + ? modelGroupDetail?.auto_update_status === AutoModelJobStatus.STOPPED + ? '配置定时续训任务成功!' + : '创建定时续训任务成功!' + : '创建模型训练任务成功!', + ); + setSubmitLoading(false); + history.push( + generatePath(routes.ModelTrainDetail, { + id: modelGroupDetail?.id, + }), + ); + } catch (error: any) { + Message.error(error.message); + } + } +} + +export default ModelTrainJobCreate; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelTrain/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelTrain/index.tsx new file mode 100644 index 000000000..726a6bf67 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelTrain/index.tsx @@ -0,0 +1,16 @@ +import React from 'react'; +import { Route } from 'react-router'; +import List from './List'; +import Detail from './Detail'; +import routesMap from '../routes'; + +const ModelTrain: React.FC = () => { + return ( + <> + <Route exact path={routesMap.ModelTrainList} component={List} /> + <Route exact path={routesMap.ModelTrainDetail} component={Detail} /> + </> + ); +}; + +export default ModelTrain; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelFormModal/index.module.less b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelFormModal/index.module.less new file mode 100644 index 000000000..c984639e1 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelFormModal/index.module.less @@ -0,0 +1,4 @@ +.footer_row{ + padding-top: 15px; + border-top: 1px solid var(--backgroundColorGray); +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelFormModal/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelFormModal/index.tsx new file mode 100644 index 000000000..5a9817542 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelFormModal/index.tsx @@ -0,0 +1,95 @@ +import React, { FC, useEffect } from 'react'; + +import { Modal, Form, Input, Button } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; + +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; + +import styles from './index.module.less'; + +export interface Props<T = any> { + visible: boolean; + isEdit?: boolean; + isLoading?: boolean; + initialValues?: any; + onOk?: (values: T) => void; + onCancel?: () => void; +} + +export interface ModelFormData { + id?: number; + name: string; + comment: string; +} + +const ModelFormModal: FC<Props<ModelFormData>> = ({ + visible, + isEdit = false, + isLoading = false, + onOk, + onCancel, + initialValues, +}) => { + const [formInstance] = 
Form.useForm<any>(); + + useEffect(() => { + if (visible && isEdit && initialValues && formInstance) { + formInstance.setFieldsValue({ + ...initialValues, + }); + } + }, [visible, isEdit, initialValues, formInstance]); + + return ( + <Modal + title={'编辑模型'} + visible={visible} + maskClosable={false} + afterClose={afterClose} + onCancel={onCancel} + footer={null} + > + <Form layout="vertical" form={formInstance} onSubmit={onOk}> + <Form.Item + field="name" + label={'模型名称'} + rules={[ + { required: true, message: '模型集名称为必填项' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input disabled={true} /> + </Form.Item> + <Form.Item + field="comment" + label={'模型描述'} + rules={[{ max: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }]} + > + <Input.TextArea rows={4} placeholder={'最多为 200 个字符'} /> + </Form.Item> + <Form.Item field="id" style={{ display: 'none' }}> + <Input /> + </Form.Item> + <Form.Item wrapperCol={{ span: 24 }} style={{ marginBottom: 0 }}> + <GridRow className={styles.footer_row} justify="end" gap="12"> + <ButtonWithPopconfirm buttonText={'取消'} onConfirm={onCancel} /> + <Button type="primary" htmlType="submit" loading={isLoading}> + {isEdit ? '保存' : '创建'} + </Button> + </GridRow> + </Form.Item> + </Form> + </Modal> + ); + + function afterClose() { + // clear all fields + formInstance.resetFields(); + } +}; + +export default ModelFormModal; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelTable/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelTable/index.tsx new file mode 100644 index 000000000..c1bd02756 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/ModelTable/index.tsx @@ -0,0 +1,180 @@ +import React, { FC } from 'react'; + +import { formatTimestamp } from 'shared/date'; + +import Table from 'components/Table'; +import MoreActions from 'components/MoreActions'; + +import { Model } from 'typings/modelCenter'; +import { generatePath, useHistory } from 'react-router'; +import routes from 'views/ModelCenter/routes'; +import CONSTANTS from 'shared/constants'; +import AlgorithmType from 'components/AlgorithmType'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; + +type ColumnsGetterOptions = { + onDeleteClick?: (model: Model) => void; + onEditClick?: (model: Model) => void; + onModelSourceClick?: (model: Model, to: string) => void; + withoutActions?: boolean; + isOldModelCenter?: boolean; +}; + +export const getTableColumns = (options: ColumnsGetterOptions) => { + const cols = [ + { + title: '名称', + dataIndex: 'name', + key: 'name', + width: 200, + // TODO: Click name go to detail page + }, + { + title: '模型类型', + dataIndex: 'algorithm_type', + key: 'algorithm_type', + width: 150, + render: (type: EnumAlgorithmProjectType) => { + return <AlgorithmType type={type} />; + }, + }, + { + title: '模型来源', + dataIndex: 'job_id', + key: 'job_id', + width: 200, + render: (value: any, record: Model) => { + const { + job_id, + model_job_id, + group_id, + workflow_id, + job_name, + model_job_name, + workflow_name, + } = record; + + let to = ''; + let displayText = ''; + if (job_id && workflow_id && job_name && workflow_name) { + to = `/workflow-center/workflows/${workflow_id}`; + displayText = `${workflow_name}工作流-${job_name}任务`; + } + if (model_job_id && group_id && model_job_name) { + to = options.isOldModelCenter + ? 
`/model-center/model-management/model-set/${group_id}` + : generatePath(routes.ModelTrainDetail, { + id: group_id, + }); + displayText = `${model_job_name}训练任务`; + } + + return ( + <button + className="custom-text-button" + style={{ textAlign: 'left' }} + onClick={() => { + options?.onModelSourceClick?.(record, to); + }} + > + {displayText} + </button> + ); + }, + }, + { + title: '模型描述', + dataIndex: 'comment', + key: 'comment', + width: 200, + render: (comment: string) => comment || CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + title: '创建时间', + dataIndex: 'created_at', + key: 'created_at', + width: 150, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + sorter: (a: Model, b: Model) => a.created_at - b.created_at, + }, + ]; + if (!options.withoutActions) { + cols.push({ + title: '操作', + dataIndex: 'operation', + key: 'operation', + fixed: 'right', + width: 100, + render: (_: number, record: Model) => { + return ( + <> + <MoreActions + actionList={[ + { + label: '编辑', + onClick: () => { + options?.onEditClick?.(record); + }, + }, + { + label: '删除', + onClick: () => { + options?.onDeleteClick?.(record); + }, + danger: true, + }, + ]} + /> + </> + ); + }, + } as any); + } + + return cols; +}; + +type Props = { + loading: boolean; + isOldModelCenter: boolean; + dataSource: any[]; + onDeleteClick?: (record: Model) => void; + onEditClick?: (record: Model) => void; + onShowSizeChange?: (current: number, size: number) => void; + onPageChange?: (page: number, pageSize: number) => void; +}; +const ModelTable: FC<Props> = ({ + loading, + isOldModelCenter = false, + dataSource, + onDeleteClick, + onEditClick, + onShowSizeChange, + onPageChange, +}) => { + const history = useHistory(); + return ( + <> + <Table + rowKey="id" + scroll={{ x: '100%' }} + loading={loading} + data={dataSource} + columns={getTableColumns({ + isOldModelCenter, + onDeleteClick, + onEditClick, + onModelSourceClick: (model: Model, to: string) => { + if (to) { + history.push(to); + } + }, + })} + onShowSizeChange={onShowSizeChange} + onPageChange={onPageChange} + /> + </> + ); +}; + +export default ModelTable; diff --git a/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/index.module.less b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/index.module.less new file mode 100644 index 000000000..c0c14a73c --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/index.module.less @@ -0,0 +1,3 @@ +.search_container{ + width: 280px; +} diff --git a/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/index.tsx b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/index.tsx new file mode 100644 index 000000000..dd1d4342e --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ModelWarehouse/index.tsx @@ -0,0 +1,164 @@ +import React, { FC, useState, useMemo } from 'react'; +import { useRecoilValue } from 'recoil'; +import { useToggle } from 'react-use'; +import { useQuery } from 'react-query'; + +import { fetchModelList, updateModel, deleteModel } from 'services/modelCenter'; + +import { TIME_INTERVAL } from 'shared/constants'; +import { projectState } from 'stores/project'; +import { useUrlState } from 'hooks'; + +import { Input, Message } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout from 'components/SharedPageLayout'; +import Modal from 'components/Modal'; +import ModelTable from './ModelTable'; +import ModelFormModal, { ModelFormData } from './ModelFormModal'; + +import { Model, ModelUpdatePayload } from 
'typings/modelCenter'; + +import styles from './index.module.less'; + +type Props = { + isOldModelCenter: boolean; +}; + +const Page: FC<Props> = ({ isOldModelCenter = false }) => { + const [selectedData, setSelectedData] = useState<Model>(); + + const [isModelFormModalVisiable, toggleIsModelFormModalVisiable] = useToggle(false); + const [isUpdating, toggleIsUpdating] = useToggle(false); + + const selectedProject = useRecoilValue(projectState); + const [urlState, setUrlState] = useUrlState({ + keyword: '', + }); + + const listQuery = useQuery( + ['fetchModelList', urlState.keyword, selectedProject.current?.id], + () => { + if (!selectedProject.current?.id) { + Message.info('请选择工作区'); + return; + } + return fetchModelList(selectedProject.current?.id, { + keyword: urlState.keyword, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + + const tableDataSource = useMemo(() => { + if (!listQuery.data) { + return []; + } + let list = listQuery.data.data || []; + + // Filter deleted model + list = list.filter((item) => !item.deleted_at); + + return list; + }, [listQuery.data]); + + return ( + <SharedPageLayout title={'模型仓库'}> + <GridRow justify="end" align="center"> + <Input.Search + className={`${styles.search_container} custom-input`} + allowClear + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder={'输入模型名称'} + defaultValue={urlState.keyword} + /> + </GridRow> + <ModelTable + loading={listQuery.isFetching} + dataSource={tableDataSource} + onDeleteClick={onDeleteClick} + onEditClick={onEditClick} + isOldModelCenter={isOldModelCenter} + /> + <ModelFormModal + visible={isModelFormModalVisiable} + isEdit={true} + isLoading={isUpdating} + onCancel={onModalClose} + onOk={onModalSubmit} + initialValues={selectedData} + /> + </SharedPageLayout> + ); + + function onSearch( + value: string, + event?: + | React.ChangeEvent<HTMLInputElement> + | React.MouseEvent<HTMLElement> + | React.KeyboardEvent<HTMLInputElement>, + ) { + setUrlState((prevState) => ({ + ...prevState, + keyword: value, + page: 1, + })); + } + + function onDeleteClick(record: Model) { + Modal.delete({ + title: '确认要删除该模型吗?', + content: '删除后,不影响正在使用该模型的任务,使用该模型的历史任务不能再正常运行,请谨慎删除', + onOk: async () => { + try { + if (!selectedProject.current?.id) { + Message.info('请选择工作区'); + return; + } + await deleteModel(selectedProject.current?.id, record.id); + listQuery.refetch(); + Message.success('删除成功'); + } catch (error) { + Message.error(error.message); + } + }, + }); + } + + function onEditClick(record: Model) { + setSelectedData(record); + toggleIsModelFormModalVisiable(true); + } + + async function onModalSubmit(value: ModelFormData) { + toggleIsUpdating(true); + + try { + const payload: ModelUpdatePayload = { + comment: value.comment, + }; + if (!selectedProject.current?.id) { + Message.info('请选择工作区'); + return; + } + await updateModel(selectedProject.current?.id, selectedData?.id!, payload); + toggleIsModelFormModalVisiable(false); + listQuery.refetch(); + Message.success('修改成功'); + } catch (error) { + Message.error(error.message); + } finally { + toggleIsUpdating(false); + } + } + + function onModalClose() { + setSelectedData(undefined); + toggleIsModelFormModalVisiable(false); + } +}; + +export default Page; diff --git a/web_console_v2/client/src/views/ModelCenter/ReportResult.module.less b/web_console_v2/client/src/views/ModelCenter/ReportResult.module.less new file mode 100644 index 000000000..58b978323 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ReportResult.module.less @@ -0,0 
+1,9 @@ +.space_container{ + margin: 16px 0; + width: 100%; + font-size: 12px; + :global(.arco-space-item:nth-child(3)) { + margin-left: auto; + margin-right: 8px; + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ReportResult.tsx b/web_console_v2/client/src/views/ModelCenter/ReportResult.tsx new file mode 100644 index 000000000..3461bd617 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ReportResult.tsx @@ -0,0 +1,132 @@ +import React, { CSSProperties, FC, useMemo, useState } from 'react'; +import { Grid, Tabs, Space, Typography, Switch, Tooltip, Message } from '@arco-design/web-react'; +import { toString } from 'lodash-es'; +import StatisticList from 'components/StatisticList'; +import ConfusionMatrix from 'components/ConfusionMatrix'; +import FeatureImportance from 'components/FeatureImportance'; +import LineChartWithCard from 'components/LineChartWithCard'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantList } from 'hooks'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { updateModelJob } from 'services/modelCenter'; + +import styles from './ReportResult.module.less'; + +type Props = { + id: ID; + algorithmType?: EnumAlgorithmProjectType; + title?: string; + style?: CSSProperties; + isTraining?: boolean; + isNNAlgorithm?: boolean; + hideConfusionMatrix?: boolean; + metricIsPublic?: boolean; + onSwitch?: () => void; +}; + +const ReportResult: FC<Props> = ({ + id, + title, + style, + algorithmType, + isTraining = true, + isNNAlgorithm, + hideConfusionMatrix = false, + metricIsPublic = false, + onSwitch, +}) => { + const stringifyId = toString(id); + + const projectId = toString(useGetCurrentProjectId()); + const participantList = useGetCurrentProjectParticipantList(); + const [switchLoading, setSwitchLoading] = useState(false); + const [resultTarget, setResultTarget] = useState<string>(stringifyId); + const participantId = useMemo(() => { + return resultTarget === stringifyId ? undefined : resultTarget; + }, [resultTarget, stringifyId]); + const handleOnChangeIsPublic = (checked: boolean) => { + setSwitchLoading(true); + updateModelJob(projectId, stringifyId, { + metric_is_public: checked, + }).then( + (res) => { + Message.success('编辑成功'); + setSwitchLoading(false); + onSwitch && onSwitch(); + }, + (err) => { + Message.error(err.message); + setSwitchLoading(false); + }, + ); + }; + + return ( + <div style={style}> + <Space className={styles.space_container}> + <Typography.Text className="custom-typography" bold={true}> + {title ?? '评估报告'} + </Typography.Text> + <Tabs + className="custom-tabs" + type="text" + activeTab={resultTarget} + onChange={setResultTarget} + > + <Tabs.TabPane title="本方" key={stringifyId} /> + {(isTraining || algorithmType !== EnumAlgorithmProjectType.NN_HORIZONTAL) && + (participantList ?? []).map((peer: any) => ( + <Tabs.TabPane title={peer.name} key={toString(peer.id)} /> + ))} + </Tabs> + {(isTraining || algorithmType !== EnumAlgorithmProjectType.NN_HORIZONTAL) && ( + <Space> + 共享训练报告 + <Tooltip content={'开启后,将与合作伙伴共享本次训练指标'}> + <IconInfoCircle /> + </Tooltip> + <Switch + loading={switchLoading} + checked={metricIsPublic} + onChange={handleOnChangeIsPublic} + /> + </Space> + )} + </Space> + <Space direction="vertical" style={{ width: '100%' }}> + <StatisticList.ModelEvaluation + id={stringifyId} + participantId={participantId} + isTraining={isTraining} + /> + {isNNAlgorithm ? 
( + <LineChartWithCard.ModelMetrics + id={stringifyId} + participantId={participantId} + isTraining={isTraining} + /> + ) : ( + <Grid.Row gutter={20}> + {!hideConfusionMatrix && ( + <Grid.Col span={12}> + <ConfusionMatrix.ModelEvaluationVariant + id={stringifyId} + participantId={participantId} + /> + </Grid.Col> + )} + <Grid.Col span={hideConfusionMatrix ? 24 : 12}> + <FeatureImportance.ModelEvaluationVariant + tip={'数值越高,表示该特征对模型的影响越大'} + id={stringifyId} + participantId={participantId} + /> + </Grid.Col> + </Grid.Row> + )} + </Space> + </div> + ); +}; + +export default ReportResult; diff --git a/web_console_v2/client/src/views/ModelCenter/ResourceConfigTable.module.less b/web_console_v2/client/src/views/ModelCenter/ResourceConfigTable.module.less new file mode 100644 index 000000000..db9c58e22 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ResourceConfigTable.module.less @@ -0,0 +1,5 @@ +.table_container{ + :global(.arco-table-td){ + border-bottom: none; + } +} diff --git a/web_console_v2/client/src/views/ModelCenter/ResourceConfigTable.tsx b/web_console_v2/client/src/views/ModelCenter/ResourceConfigTable.tsx new file mode 100644 index 000000000..e7b7384c9 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/ResourceConfigTable.tsx @@ -0,0 +1,124 @@ +import React from 'react'; +import { CONSTANTS } from 'shared/constants'; +import { ModelJob } from 'typings/modelCenter'; +import { Tag, Popover, PopoverProps, TableColumnProps, Table } from '@arco-design/web-react'; + +import styles from './ResourceConfigTable.module.less'; + +const columns: TableColumnProps[] = [ + { + title: '', + dataIndex: 'type', + render(value: string) { + return <Tag>{value.toUpperCase()}</Tag>; + }, + }, + { + title: '', + dataIndex: 'cpu', + }, + { + title: '', + dataIndex: 'mem', + }, + { + title: '', + dataIndex: 'replicas', + }, +]; + +function genText(field: string, value: number) { + if (/cpu$/i.test(field)) { + return `${Math.floor(value / 1000)} Core`; + } + if (/mem$/i.test(field)) { + return `${value} GiB`; + } + if (/replicas$/i.test(field)) { + return `${value} 实例数`; + } +} + +type ResourceConfigTableProps = { + job: ModelJob; +}; + +type ResourceConfigTableButtonProps = ResourceConfigTableProps & { + btnText?: string; + popoverProps?: PopoverProps; +}; + +const ResourceConfigTable: React.FC<ResourceConfigTableProps> & { + Button: React.FC<ResourceConfigTableButtonProps>; +} = ({ job }) => { + const { config } = job; + + if (!config) { + return null; + } + + const { job_definitions } = config; + const { variables } = job_definitions[0]; + const group: any = {}; + + for (const item of variables) { + if (!/cpu|mem|replica/i.test(item.name)) { + continue; + } + + const [type, prop] = item.name.split('_'); + if (!group[type]) { + group[type] = {}; + } + + const numericVal = parseInt(item.value); + + if (numericVal > 0) { + group[type][prop] = genText(item.name, numericVal); + } + } + const tableData = ['master', 'ps', 'worker'] + .filter((type) => group[type] != null) + .map((type) => { + return { + type, + cpu: CONSTANTS.EMPTY_PLACEHOLDER, + mem: CONSTANTS.EMPTY_PLACEHOLDER, + replicas: CONSTANTS.EMPTY_PLACEHOLDER, + ...group[type], + }; + }); + + return ( + <Table + rowKey="type" + className={`${styles.table_container} custom-table`} + border={false} + showHeader={false} + size="small" + columns={columns} + data={tableData} + pagination={false} + /> + ); +}; + +const PopoverButton: React.FC<ResourceConfigTableButtonProps> = ({ + job, + btnText = '查看', + popoverProps = {}, +}) 
=> { + return ( + <Popover + {...popoverProps} + content={<ResourceConfigTable job={job} />} + getPopupContainer={() => document.body} + > + <button className="custom-text-button">{btnText}</button> + </Popover> + ); +}; + +ResourceConfigTable.Button = PopoverButton; + +export default ResourceConfigTable; diff --git a/web_console_v2/client/src/views/ModelCenter/TrainJobCompareModal.tsx b/web_console_v2/client/src/views/ModelCenter/TrainJobCompareModal.tsx new file mode 100644 index 000000000..1b27b4148 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/TrainJobCompareModal.tsx @@ -0,0 +1,183 @@ +import React, { useMemo, useState } from 'react'; +import { Modal, Table, TableColumnProps } from '@arco-design/web-react'; +import { ModelJob } from 'typings/modelCenter'; +import { useBatchModelJobMetricsAndConfig } from 'hooks/modelCenter'; +import CONSTANTS from 'shared/constants'; +import { + LABEL_MAPPER, + NOT_NN_ADVANCE_CONFIG_FIELD_LIST, + NOT_TREE_ADVANCE_CONFIG_FIELD_LIST, +} from './shared'; +import { isNNAlgorithm, isTreeAlgorithm } from 'views/ModelCenter/shared'; +import { Variable } from 'typings/variable'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; + +const metricRenderFunc = (val: string) => val ?? CONSTANTS.EMPTY_PLACEHOLDER; +const columns: TableColumnProps[] = [ + { + title: '名称', + dataIndex: 'id', + width: 200, + }, + { + title: '运行参数', + dataIndex: 'config', + width: 300, + render(conf) { + return <div style={{ whiteSpace: 'pre-wrap' }}>{conf}</div>; + }, + }, +]; + +const treeColumns = [ + { + title: 'ACC', + dataIndex: 'acc', + width: 80, + render: metricRenderFunc, + }, + { + title: 'AUC', + dataIndex: 'auc', + width: 80, + render: metricRenderFunc, + }, + { + key: 'precision', + title: 'PRECISION', + dataIndex: 'precision', + width: 80, + render: metricRenderFunc, + }, + { + key: 'recall', + title: 'RECALL', + dataIndex: 'recall', + width: 80, + render: metricRenderFunc, + }, + { + key: 'f1', + title: 'F1', + dataIndex: 'f1', + width: 80, + render: metricRenderFunc, + }, + { + key: 'ks', + title: 'KS', + dataIndex: 'ks', + width: 80, + render: metricRenderFunc, + }, +]; + +const nnColumns = [ + { + title: 'AUC', + dataIndex: 'auc', + width: 80, + render: metricRenderFunc, + }, + { + title: 'Log Loss', + dataIndex: 'loss', + width: 80, + render: metricRenderFunc, + }, +]; + +type Props = { + visible: boolean; + list: ModelJob[]; + algorithmType: EnumAlgorithmProjectType; + isTraining?: boolean; + onCancel?: () => void; +}; + +const TrainJobCompareModal: React.FC<Props> & { Button: React.FC<any> } = ({ + visible, + list, + isTraining = true, + onCancel, + algorithmType, +}) => { + const { dataList, isLoading } = useBatchModelJobMetricsAndConfig(list, visible); + const finalColumns = useMemo(() => { + const metricColumns = + algorithmType === EnumAlgorithmProjectType.NN_HORIZONTAL || + algorithmType === EnumAlgorithmProjectType.NN_VERTICAL + ? nnColumns + : treeColumns; + return [...columns, ...metricColumns]; + }, [algorithmType]); + const formattedList = useMemo(() => { + return dataList.map((item) => { + const metric = (isTraining ? item.metric.train : item.metric.eval) ?? {}; + const variables = item.config ??
[]; + for (const k in metric) { + const { values = [] } = metric[k] || {}; + const numberValue = values[values.length - 1]; + + if (isNaN(numberValue)) { + metric[k] = CONSTANTS.EMPTY_PLACEHOLDER; + continue; + } + metric[k] = numberValue.toFixed(3); + } + + return { + id: item.id, + config: variables + .filter((v: Variable) => + (isNNAlgorithm(item.job.algorithm_type) + ? NOT_NN_ADVANCE_CONFIG_FIELD_LIST + : isTreeAlgorithm(item.job.algorithm_type) + ? NOT_TREE_ADVANCE_CONFIG_FIELD_LIST + : [] + ).includes(v.name), + ) + .map((v: Variable) => [LABEL_MAPPER[v.name] ?? v.name, v.value].join('=')) + .join('\n'), + ...metric, + }; + }); + }, [dataList, isTraining]); + + return ( + <Modal + visible={visible} + title={'训练任务对比'} + footer={null} + style={{ width: 1000 }} + onCancel={onCancel} + > + <Table + loading={isLoading} + rowKey="id" + columns={finalColumns} + data={formattedList} + pagination={false} + /> + </Modal> + ); +}; + +const Button: React.FC<{ btnText?: string } & Omit<Props, 'visible'>> = ({ + btnText = '对比', + ...modalProps +}) => { + const [visible, setVisible] = useState(false); + return ( + <> + <button className="custom-text-button" onClick={() => setVisible(!visible)}> + {btnText} + </button> + <TrainJobCompareModal visible={visible} {...modalProps} onCancel={() => setVisible(false)} /> + </> + ); +}; + +TrainJobCompareModal.Button = Button; + +export default TrainJobCompareModal; diff --git a/web_console_v2/client/src/views/ModelCenter/index.less b/web_console_v2/client/src/views/ModelCenter/index.less new file mode 100644 index 000000000..26817b410 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/index.less @@ -0,0 +1,39 @@ +.drawer-content{ + flex:1; + .header { + display: flex; + justify-content: space-between; + align-items: center; + } +} +.params-popover-padding{ + width: 600px; + max-width: 600px !important; + .arco-popover-content { + padding-left: 0px; + padding-right: 0px; + } +} +.left-container { + display: flex; + flex-direction: row; + justify-content: flex-start; + align-items: center; + margin-top: 24px; + .right-button{ + margin-left: auto; + } +} +.pop-title{ + color: #4e5969; +} +.pop-content{ + color: #1d2129; +} +.styled-link{ + display: inline-block; + font-weight: 400; + font-size: 12px; + line-height: 20px; + margin-bottom: 12px; +} diff --git a/web_console_v2/client/src/views/ModelCenter/index.tsx b/web_console_v2/client/src/views/ModelCenter/index.tsx new file mode 100644 index 000000000..bbbd780e8 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/index.tsx @@ -0,0 +1,34 @@ +import React from 'react'; +import { Route, RouteProps } from 'react-router'; + +import ModelTrain from './ModelTrain'; +import ModelWarehouse from './ModelWarehouse'; +import ModelEvaluation from './ModelEvaluation'; +import routesMap from './routes'; + +const routes: Array<RouteProps> = [ + { + path: routesMap.ModelTrain, + component: ModelTrain, + }, + { + path: routesMap.ModelWarehouse, + component: ModelWarehouse, + }, + { + path: routesMap.ModelEvaluation, + component: ModelEvaluation, + }, +]; + +const Index: React.FC = () => { + return ( + <> + {routes.map((r) => ( + <Route key={r.path as string} path={r.path} component={r.component} /> + ))} + </> + ); +}; + +export default Index; diff --git a/web_console_v2/client/src/views/ModelCenter/routes.tsx b/web_console_v2/client/src/views/ModelCenter/routes.tsx new file mode 100644 index 000000000..8cf392cea --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/routes.tsx @@ -0,0 +1,62 
@@ +const INDEX_PATH = '/model-center'; +const ModelTrain = `${INDEX_PATH}/model-train`; +const ModelEvaluation = `${INDEX_PATH}/:module(model-evaluation|offline-prediction)`; +const OfflinePrediction = ModelEvaluation; + +const routes: Record<string, string> = { + ModelTrain, + ModelTrainList: `${ModelTrain}/list`, + ModelTrainCreate: `${ModelTrain}/:role(receiver|sender)/:action(create|edit)/:id?`, + ModelTrainDetail: `${ModelTrain}/detail/:id`, + ModelTrainJobCreate: `${ModelTrain}/model-train-job/:type/:id/:step`, + ModelTrainCreateCentralization: `${ModelTrain}/:role(receiver|sender)/create-centralization/:id?`, + + ModelWarehouse: `${INDEX_PATH}/model-warehouse`, + + ModelEvaluation, + ModelEvaluationList: `${ModelEvaluation}/list`, + ModelEvaluationCreate: `${ModelEvaluation}/:role(receiver|sender)/:action(create|edit)/:id?`, + ModelEvaluationDetail: `${ModelEvaluation}/detail/:id/:tab(result|info)?`, + + OfflinePrediction, + OfflinePredictionList: `${OfflinePrediction}/list`, + OfflinePredictionCreate: `${OfflinePrediction}/:role(receiver|sender)/:action(create|edit)/:id?`, + OfflinePredictionDetail: `${OfflinePrediction}/detail/:id/:tab(result|info)?`, +}; + +export default routes; + +export enum ModelEvaluationModuleType { + Evaluation = 'model-evaluation', + Prediction = 'offline-prediction', +} + +export enum ModelEvaluationCreateRole { + Receiver = 'receiver', + Sender = 'sender', +} + +export enum ModelEvaluationCreateAction { + Create = 'create', + Edit = 'edit', +} + +export enum ModelEvaluationDetailTab { + Result = 'result', + Info = 'info', +} + +export interface ModelEvaluationListParams { + module: ModelEvaluationModuleType; +} + +export interface ModelEvaluationCreateParams extends ModelEvaluationListParams { + role: ModelEvaluationCreateRole; + action: ModelEvaluationCreateAction; + id: string; +} + +export interface ModelEvaluationDetailParams extends ModelEvaluationListParams { + tab: ModelEvaluationDetailTab; + id: string; +} diff --git a/web_console_v2/client/src/views/ModelCenter/shared.module.less b/web_console_v2/client/src/views/ModelCenter/shared.module.less new file mode 100644 index 000000000..7bd9c3485 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/shared.module.less @@ -0,0 +1,22 @@ +@import '~styles/mixins.less'; +.avatar_container{ + .MixinSquare(44px); + background-color: var(--primary-1); + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + &::before { + display: inline-block; + width: 100%; + height: 100%; + content: ''; + background-image: url('../../assets/icons/atom-icon-algorithm-management.svg'); + background-repeat: no-repeat; + background-size: contain; + } +} +.plus_icon{ + margin-right: 4px; + vertical-align: 0.03em !important; +} diff --git a/web_console_v2/client/src/views/ModelCenter/shared.test.ts b/web_console_v2/client/src/views/ModelCenter/shared.test.ts new file mode 100644 index 000000000..ad9e9f2a3 --- /dev/null +++ b/web_console_v2/client/src/views/ModelCenter/shared.test.ts @@ -0,0 +1,735 @@ +import { + getAdvanceConfigList, + hydrateWorkflowConfig, + getDataSource, + isTreeAlgorithm, + isNNAlgorithm, + isOldAlgorithm, + isHorizontalAlgorithm, + isVerticalAlgorithm, +} from './shared'; + +import { JobType } from 'typings/job'; +import { VariableAccessMode, VariableValueType } from 'typings/variable'; +import { WorkflowConfig } from 'typings/workflow'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { AlgorithmType } from 'typings/modelCenter'; + +const 
TREE_TEMPLATE_CONFIG: WorkflowConfig = { + group_alias: 'sys_preset_tree_model', + job_definitions: [ + { + name: 'tree-model', + job_type: JobType.TREE_MODEL_TRAINING, + is_federated: true, + variables: [ + { + name: 'image', + value: 'artifact.bytedance.com/fedlearner/fedlearner:d5d0bb5', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true,"tooltip":"建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用"}' as any, + typed_value: 'artifact.bytedance.com/fedlearner/fedlearner:d5d0bb5', + value_type: VariableValueType.STRING, + }, + { + name: 'mode', + value: 'train', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":true,"enum":["train","eval"]}' as any, + typed_value: 'train', + value_type: VariableValueType.STRING, + }, + { + name: 'data_source', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"求交数据集名称"}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'data_path', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"数据存放位置"}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'validation_data_path', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'file_ext', + value: '.data', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true,"tooltip":"example: .data, .csv or .tfrecord 文件后缀"}' as any, + typed_value: '.data', + value_type: VariableValueType.STRING, + }, + { + name: 'file_type', + value: 'tfrecord', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":true,"enum":["csv","tfrecord"],"tooltip":"文件类型,csv或tfrecord"}' as any, + typed_value: 'tfrecord', + value_type: VariableValueType.STRING, + }, + { + name: 'load_model_path', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"模型文件地址"}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'loss_type', + value: 'logistic', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":false,"enum":["logistic","mse"],"tooltip":"损失函数类型,logistic或mse,默认logistic"}' as any, + typed_value: 'logistic', + value_type: VariableValueType.STRING, + }, + { + name: 'learning_rate', + value: '0.3', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '0.3', + value_type: VariableValueType.STRING, + }, + { + name: 'max_iters', + value: '10', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"树的数量"}' as any, + typed_value: '10', + value_type: VariableValueType.STRING, + }, + { + name: 'max_depth', + value: '5', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '5', + value_type: VariableValueType.STRING, + }, + { + name: 'max_bins', + value: '33', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"最大分箱数"}' as any, + typed_value: '33', + value_type: VariableValueType.STRING, + }, 
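+ // Note (added comment): the entries below reproduce the remaining sys_preset_tree_model variables (regularization, parallelism, packing, role and model-loading knobs) verbatim, so the fixture stays in sync with the preset template this test exercises.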
+ { + name: 'l2_regularization', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"L2惩罚系数"}' as any, + typed_value: '1', + value_type: VariableValueType.STRING, + }, + { + name: 'num_parallel', + value: '5', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"进程数量"}' as any, + typed_value: '5', + value_type: VariableValueType.STRING, + }, + { + name: 'enable_packing', + value: 'true', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":false,"enum":["true","false"],"tooltip":"是否开启优化"}' as any, + typed_value: 'true', + value_type: VariableValueType.STRING, + }, + { + name: 'ignore_fields', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"不入模特征,以逗号分隔如:name,age,sex"}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'cat_fields', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"类别类型特征,特征的值需要是非负整数。以逗号分隔如:alive,country,sex"}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'send_scores_to_follower', + value: 'false', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":false,"enum":["false","true"]}' as any, + typed_value: 'false', + value_type: VariableValueType.STRING, + }, + { + name: 'send_metrics_to_follower', + value: 'false', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":false,"enum":["false","true"]}' as any, + typed_value: 'false', + value_type: VariableValueType.STRING, + }, + { + name: 'verify_example_ids', + value: 'false', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: + '{"component":"Select","required":false,"tooltip":"是否检查example_id对齐 If set to true, the first column of the data will be treated as example ids that must match between leader and follower","enum":["false","true"]}', + typed_value: 'false', + value_type: VariableValueType.STRING, + }, + { + name: 'verbosity', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: + '{"component":"Select","required":false,"enum":["0","1","2"],"tooltip":"日志输出等级"}', + typed_value: '1', + value_type: VariableValueType.STRING, + }, + { + name: 'no_data', + value: 'false', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: + '{"component":"Select","required":false,"tooltip":"Leader是否没数据,不建议乱用","enum":["false","true"]}', + typed_value: 'false', + value_type: VariableValueType.STRING, + }, + { + name: 'worker_cpu', + value: '8000m', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '8000m', + value_type: VariableValueType.STRING, + }, + { + name: 'worker_mem', + value: '16Gi', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '16Gi', + value_type: VariableValueType.STRING, + }, + { + name: 'role', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":true,"enum":["Leader","Follower"]}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'label_field', + value: 'label', + access_mode: VariableAccessMode.PEER_WRITABLE, + 
widget_schema: '{"component":"Input","required":false,"tooltip":"label特征名"}' as any, + typed_value: 'label', + value_type: VariableValueType.STRING, + }, + { + name: 'load_model_name', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"按任务名称加载模型,{STORAGE_ROOT_PATH}/job_output/{LOAD_MODEL_NAME}/exported_models"}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + ], + yaml_template: '', + easy_mode: true, + dependencies: [], + }, + ], + variables: [], +}; + +const NN_TEMPLATE_CONFIG: WorkflowConfig = { + group_alias: 'sys_preset_nn_model', + variables: [ + { + name: 'image', + value: 'artifact.bytedance.com/fedlearner/fedlearner:21d2ae4', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: 'artifact.bytedance.com/fedlearner/fedlearner:21d2ae4', + value_type: VariableValueType.STRING, + }, + ], + job_definitions: [ + { + name: 'nn-model', + job_type: JobType.NN_MODEL_TRANINING, + is_federated: true, + variables: [ + { + name: 'master_cpu', + value: '3000m', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '3000m', + value_type: VariableValueType.STRING, + }, + { + name: 'master_mem', + value: '4Gi', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '4Gi', + value_type: VariableValueType.STRING, + }, + { + name: 'worker_cpu', + value: '2000m', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '2000m', + value_type: VariableValueType.STRING, + }, + { + name: 'worker_mem', + value: '4Gi', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '4Gi', + value_type: VariableValueType.STRING, + }, + { + name: 'ps_replicas', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '1', + value_type: VariableValueType.STRING, + }, + { + name: 'master_replicas', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '1', + value_type: VariableValueType.STRING, + }, + { + name: 'ps_cpu', + value: '2000m', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '2000m', + value_type: VariableValueType.STRING, + }, + { + name: 'ps_mem', + value: '4Gi', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '4Gi', + value_type: VariableValueType.STRING, + }, + { + name: 'worker_replicas', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true}' as any, + typed_value: '1', + value_type: VariableValueType.STRING, + }, + { + name: 'data_source', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'epoch_num', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '1', + value_type: VariableValueType.STRING, + }, + 
{ + name: 'shuffle_data_block', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'verbosity', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":false,"enum":["0","1","2"]}' as any, + typed_value: '1', + value_type: VariableValueType.STRING, + }, + { + name: 'mode', + value: 'train', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":true,"enum":["train","eval"]}' as any, + typed_value: 'train', + value_type: VariableValueType.STRING, + }, + { + name: 'save_checkpoint_secs', + value: '600', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '600', + value_type: VariableValueType.STRING, + }, + { + name: 'save_checkpoint_steps', + value: '1000', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '1000', + value_type: VariableValueType.STRING, + }, + { + name: 'load_checkpoint_filename', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'load_checkpoint_filename_with_path', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'sparse_estimator', + value: 'True', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: 'True', + value_type: VariableValueType.STRING, + }, + { + name: 'role', + value: 'Leader', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":true,"enum":["Leader","Follower"]}' as any, + typed_value: 'Leader', + value_type: VariableValueType.STRING, + }, + { + name: 'load_model_name', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false}' as any, + typed_value: '', + value: '', + value_type: VariableValueType.STRING, + }, + { + name: 'algorithm', + value: '{"config":[],"path":""}', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"AlgorithmSelect","required":true}' as any, + value_type: VariableValueType.OBJECT, + typed_value: { + config: [], + path: '', + }, + }, + ], + yaml_template: '', + easy_mode: true, + dependencies: [], + }, + ], +}; + +it('getAdvanceConfigList', () => { + expect(getAdvanceConfigList(TREE_TEMPLATE_CONFIG)).toEqual([ + { + label: '镜像', + field: 'image', + initialValue: 'artifact.bytedance.com/fedlearner/fedlearner:d5d0bb5', + tip: expect.any(String), + }, + { + field: 'data_source', + initialValue: '', + label: '数据源', + tip: expect.any(String), + }, + { + label: '数据源', + field: 'data_path', + initialValue: '', + tip: expect.any(String), + }, + { + label: '验证数据集地址', + field: 'validation_data_path', + initialValue: '', + tip: undefined, + }, + { + label: '文件扩展名', + field: 'file_ext', + initialValue: '.data', + tip: expect.any(String), + }, + { + label: '文件类型', + field: 'file_type', + initialValue: 'tfrecord', + tip: expect.any(String), + }, + { + label: '加载模型路径', + field: 'load_model_path', + initialValue: '', + tip: expect.any(String), 
+ }, + { + label: '是否优化', + field: 'enable_packing', + initialValue: 'true', + tip: expect.any(String), + }, + { + label: '忽略字段', + field: 'ignore_fields', + initialValue: '', + tip: expect.any(String), + }, + { + label: '类型变量字段', + field: 'cat_fields', + initialValue: '', + tip: expect.any(String), + }, + { + label: '是否将预测值发送至 follower', + field: 'send_scores_to_follower', + initialValue: 'false', + tip: expect.any(String), + }, + { + label: '是否将指标发送至 follower', + field: 'send_metrics_to_follower', + initialValue: 'false', + tip: expect.any(String), + }, + { + label: '是否检验 example_ids', + field: 'verify_example_ids', + initialValue: 'false', + tip: expect.any(String), + }, + { + label: '日志输出等级', + field: 'verbosity', + initialValue: '1', + tip: expect.any(String), + }, + { + label: '标签方是否无特征', + field: 'no_data', + initialValue: 'false', + tip: expect.any(String), + }, + { + label: '标签字段', + field: 'label_field', + initialValue: 'label', + tip: expect.any(String), + }, + { + label: '加载模型名称', + field: 'load_model_name', + initialValue: '', + tip: expect.any(String), + }, + ]); + expect(getAdvanceConfigList(NN_TEMPLATE_CONFIG, true)).toEqual([ + { + label: '镜像', + field: 'image', + initialValue: 'artifact.bytedance.com/fedlearner/fedlearner:21d2ae4', + tip: undefined, + }, + { + field: 'data_source', + initialValue: '', + label: '数据源', + tip: undefined, + }, + { + label: '是否打乱顺序', + field: 'shuffle_data_block', + initialValue: '', + tip: expect.any(String), + }, + { + label: '保存备份间隔秒数', + field: 'save_checkpoint_secs', + initialValue: '600', + tip: expect.any(String), + }, + { + label: '保存备份间隔步数', + field: 'save_checkpoint_steps', + initialValue: '1000', + tip: expect.any(String), + }, + { + label: '加载文件名', + field: 'load_checkpoint_filename', + initialValue: '', + tip: expect.any(String), + }, + { + label: '加载文件路径', + field: 'load_checkpoint_filename_with_path', + initialValue: '', + tip: expect.any(String), + }, + { + field: 'sparse_estimator', + initialValue: 'True', + tip: expect.any(String), + label: 'sparse_estimator', + }, + { + label: '加载模型名称', + field: 'load_model_name', + initialValue: '', + tip: expect.any(String), + }, + ]); +}); + +it('hydrateWorkflowConfig', () => { + expect(hydrateWorkflowConfig(TREE_TEMPLATE_CONFIG, {})).toEqual(TREE_TEMPLATE_CONFIG); + expect( + hydrateWorkflowConfig(TREE_TEMPLATE_CONFIG, { + image: '1', + mode: '2', + data_source: '3', + }), + ).toEqual({ + ...TREE_TEMPLATE_CONFIG, + job_definitions: [ + { + ...TREE_TEMPLATE_CONFIG.job_definitions[0], + variables: [ + { + name: 'image', + value: '1', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":true,"tooltip":"建议不修改,指定Pod中运行的容器镜像地址,修改此项可能导致本基本模版不适用"}' as any, + typed_value: 'artifact.bytedance.com/fedlearner/fedlearner:d5d0bb5', + value_type: VariableValueType.STRING, + }, + { + name: 'mode', + value: '2', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Select","required":true,"enum":["train","eval"]}' as any, + typed_value: 'train', + value_type: VariableValueType.STRING, + }, + { + name: 'data_source', + access_mode: VariableAccessMode.PEER_WRITABLE, + widget_schema: '{"component":"Input","required":false,"tooltip":"求交数据集名称"}' as any, + typed_value: '', + value: '3', + value_type: VariableValueType.STRING, + }, + ].concat(TREE_TEMPLATE_CONFIG.job_definitions[0].variables.slice(3) as any), + }, + ], + }); +}); + +it('getDataSource', () => { + expect(getDataSource('')).toBe(''); + 
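// Added comment: per the assertions below, getDataSource is expected to extract a name only from a '/data_source/<name>' shape (or the equivalent segment in an HDFS URI); every other input falls through to ''. +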
expect(getDataSource('adasdsadsadsadsad')).toBe(''); + expect(getDataSource('data_source')).toBe(''); + expect(getDataSource('data_source/')).toBe(''); + expect(getDataSource('data_source/abc')).toBe(''); + expect(getDataSource('/data_source')).toBe(''); + expect(getDataSource('/data_source/')).toBe(''); + expect(getDataSource('/data_source/abc')).toBe('abc'); + expect( + getDataSource( + 'hdfs:///trimmed', + ), + ).toBe('u0bae4aa7dcde477e8ee-psi-data-join-job'); +}); + +it('isTreeAlgorithm', () => { + expect(isTreeAlgorithm(AlgorithmType.TREE)).toBe(true); + expect(isTreeAlgorithm(AlgorithmType.NN)).toBe(false); + expect(isTreeAlgorithm(EnumAlgorithmProjectType.TREE_VERTICAL)).toBe(true); + expect(isTreeAlgorithm(EnumAlgorithmProjectType.TREE_HORIZONTAL)).toBe(true); + expect(isTreeAlgorithm(EnumAlgorithmProjectType.NN_VERTICAL)).toBe(false); + expect(isTreeAlgorithm(EnumAlgorithmProjectType.NN_HORIZONTAL)).toBe(false); + expect(isTreeAlgorithm(EnumAlgorithmProjectType.NN_LOCAL)).toBe(false); + expect(isTreeAlgorithm(EnumAlgorithmProjectType.UNSPECIFIED)).toBe(false); +}); +it('isNNAlgorithm', () => { + expect(isNNAlgorithm(AlgorithmType.TREE)).toBe(false); + expect(isNNAlgorithm(AlgorithmType.NN)).toBe(true); + expect(isNNAlgorithm(EnumAlgorithmProjectType.TREE_VERTICAL)).toBe(false); + expect(isNNAlgorithm(EnumAlgorithmProjectType.TREE_HORIZONTAL)).toBe(false); + expect(isNNAlgorithm(EnumAlgorithmProjectType.NN_VERTICAL)).toBe(true); + expect(isNNAlgorithm(EnumAlgorithmProjectType.NN_HORIZONTAL)).toBe(true); + expect(isNNAlgorithm(EnumAlgorithmProjectType.NN_LOCAL)).toBe(true); + expect(isNNAlgorithm(EnumAlgorithmProjectType.UNSPECIFIED)).toBe(false); +}); +it('isOldAlgorithm', () => { + expect(isOldAlgorithm(AlgorithmType.TREE)).toBe(true); + expect(isOldAlgorithm(AlgorithmType.NN)).toBe(true); + expect(isOldAlgorithm(EnumAlgorithmProjectType.TREE_VERTICAL)).toBe(false); + expect(isOldAlgorithm(EnumAlgorithmProjectType.TREE_HORIZONTAL)).toBe(false); + expect(isOldAlgorithm(EnumAlgorithmProjectType.NN_VERTICAL)).toBe(false); + expect(isOldAlgorithm(EnumAlgorithmProjectType.NN_HORIZONTAL)).toBe(false); + expect(isOldAlgorithm(EnumAlgorithmProjectType.NN_LOCAL)).toBe(false); + expect(isOldAlgorithm(EnumAlgorithmProjectType.UNSPECIFIED)).toBe(false); +}); +it('isVerticalAlgorithm', () => { + expect(isVerticalAlgorithm(EnumAlgorithmProjectType.TREE_VERTICAL)).toBe(true); + expect(isVerticalAlgorithm(EnumAlgorithmProjectType.TREE_HORIZONTAL)).toBe(false); + expect(isVerticalAlgorithm(EnumAlgorithmProjectType.NN_VERTICAL)).toBe(true); + expect(isVerticalAlgorithm(EnumAlgorithmProjectType.NN_HORIZONTAL)).toBe(false); + expect(isVerticalAlgorithm(EnumAlgorithmProjectType.NN_LOCAL)).toBe(false); + expect(isVerticalAlgorithm(EnumAlgorithmProjectType.UNSPECIFIED)).toBe(false); +}); +it('isHorizontalAlgorithm', () => { + expect(isHorizontalAlgorithm(EnumAlgorithmProjectType.TREE_VERTICAL)).toBe(false); + expect(isHorizontalAlgorithm(EnumAlgorithmProjectType.TREE_HORIZONTAL)).toBe(true); + expect(isHorizontalAlgorithm(EnumAlgorithmProjectType.NN_VERTICAL)).toBe(false); + expect(isHorizontalAlgorithm(EnumAlgorithmProjectType.NN_HORIZONTAL)).toBe(true); + expect(isHorizontalAlgorithm(EnumAlgorithmProjectType.NN_LOCAL)).toBe(false); + expect(isHorizontalAlgorithm(EnumAlgorithmProjectType.UNSPECIFIED)).toBe(false); +}); diff --git a/web_console_v2/client/src/views/ModelCenter/shared.tsx b/web_console_v2/client/src/views/ModelCenter/shared.tsx new file mode 100644 index 000000000..32aea8a9d --- /dev/null 
+++ b/web_console_v2/client/src/views/ModelCenter/shared.tsx @@ -0,0 +1,1018 @@ +import React from 'react'; +import { cloneDeep, flattenDeep } from 'lodash-es'; + +import { workflowStateFilterParamToStateTextMap } from 'shared/workflow'; + +import { Message, Modal, TableColumnProps } from '@arco-design/web-react'; +import { ActionItem, StateTypes } from 'components/StateIndicator'; +import { WorkflowConfig, WorkflowState, WorkflowStateFilterParam, Tag } from 'typings/workflow'; +import { Variable, VariableWidgetSchema } from 'typings/variable'; +import { ItemProps } from 'components/ConfigForm'; +import { PlusBold } from 'components/IconPark'; +import { FilterOp } from 'typings/filter'; + +import { + FederalType, + ModelJobGroup, + ModelJobRole, + TrainRoleType, + ModelJob, + AlgorithmType, + LossType, + ModelJobVariable, + ModelGroupStatus, + ModelJobStatus, + ModelJobAuthStatus, +} from 'typings/modelCenter'; + +import { ModelEvaluationModuleType } from './routes'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { VariableComponent } from 'typings/variable'; +import { processVariableTypedValue, stringifyVariableValue } from 'shared/formSchema'; +import { Participant } from 'typings/participant'; +import { deleteJob_new } from 'services/modelCenter'; + +import styles from './shared.module.less'; + +enum FiltersFields { + ALGORITHM_TYPE = 'algorithm_type', + STATUS = 'status', + ROLE = 'role', +} + +const TIP_MAPPER: Record<string, string> = { + image_version: '镜像版本', + learning_rate: '使用损失函数的梯度调整网络权重的超参数,推荐区间(0.01-1]', + enable_packing: '提高计算效率,true 为优化,false 为不优化。', + ignore_fields: '不参与训练的字段', + cat_fields: '类别变量字段,训练中会特别处理', + send_scores_to_follower: '是否将预测值发送至follower侧,false代表否,true代表是', + send_metrics_to_follower: '是否将指标发送至follower侧,false代表否,true代表是', + verify_example_ids: + '是否检验example_ids,一般情况下训练数据有example_ids,false代表否,true代表是', + no_data: '针对标签方没有特征的预测场景,false代表有特征,true代表无特征。', + label_field: '用于指定label', + load_model_name: '评估和预测时,根据用户选择的模型,确定该字段的值。', + shuffle_data_block: '打乱数据顺序,增加随机性,提高模型泛化能力', + save_checkpoint_secs: '模型多少秒保存一次', + save_checkpoint_steps: '模型多少step保存一次', + load_checkpoint_filename: '加载文件名,用于评估和预测时选择模型', + load_checkpoint_filename_with_path: '加载文件路径,用于更细粒度的控制到底选择哪个时间点的模型', + sparse_estimator: + '是否使用火山引擎的SparseEstimator,由火山引擎侧工程师判定,客户侧默认都为false', + steps_per_sync: '用于指定参数同步的频率,比如step间隔为10,也就是训练10个batch同步一次参数。', + feature_importance: '数值越高,表示该特征对模型的影响越大', + metric_is_publish: '开启后,将与合作伙伴共享本次训练指标', +}; + +export const LABEL_MAPPER: Record<string, string> = { + image: '镜像', + data_source: '数据源', + epoch_num: 'epoch_num', + verbosity: '日志输出等级', + shuffle_data_block: '是否打乱顺序', + save_checkpoint_steps: '保存备份间隔步数', + save_checkpoint_secs: '保存备份间隔秒数', + load_checkpoint_filename: '加载文件名', + load_checkpoint_filename_with_path: '加载文件路径', + sparse_estimator: 'sparse_estimator', + load_model_name: '加载模型名称', + data_path: '数据源', + steps_per_sync: '参数同步 step 间隔', + + learning_rate: '学习率', + max_iters: '迭代数', + max_depth: '最大深度', + l2_regularization: 'L2惩罚系数', + max_bins: '最大分箱数量', + num_parallel: '线程池大小', + file_ext: '文件扩展名', + file_type: '文件类型', + enable_packing: '是否优化', + ignore_fields: '忽略字段', + cat_fields: '类型变量字段', + send_metrics_to_follower: '是否将指标发送至 follower', + send_scores_to_follower: '是否将预测值发送至 follower', + verify_example_ids: '是否检验 example_ids', + no_data: '标签方是否无特征', + image_version: '镜像版本号', + num_partitions: 'num_partitions', + validation_data_path: '验证数据集地址', + label_field: '标签字段', + load_model_path: '加载模型路径', +}; + +export const
MODEL_JOB_STATUE_TEXT_MAPPER: Record<ModelJobStatus, string> = { + [ModelJobStatus.PENDING]: '未配置', + [ModelJobStatus.CONFIGURED]: '配置成功', + [ModelJobStatus.ERROR]: '错误', + [ModelJobStatus.RUNNING]: '运行中', + [ModelJobStatus.SUCCEEDED]: '成功', + [ModelJobStatus.STOPPED]: '已停止', + [ModelJobStatus.FAILED]: '失败', + [ModelJobStatus.UNKNOWN]: '未知状态', +}; + +export type TableFiltersValue = Partial<Record<FiltersFields, string[]>>; + +export type ColumnsGetterOptions = { + onDeleteClick?: any; + onRestartClick?: any; + onStopClick?: any; + onLogClick?: any; + onReportNameClick?: any; + + module?: ModelEvaluationModuleType; + nameFieldText?: string; + withoutActions?: boolean; + isRestartLoading?: boolean; + isHideAllActionList?: boolean; + filterDropdownValues?: TableFiltersValue; + participantList?: Participant[]; + myPureDomainName?: string; +}; + +export function getModelJobState( + state: ModelJob['state'], + options?: ColumnsGetterOptions, +): { type: StateTypes; text: string; actionList?: ActionItem[] } { + switch (state) { + case WorkflowState.PARTICIPANT_CONFIGURING: + return { + text: + workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.PARTICIPANT_CONFIGURING], + type: 'gold', + }; + + case WorkflowState.PENDING_ACCEPT: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.PENDING_ACCEPT], + type: 'warning', + }; + + case WorkflowState.WARMUP_UNDERHOOD: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.WARMUP_UNDERHOOD], + type: 'warning', + }; + + case WorkflowState.PREPARE_RUN: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.PREPARE_RUN], + type: 'warning', + }; + + case WorkflowState.READY_TO_RUN: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.READY_TO_RUN], + type: 'lime', + }; + + case WorkflowState.RUNNING: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.RUNNING], + type: 'processing', + }; + + case WorkflowState.PREPARE_STOP: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.PREPARE_STOP], + type: 'error', + }; + + case WorkflowState.STOPPED: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.STOPPED], + type: 'error', + }; + + case WorkflowState.COMPLETED: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.COMPLETED], + type: 'success', + }; + + case WorkflowState.FAILED: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.FAILED], + type: 'error', + actionList: options?.isHideAllActionList + ? 
[] + : [ + { + label: '查看日志', + onClick: options?.onLogClick, + }, + { + label: '重新发起', + onClick: options?.onRestartClick, + isLoading: !!options?.isRestartLoading, + }, + ], + }; + + case WorkflowState.INVALID: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.INVALID], + type: 'default', + }; + case WorkflowState.UNKNOWN: + default: + return { + text: workflowStateFilterParamToStateTextMap[WorkflowStateFilterParam.UNKNOWN], + type: 'default', + }; + } +} + +export function getModelJobStatus( + status: ModelJobStatus, + options?: ColumnsGetterOptions, +): { type: StateTypes; text: string; actionList?: ActionItem[] } { + const modelJobStatusText = MODEL_JOB_STATUE_TEXT_MAPPER[status]; + switch (status) { + case ModelJobStatus.PENDING: + return { + text: modelJobStatusText, + type: 'default', + }; + case ModelJobStatus.CONFIGURED: + return { + text: modelJobStatusText, + type: 'success', + }; + case ModelJobStatus.ERROR: + return { + text: modelJobStatusText, + type: 'error', + }; + case ModelJobStatus.RUNNING: + return { + text: modelJobStatusText, + type: 'processing', + }; + case ModelJobStatus.STOPPED: + return { + text: modelJobStatusText, + type: 'error', + }; + case ModelJobStatus.SUCCEEDED: + return { + text: modelJobStatusText, + type: 'success', + }; + case ModelJobStatus.FAILED: + return { + text: modelJobStatusText, + type: 'error', + actionList: options?.isHideAllActionList + ? [] + : [ + { + label: '查看日志', + onClick: options?.onLogClick, + }, + { + label: '重新发起', + onClick: options?.onRestartClick, + isLoading: !!options?.isRestartLoading, + }, + ], + }; + case ModelJobStatus.UNKNOWN: + default: + return { + text: MODEL_JOB_STATUE_TEXT_MAPPER[ModelJobStatus.UNKNOWN], + type: 'default', + }; + } +} + +export function getAlgorithmTypeText(val: ModelJob['algorithm_type']) { + const [, type] = (val ?? 
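
As a usage sketch for `getModelJobStatus`: a FAILED job renders an error badge plus a log/restart action list, while terminal success states carry no actions. The two handlers below are stand-ins, not part of this module:

```tsx
declare function openLogPanel(): void; // assumed caller-side helper
declare function relaunchJob(): void;  // assumed caller-side helper

const indicator = getModelJobStatus(ModelJobStatus.FAILED, {
  onLogClick: openLogPanel,
  onRestartClick: relaunchJob,
  isRestartLoading: false,
});
// indicator.text === '失败', indicator.type === 'error', and indicator.actionList
// carries the 查看日志 / 重新发起 entries.
```
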
'').split('_'); + if (!type) { + return; + } + + switch (type.toLowerCase()) { + case 'vertical': + return '纵向联邦'; + case 'horizontal': + return '横向联邦'; + default: + return val; + } +} + +export const Avatar: React.FC = () => { + return <div className={styles.avatar_container} />; +}; + +export async function dangerConfirmWrapper( + title: string, + content: string, + okText: string, + onConfirm: () => Promise<any>, + onCancel?: () => void, +) { + Modal.confirm({ + className: 'custom-modal', + title, + content, + okText, + okButtonProps: { + status: 'danger', + }, + cancelText: '取消', + onConfirm: onConfirm, + onCancel, + }); +} + +export const lossTypeOptions = [ + { + value: LossType.LOGISTIC, + label: 'logistic', + tip: '用于分类任务', + }, + { + value: LossType.MSE, + label: 'mse', + tip: '用于回归任务', + }, +]; + +export const algorithmTypeOptions = [ + { + label: '纵向联邦-树模型', + value: EnumAlgorithmProjectType.TREE_VERTICAL, + }, + { + label: '横向联邦-NN模型', + value: EnumAlgorithmProjectType.NN_HORIZONTAL, + }, + { + label: '纵向联邦-NN模型', + value: EnumAlgorithmProjectType.NN_VERTICAL, + }, +]; + +export const federalTypeOptions = [ + { + value: FederalType.VERTICAL, + label: '纵向联邦', + }, + { + value: FederalType.HORIZONTAL, + label: '横向联邦', + }, +]; + +export const trainRoleTypeOptions = [ + { + value: TrainRoleType.LABEL, + label: '标签方', + }, + { + value: TrainRoleType.FEATURE, + label: '特征方', + }, +]; +export const treeBaseConfigList: ItemProps[] = [ + { + field: 'learning_rate', + label: '学习率', + tip: '使用损失函数的梯度调整网络权重的超参数,​ 推荐区间(0.01-1]', + componentType: VariableComponent.NumberPicker, + initialValue: 0.3, + }, + { + field: 'max_iters', + label: '迭代数', + tip: '该模型包含树的数量,推荐区间(5-20)', + componentType: VariableComponent.NumberPicker, + initialValue: 10, + }, + { + field: 'max_depth', + label: '最大深度', + tip: '树模型的最大深度,用来控制过拟合,推荐区间(4-7)', + componentType: VariableComponent.NumberPicker, + initialValue: 5, + }, + { + field: 'l2_regularization', + label: 'L2惩罚系数', + tip: '对节点预测值的惩罚系数,推荐区间(0.01-10)', + componentType: VariableComponent.NumberPicker, + initialValue: 1, + }, + { + field: 'max_bins', + label: '最大分箱数量', + tip: '离散化连续变量,可以减少数据稀疏度,一般不需要调整', + componentType: VariableComponent.NumberPicker, + initialValue: 33, + }, + { + field: 'num_parallel', + label: '线程池大小', + tip: '建议与CPU核数接近', + componentType: VariableComponent.NumberPicker, + initialValue: 5, + }, +]; +export const nnBaseConfigList: ItemProps[] = [ + { + field: 'epoch_num', + label: 'epoch_num', + tip: '指一次完整模型训练需要多少次Epoch,一次Epoch是指将全部训练样本训练一遍', + componentType: VariableComponent.NumberPicker, + initialValue: 1, + }, + { + field: 'verbosity', + label: '日志输出等级', + tip: '有 0、1、2、3 四种等级,等级越大日志输出的信息越多', + componentType: VariableComponent.NumberPicker, + initialValue: 1, + }, +]; + +export const TREE_BASE_CONFIG_FIELD_LIST = treeBaseConfigList.map((item) => item.field) as string[]; +export const NN_BASE_CONFIG_FIELD_LIST = nnBaseConfigList.map((item) => item.field) as string[]; + +export const NOT_TREE_ADVANCE_CONFIG_FIELD_LIST = [ + ...TREE_BASE_CONFIG_FIELD_LIST, + 'loss_type', + 'role', + 'worker_cpu', + 'worker_mem', + 'mode', + 'algorithm', +]; +export const NOT_NN_ADVANCE_CONFIG_FIELD_LIST = [ + ...NN_BASE_CONFIG_FIELD_LIST, + 'role', + 'worker_cpu', + 'worker_mem', + 'worker_replicas', + 'master_cpu', + 'master_mem', + 'master_replicas', + 'ps_cpu', + 'ps_mem', + 'ps_replicas', + 'mode', + 'algorithm', +]; + +export function getAdvanceConfigListByDefinition(variables: Variable[], isNN = false): ItemProps[] { + const blockList = isNN ? 
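
For reference, `getAlgorithmTypeText` keys off the segment after the first underscore; sample behavior, read directly from the code above (the `as any` casts hedge the enum-typed parameter):

```tsx
getAlgorithmTypeText('NN_VERTICAL' as any);     // '纵向联邦'
getAlgorithmTypeText('TREE_HORIZONTAL' as any); // '横向联邦'
getAlgorithmTypeText('NN' as any);              // undefined (no direction segment)
getAlgorithmTypeText('NN_LOCAL' as any);        // 'NN_LOCAL' (unknown direction falls back to the raw value)
getAlgorithmTypeText(undefined as any);         // undefined (the ?? '' guard yields no segment)
```
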
NOT_NN_ADVANCE_CONFIG_FIELD_LIST : NOT_TREE_ADVANCE_CONFIG_FIELD_LIST; + + const advanceConfigList: ItemProps[] = []; + const variableList = flattenDeep(variables); + variableList.forEach((item) => { + if (!blockList.includes(item.name) && item.tag === Tag.INPUT_PARAM) { + let widget_schema: VariableWidgetSchema = {}; + + try { + widget_schema = JSON.parse(item.widget_schema as any); + } catch (error) {} + advanceConfigList.push({ + field: item.name, + label: LABEL_MAPPER?.[item.name] ?? item.name, + initialValue: item.value, + tip: widget_schema.tooltip ?? TIP_MAPPER?.[item.name], + }); + } + }); + + return advanceConfigList; +} +export function getAdvanceConfigList(config: WorkflowConfig, isNN = false) { + const blockList = isNN ? NOT_NN_ADVANCE_CONFIG_FIELD_LIST : NOT_TREE_ADVANCE_CONFIG_FIELD_LIST; + + const advanceConfigList: ItemProps[] = []; + + const variableList = flattenDeep( + [config.variables || []].concat((config.job_definitions || []).map((item) => item.variables)), + ); + + variableList.forEach((item) => { + if (!blockList.includes(item.name)) { + const labelI18nKey = LABEL_MAPPER[item.name]; + const tipI18nKey = TIP_MAPPER[item.name]; + let widget_schema: VariableWidgetSchema = {}; + + try { + widget_schema = JSON.parse(item.widget_schema as any); + } catch (error) {} + advanceConfigList.push({ + field: item.name, + label: labelI18nKey ?? item.name, + initialValue: item.value, + tip: widget_schema.tooltip ?? tipI18nKey, + }); + } + }); + + return advanceConfigList; +} +export function getConfigInitialValuesByDefinition( + variables: Variable[], + list: string[] = [], + isBlockList = false, + valuePreset: Record<string, any> = {}, +) { + const variableList = flattenDeep(variables); + const initialValues: { [key: string]: any } = {}; + + variableList.forEach((item) => { + if (isBlockList) { + if (!list.includes(item.name)) { + initialValues[item.name] = item.value ?? valuePreset[item.name]; + } + } else { + if (list.includes(item.name)) { + initialValues[item.name] = item.value ?? valuePreset[item.name]; + } + } + }); + + return initialValues; +} +export function getConfigInitialValues( + config: WorkflowConfig, + list: string[] = [], + isBlockList = false, + valuePreset: Record<string, any> = {}, +) { + const variableList = flattenDeep( + [config?.variables || []].concat((config?.job_definitions || []).map((item) => item.variables)), + ); + + const initialValues: { [key: string]: any } = {}; + + variableList.forEach((item) => { + if (isBlockList) { + if (!list.includes(item.name)) { + initialValues[item.name] = item.value ?? valuePreset[item.name]; + } + } else { + if (list.includes(item.name)) { + initialValues[item.name] = item.value ?? 
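
Both initial-value helpers below follow the same allow/block contract, with `valuePreset` as the fallback whenever a variable carries no value of its own. A minimal worked example with invented fixtures:

```tsx
// Invented fixture: one variable with a value, one without.
const vars = [
  { name: 'learning_rate', value: 0.5 },
  { name: 'max_iters', value: undefined },
] as any[];

// Allow-list mode (isBlockList = false): only listed names are kept, and
// max_iters falls back to the preset because its own value is nullish.
getConfigInitialValuesByDefinition(vars, ['learning_rate', 'max_iters'], false, { max_iters: 5 });
// -> { learning_rate: 0.5, max_iters: 5 }

// Block-list mode (isBlockList = true): everything NOT listed is kept.
getConfigInitialValuesByDefinition(vars, ['learning_rate'], true, {});
// -> { max_iters: undefined }
```
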
valuePreset[item.name]; + } + } + }); + + return initialValues; +} +export function getTreeBaseConfigInitialValuesByDefinition(variables: Variable[]) { + return getConfigInitialValuesByDefinition(variables, TREE_BASE_CONFIG_FIELD_LIST, false, { + learning_rate: 0.3, + max_iters: 5, + max_depth: 3, + l2_regularization: 1.0, + max_bins: 33, + num_parallel: 5, + }); +} +export function getNNBaseConfigInitialValuesByDefinition(variables: Variable[]) { + return getConfigInitialValuesByDefinition(variables, NN_BASE_CONFIG_FIELD_LIST, false, { + epoch_num: 1, + verbosity: 1, + }); +} +export function getTreeBaseConfigInitialValues(config: WorkflowConfig) { + return getConfigInitialValues(config, TREE_BASE_CONFIG_FIELD_LIST, false, { + learning_rate: 0.3, + max_iters: 5, + max_depth: 3, + l2_regularization: 1.0, + max_bins: 33, + num_parallel: 5, + }); +} +export function getTreeAdvanceConfigInitialValues(config: WorkflowConfig) { + return getConfigInitialValues(config, NOT_TREE_ADVANCE_CONFIG_FIELD_LIST, true, { + file_ext: '.data', + file_type: 'tfrecord', + enable_packing: true, + send_scores_to_follower: false, + send_metrics_to_follower: false, + verify_example_ids: true, + verbosity: 1, + no_data: false, + label_field: 'label', + }); +} + +export function getNNBaseConfigInitialValues(config: WorkflowConfig) { + return getConfigInitialValues(config, NN_BASE_CONFIG_FIELD_LIST, false, { + epoch_num: 1, + verbosity: 1, + }); +} +export function getNNAdvanceConfigInitialValues(config: WorkflowConfig) { + return getConfigInitialValues(config, NOT_NN_ADVANCE_CONFIG_FIELD_LIST, true, { + shuffle_data_block: true, + save_checkpoint_secs: 600, + save_checkpoint_steps: 1000, + sparse_estimator: false, + steps_per_sync: 10, + }); +} + +export function hydrateWorkflowConfig( + workflowConfig: WorkflowConfig, + values: { [key: string]: any }, +) { + const tempConfig = cloneDeep(workflowConfig); + + const keyList = Object.keys(values); + + for (let index = 0; index < keyList.length; index++) { + const key = keyList[index]; + + // send_metrics_to_follower,send_scores_to_follower + // only empty string will treat as false + const formValue = ['send_metrics_to_follower', 'send_scores_to_follower'].includes(key) + ? values[key] + ? true + : '' + : values[key]; + + let isBreak = false; + + // variables + for (let j = 0; j < tempConfig.variables.length; j++) { + if (tempConfig.variables[j].name === key) { + tempConfig.variables[j].value = formValue; + isBreak = true; + break; + } + } + if (isBreak) { + continue; + } + + // job_definitions + for (let i = 0; i < tempConfig.job_definitions.length; i++) { + for (let j = 0; j < tempConfig.job_definitions[i].variables.length; j++) { + if (tempConfig.job_definitions[i].variables[j].name === key) { + tempConfig.job_definitions[i].variables[j].value = formValue; + isBreak = true; + break; + } + } + if (isBreak) { + break; + } + } + } + return tempConfig; +} + +/** + * @param variable Variable defintions without any user input value + * @param values User inputs + */ +export function hydrateModalGlobalConfig( + variables: Array<ModelJobVariable | Variable>, + values: { [key: string]: any }, + hasAlgorithmUuid: boolean = true, +): Array<Variable> { + const tempVariables = cloneDeep(variables); + const resultVariables = []; + const keyList = Object.keys(values); + + for (let index = 0; index < keyList.length; index++) { + const key = keyList[index]; + const formValue = ['send_metrics_to_follower', 'send_scores_to_follower'].includes(key) + ? values[key] + ? 
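
`hydrateWorkflowConfig` above writes each submitted form value back into the first variable of the same name, checking top-level `variables` before walking `job_definitions`; for the two `send_*_to_follower` switches, `false` is deliberately stored as an empty string. A small illustration with an invented config:

```tsx
// Invented two-level config; shapes follow WorkflowConfig loosely.
const cfg = {
  variables: [{ name: 'label_field', value: 'label' }],
  job_definitions: [{ variables: [{ name: 'learning_rate', value: '0.3' }] }],
} as any;

const hydrated = hydrateWorkflowConfig(cfg, {
  label_field: 'y',   // matches a top-level variable, so the scan stops there
  learning_rate: 0.1, // only found inside job_definitions[0]
});
// hydrated.variables[0].value === 'y'
// hydrated.job_definitions[0].variables[0].value === 0.1
// cfg itself is untouched thanks to the cloneDeep at the top.
```
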
true + : '' + : values[key]; + + for (let j = 0; j < tempVariables.length; j++) { + const newVariable = cloneDeep(tempVariables[j]); + if ( + newVariable.name === key && + (newVariable.tag === Tag.INPUT_PARAM || + newVariable.tag === Tag.RESOURCE_ALLOCATION || + (newVariable.name === 'algorithm' && !hasAlgorithmUuid)) + ) { + newVariable.value = formValue; + stringifyVariableValue(newVariable as Variable); + processVariableTypedValue(newVariable as Variable); + if (typeof newVariable.widget_schema === 'object') { + newVariable.widget_schema = JSON.stringify(newVariable.widget_schema); + } + resultVariables.push(newVariable); + break; + } + } + } + return resultVariables as Variable[]; +} + +type TableFilterConfig = Pick<TableColumnProps, 'filters' | 'onFilter'>; + +export const algorithmTypeFilters: TableFilterConfig = { + filters: algorithmTypeOptions.map((item) => ({ + text: item.label, + value: item.value, + })), + onFilter: (value: string, record: any) => { + return record?.algorithm_type === value; + }, +}; + +export const roleFilters: TableFilterConfig = { + filters: [ + { + text: '本方', + value: ModelJobRole.COORDINATOR, + }, + { + text: '合作伙伴', + value: ModelJobRole.PARTICIPANT, + }, + ], + onFilter: (value: string, record: any) => { + return record?.role === value; + }, +}; + +export const stateFilters: TableFilterConfig = { + filters: [ + WorkflowState.RUNNING, + WorkflowState.STOPPED, + WorkflowState.INVALID, + WorkflowState.COMPLETED, + WorkflowState.FAILED, + WorkflowState.PREPARE_RUN, + WorkflowState.PREPARE_STOP, + WorkflowState.WARMUP_UNDERHOOD, + WorkflowState.PENDING_ACCEPT, + WorkflowState.READY_TO_RUN, + WorkflowState.PARTICIPANT_CONFIGURING, + WorkflowState.UNKNOWN, + ].map((state) => { + const { text } = getModelJobState(state); + return { text, value: state }; + }), + onFilter: (value: string, record: ModelJobGroup | ModelJob) => { + return ( + (record as ModelJob)?.state === value || (record as ModelJobGroup)?.latest_job_state === value + ); + }, +}; + +export const statusFilters: TableFilterConfig = { + filters: [ + ModelJobStatus.PENDING, + ModelJobStatus.CONFIGURED, + ModelJobStatus.ERROR, + ModelJobStatus.RUNNING, + ModelJobStatus.SUCCEEDED, + ModelJobStatus.STOPPED, + ModelJobStatus.FAILED, + ModelJobStatus.UNKNOWN, + ].map((status) => { + return { text: MODEL_JOB_STATUE_TEXT_MAPPER[status], value: status }; + }), + onFilter: (value: string, record: ModelJob | ModelJobGroup) => { + return ( + (record as ModelJob)?.status === value || + (record as ModelJobGroup)?.latest_job_state === value + ); + }, +}; + +export const StyledPlusIcon: React.FC = () => { + return <PlusBold className={styles.plus_icon} />; +}; + +export function getDataSource(path: string) { + const regex = /\/data_source\/(.*)$/; + const matchList = path.match(regex); + return matchList?.[1] ?? ''; +} + +export function deleteEvaluationJob( + projectId: ID, + job: ModelJob, + module: ModelEvaluationModuleType, +): Promise<boolean> { + return new Promise((resolve, reject) => { + dangerConfirmWrapper( + `确认要删除「${job.name}」?`, + + module === ModelEvaluationModuleType.Evaluation + ? 
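
The `TableFilterConfig` objects above are meant to be spread straight into Arco column definitions; a sketch of that wiring (column set abridged and invented for illustration):

```tsx
// Each spread contributes `filters` (the dropdown options) and `onFilter`
// (the client-side predicate) to its column.
const columns: TableColumnProps[] = [
  { title: '名称', dataIndex: 'name' },
  { title: '类型', dataIndex: 'algorithm_type', ...algorithmTypeFilters },
  { title: '发起方', dataIndex: 'role', ...roleFilters },
  { title: '状态', dataIndex: 'status', ...statusFilters },
];
```
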
'删除后,该评估任务及信息将无法恢复,请谨慎操作' + : '删除后,该预测任务及信息将无法恢复,请谨慎操作', + '删除', + async () => { + try { + await deleteJob_new(projectId, job.id); + Message.success('删除成功'); + resolve(true); + } catch (e: any) { + Message.error(e.message); + reject(e); + } + }, + () => { + resolve(false); + }, + ); + }); +} + +export function isTreeAlgorithm(algorithmType: EnumAlgorithmProjectType | AlgorithmType) { + return [ + EnumAlgorithmProjectType.TREE_HORIZONTAL, + EnumAlgorithmProjectType.TREE_VERTICAL, + AlgorithmType.TREE, + ].includes(algorithmType); +} +export function isNNAlgorithm(algorithmType: EnumAlgorithmProjectType | AlgorithmType) { + return [ + EnumAlgorithmProjectType.NN_HORIZONTAL, + EnumAlgorithmProjectType.NN_VERTICAL, + EnumAlgorithmProjectType.NN_LOCAL, + AlgorithmType.NN, + ].includes(algorithmType); +} + +export function isOldAlgorithm(algorithmType: EnumAlgorithmProjectType | AlgorithmType) { + return [AlgorithmType.TREE, AlgorithmType.NN].includes(algorithmType as AlgorithmType); +} +export function isHorizontalAlgorithm(algorithmType: EnumAlgorithmProjectType) { + return [ + EnumAlgorithmProjectType.TREE_HORIZONTAL, + EnumAlgorithmProjectType.NN_HORIZONTAL, + ].includes(algorithmType); +} +export function isVerticalAlgorithm(algorithmType: EnumAlgorithmProjectType) { + return [EnumAlgorithmProjectType.TREE_VERTICAL, EnumAlgorithmProjectType.NN_VERTICAL].includes( + algorithmType, + ); +} + +export function isVerticalNNAlgorithm(algorithmType: EnumAlgorithmProjectType) { + return algorithmType === EnumAlgorithmProjectType.NN_VERTICAL; +} + +export const checkAlgorithmValueIsEmpty = ( + value: { algorithmProjectId: any; algorithmId: any } | undefined, + callback: (error?: string) => void, +) => { + if ( + value && + (value.algorithmProjectId || value.algorithmProjectId === 0) && + (value.algorithmId || value.algorithmId === 0 || value.algorithmId === null) + ) { + return callback(); + } + return callback('必填项'); +}; + +export enum TRAIN_ROLE { + LEADER = 'Leader', + FOLLOWER = 'Follower', +} + +export const FILTER_MODEL_TRAIN_OPERATOR_MAPPER = { + role: FilterOp.IN, + algorithm_type: FilterOp.IN, + name: FilterOp.CONTAIN, + configured: FilterOp.EQUAL, + //TODO: 'states' support BE filter +}; +export const FILTER_MODEL_JOB_OPERATOR_MAPPER = { + role: FilterOp.IN, + algorithm_type: FilterOp.IN, + name: FilterOp.CONTAIN, + model_job_type: FilterOp.IN, + status: FilterOp.IN, + configured: FilterOp.EQUAL, + auth_status: FilterOp.IN, +}; + +export const MODEL_GROUP_STATUS_MAPPER: Record<ModelGroupStatus, any> = { + TICKET_PENDING: { + status: 'default', + percent: 30, + name: '待审批', + }, + CREATE_PENDING: { + status: 'default', + percent: 40, + name: '创建中', + }, + CREATE_FAILED: { + status: 'warning', + percent: 100, + name: '创建失败', + }, + TICKET_DECLINE: { + status: 'warning', + percent: 30, + name: '审批拒绝', + }, + SELF_AUTH_PENDING: { + status: 'default', + percent: 50, + name: '待我方授权', + }, + PART_AUTH_PENDING: { + status: 'default', + percent: 70, + name: '待合作伙伴授权', + }, + ALL_AUTHORIZED: { + status: 'success', + percent: 100, + name: '授权通过', + }, +}; + +export const AUTH_STATUS_TEXT_MAP: Record<string, string> = { + PENDING: '待授权', + AUTHORIZED: '已授权', + WITHDRAW: '待授权', +}; +export function resetAuthInfo( + participantsMap: Record<string, any> | undefined, + participantList: Participant[], + myPureDomainName: string, +) { + const resultList: any[] = []; + const keyList = Object.keys(participantsMap || {}); + keyList.forEach((key) => { + const curParticipant = participantList.find( + 
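
`deleteEvaluationJob` above wraps the confirm dialog in a Promise: it resolves `true` after a successful delete, resolves `false` when the user cancels, and rejects only when the API call fails. A hypothetical caller (`refetchList` is a stand-in for whatever refresh the call site owns):

```tsx
declare function refetchList(): void; // assumed caller-side helper

async function handleDeleteClick(projectId: ID, job: ModelJob) {
  try {
    const deleted = await deleteEvaluationJob(projectId, job, ModelEvaluationModuleType.Evaluation);
    if (deleted) {
      refetchList(); // only refresh after a confirmed, successful delete
    }
  } catch {
    // deleteJob_new failed; the error toast was already shown by the wrapper.
  }
}
```
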
(participant: Participant) => participant?.pure_domain_name === key, + ); + key !== myPureDomainName && + curParticipant && + resultList.push({ + name: curParticipant?.name, + authStatus: participantsMap?.[key].auth_status, + }); + }); + resultList.sort((a: any, b: any) => { + return a.name > b.name ? 1 : -1; + }); + resultList.unshift({ + name: '我方', + authStatus: participantsMap?.[myPureDomainName]?.auth_status, + }); + return resultList; +} + +export const ALGORITHM_TYPE_LABEL_MAPPER: Record<string, string> = { + NN_HORIZONTAL: '横向联邦-NN模型', + NN_VERTICAL: '纵向联邦-NN模型', + TREE_VERTICAL: '纵向联邦-树模型', +}; + +export const MODEL_JOB_STATUS_MAPPER: Record<ModelJobAuthStatus, any> = { + TICKET_PENDING: { + status: 'default', + percent: 30, + name: '待审批', + }, + CREATE_PENDING: { + status: 'default', + percent: 40, + name: '创建中', + }, + CREATE_FAILED: { + status: 'warning', + percent: 100, + name: '创建失败', + }, + TICKET_DECLINE: { + status: 'warning', + percent: 30, + name: '审批拒绝', + }, + SELF_AUTH_PENDING: { + status: 'default', + percent: 50, + name: '待我方授权', + }, + PART_AUTH_PENDING: { + status: 'default', + percent: 70, + name: '待合作伙伴授权', + }, + ALL_AUTHORIZED: { + status: 'success', + percent: 100, + name: '授权通过', + }, +}; diff --git a/web_console_v2/client/src/views/ModelServing/InstanceNumberInput.module.less b/web_console_v2/client/src/views/ModelServing/InstanceNumberInput.module.less new file mode 100644 index 000000000..8aa82fd16 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/InstanceNumberInput.module.less @@ -0,0 +1,6 @@ +.info_icon_container{ + margin-right: 5px; +} +.input_number_container{ + width: 120px; +} diff --git a/web_console_v2/client/src/views/ModelServing/InstanceNumberInput.tsx b/web_console_v2/client/src/views/ModelServing/InstanceNumberInput.tsx new file mode 100644 index 000000000..c959c9a16 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/InstanceNumberInput.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { InputNumber, InputNumberProps, Space } from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; + +import styles from './InstanceNumberInput.module.less'; + +const InstanceNumberInput: React.FC<InputNumberProps> = (props) => { + return ( + <Space size="large"> + <InputNumber + className={styles.input_number_container} + mode="button" + precision={0} + {...props} + /> + <span> + <IconInfoCircle className={styles.info_icon_container} /> + 实例数范围1~100 + </span> + </Space> + ); +}; + +export default InstanceNumberInput; diff --git a/web_console_v2/client/src/views/ModelServing/InstanceTable.module.less b/web_console_v2/client/src/views/ModelServing/InstanceTable.module.less new file mode 100644 index 000000000..e75bcc162 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/InstanceTable.module.less @@ -0,0 +1,8 @@ + +.edit_text_container{ + display: inline-block; + margin-right: 18px; + font-size: 13px; + color: var(--primaryColor); + cursor: pointer; +} diff --git a/web_console_v2/client/src/views/ModelServing/InstanceTable.tsx b/web_console_v2/client/src/views/ModelServing/InstanceTable.tsx new file mode 100644 index 000000000..0f44fd360 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/InstanceTable.tsx @@ -0,0 +1,176 @@ +import React, { FC } from 'react'; + +import { formatTimestamp } from 'shared/date'; + +import Table from 'components/Table'; +import StateIndicator, { StateTypes, ActionItem } from 'components/StateIndicator'; + +import { TableProps, 
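
`InstanceNumberInput` above is a thin wrapper over a controlled Arco `InputNumber`, so it drops into a `Form.Item` unchanged; a usage sketch mirroring the 1~100 hint text (the field name and surrounding form are invented):

```tsx
import React from 'react';
import { Form } from '@arco-design/web-react';
import InstanceNumberInput from './InstanceNumberInput';

// Illustrative scale form; bounds match the "实例数范围1~100" hint.
const ScaleForm: React.FC = () => (
  <Form initialValues={{ instance_num: 1 }}>
    <Form.Item
      field="instance_num"
      label="实例数"
      rules={[{ required: true }, { type: 'number', min: 1, max: 100 }]}
    >
      <InstanceNumberInput min={1} max={100} />
    </Form.Item>
  </Form>
);

export default ScaleForm;
```
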
TableColumnProps } from '@arco-design/web-react'; +import { ModelServingInstance, ModelServingInstanceState } from 'typings/modelServing'; +import { SortDirection } from '@arco-design/web-react/es/Table/interface'; + +import styles from './InstanceTable.module.less'; + +type FilterValue = string[]; +type ColumnsGetterOptions = { + filter?: Record<string, FilterValue>; + sorter?: Record<string, SortDirection>; + onLogClick?: any; +}; + +function getDotState( + instance: ModelServingInstance, + options: ColumnsGetterOptions, +): { type: StateTypes; text: string; tip?: string; actionList?: ActionItem[] } { + if (instance.status === ModelServingInstanceState.AVAILABLE) { + return { + text: '运行中', + type: 'success', + }; + } + if (instance.status === ModelServingInstanceState.UNAVAILABLE) { + return { + text: '异常', + type: 'error', + // TODO: error tips + }; + } + + return { + text: '异常', + type: 'error', + }; +} + +const getTableColumns = (options: ColumnsGetterOptions) => { + const cols: TableColumnProps[] = [ + { + key: 'name', + dataIndex: 'name', + title: '实例ID', + width: 320, + }, + { + key: 'instances_status', + dataIndex: 'status', + filterMultiple: false, + filteredValue: options?.filter?.instances_status, + filters: [ + { text: '运行中', value: ModelServingInstanceState.AVAILABLE }, + { + text: '异常', + value: ModelServingInstanceState.UNAVAILABLE, + }, + ], + onFilter: (value, record) => record.status === value, + title: '状态', + render: (_: any, record: any) => { + return ( + <StateIndicator + {...getDotState(record, { + ...options, + })} + /> + ); + }, + }, + // Because BE can't get cpu/memory info, so hide temporarily + // { + // title: i18n.t('model_serving.col_cpu'), + // dataIndex: 'cpu', + // key: 'cpu', + // render: (value, record) => { + // const percent = parseFloat(value) || 0; + // return <Progress percent={percent} size="small" format={(percent) => `${percent || 0}%`} />; + // }, + // }, + // { + // title: i18n.t('model_serving.col_men'), + // dataIndex: 'memory', + // key: 'memory', + // render: (value, record) => { + // const percent = parseFloat(value) || 0; + // return <Progress percent={percent} size="small" format={(percent) => `${percent || 0}%`} />; + // }, + // }, + { + key: 'created_at', + dataIndex: 'created_at', + title: '创建时间', + sorter: true, + sortOrder: options.sorter?.created_at, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + { + key: 'operation', + dataIndex: 'operation', + title: '操作', + fixed: 'right', + render: (_, record) => { + const isDisabled = false; + + return ( + <> + <span + className={styles.edit_text_container} + data-is-disabled={isDisabled} + onClick={(event) => { + event.stopPropagation(); + options?.onLogClick(record); + }} + > + 查看日志 + </span> + </> + ); + }, + }, + ]; + + return cols; +}; + +interface Props extends TableProps<ModelServingInstance> { + loading: boolean; + filter?: Record<string, FilterValue>; + sorter?: Record<string, SortDirection>; + dataSource: any[]; + total?: number; + onLogClick?: (record: ModelServingInstance) => void; + onShowSizeChange?: (current: number, size: number) => void; + onPageChange?: (page: number, pageSize: number) => void; +} + +const InstanceTable: FC<Props> = ({ + loading, + dataSource, + total, + filter, + sorter, + onLogClick, + onShowSizeChange, + onPageChange, + ...restProps +}) => { + return ( + <Table + className="customFilterIconTable" + rowKey="name" + scroll={{ x: '100%' }} + loading={loading} + total={total} + data={dataSource} + columns={getTableColumns({ + filter, 
+ sorter, + onLogClick, + })} + onShowSizeChange={onShowSizeChange} + onPageChange={onPageChange} + pagination={{ hideOnSinglePage: true }} + {...restProps} + /> + ); +}; + +export default InstanceTable; diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingDetail/index.module.less b/web_console_v2/client/src/views/ModelServing/ModelServingDetail/index.module.less new file mode 100644 index 000000000..ea303a04c --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingDetail/index.module.less @@ -0,0 +1,52 @@ +@import '~styles/mixins.less'; +.avatar_container{ + .MixinSquare(44px); + background-color: #5360d8; + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + + &::before { + display: inline-block; + width: 100%; + height: 100%; + content: ''; + background: url(../../../assets/icons/atom.svg) no-repeat; + background-size: contain; + } +} +.padding_container{ + padding: 20px 20px 0; +} +.name{ + margin-bottom: 0; + font-size: 16px; + height: 24px; + font-weight: 600; +} +.comment{ + .MixinEllipsis(400px); + display: block; + font-size: 12px; + line-height: 18px; + color: var(--textColorSecondary); +} +.change_button{ + margin-right: 20px ; +} + +.title{ + margin-top: 20px; + margin-bottom: 10px; + font-weight: bold; + color: rgb(var(--gray-10)); +} +.inference_hidden_info{ + height: 36px; + border-radius: 2px; + color: rgb(var(--gray-7)); + text-align: center; + line-height: 36px; + background: rgb(var(--gray-1)); +} diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingDetail/index.tsx b/web_console_v2/client/src/views/ModelServing/ModelServingDetail/index.tsx new file mode 100644 index 000000000..18b8eff36 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingDetail/index.tsx @@ -0,0 +1,324 @@ +import React, { FC, useMemo } from 'react'; +import { cloneDeep } from 'lodash-es'; +import { useHistory, useParams } from 'react-router'; +import { useQuery } from 'react-query'; +import { useUrlState, useGetCurrentProjectId } from 'hooks'; + +import { fetchModelServingDetail_new, deleteModelServing_new } from 'services/modelServing'; +import { formatTimestamp } from 'shared/date'; +import { forceToRefreshQuery } from 'shared/queryClient'; +import { updateServiceInstanceNum } from '../shared'; +import { modelDirectionTypeToTextMap, getDotState, getTableFilterValue } from '../shared'; +import { CONSTANTS } from 'shared/constants'; + +import { Spin, Button, Message, Grid, Tooltip } from '@arco-design/web-react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import StateIndicator from 'components/StateIndicator'; +import BackButton from 'components/BackButton'; +import MoreActions from 'components/MoreActions'; +import PropertyList from 'components/PropertyList'; +import GridRow from 'components/_base/GridRow'; +import Modal from 'components/Modal'; +import UserGuideTab from '../UserGuideTab'; +import InstanceTable from '../InstanceTable'; +import WhichModel from 'components/WhichModel'; + +import { + ModelServing, + ModelDirectionType, + ModelServingInstance, + ModelServingState, +} from 'typings/modelServing'; +import { SortDirection, SorterResult } from '@arco-design/web-react/es/Table/interface'; + +import styles from './index.module.less'; + +type Props = {}; + +const ModelServingDetail: FC<Props> = () => { + const { id } = useParams<{ + id: string; + tabType: string; + }>(); + + const history = useHistory(); + const [urlState, setUrlState] = useUrlState<Record<string, string | 
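
A minimal render sketch for the `InstanceTable` defined above; the wrapper component and its prop are invented, and the log URL mirrors the one the detail page below actually opens:

```tsx
// Illustrative only: `service` stands for a loaded ModelServing entity.
function InstanceSection({ service }: { service: ModelServing }) {
  const instances = service.instances ?? [];
  return (
    <InstanceTable
      loading={false}
      total={instances.length}
      dataSource={instances}
      sorter={{ created_at: 'descend' }}
      filter={{ instances_status: ['AVAILABLE'] }}
      onLogClick={(instance) =>
        window.open(`/v2/logs/model-serving/${service.id}/${instance.name}`, '_blank noopener')
      }
    />
  );
}
```
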
undefined>>({ + order_by: undefined, + instances_status: undefined, + }); + + const projectId = useGetCurrentProjectId(); + + const modelServingDetailQuery = useQuery( + ['fetchModelServingDetail', id, urlState.order_by], + + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchModelServingDetail_new(projectId!, id, { + order_by: urlState.order_by || 'created_at desc', + }); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const modelServingDetail = useMemo<ModelServing>(() => { + const emptyData = {} as ModelServing; + + if (!modelServingDetailQuery.data || !modelServingDetailQuery.data.data) { + return emptyData; + } + return modelServingDetailQuery.data.data; + }, [modelServingDetailQuery.data]); + + const displayedProps = useMemo(() => { + const modelDirectionText = + modelDirectionTypeToTextMap[ + modelServingDetail.is_local ? ModelDirectionType.HORIZONTAL : ModelDirectionType.VERTICAL + ]; + + const { instance_num_status: instanceAmount } = modelServingDetail; + let payload; + try { + payload = JSON.parse( + modelServingDetail.remote_platform?.payload || JSON.stringify({ target_psm: '-' }), + ); + } catch (error) {} + const thirdServingDisplayedList = [ + { + value: + <StateIndicator {...getDotState(modelServingDetail)} /> || CONSTANTS.EMPTY_PLACEHOLDER, + label: '状态', + }, + { + value: modelDirectionText || CONSTANTS.EMPTY_PLACEHOLDER, + label: '模型类型', + }, + { + value: ( + <WhichModel + id={modelServingDetail.model_id} + isModelGroup={Boolean(modelServingDetail.model_group_id)} + /> + ), + label: '模型', + }, + { + value: + instanceAmount !== 'UNKNOWN' + ? instanceAmount || CONSTANTS.EMPTY_PLACEHOLDER + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '实例数量', + }, + { + value: + ( + <a href={modelServingDetail.endpoint} target="_blank" rel="noreferrer"> + {payload?.target_psm} + </a> + ) || CONSTANTS.EMPTY_PLACEHOLDER, + label: 'psm', + }, + + { + value: modelServingDetail.created_at + ? formatTimestamp(modelServingDetail.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '创建时间', + }, + { + value: modelServingDetail.updated_at + ? formatTimestamp(modelServingDetail.updated_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '更新时间', + }, + ]; + const innerServingDisplayedList = cloneDeep(thirdServingDisplayedList); + innerServingDisplayedList.splice(4, 1); + return modelServingDetail?.remote_platform + ? thirdServingDisplayedList + : innerServingDisplayedList; + }, [modelServingDetail]); + + const sorterProps = useMemo<Record<string, SortDirection>>(() => { + if (urlState.order_by) { + const order = urlState.order_by?.split(' ') || []; + return { + [order[0]]: order?.[1] === 'asc' ? 'ascend' : 'descend', + }; + } + + return {}; + }, [urlState.order_by]); + + const isLoading = modelServingDetailQuery.isFetching; + const tableDataList = modelServingDetail?.instances ?? []; + + return ( + <SharedPageLayout + title={<BackButton onClick={goBack}>{'在线服务'}</BackButton>} + cardPadding={0} + isNestSpinFlexContainer={true} + > + <Spin loading={isLoading}> + <div className={styles.padding_container}> + <Grid.Row align="center" justify="space-between"> + <GridRow gap="12" style={{ maxWidth: '75%' }}> + <div + className={styles.avatar_container} + data-name={ + modelServingDetail.name + ? modelServingDetail.name.slice(0, 1) + : CONSTANTS.EMPTY_PLACEHOLDER + } + /> + <div> + <h3 className={styles.name}>{modelServingDetail.name ?? 
'...'}</h3> + <Tooltip content={modelServingDetail?.comment}> + <small className={styles.comment}> + {modelServingDetail?.comment || CONSTANTS.EMPTY_PLACEHOLDER} + </small> + </Tooltip> + </div> + </GridRow> + + <GridRow> + <Button + className={styles.change_button} + type="primary" + disabled={ + modelServingDetail.status !== ModelServingState.AVAILABLE || + modelServingDetail.resource === undefined + } + onClick={onScaleClick} + > + 扩缩容 + </Button> + <MoreActions + actionList={[ + { + label: '编辑', + onClick: onChangeClick, + }, + { + label: '删除', + onClick: onDeleteClick, + danger: true, + }, + ]} + /> + </GridRow> + </Grid.Row> + <PropertyList cols={4} colProportions={[1, 1, 2, 1]} properties={displayedProps} /> + + <p className={styles.title}>调用指南</p> + {!modelServingDetail.is_local && + !modelServingDetail.support_inference && + !modelServingDetail.remote_platform ? ( + <div className={styles.inference_hidden_info}> + 纵向模型服务仅发起方可查看调用地址和 Signature + </div> + ) : ( + <UserGuideTab + isShowLabel={false} + isShowSignature={!modelServingDetail.remote_platform} + data={modelServingDetail} + /> + )} + {modelServingDetail.resource !== undefined && ( + <> + <p className={styles.title}>实例列表</p> + <InstanceTable + sorter={sorterProps} + filter={{ + instances_status: getTableFilterValue(urlState.instances_status), + }} + total={tableDataList.length} + dataSource={tableDataList} + loading={modelServingDetailQuery.isFetching} + onLogClick={onLogClick} + onChange={onTableChange} + /> + </> + )} + </div> + </Spin> + </SharedPageLayout> + ); + + function goBack() { + history.goBack(); + } + + function onLogClick(record: ModelServingInstance) { + window.open(`/v2/logs/model-serving/${id}/${record.name}`, '_blank noopener'); + } + + async function onChangeClick() { + if (modelServingDetail.id) { + history.push(`/model-serving/edit/${modelServingDetail.id}`); + } + } + + async function onScaleClick() { + updateServiceInstanceNum(modelServingDetail, () => { + forceToRefreshQuery(['fetchModelServingDetail', id]); + }); + } + + function onDeleteClick() { + if (!projectId) { + Message.info('请选择工作区!'); + return; + } + Modal.delete({ + title: `确认要删除「${modelServingDetail.name}」?`, + content: '一旦删除,在线服务相关数据将无法复原,请谨慎操作', + onOk() { + deleteModelServing_new(projectId!, modelServingDetail.id) + .then(() => { + Message.success('删除成功'); + history.replace('/model-serving'); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + } + + function onTableChange( + _: any, + sorter: SorterResult | SorterResult[], + filters: Record<string, any>, + extra: { action: string }, + ) { + const { action } = extra; + const latestSorter = Array.isArray(sorter) ? sorter[0] : sorter; + + if (action === 'sort' && latestSorter.field && latestSorter.direction) { + setUrlState({ + order_by: `${latestSorter.field as string} ${ + latestSorter.direction === 'ascend' ? 
'asc' : 'desc' + }`, + }); + } else { + setUrlState({ + order_by: undefined, + }); + } + + if (action === 'filter') { + setUrlState({ + instances_status: filters.status?.[0] || undefined, + }); + } + } +}; + +export default ModelServingDetail; diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingForm/index.module.less b/web_console_v2/client/src/views/ModelServing/ModelServingForm/index.module.less new file mode 100644 index 000000000..8b85d9485 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingForm/index.module.less @@ -0,0 +1,63 @@ +@path:'../../../assets/images'; +.spin_container{ + min-height: 500px; +} + +.container{ + flex: 1; +} + +.styled_form{ + --form-width: 600px; + margin-top: 20px; + > .form-title { + margin-bottom: 24px; + font-size: 27px; + line-height: 36px; + } + > .ant-space { + display: flex; + } + + > .ant-form-item { + &:last-child { + margin-bottom: 0; + } + } +} + +.button_group{ + display: flex; + align-items: center; + + &:not(:last-child) { + margin-bottom: 12px; + } + button:not(:last-child) { + margin-right: 12px; + } +} + +.styled_submit_button{ + min-width: 84px; +} + +.styled_info_alert{ + margin-top: 20px; +} + +.empty_model_tip_container{ + position: absolute; + left: 50%; + top: 40%; + width: 400px; + padding-top: 164px; + background-image: url('@{path}/empty.png'); + background-size: 140px auto; + background-repeat: no-repeat; + background-position: top center; + font-size: 12px; + color: rgb(var(--gray-6)); + text-align: center; + transform: translate(-50%, -50%); +} diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingForm/index.tsx b/web_console_v2/client/src/views/ModelServing/ModelServingForm/index.tsx new file mode 100644 index 000000000..49adee326 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingForm/index.tsx @@ -0,0 +1,751 @@ +import React, { FC, useEffect, useMemo, useState } from 'react'; +import { useHistory, useParams } from 'react-router-dom'; +import { useQuery } from 'react-query'; + +import { + createModelServing_new, + updateModelServing_new, + fetchModelServingList_new, + fetchModelServingDetail_new, + fetchUserTypeInfo, +} from 'services/modelServing'; +import { + fetchModelList, + fetchModelJobGroupList, + fetchModelJobGroupDetail, +} from 'services/modelCenter'; +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; +import { convertCpuMToCore } from 'shared/helpers'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { FILTER_SERVING_OPERATOR_MAPPER, cpuIsCpuM, memoryIsMemoryGi } from '../shared'; + +import { useIsFormValueChange, useGetCurrentProjectId } from 'hooks'; + +import { + Spin, + Form, + Input, + Button, + Message, + Grid, + Select, + Alert, + Switch, + Space, + Typography, +} from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import { FormHeader } from 'components/SharedPageLayout'; +import BlockRadio from 'components/_base/BlockRadio'; +import InputGroup, { TColumn } from 'components/InputGroup'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import TitleWithIcon from 'components/TitleWithIcon'; + +import debounce from 'debounce-promise'; +import i18n from 'i18n'; +import { EnumAlgorithmProjectType } from 'typings/algorithm'; +import { FilterOp } from 'typings/filter'; + +import styles from 
'./index.module.less'; + +const { Row, Col } = Grid; + +type FormValues = { + name: string; + comment: string; + is_local: boolean; + model_set: { + model_set_id: number; + model_id: number; + }; + model_id: number; + resource: [ + { + cpu: string; + memory: string; + replicas: number; + }, + ]; + instance_num: number; + auto_update: boolean; + model_group_id: boolean; + third_serving: boolean; + psm: string; +}; + +const FILTER_MODEL_TRAIN_OPERATOR_MAPPER = { + role: FilterOp.EQUAL, + algorithm_type: FilterOp.IN, + name: FilterOp.CONTAIN, + configured: FilterOp.EQUAL, +}; + +const initialValues: any = { + is_local: false, + name: undefined, + comment: undefined, + model_set: undefined, + resource: [ + { + cpu: 1, + memory: 1, + replicas: 1, + }, + ], + auto_update: false, + third_serving: false, + psm: undefined, +}; + +const getResourceFormColumns = (readonly?: boolean): TColumn[] => [ + { + type: 'INPUT_NUMBER', + span: 8, + dataIndex: 'replicas', + title: i18n.t('model_serving.label_instance_amount'), + precision: 0, + rules: [ + { required: true, message: i18n.t('model_serving.msg_required') }, + { min: 1, type: 'number' }, + { max: 100, type: 'number' }, + ], + tooltip: i18n.t('tip_replicas_range'), + mode: 'button', + min: 1, + max: 100, + disabled: readonly, + }, + { + type: 'INPUT_NUMBER', + unitLabel: 'Core', + span: 8, + dataIndex: 'cpu', + title: i18n.t('cpu'), + precision: 1, + rules: [{ required: true, message: i18n.t('model_serving.msg_required') }], + tooltip: i18n.t('tip_please_input_positive_number'), + placeholder: i18n.t('placeholder_cpu'), + disabled: readonly, + }, + { + type: 'INPUT_NUMBER', + unitLabel: 'Gi', + span: 8, + dataIndex: 'memory', + title: i18n.t('mem'), + precision: 0, + rules: [{ required: true, message: i18n.t('model_serving.msg_required') }], + tooltip: i18n.t('tip_please_input_positive_integer'), + placeholder: i18n.t('placeholder_mem'), + disabled: readonly, + }, +]; + +const modelGroupIsEmptyRegx = /model\s*in\s*group\s*[0-9]*\s*is\s*not\s*found/i; + +const ModelServingForm: FC = () => { + const history = useHistory(); + const { action, role, id } = useParams<{ + action: string; + role: string; + id: string; + }>(); + const isEdit = action === 'edit'; + const isReceiver = role === 'receiver'; + + const [form] = Form.useForm(); + const [loading, setLoading] = useState(false); + const [modelChange, setModelChange] = useState(false); + const [formConfig, setFormConfig] = useState({ + isHorizontalModel: false, + autoUpdate: false, + thirdServing: false, + }); + + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(); + const projectId = useGetCurrentProjectId(); + + const userTypeQuery = useQuery( + ['fetchUserType', projectId], + () => fetchUserTypeInfo(projectId!), + { + retry: 2, + enabled: Boolean(projectId), + }, + ); + const modelServingDetailQuery = useQuery( + ['fetchModelServingDetail', id], + () => fetchModelServingDetail_new(projectId!, id), + { + cacheTime: 1, + refetchOnWindowFocus: false, + enabled: Boolean(id) && Boolean(projectId), + }, + ); + + const modelListQuery = useQuery( + ['fetchModelList', projectId, formConfig.isHorizontalModel], + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchModelList(projectId, { + algorithm_type: formConfig.isHorizontalModel + ? 
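
`modelGroupIsEmptyRegx` above is matched against backend error messages; sample behavior (messages invented to show the tolerance for casing, whitespace, and the optional group id):

```tsx
modelGroupIsEmptyRegx.test('Model in group 42 is not found'); // true
modelGroupIsEmptyRegx.test('model in group is not found');    // true (group id is optional)
modelGroupIsEmptyRegx.test('model group 42 not found');       // false (missing "in ... is")
```
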
EnumAlgorithmProjectType.NN_HORIZONTAL + : EnumAlgorithmProjectType.NN_VERTICAL, + }); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const modelJobGroupListQuery = useQuery( + ['fetchModelJobGroupList', projectId, formConfig.isHorizontalModel], + () => { + if (!projectId) { + Message.error('请选择工作区'); + return; + } + return fetchModelJobGroupList(projectId, { + filter: filterExpressionGenerator( + { + configured: true, + algorithm_type: formConfig.isHorizontalModel + ? [EnumAlgorithmProjectType.NN_HORIZONTAL] + : [EnumAlgorithmProjectType.NN_VERTICAL], + }, + FILTER_MODEL_TRAIN_OPERATOR_MAPPER, + ), + }); + }, + { + refetchOnWindowFocus: false, + }, + ); + + const userType = useMemo(() => { + return userTypeQuery.data?.data || []; + }, [userTypeQuery]); + + const modelServingDetail = useMemo(() => { + return modelServingDetailQuery.data?.data; + }, [modelServingDetailQuery]); + + const modelList = useMemo(() => { + return modelListQuery.data?.data ?? []; + }, [modelListQuery]); + + const modelJobGroupList = useMemo(() => { + return modelJobGroupListQuery.data?.data ?? []; + }, [modelJobGroupListQuery]); + + const payload = useMemo(() => { + let resultPayload = { target_psm: '-' }; + try { + resultPayload = JSON.parse( + modelServingDetail?.remote_platform?.payload ?? JSON.stringify({ target_psm: '-' }), + ); + } catch (error) {} + return resultPayload; + }, [modelServingDetail]); + + const disabled: Partial<Record<keyof FormValues, boolean>> = { + name: isEdit || isReceiver, + comment: false, + is_local: isEdit || isReceiver, + instance_num: false, + model_id: isReceiver, + model_group_id: isReceiver, + auto_update: isReceiver, + third_serving: isEdit || isReceiver, + psm: isEdit || isReceiver, + }; + const readonlyField: Partial<Record<keyof FormValues, boolean>> = { + name: isReceiver || isEdit, + is_local: isReceiver || isEdit, + model_id: + isReceiver || + (isEdit && !modelServingDetail?.is_local && !modelServingDetail?.remote_platform), + model_group_id: + isReceiver || + (isEdit && !modelServingDetail?.is_local && !modelServingDetail?.remote_platform), + auto_update: + isReceiver || + (isEdit && !modelServingDetail?.is_local && !modelServingDetail?.remote_platform), + psm: isEdit, + third_serving: isEdit || isReceiver, + }; + + const selectedModelGroupQuery = useQuery( + ['fetchModelJobGroupDetail', projectId, modelServingDetail?.model_group_id], + () => fetchModelJobGroupDetail(projectId!, modelServingDetail?.model_group_id!), + { + enabled: Boolean(projectId && modelServingDetail?.model_group_id), + refetchOnWindowFocus: false, + }, + ); + + const selectedModel = useMemo(() => { + const curModelId = modelServingDetail?.model_id; + return modelList.find((item) => item.id === curModelId); + }, [modelList, modelServingDetail?.model_id]); + + const selectedModelGroup = useMemo(() => { + return selectedModelGroupQuery.data?.data; + }, [selectedModelGroupQuery.data?.data]); + + const isReceiverModelEmpty = useMemo(() => { + if (!isReceiver || modelListQuery.isLoading) { + return false; + } + + return selectedModel == null; + }, [isReceiver, modelListQuery.isLoading, selectedModel]); + + const handleOnChangeFederalType = (checked: boolean) => { + form.setFieldValue('model_id', undefined); + form.setFieldValue('model_group_id', undefined); + setFormConfig((prevState) => ({ + ...prevState, + isHorizontalModel: checked, + })); + }; + + useEffect(() => { + let isUnmount = false; + const data = modelServingDetail; + if (!data) { + return; + } + setFormConfig({ + 
isHorizontalModel: data.is_local, + autoUpdate: Boolean(data.model_group_id), + thirdServing: Boolean(data.remote_platform), + }); + form.setFieldsValue({ + name: data.name, + comment: data.comment, + is_local: data.is_local, + model_id: data.model_id, + model_group_id: data.model_group_id, + psm: payload.target_psm, + resource: [ + { + cpu: cpuIsCpuM(data.resource?.cpu ?? '1') + ? convertCpuMToCore(data.resource?.cpu, false) + : data.resource?.cpu, + memory: memoryIsMemoryGi(data.resource?.memory ?? '1Gi') + ? data.resource?.memory.slice(0, -2) + : data.resource?.memory, + replicas: data.resource?.replicas || 1, + }, + ], + }); + return () => { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + isUnmount = true; + }; + }, [modelServingDetail, form, payload]); + + return ( + <Spin className={styles.spin_container} loading={modelServingDetailQuery.isLoading}> + <SharedPageLayout + title={ + <BackButton + onClick={() => history.replace('/model-serving')} + isShowConfirmModal={isFormValueChanged} + > + 在线服务 + </BackButton> + } + > + {isReceiverModelEmpty ? ( + <div className={styles.empty_model_tip_container}> + <span>因对应模型不存在,请选择两侧均存在的纵向联邦模型进行部署</span> + </div> + ) : ( + <div className={styles.container}> + <FormHeader>{isEdit ? '编辑服务' : '创建服务'}</FormHeader> + {isReceiver ? ( + <Alert + className={styles.styled_info_alert} + content={'纵向模型服务仅发起方可查看调用地址和 Signature'} + type="info" + showIcon + /> + ) : null} + <Form + className={styles.styled_form} + layout="horizontal" + initialValues={initialValues} + form={form} + labelCol={{ span: 3 }} + wrapperCol={{ span: 12 }} + onSubmit={onFinish} + onSubmitFailed={onFinishFailed} + onValuesChange={onFormValueChange} + scrollToFirstError + > + <Form.Item + hasFeedback + field="name" + label={'在线服务名称'} + rules={[ + { required: true, message: '必填项' }, + { + match: validNamePattern, + message: + '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + { + validator: debounce(async function (value: any, cb) { + if (isEdit || isReceiver || !value) { + return; + } + const isDuplicate = await checkNameIsDuplicate(value); + cb(isDuplicate ? '在线服务名称已存在' : undefined); + }, 300), + }, + ]} + > + {readonlyField.name ? ( + <PlainText /> + ) : ( + <Input placeholder={'请输入在线服务名称'} disabled={disabled.name} allowClear /> + )} + </Form.Item> + <Form.Item + field="comment" + label={'在线服务描述'} + rules={[ + { + maxLength: MAX_COMMENT_LENGTH, + message: '最多为 200 个字符', + }, + ]} + > + <Input.TextArea + rows={4} + name="comment" + placeholder={'最多为 200 个字符'} + disabled={disabled.comment} + showWordLimit + /> + </Form.Item> + {Boolean(userType.length) && ( + <Form.Item field="third_serving" label="部署到第三方"> + {readonlyField.third_serving ? ( + <Typography.Text bold={true}> + {formConfig.thirdServing ? '开启' : '关闭'} + </Typography.Text> + ) : ( + <Space> + <Switch + disabled={disabled.third_serving} + onChange={(value) => { + setFormConfig((prevState) => ({ + ...prevState, + thirdServing: value, + })); + }} + /> + <TitleWithIcon + title="开启后服务将部署到reckon" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </Space> + )} + </Form.Item> + )} + {formConfig.thirdServing && ( + <Form.Item field="psm" label="psm" rules={[{ required: true }]}> + {readonlyField.psm ? 
( + <a href={modelServingDetail?.endpoint} target="_blank" rel="noreferrer"> + {payload.target_psm} + </a> + ) : ( + <Input disabled={disabled.psm} placeholder="请输入psm" /> + )} + </Form.Item> + )} + <Form.Item + field="is_local" + label={'联邦类型'} + wrapperCol={{ span: 6 }} + rules={[{ required: true }]} + > + {readonlyField.is_local ? ( + <PlainText valueFormat={(is_local) => (is_local ? '横向联邦' : '纵向联邦')} /> + ) : ( + <BlockRadio + onChange={handleOnChangeFederalType} + gap={8} + isCenter={true} + disabled={disabled.is_local} + options={[ + { + label: '纵向联邦', + value: false, // note: 先写死,改动 ModelDirectionType 需要动到的地方较多 + }, + { + label: '横向联邦', + value: true, + }, + ]} + /> + )} + </Form.Item> + <Form.Item field="auto_update" label="自动更新模型" rules={[{ required: true }]}> + {readonlyField.auto_update ? ( + <Typography.Text bold={true}> + {formConfig.autoUpdate ? '开启' : '关闭'} + </Typography.Text> + ) : ( + <Space> + <Switch + disabled={disabled.auto_update} + checked={formConfig.autoUpdate} + onChange={(value) => { + setFormConfig((prevState) => ({ + ...prevState, + autoUpdate: value, + })); + form.setFieldValue('model_id', undefined); + form.setFieldValue('model_group_id', undefined); + }} + /> + <TitleWithIcon + title="开启后,当所选择的模型训练作业产生新模型时,将自动更新到本服务" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </Space> + )} + </Form.Item> + {formConfig.autoUpdate ? ( + <Form.Item + field="model_group_id" + label="模型训练作业" + rules={[{ required: true, message: '必填项' }]} + > + {readonlyField.model_group_id ? ( + <Spin loading={selectedModelGroupQuery.isFetching}> + <Typography.Text bold={true}>{selectedModelGroup?.name} </Typography.Text> + </Spin> + ) : ( + <Select + disabled={disabled.model_group_id} + placeholder="请选择模型训练作业" + loading={modelJobGroupListQuery.isFetching} + showSearch={true} + filterOption={(inputValue, option) => + option.props.children.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0 + } + options={(modelJobGroupList ?? []).map((item) => ({ + label: item.name, + value: item.id, + }))} + onChange={(value) => { + setModelChange(value !== selectedModelGroup?.id); + }} + /> + )} + </Form.Item> + ) : ( + <Form.Item + field="model_id" + label={'模型'} + rules={[{ required: true, message: '必填项' }]} + > + {readonlyField.model_id ? ( + <Typography.Text bold={true}>{selectedModel?.name} </Typography.Text> + ) : ( + <Select + disabled={disabled.model_id} + placeholder={'请选择模型'} + loading={modelListQuery.isFetching} + showSearch={true} + filterOption={(inputValue, option) => + option.props.children.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0 + } + options={(modelListQuery.data?.data ?? []).map((item) => ({ + label: item.name, + value: item.id, + }))} + onChange={(value) => { + setModelChange(value !== selectedModel?.id); + }} + /> + )} + </Form.Item> + )} + + {!formConfig.thirdServing && ( + <Form.Item label="实例规格" required={true} field="resource"> + <InputGroup columns={getResourceFormColumns()} disableAddAndDelete={true} /> + </Form.Item> + )} + <Row> + <Col offset={2} span={12}> + <div className={styles.button_group}> + <Button + className={styles.styled_submit_button} + type="primary" + loading={loading} + htmlType="submit" + > + {!formConfig.isHorizontalModel && + !isReceiver && + !formConfig.thirdServing && + (!isEdit || modelChange) + ? 
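
The submit-button label logic deserves a gloss: on the vertical-federation initiator side, a create (or an edit that swaps the model) is actually a cross-party request, hence「发送至对侧」; every other case is a plain local「确认」. Restated as a condensed predicate over the same component state:

```tsx
// Condensed restatement of the ternary around this button label.
const sendsToPeer =
  !formConfig.isHorizontalModel && // vertical federation only
  !isReceiver &&                   // only on the initiating side
  !formConfig.thirdServing &&      // not a third-party deployment
  (!isEdit || modelChange);        // creating, or editing onto a different model
// label = sendsToPeer ? '发送至对侧' : '确认';
```
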
'发送至对侧' + : '确认'} + </Button> + <ButtonWithModalConfirm + onClick={onCancelClick} + isShowConfirmModal={isFormValueChanged} + > + 取消 + </ButtonWithModalConfirm> + </div> + </Col> + </Row> + </Form> + </div> + )} + </SharedPageLayout> + </Spin> + ); + + function onCancelClick() { + history.goBack(); + } + async function onFinish(values: FormValues) { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + const { name, comment, model_id, model_group_id } = values; + setLoading(true); + const { cpu, memory, replicas } = values.resource?.[0] ?? { + cpu: '1', + memory: '1', + replicas: 1, + }; + const payload_new = { target_psm: values.psm }; + + // note: creating service at receiver side is also use the patch method + if (isEdit || isReceiver) { + const payload = { + comment: comment, + resource: formConfig.thirdServing + ? undefined + : { + cpu: cpu.toString(), + memory: `${memory}Gi`, + replicas, + }, + model_id: modelChange ? model_id : undefined, + model_group_id: modelChange ? model_group_id : undefined, + }; + + try { + await updateModelServing_new(projectId, id, payload); + Message.success(isReceiver ? '创建成功' : '修改成功'); + history.push('/model-serving'); + } catch (error: any) { + let msg = error.message; + if (modelGroupIsEmptyRegx.test(msg)) { + msg = '该模型训练作业暂无训练成功的模型'; + } + Message.error(msg); + } + } else { + const payload = { + name: name, + comment: comment, + resource: formConfig.thirdServing + ? undefined + : { + cpu: cpu.toString(), + memory: `${memory}Gi`, + replicas, + }, + is_local: formConfig.isHorizontalModel, + model_id: model_id, + model_group_id: model_group_id, + remote_platform: formConfig.thirdServing + ? { + platform: userType?.[0].platform, + payload: JSON.stringify(payload_new), + } + : undefined, + }; + try { + await createModelServing_new(projectId!, payload); + Message.success('创建成功'); + history.push('/model-serving'); + } catch (error: any) { + const { message: errMsg } = error; + let msg = error.message; + + if (/participant.+code\s*=\s*3/i.test(errMsg)) { + msg = '合作伙伴侧在线服务名称已存在'; + } else if (/duplicate\s*entry/i.test(msg)) { + msg = '在线服务名称已存在'; + } else if (modelGroupIsEmptyRegx.test(msg)) { + msg = '该模型训练作业暂无训练成功的模型'; + } + Message.error(msg); + } + } + setLoading(false); + } + + async function checkNameIsDuplicate(name: string) { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + try { + const res = await fetchModelServingList_new(projectId!, { + filter: filterExpressionGenerator( + { + name, + }, + FILTER_SERVING_OPERATOR_MAPPER, + ), + }); + + return res.data?.length > 0; + } catch (e) { + // 如果网络无法请求,就先当没有重名处理 + return false; + } + } + + function onFinishFailed() {} +}; + +type TPlainTextProps = { + value?: any; + valueFormat?: (val: any) => string; +}; +function PlainText(props: TPlainTextProps) { + const { value, valueFormat } = props; + return ( + <Typography.Text bold={true}> + {typeof valueFormat === 'function' ? 
valueFormat(value) : value} + </Typography.Text> + ); +} + +export default ModelServingForm; diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingList/index.module.less b/web_console_v2/client/src/views/ModelServing/ModelServingList/index.module.less new file mode 100644 index 000000000..878e3f6c5 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingList/index.module.less @@ -0,0 +1,4 @@ +.search_content{ + width: 280px; + margin-right: 12px; +} diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingList/index.tsx b/web_console_v2/client/src/views/ModelServing/ModelServingList/index.tsx new file mode 100644 index 000000000..98401b934 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingList/index.tsx @@ -0,0 +1,228 @@ +import React, { FC, useMemo } from 'react'; +import { useHistory } from 'react-router'; + +import { useQuery } from 'react-query'; +import { fetchModelServingList_new, deleteModelServing_new } from 'services/modelServing'; + +import { forceToRefreshQuery } from 'shared/queryClient'; +import { useGetCurrentProjectId } from 'hooks'; + +import { useUrlState, useTablePaginationWithUrlState } from 'hooks'; +import { TIME_INTERVAL } from 'shared/constants'; +import { expression2Filter } from 'shared/filter'; + +import { Button, Input, Message, Space } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout from 'components/SharedPageLayout'; +import Modal from 'components/Modal'; +import TodoPopover from 'components/TodoPopover'; +import { debounce } from 'lodash-es'; + +import ModelServingTable from '../ModelServingTable'; + +import { + updateServiceInstanceNum, + getTableFilterValue, + FILTER_SERVING_OPERATOR_MAPPER, +} from '../shared'; + +import { ModelServing, ModelServingState } from 'typings/modelServing'; +import { SortDirection, SorterResult } from '@arco-design/web-react/es/Table/interface'; + +import { filterExpressionGenerator } from 'views/Datasets/shared'; + +import styles from './index.module.less'; + +const { Search } = Input; + +const ModelServingList: FC = () => { + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const [urlState, setUrlState] = useUrlState<Record<string, string | undefined>>({ + filter: '', + order_by: '', + }); + const { paginationProps } = useTablePaginationWithUrlState(); + + const queryKey = ['fetchModelServingList', urlState.filter, urlState.order_by, projectId]; + + const listQuery = useQuery( + queryKey, + () => { + if (!projectId) { + Message.info('请选择工作区'); + return; + } + return fetchModelServingList_new(projectId!, { + filter: urlState.filter, + order_by: urlState.order_by || 'created_at desc', + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, // auto refresh every 1.5 min + }, + ); + + const tableDataSource = useMemo(() => { + if (!listQuery.data) { + return []; + } + return (listQuery.data.data || []).filter( + (item) => + item.status !== ModelServingState.WAITING_CONFIG && + (!urlState.status || urlState.status.includes(item.status)) && + (!urlState.is_local || urlState.is_local.includes(item.is_local)), + ); + }, [listQuery.data, urlState]); + + const sorterProps = useMemo<Record<string, SortDirection>>(() => { + if (urlState.order_by) { + const order = urlState.order_by?.split(' ') || []; + return { + [order[0]]: order?.[1] === 'asc' ? 
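
Both this list and the detail page keep sorting in the URL as `order_by=<field> <asc|desc>` and translate it back to Arco's `SortDirection`; the round trip, extracted as a standalone sketch (the helper name is invented, the logic mirrors the `sorterProps` memos):

```tsx
// Hypothetical helper equivalent to the sorterProps memos in these views.
function orderByToSorter(orderBy?: string): Record<string, 'ascend' | 'descend'> {
  if (!orderBy) return {};
  const [field, direction] = orderBy.split(' ');
  return { [field]: direction === 'asc' ? 'ascend' : 'descend' };
}

orderByToSorter('created_at desc'); // -> { created_at: 'descend' }
orderByToSorter(undefined);         // -> {}
```
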
'ascend' : 'descend', + }; + } + + return {}; + }, [urlState.order_by]); + + const pagination = useMemo(() => { + return tableDataSource.length <= paginationProps.pageSize + ? false + : { + ...paginationProps, + total: tableDataSource.length, + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [tableDataSource]); + + return ( + <SharedPageLayout title="在线服务"> + <GridRow justify="space-between" align="center"> + <Button className={'custom-operation-button'} type="primary" onClick={onCreateClick}> + 创建服务 + </Button> + + <Space> + <Search + className={`custom-input ${styles.search_content}`} + allowClear + defaultValue={expression2Filter(urlState.filter).keyword} + onChange={debounce(onSearch, 300)} + placeholder="请输入名称查询" + /> + <TodoPopover.ModelServing /> + </Space> + </GridRow> + <ModelServingTable + filter={{ + is_local: getTableFilterValue(urlState.is_local), + status: getTableFilterValue(urlState.status), + }} + sorter={sorterProps} + loading={listQuery.isFetching} + dataSource={tableDataSource} + total={listQuery.data?.page_meta?.total_items ?? undefined} + onRowClick={onRowClick} + onEditClick={onEditClick} + onScaleClick={onScaleClick} + onDeleteClick={onDeleteClick} + onFilterChange={onFilter} + onSortChange={onSort} + pagination={pagination} + /> + </SharedPageLayout> + ); + + function onSearch( + value: string, + event?: + | React.ChangeEvent<HTMLInputElement> + | React.MouseEvent<HTMLElement> + | React.KeyboardEvent<HTMLInputElement>, + ) { + const filters = expression2Filter(urlState.filter); + filters.keyword = value; + if (!value) { + setUrlState((prevState) => ({ + ...prevState, + filter: filterExpressionGenerator(filters, FILTER_SERVING_OPERATOR_MAPPER), + })); + return; + } + setUrlState((prevState) => ({ + ...prevState, + filter: filterExpressionGenerator(filters, FILTER_SERVING_OPERATOR_MAPPER), + page: 1, + })); + } + + function onCreateClick() { + history.push('/model-serving/create'); + } + function onRowClick(record: ModelServing) { + history.push(`/model-serving/detail/${record.id}`); + } + async function onEditClick(record: ModelServing) { + history.push(`/model-serving/edit/${record.id}`); + } + async function onScaleClick(record: ModelServing) { + updateServiceInstanceNum(record, () => { + forceToRefreshQuery([...queryKey]); + }); + } + function onDeleteClick(record: ModelServing) { + if (!projectId) { + Message.info('请选择工作区!'); + return; + } + Modal.delete({ + title: `确认要删除「${record.name}」?`, + content: '一旦删除,在线服务相关数据将无法复原,请谨慎操作', + onOk() { + deleteModelServing_new(projectId!, record.id) + .then(() => { + Message.success('删除成功'); + listQuery.refetch(); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + } + function onFilter(filter: Record<string, Array<number | string>>) { + const filterParams: Record<string, any> = {}; + + for (const key in filter) { + filterParams[key] = booleanToString(filter[key]?.[0]); + } + + setUrlState({ + is_local: filterParams.is_local, + status: filterParams.status, + }); + } + function onSort(sorter: SorterResult) { + const { field, direction: order } = sorter; + if (field && order) { + setUrlState({ + order_by: `${field as string} ${order === 'ascend' ? 'asc' : 'desc'}`, + }); + } else { + setUrlState({ + order_by: undefined, + }); + } + } +}; + +function booleanToString(val: any) { + if (typeof val !== 'boolean') { + return val; + } + return val ?
'true' : 'false'; +} + +export default ModelServingList; diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingTable.module.less b/web_console_v2/client/src/views/ModelServing/ModelServingTable.module.less new file mode 100644 index 000000000..0b7c9b26b --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingTable.module.less @@ -0,0 +1,23 @@ +.edit_text_container{ + display: inline-block; + max-width: 100%; + line-height: inherit; + font-size: 13px; + color: var(--primaryColor); + cursor: pointer; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + vertical-align: top; + &[data-is-disabled='true'] { + color: rgb(var(--gray-6)); + cursor: not-allowed; + } +} +.tag_container{ + &.arco-tag { + border: none; + color: rgb(var(--gray-10)); + } +} + diff --git a/web_console_v2/client/src/views/ModelServing/ModelServingTable.tsx b/web_console_v2/client/src/views/ModelServing/ModelServingTable.tsx new file mode 100644 index 000000000..398bd36b8 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ModelServingTable.tsx @@ -0,0 +1,269 @@ +import React, { FC } from 'react'; + +import { formatTimestamp } from 'shared/date'; +import { modelDirectionTypeToTextMap, getDotState, modelServingStateToTextMap } from './shared'; + +import Table from 'components/Table'; +import MoreActions from 'components/MoreActions'; +import StateIndicator from 'components/StateIndicator'; + +import { Tag } from '@arco-design/web-react'; +import { ModelServing, ModelDirectionType, ModelServingState } from 'typings/modelServing'; + +import { TableColumnProps, TableProps, Space } from '@arco-design/web-react'; +import { SorterResult, SortDirection } from '@arco-design/web-react/es/Table/interface'; + +import styles from './ModelServingTable.module.less'; + +type FilterValue = string[]; + +type ColumnsGetterOptions = { + filter?: Record<string, FilterValue>; + sorter?: Record<string, SortDirection>; + onScaleClick?: any; + onDeleteClick?: any; + onEditClick?: any; +}; + +const getTableColumns = (options: ColumnsGetterOptions) => { + const cols: TableColumnProps[] = [ + { + title: '名称', + dataIndex: 'name', + key: 'name', + width: 200, + ellipsis: true, + render: (name) => { + return <span className={styles.edit_text_container}>{name}</span>; + }, + }, + { + title: '状态', + dataIndex: 'status', + key: 'status', + width: 130, + filterMultiple: false, + filteredValue: options?.filter?.status || [], + filters: [ + { + text: modelServingStateToTextMap.AVAILABLE, + value: ModelServingState.AVAILABLE, + }, + { + text: modelServingStateToTextMap.LOADING, + value: ModelServingState.LOADING, + }, + { + text: modelServingStateToTextMap.UNKNOWN, + value: ModelServingState.UNKNOWN, + }, + { + text: modelServingStateToTextMap.UNLOADING, + value: ModelServingState.UNLOADING, + }, + { + text: modelServingStateToTextMap.PENDING_ACCEPT, + value: ModelServingState.PENDING_ACCEPT, + }, + ], + render: (_, record) => { + return <StateIndicator {...getDotState(record)} />; + }, + }, + { + title: '模型类型', + dataIndex: 'is_local', + key: 'is_local', + width: 150, + filteredValue: options?.filter?.is_local || [], + filters: [ + { + text: modelDirectionTypeToTextMap.horizontal, + value: 'true', + }, + { + text: modelDirectionTypeToTextMap.vertical, + value: 'false', + }, + ], + filterMultiple: false, + render: (type: boolean) => { + const modelType = type ? 
ModelDirectionType.HORIZONTAL : ModelDirectionType.VERTICAL; + return ( + <Tag + className={styles.tag_container} + style={{ + background: + modelType === ModelDirectionType.HORIZONTAL + ? 'rgb(var(--orange-1))' + : 'rgb(var(--blue-1))', + }} + > + {modelDirectionTypeToTextMap[modelType]} + </Tag> + ); + }, + }, + { + title: '实例数量', + dataIndex: 'instance_num_status', + key: 'instance_num_status', + width: 100, + render: (value) => value || '-', + }, + { + title: '调用权限', + dataIndex: 'support_inference', + key: 'support_inference', + width: 100, + render(val: boolean) { + return ( + <Tag style={{ background: val ? 'rgb(var(--green-1))' : 'rgb(var(--gray-2))' }}> + {val ? '可调用' : '不可调用'} + </Tag> + ); + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + key: 'created_at', + width: 200, + sorter: true, + sortOrder: options.sorter?.created_at || undefined, + render: (date: number) => formatTimestamp(date), + }, + { + title: '操作', + dataIndex: 'operation', + key: 'operation', + fixed: 'right', + width: 120, + render: (_, record) => { + const isDisabled = + record.status !== ModelServingState.AVAILABLE || record.resource === undefined; + // TODO: restore once the backend supports manually updating the model + // const editIsDisabled = ![ModelServingState.AVAILABLE, ModelServingState.LOADING].includes( + // record.status, + // ); + + return ( + <Space> + <span + className={styles.edit_text_container} + data-is-disabled={isDisabled} + onClick={(event) => { + event.stopPropagation(); + if (!isDisabled) { + options?.onScaleClick(record); + } + }} + > + 扩缩容 + </span> + <MoreActions + actionList={[ + { + label: '编辑', + // TODO: restore once the backend supports manually updating the model + //disabled: editIsDisabled, + onClick: () => { + options?.onEditClick(record); + }, + }, + { + label: '删除', + onClick: () => { + options?.onDeleteClick(record); + }, + danger: true, + }, + ]} + /> + </Space> + ); + }, + }, + ]; + + return cols; +}; + +interface Props extends TableProps<ModelServing> { + total?: number; + loading: boolean; + dataSource: any[]; + filter?: Record<string, FilterValue>; + sorter?: Record<string, SortDirection>; + onRowClick?: (record: ModelServing) => void; + onEditClick?: (record: ModelServing) => void; + onScaleClick?: (record: ModelServing) => void; + onDeleteClick?: (record: ModelServing) => void; + onShowSizeChange?: (current: number, size: number) => void; + onPageChange?: (page: number, pageSize: number) => void; + onSortChange?: (sorter: SorterResult) => void; + onFilterChange?: (filter: Record<string, any>) => void; +} + +const ModelServingTable: FC<Props> = ({ + total, + loading, + filter, + sorter, + dataSource, + onRowClick, + onEditClick, + onScaleClick, + onDeleteClick, + onShowSizeChange, + onPageChange, + onSortChange, + onFilterChange, + ...restProps +}) => { + return ( + <> + <Table<ModelServing> + className="customFilterIconTable" + rowKey="id" + scroll={{ x: '100%' }} + loading={loading} + total={total} + data={dataSource} + columns={getTableColumns({ + filter, + sorter, + onEditClick, + onScaleClick, + onDeleteClick, + })} + onChange={(pagination, sorter, filters, extra) => { + const { action } = extra; + + switch (action) { + case 'paginate': + onShowSizeChange?.(pagination.current as number, pagination.pageSize as number); + break; + case 'sort': + onSortChange?.(sorter as SorterResult); + break; + case 'filter': + onFilterChange?.(filters); + break; + default: + } + }} + onShowSizeChange={onShowSizeChange} + onPageChange={onPageChange} + onRow={(record) => ({ + onClick: () => { + onRowClick?.(record); + }, + })} + {...restProps} + /> + </> + ); +}; + 
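+// Usage sketch (illustrative only; it mirrors how ModelServingList above wires this table): +// the component is fully controlled — the parent owns filter/sorter/pagination state +// (kept in the URL there) and reacts to the change callbacks, e.g. +//   <ModelServingTable +//     loading={listQuery.isFetching} +//     dataSource={tableDataSource} +//     filter={{ status: getTableFilterValue(urlState.status) }} +//     sorter={sorterProps} +//     onFilterChange={onFilter} +//     onSortChange={onSort} +//     pagination={pagination} +//   />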
+export default ModelServingTable; diff --git a/web_console_v2/client/src/views/ModelServing/ServiceEditModal.module.less b/web_console_v2/client/src/views/ModelServing/ServiceEditModal.module.less new file mode 100644 index 000000000..43c34f485 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ServiceEditModal.module.less @@ -0,0 +1,17 @@ +.key_container{ + color: rgb(var(--gray-10)); +} +.value_container{ + color: rgb(var(--gray-10)); +} +.row_container{ + margin-bottom: 20px; + text-align: left; +} + +.label_col_container{ + text-align: right; +} +.text_area_container{ + font-size: 12px; +} diff --git a/web_console_v2/client/src/views/ModelServing/ServiceEditModal.tsx b/web_console_v2/client/src/views/ModelServing/ServiceEditModal.tsx new file mode 100644 index 000000000..860ea6399 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ServiceEditModal.tsx @@ -0,0 +1,88 @@ +import React, { FC } from 'react'; +import { Input, Grid, Button } from '@arco-design/web-react'; +import { ModelServing } from 'typings/modelServing'; +import Modal from 'components/Modal'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; + +import styles from './ServiceEditModal.module.less'; + +export interface Props { + service: ModelServing; + onChange: (params: Partial<ModelServing>) => void; +} + +const { Row, Col } = Grid; +const { TextArea } = Input; + +const ServiceEditModal: FC<Props> = ({ service, onChange }) => { + return ( + <div> + <Row className={styles.row_container} gutter={16}> + <Col className={styles.label_col_container} span={6}> + <span className={styles.key_container}>在线服务名称</span> + </Col> + <Col span={18}> + <span className={styles.value_container}>{service.name}</span> + </Col> + </Row> + <Row gutter={16}> + <Col className={styles.label_col_container} span={6}> + <span className={styles.key_container}>在线服务描述</span> + </Col> + <Col span={18}> + <TextArea + className={styles.text_area_container} + placeholder={'请输入'} + defaultValue={service.comment} + rows={3} + onChange={handleChange} + /> + </Col> + </Row> + </div> + ); + + function handleChange(comment: string) { + onChange({ + comment, + }); + } +}; + +export default ServiceEditModal; +export function editService(service: ModelServing, onOk: (params: Partial<ModelServing>) => void) { + let serviceParams: Partial<ModelServing> = {}; + serviceParams = { resource: service.resource, comment: service.comment }; + const modal = Modal.confirm({ + icon: null, + title: '在线服务信息', + content: ( + <ServiceEditModal + service={service} + onChange={(params) => { + serviceParams = { ...serviceParams, ...params }; + }} + /> + ), + footer: [ + <ButtonWithPopconfirm + key="back" + buttonText={'取消'} + onConfirm={() => { + modal.close(); + }} + />, + <Button + style={{ marginLeft: 12 }} + key="submit" + type="primary" + onClick={async () => { + await onOk(serviceParams); + modal.close(); + }} + > + 提交 + </Button>, + ], + }); +} diff --git a/web_console_v2/client/src/views/ModelServing/ServiceInstanceScaleDrawer.module.less b/web_console_v2/client/src/views/ModelServing/ServiceInstanceScaleDrawer.module.less new file mode 100644 index 000000000..1e36a2244 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ServiceInstanceScaleDrawer.module.less @@ -0,0 +1,10 @@ +.div_container{ + font-size: 12px; + color: rgb(var(--gray-8)); +} +.title_container{ + margin-bottom: 5px; +} +.text_container{ + color: rgb(var(--gray-10)); +} diff --git 
a/web_console_v2/client/src/views/ModelServing/ServiceInstanceScaleDrawer.tsx b/web_console_v2/client/src/views/ModelServing/ServiceInstanceScaleDrawer.tsx new file mode 100644 index 000000000..3dac7ea3f --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/ServiceInstanceScaleDrawer.tsx @@ -0,0 +1,60 @@ +import React, { FC } from 'react'; +import { Spin } from '@arco-design/web-react'; +import { ModelServing } from 'typings/modelServing'; +import InstanceNumberInput from './InstanceNumberInput'; +import drawerConfirm from 'components/DrawerConfirm'; + +import styles from './ServiceInstanceScaleDrawer.module.less'; + +export type TProps = { + service?: ModelServing; + onChange?: (instanceNum: number) => void; +}; + +const ModelServingScaleDrawer: FC<TProps> = ({ service, onChange }) => { + return ( + <div> + {!service ? ( + <Spin loading={true} /> + ) : ( + <> + <div className={styles.div_container}> + <p className={styles.title_container}>实例规格</p> + <p className={styles.text_container}> + {service?.resource?.cpu} + {service?.resource?.memory} + </p> + </div> + <div className={styles.div_container} style={{ marginTop: 20 }}> + <p className={styles.title_container}>实例数</p> + <InstanceNumberInput + min={1} + max={100} + precision={0} + defaultValue={service?.resource?.replicas} + onChange={onChange} + /> + </div> + </> + )} + </div> + ); +}; + +export function handleScaleEdit( + service: ModelServing, + onUpdate: (instanceNum: number) => Promise<any>, +) { + drawerConfirm({ + title: '扩缩容', + okText: '确认', + cancelText: '取消', + onOk: (instanceNum: number) => { + return onUpdate(instanceNum); + }, + renderContent(setOkParams) { + return <ModelServingScaleDrawer service={service} onChange={setOkParams} />; + }, + }); +} + +export default ModelServingScaleDrawer; diff --git a/web_console_v2/client/src/views/ModelServing/UserGuideTab/index.module.less b/web_console_v2/client/src/views/ModelServing/UserGuideTab/index.module.less new file mode 100644 index 000000000..8f5043b8c --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/UserGuideTab/index.module.less @@ -0,0 +1,37 @@ + +.div_container{ + flex: 1; + margin-bottom: 29px; +} + +.content_block{ + margin-right: 4px; +} + +.label_container{ + margin-bottom: 5px; + color: rgb(var(--gray-8)); +} + +.copy_icon_container{ + font-size: 14px; + &:hover { + color: #1664ff; + } +} + +.open_signature_btn{ + color: rgb(var(--arcoblue-6)) !important; +} + +.info_item_container{ + --height: 32px; + box-sizing: border-box; + padding: 0 10px; + border-radius: 2px; + border: 1px solid var(--lineColor); + height: var(--height); + font-size: 12px; + line-height: var(--height); + background: rgb(var(--gray-2)); +} diff --git a/web_console_v2/client/src/views/ModelServing/UserGuideTab/index.tsx b/web_console_v2/client/src/views/ModelServing/UserGuideTab/index.tsx new file mode 100644 index 000000000..5575dc0fa --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/UserGuideTab/index.tsx @@ -0,0 +1,77 @@ +import React, { FC, useState } from 'react'; +import { Button, Drawer, Space } from '@arco-design/web-react'; +import ClickToCopy from 'components/ClickToCopy'; +import { Copy } from 'components/IconPark'; +import CodeEditor from 'components/CodeEditor'; +import { formatJSONValue } from 'shared/helpers'; +import { CONSTANTS } from 'shared/constants'; + +import { ModelServing } from 'typings/modelServing'; + +import styles from './index.module.less'; + +type Props = { + data?: ModelServing; + isShowLabel?: boolean; + 
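// when true (the default), the Signature entry and its read-only drawer below are rendered +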
isShowSignature?: boolean; +}; + +const UserGuideTab: FC<Props> = ({ data, isShowLabel, isShowSignature = true }) => { + const [drawerVisible, setDrawerVisible] = useState<boolean>(false); + + // TODO: user guide field + const feature = CONSTANTS.EMPTY_PLACEHOLDER; + const url = data?.endpoint ?? CONSTANTS.EMPTY_PLACEHOLDER; + + return ( + <div className={styles.div_container}> + <Space size="medium"> + <div className={styles.content_block}> + <p className={styles.label_container}>访问地址</p> + <div className={styles.info_item_container}> + <ClickToCopy text={String(url)}> + <Space size="medium"> + <span>{url}</span> + <Copy className={styles.copy_icon_container} /> + </Space> + </ClickToCopy> + </div> + </div> + {isShowSignature && ( + <div className={styles.content_block}> + <p className={styles.label_container}>Signature</p> + <Button className={styles.open_signature_btn} onClick={toggleDrawerVisible}> + 查看 + </Button> + </div> + )} + {isShowLabel && ( + <div className={styles.content_block}> + <p className={styles.label_container}>本侧特征</p> + <p className={styles.label_container}>{feature}</p> + </div> + )} + </Space> + <Drawer + width={720} + visible={drawerVisible} + title={'Signature'} + closable={true} + onCancel={toggleDrawerVisible} + > + <CodeEditor + language="json" + isReadOnly={true} + theme="grey" + value={data?.signature ? formatJSONValue(data.signature) : ''} + /> + </Drawer> + </div> + ); + + function toggleDrawerVisible() { + setDrawerVisible(!drawerVisible); + } +}; + +export default UserGuideTab; diff --git a/web_console_v2/client/src/views/ModelServing/index.tsx b/web_console_v2/client/src/views/ModelServing/index.tsx new file mode 100644 index 000000000..65ad2ce35 --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/index.tsx @@ -0,0 +1,25 @@ +import ErrorBoundary from 'components/ErrorBoundary'; +import React, { FC } from 'react'; +import { Route, Switch } from 'react-router-dom'; + +import ModelServingList from 'views/ModelServing/ModelServingList'; +import ModelServingForm from 'views/ModelServing/ModelServingForm'; +import ModelServingDetail from 'views/ModelServing/ModelServingDetail'; + +const ModelServing: FC = () => { + return ( + <ErrorBoundary> + <Switch> + <Route path="/model-serving" exact component={ModelServingList} /> + <Route + path="/model-serving/:action(create|edit)/:role(sender|receiver)?/:id?" + exact + component={ModelServingForm} + /> + <Route path="/model-serving/detail/:id/:tabType?" 
exact component={ModelServingDetail} /> + </Switch> + </ErrorBoundary> + ); +}; + +export default ModelServing; diff --git a/web_console_v2/client/src/views/ModelServing/shared.ts b/web_console_v2/client/src/views/ModelServing/shared.ts new file mode 100644 index 000000000..907c6602b --- /dev/null +++ b/web_console_v2/client/src/views/ModelServing/shared.ts @@ -0,0 +1,141 @@ +/* istanbul ignore file */ + +import { Message } from '@arco-design/web-react'; +import { updateModelServing_new } from 'services/modelServing'; + +import { ModelDirectionType, ModelServing, ModelServingState } from 'typings/modelServing'; +import { handleScaleEdit } from './ServiceInstanceScaleDrawer'; +import { ActionItem, StateTypes } from 'components/StateIndicator'; +import { editService } from './ServiceEditModal'; +import { FilterOp } from 'typings/filter'; + +export const modelDirectionTypeToTextMap = { + [ModelDirectionType.HORIZONTAL]: '横向联邦', + [ModelDirectionType.VERTICAL]: '纵向联邦', +}; + +export const modelServingStateToTextMap = { + [ModelServingState.AVAILABLE]: '运行中', + [ModelServingState.LOADING]: '部署中', + [ModelServingState.UNLOADING]: '删除中', + [ModelServingState.UNKNOWN]: '异常', + [ModelServingState.PENDING_ACCEPT]: '待合作伙伴配置', + [ModelServingState.WAITING_CONFIG]: '待合作伙伴配置', + [ModelServingState.DELETED]: '异常', +}; + +// Open the drawer for editing the service instance count, handling both the UI and the business logic +export async function updateServiceInstanceNum(service: ModelServing, onUpdate: () => void) { + handleScaleEdit(service, async (instanceNum: number) => { + try { + await updateModelServing_new(service.project_id, service.id, { + model_type: undefined, // the backend does not support the model_type field yet + resource: { + ...service.resource, + replicas: instanceNum, + }, + }); + onUpdate(); + } catch (e) { + Message.error(e.message); + throw e; + } + }); +} + +export function getDotState( + modelServing: ModelServing, +): { type: StateTypes; text: string; tip?: string; actionList?: ActionItem[] } { + const text = modelServingStateToTextMap[modelServing.status] || '异常'; + + if (modelServing.status === ModelServingState.AVAILABLE) { + return { + text, + type: 'success', + }; + } + if (modelServing.status === ModelServingState.LOADING) { + return { + text, + type: 'processing', + }; + } + + if (modelServing.status === ModelServingState.UNLOADING) { + return { + text, + type: 'gold', + }; + } + + if (modelServing.status === ModelServingState.UNKNOWN) { + return { + text, + type: 'error', + }; + } + + if (modelServing.status === ModelServingState.PENDING_ACCEPT) { + return { + text, + type: 'pending_accept', + }; + } + + if (modelServing.status === ModelServingState.WAITING_CONFIG) { + return { + text, + type: 'pending_accept', + }; + } + + if (modelServing.status === ModelServingState.DELETED) { + return { + text, + type: 'deleted', + tip: '对侧已经删除', + }; + } + + return { + text, + type: 'error', + }; +} + +export function handleServiceEdit(record: ModelServing) { + return new Promise((resolve, reject) => { + editService(record, async (params: any) => { + try { + await updateModelServing_new(record.project_id, record.id, params); + Message.success('修改成功'); + resolve(params); + } catch (error) { + Message.error(error.message); + reject(error); + } + }); + }); +} + +export function getTableFilterValue(val: string): string[] { + if (typeof val === 'undefined') { + return []; + } + return [val]; +} + +export function cpuIsCpuM(cpu: string): boolean { + const regx = new RegExp('[0-9]+m$'); + return regx.test(cpu); +} + +export function memoryIsMemoryGi(memory: string): boolean { + const regx
= new RegExp('[0-9]+Gi$'); + return regx.test(memory); +} + +export const FILTER_SERVING_OPERATOR_MAPPER = { + name: FilterOp.EQUAL, + keyword: FilterOp.CONTAIN, +}; diff --git a/web_console_v2/client/src/views/OperationMaintenance/OperationList/JobDetailDrawer.module.less b/web_console_v2/client/src/views/OperationMaintenance/OperationList/JobDetailDrawer.module.less new file mode 100644 index 000000000..f21264222 --- /dev/null +++ b/web_console_v2/client/src/views/OperationMaintenance/OperationList/JobDetailDrawer.module.less @@ -0,0 +1,11 @@ +.drawer_content{ + flex: 1; +} +.header_container{ + display: flex; + justify-content: space-between; + align-items: center; +} +.copy_button{ + color: var(--textColor); +} diff --git a/web_console_v2/client/src/views/OperationMaintenance/OperationList/JobDetailDrawer.tsx b/web_console_v2/client/src/views/OperationMaintenance/OperationList/JobDetailDrawer.tsx new file mode 100644 index 000000000..211b7173c --- /dev/null +++ b/web_console_v2/client/src/views/OperationMaintenance/OperationList/JobDetailDrawer.tsx @@ -0,0 +1,85 @@ +import React, { useMemo } from 'react'; +import { Drawer, DrawerProps, Button, Message } from '@arco-design/web-react'; +import { useQuery } from 'react-query'; +import { fetchOperationDetail } from 'services/operation'; +import CodeEditor from 'components/CodeEditor'; +import { IconCopy } from '@arco-design/web-react/icon'; +import { copyToClipboard, formatJSONValue } from 'shared/helpers'; + +import styles from './JobDetailDrawer.module.less'; + +const ContentHeight = '119px'; // 55(drawer header height) + 16*2(content padding) + 32(header height) + +export interface Props extends DrawerProps { + data?: string; +} + +function JobDetailDrawer({ visible, data, title = '工作详情', ...restProps }: Props) { + const jobInfo = useQuery( + ['fetchOperationDetail', data], + () => { + if (data === '') return; + return fetchOperationDetail({ + job_name: data || '', + }); + }, + { + retry: 0, + cacheTime: 0, + refetchOnWindowFocus: false, + }, + ); + const job = useMemo(() => { + if (!jobInfo.data) return 'this job_name does not exist'; + return JSON.stringify(jobInfo.data); + }, [jobInfo]); + + function renderCodeEditorLayout() { + return ( + <> + <div className={styles.header_container}> + <Button + className={styles.copy_button} + icon={<IconCopy />} + onClick={onCopyClick} + type="text" + > + 复制 + </Button> + </div> + <CodeEditor + language="json" + isReadOnly={true} + theme="grey" + height={`calc(100vh - ${ContentHeight})`} + value={formatJSONValue(job ?? '')} + /> + </> + ); + } + + return ( + <Drawer + placement="right" + title={title} + closable={true} + width="50%" + visible={visible} + unmountOnExit + {...restProps} + > + <div className={styles.drawer_content}>{renderCodeEditorLayout()}</div> + </Drawer> + ); + + function onCopyClick() { + const isOK = copyToClipboard(formatJSONValue(job ??
'')); + if (isOK) { + Message.success('复制成功'); + } else { + Message.error('复制失败'); + } + } +} + +export default JobDetailDrawer; diff --git a/web_console_v2/client/src/views/OperationMaintenance/OperationList/index.module.less b/web_console_v2/client/src/views/OperationMaintenance/OperationList/index.module.less new file mode 100644 index 000000000..fed4f562b --- /dev/null +++ b/web_console_v2/client/src/views/OperationMaintenance/OperationList/index.module.less @@ -0,0 +1,14 @@ +.div_container{ + display: flex; + justify-content: space-between; + flex-wrap: wrap; + width: 100%; + padding: 20px 20px 0px 20px; +} +.form_container{ + width: 35%; + padding: 20px; +} +.table_container{ + width: 60%; +} diff --git a/web_console_v2/client/src/views/OperationMaintenance/OperationList/index.tsx b/web_console_v2/client/src/views/OperationMaintenance/OperationList/index.tsx new file mode 100644 index 000000000..9b22838b6 --- /dev/null +++ b/web_console_v2/client/src/views/OperationMaintenance/OperationList/index.tsx @@ -0,0 +1,271 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Form, Input, Select, Button, Table, Message, Space } from '@arco-design/web-react'; +import { Role, JobGroupFetchPayload, JobItem } from 'typings/operation'; +import { useQuery } from 'react-query'; +import { fetchOperationList } from 'services/operation'; +import { fetchProjectList } from 'services/project'; +import SharedPageLayout from 'components/SharedPageLayout'; +import JobDetailDrawer from './JobDetailDrawer'; + +import styles from './index.module.less'; + +const FormItem = Form.Item; + +const initialFormValues: Partial<JobGroupFetchPayload> = { + role: undefined, + name_prefix: '', + project_name: undefined, + e2e_image_uri: '', + fedlearner_image_uri: '', + platform_endpoint: undefined, +}; + +function equals(x: any, y: any) { + const f1 = x instanceof Object; + const f2 = y instanceof Object; + if (!f1 || !f2) { + return x === y; + } + if (Object.keys(x).length !== Object.keys(y).length) { + return false; + } + const newX = Object.keys(x); + for (let p in newX) { + p = newX[p]; + const a = x[p] instanceof Object; + const b = y[p] instanceof Object; + if (a && b) { + if (!equals(x[p], y[p])) return false; // propagate mismatches found in nested objects + } else if (x[p] !== y[p]) { + return false; + } + } + return true; +} + +const OperationList: FC = () => { + const [formInstance] = Form.useForm<JobGroupFetchPayload>(); + const [formParams, setFormParams] = useState<Partial<JobGroupFetchPayload>>(initialFormValues); + const [isShowJobDetailDrawer, setIsShowJobDetailDrawer] = useState(false); + const [jobName, setJobName] = useState(''); + const columns = [ + { + title: 'K8s Job名称', + dataIndex: 'job_name', + key: 'job_name', + }, + { + title: '测试类型', + dataIndex: 'job_type', + key: 'job_type', + }, + { + title: '测试状态', + key: 'operation', + render: (col: any, record: JobItem) => { + return ( + <Space> + <Button type="text" onClick={() => onCheck(record)}> + 查看 + </Button> + </Space> + ); + }, + }, + ]; + const roleOptions = [ + { + label: Role.COORDINATOR, + value: Role.COORDINATOR, + }, + { + label: Role.PARTICIPANT, + value: Role.PARTICIPANT, + }, + ]; + + const listQuery = useQuery( + ['fetchOperationList', formParams], + () => { + const flag = equals(formParams, initialFormValues); + if (flag) return; + return fetchOperationList(formParams); + }, + { + retry: 0, + cacheTime: 0, + refetchOnWindowFocus: false, + onError(e: any) { + if (e.code === 400 || /already\s*exist/.test(e.message)) { + Message.error('工作已存在,请更改name_prefix字段'); + } else { +
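// any other failure: surface the backend-provided error message as-is +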
Message.error(e.message); + } + }, + }, + ); + const projectList = useQuery(['fetchProjectList'], () => { + return fetchProjectList(); + }); + const projectOptions = useMemo(() => { + if (!projectList.data?.data) return []; + const tempData: Array<{ label: string; value: string }> = []; + projectList.data.data.forEach((item) => { + const temp = { + label: item.name, + value: item.name, + }; + tempData.push(temp); + }); + return tempData; + }, [projectList]); + + const list = useMemo(() => { + if (!listQuery.data?.data) { + return []; + } + const list = listQuery.data.data; + return list; + }, [listQuery.data]); + + async function submitForm() { + const value = formInstance.getFieldsValue(); + const flag = equals(value, formParams); + try { + await formInstance.validate(); + if (flag) { + Message.error('工作已存在,请更改name_prefix字段'); + } else { + setFormParams(value); + } + } catch (e) { + Message.error('校验不通过'); + } + } + + function onCheck(record: JobItem) { + setJobName(record.job_name); + setIsShowJobDetailDrawer(true); + } + + return ( + <SharedPageLayout title={'基础功能测试'}> + <div className={styles.div_container}> + <div className={styles.form_container}> + <Form + initialValues={initialFormValues} + labelCol={{ span: 10 }} + wrapperCol={{ span: 14 }} + form={formInstance} + > + <FormItem + label="role" + field="role" + required + rules={[ + { + type: 'string', + required: true, + message: '请选择role', + }, + ]} + > + <Select placeholder="please select" options={roleOptions} allowClear /> + </FormItem> + <FormItem + label="name_prefix" + field="name_prefix" + required + rules={[ + { + type: 'string', + required: true, + min: 5, + message: '请输入name_prefix,最少5个字符', + }, + ]} + > + <Input placeholder="please enter name_prefix, minimum 5 characters" /> + </FormItem> + <FormItem + label="project_name" + field="project_name" + required + rules={[ + { + type: 'string', + required: true, + message: '请选择project_name', + }, + ]} + > + <Select placeholder="please select" options={projectOptions} allowClear /> + </FormItem> + <FormItem + label="e2e_image_uri" + field="e2e_image_uri" + required + rules={[ + { + type: 'string', + required: true, + message: '请输入e2e_image_uri', + }, + ]} + > + <Input placeholder="please enter e2e_image_uri" /> + </FormItem> + <FormItem + label="fedlearner_image_uri" + field="fedlearner_image_uri" + required + rules={[ + { + type: 'string', + required: true, + message: '请输入fedlearner_image_uri', + }, + ]} + > + <Input placeholder="please enter fedlearner_image_uri" /> + </FormItem> + <FormItem label="platform_endpoint" field="platform_endpoint"> + <Input placeholder="please enter platform_endpoint" /> + </FormItem> + </Form> + <FormItem + wrapperCol={{ + offset: 10, + }} + > + <Button type="primary" style={{ marginRight: 24 }} onClick={submitForm}> + 提交 + </Button> + <Button + style={{ marginRight: 24 }} + onClick={() => { + formInstance.resetFields(); + }} + > + 重置 + </Button> + </FormItem> + </div> + <div className={styles.table_container}> + <Table columns={columns} data={list} style={{ height: '400px' }} rowKey="job_name" /> + </div> + <JobDetailDrawer + visible={isShowJobDetailDrawer} + data={jobName} + onCancel={onJobDetailDrawerClose} + /> + </div> + </SharedPageLayout> + ); + + function onJobDetailDrawerClose() { + setIsShowJobDetailDrawer(false); + } +}; + +export default OperationList; diff --git a/web_console_v2/client/src/views/OperationMaintenance/index.tsx b/web_console_v2/client/src/views/OperationMaintenance/index.tsx new file mode 100644 index 
000000000..b3c82c1db --- /dev/null +++ b/web_console_v2/client/src/views/OperationMaintenance/index.tsx @@ -0,0 +1,13 @@ +import React from 'react'; +import { Route } from 'react-router-dom'; +import OperationList from './OperationList'; + +function OperationMaintenancePage() { + return ( + <> + <Route path="/operation" exact component={OperationList} /> + </> + ); +} + +export default OperationMaintenancePage; diff --git a/web_console_v2/client/src/views/Partner/AddPartnerForm/index.module.less b/web_console_v2/client/src/views/Partner/AddPartnerForm/index.module.less new file mode 100644 index 000000000..7173d10cb --- /dev/null +++ b/web_console_v2/client/src/views/Partner/AddPartnerForm/index.module.less @@ -0,0 +1,39 @@ +.form_list{ + padding: 0px 20px; +} +.form_container{ + margin-top: 20px; +} + +.think_divider{ + && { + margin-bottom: 0; + } +} +.delete_button{ + && { + color: #ffffff; + background-color: rgb(var(--red-6)); + } + + &&&:hover { + color: #ffffff; + background-color: rgb(var(--red-6)); + } +} + +.row_container{ + color: var(--primaryColor); + cursor: pointer; + + &:hover { + color: rgb(var(--arcoblue-5)); + } +} + +.title_container{ + font-size: 14px; + font-weight: 500; + color: #000000; + margin-top: 40px; +} diff --git a/web_console_v2/client/src/views/Partner/AddPartnerForm/index.tsx b/web_console_v2/client/src/views/Partner/AddPartnerForm/index.tsx new file mode 100644 index 000000000..8a2e17484 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/AddPartnerForm/index.tsx @@ -0,0 +1,323 @@ +import React, { FC, useState } from 'react'; +import { + Form, + Input, + Divider, + Button, + Popconfirm, + Switch, + Select, + Grid, +} from '@arco-design/web-react'; +import { MAX_COMMENT_LENGTH, validParticipantNamePattern } from 'shared/validator'; +import GridRow from 'components/_base/GridRow'; +import { Plus } from 'components/IconPark'; +import { IconDelete } from '@arco-design/web-react/icon'; +import { Participant } from 'typings/participant'; +import FormLabel from 'components/FormLabel'; +import { DOMAIN_PREFIX, DOMAIN_SUFFIX } from 'shared/project'; +import { fetchDomainNameList } from 'services/participant'; +import { useQuery } from 'react-query'; +import { ParticipantType } from 'typings/participant'; + +import styles from './index.module.less'; + +const { Row } = Grid; + +interface Props { + onFinish: (value: any) => void; + data?: Participant; + isEdit: boolean; + /** + * @deprecated + * + * multi-add mode + */ + needAdd: boolean; +} + +const AddPartnerForm: FC<Props> = ({ onFinish, isEdit, needAdd, data }) => { + const [form] = Form.useForm(); + const [isManual, setIsManual] = useState(data?.extra?.is_manual_configured); + const [isLightClient, setIsLightClient] = useState(data?.type === ParticipantType.LIGHT_CLIENT); + + const domainNameListQuery = useQuery(['fetchDomainNameList'], () => fetchDomainNameList(), { + retry: 2, + refetchOnWindowFocus: false, + }); + + const dataInitial = + isEdit && data + ? [ + { + ...data, + domain_name: data.domain_name.slice(3, -4), + type: + (data.type ?? ParticipantType.PLATFORM) === ParticipantType.LIGHT_CLIENT + ? true + : false, + }, + ] + : [ + { + extra: { + is_manual_configured: false, + grpc_ssl_server_host: 'x-host', + }, + type: false, + }, + ]; + + const isShowManualInfo = isManual || isLightClient; + + return ( + <Form + form={form} + layout="vertical" + onSubmit={(value: any) => { + onFinish( + value.participants.map((item: any) => { + return isShowManualInfo + ? 
{ + ...item, + domain_name: `fl-${item.domain_name}.com`, + type: item.type ? ParticipantType.LIGHT_CLIENT : ParticipantType.PLATFORM, + } + : { + ...item, + type: item.type ? ParticipantType.LIGHT_CLIENT : ParticipantType.PLATFORM, + }; + }), + ); + }} + > + <Form.List field="participants" initialValue={dataInitial as any}> + {(fields, { add, remove }) => { + return ( + <> + <div className={styles.form_list} id="add-modal"> + {fields.map((field, index) => ( + <div className={styles.form_container} key={field.field + index}> + {needAdd && <p className={styles.title_container}>合作伙伴{index + 1}</p>} + <Form.Item + label="企业名称" + field={field.field + '.name'} + rules={[ + { + required: true, + message: '请输入企业名称', + }, + { + match: validParticipantNamePattern, + message: + '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input placeholder="请输入企业名称" /> + </Form.Item> + + <Form.Item + hidden={isEdit} + label="是否轻量级客户端" + field={field.field + '.type'} + triggerPropName="checked" + > + <Switch + onChange={(checked: boolean) => { + setIsLightClient(checked); + setIsManual(false); + // Reset field + const newData = [...form.getFieldValue('participants')]; + newData[field.key] = { + ...newData[field.key], + domain_name: undefined, + host: undefined, + port: undefined, + extra: { + ...newData[field.key].extra, + is_manual_configured: false, + }, + }; + form.setFieldsValue({ + participants: newData, + }); + }} + /> + </Form.Item> + + {!isEdit && !isLightClient && ( + <Form.Item + label={ + <FormLabel + label="是否手动配置" + tooltip="默认将使用平台配置,若您有手动配置需求且对配置内容了解,可点击进行手动配置" + /> + } + field={field.field + '.extra.is_manual_configured'} + triggerPropName="checked" + > + <Switch + onChange={(checked: boolean) => { + setIsManual(checked); + // Reset domain_name field + const newData = [...form.getFieldValue('participants')]; + newData[field.key] = { + ...newData[field.key], + domain_name: undefined, + }; + form.setFieldsValue({ + participants: newData, + }); + }} + /> + </Form.Item> + )} + + <Form.Item + label="泛域名" + field={field.field + '.domain_name'} + rules={ + isShowManualInfo + ? [ + { + required: true, + message: '请输入泛域名', + }, + { + match: /^[0-9a-z-]+$/g, + message: '只允许小写英文字母/中划线/数字,请检查', + }, + ] + : [ + { + required: true, + message: '请选择泛域名', + }, + ] + } + > + {isShowManualInfo ? ( + <Input + placeholder="请输入泛域名" + addBefore={DOMAIN_PREFIX} + addAfter={DOMAIN_SUFFIX} + /> + ) : ( + <Select + loading={domainNameListQuery.isFetching} + placeholder="请选择泛域名" + showSearch + allowClear + > + {(domainNameListQuery.data?.data ?? 
[]).map((item) => { + return ( + <Select.Option key={item.domain_name} value={item.domain_name}> + {item.domain_name} + </Select.Option> + ); + })} + </Select> + )} + </Form.Item> + + {isShowManualInfo && ( + <> + {!isLightClient && ( + <> + <Form.Item + label="主机号" + field={field.field + '.host'} + style={{ + width: '50%', + display: 'inline-block', + verticalAlign: 'top', + }} + rules={[ + { + required: true, + message: '请输入主机号', + }, + ]} + > + <Input placeholder="请输入主机号" /> + </Form.Item> + <Form.Item + label="端口号" + field={field.field + '.port'} + style={{ + marginLeft: '2%', + width: '48%', + display: 'inline-block', + verticalAlign: 'top', + }} + rules={[ + { + required: isManual, + message: '请输入端口号', + }, + { + match: /^[0-9]*$/g, + message: '端口号不合法,请检查', + }, + ]} + > + <Input placeholder="请输入端口号" /> + </Form.Item> + </> + )} + </> + )} + <Form.Item label="合作伙伴描述" field={field.field + '.comment'}> + <Input.TextArea + showWordLimit + maxLength={MAX_COMMENT_LENGTH} + placeholder="请为合作伙伴添加描述" + /> + </Form.Item> + + {needAdd && <Divider className={styles.think_divider} />} + {index !== 0 && ( + <Row justify="end"> + <Popconfirm + title="是否确定删除上面这个表单?" + onConfirm={() => remove(index)} + > + <Button className={styles.delete_button} icon={<IconDelete />}> + 删除 + </Button> + </Popconfirm> + </Row> + )} + </div> + ))} + </div> + <div style={{ padding: '0px 20px' }}> + <GridRow justify={needAdd ? 'space-between' : 'end'} align="center"> + {needAdd && ( + <GridRow + className={styles.row_container} + gap={5} + style={{ fontWeight: 500 }} + onClick={() => add()} + > + <Plus /> + <span>继续添加</span> + </GridRow> + )} + </GridRow> + <GridRow justify="center" style={{ marginTop: 48 }}> + <Button type="primary" htmlType="submit" style={{ padding: '0 74px' }}> + {isLightClient ? 
'提交' : '发送请求'} + </Button> + </GridRow> + </div> + </> + ); + }} + </Form.List> + </Form> + ); +}; + +export default AddPartnerForm; diff --git a/web_console_v2/client/src/views/Partner/CreatePartner/index.module.less b/web_console_v2/client/src/views/Partner/CreatePartner/index.module.less new file mode 100644 index 000000000..fb849a170 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/CreatePartner/index.module.less @@ -0,0 +1,5 @@ + +.div_container{ + overflow-x: hidden; + overflow-y: scroll; +} diff --git a/web_console_v2/client/src/views/Partner/CreatePartner/index.tsx b/web_console_v2/client/src/views/Partner/CreatePartner/index.tsx new file mode 100644 index 000000000..5644a5c8e --- /dev/null +++ b/web_console_v2/client/src/views/Partner/CreatePartner/index.tsx @@ -0,0 +1,36 @@ +import React, { FC } from 'react'; +import PartnerForm from '../PartnerForm'; +import { Message } from '@arco-design/web-react'; +import { createParticipant } from 'services/participant'; +import { useHistory } from 'react-router-dom'; +import BackButton from 'components/BackButton'; +import SharedPageLayout from 'components/SharedPageLayout'; + +import styles from './index.module.less'; + +const CreatePartner: FC = () => { + const history = useHistory(); + + return ( + <div className={styles.div_container}> + <SharedPageLayout + title={<BackButton onClick={() => history.goBack()}>合作伙伴列表</BackButton>} + centerTitle="添加合作伙伴" + > + <PartnerForm isEdit={false} onSubmit={onSubmit} /> + </SharedPageLayout> + </div> + ); + + async function onSubmit(payload: any) { + try { + await createParticipant(payload); + Message.success('添加成功'); + history.push('/partners'); + } catch (error: any) { + Message.error(error.message); + } + } +}; + +export default CreatePartner; diff --git a/web_console_v2/client/src/views/Partner/EditPartner/index.module.less b/web_console_v2/client/src/views/Partner/EditPartner/index.module.less new file mode 100644 index 000000000..96018684c --- /dev/null +++ b/web_console_v2/client/src/views/Partner/EditPartner/index.module.less @@ -0,0 +1,4 @@ +.div_container{ + overflow-x: hidden; + overflow-y: scroll; +} diff --git a/web_console_v2/client/src/views/Partner/EditPartner/index.tsx b/web_console_v2/client/src/views/Partner/EditPartner/index.tsx new file mode 100644 index 000000000..e7ec05b3e --- /dev/null +++ b/web_console_v2/client/src/views/Partner/EditPartner/index.tsx @@ -0,0 +1,63 @@ +import React, { FC, useMemo } from 'react'; +import { useParams, useHistory } from 'react-router-dom'; +import { Message } from '@arco-design/web-react'; +import PartnerForm from '../PartnerForm'; +import { getParticipantDetailById, updateParticipant } from 'services/participant'; +import { useQuery } from 'react-query'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; + +import styles from './index.module.less'; + +const EditPartner: FC = () => { + const history = useHistory(); + const { id: partnerId } = useParams<{ + id: string; + }>(); + + const { data: participantQuery, isLoading, isError, error } = useQuery( + ['getParticipantDetail', partnerId], + () => getParticipantDetailById(partnerId), + { + cacheTime: 0, + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + history.push('/partners'); + } + + const dataProps = useMemo(() => { + if (!isLoading && participantQuery?.data) { + return { data: participantQuery?.data }; + } + return {}; + }, [isLoading, participantQuery?.data]); + + return ( + <div 
className={styles.div_container}> + <SharedPageLayout + title={<BackButton onClick={() => history.goBack()}>合作伙伴列表</BackButton>} + centerTitle="变更合作伙伴" + > + <PartnerForm isEdit={true} {...dataProps} onSubmit={onSubmit} /> + </SharedPageLayout> + </div> + ); + + async function onSubmit(payload: any) { + try { + // If there is no modification, no request will be sent + if (Object.keys(payload).length) { + await updateParticipant(partnerId, payload); + } + Message.success('变更合作伙伴成功!'); + history.push('/partners'); + } catch (error: any) { + Message.error(error.message); + } + } +}; + +export default EditPartner; diff --git a/web_console_v2/client/src/views/Partner/PartnerForm.tsx b/web_console_v2/client/src/views/Partner/PartnerForm.tsx new file mode 100644 index 000000000..61eaf296f --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerForm.tsx @@ -0,0 +1,84 @@ +import React, { FC } from 'react'; +import { Spin } from '@arco-design/web-react'; +import { CreateParticipantPayload, UpdateParticipantPayload } from 'typings/participant'; +import GridRow from 'components/_base/GridRow'; +import AddPartnerForm from './AddPartnerForm'; + +interface Props { + isEdit: boolean; + data?: any; + onSubmit: (payload: any) => Promise<void>; +} + +const PartnerForm: FC<Props> = ({ isEdit, data, onSubmit }) => { + const dataProps = isEdit && data ? { data } : {}; + + return ( + <GridRow justify="center" style={{ minHeight: '100%' }} align="start"> + <Spin loading={isEdit && !data}> + <div style={{ width: 600 }}> + {(!isEdit || data) && ( + <AddPartnerForm onFinish={onFinish} {...dataProps} isEdit={isEdit} needAdd={false} /> + )} + </div> + </Spin> + </GridRow> + ); + function onFinish(value: any) { + const valueOnly = value[0]; + let payload = {} as UpdateParticipantPayload | CreateParticipantPayload; + if (!isEdit) { + if (valueOnly?.extra?.is_manual_configured) { + Object.keys(valueOnly).forEach((key: any) => { + const _key = key as keyof CreateParticipantPayload; + if (key === 'extra') { + payload = { + ...payload, + is_manual_configured: valueOnly?.extra?.is_manual_configured ?? true, + grpc_ssl_server_host: valueOnly?.extra?.grpc_ssl_server_host ?? 'x-host', + }; + } else { + payload = { + ...payload, + [key]: valueOnly[_key], + }; + } + }); + } else { + payload = { + name: valueOnly.name, + domain_name: valueOnly.domain_name, + is_manual_configured: valueOnly?.extra?.is_manual_configured ?? 
false, + comment: valueOnly.comment, + type: valueOnly.type, + }; + } + } else { + data && + Object.keys(valueOnly).forEach((key: any) => { + const _key = key as keyof UpdateParticipantPayload; + if (key === 'extra') { + if (valueOnly?.extra?.is_manual_configured) { + const grpc_ssl_server_host = valueOnly?.extra?.grpc_ssl_server_host; + if (grpc_ssl_server_host !== data?.extra?.grpc_ssl_server_host) { + payload = { + ...payload, + grpc_ssl_server_host: valueOnly?.extra?.grpc_ssl_server_host, + }; + } + } + } else { + if (valueOnly[_key] !== data[_key]) { + payload = { + ...payload, + [key]: valueOnly[_key], + }; + } + } + }); + } + payload && onSubmit(payload); + } +}; + +export default PartnerForm; diff --git a/web_console_v2/client/src/views/Partner/PartnerList/ConnectionStatus.tsx b/web_console_v2/client/src/views/Partner/PartnerList/ConnectionStatus.tsx new file mode 100644 index 000000000..f26469b12 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerList/ConnectionStatus.tsx @@ -0,0 +1,82 @@ +import React, { FC, useEffect, useMemo } from 'react'; +import { useSetRecoilState } from 'recoil'; + +import { useCheckConnection } from 'hooks/participant'; +import { participantConnectionState } from 'stores/participant'; +import { TIME_INTERVAL } from 'shared/constants'; + +import GridRow from 'components/_base/GridRow'; +import StateIndicator from 'components/StateIndicator'; + +import { ConnectionStatusType } from 'typings/participant'; + +export interface Props { + id: ID; + isNeedTip?: boolean; + isNeedReCheck?: boolean; +} + +export const globalParticipantIdToConnectionStateMap: { + [key: number]: ConnectionStatusType; +} = {}; + +const PaticipantConnectionStatus: FC<Props> = ({ + id, + isNeedTip = false, + isNeedReCheck = false, +}) => { + const setConnectionStatus = useSetRecoilState(participantConnectionState(id)); + + const [checkStatus, reCheck] = useCheckConnection(id, { + refetchOnWindowFocus: false, + refetchInterval: TIME_INTERVAL.CONNECTION_CHECK, + }); + + const tipProps = useMemo(() => { + if (isNeedTip) { + return checkStatus.success === ConnectionStatusType.Fail ? 
{ tip: checkStatus.message } : {}; + } + return false; + }, [checkStatus.message, checkStatus.success, isNeedTip]); + + useEffect(() => { + // Store all connection state to sort table col data + globalParticipantIdToConnectionStateMap[Number(id)] = checkStatus.success; + setConnectionStatus(checkStatus); + }, [id, checkStatus, setConnectionStatus]); + + return ( + <GridRow align="center" gap={5}> + <StateIndicator + type={checkStatus.success} + text={getTextByType(checkStatus.success)} + {...tipProps} + afterText={ + isNeedReCheck && + checkStatus.success === ConnectionStatusType.Fail && ( + <button + type="button" + className="custom-text-button" + onClick={() => reCheck(id)} + style={{ marginLeft: 10 }} + > + 重试 + </button> + ) + } + /> + </GridRow> + ); + function getTextByType(type: ConnectionStatusType) { + switch (type) { + case ConnectionStatusType.Success: + return '连接成功'; + case ConnectionStatusType.Processing: + return '连接中'; + case ConnectionStatusType.Fail: + return '连接失败'; + } + } +}; + +export default PaticipantConnectionStatus; diff --git a/web_console_v2/client/src/views/Partner/PartnerList/PartnerTable.module.less b/web_console_v2/client/src/views/Partner/PartnerList/PartnerTable.module.less new file mode 100644 index 000000000..b976cc7da --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerList/PartnerTable.module.less @@ -0,0 +1,3 @@ +.table_container{ + margin-top: 20px; +} diff --git a/web_console_v2/client/src/views/Partner/PartnerList/PartnerTable.tsx b/web_console_v2/client/src/views/Partner/PartnerList/PartnerTable.tsx new file mode 100644 index 000000000..76e5bda85 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerList/PartnerTable.tsx @@ -0,0 +1,260 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useHistory } from 'react-router-dom'; +import { useMount } from 'react-use'; + +import { useRecoilQuery } from 'hooks/recoil'; +import { userInfoQuery } from 'stores/user'; +import { useReloadParticipantList } from 'hooks/participant'; +import { useUrlState } from 'hooks'; + +import { participantListQuery } from 'stores/participant'; +import { deleteParticipant } from 'services/participant'; +import { transformRegexSpecChar } from 'shared/helpers'; +import { formatTimestamp } from 'shared/date'; +import { CONSTANTS } from 'shared/constants'; + +import { Button, Input, Message, Table } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import { Plus } from 'components/IconPark'; +import Modal from 'components/Modal'; +import StateIndicator from 'components/StateIndicator'; +import PaticipantConnectionStatus, { + globalParticipantIdToConnectionStateMap, +} from './ConnectionStatus'; +import { ActionItem, VersionItem } from './TableItem'; + +import { FedRoles } from 'typings/auth'; +import { Participant, ParticipantType } from 'typings/participant'; + +import styles from './PartnerTable.module.less'; + +export const getParticipant_columns = (showNumOfWorkspace: boolean) => { + const columns = [ + { + title: '企业名称', + dataIndex: 'name', + ellipsis: true, + sorter: (a: Participant, b: Participant) => a.name.localeCompare(b.name), + }, + { + title: '类型', + dataIndex: 'type', + filters: [ + { + text: '轻量级', + value: ParticipantType.LIGHT_CLIENT, + }, + { + text: '标准', + value: ParticipantType.PLATFORM, + }, + ], + onFilter: (value: any, record: Participant) => { + if (value === ParticipantType.LIGHT_CLIENT) { + return record.type === ParticipantType.LIGHT_CLIENT; + } + if (value === 
ParticipantType.PLATFORM) { + return record.type === ParticipantType.PLATFORM || record.type == null; + } + return false; + }, + render: (value: ParticipantType) => ( + <StateIndicator.LigthClientType isLightClient={value === ParticipantType.LIGHT_CLIENT} /> + ), + }, + { + title: '状态', + dataIndex: 'status', + width: 150, + sorter: (a: Participant, b: Participant) => { + if ( + globalParticipantIdToConnectionStateMap[b.id] && + globalParticipantIdToConnectionStateMap[a.id] + ) { + return globalParticipantIdToConnectionStateMap[b.id].localeCompare( + globalParticipantIdToConnectionStateMap[a.id], + ); + } + // Keep default order + return 1; + }, + render: (_: any, record: Participant) => { + const isLightClient = record.type === ParticipantType.LIGHT_CLIENT; + + if (isLightClient) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + + return <PaticipantConnectionStatus id={record.id} isNeedTip={true} isNeedReCheck={true} />; + }, + }, + { + title: '泛域名', + dataIndex: 'domain_name', + sorter: (a: Participant, b: Participant) => a.domain_name.localeCompare(b.domain_name), + }, + { + title: '主机号', + dataIndex: 'host', + }, + { + title: '端口号', + dataIndex: 'port', + }, + { + title: '版本号', + dataIndex: 'version', + render: (_: any, record: Participant) => { + return <VersionItem partnerId={record.id} />; + }, + }, + { + title: '合作伙伴描述', + dataIndex: 'comment', + ellipsis: true, + render: (value: any) => { + return value || CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + title: '最近活跃时间', + dataIndex: 'last_connected_at', + render: (value: any) => { + return value ? formatTimestamp(value) : CONSTANTS.EMPTY_PLACEHOLDER; + }, + sorter: (a: Participant, b: Participant) => + (a.last_connected_at || 0) - (b.last_connected_at || 0), + }, + ]; + showNumOfWorkspace && + columns.push({ + title: '已关联的工作区数量', + dataIndex: 'num_project', + render: (value: any) => { + return value || 0; + }, + }); + return columns; +}; + +const PartnerTable: FC = () => { + const history = useHistory(); + const userInfo = useRecoilQuery(userInfoQuery); + const reloadParticipants = useReloadParticipantList(); + const { isLoading, data: participantList } = useRecoilQuery(participantListQuery); + const reloadList = useReloadParticipantList(); + const [isDeleting, setIsDeleting] = useState(false); + + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 20, + keyword: '', + }); + + useMount(() => { + if (!isLoading || participantList) { + reloadList(); + } + }); + + const showList = useMemo(() => { + if (participantList && urlState.keyword) { + const regx = new RegExp(`^.*${transformRegexSpecChar(urlState.keyword)}.*$`); + return participantList.filter((item: Participant) => regx.test(item.name)); + } + return participantList || []; + }, [participantList, urlState.keyword]); + + const isAdmin = useMemo(() => { + if (userInfo.data) { + const _isAdmin = userInfo.data.role === FedRoles.Admin; + return _isAdmin; + } + return false; + }, [userInfo.data]); + + const columns = getParticipant_columns(true); + + isAdmin && + columns.push({ + title: '操作', + dataIndex: 'action', + render: (_: any, record: any) => { + return ( + <ActionItem + data={record} + onDelete={() => { + Modal.delete({ + title: '确认要删除合作伙伴?', + content: '删除后,该合作伙伴将退出当前所有运行中的工作流', + onOk() { + handleDelete(record.id); + }, + }); + }} + /> + ); + }, + }); + + return ( + <> + <GridRow justify={isAdmin ? 
'space-between' : 'end'}> + {isAdmin && ( + <Button icon={<Plus />} type="primary" onClick={() => history.push('/partners/create')}> + 添加合作伙伴 + </Button> + )} + <Input.Search + allowClear + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder="输入合作伙伴名称搜索" + defaultValue={urlState.keyword} + /> + </GridRow> + <Table<Participant> + className={`custom-table custom-table-left-side-filter ${styles.table_container}`} + rowKey="id" + data={showList} + columns={columns} + pagination={{ + pageSize: Number(urlState.pageSize), + current: Number(urlState.page), + onChange: onPageChange, + }} + loading={isLoading || isDeleting} + /> + </> + ); + + function onSearch(value: string) { + setUrlState((prevState) => ({ + ...prevState, + page: 1, + keyword: value, + })); + } + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + async function handleDelete(id: ID) { + try { + setIsDeleting(true); + await deleteParticipant(id); + setIsDeleting(false); + Message.success('删除成功'); + reloadParticipants(); + } catch (error: any) { + setIsDeleting(false); + Message.error(error.message); + } + } +}; + +export default PartnerTable; diff --git a/web_console_v2/client/src/views/Partner/PartnerList/TableItem.tsx b/web_console_v2/client/src/views/Partner/PartnerList/TableItem.tsx new file mode 100644 index 000000000..4bc434a65 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerList/TableItem.tsx @@ -0,0 +1,50 @@ +import React, { FC } from 'react'; +import { ConnectionStatusType, Participant } from 'typings/participant'; +import { useHistory } from 'react-router-dom'; +import { useRecoilValue } from 'recoil'; +import { participantConnectionState } from 'stores/participant'; +import MoreActions from 'components/MoreActions'; +import { CONSTANTS } from 'shared/constants'; + +export const VersionItem: FC<{ partnerId: ID }> = ({ partnerId: id }) => { + const connectionStatus = useRecoilValue(participantConnectionState(id)); + return ( + <span> + {connectionStatus.application_version?.version || + connectionStatus.application_version?.revision?.slice(-6) || + CONSTANTS.EMPTY_PLACEHOLDER} + </span> + ); +}; + +export const ActionItem: FC<{ data: Participant; onDelete: () => void }> = ({ data, onDelete }) => { + const history = useHistory(); + const connectionStatus = useRecoilValue(participantConnectionState(data.id)); + + const isProcessing = connectionStatus.success === ConnectionStatusType.Processing; + const isHaveLinkedProject = !!data.num_project; + + return ( + <MoreActions + actionList={[ + { + label: '删除', + onClick: onDelete, + disabled: isProcessing || isHaveLinkedProject, + disabledTip: isProcessing ? '连接中不可删除' : '需解除合作伙伴所有关联工作区才可删除', + danger: true, + }, + { + label: '变更', + onClick: () => { + history.push(`/partners/edit/${data.id}`); + }, + disabled: isProcessing, + disabledTip: isProcessing + ? 
'连接中不可变更' + : '需解除合作伙伴所有关联工作区且连接失败时才可变更', + }, + ]} + /> + ); +}; diff --git a/web_console_v2/client/src/views/Partner/PartnerList/index.module.less b/web_console_v2/client/src/views/Partner/PartnerList/index.module.less new file mode 100644 index 000000000..f0a512cc1 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerList/index.module.less @@ -0,0 +1,26 @@ +@import '~styles/mixins.less'; +.avatar_container{ + .MixinSquare(44px); + background-color: #5360d8; + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + + &::before { + content: attr(data-name); + line-height: 44px; + font-weight: bold; + } +} + +.user_message{ + padding-bottom: 20px; +} +.tag_container{ + background-color: '#F6F7FB'; + border: 0; + color: '#1A2233'; + border-radius: 40px; + padding: 0px 8px; +} diff --git a/web_console_v2/client/src/views/Partner/PartnerList/index.tsx b/web_console_v2/client/src/views/Partner/PartnerList/index.tsx new file mode 100644 index 000000000..a8f4a2735 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/PartnerList/index.tsx @@ -0,0 +1,52 @@ +import React, { FC, useState } from 'react'; +import { useQuery } from 'react-query'; + +import { fetchSysInfo } from 'services/settings'; +import { CONSTANTS } from 'shared/constants'; + +import { Tag, Tabs, Spin } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout, { RemovePadding } from 'components/SharedPageLayout'; +import PartnerTable from './PartnerTable'; + +import styles from './index.module.less'; + +const PartnerList: FC = () => { + const sysInfoQuery = useQuery(['fetchSysInfo'], () => fetchSysInfo(), { + retry: 2, + refetchOnWindowFocus: false, + }); + + const [activeKey] = useState<string>('partners'); + + return ( + <SharedPageLayout title={'合作伙伴'}> + <Spin loading={sysInfoQuery.isFetching}> + <div className={styles.user_message}> + <GridRow gap="12" style={{ maxWidth: '75%' }}> + <div + className={styles.avatar_container} + data-name={sysInfoQuery.data?.data?.name?.slice(0, 1) ?? CONSTANTS.EMPTY_PLACEHOLDER} + /> + <div> + <h3>{sysInfoQuery.data?.data.name ?? CONSTANTS.EMPTY_PLACEHOLDER}</h3> + <div> + <Tag className={styles.tag_container}> + 泛域名:{sysInfoQuery.data?.data?.domain_name ?? 
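+ /* Shows the shared empty placeholder until fetchSysInfo resolves with a domain name. */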
CONSTANTS.EMPTY_PLACEHOLDER} + </Tag> + </div> + </div> + </GridRow> + </div> + </Spin> + <RemovePadding style={{ height: 48 }}> + <Tabs defaultActiveTab={activeKey}> + <Tabs.TabPane title="合作伙伴" key="partners" /> + </Tabs> + </RemovePadding> + <div>{activeKey === 'partners' && <PartnerTable />}</div> + </SharedPageLayout> + ); +}; + +export default PartnerList; diff --git a/web_console_v2/client/src/views/Partner/index.tsx b/web_console_v2/client/src/views/Partner/index.tsx new file mode 100644 index 000000000..f8f3af5f1 --- /dev/null +++ b/web_console_v2/client/src/views/Partner/index.tsx @@ -0,0 +1,17 @@ +import React from 'react'; +import { Route } from 'react-router-dom'; +import PartnerList from './PartnerList'; +import CreatePartner from './CreatePartner'; +import EditPartner from './EditPartner'; + +function PartnerPage() { + return ( + <> + <Route path="/partners" exact component={PartnerList} /> + <Route path="/partners/create" exact component={CreatePartner} /> + <Route path="/partners/edit/:id" exact component={EditPartner} /> + </> + ); +} + +export default PartnerPage; diff --git a/web_console_v2/client/src/views/Projects/ConnectionStatus.tsx b/web_console_v2/client/src/views/Projects/ConnectionStatus.tsx index 80d3ed605..f8415dea5 100644 --- a/web_console_v2/client/src/views/Projects/ConnectionStatus.tsx +++ b/web_console_v2/client/src/views/Projects/ConnectionStatus.tsx @@ -1,27 +1,47 @@ -import React, { FC } from 'react'; +import React, { ForwardRefRenderFunction, useImperativeHandle, forwardRef } from 'react'; import { ConnectionStatus, getConnectionStatusClassName, getConnectionStatusTag, + Project, } from 'typings/project'; -import { useTranslation } from 'react-i18next'; import StateIndicator from 'components/StateIndicator'; +import { useCheckConnection } from 'hooks/project'; +import { TIME_INTERVAL } from 'shared/constants'; -interface Props { - status: ConnectionStatus; +export interface Props { + status?: ConnectionStatus; tag?: boolean; + project: Project; } -const ProjectConnectionStatus: FC<Props> = ({ status, tag }: Props) => { - const { t } = useTranslation(); +export interface ExposedRef { + checkConnection: Function; +} + +const ProjectConnectionStatus: ForwardRefRenderFunction<ExposedRef, Props> = ( + { status, tag, project }, + parentRef, +) => { + const [innerStatus, checkConnection] = useCheckConnection(project, { + refetchOnWindowFocus: false, + refetchInterval: TIME_INTERVAL.CONNECTION_CHECK, + enabled: !status, + }); + + useImperativeHandle(parentRef, () => { + return { + checkConnection, + }; + }); return ( <StateIndicator - type={getConnectionStatusClassName(status)} - text={t(getConnectionStatusTag(status))} + type={getConnectionStatusClassName(innerStatus)} + text={getConnectionStatusTag(innerStatus)} tag={tag} /> ); }; -export default ProjectConnectionStatus; +export default forwardRef(ProjectConnectionStatus); diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/EnvVariablesForm/index.module.less b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/EnvVariablesForm/index.module.less new file mode 100644 index 000000000..f6702fe14 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/EnvVariablesForm/index.module.less @@ -0,0 +1,35 @@ +.header{ + margin-bottom: 20px; +} +.toggler { + display: inline-flex; + align-items: center; + font-size: 14px; + line-height: 1; + color: var(--primaryColor); + cursor: pointer; + user-select: none; + &:hover { + color: 
var(--newPrimaryHover); + } +} +.no_variables{ + color: var(--textColorSecondary); +} +.add_button{ + color: var(--primaryColor) !important; + font-weight: 500; +} +.list_container{ + transition: 0.4s var(--commonTiming); + overflow: hidden; + .hidden_variables { + opacity: 0; + overflow: hidden; + } +} +.remove_button{ + position: absolute; + right: 0; +} + diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/EnvVariablesForm/index.tsx b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/EnvVariablesForm/index.tsx new file mode 100644 index 000000000..2716d6766 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/EnvVariablesForm/index.tsx @@ -0,0 +1,168 @@ +import React, { + useCallback, + useLayoutEffect, + useRef, + useImperativeHandle, + ForwardRefRenderFunction, + forwardRef, +} from 'react'; +import { Form, Input, Button } from '@arco-design/web-react'; +import { useToggle } from 'react-use'; +import { IconDelete, IconPlus, IconCaretDown, IconCaretUp } from '@arco-design/web-react/icon'; +import { FormInstance } from '@arco-design/web-react'; +import { convertToUnit } from 'shared/helpers'; +import { useSubscribe } from 'hooks'; +import GridRow from 'components/_base/GridRow'; + +import styles from './index.module.less'; + +export const VARIABLES_FIELD_NAME = 'variables'; +export const VARIABLES_ERROR_CHANNEL = 'project.field_variables_error'; + +type Props = { + formInstance?: FormInstance; + disabled?: boolean; + isEdit?: boolean; +}; + +export type ExposedRef = { + toggleFolded: (params: boolean) => void; +}; + +const EnvVariablesForm: ForwardRefRenderFunction<ExposedRef | undefined, Props> = ( + { disabled, isEdit = true }, + parentRef, +) => { + const [isFolded, toggleFolded] = useToggle(isEdit); + const listInnerRef = useRef<HTMLDivElement>(); + const listContainerRef = useRef<HTMLDivElement>(); + + useSubscribe(VARIABLES_ERROR_CHANNEL, () => { + toggleFolded(false); + }); + + useImperativeHandle(parentRef, () => { + return { + toggleFolded, + }; + }); + + const setListContainerMaxHeight = useCallback( + (nextHeight: any) => { + listContainerRef.current!.style.maxHeight = convertToUnit(nextHeight); + }, + [listContainerRef], + ); + const getListInnerHeight = useCallback(() => { + return listInnerRef.current!.offsetHeight!; + }, [listInnerRef]); + + useLayoutEffect(() => { + const innerHeight = getListInnerHeight() + 30; + + if (isFolded) { + setListContainerMaxHeight(innerHeight); + // Q: Why read inner's height one time before set maxHeight to 0 for folding + // A: Since we set maxHeight to 'initial' everytime unfold-transition ended, it's important + // to re-set maxHeight to innerHeight before folding, we need a ${specific value} → 0 transition + // not the `initial` → 0 in which case animation would lost + getListInnerHeight(); + setListContainerMaxHeight(0); + } else { + setListContainerMaxHeight(innerHeight); + } + }, [isFolded, getListInnerHeight, setListContainerMaxHeight]); + + return ( + <div> + {isEdit && ( + <div className={styles.header}> + <div className={styles.toggler} onClick={toggleFolded} data-folded={String(isFolded)}> + {isFolded ? 
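+ /*
+  * The fold/unfold animation is driven by the layout effect above via a
+  * max-height transition. A transition from `initial` to 0 would not animate,
+  * so the effect first pins max-height to the measured inner height, reads
+  * offsetHeight once to force a reflow, and only then sets max-height to 0,
+  * giving the browser a concrete starting value to animate from.
+  */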
( + <> + 展开环境变量配置 <IconCaretDown /> + </> + ) : ( + <> + 收起环境变量配置 + <IconCaretUp /> + </> + )} + {/* <CaretDown /> */} + </div> + </div> + )} + + <div + className={styles.list_container} + ref={listContainerRef as any} + data-folded={String(isFolded)} + onTransitionEnd={onFoldAnimationEnd} + > + <Form.List field={`config.${VARIABLES_FIELD_NAME}`}> + {(fields, { add, remove }) => ( + <div ref={listInnerRef as any}> + {fields.map((field, index) => ( + <GridRow + style={{ position: 'relative', gridTemplateColumns: '1fr 1fr' }} + gap={10} + key={field.key} + align="start" + > + <Form.Item + field={`${field.field}.name`} + rules={[{ required: true, message: '请输入变量名' }]} + > + <Input.TextArea placeholder="name" disabled={disabled} /> + </Form.Item> + + <Form.Item + field={`${field.field}.value`} + rules={[{ required: true, message: '请输入变量值' }]} + > + <Input.TextArea placeholder="value" disabled={disabled} /> + </Form.Item> + <Button + className={styles.remove_button} + size="small" + icon={<IconDelete />} + shape="circle" + type="text" + onClick={() => remove(index)} + /> + </GridRow> + ))} + {/* Empty placeholder */} + {isEdit && fields.length === 0 && ( + <Form.Item className={styles.no_variables}>当前没有环境变量参数</Form.Item> + )} + + <Form.Item> + {/* DO NOT simplify `() => add()` to `add`, it will pollute form value with $event */} + <Button + className={styles.add_button} + size="small" + icon={<IconPlus />} + type="default" + onClick={() => add()} + > + 新增参数 + </Button> + </Form.Item> + </div> + )} + </Form.List> + </div> + </div> + ); + + function onFoldAnimationEnd(_: React.TransitionEvent) { + if (!isFolded) { + // Because of user can adjust list inner's height by resize value-textarea or add/remove variable + // we MUST set container's maxHeight to 'initial' after unfolded (after which user can interact) + listContainerRef.current!.style.maxHeight = 'initial'; + } + } +}; + +export default forwardRef(EnvVariablesForm); diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/index.module.less b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/index.module.less new file mode 100644 index 000000000..481acc76a --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/index.module.less @@ -0,0 +1,18 @@ +.container { + width: 480px; + margin: 40px auto 0; +} +.title { + font-size: 14px; + font-weight: 500; + color: #000000; +} +.participant_name { + margin-right: 8px; +} +.btn_container{ + margin-top: 40px; +} +.btn_content{ + padding: 0px 60px; +} diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/index.tsx b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/index.tsx new file mode 100644 index 000000000..b9acaa74b --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepOneConfig/index.tsx @@ -0,0 +1,166 @@ +import React, { FC } from 'react'; +import { useHistory } from 'react-router-dom'; +import { useRecoilState } from 'recoil'; + +import { useIsFormValueChange } from 'hooks'; +import { projectCreateForm, ProjectCreateForm } from 'stores/project'; +import { MAX_COMMENT_LENGTH, validNamePattern } from 'shared/validator'; + +import { Form, Input, Button, Radio } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import EnvVariablesForm from './EnvVariablesForm'; +import ActionRules from '../StepTwoPartner/ActionRules'; +import { 
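+ /* ProjectTaskType drives both the ability radio group below and the
+  * ActionRules policy matrix reused from the authorization step. */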
ProjectFormInitialValues, ProjectTaskType } from 'typings/project'; + +import styles from './index.module.less'; + +const radioOptions = [ + { + value: ProjectTaskType.ALIGN, + label: 'ID对齐(隐私集合求交,求出共有交集,不泄漏交集之外原始数据)', + }, + { + value: ProjectTaskType.HORIZONTAL, + label: '横向联邦学习(包含特征对齐、横向联邦训练、评估、预测能力)', + }, + { + value: ProjectTaskType.VERTICAL, + label: '纵向联邦学习(包含ID对齐、纵向联邦训练、评估、预测能力)', + }, + { + value: ProjectTaskType.TRUSTED, + label: '可信分析服务(包含可信计算分析能力)', + }, +]; + +const StepOneConfig: FC<{ + isEdit?: boolean; + onEditFinish?: (payload: any) => Promise<void>; + initialValues?: ProjectFormInitialValues; + isLeftLayout?: boolean; + onFormValueChange?: () => void; +}> = ({ + isEdit = false, + onEditFinish, + initialValues, + isLeftLayout = false, + onFormValueChange: onFormValueChangeFromProps, +}) => { + const [form] = Form.useForm(); + const history = useHistory(); + const [projectForm, setProjectForm] = useRecoilState<ProjectCreateForm>(projectCreateForm); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onValuesChange); + + const defaultValues: any = initialValues?.name + ? initialValues + : { + name: '', + comment: '', + variables: [], + }; + + return ( + <div className={styles.container}> + <Form + form={form} + layout="vertical" + onSubmit={isEdit ? editFinish : goStepTwo} + initialValues={isEdit ? defaultValues : projectForm} + onValuesChange={onFormValueChange} + > + <div style={{ marginBottom: 30 }}> + <p className={styles.title}>基本信息</p> + <Form.Item + label="工作区名称" + field="name" + rules={[ + { required: true, message: '请输入工作区名称' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input disabled={isEdit} placeholder="请输入工作区名称" /> + </Form.Item> + <Form.Item label="工作区描述" field="comment"> + <Input.TextArea maxLength={MAX_COMMENT_LENGTH} placeholder="请为工作区添加描述" /> + </Form.Item> + <p className={styles.title}>环境变量</p> + <EnvVariablesForm formInstance={form} isEdit={isEdit} /> + <p className={styles.title}>能力规格</p> + + {isEdit ? ( + <span> + {radioOptions.find((item) => item.value === initialValues?.config?.abilities?.[0]) + ?.label || '旧版工作区'} + </span> + ) : ( + <Form.Item + field="config.abilities" + rules={[{ required: true, message: '请选择能力规格' }]} + normalize={(value) => [value]} + formatter={(value) => value?.[0]} + > + <Radio.Group> + {radioOptions.map((item) => ( + <Radio value={item.value} key={item.value}> + {item.label} + </Radio> + ))} + </Radio.Group> + </Form.Item> + )} + + {isEdit && initialValues?.config?.abilities?.[0] && ( + <ActionRules taskType={initialValues?.config?.abilities?.[0] as ProjectTaskType} /> + )} + </div> + + <Form.Item> + <GridRow + className={styles.btn_container} + gap={10} + justify={isLeftLayout ? 'start' : 'center'} + > + <Button className={styles.btn_content} type="primary" htmlType="submit"> + {isEdit ? 
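+ /* In edit mode the ability spec is locked and rendered as a plain label; in
+  * create mode the single Radio value is wrapped into an array by the
+  * normalize/formatter pair, so the form always stores config.abilities as a list. */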
'提交' : '下一步'} + </Button> + <ButtonWithModalConfirm + onClick={backToList} + isShowConfirmModal={isFormValueChanged || Boolean(projectForm.name)} + > + 取消 + </ButtonWithModalConfirm> + </GridRow> + </Form.Item> + </Form> + </div> + ); + + function backToList() { + history.push(`/projects`); + } + function goStepTwo(values: any) { + setProjectForm({ + ...projectForm, + ...values, + config: { + ...projectForm.config, + ...values.config, + }, + }); + + history.push(`/projects/create/authorize`); + } + function editFinish(values: any) { + const editPayload = { ...values, config: { ...defaultValues.config, ...values.config } }; + onEditFinish?.(editPayload); + } + function onValuesChange() { + onFormValueChangeFromProps?.(); + } +}; + +export default StepOneConfig; diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepThreeAuthorize/index.module.less b/web_console_v2/client/src/views/Projects/CreateProject/StepThreeAuthorize/index.module.less new file mode 100644 index 000000000..1566f2376 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepThreeAuthorize/index.module.less @@ -0,0 +1,30 @@ +.container{ + display: flex; + align-items: center; + flex-direction: column; +} +.title_container{ + margin-top: 40px; + margin-bottom: 20px; +} +.title_content{ + font-size: 14px; + font-weight: 500; + color: #000000; + margin: 10px 0px; +} +.popover_content{ + font-size: 12px; +} +.btn_container{ + margin-top: 40px; +} +.btn_content{ + padding: 0px 60px; +} +.block_chain_container{ + margin-bottom: 0px; +} + + + diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepThreeAuthorize/index.tsx b/web_console_v2/client/src/views/Projects/CreateProject/StepThreeAuthorize/index.tsx new file mode 100644 index 000000000..e87df8aed --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepThreeAuthorize/index.tsx @@ -0,0 +1,161 @@ +import React, { FC, useState, useEffect } from 'react'; +import { useHistory } from 'react-router'; +import { NewCreateProjectPayload, ProjectTaskType } from 'typings/project'; +import { ParticipantType, Participant } from 'typings/participant'; +import { useRecoilState } from 'recoil'; +import { initialActionRules, projectCreateForm, ProjectCreateForm } from 'stores/project'; +import { Button, Form, Popover, Switch, Space } from '@arco-design/web-react'; +import { IconInfoCircle, IconQuestionCircle } from '@arco-design/web-react/icon'; +import GridRow from 'components/_base/GridRow'; +import InvitionTable from 'components/InvitionTable'; +import BlockRadio from 'components/_base/BlockRadio'; +import TitleWithIcon from 'components/TitleWithIcon'; + +import styles from './index.module.less'; + +const options = [ + { + value: ParticipantType.PLATFORM, + label: '标准合作伙伴', + }, + { + value: ParticipantType.LIGHT_CLIENT, + label: '轻量合作伙伴', + }, +]; + +const StepThreeAuthorize: FC<{ onSubmit: (payload: NewCreateProjectPayload) => Promise<void> }> = ({ + onSubmit, +}) => { + const history = useHistory(); + const [form] = Form.useForm(); + const [projectForm, setProjectForm] = useRecoilState<ProjectCreateForm>(projectCreateForm); + const [isSubmitDisable, setIsSubmitDisable] = useState(true); + const [supportBlockChain, setSupportBlockChain] = useState(true); + const [participantsType, setParticipantsType] = useState(ParticipantType.PLATFORM); + + useEffect(() => { + if (!projectForm.config.abilities || !projectForm.name) { + history.push('/projects/create/config'); + } + }); + return ( + <div 
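+ /* The effect above guards direct visits: without a name and abilities in the
+  * recoil store (e.g. after a page refresh), the user is sent back to step one. */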
className={styles.container}> + <div> + <Form form={form} initialValues={projectForm} onSubmit={onFinish} layout="vertical"> + <p className={styles.title_content}>邀请合作伙伴</p> + <Form.Item + label={ + <span> + 合作伙伴类型 + <Popover + title="合作伙伴类型说明" + content={ + <span className={styles.popover_content}> + <p>标准合作伙伴:拥有可视化Web平台的合作伙伴;</p> + <p>轻量合作伙伴:合作伙伴的客户端形式为容器或可执行文本。</p> + </span> + } + > + <IconQuestionCircle /> + </Popover> + </span> + } + field="participant_type" + initialValue={ParticipantType.PLATFORM} + rules={[{ required: true }]} + > + <BlockRadio + options={options} + onChange={(value: any) => { + setParticipantsType(value); + }} + /> + </Form.Item> + <Form.Item label="合作伙伴" field="participant_ids"> + <InvitionTable + participantsType={participantsType} + onChange={(selectedParticipants: Participant[]) => { + setIsSubmitDisable(!selectedParticipants.length); + const hasNoBlockChain = selectedParticipants.find((item) => { + return !item?.support_blockchain; + }); + setSupportBlockChain(!hasNoBlockChain); + hasNoBlockChain && form.setFieldValue('config.support_blockchain', false); + }} + isSupportCheckbox={[ProjectTaskType.HORIZONTAL, ProjectTaskType.TRUSTED].includes( + projectForm?.config?.abilities?.[0], + )} + /> + </Form.Item> + <p className={styles.title_content}>区块链存证</p> + <Space> + <Form.Item + className={styles.block_chain_container} + field="config.support_blockchain" + initialValue={true} + > + {supportBlockChain ? ( + <Switch /> + ) : ( + <TitleWithIcon + title="你选择的部分合作伙伴无区块链服务,不可启用区块链存证" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + )} + </Form.Item> + {supportBlockChain && ( + <TitleWithIcon + title="在工作区创建提交后不可更改此设置" + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + )} + </Space> + + <Form.Item> + <GridRow className={styles.btn_container} gap={10} justify="center"> + <Button + className={styles.btn_content} + type="primary" + htmlType="submit" + disabled={isSubmitDisable} + > + 提交并发送 + </Button> + <Button onClick={goToStepTwo}>上一步</Button> + </GridRow> + </Form.Item> + </Form> + </div> + </div> + ); + function onFinish(value: any) { + onSubmit({ + ...projectForm, + participant_ids: value.participant_ids.map((item: any) => item.id) || [], + config: { + ...projectForm.config, + ...value.config, + }, + }); + } + function goToStepTwo() { + setProjectForm({ + ...projectForm, + config: { + ...projectForm.config, + action_rules: { + ...initialActionRules, + ...projectForm.config.action_rules, + }, + }, + }); + history.goBack(); + } +}; + +export default StepThreeAuthorize; diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/ActionRules/index.module.less b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/ActionRules/index.module.less new file mode 100644 index 000000000..7c04a71a8 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/ActionRules/index.module.less @@ -0,0 +1,15 @@ +.title_container{ + margin-top: 40px; + margin-bottom: 20px; +} +.title_content{ + font-size: 14px; + font-weight: 500; + color: #000000; + margin: 10px 0px; +} +.popover_content{ + font-size: 12px; +} + + diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/ActionRules/index.tsx b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/ActionRules/index.tsx new file mode 100644 index 000000000..3e72826fb --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/ActionRules/index.tsx @@ -0,0 
+1,248 @@ +import React, { useState } from 'react'; +import { Form, Grid, Popover, Select, Typography } from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { ProjectAbilityType, ProjectActionType, ProjectTaskType } from 'typings/project'; + +import styles from './index.module.less'; + +const { Row, Col } = Grid; +const { Option } = Select; + +const options = [ + { + value: ProjectAbilityType.ALWAYS_ALLOW, + label: '始终允许', + }, + { + value: ProjectAbilityType.ONCE, + label: '允许一次', + }, + { + value: ProjectAbilityType.MANUAL, + label: '发起时询问', + }, + { + value: ProjectAbilityType.ALWAYS_REFUSE, + label: '拒绝', + }, +]; + +interface Props { + taskType: ProjectTaskType; + value?: any; +} +function ActionRules({ taskType, value }: Props) { + const [visible, setVisible] = useState(false); + return ( + <> + <div className={styles.title_container}> + <p className={styles.title_content}>本方授权策略</p> + <TitleWithIcon + title={ + <> + <span>配置任务时修改其授权策略,此初始授权策略将不再对任务生效。 </span> + <Popover + title="选项说明" + popupVisible={visible} + onVisibleChange={setVisible} + content={ + <span className={styles.popover_content}> + <p>1.始终允许:同类任务始终允许自动授权通过;</p> + + <p> + 2.允许一次:同类任务允许一次自动授权通过;一次执行后,具体任务的权限更新为发起时询问; + </p> + <p>3.发起时询问:同类任务发起时需要询问是否授权通过;</p> + <p>4.拒绝授权:同类任务始终授权拒绝。</p> + </span> + } + > + <Typography.Text type="primary">选项说明</Typography.Text> + </Popover> + </> + } + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </div> + {renderFormItem(taskType)} + </> + ); +} + +function resetFiled(filedValue: string) { + return `config.action_rules.${filedValue}`; +} +function renderFormItem(taskType: ProjectTaskType) { + let formItem; + switch (taskType) { + case ProjectTaskType.ALIGN: + formItem = renderAlignTask(); + break; + case ProjectTaskType.HORIZONTAL: + formItem = renderHorizontalTask(); + break; + case ProjectTaskType.VERTICAL: + formItem = renderVerticalTask(); + break; + case ProjectTaskType.TRUSTED: + formItem = renderTrustedTask(); + break; + default: + formItem = renderAlignTask(); + break; + } + return formItem; +} +function renderAlignTask() { + return ( + <Form.Item + field={resetFiled(ProjectActionType.ID_ALIGNMENT)} + label="ID对齐任务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + ); +} +function renderHorizontalTask() { + return ( + <> + <Row gutter={24}> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.DATA_ALIGNMENT)} + label="横向数据对齐任务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.HORIZONTAL_TRAIN)} + label="横向联邦模型训练" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + </Row> + <Row gutter={24}> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.WORKFLOW)} + label="工作流任务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + </Row> + </> + ); +} +function renderVerticalTask() { + return ( + <> + <Row gutter={24}> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.ID_ALIGNMENT)} + label="ID对齐任务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.VERTICAL_TRAIN)} + label="纵向联邦模型训练" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + </Row> + <Row gutter={24}> + <Col span={12}> + <Form.Item 
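+ /* Vertical FL renders one policy Select per action (ID alignment, train, eval,
+  * offline prediction, online serving, workflow), each bound to its own
+  * config.action_rules entry. */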
+ field={resetFiled(ProjectActionType.VERTICAL_EVAL)} + label="纵向联邦模型评估" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.VERTICAL_PRED)} + label="纵向联邦模型离线预测" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + </Row> + <Row gutter={24}> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.VERTICAL_SERVING)} + label="纵向联邦模型在线服务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.WORKFLOW)} + label="工作流任务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + </Row> + </> + ); +} +function renderTrustedTask() { + return ( + <Row gutter={24}> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.TEE_SERVICE)} + label="可信分析服务" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + <Col span={12}> + <Form.Item + field={resetFiled(ProjectActionType.TEE_RESULT_EXPORT)} + label="可信分析服务结果导出" + rules={[{ required: true }]} + > + {taskAuthorization()} + </Form.Item> + </Col> + </Row> + ); +} +function taskAuthorization() { + return ( + <Select options={options}> + {options.map((option) => ( + <Option value={option.value} key={option.value}> + {option.label} + </Option> + ))} + </Select> + ); +} +export default ActionRules; diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/index.module.less b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/index.module.less new file mode 100644 index 000000000..389a25129 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/index.module.less @@ -0,0 +1,26 @@ +.container{ + display: flex; + align-items: center; + flex-direction: column; +} +.title_container{ + margin-top: 40px; + margin-bottom: 20px; +} +.title_content{ + font-size: 14px; + font-weight: 500; + color: #000000; + margin: 10px 0px; +} +.popover_content{ + font-size: 12px; +} +.btn_container{ + margin-top: 40px; +} +.btn_content{ + padding: 0px 60px; +} + + diff --git a/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/index.tsx b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/index.tsx new file mode 100644 index 000000000..28798ccaf --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/StepTwoPartner/index.tsx @@ -0,0 +1,55 @@ +import React, { FC, useEffect } from 'react'; +import { useHistory } from 'react-router'; +import { ProjectTaskType } from 'typings/project'; +import { useRecoilState } from 'recoil'; +import { projectCreateForm, ProjectCreateForm } from 'stores/project'; +import { Button, Form } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import ActionRules from './ActionRules'; + +import styles from './index.module.less'; + +const StepTwoPartner: FC<{ + onFormValuesChange?: () => void; +}> = () => { + const history = useHistory(); + const [form] = Form.useForm(); + const [projectForm, setProjectForm] = useRecoilState<ProjectCreateForm>(projectCreateForm); + + useEffect(() => { + if (!projectForm.config.abilities || !projectForm.name) { + history.push('/projects/create/config'); + } + }); + + return ( + <div className={styles.container}> + <div> + <Form form={form} initialValues={projectForm} onSubmit={goStepThree} layout="vertical"> + <ActionRules 
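+ /* The ability chosen in step one decides which authorization matrix is shown;
+  * on submit the chosen rules are merged into the recoil store before routing on. */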
taskType={projectForm.config.abilities?.[0] as ProjectTaskType} /> + <Form.Item> + <GridRow className={styles.btn_container} gap={10} justify="center"> + <Button className={styles.btn_content} type="primary" htmlType="submit"> + 下一步 + </Button> + + <Button onClick={() => history.goBack()}>上一步</Button> + </GridRow> + </Form.Item> + </Form> + </div> + </div> + ); + function goStepThree(values: any) { + setProjectForm({ + ...projectForm, + config: { + ...projectForm.config, + ...values.config, + }, + }); + history.push(`/projects/create/partner`); + } +}; + +export default StepTwoPartner; diff --git a/web_console_v2/client/src/views/Projects/CreateProject/index.module.less b/web_console_v2/client/src/views/Projects/CreateProject/index.module.less new file mode 100644 index 000000000..a116f3575 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/CreateProject/index.module.less @@ -0,0 +1,11 @@ +.container{ + overflow-x: hidden; +} +.step_container{ + width: 500px; +} +.form_area{ + flex: 1; + margin-top: 12px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/Projects/CreateProject/index.tsx b/web_console_v2/client/src/views/Projects/CreateProject/index.tsx index 6e582f942..f1522f49e 100644 --- a/web_console_v2/client/src/views/Projects/CreateProject/index.tsx +++ b/web_console_v2/client/src/views/Projects/CreateProject/index.tsx @@ -1,28 +1,101 @@ -import React, { ReactElement } from 'react'; -import ProjectForm from '../ProjectForm'; -import { createProject } from 'services/project'; -import { CreateProjectPayload } from 'typings/project'; +import React, { ReactElement, useEffect, useState } from 'react'; +import { useUnmount } from 'react-use'; +import { Redirect, Route, useHistory, useParams } from 'react-router'; +import { useResetCreateForm } from 'hooks/project'; +import { createPendingProject } from 'services/project'; + +import { Message as message, Grid, Steps } from '@arco-design/web-react'; import SharedPageLayout from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; -import { useTranslation } from 'react-i18next'; -import { useHistory } from 'react-router'; +import StepOneConfig from './StepOneConfig'; +import StepTwoPartner from './StepTwoPartner'; +import StepThreeAuthorize from './StepThreeAuthorize'; + +import styles from './index.module.less'; + +const { Step } = Steps; +const { Row } = Grid; + +enum CreateSteps { + config, + authorize, + partner, +} function CreateProject(): ReactElement { - const { t } = useTranslation(); const history = useHistory(); + const { step } = useParams<{ step: keyof typeof CreateSteps | undefined }>(); + const [currentStep, setStep] = useState(CreateSteps[step || 'config']); + const [isFormValueChanged, setIsFormValueChanged] = useState(false); + + const reset = useResetCreateForm(); + + useUnmount(() => { + reset(); + }); + + useEffect(() => { + setStep(CreateSteps[step || 'config']); + }, [step]); + + if (!step) { + return <Redirect to={`/projects/create/config`} />; + } return ( - <SharedPageLayout - title={<BackButton onClick={() => history.goBack()}>{t('menu.label_project')}</BackButton>} - > - <ProjectForm onSubmit={onSubmit} /> - </SharedPageLayout> + <div className={styles.container}> + <SharedPageLayout + title={ + <BackButton + onClick={() => history.replace('/projects')} + isShowConfirmModal={isFormValueChanged} + > + 工作区管理 + </BackButton> + } + centerTitle="创建工作区" + > + <Row justify="center"> + <div className={styles.step_container}> + <Steps + current={currentStep + 1} + 
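+ /* Steps is displayed 1-indexed while the CreateSteps enum starts at 0, hence the +1. */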
style={{ maxWidth: 780, margin: '0 auto' }} + size="small" + > + <Step title="全局配置" /> + <Step title="本方授权策略" /> + <Step title="邀请合作伙伴" /> + </Steps> + </div> + </Row> + <section className={styles.form_area}> + <Route + path={`/projects/create/config`} + exact + render={() => <StepOneConfig onFormValueChange={onFormValueChange} />} + /> + <Route path={'/projects/create/authorize'} exact render={() => <StepTwoPartner />} /> + <Route + path={`/projects/create/partner`} + exact + render={() => <StepThreeAuthorize onSubmit={onSubmit} />} + /> + </section> + </SharedPageLayout> + </div> ); - async function onSubmit(payload: CreateProjectPayload) { + async function onSubmit(payload: any) { try { - await createProject(payload); - } catch (error) { - throw error; + await createPendingProject(payload); + message.success('创建成功!'); + history.push('/projects?project_list_type=pending'); + } catch (error: any) { + message.error(error.message); + } + } + function onFormValueChange() { + if (!isFormValueChanged) { + setIsFormValueChanged(true); } } } diff --git a/web_console_v2/client/src/views/Projects/CreateTime.tsx b/web_console_v2/client/src/views/Projects/CreateTime.tsx index 13638caa3..693039d51 100644 --- a/web_console_v2/client/src/views/Projects/CreateTime.tsx +++ b/web_console_v2/client/src/views/Projects/CreateTime.tsx @@ -1,23 +1,22 @@ -import React, { ReactElement } from 'react'; -import styled, { CSSProperties } from 'styled-components'; +import React, { ReactElement, CSSProperties } from 'react'; import { formatTimestamp } from 'shared/date'; -const Container = styled.div` - flex-shrink: 0; - padding-right: 16px; - color: var(--gray7); - font-size: 12px; - line-height: 40px; -`; +import styles from './index.module.less'; +import classNames from 'classnames'; interface CreateTimeProps { time: number; style?: CSSProperties; + className?: string; } -function CreateTime({ time, style }: CreateTimeProps): ReactElement { +function CreateTime({ time, style, className }: CreateTimeProps): ReactElement { const _time = formatTimestamp(time); - return <Container style={style}>{_time}</Container>; + return ( + <div className={classNames(styles.create_time_container, className)} style={style}> + {_time} + </div> + ); } export default CreateTime; diff --git a/web_console_v2/client/src/views/Projects/EditProject/index.module.less b/web_console_v2/client/src/views/Projects/EditProject/index.module.less new file mode 100644 index 000000000..c70d64c39 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/EditProject/index.module.less @@ -0,0 +1,7 @@ +.container { + overflow-x: hidden; +} + +.spin_container { + min-height: 500px; +} diff --git a/web_console_v2/client/src/views/Projects/EditProject/index.tsx b/web_console_v2/client/src/views/Projects/EditProject/index.tsx index f6da254b2..c5d693df0 100644 --- a/web_console_v2/client/src/views/Projects/EditProject/index.tsx +++ b/web_console_v2/client/src/views/Projects/EditProject/index.tsx @@ -1,71 +1,86 @@ -import React, { ReactElement } from 'react'; -import ProjectForm from '../ProjectForm'; -import { Spin } from 'antd'; +import React, { ReactElement, useState, useMemo } from 'react'; +import { useQuery } from 'react-query'; import { useHistory, useParams } from 'react-router-dom'; import { getProjectDetailById, updateProject } from 'services/project'; -import { CertificateConfigType, UpdateProjectPayload } from 'typings/project'; +import { UpdateProjectPayload } from 'typings/project'; import { ProjectFormInitialValues } from 'typings/project'; -import { useQuery } from 'react-query'; -import styled from 'styled-components'; -import 
{ unwrapDomainName } from 'shared/project'; +import { Message as message, Spin } from '@arco-design/web-react'; import SharedPageLayout from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; -import { useTranslation } from 'react-i18next'; +import StepOneConfig from '../CreateProject/StepOneConfig'; +import { useReloadProjectList } from 'hooks/project'; -const SpinContainer = styled(Spin)` - min-height: 500px; -`; +import styles from './index.module.less'; function EditProject(): ReactElement { - const { t } = useTranslation(); const history = useHistory(); const { id } = useParams<{ id: string }>(); + const reloadList = useReloadProjectList(); + const [isFormValueChanged, setIsFormValueChanged] = useState(false); const projectQuery = useQuery(['getProjectDetail', id], () => getProjectDetailById(id), { cacheTime: 1, refetchOnWindowFocus: false, }); - const initialValues = { - certificateConfigType: CertificateConfigType.BackendConfig, - }; + const initialValues = {}; const project = projectQuery.data?.data; + const projectDetail = useMemo(() => { + return projectQuery.data?.data; + }, [projectQuery]); if (project) { - const participant = project.config.participants[0]; - Object.assign(initialValues, { name: project.name, - participantName: participant.name, - participantUrl: participant.url, - participantDomainName: unwrapDomainName(participant.domain_name), comment: project.comment, - token: project.config.token, - variables: project.config.variables || [], + token: project?.token || '', + variables: project?.config?.variables || [], + participants: project.participants || '', + abilities: project?.config?.abilities || [], + action_rules: project.config?.action_rules || {}, }); } - return ( - <SpinContainer spinning={projectQuery.isLoading}> - <SharedPageLayout - title={<BackButton onClick={() => history.goBack()}>{t('menu.label_project')}</BackButton>} - > - {project && !projectQuery.isFetching && ( - <ProjectForm - onSubmit={onSubmit} - isEdit - initialValues={initialValues as ProjectFormInitialValues} - /> - )} - </SharedPageLayout> - </SpinContainer> + <div className={styles.container}> + <Spin className={styles.spin_container} loading={projectQuery.isLoading}> + <SharedPageLayout + title={ + <BackButton + onClick={() => history.replace(`/projects`)} + isShowConfirmModal={isFormValueChanged} + > + 工作区管理 + </BackButton> + } + centerTitle="编辑工作区" + > + {project && !projectQuery.isFetching && ( + <StepOneConfig + isEdit={true} + isLeftLayout={true} + onEditFinish={onSubmit} + initialValues={projectDetail as ProjectFormInitialValues} + onFormValueChange={onFormValueChange} + /> + )} + </SharedPageLayout> + </Spin> + </div> ); async function onSubmit(payload: UpdateProjectPayload) { try { await updateProject(project!.id, payload); - } catch (error) { - throw error; + message.success('编辑成功!'); + reloadList(); + history.push('/projects'); + } catch (error: any) { + message.error(error.message); + } + } + function onFormValueChange() { + if (!isFormValueChanged) { + setIsFormValueChanged(true); } } } diff --git a/web_console_v2/client/src/views/Projects/ProjectDetail/DetailHeader/index.module.less b/web_console_v2/client/src/views/Projects/ProjectDetail/DetailHeader/index.module.less new file mode 100644 index 000000000..d273c6bd4 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectDetail/DetailHeader/index.module.less @@ -0,0 +1,42 @@ +@import '~styles/mixins.less'; +.avatar_container{ + .MixinSquare(44px); + background-color: var(--primaryColor); 
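+ /* The avatar letter is drawn purely in CSS: callers set data-name (for example
+  * <div className={styles.avatar_container} data-name="P" />) and the ::before
+  * rule below prints it via content: attr(data-name). */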
+ color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + + &::before { + content: attr(data-name); + line-height: 44px; + font-weight: bold; + } +} +.project_name{ + margin-bottom: 0; + font-size: 16px; + height: 24px; + font-weight: 600; +} +.comment{ + display: block; + font-size: 12px; + line-height: 18px; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + max-width: 400px; + color: var(--textColorSecondary); +} +.popover_table { + margin: -12px -16px !important; +} +.project_progress{ + width: 60px; +} +.variables_color{ + color: rgb(var(--primary-6)); + cursor: pointer; +} + diff --git a/web_console_v2/client/src/views/Projects/ProjectDetail/DetailHeader/index.tsx b/web_console_v2/client/src/views/Projects/ProjectDetail/DetailHeader/index.tsx new file mode 100644 index 000000000..1307ecced --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectDetail/DetailHeader/index.tsx @@ -0,0 +1,229 @@ +import React, { ReactElement, useRef } from 'react'; +import { useGetCurrentPureDomainName } from 'hooks'; +import ProjectConnectionStatus, { ExposedRef } from '../../ConnectionStatus'; +import GridRow from 'components/_base/GridRow'; +import { Button, Grid, Popover, Space, Table, Tag, Tooltip } from '@arco-design/web-react'; +import ProjectMoreActions from 'views/Projects/ProjectMoreActions'; +import PropertyList from 'components/PropertyList'; +import { formatTimestamp } from 'shared/date'; +import { ParticipantType } from 'typings/participant'; +import { + Project, + ProjectListType, + ProjectStateType, + ProjectTicketStatus, + RoleType, + ProjectBlockChainType, + ProjectTaskType, + ProjectActionType, + ProjectAbilityType, +} from 'typings/project'; +import { CONSTANTS } from 'shared/constants'; +import { + PARTICIPANT_TYPE_TAG_MAPPER, + ProjectProgress, + PROJECT_ABILITY_LABEL_MAPPER, + PROJECT_TASK_LABEL_MAPPER, + resetAbilitiesTableData, +} from '../../shard'; + +import styles from './index.module.less'; + +interface Props { + project: Project; + projectListType: ProjectListType; + onDeleteProject: (projectId: ID, projectListType: ProjectListType) => void; +} + +const { Row } = Grid; +const variableColumns = [ + { + title: 'name', + dataIndex: 'name', + }, + { + title: 'value', + dataIndex: 'value', + }, +]; +const abilitiesColumns = [ + { + title: '能力', + dataIndex: 'ability', + render: (value: ProjectActionType) => PROJECT_TASK_LABEL_MAPPER?.[value], + }, + { + title: '授权策略', + dataIndex: 'rule', + render: (value: ProjectAbilityType) => PROJECT_ABILITY_LABEL_MAPPER?.[value], + }, +]; +const ABILITY_LABEL_MAPPER = { + [ProjectTaskType.ALIGN]: 'ID对齐', + [ProjectTaskType.HORIZONTAL]: '横向联邦学习', + [ProjectTaskType.VERTICAL]: '纵向联邦学习', + [ProjectTaskType.TRUSTED]: '可信分析服务', +}; + +function DetailHeader({ project, projectListType, onDeleteProject }: Props): ReactElement { + const myPureDomainName = useGetCurrentPureDomainName(); + + const projectConnectionStatusRef = useRef<ExposedRef>(null); + + const isLightClient = project?.participant_type === ParticipantType.LIGHT_CLIENT; + const isPendingProject = projectListType === ProjectListType.PENDING; + + const properties = [ + { label: '工作区ID', value: project?.id ?? CONSTANTS.EMPTY_PLACEHOLDER }, + { + label: '区块链存证 ', + value: project?.config?.support_blockchain + ? 
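+ /* Maps the boolean support_blockchain flag onto the OPEN/CLOSED display labels. */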
ProjectBlockChainType.OPEN + : ProjectBlockChainType.CLOSED, + }, + { + label: '合作伙伴类型', + value: ( + <Tag + color={ + PARTICIPANT_TYPE_TAG_MAPPER?.[project?.participant_type || ParticipantType.PLATFORM] + .color + } + size="small" + > + { + PARTICIPANT_TYPE_TAG_MAPPER?.[project?.participant_type || ParticipantType.PLATFORM] + .label + } + </Tag> + ), + }, + { + label: '连接状态', + value: + !isLightClient && project && !isPendingProject ? ( + <ProjectConnectionStatus ref={projectConnectionStatusRef} project={project} /> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + }, + { + label: '创建人', + value: project?.creator || project?.creator_username || CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + label: '创建时间', + value: project?.created_at + ? formatTimestamp(project.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + label: '环境变量', + value: project?.config?.variables?.length ? ( + <Popover + className={styles.popover_container} + content={ + <Table + className={styles.popover_table} + columns={variableColumns} + rowKey="name" + data={project.config.variables} + pagination={false} + /> + } + position="bottom" + > + <span className={styles.variables_color}>查看</span> + </Popover> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + }, + { + label: '能力规格', + value: project?.config?.abilities?.length ? ( + <Popover + className={styles.popover_container} + content={ + <Table + className={styles.popover_table} + columns={abilitiesColumns} + rowKey="ability" + data={resetAbilitiesTableData(project.config.action_rules)} + pagination={false} + /> + } + position="bottom" + > + <span className={styles.variables_color}> + {ABILITY_LABEL_MAPPER?.[project.config.abilities?.[0]]} + </span> + </Popover> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + ), + }, + ]; + + return ( + <> + <Row justify="space-between" align="center"> + <Row align="center" justify="space-between"> + <GridRow gap="12"> + <div + className={styles.avatar_container} + data-name={project?.name ? project.name.slice(0, 1) : CONSTANTS.EMPTY_PLACEHOLDER} + /> + <div> + <Space> + <h3 className={styles.project_name}>{project?.name || '....'}</h3> + <ProjectProgress + className={styles.project_progress} + ticketStatus={ + project?.state === ProjectStateType.FAILED + ? ProjectTicketStatus.FAILED + : project?.ticket_status + } + /> + </Space> + <Tooltip content={project?.comment}> + <small className={styles.comment}> + {project?.comment || CONSTANTS.EMPTY_PLACEHOLDER} + </small> + </Tooltip> + </div> + </GridRow> + </Row> + <GridRow gap="10" style={{ flexBasis: 'auto' }}> + <Button + size="small" + onClick={onCheckConnectionClick} + disabled={isLightClient || isPendingProject} + > + 检查连接 + </Button> + <GridRow> + <ProjectMoreActions + project={project} + projectListType={projectListType} + role={ + project?.participants_info?.participants_map?.[myPureDomainName]?.role ?? 
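+ /* Falls back to the coordinator role when our own domain is missing from
+  * participants_map, presumably for projects created before roles existed. */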
+ RoleType.COORDINATOR + } + onDeleteProject={onDeleteProject} + /> + </GridRow> + </GridRow> + </Row> + <PropertyList properties={properties} cols={4} /> + </> + ); + + function onCheckConnectionClick() { + if (projectConnectionStatusRef.current?.checkConnection) { + projectConnectionStatusRef.current.checkConnection(); + } + } +} + +export default DetailHeader; diff --git a/web_console_v2/client/src/views/Projects/ProjectDetail/ParticipantDetail/index.tsx b/web_console_v2/client/src/views/Projects/ProjectDetail/ParticipantDetail/index.tsx new file mode 100644 index 000000000..52662c444 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectDetail/ParticipantDetail/index.tsx @@ -0,0 +1,27 @@ +import React, { FC } from 'react'; + +import { useRecoilQuery } from 'hooks/recoil'; +import { participantListQuery } from 'stores/participant'; +import { Spin } from '@arco-design/web-react'; + +type Props = { + pureDomainName?: string; + loading?: boolean; + contentField: string; +}; + +const ParticipantDetail: FC<Props> = ({ pureDomainName, loading, contentField }) => { + const { isLoading, data } = useRecoilQuery(participantListQuery); + const participant = data?.find((item) => item.pure_domain_name === pureDomainName) as Record< + string, + any + >; + + return ( + <Spin loading={isLoading || loading}> + <span>{participant?.[contentField] ?? '-'}</span> + </Spin> + ); +}; + +export default ParticipantDetail; diff --git a/web_console_v2/client/src/views/Projects/ProjectDetail/index.module.less b/web_console_v2/client/src/views/Projects/ProjectDetail/index.module.less new file mode 100644 index 000000000..049f33dd2 --- --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectDetail/index.module.less @@ -0,0 +1,12 @@ +.participant_name_container{ + display: flex; + width: 100%; +} +.participant_name_content{ + display: inline-block; + flex: 1; + width: 80%; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} diff --git a/web_console_v2/client/src/views/Projects/ProjectDetail/index.tsx b/web_console_v2/client/src/views/Projects/ProjectDetail/index.tsx new file mode 100644 index 000000000..cc884691a --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectDetail/index.tsx @@ -0,0 +1,251 @@ +import React, { FC, useState, useMemo } from 'react'; +import { useParams, useHistory } from 'react-router'; +import { useQuery } from 'react-query'; +import { useGetCurrentPureDomainName } from 'hooks'; +import { useRecoilQuery } from 'hooks/recoil'; +import { + deletePendingProject, + deleteProject, + fetchPendingProjectList, + getProjectDetailById, +} from 'services/project'; +import { fetchWorkflowList } from 'services/workflow'; +import { Message, Spin, Table, Tabs, Tag } from '@arco-design/web-react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import DetailHeader from './DetailHeader'; +import BackButton from 'components/BackButton'; +import { VersionItem } from 'views/Partner/PartnerList/TableItem'; +import { getWorkflowTableColumns } from 'views/Workflows/WorkflowList/List'; +import PaticipantConnectionStatus, { + globalParticipantIdToConnectionStateMap, +} from 'views/Partner/PartnerList/ConnectionStatus'; +import { resetParticipantsInfo, PARTICIPANT_STATE_MAPPER } from '../shard'; +import { formatTimestamp } from 'shared/date'; +import { participantListQuery } from 'stores/participant'; +import { ProjectListType, ProjectStateType } from 'typings/project'; +import { ParticipantType, Participant } from 'typings/participant'; + +import 
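+ /* Detail data has two sources: COMPLETE projects come from getProjectDetailById,
+  * PENDING ones are picked out of fetchPendingProjectList by the route id. */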
styles from './index.module.less'; +import Modal from 'components/Modal'; + +const ProjectDetail: FC = () => { + const history = useHistory(); + const myPureDomainName = useGetCurrentPureDomainName(); + const { id, projectListType } = useParams<{ + id: string; + projectListType: ProjectListType; + }>(); + + const [activeKey, setActiveKey] = useState('participant'); + const { data, isLoading } = useQuery( + ['getProjectDetail', id, projectListType], + () => getProjectDetailById(id), + { + enabled: Boolean(id) && projectListType === ProjectListType.COMPLETE, + cacheTime: 1, + refetchOnWindowFocus: false, + }, + ); + const project = data?.data; + + const { data: pendingProjectList, isLoading: pendingProjectListLoading } = useQuery( + ['fetchPendingProjectList', projectListType], + () => fetchPendingProjectList({ page: 1, page_size: 0 }), + { enabled: Boolean(id) && projectListType === ProjectListType.PENDING }, + ); + + const workflowsQuery = useQuery(['fetchWorkflowList', id], () => + fetchWorkflowList({ project: id }), + ); + + const { isLoading: participantListLoading, data: participantList } = useRecoilQuery( + participantListQuery, + ); + + const pendingProjectDetail = useMemo(() => { + return pendingProjectList?.data.find((item) => item.id.toString() === id); + }, [pendingProjectList, id]); + + const completeParticipantList = useMemo(() => { + const participantListFromMap = resetParticipantsInfo( + project?.participants_info?.participants_map ?? + pendingProjectDetail?.participants_info?.participants_map ?? + {}, + participantList ?? [], + myPureDomainName, + ); + return participantListFromMap.length ? participantListFromMap : project?.participants; + }, [project, participantList, pendingProjectDetail, myPureDomainName]); + + const columns = [ + { + title: '合作伙伴名称', + dataIndex: 'name', + width: 160, + render: (value: any, record: any) => + record.pure_domain_name === myPureDomainName ? ( + <div className={styles.participant_name_container}> + <span className={styles.participant_name_content}>{value}</span> + <Tag>我方</Tag> + </div> + ) : ( + <span>{value}</span> + ), + sorter: (a: Participant, b: Participant) => a.name.localeCompare(b.name), + }, + { + title: '受邀状态', + dataIndex: 'state', + render: (val: ProjectStateType) => { + const { color, value } = PARTICIPANT_STATE_MAPPER?.[val ?? ProjectStateType.ACCEPTED]; + return <Tag color={color}>{value}</Tag>; + }, + }, + { + title: '连接状态', + dataIndex: 'status', + width: 150, + sorter: (a: Participant, b: Participant) => { + if ( + globalParticipantIdToConnectionStateMap[b.id] && + globalParticipantIdToConnectionStateMap[a.id] + ) { + return globalParticipantIdToConnectionStateMap[b.id].localeCompare( + globalParticipantIdToConnectionStateMap[a.id], + ); + } + return 1; + }, + render: (_: any, record: Participant) => { + const isLightClient = record.type === ParticipantType.LIGHT_CLIENT; + const isMy = myPureDomainName === record.pure_domain_name; + + if (isLightClient || isMy) { + return '-'; + } + + return <PaticipantConnectionStatus id={record.id} isNeedTip={true} isNeedReCheck={true} />; + }, + }, + { + title: '泛域名', + dataIndex: 'domain_name', + sorter: (a: Participant, b: Participant) => + (a.domain_name ?? '').localeCompare(b.domain_name ?? 
''), + render: (value: any) => value || '-', + }, + { + title: '主机号', + dataIndex: 'host', + render: (value: any) => value || '-', + }, + { + title: '端口号', + dataIndex: 'port', + render: (value: any) => value || '-', + }, + { + title: '版本号', + dataIndex: 'version', + render: (_: any, record: Participant) => { + return <VersionItem partnerId={record.id} />; + }, + }, + { + title: '合作伙伴描述', + dataIndex: 'comment', + ellipsis: true, + render: (value: any) => { + return value || '-'; + }, + }, + { + title: '最近活跃时间', + dataIndex: 'last_connected_at', + render: (value: any) => { + return value ? formatTimestamp(value) : '-'; + }, + sorter: (a: Participant, b: Participant) => + (a.last_connected_at || 0) - (b.last_connected_at || 0), + }, + ]; + return ( + <SharedPageLayout title={<BackButton onClick={() => history.goBack()}>工作区管理</BackButton>}> + <Spin loading={isLoading || pendingProjectListLoading || participantListLoading}> + <DetailHeader + project={projectListType === ProjectListType.COMPLETE ? project! : pendingProjectDetail!} + projectListType={projectListType} + onDeleteProject={handleDelete} + /> + <Tabs onChange={onTabChange} activeTab={activeKey}> + <Tabs.TabPane title="合作伙伴" key="participant"> + <Table + className="custom-table" + columns={columns} + data={completeParticipantList} + rowKey="pure_domain_name" + /> + </Tabs.TabPane> + {projectListType === ProjectListType.COMPLETE && ( + <Tabs.TabPane title="工作流任务" key="workflow"> + <Table + className="custom-table" + loading={isLoading && workflowsQuery.isLoading} + data={workflowsQuery.data?.data || []} + columns={getWorkflowTableColumns({ withoutActions: true, withoutFavour: true })} + rowKey="id" + /> + </Tabs.TabPane> + )} + </Tabs> + </Spin> + </SharedPageLayout> + ); + function onTabChange(val: string) { + setActiveKey(val); + } + async function handleDelete(projectId: ID, projectListType: ProjectListType) { + if (!projectId) { + return; + } + try { + const { data: workflowList } = await fetchWorkflowList({ + project: projectId, + states: ['running'], + page: 1, + pageSize: 1, + }); + if (Boolean(workflowList.length)) { + Message.info('有正在运行的任务,请终止任务后再删除'); + return; + } + Modal.delete({ + title: '确认删除工作区?', + content: '删除工作区将清空我方全部资源,请谨慎操作', + async onOk() { + if (projectListType === ProjectListType.PENDING) { + try { + await deletePendingProject(projectId); + Message.success('删除工作区成功'); + history.push('/projects?project_list_type=pending'); + } catch (error: any) { + Message.error(error.message); + } + } else { + try { + await deleteProject(projectId); + Message.success('删除工作区成功'); + history.push('/projects?project_list_type=complete'); + } catch (error: any) { + Message.error(error.message); + } + } + }, + }); + } catch (error: any) { + return error.message; + } + } +}; + +export default ProjectDetail; diff --git a/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/DetailBody.tsx b/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/DetailBody.tsx deleted file mode 100644 index b6adc2bb8..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/DetailBody.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import React, { ReactElement } from 'react'; -import { Tabs, Table } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { Project } from 'typings/project'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import PropertyList from 'components/PropertyList'; -import { useQuery } from 'react-query'; -import { fetchWorkflowList } from 'services/workflow'; -import { 
getWorkflowTableColumns } from 'views/Workflows/WorkflowList'; - -interface DetailBodyProps { - project: Project; -} - -function DetailBody({ project }: DetailBodyProps): ReactElement { - const { t } = useTranslation(); - - const workflowsQuery = useQuery(['fetchWorkflowList', project.id], () => - fetchWorkflowList({ project: project.id }), - ); - - const participant = project.config.participants[0]; - const properties = [ - { - label: t('project.participant_name'), - value: participant.name || '-', - }, - { - label: t('project.participant_domain'), - value: participant.domain_name, - }, - { - label: t('project.participant_url'), - value: participant.url, - }, - - { - label: t('project.remarks'), - value: project.comment || '-', - }, - ...project.config.variables.map((item) => ({ label: item.name, value: item.value })), - ]; - return ( - <ErrorBoundary> - <PropertyList initialVisibleRows={3} properties={properties} cols={2} labelWidth={105} /> - - <Tabs defaultActiveKey="workflow"> - <Tabs.TabPane tab={t('project.workflow')} key="workflow"> - <Table - loading={workflowsQuery.isLoading} - dataSource={workflowsQuery.data?.data || []} - columns={getWorkflowTableColumns({ withoutActions: true })} - /> - </Tabs.TabPane> - <Tabs.TabPane tab={t('project.mix_dataset')} key="dataset"> - <Table /> - </Tabs.TabPane> - <Tabs.TabPane tab={t('project.model')} key="model"> - <Table /> - </Tabs.TabPane> - <Tabs.TabPane tab="API" key="api"> - <Table /> - </Tabs.TabPane> - </Tabs> - </ErrorBoundary> - ); -} - -export default DetailBody; diff --git a/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/DetailHeader.tsx b/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/DetailHeader.tsx deleted file mode 100644 index 4d0ba97ad..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/DetailHeader.tsx +++ /dev/null @@ -1,57 +0,0 @@ -import React, { ReactElement } from 'react'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { useHistory } from 'react-router-dom'; -import { Project } from 'typings/project'; -import ProjectConnectionStatus from '../ConnectionStatus'; -import { useCheckConnection } from 'hooks/project'; -import GridRow from 'components/_base/GridRow'; -import { Button, Row } from 'antd'; -import { Command, Workbench } from 'components/IconPark'; - -const RowContainer = styled(Row)` - padding-right: 28px; -`; - -const ProjectName = styled.div` - font-weight: 500; - font-size: 20px; - line-height: 28px; - color: var(--textColor); -`; - -interface Props { - project: Project; -} - -function DetailHeader({ project }: Props): ReactElement { - const { t } = useTranslation(); - const history = useHistory(); - const [status, checkConnection] = useCheckConnection(project, { - refetchOnWindowFocus: false, - refetchInterval: 10 * 60 * 1000, // 10min - }); - - return ( - <RowContainer justify="space-between" align="middle"> - <GridRow gap="10"> - <ProjectName>{project.name}</ProjectName> - <ProjectConnectionStatus status={status} tag /> - </GridRow> - <GridRow gap="10"> - <Button size="small" icon={<Command />} onClick={checkConnection as any}> - {t('project.check_connection')} - </Button> - <Button - size="small" - icon={<Workbench />} - onClick={() => history.push(`/workflows/initiate/basic?project=${project.id}`)} - > - {t('project.create_work_flow')} - </Button> - </GridRow> - </RowContainer> - ); -} - -export default DetailHeader; diff --git a/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/index.tsx 
b/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/index.tsx deleted file mode 100644 index 60670e07e..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectDetailDrawer/index.tsx +++ /dev/null @@ -1,38 +0,0 @@ -import React, { ReactElement } from 'react'; -import { Drawer } from 'antd'; -import { Project } from 'typings/project'; -import DetailBody from './DetailBody'; -import DetailHeader from './DetailHeader'; -import { Close } from 'components/IconPark'; -import IconButton from 'components/IconButton'; - -interface DetailProps { - visible: boolean; - onClose: () => void; - project?: Project; -} - -function CloseButton(): ReactElement { - return <IconButton icon={<Close />} />; -} - -function ProjectDetailDrawer({ visible, onClose, project }: DetailProps) { - if (!project) return null; - - return ( - <Drawer - placement="right" - closable={true} - width={880} - zIndex={1999} - closeIcon={<CloseButton />} - visible={visible} - onClose={onClose} - title={<DetailHeader project={project} />} - > - <DetailBody project={project} /> - </Drawer> - ); -} - -export default ProjectDetailDrawer; diff --git a/web_console_v2/client/src/views/Projects/ProjectForm/Certificate.tsx b/web_console_v2/client/src/views/Projects/ProjectForm/Certificate.tsx deleted file mode 100644 index 1b8636494..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectForm/Certificate.tsx +++ /dev/null @@ -1,89 +0,0 @@ -import React, { FC, useState } from 'react'; -import { useTranslation } from 'react-i18next'; -import { readAsBinaryStringFromFile } from 'shared/file'; -import styled from 'styled-components'; -import { CertificateConfigType } from 'typings/project'; -import { Radio } from 'antd'; -import ReadFile from 'components/ReadFile'; -import classNames from 'classnames'; -import { MixinCommonTransition } from 'styles/mixins'; - -const { Upload, BackendConfig } = CertificateConfigType; - -const UploadContainer = styled.div` - ${MixinCommonTransition(['max-height', 'opacity', 'padding-top'])}; - padding-top: 15px; - max-height: 400px; - will-change: max-height; - - &.is-hidden { - padding-top: 0; - max-height: 0; - opacity: 0; - } -`; - -type Props = { - value?: string | null; - disabled?: boolean; - isEdit?: boolean; - onChange?: (v: string) => void; - onTypeChange?: (v: CertificateConfigType) => void; -}; -const Certificate: FC<Props> = ({ value, isEdit, onChange, onTypeChange, disabled }) => { - const [type, setType] = useState<CertificateConfigType>( - isEdit ? (value ? 
Upload : BackendConfig) : Upload, - ); - const [internalVal, setInternalVal] = useState<string>(); - const { t } = useTranslation(); - - return ( - <div> - <Radio.Group - value={type} - options={[ - { - label: t('project.upload_certificate'), - value: Upload, - }, - { - label: t('project.backend_config_certificate'), - value: BackendConfig, - }, - ]} - optionType="button" - onChange={onTypeChangeInternal} - disabled={isEdit || disabled} - /> - <UploadContainer className={classNames({ 'is-hidden': type !== Upload || isEdit })}> - <ReadFile - disabled={isEdit || disabled} - accept=".gz" - reader={readAsBinaryStringFromFile} - value={internalVal} - onChange={onFileChange as any} - /> - </UploadContainer> - </div> - ); - - function onFileChange(val: string) { - onChange && onChange(val); - setInternalVal(val); - } - function onTypeChangeInternal(event: any) { - const val = event.target.value; - setType(val); - onTypeChange && onTypeChange(val); - - if (val === BackendConfig) { - onChange && onChange(null as any); - // HACK WARNING: Wait for the UploadContainer shrink animation to finish, - // then it's safe to set ReadFile value to null, - // otherwise ReadFile's expand animation will break container's shrink animation - setTimeout(() => setInternalVal(null as any), 200); - } - } -}; - -export default Certificate; diff --git a/web_console_v2/client/src/views/Projects/ProjectForm/EnvVariablesForm.tsx b/web_console_v2/client/src/views/Projects/ProjectForm/EnvVariablesForm.tsx deleted file mode 100644 index 313695c7e..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectForm/EnvVariablesForm.tsx +++ /dev/null @@ -1,203 +0,0 @@ -import React, { FC, useCallback, useLayoutEffect, useRef } from 'react'; -import styled from 'styled-components'; -import { Form, Input, Button, Row, Col } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { useToggle } from 'react-use'; -import { CaretDown, Delete, Plus } from 'components/IconPark'; -import { MixinCommonTransition } from 'styles/mixins'; -import { FormInstance } from 'antd/lib/form'; -import { convertToUnit } from 'shared/helpers'; -import { useSubscribe } from 'hooks'; - -const Container = styled.div` - margin-top: 30px; -`; -const Header = styled.div` - margin-bottom: 20px; -`; -const Heading = styled.h3` - ${MixinCommonTransition()} - margin-bottom: 0; - font-size: 16px; - font-weight: 500; - line-height: 24px; - color: var(--gray10); - - &[data-folded='true'] { - opacity: 0; - transform: translateX(30px); - } -`; -const Toggler = styled.div` - display: inline-flex; - - align-items: center; - font-size: 14px; - line-height: 1; - color: var(--arcoblue6); - cursor: pointer; - user-select: none; - - > .anticon { - ${MixinCommonTransition()} - margin-left: 5px; - } - - &[data-folded='false'] { - > .anticon { - transform: rotate(-180deg); - } - } -`; -const NoVariables = styled(Form.Item)` - color: var(--textColorSecondary); -`; -const AddButton = Button; -const ListContainer = styled.div` - ${MixinCommonTransition()} - width: calc(var(--form-width, 500px) * 2); - overflow: hidden; - - &[data-folded='true'] { - opacity: 0; - overflow: hidden; - } -`; -const RemoveButton = styled(Button)` - position: absolute; - right: 0; -`; - -export const VARIABLES_FIELD_NAME = 'variables'; -export const VARIABLES_ERROR_CHANNEL = 'project.field_variables_error'; - -const EnvVariablesForm: FC<{ - layout: { - labelCol: { span: number }; - wrapperCol: { span: number }; - }; - formInstance?: FormInstance; - disabled?: boolean; -}> = ({ layout, 
disabled }) => { - const { t } = useTranslation(); - const [isFolded, toggleFolded] = useToggle(true); - const listInnerRef = useRef<HTMLDivElement>(); - const listContainerRef = useRef<HTMLDivElement>(); - - useSubscribe(VARIABLES_ERROR_CHANNEL, () => { - toggleFolded(false); - }); - - const setListContainerMaxHeight = useCallback( - (nextHeight: any) => { - listContainerRef.current!.style.maxHeight = convertToUnit(nextHeight); - }, - [listContainerRef], - ); - const getListInnerHeight = useCallback(() => { - return listInnerRef.current!.offsetHeight!; - }, [listInnerRef]); - - useLayoutEffect(() => { - const innerHeight = getListInnerHeight() + 30; - - if (isFolded) { - setListContainerMaxHeight(innerHeight); - // Q: Why read the inner height once before setting maxHeight to 0 for folding? - // A: Since we set maxHeight to 'initial' every time the unfold transition ends, it's important - // to re-set maxHeight to innerHeight before folding, we need a ${specific value} → 0 transition - // not an `initial` → 0 one, in which case the animation would be lost - getListInnerHeight(); - setListContainerMaxHeight(0); - } else { - setListContainerMaxHeight(innerHeight); - } - }, [isFolded, getListInnerHeight, setListContainerMaxHeight]); - - return ( - <Container> - <Header> - <Row align="middle"> - <Col {...layout.labelCol}> - <Heading data-folded={String(isFolded)}>{t('project.env_path_config')}</Heading> - </Col> - <Col {...layout.wrapperCol}> - <Toggler onClick={toggleFolded} data-folded={String(isFolded)}> - {t(isFolded ? 'project.show_env_path_config' : 'project.hide_env_path_config')} - <CaretDown /> - </Toggler> - </Col> - </Row> - </Header> - - <ListContainer - ref={listContainerRef as any} - data-folded={String(isFolded)} - onTransitionEnd={onFoldAnimationEnd} - > - <Form.List name={VARIABLES_FIELD_NAME}> - {(fields, { add, remove }) => ( - <div ref={listInnerRef as any}> - {fields.map((field, index) => ( - <Row key={field.fieldKey + index} align="top" style={{ position: 'relative' }}> - <Form.Item - style={{ flex: '0 0 50%' }} - {...field} - label="Name" - name={[field.name, 'name']} - fieldKey={[field.fieldKey, 'name']} - rules={[{ required: true, message: t('project.msg_var_name') }]} - > - <Input placeholder="name" disabled={disabled} /> - </Form.Item> - - <Form.Item - labelCol={{ span: 4 }} - wrapperCol={{ span: 18 }} - style={{ flex: '0 0 50%' }} - label="Value" - {...field} - name={[field.name, 'value']} - fieldKey={[field.fieldKey, 'value']} - rules={[{ required: true, message: t('project.msg_var_value') }]} - > - <Input.TextArea placeholder="value" disabled={disabled} /> - </Form.Item> - - <RemoveButton - size="small" - icon={<Delete />} - shape="circle" - type="text" - onClick={() => remove(field.name)} - /> - </Row> - ))} - {/* Empty placeholder */} - {fields.length === 0 && ( - <NoVariables wrapperCol={{ offset: 4 }}>{t('project.msg_no_var_yet')}</NoVariables> - )} - - <Form.Item wrapperCol={{ offset: 4 }}> - {/* DO NOT simplify `() => add()` to `add`, it will pollute form value with $event */} - <AddButton type="primary" size="small" icon={<Plus />} onClick={() => add()}> - {t('project.add_parameters')} - </AddButton> - </Form.Item> - </div> - )} - </Form.List> - </ListContainer> - </Container> - ); - - function onFoldAnimationEnd(_: React.TransitionEvent) { - if (!isFolded) { - // Because the user can adjust the list's inner height by resizing a value textarea or adding/removing variables, - // we MUST set the container's maxHeight to 'initial' after unfolding (after which the user can interact) - 
listContainerRef.current!.style.maxHeight = 'initial'; - } - } -}; - -export default EnvVariablesForm; diff --git a/web_console_v2/client/src/views/Projects/ProjectForm/SecondaryForm.tsx b/web_console_v2/client/src/views/Projects/ProjectForm/SecondaryForm.tsx deleted file mode 100644 index d4cabd32e..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectForm/SecondaryForm.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; - -const Container = styled.div` - background-color: white; - margin-bottom: 20px; - border-radius: 4px; -`; -const Heading = styled.h3` - margin-bottom: 0; - font-weight: 600; - font-size: 16px; - line-height: 24px; - color: var(--gray10); -`; -const Body = styled.div` - width: var(--form-width, 500px); - margin-top: 32px; -`; - -const SecondaryForm: FC<{ title: string }> = ({ title, children }) => { - return ( - <Container> - <Heading>{title}</Heading> - <Body>{children}</Body> - </Container> - ); -}; - -export default SecondaryForm; diff --git a/web_console_v2/client/src/views/Projects/ProjectForm/index.tsx b/web_console_v2/client/src/views/Projects/ProjectForm/index.tsx deleted file mode 100644 index c02e5786a..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectForm/index.tsx +++ /dev/null @@ -1,308 +0,0 @@ -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Form, Input, Button, message, Modal } from 'antd'; -import { ExclamationCircleOutlined } from '@ant-design/icons'; -import { useTranslation } from 'react-i18next'; -import SecondaryForm from './SecondaryForm'; -import EnvVariablesForm, { - VARIABLES_FIELD_NAME, - VARIABLES_ERROR_CHANNEL, -} from './EnvVariablesForm'; -import { CertificateConfigType } from 'typings/project'; -import { - ProjectFormInitialValues, - CreateProjectPayload, - UpdateProjectPayload, - Participant, -} from 'typings/project'; -import { useHistory } from 'react-router-dom'; -import GridRow from 'components/_base/GridRow'; -import i18n from 'i18n'; -import { useReloadProjectList } from 'hooks/project'; -import ip from 'ip-port-regex'; -import Certificate from './Certificate'; -import { DOMAIN_PREFIX, DOMAIN_SUFFIX, wrapWithDomainName } from 'shared/project'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import { FormHeader } from 'components/SharedPageLayout'; - -const Container = styled.div` - flex: 1; -`; -const StyledForm = styled(Form)` - --form-width: 500px; - - display: grid; - grid-auto-rows: auto 1fr auto; - - > .form-title { - margin-bottom: 24px; - font-size: 27px; - line-height: 36px; - } - - > .ant-space { - display: flex; - } - - > .ant-form-item { - &:last-child { - margin-bottom: 0; - } - } -`; - -const layout = { - labelCol: { span: 8 }, - wrapperCol: { span: 16 }, -}; - -const SubmitContainer = styled(Form.Item)` - background-color: white; - padding: 24px; - margin-top: 14px; - border-radius: 4px; - width: 100%; -`; - -const defaultInitialValues: ProjectFormInitialValues = { - certificateConfigType: CertificateConfigType.Upload, - name: '', - participantName: '', - participantUrl: '', - participantDomainName: '', - comment: '', - variables: [], -}; - -interface Props { - onSubmit: (payload: any) => Promise<void>; - isEdit?: boolean; - initialValues?: ProjectFormInitialValues; -} - -const ProjectForm: FC<Props> = ({ onSubmit, isEdit, initialValues }) => { - const { t } = useTranslation(); - const [form] = Form.useForm(); - const [loading, setLoading] = useState(false); - const 
[isCertRequired, setCertRequired] = useState(!isEdit); - const history = useHistory(); - const defaultValues: ProjectFormInitialValues = initialValues ?? defaultInitialValues; - - const reloadList = useReloadProjectList(); - const domainRules = [ - { required: true, message: t('project.msg_domian_required') }, - { pattern: /^[0-9a-z-]+$/g, message: t('project.msg_domian_invalid') }, - ]; - - return ( - <Container> - <FormHeader>{isEdit ? '编辑项目' : '创建项目'}</FormHeader> - <StyledForm - {...layout} - initialValues={defaultValues} - form={form} - colon={false} - onFinish={onFinish} - onFinishFailed={onFinishFailed} - scrollToFirstError - > - {/* Project Config */} - <SecondaryForm title={t('project.basic_information')}> - <Form.Item - hasFeedback - name="name" - label={t('project.name')} - rules={[{ required: true, message: t('project.name_message') }]} - > - <Input - name="name" - placeholder={t('project.name_placeholder')} - disabled={isEdit || loading} - /> - </Form.Item> - {/* FIXME: Enable Token input after API support */} - {/* <Form.Item - name="token" - label={t('project.label_token')} - rules={[ - { required: true, message: t('project.msg_token_required') }, - { pattern: /^[a-zA-Z0-9]{0,64}$/g, message: t('project.msg_token_invalid') }, - ]} - > - <Input placeholder={t('project.placeholder_token')} disabled={isEdit || loading} /> - </Form.Item> */} - </SecondaryForm> - - {/* Participant config */} - <SecondaryForm title={t('project.participant_information')}> - <Form.Item - hasFeedback - name="participantName" - label={t('project.participant_name')} - rules={[{ required: true, message: t('project.participant_name_message') }]} - > - <Input - name="participantName" - placeholder={t('project.participant_name_placeholder')} - disabled={loading} - /> - </Form.Item> - - <Form.Item - name="participantDomainName" - label={t('project.participant_domain')} - rules={domainRules} - > - <Input - name="participantDomainName" - addonBefore={DOMAIN_PREFIX} - addonAfter={DOMAIN_SUFFIX} - placeholder={t('project.placeholder_domain_name')} - disabled={isEdit || loading} - /> - </Form.Item> - - <Form.Item - hidden={isEdit && !isCertRequired && !Boolean(initialValues?.participantUrl)} - hasFeedback - name="participantUrl" - label={t('project.participant_url')} - rules={[ - { required: isCertRequired, message: t('project.participant_url_message') }, - { - validator(_, value) { - if (!isCertRequired) return Promise.resolve(); - - if (ip({ exact: true }).test(value)) { - return Promise.resolve(); - } else { - return Promise.reject(t('project.msg_ip_addr_invalid')); - } - }, - }, - ]} - > - <Input - name="participantUrl" - placeholder={t('project.placeholder_participant_url')} - disabled={isEdit || loading} - /> - </Form.Item> - - <Form.Item - name="certificate" - label={t('certificate')} - rules={[{ required: isCertRequired, message: t('project.upload_certificate_message') }]} - > - <Certificate - isEdit={isEdit} - onTypeChange={(val) => { - setCertRequired(val === CertificateConfigType.Upload); - }} - disabled={loading} - /> - </Form.Item> - - <Form.Item name="comment" label={t('project.remarks')}> - <Input.TextArea - rows={4} - style={{ resize: 'none' }} - name="comment" - disabled={loading} - placeholder={t('project.remarks_placeholder')} - /> - </Form.Item> - - <EnvVariablesForm layout={layout} formInstance={form} disabled={loading} /> - </SecondaryForm> - - <SubmitContainer> - <GridRow gap="16"> - <Button type="primary" loading={loading} onClick={onSubmitClick}> - {t('submit')} - </Button> - <Button 
onClick={onCancelClick}>{t('cancel')}</Button> - </GridRow> - </SubmitContainer> - </StyledForm> - </Container> - ); - - function backToList() { - history.push('/projects'); - } - function onCancelClick() { - Modal.confirm({ - title: i18n.t('project.msg_sure_2_cancel'), - icon: <ExclamationCircleOutlined />, - content: i18n.t('project.msg_effect_of_cancel'), - zIndex: Z_INDEX_GREATER_THAN_HEADER, - getContainer: 'body', - style: { - top: '30%', - }, - onOk: backToList, - }); - } - function onSubmitClick() { - form.submit(); - } - function onFinishFailed({ errorFields }: any) { - if ( - errorFields.some( - (item: any) => - item.name === VARIABLES_FIELD_NAME || item.name.includes(VARIABLES_FIELD_NAME), - ) - ) { - PubSub.publish(VARIABLES_ERROR_CHANNEL); - } - } - async function onFinish(data: any) { - if (!isEdit && data.certificates === '') { - form.scrollToField('certificateConfigType', { block: 'center' }); - return; - } - - setLoading(true); - - try { - let params: CreateProjectPayload | UpdateProjectPayload; - - if (isEdit) { - params = { - participant_name: data.participantName, - variables: data.variables ?? [], - comment: data.comment, - }; - await onSubmit(params); - } else { - let participants: Participant[] = []; - participants.push({ - name: data.participantName, - url: data.participantUrl, - domain_name: wrapWithDomainName(data.participantDomainName), - certificates: data.certificate || null, - }); - - params = { - name: data.name, - config: { - token: data.token || '', - participants, - variables: data.variables ?? [], - }, - comment: data.comment, - }; - await onSubmit(params); - } - message.success(isEdit ? i18n.t('project.edit_success') : i18n.t('project.create_success')); - reloadList(); - backToList(); - } catch (error) { - message.error(error.message); - } - setLoading(false); - } -}; - -export default ProjectForm; diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard.tsx b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard.tsx deleted file mode 100644 index ea86c877e..000000000 --- a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard.tsx +++ /dev/null @@ -1,152 +0,0 @@ -import React, { FC, ReactElement } from 'react'; -import styled from 'styled-components'; -import ProjectProp from './ProjectCardProp'; -import ProjectMoreActions from '../../ProjectMoreActions'; -import CreateTime from '../../CreateTime'; -import { Tooltip, Row } from 'antd'; -import { useTranslation } from 'react-i18next'; -import ProjectName from '../../ProjectName'; -import { useHistory } from 'react-router-dom'; -import { Project } from 'typings/project'; -import ProjectConnectionStatus from '../../ConnectionStatus'; -import { MixinCommonTransition, MixinFontClarity } from 'styles/mixins'; -import { Command, Workbench } from 'components/IconPark'; -import IconButton from 'components/IconButton'; -import { useCheckConnection } from 'hooks/project'; - -const CardContainer = styled.div` - ${MixinCommonTransition('box-shadow')} - - border: 1px solid var(--backgroundColorGray); - border-radius: 4px; - overflow: hidden; // Prevent card from expanding grid - - &:hover { - box-shadow: 0px 4px 10px var(--gray2); - } -`; -const CardHeader = styled.div` - display: flex; - height: 40px; - border-bottom: 1px solid var(--backgroundColorGray); - justify-content: space-between; - cursor: pointer; - - @supports (gap: 10px) { - gap: 10px; - } -`; - -const CardMain = styled.div` - display: flex; - padding: 25px 0; - cursor: pointer; - - 
.workflow-number { - ${MixinFontClarity()} - - font-size: 32px; - text-indent: 1px; - line-height: 1; - color: var(--textColorStrong); - } -`; -const CardFooter = styled(Row)` - padding: 12px 10px; - - .left { - flex: 1; - font-size: 12px; - line-height: 22px; - color: var(--gray7); - padding-left: 6px; - } - .right { - display: flex; - min-width: 80px; - justify-content: space-between; - } -`; - -interface CardProps { - item: Project; - onViewDetail: (project: Project) => void; -} - -type IconButtonProps = { - onClick: Function; -}; - -const CreateWorkflow: FC<IconButtonProps> = ({ onClick }) => { - const { t } = useTranslation(); - - return ( - <Tooltip title={t('project.create_work_flow')} placement="top"> - <IconButton type="text" onClick={onClick as any} icon={<Workbench />} circle /> - </Tooltip> - ); -}; - -const CheckConnection: FC<IconButtonProps> = ({ onClick }) => { - const { t } = useTranslation(); - - return ( - <Tooltip title={t('project.check_connection')} placement="top"> - <IconButton type="text" onClick={onClick as any} icon={<Command />} circle /> - </Tooltip> - ); -}; - -function Card({ item: project, onViewDetail }: CardProps): ReactElement { - const { t } = useTranslation(); - const history = useHistory(); - const [status, checkConnection] = useCheckConnection(project, { - refetchOnWindowFocus: false, - refetchInterval: 10 * 60 * 1000, // 10min - }); - - const participant = project.config.participants[0].name || '-'; - - return ( - <CardContainer> - <CardHeader onClick={viewDetail}> - <ProjectName text={project.name} /> - <CreateTime time={project.created_at} /> - </CardHeader> - - <CardMain onClick={viewDetail}> - <ProjectProp label={t('project.workflow_number')}> - <strong className="workflow-number">{project.num_workflow || 0}</strong> - </ProjectProp> - - <ProjectProp label={t('project.connection_status')}> - <ProjectConnectionStatus status={status} /> - </ProjectProp> - </CardMain> - - <CardFooter align="middle"> - <div className="left">{participant}</div> - <div className="right"> - <CheckConnection onClick={checkConnection} /> - <CreateWorkflow onClick={initiateWorkflow} /> - - <ProjectMoreActions - onEdit={() => { - history.push(`/projects/edit/${project.id}`); - }} - onViewDetail={viewDetail} - /> - </div> - </CardFooter> - </CardContainer> - ); - - function viewDetail() { - onViewDetail(project); - } - function initiateWorkflow() { - history.push(`/workflows/initiate/basic?project=${project.id}`); - } -} - -export default Card; diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard/index.module.less b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard/index.module.less new file mode 100644 index 000000000..72ac93376 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard/index.module.less @@ -0,0 +1,72 @@ +@import '~styles/mixins.less'; +.card_container{ + transition: box-shadow 0.4s cubic-bezier(0.4, 0, 0.2, 1) 0s; + border: 0px; + border-radius: 4px; + overflow: hidden; // Prevent card from expanding grid + background-color: #ffffff; + padding: 10px 20px; + + &:hover { + box-shadow: 0px 4px 10px rgb(var(--gray-6)); + } +} +.card_header{ + display: flex; + height: 40px; + justify-content: space-between; + cursor: pointer; + @supports (gap: 10px) { + gap: 10px; + } + .card_header_left{ + .MixinEllipsis(); + display: flex; + align-items: center; + } +} +.card_main{ + display: flex; + cursor: pointer; + font-size: 12px; + margin: 10px 0px; +} + +.card_footer{ + margin: 
10px 0px; + .card_footer_left{ + font-size: 12px; + line-height: 22px; + color: rgb(var(--gray-7)); + padding-left: 6px; + + } + .card_footer_right { + padding-right: 0px !important; + display:flex; + align-items: center; + justify-content: space-between; + + } + .progress_name{ + display: block; + margin-bottom: -10px; + font-weight: 400; + font-size: 12px; + line-height: 20px; + color: #1D2129; + + } + .participant_name{ + font-weight: 400; + font-size: 12px; + line-height: 20px; + color: var(--gray-7); + overflow: hidden; + flex-shrink:1; + white-space: nowrap; + text-overflow: ellipsis; + -webkit-line-clamp: 1; + -webkit-box-orient: vertical; + } +} diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard/index.tsx b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard/index.tsx new file mode 100644 index 000000000..486661db2 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCard/index.tsx @@ -0,0 +1,98 @@ +import React, { ReactElement } from 'react'; +import { useGetCurrentPureDomainName } from 'hooks'; +import { Tooltip, Grid, Tag, Space, Divider } from '@arco-design/web-react'; +import ProjectMoreActions from '../../../ProjectMoreActions'; +import CreateTime from '../../../CreateTime'; +import ProjectName from '../../../ProjectName'; +import { + Project, + ProjectListType, + ProjectStateType, + ProjectTicketStatus, + RoleType, +} from 'typings/project'; +import { ParticipantType } from 'typings/participant'; +import { getCoordinateName, PARTICIPANT_TYPE_TAG_MAPPER } from 'views/Projects/shard'; +import { ProjectProgress } from 'views/Projects/shard'; + +import styles from './index.module.less'; + +const { Row, Col } = Grid; + +interface CardProps { + item: Project; + projectListType: ProjectListType; + onViewDetail: (project: Project) => void; + onDeleteProject: (projectId: ID, projectListType: ProjectListType) => void; +} + +function Card({ + item: project, + onViewDetail, + projectListType, + onDeleteProject, +}: CardProps): ReactElement { + const myPureDomainName = useGetCurrentPureDomainName(); + const tagConfig = + PARTICIPANT_TYPE_TAG_MAPPER?.[project?.participant_type || ParticipantType.PLATFORM]; + + return ( + <div className={styles.card_container}> + <div className={styles.card_header} onClick={viewDetail}> + <div className={styles.card_header_left}> + <ProjectName text={project.name} /> + </div> + <ProjectMoreActions + project={project} + projectListType={projectListType} + role={ + project?.participants_info?.participants_map?.[myPureDomainName]?.role ?? + RoleType.COORDINATOR + } + onDeleteProject={onDeleteProject} + /> + </div> + + <div className={styles.card_main} onClick={viewDetail}> + {`${String(project.num_workflow || 0)}个任务`} + </div> + <div> + <Space split={<Divider type="vertical" />}> + <Tag color={tagConfig.color}>{tagConfig.label}</Tag> + <Tag color="gray">{`${ + project?.participants_info?.participants_map?.[myPureDomainName]?.role === + RoleType.COORDINATOR + ? '我方' + : getCoordinateName(project?.participants_info?.participants_map) ?? '我方' + }创建`}</Tag> + </Space> + </div> + <Row gutter={24} className={styles.card_footer} align="center"> + <Col span={6} className={styles.card_footer_left}> + <ProjectProgress + ticketStatus={ + project.state === ProjectStateType.FAILED + ? ProjectTicketStatus.FAILED + : project.ticket_status + } + /> + </Col> + <Col span={18} className={styles.card_footer_right}> + <Tooltip content={project.creator ?? 
project.creator_username}> + <div className={styles.participant_name}> + {project.creator ?? project.creator_username ?? '-'} + </div> + </Tooltip> + <Divider type="vertical" /> + <CreateTime className={styles.create_time} time={project.created_at} /> + </Col> + </Row> + </div> + ); + + function viewDetail() { + onViewDetail(project); + } +} + +export default Card; diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.module.less b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.module.less new file mode 100644 index 000000000..200ff959b --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.module.less @@ -0,0 +1,21 @@ +.div_container{ + display: flex; + flex-direction: column; + width: 50%; + height: 95px; + padding: 10px 20px; + color: rgb(var(--gray-7)); +} + +.label_container{ + margin-bottom: 15px; + font-size: 13px; + line-height: 22px; +} + +.value_container{ + flex: 1; + display: flex; + align-items: center; + color: var(--textColor); +} diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.tsx b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.tsx index ef1babfe2..77db0e891 100644 --- a/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectList/CardView/ProjectCardProp.tsx @@ -1,32 +1,13 @@ import React, { FC } from 'react'; -import styled from 'styled-components'; -const Container = styled.div` - display: flex; - flex-direction: column; - width: 50%; - height: 95px; - padding: 10px 16px; - color: var(--gray7); -`; -const Label = styled.label` - margin-bottom: 15px; - font-size: 13px; - line-height: 22px; -`; -const Value = styled.div` - flex: 1; - display: flex; - align-items: center; - color: var(--textColor); -`; +import styles from './ProjectCardProp.module.less'; const ProjectCardProp: FC<{ label: string }> = ({ label, children }) => { return ( - <Container> - <Label>{label}</Label> - <Value>{children}</Value> - </Container> + <div className={styles.div_container}> + <label className={styles.label_container}>{label}</label> + <div className={styles.value_container}>{children}</div> + </div> ); }; diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.module.less b/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.module.less new file mode 100644 index 000000000..35bce8f36 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.module.less @@ -0,0 +1,26 @@ +.card_container{ + --cols: 4; + display: grid; + grid-template-columns: repeat(var(--cols), 1fr); + align-items: start; + justify-content: space-between; + grid-gap: 24px 20px; + width: 100%; + + + @media screen and (min-width: 1920px) and (max-width: 2560px) { + --cols: 5; + } + + @media screen and (max-width: 1440px) { + --cols: 3; + } + + @media screen and (max-width: 1200px) { + --cols: 2; + } + + @media screen and (max-width: 750px) { + --cols: 1; + } +} diff --git a/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.tsx b/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.tsx index 33231a7a3..abd676fa7 100644 --- a/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectList/CardView/index.tsx @@ -1,47 +1,34 @@ import React, { ReactElement } from 'react'; -import styled from 
'styled-components'; -import { Project } from 'typings/project'; +import { Project, ProjectListType } from 'typings/project'; import ProjectCard from './ProjectCard'; -const Container = styled.div` - --cols: 4; - - display: grid; - grid-template-columns: repeat(var(--cols), 1fr); - align-items: start; - justify-content: space-between; - grid-gap: 24px 20px; - width: 100%; - - @media screen and (min-width: 1920px) and (max-width: 2560px) { - --cols: 5; - } - - @media screen and (max-width: 1440px) { - --cols: 3; - } - - @media screen and (max-width: 1200px) { - --cols: 2; - } - - @media screen and (max-width: 750px) { - --cols: 1; - } -`; +import styles from './index.module.less'; interface CardListProps { list: Project[]; onViewDetail: (project: Project) => void; + projectListType: ProjectListType; + onDeleteProject: (projectId: ID, projectListType: ProjectListType) => void; } -function CardList({ list, onViewDetail }: CardListProps): ReactElement { +function CardList({ + list, + onViewDetail, + projectListType, + onDeleteProject, +}: CardListProps): ReactElement { return ( - <Container> + <div className={styles.card_container}> {list.map((item, index) => ( - <ProjectCard item={item} key={'p-' + index} onViewDetail={onViewDetail} /> + <ProjectCard + item={item} + key={item.id} + onViewDetail={onViewDetail} + projectListType={projectListType} + onDeleteProject={onDeleteProject} + /> ))} - </Container> + </div> ); } diff --git a/web_console_v2/client/src/views/Projects/ProjectList/ProjectListFilters.tsx b/web_console_v2/client/src/views/Projects/ProjectList/ProjectListFilters.tsx index 1697cc1fb..82a1783b5 100644 --- a/web_console_v2/client/src/views/Projects/ProjectList/ProjectListFilters.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectList/ProjectListFilters.tsx @@ -1,69 +1,87 @@ import React, { ReactElement } from 'react'; -import styled from 'styled-components'; -import { Button, Input, Radio } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { useHistory } from 'react-router-dom'; +import { Input, Radio } from '@arco-design/web-react'; +import { IconApps, IconList } from '@arco-design/web-react/icon'; import store from 'store2'; import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; import { DisplayType } from 'typings/component'; +import { ProjectListType } from 'typings/project'; + +import styles from './index.module.less'; -const Container = styled.div` - height: 32px; - display: flex; - justify-content: space-between; -`; -const SearchInput = styled(Input.Search)` - display: inline-block; - width: 227px; -`; -const DisplaySelector = styled(Radio.Group)` - display: inline-block; - margin-left: 15px; -`; const ProjectListDisplayOptions = [ { - labelKey: 'project.display_card', + label: <IconApps />, value: 1, }, { - labelKey: 'project.display_list', + label: <IconList />, value: 2, }, ]; +const ProjectListTypeOptions = [ + { + label: '可用工作区', + value: 'complete', + }, + { + label: '待授权工作区', + value: 'pending', + }, +]; + interface Props { onDisplayTypeChange: (type: number) => void; + onProjectListTypeChange?: (type: ProjectListType) => void; + onSearch: (value: string) => void; + onChange?: (value: string) => void; + defaultSearchText?: string; + searchText?: string; + projectListType: ProjectListType; } -function Action({ onDisplayTypeChange }: Props): ReactElement { - const { t } = useTranslation(); - const history = useHistory(); - +function Action({ + onDisplayTypeChange, + onProjectListTypeChange, + onSearch, + onChange, + defaultSearchText, + 
projectListType, +}: Props): ReactElement { return ( - <Container> + <div className={styles.list_filter_container}> <div> - <Button - type="primary" - size="large" - onClick={() => { - history.push('/projects/create'); + <Radio.Group + defaultValue={projectListType ?? ProjectListType.COMPLETE} + options={ProjectListTypeOptions} + type="button" + onChange={(value) => { + onProjectListTypeChange?.(value); }} - > - {t('project.create')} - </Button> + /> </div> <div> - <SearchInput placeholder={t('project.search_placeholder')} /> - <DisplaySelector + <Input.Search + className={`custom-input ${styles.filter_content_input}`} + placeholder={'输入工作区名称关键词搜索'} + onSearch={onSearch} + defaultValue={defaultSearchText || undefined} + onChange={(value) => { + onChange?.(value); + }} + allowClear + /> + <Radio.Group + className={`custom-radio ${styles.filter_content_radio}`} defaultValue={store.get(LOCAL_STORAGE_KEYS.projects_display) || DisplayType.Card} - options={ProjectListDisplayOptions.map((i) => ({ label: t(i.labelKey), value: i.value }))} - optionType="button" - onChange={(e) => { - onDisplayTypeChange(e.target.value); + options={ProjectListDisplayOptions} + type="button" + onChange={(value) => { + onDisplayTypeChange(value); }} /> </div> - </Container> + </div> ); } diff --git a/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.module.less b/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.module.less new file mode 100644 index 000000000..595b589bb --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.module.less @@ -0,0 +1,11 @@ +.table_list_container{ + width: 100%; +} +.project_name{ + color: var(--primaryColor); + cursor: pointer; + font-size: var(--textFontSizePrimary); + &:hover { + color: var(--newPrimaryHover); + } +} diff --git a/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.tsx b/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.tsx index e25098db2..ae6886e3a 100644 --- a/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectList/TableView/index.tsx @@ -1,140 +1,222 @@ -import React, { ReactElement, useState } from 'react'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { Button, Table } from 'antd'; -import { ConnectionStatus, Project } from 'typings/project'; -import { useHistory } from 'react-router-dom'; -import ProjectConnectionStatus from 'views/Projects/ConnectionStatus'; -import Username from 'components/Username'; -import { formatTimestamp } from 'shared/date'; -import { checkConnection } from 'services/project'; +import React, { ReactElement, useRef } from 'react'; +import { useGetCurrentPureDomainName } from 'hooks'; +import { Button, Table, Tag } from '@arco-design/web-react'; +import ProjectConnectionStatus, { ExposedRef } from 'views/Projects/ConnectionStatus'; import ProjectMoreActions from 'views/Projects/ProjectMoreActions'; import GridRow from 'components/_base/GridRow'; - -const Container = styled.div` - width: 100%; -`; -const Name = styled.strong` - color: var(--primaryColor); - cursor: pointer; - font-weight: 500; - font-size: 13px; -`; +import { ParticipantType } from 'typings/participant'; +import { + Project, + ProjectBlockChainType, + ProjectListType, + ProjectStateType, + ProjectTicketStatus, + RoleType, +} from 'typings/project'; +import { formatTimestamp } from 'shared/date'; +import { ProjectProgress } from '../../shard'; 
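The rewritten TableView below drives each row's ProjectConnectionStatus through the imperative `ExposedRef` handle imported above, instead of lifting connection state into the table (see `projectConnectionStatusListRef` and the `ref={(ref) => ...}` callbacks further down). The component's implementation is not part of this patch, so the following is only a minimal sketch of the `forwardRef`/`useImperativeHandle` contract such a handle implies; the `Props` shape, the local `status` state, and the probe logic are assumptions, not the real code in views/Projects/ConnectionStatus.

```tsx
import React, { forwardRef, useImperativeHandle, useState } from 'react';

type ID = number | string; // assumed here; the web console declares ID as a global type

export type ExposedRef = {
  checkConnection: () => void;
};

type Props = {
  project: { id: ID }; // illustrative; the real component receives a full Project
};

// Hypothetical sketch of a ref-exposing status component, not the actual implementation.
const ConnectionStatusSketch = forwardRef<ExposedRef, Props>((props, ref) => {
  const [status, setStatus] = useState('waiting');

  // Parents keep an array of these handles and call
  // projectConnectionStatusListRef.current[index].checkConnection() on demand.
  useImperativeHandle(ref, () => ({
    checkConnection: () => {
      setStatus('checking');
      // ...probe the participant for props.project.id and update `status` here
    },
  }));

  return <span>{status}</span>;
});

export default ConnectionStatusSketch;
```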
+import { getCoordinateName, PARTICIPANT_TYPE_TAG_MAPPER } from '../../shard'; +import { CONSTANTS } from 'shared/constants'; +import styles from './index.module.less'; interface TableListProps { list: Project[]; onViewDetail: (project: Project) => void; + onParticipantTypeChange: (value: string[]) => void; + participantType?: string[]; + projectListType: ProjectListType; + onDeleteProject: (projectId: ID, projectListType: ProjectListType) => void; } -function TableList({ list, onViewDetail }: TableListProps): ReactElement { - const { t } = useTranslation(); - const history = useHistory(); +function TableList({ + list, + onViewDetail, + onParticipantTypeChange, + participantType, + projectListType, + onDeleteProject, +}: TableListProps): ReactElement { + const myDomainName = useGetCurrentPureDomainName(); - const [statuses, setStatuses] = useState( + const projectConnectionStatusListRef = useRef<ExposedRef[]>( list.map((_) => { - return ConnectionStatus.Waiting; + return { + checkConnection: () => {}, + }; }), ); const statefulList = list.map((item, index) => { - const onCheckConnectionClick = async () => { - try { - setProjectStatus(index, ConnectionStatus.Checking); - - const res = await checkConnection(item.id); - if (res.data.success) { - setProjectStatus(index, ConnectionStatus.Success); - } else { - setProjectStatus(index, ConnectionStatus.Failed); - } - } catch (error) { - setProjectStatus(index, ConnectionStatus.CheckFailed); - } - }; return { ...item, - onCheckConnectionClick, + onCheckConnectionClick: () => { + if ( + projectConnectionStatusListRef.current && + projectConnectionStatusListRef.current[index] && + projectConnectionStatusListRef.current[index].checkConnection + ) { + projectConnectionStatusListRef.current[index].checkConnection(); + } + }, }; }); const columns = [ { - title: t('project.name'), + title: '工作区名称', dataIndex: 'name', key: 'name', ellipsis: true, render: (name: string, record: Project) => { - return <Name onClick={() => onViewDetail(record)}>{name}</Name>; + return ( + <span className={styles.project_name} onClick={() => onViewDetail(record)}> + {name} + </span> + ); }, }, { - title: t('project.connection_status'), + title: '任务数', + dataIndex: 'num_workflow', + name: 'num_workflow', + width: 80, + render: (value: any) => value ?? 0, + }, + { + title: '区块链存证', + dataIndex: 'config.support_blockchain', + name: 'config.support_blockchain', + width: 100, + render: (value: boolean) => + value ? ProjectBlockChainType.OPEN : ProjectBlockChainType.CLOSED, + }, + { + title: '审批状态', + dataIndex: 'ticket_status', + name: 'ticket_status', + width: 100, + render: (value: any, record: any) => ( + <ProjectProgress + ticketStatus={ + record.state === ProjectStateType.FAILED ? 
ProjectTicketStatus.FAILED : value + } + /> + ), + }, + { + title: '连接状态', dataIndex: 'status', name: 'status', + width: 100, + render: (_: any, record: Project, index: number) => { + const isLightClient = record.participant_type === ParticipantType.LIGHT_CLIENT; + if (isLightClient || projectListType === ProjectListType.PENDING) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + + return ( + <ProjectConnectionStatus + project={record} + ref={(ref) => (projectConnectionStatusListRef.current[index] = ref!)} + /> + ); + }, + }, + { + title: '合作伙伴类型', + dataIndex: 'participant_type', width: 120, - render: (_: any, record: Project, index: number) => ( - <ProjectConnectionStatus status={statuses[index]} /> + filters: [ + { + text: '轻量级', + value: ParticipantType.LIGHT_CLIENT, + }, + { + text: '标准', + value: ParticipantType.PLATFORM, + }, + ], + defaultFilters: participantType ?? [], + render: (value: ParticipantType) => ( + <Tag color={PARTICIPANT_TYPE_TAG_MAPPER?.[value || ParticipantType.PLATFORM].color}> + {PARTICIPANT_TYPE_TAG_MAPPER?.[value || ParticipantType.PLATFORM].label} + </Tag> ), }, { - title: t('project.workflow_number'), - dataIndex: 'num_workflow', - name: 'num_workflow', + title: '创建方', + dataIndex: 'participants_info', + name: 'participants_info', + width: 100, + render: (value: any) => ( + <Tag> + {value?.participants_map?.[myDomainName]?.role === RoleType.COORDINATOR + ? '我方' + : getCoordinateName(value?.participants_map) ?? '我方'} + </Tag> + ), }, { - title: t('project.creator'), + title: '创建人', dataIndex: 'creator', name: 'creator', - render: (_: string) => <Username />, + render: (creator: string, record: Project) => + creator || record.creator_username || CONSTANTS.EMPTY_PLACEHOLDER, }, { - title: t('project.creat_time'), + title: '创建时间', dataIndex: 'created_at', name: 'created_at', render: (date: number) => <div>{formatTimestamp(date)}</div>, }, { - title: t('operation'), - dataIndex: 'created_at', - name: 'created_at', + title: '操作', + dataIndex: 'operation', + name: 'operation', fixed: 'right' as any, - width: 240, + width: 140, render: (_: any, record: Project & { onCheckConnectionClick: any }) => ( <GridRow left={-12}> - <Button size="small" type="link" onClick={record.onCheckConnectionClick}> - {t('project.check_connection')} - </Button> <Button - size="small" - type="link" - onClick={() => history.push(`/workflows/initiate/basic?project=${record.id}`)} + size="mini" + type="text" + onClick={record.onCheckConnectionClick} + disabled={ + record.participant_type === ParticipantType.LIGHT_CLIENT || + projectListType === ProjectListType.PENDING + } > - {t('project.create_work_flow')} + 检查连接 </Button> - <ProjectMoreActions - style={{ marginTop: '13px' }} - onEdit={() => { - history.push(`/projects/edit/${record.id}`); - }} - onViewDetail={() => onViewDetail(record)} + project={record} + projectListType={projectListType} + role={ + record?.participants_info?.participants_map?.[myDomainName]?.role ?? 
+ RoleType.COORDINATOR + } + onDeleteProject={onDeleteProject} + /> </GridRow> ), }, ]; return ( - <Container> - <Table dataSource={statefulList} columns={columns} rowKey="name" scroll={{ x: '100%' }} /> - </Container> + <div className={styles.table_list_container}> + <Table + className="custom-table" + data={statefulList} + columns={columns} + rowKey="id" + scroll={{ x: '100%' }} + pagination={false} + onChange={(_paginationProps, _sorterResult, filters) => { + onParticipantTypeChange(filters.participant_type!); + }} + /> + </div> ); - - function setProjectStatus(index: number, newStatus: ConnectionStatus) { - let newStatuses = [...statuses]; - newStatuses[index] = newStatus; - setStatuses(newStatuses); - } } export default TableList; diff --git a/web_console_v2/client/src/views/Projects/ProjectList/index.module.less b/web_console_v2/client/src/views/Projects/ProjectList/index.module.less new file mode 100644 index 000000000..2f0a90c5b --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ProjectList/index.module.less @@ -0,0 +1,35 @@ +.pagination_container{ + padding: 20px; + background-image: url('../../../assets/images/project-list-bg.png'); + background-repeat: no-repeat; + background-size: contain; +} +.spin_container{ + display: block; + + } +.list_container{ + display: flex; + flex: 1; + align-items: flex-start; +} + +.list_filter_container{ + height: 32px; + margin-top: 40px; + margin-bottom: 20px; + display: flex; + justify-content: space-between; + .filter_content_input{ + display: inline-block; + width: 250px; + > span >span{ + background-color: #FFFFFF !important; + } + } + .filter_content_radio{ + display: inline-block; + margin-left: 12px; + } } diff --git a/web_console_v2/client/src/views/Projects/ProjectList/index.tsx b/web_console_v2/client/src/views/Projects/ProjectList/index.tsx index 74a2166c3..c1aec5989 100644 --- a/web_console_v2/client/src/views/Projects/ProjectList/index.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectList/index.tsx @@ -1,68 +1,104 @@ -import React, { ReactElement, useState, useEffect } from 'react'; +import React, { ReactElement, useState, useEffect, useMemo } from 'react'; +import { useQuery } from 'react-query'; +import { useHistory } from 'react-router'; +import { useUrlState } from 'hooks'; +import { useRecoilQuery } from 'hooks/recoil'; +import { useReloadProjectList } from 'hooks/project'; +import { useMount } from 'react-use'; +import { useRecoilState } from 'recoil'; +import { appPreference } from 'stores/app'; +import { projectListQuery } from 'stores/project'; +import { Pagination, Spin, Grid, Button, Space, Message } from '@arco-design/web-react'; +import { IconPlus } from '@arco-design/web-react/icon'; import ProjectListFilters from './ProjectListFilters'; -import { useTranslation } from 'react-i18next'; import CardView from './CardView'; import TableView from './TableView'; -import { Pagination, Spin, Row } from 'antd'; -import styled, { createGlobalStyle } from 'styled-components'; -import { projectListQuery } from 'stores/project'; -import { useRecoilQuery } from 'hooks/recoil'; -import { DisplayType } from 'typings/component'; -import { Project } from 'typings/project'; -import SharedPageLayout from 'components/SharedPageLayout'; +import GridRow from 'components/_base/GridRow'; import NoResult from 'components/NoResult'; -import ProjectDetailDrawer from '../ProjectDetailDrawer'; -import store from 'store2'; -import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; -import { useMount } from 'react-use'; -import 
{ useReloadProjectList } from 'hooks/project'; +import TodoPopover from 'components/TodoPopover'; +import { deletePendingProject, deleteProject, fetchPendingProjectList } from 'services/project'; +import { DisplayType } from 'typings/component'; +import { Project, ProjectListType, ProjectStateType } from 'typings/project'; +import { ParticipantType } from 'typings/participant'; +import { transformRegexSpecChar } from 'shared/helpers'; +import { filterExpressionGenerator } from 'views/Datasets/shared'; +import { PENDING_PROJECT_FILTER_MAPPER } from '../shard'; -const GlobalStyle = createGlobalStyle` -.project-actions { - width: 72px; - border: 1px solid #e5e6e8; - box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.1); - border-radius: 4px; - overflow: hidden; - padding: 0; - - .ant-popover-content { - .ant-popover-arrow { - display: none !important; - } - .ant-popover-inner { - border-radius: 0; - .ant-popover-inner-content { - padding: 0; - } - } - } -} -`; -const StyledPagination = styled(Pagination)` - margin-top: 20px; -`; -const ListContainer = styled.section` - display: flex; - flex: 1; - align-items: flex-start; -`; +import styles from './index.module.less'; +import { fetchWorkflowList } from 'services/workflow'; +import Modal from 'components/Modal'; + +const { Row } = Grid; function ProjectList(): ReactElement { - const { t } = useTranslation(); + const history = useHistory(); const [projectListShow, setProjectListShow] = useState([] as Project[]); - const [pageSize, setPageSize] = useState(12); - const [currentPage, setCurrentPage] = useState(1); + + const [urlState, setUrlState] = useUrlState({ + pageSize: 12, + page: 1, + keyword: '', + participant_type: [], + project_list_type: ProjectListType.COMPLETE, + }); + const [total, setTotal] = useState(0); - const [drawerVisible, setDrawerVisible] = useState(false); - const [project, setCurrentProject] = useState<Project>(); - const [displayType, setDisplayType] = useState( - store.get(LOCAL_STORAGE_KEYS.projects_display) || DisplayType.Card, - ); + + const [preference, setPreference] = useRecoilState(appPreference); const reloadList = useReloadProjectList(); const { isLoading, data: projectList } = useRecoilQuery(projectListQuery); + const pendingProjectListQuery = useQuery( + ['fetchPendingProjectList'], + () => + fetchPendingProjectList({ + filter: filterExpressionGenerator( + { + state: [ProjectStateType.ACCEPTED, ProjectStateType.FAILED], + }, + PENDING_PROJECT_FILTER_MAPPER, + ), + page: 1, + page_size: 0, + }), + { + retry: 2, + }, + ); + const { pendingProjectListShow, pendingProjectListTotal } = useMemo(() => { + const pendingProjectList = pendingProjectListQuery?.data?.data; + const regx = new RegExp(`^.*${transformRegexSpecChar(urlState.keyword)}.*$`); + const filteredPendingProjectList = pendingProjectList?.filter((item) => { + return ( + (regx.test(item.name) && + (!urlState.participant_type.length || + (!item.participant_type && + urlState.participant_type.includes(ParticipantType.PLATFORM)) || + urlState.participant_type.includes(item.participant_type))) || + item.state !== ProjectStateType.PENDING + ); + }); + return { + pendingProjectListShow: filteredPendingProjectList?.slice( + (urlState.page - 1) * urlState.pageSize, + urlState.page * urlState.pageSize, + ), + pendingProjectListTotal: filteredPendingProjectList?.length ?? 
0, + }; + }, [pendingProjectListQuery, urlState]); + + const { nowProjectListShow, listTotal } = useMemo(() => { + if (urlState.project_list_type === ProjectListType.PENDING) { + return { nowProjectListShow: pendingProjectListShow, listTotal: pendingProjectListTotal }; + } + return { + nowProjectListShow: projectListShow, + listTotal: total, + }; + }, [pendingProjectListShow, projectListShow, urlState, total, pendingProjectListTotal]); + + const isEmpty = listTotal === 0; useMount(() => { if (!isLoading || projectList) { @@ -75,63 +111,174 @@ function ProjectList(): ReactElement { useEffect(() => { if (projectList) { - setProjectListShow(projectList.slice((currentPage - 1) * pageSize, currentPage * pageSize)); - setTotal(projectList.length); + const regx = new RegExp(`^.*${transformRegexSpecChar(urlState.keyword)}.*$`); + const filteredProjectList = projectList.filter((item) => { + return ( + regx.test(item.name) && + (!urlState.participant_type.length || + (!item.participant_type && + urlState.participant_type.includes(ParticipantType.PLATFORM)) || + urlState.participant_type.includes(item.participant_type)) + ); + }); + setProjectListShow( + filteredProjectList.slice( + (urlState.page - 1) * urlState.pageSize, + urlState.page * urlState.pageSize, + ), + ); + setTotal(filteredProjectList.length); } - }, [pageSize, currentPage, projectList]); - - const isEmpty = projectListShow.length === 0; + }, [projectList, urlState]); return ( - <Spin spinning={isLoading}> - <GlobalStyle /> - - <SharedPageLayout title={t('menu.label_project')} tip={t('project.describe')}> + <div className={styles.pagination_container}> + <GridRow justify="space-between"> + <div> + <h2>Hi,欢迎来到隐私计算平台</h2> + <p>打破数据孤岛,实现数据跨域共享开放,安全激活数据使用价值</p> + </div> + <div> + <Space> + <Button + className={'custom-operation-button'} + type="primary" + onClick={() => { + history.push('/projects/create'); + }} + icon={<IconPlus />} + > + 创建工作区 + </Button> + <TodoPopover.ProjectNotice /> + </Space> + </div> + </GridRow> + <Spin loading={isLoading} className={styles.spin_container}> <ProjectListFilters - onDisplayTypeChange={(type: number) => { - store.set(LOCAL_STORAGE_KEYS.projects_display, type); - setDisplayType(type); + onDisplayTypeChange={(type: DisplayType) => { + setPreference({ + ...preference, + projectsDisplay: type, + }); + }} + onProjectListTypeChange={(type: ProjectListType) => { + setUrlState((prevState) => ({ + ...prevState, + project_list_type: type, + page: 1, + })); }} + onSearch={onSearch} + defaultSearchText={urlState.keyword} + projectListType={urlState.project_list_type} /> - <ListContainer> - {isEmpty ? ( - <NoResult text={t('project.no_result')} to="/projects/create" /> - ) : displayType === DisplayType.Card ? ( - <CardView list={projectListShow} onViewDetail={viewDetail} /> + <section className={styles.list_container}> + {isEmpty && preference.projectsDisplay === DisplayType.Card ? ( + <NoResult text={'暂无工作区'} to="/projects/create" /> + ) : preference.projectsDisplay === DisplayType.Card ? ( + <CardView + list={nowProjectListShow ?? []} + onViewDetail={viewDetail} + projectListType={urlState.project_list_type} + onDeleteProject={handleDelete} + /> ) : ( - <TableView list={projectListShow} onViewDetail={viewDetail} /> + <TableView + list={nowProjectListShow ?? 
[]} + onViewDetail={viewDetail} + onParticipantTypeChange={onParticipantTypeChange} + participantType={urlState.participant_type} + projectListType={urlState.project_list_type} + onDeleteProject={handleDelete} + /> )} - </ListContainer> - - <ProjectDetailDrawer - project={project} - onClose={() => setDrawerVisible(false)} - visible={drawerVisible} - /> + </section> <Row justify="end"> {!isEmpty && ( - <StyledPagination - pageSizeOptions={['12', '24']} - pageSize={pageSize} - total={total} - current={currentPage} - showSizeChanger + <Pagination + className={styles.pagination_container} + pageSize={Number(urlState.pageSize)} + sizeOptions={[12, 24]} + total={listTotal} + showTotal={true} + current={Number(urlState.page)} + sizeCanChange onChange={onPageChange} /> )} </Row> - </SharedPageLayout> - </Spin> + </Spin> + </div> ); - function onPageChange(currentPage: number, page_size: number | undefined) { - setCurrentPage(currentPage); - setPageSize(Number(page_size)); + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); } function viewDetail(project: Project) { - setCurrentProject(project); - setDrawerVisible(true); + history.push(`/projects/${urlState.project_list_type}/detail/${project.id}`); + } + function onSearch(value: string) { + setUrlState((prevState) => ({ + ...prevState, + page: 1, + keyword: value, + })); + } + function onParticipantTypeChange(value: string[]) { + setUrlState((prevState) => ({ + ...prevState, + page: 1, + participant_type: value, + })); + } + + async function handleDelete(projectId: ID, projectListType: ProjectListType) { + if (!projectId) { + return; + } + try { + const { data: workflowList } = await fetchWorkflowList({ + project: projectId, + states: ['running'], + page: 1, + pageSize: 1, + }); + if (Boolean(workflowList.length)) { + Message.info('有正在运行的任务,请终止任务后再删除'); + return; + } + Modal.delete({ + title: '确认删除工作区?', + content: '删除工作区将清空我方全部资源,请谨慎操作', + async onOk() { + if (projectListType === ProjectListType.PENDING) { + try { + await deletePendingProject(projectId); + Message.success('删除工作区成功'); + pendingProjectListQuery.refetch(); + } catch (error: any) { + Message.error(error.message); + } + } else { + try { + await deleteProject(projectId); + Message.success('删除工作区成功'); + reloadList(); + } catch (error: any) { + Message.error(error.message); + } + } + }, + }); + } catch (error: any) { + return error.message; + } } } diff --git a/web_console_v2/client/src/views/Projects/ProjectMoreActions.tsx b/web_console_v2/client/src/views/Projects/ProjectMoreActions.tsx index 625b27ca0..f8ce6b660 100644 --- a/web_console_v2/client/src/views/Projects/ProjectMoreActions.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectMoreActions.tsx @@ -1,62 +1,53 @@ import React, { ReactElement } from 'react'; -import styled, { CSSProperties } from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { Popover } from 'antd'; -import IconButton from 'components/IconButton'; -import { More } from 'components/IconPark'; - -const ActionListContainer = styled.div` - display: flex; - flex-direction: column; - height: 74px; - padding: 6px 0; - border-radius: 4px; - - .actionItem { - flex: 1; - width: 100%; - background-color: transparent; - line-height: 30px; - padding-left: 12px; - cursor: pointer; - &:hover { - background-color: var(--gray1); - } - } -`; +import { useHistory } from 'react-router'; +import MoreActions from 'components/MoreActions'; +import { Project, 
ProjectListType, RoleType } from 'typings/project'; interface ProjectMoreActionsProps { - suffix?: React.ReactNode; - actionList?: React.ReactNode; - onEdit?: () => void; - onViewDetail?: () => void; - style?: CSSProperties; -} - -function ActionList({ onEdit, onViewDetail }: ProjectMoreActionsProps): ReactElement { - const { t } = useTranslation(); - return ( - <ActionListContainer> - <div className="actionItem" onClick={onEdit}> - {t('project.action_edit')} - </div> - <div className="actionItem" onClick={onViewDetail}> - {t('project.action_detail')} - </div> - </ActionListContainer> - ); + project: Project; + projectListType: ProjectListType; + role: RoleType; + onDeleteProject: (projectId: ID, projectListType: ProjectListType) => void; } -function ProjectMoreActions(props: ProjectMoreActionsProps): ReactElement { +function ProjectMoreActions({ + project, + role, + projectListType, + onDeleteProject, +}: ProjectMoreActionsProps): ReactElement { + const history = useHistory(); + //todo: Uncomment when editing pendingProjects is supported + // const editDisable = + // project.ticket_status === ProjectTicketStatus.PENDING || + // (role === RoleType.PARTICIPANT && project.state !== ProjectStateType.ACCEPTED); + const editDisable = projectListType === ProjectListType.PENDING; + //todo: Uncomment when deleting project is supported + // const deleteDisable = role === RoleType.PARTICIPANT; + const deleteDisable = + projectListType === ProjectListType.COMPLETE || role === RoleType.PARTICIPANT; return ( - <Popover - content={props.actionList ?? <ActionList {...props} />} - placement="bottomLeft" - overlayClassName="project-actions" - > - <IconButton type="text" icon={<More />} circle /> - </Popover> + <MoreActions + actionList={[ + { + label: '编辑', + onClick: handleEdit, + disabled: editDisable, + }, + { + label: '删除', + onClick: () => { + onDeleteProject(project.id, projectListType); + }, + disabled: deleteDisable, + danger: true, + }, + ]} + /> ); + function handleEdit() { + project && history.push(`/projects/edit/${project.id}`); + } } export default ProjectMoreActions; diff --git a/web_console_v2/client/src/views/Projects/ProjectName.tsx b/web_console_v2/client/src/views/Projects/ProjectName.tsx index b7e7a9684..ab4701e74 100644 --- a/web_console_v2/client/src/views/Projects/ProjectName.tsx +++ b/web_console_v2/client/src/views/Projects/ProjectName.tsx @@ -1,18 +1,8 @@ -import React, { ReactElement, useRef, useState } from 'react'; -import styled, { CSSProperties } from 'styled-components'; -import { Tooltip } from 'antd'; -import { MixinEllipsis } from 'styles/mixins'; +import React, { ReactElement, useRef, useState, CSSProperties } from 'react'; import { useMount } from 'react-use'; +import { Tooltip } from '@arco-design/web-react'; -const Container = styled.div` - ${MixinEllipsis()} - - color: var(--gray10); - font-weight: 500; - font-size: 15px; - line-height: 40px; - margin-left: 16px; -`; +import styles from './index.module.less'; interface CreateTimeProps { text: string; @@ -21,7 +11,7 @@ interface CreateTimeProps { function ProjectName({ text, style }: CreateTimeProps): ReactElement { const eleRef = useRef<HTMLDivElement>(); - const [trigger, setTrigger] = useState('click'); + const [toolTipContent, setToolTipContent] = useState<string | undefined>(); useMount(() => { // Check element overflow at next-tick @@ -29,16 +19,16 @@ function ProjectName({ text, style }: CreateTimeProps): ReactElement { const { current } = eleRef; if (current) { if (current.scrollWidth > current.offsetWidth) { 
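+ // the text overflows its container, so give the tooltip real content to enable it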
- setTrigger('hover'); + setToolTipContent(text); } } }); }); return ( - <Tooltip title={text} trigger={trigger}> - <Container ref={eleRef as any} style={style}> + <Tooltip content={toolTipContent}> + <div className={styles.project_name_container} ref={eleRef as any} style={style}> {text} - </Container> + </div> </Tooltip> ); } diff --git a/web_console_v2/client/src/views/Projects/ReceiverProject/index.module.less b/web_console_v2/client/src/views/Projects/ReceiverProject/index.module.less new file mode 100644 index 000000000..78495cc11 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ReceiverProject/index.module.less @@ -0,0 +1,46 @@ +.card_container{ + width:440px; + border: 1px solid rgb(var(--gray-3)); + border-radius: 8px; + margin: auto; + padding: 0px 16px; + .card_header{ + width: 54px; + height: 54px; + margin: 60px auto; + margin-bottom: 50px; + background-image: url('../../../assets/icons/atom-icon-algorithm-management.svg'); + } + .card_content_title{ + text-align: center; + > :first-child{ + margin-right: 0px; + } + } + .card_content_comment{ + width: 60%; + margin: 0px auto; + } + .card_content_participant{ + width: 60%; + margin: 10px auto; + > :first-child{ + margin-bottom: 10px; + } + } + .card_footer{ + width: 60%; + margin: 0px auto; + margin-top: 40px; + margin-bottom: 60px; + .btn_container{ + width: 104px; + } + } +} +.result_container{ + margin: auto; +} +.btn_content{ + width: 124px; +} diff --git a/web_console_v2/client/src/views/Projects/ReceiverProject/index.tsx b/web_console_v2/client/src/views/Projects/ReceiverProject/index.tsx new file mode 100644 index 000000000..a47bdc083 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/ReceiverProject/index.tsx @@ -0,0 +1,188 @@ +import React, { ReactElement, useMemo, useState } from 'react'; +import { useInterval } from 'react-use'; +import { useHistory, useParams } from 'react-router-dom'; +import { + Message as message, + Spin, + Typography, + Button, + Tag, + Space, + Result, + Message, +} from '@arco-design/web-react'; + +import { ResultProps } from '@arco-design/web-react/es/Result'; +import { authorizePendingProject, fetchPendingProjectList } from 'services/project'; +import { ProjectStateType } from 'typings/project'; + +import { useQuery } from 'react-query'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BackButton from 'components/BackButton'; +import ProjectName from '../ProjectName'; + +import { getCoordinateName, getParticipantsName } from '../shard'; + +import styles from './index.module.less'; +import Modal from 'components/Modal'; + +function ResultPage({ status, title }: ResultProps): ReactElement { + const history = useHistory(); + const [redirectCountdown, setRedirectCountdown] = useState<number>(5); + + useInterval(() => { + if (redirectCountdown === 0) { + history.push('/projects'); + return; + } + setRedirectCountdown(redirectCountdown - 1); + }, 1000); + + return ( + <div className={styles.result_container}> + <Result + status={status} + title={title} + subTitle={`${redirectCountdown}s钟后自动回到首页`} + extra={[ + <Button + className={styles.btn_content} + key="back" + type="primary" + onClick={() => { + history.push('/projects?project_list_type=pending'); + }} + > + 回到首页 + </Button>, + ]} + /> + </div> + ); +} + +function ReceiverProject(): ReactElement { + const history = useHistory(); + const { id } = useParams<{ id: string }>(); + const [pageShow, setPageShow] = useState({ + mainPage: true, + okPage: false, + rejectPage: false, + }); + const [btnLoading, 
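+ // independent loading flags for the reject and approve buttons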
setBtnLoading] = useState({ + reject: false, + ok: false, + }); + const pendingProjectListQuery = useQuery(['fetchPendingProjectList'], () => + fetchPendingProjectList(), + ); + const pendingProjectDetail = useMemo(() => { + return pendingProjectListQuery.data?.data.find((item) => item.id.toString() === id); + }, [pendingProjectListQuery, id]); + + return ( + <div className={styles.container}> + <Spin className={styles.spin_container} loading={pendingProjectListQuery.isLoading}> + <SharedPageLayout + title={<BackButton onClick={() => history.goBack()}>工作区管理</BackButton>} + centerTitle="工作区邀请" + > + {pageShow.mainPage && ( + <div className={styles.card_container}> + <div className={styles.card_header} /> + <div className={styles.card_content_title}> + <ProjectName text={pendingProjectDetail?.name ?? ''} /> + <Typography.Text + className={styles.card_content_comment} + type="secondary" + ellipsis={{ + rows: 2, + showTooltip: true, + }} + > + {pendingProjectDetail?.comment || '-'} + </Typography.Text> + </div> + <div className={styles.card_content_participant}> + <Space> + <Tag color="arcoblue">创建方</Tag> + + <Tag> + {getCoordinateName(pendingProjectDetail?.participants_info.participants_map)} + </Tag> + </Space> + <div> + <Space align="start"> + <Tag color="arcoblue">参与方</Tag> + <Space wrap> + {getParticipantsName( + pendingProjectDetail?.participants_info?.participants_map, + ).map((item) => ( + <Tag key={item}>{item}</Tag> + ))} + </Space> + </Space> + </div> + </div> + <div className={styles.card_footer}> + <Space size={'medium'}> + <Button + loading={btnLoading.reject} + className={styles.btn_container} + onClick={onReject} + > + 拒绝 + </Button> + <Button + className={styles.btn_container} + loading={btnLoading.ok} + type="primary" + onClick={onOK} + > + 通过 + </Button> + </Space> + </div> + </div> + )} + {pageShow.okPage && <ResultPage status="success" title="已通过邀请" />} + {pageShow.rejectPage && <ResultPage status="error" title="已拒绝邀请" />} + </SharedPageLayout> + </Spin> + </div> + ); + async function onOK() { + if (!pendingProjectDetail?.id) { + return Message.error('找不到该工作区'); + } + try { + await authorizePendingProject(pendingProjectDetail?.id, { state: ProjectStateType.ACCEPTED }); + setBtnLoading({ ok: true, reject: false }); + setPageShow({ mainPage: false, okPage: true, rejectPage: false }); + } catch (error: any) { + message.error(error.message); + } + } + async function onReject() { + if (!pendingProjectDetail?.id) { + return Message.error('找不到该工作区'); + } + Modal.reject({ + title: '拒绝申请?', + content: '拒绝后无法撤销此操作。', + async onOk() { + try { + await authorizePendingProject(pendingProjectDetail?.id!, { + state: ProjectStateType.CLOSED, + }); + setBtnLoading({ ok: false, reject: true }); + setPageShow({ mainPage: false, okPage: false, rejectPage: true }); + } catch (error: any) { + message.error(error.message); + } + }, + }); + } +} + +export default ReceiverProject; diff --git a/web_console_v2/client/src/views/Projects/index.module.less b/web_console_v2/client/src/views/Projects/index.module.less new file mode 100644 index 000000000..d423dc594 --- /dev/null +++ b/web_console_v2/client/src/views/Projects/index.module.less @@ -0,0 +1,33 @@ + +@import '~styles/mixins.less'; +.create_time_container{ + flex-shrink: 0; + padding-right: 16px; + color: rgb(var(--gray-7)); + font-size: 12px; + line-height: 40px; +} +.project_name_container{ + .MixinEllipsis(); + margin-right: 16px; + color: rgb(var(--gray-10)); + font-weight: 500; + font-size: 20px; + font-family: 'PingFang SC'; + font-style: 
normal; + line-height: 40px; +} +.progress_container{ + font-size: 12px; + line-height: 22px; + color: rgb(var(--gray-7)); + .progress_name{ + display: block; + margin-bottom: -10px; + font-weight: 400; + font-size: 12px; + line-height: 20px; + color: #1D2129; + } +} + diff --git a/web_console_v2/client/src/views/Projects/index.tsx b/web_console_v2/client/src/views/Projects/index.tsx index 88c53f91f..48f6ea4f3 100644 --- a/web_console_v2/client/src/views/Projects/index.tsx +++ b/web_console_v2/client/src/views/Projects/index.tsx @@ -1,16 +1,25 @@ import React, { FC } from 'react'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import { Route } from 'react-router-dom'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { Redirect, Route } from 'react-router-dom'; import ProjectList from './ProjectList'; import CreateProject from './CreateProject'; import EditProject from './EditProject'; +import ProjectDetail from './ProjectDetail'; +import ReceiverProject from './ReceiverProject'; const ProjectsPage: FC = () => { return ( <ErrorBoundary> <Route path="/projects" exact component={ProjectList} /> - <Route path="/projects/create" exact component={CreateProject} /> + <Route + path="/projects/create" + exact + render={() => <Redirect to="/projects/create/config" />} + /> + <Route path="/projects/create/:step" exact component={CreateProject} /> <Route path="/projects/edit/:id" exact component={EditProject} /> + <Route path="/projects/:projectListType/detail/:id" exact component={ProjectDetail} /> + <Route path="/projects/receiver/:id" exact component={ReceiverProject} /> </ErrorBoundary> ); }; diff --git a/web_console_v2/client/src/views/Projects/shard.tsx b/web_console_v2/client/src/views/Projects/shard.tsx new file mode 100644 index 000000000..0a278240e --- /dev/null +++ b/web_console_v2/client/src/views/Projects/shard.tsx @@ -0,0 +1,187 @@ +import React, { ReactElement, CSSProperties } from 'react'; +import { Progress } from '@arco-design/web-react'; +import { + ParticiPantMap, + RoleType, + ProjectTicketStatus, + ProjectStateType, + ProjectAbilityType, + ProjectActionType, +} from 'typings/project'; +import { Participant, ParticipantType } from 'typings/participant'; +import { FilterOp } from 'typings/filter'; + +import styles from './index.module.less'; + +export function getCoordinateName(participantsMap?: Record<string, ParticiPantMap>) { + if (!participantsMap) { + return undefined; + } + const keyList = Object.keys(participantsMap); + const coordinate = keyList.find((item) => { + return participantsMap?.[item].role === RoleType.COORDINATOR; + }); + return coordinate ? 
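+ // return the coordinator's display name, or undefined when no COORDINATOR entry exists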
participantsMap?.[coordinate].name : undefined; +} + +export function getParticipantsName(participantsMap?: Record<string, ParticiPantMap>) { + if (!participantsMap) { + return []; + } + const resultParticipantsName: string[] = []; + const keyList = Object.keys(participantsMap); + keyList.forEach((item) => { + if (participantsMap?.[item].role === RoleType.PARTICIPANT) { + resultParticipantsName.push(participantsMap?.[item].name); + } + }); + + return resultParticipantsName; +} + +export const TICKET_STATUS_MAPPER: Record<ProjectTicketStatus, any> = { + APPROVED: { status: 'default', percent: 100, name: '待授权' }, + PENDING: { + status: 'default', + percent: 50, + name: '待审批', + }, + DECLINED: { + status: 'warning', + percent: 50, + name: '审批拒绝', + }, + FAILED: { + status: 'warning', + percent: 100, + name: '失败', + }, +}; + +export const PARTICIPANT_STATE_MAPPER: Record<ProjectStateType, any> = { + PENDING: { + color: 'arcoblue', + value: '待授权', + }, + ACCEPTED: { + color: 'green', + value: '已授权', + }, + FAILED: { + color: 'orange', + value: '失败', + }, + CLOSED: { + color: 'red', + value: '已拒绝', + }, +}; + +export const PARTICIPANT_TYPE_TAG_MAPPER: Record<ParticipantType, any> = { + [ParticipantType.LIGHT_CLIENT]: { + color: 'purple', + label: '轻量级', + }, + [ParticipantType.PLATFORM]: { + color: 'arcoblue', + label: '标准', + }, +}; + +export const PROJECT_TASK_LABEL_MAPPER = { + [ProjectActionType.ID_ALIGNMENT]: 'ID对齐任务', + [ProjectActionType.DATA_ALIGNMENT]: '横向数据对齐任务', + [ProjectActionType.HORIZONTAL_TRAIN]: '横向联邦模型训练', + [ProjectActionType.VERTICAL_TRAIN]: '纵向联邦模型训练', + [ProjectActionType.VERTICAL_EVAL]: '纵向联邦模型评估', + [ProjectActionType.VERTICAL_PRED]: '纵向联邦模型离线预测', + [ProjectActionType.VERTICAL_SERVING]: '纵向联邦模型在线服务', + [ProjectActionType.WORKFLOW]: '工作流任务', + [ProjectActionType.TEE_SERVICE]: '可信分析服务', + [ProjectActionType.TEE_RESULT_EXPORT]: '可信分析服务结果导出', +}; +export const PROJECT_ABILITY_LABEL_MAPPER = { + [ProjectAbilityType.ALWAYS_ALLOW]: '始终允许', + [ProjectAbilityType.ONCE]: '允许一次', + [ProjectAbilityType.MANUAL]: '发起时询问', + [ProjectAbilityType.ALWAYS_REFUSE]: '拒绝', +}; + +export const PENDING_PROJECT_FILTER_MAPPER = { + state: FilterOp.IN, + ticket_status: FilterOp.EQUAL, +}; + +interface Props { + ticketStatus: ProjectTicketStatus; + style?: CSSProperties; + className?: string; +} +export function ProjectProgress({ ticketStatus, style, className }: Props): ReactElement { + const progress = TICKET_STATUS_MAPPER?.[ticketStatus]; + return ( + <div className={`${styles.progress_container} ${className}`} style={style}> + <span className={styles.progress_name}>{progress?.name ?? '成功'}</span> + <Progress + percent={progress?.percent ?? 100} + status={progress?.status ?? 'success'} + showText={false} + trailColor="var(--color-primary-light-1)" + /> + </div> + ); +} + +export function resetParticipantsInfo( + participantMap: Record<string, ParticiPantMap>, + participantList: Participant[], + myPureDomainName: string, +) { + const keyList = Object.keys(participantMap); + const resultList: any[] = []; + keyList.forEach((key: string) => { + const participantDetail = participantList.find((item) => item.pure_domain_name === key) ?? {}; + const completeParticipant = { + ...participantMap?.[key], + ...participantDetail, + pure_domain_name: key, + }; + key !== myPureDomainName && resultList.push(completeParticipant); + }); + const participantsList = resultList.sort((a: any, b: any) => { + return a.name > b.name ? 
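+ // order participants alphabetically by name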
1 : -1; + }); + participantMap?.[myPureDomainName] && + participantsList.unshift({ + ...participantMap?.[myPureDomainName], + pure_domain_name: myPureDomainName, + state: ProjectStateType.ACCEPTED, + }); + return participantsList; +} + +export function resetAbilitiesTableData( + actionRules?: Record<ProjectActionType, ProjectAbilityType>, +) { + if (!actionRules) { + return []; + } + //保证顺序不变 + const keyList = [ + ProjectActionType.ID_ALIGNMENT, + ProjectActionType.DATA_ALIGNMENT, + ProjectActionType.HORIZONTAL_TRAIN, + ProjectActionType.VERTICAL_TRAIN, + ProjectActionType.VERTICAL_EVAL, + ProjectActionType.VERTICAL_PRED, + ProjectActionType.VERTICAL_SERVING, + ProjectActionType.WORKFLOW, + ProjectActionType.TEE_SERVICE, + ProjectActionType.TEE_RESULT_EXPORT, + ]; + const actionRulesList: any[] = []; + keyList.forEach((item: ProjectActionType) => { + actionRules?.[item] && actionRulesList.push({ ability: item, rule: actionRules?.[item] }); + }); + return actionRulesList; +} diff --git a/web_console_v2/client/src/views/ProtectedRoute.tsx b/web_console_v2/client/src/views/ProtectedRoute.tsx index 7d5dcf123..92f4fe73d 100644 --- a/web_console_v2/client/src/views/ProtectedRoute.tsx +++ b/web_console_v2/client/src/views/ProtectedRoute.tsx @@ -1,36 +1,109 @@ import { useRecoilQuery } from 'hooks/recoil'; import React from 'react'; -import { Redirect, Route, RouteProps, useLocation } from 'react-router-dom'; +import { Redirect, Route, RouteProps, useLocation, useHistory } from 'react-router-dom'; import { userInfoGetters } from 'stores/user'; +import { userInfoQuery, userInfoState } from 'stores/user'; +import { appFlag } from 'stores/app'; +import { Flag } from 'typings/flag'; import { FedRoles } from 'typings/auth'; +import { useGetCurrentProjectAbilityConfig, useSubscribe } from 'hooks'; +import store from 'store2'; +import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; +import { useResetRecoilState, useRecoilValue } from 'recoil'; +import { Message } from '@arco-design/web-react'; +import { ProjectBaseAbilitiesType, ProjectTaskType } from 'typings/project'; interface Props extends RouteProps { isAuthenticated?: boolean; roles?: FedRoles[]; + flagKeys?: string[]; + abilitiesSupport?: (ProjectTaskType | ProjectBaseAbilitiesType)[]; } -function isMatchRoute(userRole: FedRoles, routeRoles: FedRoles[]) { +function isMatchRole(userRole: FedRoles, routeRoles: FedRoles[]) { return routeRoles.indexOf(userRole) !== -1; } +function isMatchFlag(flagValue: Flag, flagKeys: string[]) { + if ( + flagKeys.some((flag) => { + if (Object.prototype.hasOwnProperty.call(flagValue, flag) && !flagValue[flag]) { + return true; + } + + return false; + }) + ) { + return false; + } + + return true; +} + +function isMatchAbilities( + abilities: (ProjectTaskType | ProjectBaseAbilitiesType)[] | undefined, + abilitiesSupport: (ProjectTaskType | ProjectBaseAbilitiesType)[], +) { + if (!abilities?.[0]) { + return true; + } + return abilitiesSupport.includes(abilities?.[0]); +} + function ProtectedRoute(props: Props) { const { isLoading, data } = useRecoilQuery(userInfoGetters); + const { abilities } = useGetCurrentProjectAbilityConfig(); + const resetUserInfoState = useResetRecoilState(userInfoState); + const resetUserInfo = useResetRecoilState(userInfoQuery); + const appFlagValue = useRecoilValue(appFlag); + const history = useHistory(); + const location = useLocation(); - const { roles: routeRoles } = props; + const { roles: routeRoles, flagKeys, abilitiesSupport } = props; + + // If API return 422/401 status code, then 
go to login page + useSubscribe('logout', logout); if (isLoading) { - return <Route {...props} />; + return null; } if (!data || !data.isAuthenticated) { - return <Redirect to={`/login?from=${encodeURIComponent(location.pathname)}`} />; + return ( + <Redirect to={`/login?from=${encodeURIComponent(location.pathname + location.search)}`} /> + ); } - if (routeRoles && !isMatchRoute(data.role, routeRoles)) { + // FlagKey + if (flagKeys && !isMatchFlag(appFlagValue, flagKeys)) { + return <Redirect to="/" />; + } + + // Role + if (routeRoles && !isMatchRole(data.role, routeRoles)) { + return <Redirect to="/" />; + } + + // project abilities + if (abilitiesSupport && !isMatchAbilities(abilities, abilitiesSupport)) { return <Redirect to="/" />; } return <Route {...props} />; + + function logout(_: any, data: any) { + // clear local state + store.remove(LOCAL_STORAGE_KEYS.current_user); + store.remove(LOCAL_STORAGE_KEYS.sso_info); + resetUserInfoState(); + resetUserInfo(); + + // show tips + Message.info(data.message); + + // redirect + history.push(`/login`); + } } export default ProtectedRoute; diff --git a/web_console_v2/client/src/views/SSOCallback/index.tsx b/web_console_v2/client/src/views/SSOCallback/index.tsx new file mode 100644 index 000000000..a304f4d07 --- /dev/null +++ b/web_console_v2/client/src/views/SSOCallback/index.tsx @@ -0,0 +1,97 @@ +import { FC, useEffect } from 'react'; +import { useParams, useLocation, useHistory } from 'react-router-dom'; +import qs from 'qs'; +import store from 'store2'; +import { useTranslation } from 'react-i18next'; +import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; +import { FedLoginWay } from 'typings/auth'; +import { global_login } from '../Login'; +import { useSetRecoilState } from 'recoil'; +import { userInfoQuery } from 'stores/user'; +import { Message } from '@arco-design/web-react'; + +const SSOCallback: FC = () => { + const { t } = useTranslation(); + const setUserInfo = useSetRecoilState(userInfoQuery); + const history = useHistory(); + + const { ssoName } = useParams<{ + ssoName: string; + }>(); + + const location = useLocation(); + + const query = location.search || ''; + + useEffect(() => { + if (!ssoName || !query) { + return; + } + + // Parse url query + const queryObject = qs.parse(query.slice(1)) || {}; // slice(1) to remove '?' 
prefix + + // Find current login way info + const loginWayList: FedLoginWay[] = store.get(LOCAL_STORAGE_KEYS.app_login_way_list) || []; + + const currentLoginWay = loginWayList.find((item: FedLoginWay) => { + return item.name === ssoName; + }); + + if (!currentLoginWay) { + Message.error( + t('login.error_not_find_sso_info', { + ssoName, + }), + ); + return; + } + + let codeKey = ''; + + switch (currentLoginWay.protocol_type.toLocaleLowerCase()) { + case 'cas': + codeKey = currentLoginWay[currentLoginWay.protocol_type]?.['code_key'] || 'ticket'; + break; + case 'oauth': + case 'oauth2': + codeKey = currentLoginWay[currentLoginWay.protocol_type]?.['code_key'] || 'code'; + break; + default: + codeKey = currentLoginWay[currentLoginWay.protocol_type]?.['code_key'] || 'code'; + break; + } + + const ssoInfo = { + ssoName: currentLoginWay.name, + ssoType: currentLoginWay.protocol_type, + ssoCode: queryObject[codeKey], + codeKey, + }; + + // Store sso_info into localstorage, it will be used in Axios request interceptors as custom HTTP header, like 'x-pc-auth': <sso_name> <type> <credentials> + store.set(LOCAL_STORAGE_KEYS.sso_info, ssoInfo); + // If ssoName,ssoType,ssoCode existed, then call login api with code and sso_name + if (ssoInfo.ssoName && ssoInfo.ssoType && ssoInfo.ssoCode && codeKey) { + try { + global_login( + { + [codeKey]: ssoInfo.ssoCode, + }, + { + sso_name: ssoName, + }, + setUserInfo, + history, + ); + } catch (error) { + Message.error(error.message); + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ssoName, query]); + + return null; +}; + +export default SSOCallback; diff --git a/web_console_v2/client/src/views/Settings/ImageVersion/index.module.less b/web_console_v2/client/src/views/Settings/ImageVersion/index.module.less new file mode 100644 index 000000000..65d19f710 --- /dev/null +++ b/web_console_v2/client/src/views/Settings/ImageVersion/index.module.less @@ -0,0 +1,4 @@ +.styled_form { + width: 500px; + margin: 30vh auto auto; +} diff --git a/web_console_v2/client/src/views/Settings/ImageVersion/index.tsx b/web_console_v2/client/src/views/Settings/ImageVersion/index.tsx new file mode 100644 index 000000000..9b293ebd8 --- /dev/null +++ b/web_console_v2/client/src/views/Settings/ImageVersion/index.tsx @@ -0,0 +1,90 @@ +import React, { FC, useState } from 'react'; +import styled from './index.module.less'; +import { useMutation, useQuery } from 'react-query'; + +import { fetchSettingsImage, updateImage } from 'services/settings'; + +import { Form, Input, Button, Tooltip, Notification, Message } from '@arco-design/web-react'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { QuestionCircle } from 'components/IconPark'; + +import { SettingOptions } from 'typings/settings'; + +const ImageVersion: FC = () => { + const [formInstance] = Form.useForm<SettingOptions>(); + const [currentImage, setImage] = useState<string>(); + + const query = useQuery('fetchSettingsImage', fetchSettingsImage, { + onSuccess(res) { + setImage(res.data.value); + formInstance.setFieldsValue({ webconsole_image: res.data.value }); + }, + onError(error: any) { + Message.error(error.message); + }, + refetchOnWindowFocus: false, + retry: 2, + }); + + const mutation = useMutation(updateImage, { + onSuccess() { + const isImageChanged = formInstance.getFieldValue('webconsole_image') !== currentImage; + + if (isImageChanged) { + Notification.info({ + title: '系统配置更新成功', + content: + '已启动更新程序,Pod 开始进行替换,完成后可能需要手动 Port forward,并且该窗口将在几分钟后变得不可用。', + duration: 2 * 1000 * 60, 
// 2min + }); + } else { + Message.success('编辑成功'); + } + }, + }); + + return ( + <SharedPageLayout title={'全局配置'}> + <Form + className={styled.styled_form} + form={formInstance} + onSubmit={onFinish} + labelCol={{ span: 6 }} + wrapperCol={{ span: 18 }} + > + <Form.Item + field="webconsole_image" + label={ + <> + <span style={{ marginRight: 4 }}>{'镜像版本'}</span> + <Tooltip content={'每次更新 Web Console 镜像版本后需等待一段时间,刷新页面后才可用'}> + <QuestionCircle /> + </Tooltip> + </> + } + rules={[{ required: true, message: '镜像版本为必填项' }]} + > + <Input placeholder={'请选择镜像版本'} disabled={query.isFetching || mutation.isLoading} /> + </Form.Item> + + <Form.Item wrapperCol={{ offset: 6, span: 18 }}> + <Button + disabled={query.isFetching} + type="primary" + htmlType="submit" + loading={mutation.isLoading} + long + > + {'确认'} + </Button> + </Form.Item> + </Form> + </SharedPageLayout> + ); + + async function onFinish(values: any) { + mutation.mutate(values); + } +}; + +export default ImageVersion; diff --git a/web_console_v2/client/src/views/Settings/ImageVersion/proxy.js b/web_console_v2/client/src/views/Settings/ImageVersion/proxy.js new file mode 100644 index 000000000..15a25511b --- /dev/null +++ b/web_console_v2/client/src/views/Settings/ImageVersion/proxy.js @@ -0,0 +1,7 @@ +if (process.env.REACT_APP_ENABLE_IMAGE_VERSION_PAGE !== 'false') { + module.exports = require('./index'); +} else { + module.exports = function () { + return null; + }; +} diff --git a/web_console_v2/client/src/views/Settings/Settings.tsx b/web_console_v2/client/src/views/Settings/Settings.tsx new file mode 100644 index 000000000..0076aafc3 --- /dev/null +++ b/web_console_v2/client/src/views/Settings/Settings.tsx @@ -0,0 +1,16 @@ +import React, { FC } from 'react'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { Route } from 'react-router-dom'; +import ImageVersion from './ImageVersion'; +import SystemVariables from './SystemVariables'; + +const SettingsPage: FC = () => { + return ( + <ErrorBoundary> + <Route path="/settings/image" exact component={ImageVersion} /> + <Route path="/settings/variables" exact component={SystemVariables} /> + </ErrorBoundary> + ); +}; + +export default SettingsPage; diff --git a/web_console_v2/client/src/views/Settings/SystemVariables/EnvVariablesForm.module.less b/web_console_v2/client/src/views/Settings/SystemVariables/EnvVariablesForm.module.less new file mode 100644 index 000000000..222a91d0d --- /dev/null +++ b/web_console_v2/client/src/views/Settings/SystemVariables/EnvVariablesForm.module.less @@ -0,0 +1,32 @@ +.container { + margin-top: 30px; +} + +.header { + margin-bottom: 20px; +} + +.heading { + margin-bottom: 0; + font-size: 16px; + font-weight: 500; + line-height: 24px; + color: rgb(var(--gray-10)); + transition: 0.4s cubic-bezier(0.4, 0, 0.2, 1); +} + +.no_variables { + color: var(--textColorSecondary); +} + +.list_container { + width: calc(var(--form-width, 500px) * 2); + overflow: hidden; + transition: 0.4s cubic-bezier(0.4, 0, 0.2, 1); +} + +.remove_button { + position: absolute; + right: 0; + color: var(--textColor) !important; +} diff --git a/web_console_v2/client/src/views/Settings/SystemVariables/EnvVariablesForm.tsx b/web_console_v2/client/src/views/Settings/SystemVariables/EnvVariablesForm.tsx new file mode 100644 index 000000000..a30710896 --- /dev/null +++ b/web_console_v2/client/src/views/Settings/SystemVariables/EnvVariablesForm.tsx @@ -0,0 +1,164 @@ +import React, { FC, useCallback, useLayoutEffect, useRef } from 'react'; +import styled from 
'./EnvVariablesForm.module.less'; + +import { convertToUnit, isStringCanBeParsed } from 'shared/helpers'; + +import { Form, Input, Button, Grid, Switch } from '@arco-design/web-react'; +import { Delete, Plus } from 'components/IconPark'; + +import { FormInstance } from '@arco-design/web-react/es/Form'; +import { SystemVariable } from 'typings/settings'; + +const { Row, Col } = Grid; + +export const VARIABLES_FIELD_NAME = 'variables'; +export const VARIABLES_ERROR_CHANNEL = 'system.field_variables_error'; + +const DEFAULT_VARIABLE = { + name: '', + value: '', + fixed: false, + value_type: 'STRING', +}; + +const EnvVariablesForm: FC<{ + layout: { + labelCol: { span: number }; + wrapperCol: { span: number }; + }; + formInstance?: FormInstance; + disabled?: boolean; +}> = ({ layout, disabled, formInstance }) => { + const listInnerRef = useRef<HTMLDivElement>(); + const listContainerRef = useRef<HTMLDivElement>(); + + const setListContainerMaxHeight = useCallback( + (nextHeight: any) => { + listContainerRef.current!.style.maxHeight = convertToUnit(nextHeight); + }, + [listContainerRef], + ); + const getListInnerHeight = useCallback(() => { + return listInnerRef.current!.offsetHeight!; + }, [listInnerRef]); + + useLayoutEffect(() => { + const innerHeight = getListInnerHeight() + 30; + setListContainerMaxHeight(innerHeight); + }); + + return ( + <div className={styled.container}> + <div className={styled.header}> + <Row align="center"> + <Col {...layout.labelCol}> + <h3 className={styled.heading}>{'环境变量参数配置'}</h3> + </Col> + </Row> + </div> + + <div + className={styled.list_container} + ref={listContainerRef as any} + onTransitionEnd={onFoldAnimationEnd} + > + <Form.List field={VARIABLES_FIELD_NAME}> + {(fields, { add, remove }) => ( + <div ref={listInnerRef as any}> + {fields.map((field, index) => { + const list = (formInstance?.getFieldValue(VARIABLES_FIELD_NAME) ?? + []) as SystemVariable[]; + const isFixed = list[index]?.fixed ?? false; + const valueType = list[index]?.value_type ?? 'STRING'; + + return ( + <Row key={field.key} align="center" style={{ position: 'relative' }}> + <Form.Item + style={{ flex: '0 0 50%' }} + label="Name" + field={field.field + '.name'} + rules={[{ required: true, message: '请输入变量名' }]} + > + <Input placeholder="name" disabled={disabled || isFixed} /> + </Form.Item> + + <Form.Item + labelCol={{ span: 4 }} + wrapperCol={{ span: 18 }} + style={{ flex: '0 0 50%' }} + label="Value" + field={field.field + '.value'} + rules={[ + { required: true, message: '请输入变量值' }, + valueType === 'LIST' || valueType === 'OBJECT' + ? 
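+ // LIST / OBJECT values must be valid JSON, so add a parse check before submit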
{ + validator: (value, callback) => { + if (isStringCanBeParsed(value)) { + callback(); + } else { + callback(`JSON ${valueType} 格式错误`); + } + }, + } + : {}, + ]} + > + <Input.TextArea placeholder="value" disabled={disabled} /> + </Form.Item> + <Form.Item + label="fixed" + field={field.field + '.fixed'} + triggerPropName="checked" + hidden + > + <Switch /> + </Form.Item> + <Form.Item label="value_type" field={field.field + '.value_type'} hidden> + <Input /> + </Form.Item> + + {!isFixed && ( + <Button + className={styled.remove_button} + size="small" + icon={<Delete />} + type="text" + onClick={() => remove(index)} + /> + )} + </Row> + ); + })} + {/* Empty placeholder */} + {fields.length === 0 && ( + <Form.Item className={styled.no_variables} wrapperCol={{ offset: 3 }}> + {'当前没有环境变量参数,请添加'} + </Form.Item> + )} + + <Form.Item wrapperCol={{ offset: 3 }}> + {/* DO NOT simplify `() => add()` to `add`, it will pollute form value with $event */} + <Button + type="primary" + size="small" + icon={<Plus />} + onClick={() => add(DEFAULT_VARIABLE)} + > + {'添加参数'} + </Button> + </Form.Item> + </div> + )} + </Form.List> + </div> + </div> + ); + + function onFoldAnimationEnd(_: React.TransitionEvent) { + // Because of user can adjust list inner's height by resize value-textarea or add/remove variable + // we MUST set container's maxHeight to 'initial' after unfolded (after which user can interact) + listContainerRef.current!.style.maxHeight = 'initial'; + } +}; + +export default EnvVariablesForm; diff --git a/web_console_v2/client/src/views/Settings/SystemVariables/index.module.less b/web_console_v2/client/src/views/Settings/SystemVariables/index.module.less new file mode 100644 index 000000000..db82666b9 --- /dev/null +++ b/web_console_v2/client/src/views/Settings/SystemVariables/index.module.less @@ -0,0 +1,13 @@ +.styled_form { + --form-width: 500px; + + display: grid; + grid-auto-rows: auto 1fr auto; + + > .form-title { + margin-bottom: 24px; + + font-size: 27px; + line-height: 36px; + } +} diff --git a/web_console_v2/client/src/views/Settings/SystemVariables/index.tsx b/web_console_v2/client/src/views/Settings/SystemVariables/index.tsx new file mode 100644 index 000000000..900d2dafa --- /dev/null +++ b/web_console_v2/client/src/views/Settings/SystemVariables/index.tsx @@ -0,0 +1,125 @@ +import React, { FC, useState, useRef } from 'react'; +import styled from './index.module.less'; +import { useQuery } from 'react-query'; + +import { fetchSettingVariables, updateSettingVariables } from 'services/settings'; +import { formatValueToString, parseValueFromString } from 'shared/helpers'; + +import { Form, Button, Message, Spin } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; +import SharedPageLayout from 'components/SharedPageLayout'; +import Modal from 'components/Modal'; +import EnvVariablesForm, { + VARIABLES_FIELD_NAME, + VARIABLES_ERROR_CHANNEL, +} from './EnvVariablesForm'; + +import { FormProps } from '@arco-design/web-react/es/Form'; +import { SettingOptions, SystemVariable } from 'typings/settings'; + +const layout = { + labelCol: { span: 8 }, + wrapperCol: { span: 16 }, +}; +const Systemvariables: FC = () => { + const [form] = Form.useForm(); + + const [loading, setLoading] = useState(false); + const defaultVariables = useRef<SystemVariable[]>([]); + + const systemVariablesQuery = useQuery(['fetchSettingVariables'], () => fetchSettingVariables(), { + onSuccess(res) { + const variables: SystemVariable[] = (res.data?.variables ?? 
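+ // treat a missing variables field in the response as an empty list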
[]).map((item) => { + return { + ...item, + value: formatValueToString(item.value, item.value_type), + }; + }); + + defaultVariables.current = variables; + form.setFieldsValue({ + variables: variables, + }); + }, + onError(error: any) { + Message.error(error.message); + }, + cacheTime: 1, + refetchOnWindowFocus: false, + }); + + return ( + <SharedPageLayout title={'全局配置'}> + <Form + className={styled.styled_form} + form={form} + labelCol={{ span: 6 }} + wrapperCol={{ span: 18 }} + onSubmit={onFinish} + onSubmitFailed={onFinishFailed} + > + <Spin loading={systemVariablesQuery.isFetching}> + <EnvVariablesForm layout={layout} formInstance={form} disabled={loading} /> + </Spin> + <Form.Item + wrapperCol={{ offset: 3 }} + style={{ + width: 'calc(var(--form-width, 500px) * 2)', + }} + > + <GridRow gap="16"> + <Button type="primary" loading={loading} onClick={onSubmitClick}> + {'确认'} + </Button> + <Button onClick={onCancelClick}>{'取消'}</Button> + </GridRow> + </Form.Item> + </Form> + </SharedPageLayout> + ); + + function resetForm() { + form.setFieldsValue({ + variables: defaultVariables.current, + }); + } + function onCancelClick() { + Modal.confirm({ + title: '确认取消?', + content: '取消后,已填写内容将不再保留', + onOk: resetForm, + }); + } + function onSubmitClick() { + form.submit(); + } + async function onFinish(data: any) { + setLoading(true); + try { + const params: SettingOptions = { + webconsole_image: undefined, + variables: (data.variables ?? []).map((item: SystemVariable) => { + return { + ...item, + value: parseValueFromString(item.value, item.value_type), + }; + }), + }; + + const systemVariables = await updateSettingVariables({ variables: params.variables }); + Message.success('修改环境变量成功'); + defaultVariables.current = systemVariables.data?.variables ?? []; + } catch (error) { + Message.error(error.message); + } + setLoading(false); + } + function onFinishFailed(errorInfo: Parameters<Required<FormProps>['onSubmitFailed']>[0]) { + const regx = new RegExp(`^${VARIABLES_FIELD_NAME}`); + if (Object.keys(errorInfo).some((key) => regx.test(key))) { + PubSub.publish(VARIABLES_ERROR_CHANNEL); + } + } +}; + +export default Systemvariables; diff --git a/web_console_v2/client/src/views/Settings/index.tsx b/web_console_v2/client/src/views/Settings/index.tsx index 0c53e4670..e2146687b 100644 --- a/web_console_v2/client/src/views/Settings/index.tsx +++ b/web_console_v2/client/src/views/Settings/index.tsx @@ -1,99 +1,16 @@ -import React, { FC, useState } from 'react'; -import { Form, Input, Button, Tooltip, notification, message } from 'antd'; -import SharedPageLayout from 'components/SharedPageLayout'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { useMutation, useQuery } from 'react-query'; -import { fetchSettings, updateSettings } from 'services/settings'; -import { QuestionCircle, Close } from 'components/IconPark'; -import GridRow from 'components/_base/GridRow'; -import { SettingOptions } from 'typings/settings'; - -const StyledForm = styled(Form)` - width: 500px; - margin: 30vh auto auto; -`; -const SubmitButton = styled(Button)` - width: 100%; -`; +import React, { FC } from 'react'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { Route } from 'react-router-dom'; +import ImageVersion from './ImageVersion/proxy'; +import SystemVariables from './SystemVariables'; const SettingsPage: FC = () => { - const { t } = useTranslation(); - const [formInstance] = Form.useForm<SettingOptions>(); - const [currentImage, setImage] = useState<string>(); 
- - const query = useQuery('fetchSettings', fetchSettings, { - onSuccess(res) { - setImage(res.data.webconsole_image); - formInstance.setFieldsValue({ ...res.data }); - }, - onError(error: any) { - message.error(error.message); - }, - refetchOnWindowFocus: false, - retry: 2, - }); - - const mutation = useMutation(updateSettings, { - onSuccess() { - const isImageChanged = formInstance.getFieldValue('webconsole_image') !== currentImage; - - if (isImageChanged) { - notification.info({ - message: t('settings.msg_update_success'), - description: t('settings.msg_update_wc_image'), - duration: 2 * 60, // 2min - closeIcon: <Close />, - }); - } else { - message.success(t('settings.msg_update_success')); - } - }, - }); - return ( - <SharedPageLayout title={t('menu.label_settings')}> - <StyledForm - form={formInstance} - onFinish={onFinish} - labelCol={{ span: 6 }} - wrapperCol={{ span: 18 }} - > - <Form.Item - name="webconsole_image" - label={ - <GridRow gap="4"> - {t('settings.label_image_ver')} - <Tooltip title={t('settings.hint_update_image')}> - <QuestionCircle /> - </Tooltip> - </GridRow> - } - rules={[{ required: true, message: t('settings.msg_image_required') }]} - > - <Input - placeholder={t('settings.placeholder_image')} - disabled={query.isFetching || mutation.isLoading} - /> - </Form.Item> - - <Form.Item wrapperCol={{ offset: 6 }}> - <SubmitButton - disabled={query.isFetching} - type="primary" - htmlType="submit" - loading={mutation.isLoading} - > - {t('submit')} - </SubmitButton> - </Form.Item> - </StyledForm> - </SharedPageLayout> + <ErrorBoundary> + <Route path="/settings/image" exact component={ImageVersion as FC} /> + <Route path="/settings/variables" exact component={SystemVariables} /> + </ErrorBoundary> ); - - async function onFinish(values: any) { - mutation.mutate(values); - } }; export default SettingsPage; diff --git a/web_console_v2/client/src/views/TokenCallback/index.tsx b/web_console_v2/client/src/views/TokenCallback/index.tsx new file mode 100644 index 000000000..b067305ec --- /dev/null +++ b/web_console_v2/client/src/views/TokenCallback/index.tsx @@ -0,0 +1,87 @@ +import { FC, useEffect } from 'react'; +import { useHistory, useLocation } from 'react-router-dom'; +import qs from 'qs'; +import store from 'store2'; +import { useTranslation } from 'react-i18next'; +import { useSetRecoilState } from 'recoil'; +import { useUnmount } from 'react-use'; + +import { userInfoQuery } from 'stores/user'; +import { getMyUserInfo } from 'services/user'; + +import LOCAL_STORAGE_KEYS from 'shared/localStorageKeys'; + +import { Message } from '@arco-design/web-react'; + +const TokenCallback: FC = () => { + const { t } = useTranslation(); + + const history = useHistory(); + const location = useLocation(); + + const setUserInfo = useSetRecoilState(userInfoQuery); + + const query = location.search || ''; + + useEffect(() => { + if (!query) { + Message.error(t('login.error_not_find_access_token')); + store.remove(LOCAL_STORAGE_KEYS.temp_access_token); + return; + } + + // Parse url query + const queryObject = qs.parse(query.slice(1)) || {}; // slice(1) to remove '?' prefix + + // Get access_token info from queryObject + const accessToken = decodeURIComponent((queryObject['access_token'] as string) ?? 
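+ // the access token arrives URL-encoded in the callback query, and may be absent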
''); + + if (!accessToken) { + Message.error(t('login.error_not_find_access_token')); + store.remove(LOCAL_STORAGE_KEYS.temp_access_token); + return; + } + + // Store accessToken into localstorage, it will be used in Axios request interceptors as HTTP header, like Authorization = `Bearer ${token}` + store.set(LOCAL_STORAGE_KEYS.temp_access_token, accessToken); + + // Call API to get my userInfo + getMyUserInfo() + .then((resp) => { + const { data } = resp; + + // Remove temp_access_token + store.remove(LOCAL_STORAGE_KEYS.temp_access_token); + + // Store userInfo + store.set(LOCAL_STORAGE_KEYS.current_user, { + ...data, + access_token: accessToken, + date: Date.now(), + }); + setUserInfo(data); + + Message.success(t('app.login_success')); + + if (queryObject.from) { + history.replace(decodeURIComponent(queryObject.from as string) || '/projects'); + return; + } + + history.replace('/projects'); + }) + .catch((error) => { + store.remove(LOCAL_STORAGE_KEYS.temp_access_token); + Message.error(error + ''); + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [query]); + + useUnmount(() => { + store.remove(LOCAL_STORAGE_KEYS.temp_access_token); + }); + + return null; +}; + +export default TokenCallback; diff --git a/web_console_v2/client/src/views/TrustedCenter/CreateTrustedJobGroup/index.tsx b/web_console_v2/client/src/views/TrustedCenter/CreateTrustedJobGroup/index.tsx new file mode 100644 index 000000000..a18e5c9bc --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/CreateTrustedJobGroup/index.tsx @@ -0,0 +1,8 @@ +import React, { FC } from 'react'; +import TrustedJobGroupForm from '../TrustedJobGroupForm'; + +const CreateTrustedJobGroup: FC = () => { + return <TrustedJobGroupForm />; +}; + +export default CreateTrustedJobGroup; diff --git a/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/ApplicationResult/index.less b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/ApplicationResult/index.less new file mode 100644 index 000000000..004318605 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/ApplicationResult/index.less @@ -0,0 +1,26 @@ +.passed-container { + margin: 100px auto 0 auto; + padding: 70px; + text-align: center; + .arco-result-icon { + margin-bottom: 10px; + } + .arco-result-icon-tip { + width: 68px; + height: 68px; + } + .arco-icon { + margin-top: 20px; + font-size: 30px; + } + .arco-result-title { + margin-bottom: 30px; + font-size: 16px; + } + .arco-result-subtitle { + font-size: 12px; + } + .arco-result-extra { + margin-top: 10px; + } +} diff --git a/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/ApplicationResult/index.tsx b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/ApplicationResult/index.tsx new file mode 100644 index 000000000..090cb4a56 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/ApplicationResult/index.tsx @@ -0,0 +1,58 @@ +import React, { FC, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useHistory, useParams } from 'react-router'; +import BackButton from 'components/BackButton'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { Button, Result } from '@arco-design/web-react'; +import './index.less'; +import { useInterval } from 'react-use'; + +const REDIRECT_COUNTDOWN_DURATION = 5; +const ApplicationResult: FC = () => { + const { t } = useTranslation(); + const history = useHistory(); + const 
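+ // seconds remaining before automatically redirecting back to the trusted center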
[redirectCountdown, setRedirectCountdown] = useState(REDIRECT_COUNTDOWN_DURATION); + const { result } = useParams<{ result: string }>(); + + useInterval(() => { + if (redirectCountdown === 0) { + goBackTrustedCenter(); + return; + } + setRedirectCountdown(redirectCountdown - 1); + }, 1000); + + const layoutTitle = ( + <BackButton onClick={goBackTrustedCenter}> + {t('trusted_center.label_trusted_center')} + </BackButton> + ); + return ( + <SharedPageLayout title={layoutTitle} centerTitle="数据集导出申请"> + <div className="passed-container"> + <Result + status={result === 'passed' ? 'success' : 'warning'} + title={ + result === 'passed' + ? t('trusted_center.title_passed') + : t('trusted_center.title_rejected') + } + subTitle={t('trusted_center.title_status_tip', { + second: redirectCountdown, + })} + extra={[ + <Button key="back" type="primary" onClick={goBackTrustedCenter}> + {t('trusted_center.btn_go_back')} + </Button>, + ]} + /> + </div> + </SharedPageLayout> + ); + + function goBackTrustedCenter() { + history.push('/trusted-center'); + } +}; + +export default ApplicationResult; diff --git a/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/index.less b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/index.less new file mode 100644 index 000000000..481f521d6 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/index.less @@ -0,0 +1,40 @@ +.dataset-application-container { + width: 440px; + height: 438px; + margin: 120px auto 0 auto; + padding: 70px; + border: 1px solid #e5e6eb; + border-radius: 8px; + text-align: center; + .avatar { + display: inline-block; + width: 54px; + height: 54px; + } + .title { + font-weight: 500; + font-size: 20px; + margin-top: 60px; + } + .comment { + width: 300px; + height: 36px; + font-weight: 400; + font-size: 12px; + line-height: 18px; + } + .tag-container { + margin-top: 21px; + } + .bottom { + width: 230px; + height: 32px; + margin: 40px auto 0 auto; + display: flex; + justify-content: space-between; + .bottom___button { + width: 104px; + height: 32px; + } + } +} diff --git a/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/index.tsx b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/index.tsx new file mode 100644 index 000000000..8a5ca8aff --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/DatasetExportApplication/index.tsx @@ -0,0 +1,78 @@ +import React, { FC } from 'react'; +import { useHistory, useParams } from 'react-router'; +import { Tag, Button, Message } from '@arco-design/web-react'; +import BackButton from 'components/BackButton'; +import SharedPageLayout from 'components/SharedPageLayout'; +import { updateTrustedJob } from 'services/trustedCenter'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantList } from 'hooks'; +import CONSTANTS from 'shared/constants'; +import { AuthStatus } from 'typings/trustedCenter'; +import { Avatar } from '../shared'; +import './index.less'; + +const DatasetExportApplication: FC = () => { + const projectId = useGetCurrentProjectId(); + const params = useParams<{ id: string; coordinator_id: string; name: string }>(); + const history = useHistory(); + const participantList = useGetCurrentProjectParticipantList(); + const participant = participantList.filter((item) => item?.id === Number(params.coordinator_id)); + const layoutTitle = <BackButton onClick={goBackTrustedCenter}>{'可信中心'}</BackButton>; + + return ( + <SharedPageLayout title={layoutTitle} 
centerTitle="数据集导出申请"> + <div className="dataset-application-container"> + <Avatar className="avatar" /> + <h3 className="title">{`「${params.name || CONSTANTS.EMPTY_PLACEHOLDER}」 的导出申请`}</h3> + <div className="comment"> + {'该数据集为可信中心安全计算生成的计算结果,导出时需各合作伙伴审批通过'} + </div> + <div className="tag-container"> + <Tag color="arcoblue">{'发起方'}</Tag> + <Tag style={{ marginLeft: '10px' }}> + {participant?.[0]?.name || CONSTANTS.EMPTY_PLACEHOLDER} + </Tag> + </div> + <div className="bottom"> + <Button className="bottom___button" onClick={onReject}> + {'拒绝'} + </Button> + <Button className="bottom___button" type="primary" onClick={onPass}> + {'通过'} + </Button> + </div> + </div> + </SharedPageLayout> + ); + + function goBackTrustedCenter() { + history.push('/trusted-center'); + } + + async function onReject() { + try { + await updateTrustedJob(projectId!, params.id, { + comment: '', + auth_status: AuthStatus.WITHDRAW, + }); + history.push('/trusted-center/dataset-application/rejected'); + } catch (error) { + Message.error(error.message); + return Promise.reject(error); + } + } + + async function onPass() { + try { + await updateTrustedJob(projectId!, params.id, { + comment: '', + auth_status: AuthStatus.AUTHORIZED, + }); + history.push('/trusted-center/dataset-application/passed'); + } catch (error) { + Message.error(error.message); + return Promise.reject(error); + } + } +}; + +export default DatasetExportApplication; diff --git a/web_console_v2/client/src/views/TrustedCenter/EditTrustedJobGroup/index.tsx b/web_console_v2/client/src/views/TrustedCenter/EditTrustedJobGroup/index.tsx new file mode 100644 index 000000000..5be47b32f --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/EditTrustedJobGroup/index.tsx @@ -0,0 +1,8 @@ +import React, { FC } from 'react'; +import TrustedJobForm from '../TrustedJobGroupForm'; + +const EditTrustedJobGroup: FC = () => { + return <TrustedJobForm isEdit={true} />; +}; + +export default EditTrustedJobGroup; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobDetail/index.less b/web_console_v2/client/src/views/TrustedCenter/TrustedJobDetail/index.less new file mode 100644 index 000000000..9a37052e8 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobDetail/index.less @@ -0,0 +1,5 @@ +.display-dataset__tooltip { + display: flex; + align-items: center; + margin-top: -4px; +} diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobDetail/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobDetail/index.tsx new file mode 100644 index 000000000..ef3bf4adc --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobDetail/index.tsx @@ -0,0 +1,331 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Drawer, Table, Button, Tag, Tooltip } from '@arco-design/web-react'; +import { useTranslation } from 'react-i18next'; +import PropertyList from 'components/PropertyList'; +import { TrustedJob, TrustedJobGroup, TrustedJobStatus } from 'typings/trustedCenter'; +import { useQuery } from 'react-query'; +import { fetchTrustedJob } from 'services/trustedCenter'; +import { useGetCurrentProjectId } from 'hooks'; +import { Pod, PodState } from 'typings/job'; +import { fetchJobById } from 'services/workflow'; +import { formatTimestamp } from 'shared/date'; +import CONSTANTS from 'shared/constants'; +import CountTime from 'components/CountTime'; +import WhichAlgorithm from 'components/WhichAlgorithm'; +import WhichDataset from 'components/WhichDataset'; +import StateIndicator from 
'components/StateIndicator'; +import { getPodState } from 'views/Workflows/shared'; +import dayjs from 'dayjs'; +import './index.less'; +import WhichParticipant from 'components/WhichParticipant'; + +export enum TResourceFieldType { + MASTER = 'master', + PS = 'ps', + WORKER = 'worker', +} + +export type TrustedJobProps = { + visible: boolean; + id?: ID; + jobId?: ID; + toggleVisible: (val: any) => void; + group: TrustedJobGroup; +}; + +const TrustedJobDetail: FC<TrustedJobProps> = ({ visible, toggleVisible, id, jobId, group }) => { + const { t } = useTranslation(); + const [trustedJobInfo, setTrustedJobInfo] = useState<TrustedJob>(); + const projectId = useGetCurrentProjectId(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const trustedJobQuery = useQuery( + ['fetchTrustedJob', id], + () => { + return fetchTrustedJob(projectId!, id!); + }, + { + retry: 1, + refetchOnWindowFocus: false, + enabled: visible && Boolean(id), + onSuccess(res) { + setTrustedJobInfo(res.data); + }, + }, + ); + + const jobsQuery = useQuery( + ['fetchJobById', jobId], + () => fetchJobById(jobId).then((res) => res.data.pods), + { + enabled: visible && Boolean(jobId), + retry: 1, + refetchOnWindowFocus: false, + }, + ); + + const jobList = useMemo(() => { + if (!jobsQuery?.data) { + return []; + } + const jobs = jobsQuery.data || []; + return jobs; + }, [jobsQuery.data]); + + const displayedProps = useMemo( + () => [ + { + value: '可信计算', + label: t('trusted_center.label_algorithm_type'), + }, + { + value: ( + <WhichAlgorithm + id={trustedJobInfo?.algorithm_id || 0} + uuid={trustedJobInfo?.algorithm_uuid} + participantId={group?.algorithm_participant_id} + /> + ), + label: t('trusted_center.label_algorithm_select'), + }, + { + value: renderDatasetTooltip(group), + label: t('trusted_center.col_trusted_job_dataset'), + }, + { + value: trustedJobInfo?.started_at + ? formatTimestamp(trustedJobInfo?.started_at || 0) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: t('trusted_center.col_trusted_job_start_time'), + }, + { + value: trustedJobInfo?.finished_at + ? formatTimestamp(trustedJobInfo?.finished_at || 0) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: t('trusted_center.col_trusted_job_end_time'), + }, + { + value: trustedJobInfo?.status ? renderRuntime(trustedJobInfo) : CONSTANTS.EMPTY_PLACEHOLDER, + label: t('trusted_center.col_trusted_job_runtime'), + }, + { + value: ( + <div> + {trustedJobInfo?.resource ? ( + <div> + <Tag color="arcoblue">{TResourceFieldType.WORKER}</Tag> + <span>{`${trustedJobInfo?.resource.cpu / 1000}CPU+${ + trustedJobInfo?.resource.memory + }GiB*${trustedJobInfo?.resource.replicas}个实例`}</span> + </div> + ) : ( + CONSTANTS.EMPTY_PLACEHOLDER + )} + </div> + ), + label: t('trusted_center.title_resource_config'), + }, + { + value: + trustedJobInfo?.coordinator_id === 0 ? 
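+ // coordinator_id 0 means our own side initiated this trusted job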
( + t('trusted_center.label_coordinator_self') + ) : ( + <WhichParticipant id={trustedJobInfo?.coordinator_id} /> + ), + label: '发起方', + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [trustedJobInfo], + ); + + const columns = useMemo( + () => [ + { + title: t('trusted_center.col_instance_id'), + dataIndex: 'name', + name: 'name', + }, + { + dataIndex: 'state', + title: '状态', + filters: [ + PodState.SUCCEEDED, + PodState.RUNNING, + PodState.FAILED, + PodState.PENDING, + PodState.FAILED_AND_FREED, + PodState.SUCCEEDED_AND_FREED, + PodState.UNKNOWN, + ].map((state) => { + const { text } = getPodState({ state } as Pod); + return { + text, + value: state, + }; + }), + onFilter: (state: PodState, record: Pod) => { + return record?.state === state; + }, + render(state: any, record: Pod) { + return <StateIndicator {...getPodState(record)} />; + }, + }, + { + title: t('trusted_center.col_instance_start_at'), + dataIndex: 'created_at', + name: 'created_at', + render(_: any, record: any) { + return record.creation_timestamp + ? formatTimestamp(record.creation_timestamp) + : CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + title: t('trusted_center.col_trusted_job_operation'), + dataIndex: 'operation', + name: 'operation', + render: (_: any, record: any) => { + return ( + <> + <Button + type="text" + onClick={() => { + onLogClick(record); + }} + > + {t('trusted_center.btn_inspect_logs')} + </Button> + </> + ); + }, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [jobId], + ); + + return ( + <Drawer + width={807} + title={ + <span>{t('trusted_center.title_trusted_job_detail', { name: trustedJobInfo?.name })}</span> + } + visible={visible} + onOk={() => { + toggleVisible(false); + }} + onCancel={() => { + toggleVisible(false); + }} + > + {renderBasicInfo()} + {renderInstanceInfo()} + </Drawer> + ); + + function renderBasicInfo() { + return ( + <> + <h3>{t('trusted_center.title_base_info')}</h3> + <PropertyList cols={6} colProportions={[1.5, 1, 1]} properties={displayedProps} /> + </> + ); + } + + function renderInstanceInfo() { + return ( + <> + <h3>{t('trusted_center.title_instance_info')}</h3> + <Table + loading={trustedJobQuery.isFetching} + size="small" + rowKey="name" + scroll={{ x: '100%' }} + columns={columns} + data={jobId ? jobList : []} + pagination={{ + showTotal: true, + pageSizeChangeResetCurrent: true, + hideOnSinglePage: true, + }} + /> + </> + ); + } + + function renderRuntime(trustedJobInfo: TrustedJob) { + let isRunning = false; + let isStopped = true; + let runningTime = 0; + + const { status } = trustedJobInfo; + const { PENDING, RUNNING, STOPPED, SUCCEEDED, FAILED } = TrustedJobStatus; + isRunning = [RUNNING, PENDING].includes(status); + isStopped = [STOPPED, SUCCEEDED, FAILED].includes(status); + + if (isRunning || isStopped) { + const { finished_at, started_at } = trustedJobInfo; + runningTime = isStopped ? finished_at! - started_at! 
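+ // still running: measure elapsed time against the current timestamp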
: dayjs().unix() - started_at!; + } + return <CountTime time={runningTime} isStatic={!isRunning} />; + } + + function onLogClick(pod: Pod) { + const startTime = 0; + // open the pod log viewer in a new tab without handing it a window.opener reference + window.open(`/v2/logs/pod/${jobId}/${pod.name}/${startTime}`, '_blank', 'noopener'); + } + + function renderDatasetTooltip(record: TrustedJobGroup) { + if (!record) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + // without participant datasets + if (record.participant_datasets.items?.length === 0) { + return <WhichDataset.DatasetDetail id={record.dataset_id} />; + } + + const hasMyDataset = record.dataset_id !== 0; + let length = record.participant_datasets.items?.length || 0; + if (hasMyDataset) { + length += 1; + } + const datasets = record.participant_datasets.items!; + const nameList = datasets.map((item) => { + return item.name; + }); + + return ( + <div className="display-dataset__tooltip"> + {hasMyDataset ? ( + <WhichDataset.DatasetDetail id={record.dataset_id} /> + ) : ( + <div style={{ marginTop: '3px' }}>{nameList[0]}</div> + )} + {length > 1 ? ( + <Tooltip + position="top" + trigger="hover" + color="#FFFFFF" + content={nameList.map((item, index) => { + if (!hasMyDataset && index === 0) return <></>; + return ( + <> + <Tag style={{ marginTop: '5px' }} key={index}> + {item} + </Tag> + <br /> + </> + ); + })} + > + <Tag>{`+${length - 1}`}</Tag> + </Tooltip> + ) : ( + <></> + )} + </div> + ); + } +}; + +export default TrustedJobDetail; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ComputingJobTab/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ComputingJobTab/index.tsx new file mode 100644 index 000000000..f85ac1799 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ComputingJobTab/index.tsx @@ -0,0 +1,371 @@ +import { Button, Input, Message, Progress, Table, Tooltip } from '@arco-design/web-react'; +import NoResult from 'components/NoResult'; +import GridRow from 'components/_base/GridRow'; +import Modal from 'components/Modal'; +import dayjs from 'dayjs'; +import { useGetCurrentProjectId, useUrlState } from 'hooks'; +import React, { FC, useMemo, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useQuery } from 'react-query'; +import { useParams } from 'react-router'; +import { useToggle } from 'react-use'; +import { + exportTrustedJobResult, + fetchTrustedJobList, + stopTrustedJob, + updateTrustedJob, +} from 'services/trustedCenter'; +import CONSTANTS from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { + AuthStatus, + TicketAuthStatus, + TrustedJobGroup, + TrustedJobGroupTabType, + TrustedJobListItem, + TrustedJobStatus, +} from 'typings/trustedCenter'; +import { Edit } from 'components/IconPark'; +import CountTime from 'components/CountTime'; +import StateIndicator from 'components/StateIndicator'; +import { getTicketAuthStatus, getTrustedJobStatus } from 'shared/trustedCenter'; +import { to } from 'shared/helpers'; +import TrustedJobDetail from 'views/TrustedCenter/TrustedJobDetail'; +import { AuthStatusMap } from 'views/TrustedCenter/shared'; + +export type Props = { + trustedJobGroup: TrustedJobGroup; +}; + +const ComputingJobTab: FC<Props> = ({ trustedJobGroup }) => { + const { t } = useTranslation(); + const projectId = useGetCurrentProjectId(); + const params = useParams<{ id: string; tabType: TrustedJobGroupTabType }>(); + const [commentVisible, setCommentVisible] = useState(false); + const [comment, setComment] = useState(''); + const
[trustedJobId, setTrustedJobId] = useState<ID>(); + const [selectedJobId, setSelectedJobId] = useState<ID>(); + const [drawerVisible, toggleDrawerVisible] = useToggle(false); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: '', + }); + + const listQuery = useQuery( + ['trustedJobListQuery', params], + () => { + return fetchTrustedJobList(projectId!, { trusted_job_group_id: params.id }); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const columns = useMemo( + () => [ + { + title: '名称', + dataIndex: 'name', + name: 'name', + ellipsis: true, + render: (name: string, record: TrustedJobListItem) => { + return ( + <GridRow left={-13}> + <Button type="text" size="mini" onClick={() => onCheck(record)}> + {name} + </Button> + </GridRow> + ); + }, + }, + { + title: '授权状态', + dataIndex: 'ticket_auth_status', + name: 'ticket_auth_status', + render: (_: any, record: TrustedJobListItem) => { + const data = getTicketAuthStatus(record); + return ( + <> + <Tooltip + position="tl" + content={ + record.ticket_auth_status === TicketAuthStatus.AUTH_PENDING + ? renderUnauthParticipantList(record) + : undefined + } + > + <div>{data.text}</div> + </Tooltip> + <Progress + percent={data.percent} + showText={false} + style={{ width: 100 }} + status={data.type} + /> + </> + ); + }, + }, + { + title: '任务状态', + dataIndex: 'status', + name: 'status', + render: (_: any, record: TrustedJobListItem) => { + return ( + <div className="indicator-with-tip"> + <StateIndicator {...getTrustedJobStatus(record)} /> + </div> + ); + }, + }, + { + title: '运行时长', + dataIndex: 'runtime', + name: 'runtime', + render: (_: any, record: TrustedJobListItem) => { + let isRunning = false; + let isStopped = true; + let runningTime = 0; + + const { status } = record; + const { PENDING, RUNNING, STOPPED, SUCCEEDED, FAILED } = TrustedJobStatus; + isRunning = [RUNNING, PENDING].includes(status); + isStopped = [STOPPED, SUCCEEDED, FAILED].includes(status); + + if (isRunning || isStopped) { + const { finished_at, started_at } = record; + runningTime = isStopped ? finished_at! - started_at! : dayjs().unix() - started_at!; + } + return <CountTime time={runningTime} isStatic={!isRunning} />; + }, + }, + { + title: '开始时间', + dataIndex: 'started_at', + name: 'started_at', + sorter(a: TrustedJobListItem, b: TrustedJobListItem) { + return a.started_at - b.started_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + { + title: '结束时间', + dataIndex: 'finished_at', + name: 'finished_at', + sorter(a: TrustedJobListItem, b: TrustedJobListItem) { + return a.finished_at - b.finished_at; + }, + render: (date: number) => ( + <div>{date ? formatTimestamp(date) : CONSTANTS.EMPTY_PLACEHOLDER}</div> + ), + }, + { + title: '备注', + dataIndex: 'comment', + name: 'comment', + width: 180, + render: (_: any, record: TrustedJobListItem) => { + return ( + <GridRow> + {record.comment ? 
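+ // show the stored comment behind an overflow tooltip; render nothing when it is unset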
( + <Tooltip position="tl" content={record.comment}> + <div className="col-description">{record.comment}</div> + </Tooltip> + ) : ( + <></> + )} + + <Button + type="text" + size="mini" + icon={<Edit />} + onClick={() => { + setTrustedJobId(record.id); + setComment(record.comment); + setCommentVisible(true); + }} + /> + </GridRow> + ); + }, + }, + { + title: t('trusted_center.col_trusted_job_operation'), + dataIndex: 'operation', + name: 'operation', + render: (_: any, record: TrustedJobListItem) => { + return ( + <GridRow left={-15}> + <Button + disabled={record.status !== TrustedJobStatus.RUNNING} + type="text" + size="mini" + onClick={() => { + Modal.terminate({ + title: `确认终止${record.name || ''}吗?`, + content: '终止后,该任务将无法重启,请谨慎操作', + onOk() { + stopTrustedJob(projectId!, record.id) + .then(() => { + Message.success('终止成功!'); + listQuery.refetch(); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + }} + > + {'终止'} + </Button> + <Button + type="text" + disabled={record.status !== TrustedJobStatus.SUCCEEDED} + size="mini" + onClick={() => { + Modal.confirm({ + title: `可信数据导出申请`, + content: '导出需要工作区合作伙伴共同审批,是否确认发起申请?', + onOk() { + exportTrustedJobResult(projectId!, record.id) + .then(() => { + Message.success('开始导出,请在数据中心-结果数据集查看导出结果'); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + }} + > + {'导出'} + </Button> + </GridRow> + ); + }, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [], + ); + + const listShow = useMemo(() => { + if (!listQuery.data?.data) { + return []; + } + const trustedJobList = listQuery.data.data || []; + return trustedJobList; + }, [listQuery.data]); + + const isEmpty = false; + + return ( + <div> + <div className="list-container"> + {isEmpty ? ( + <NoResult text="暂无可信计算任务" /> + ) : ( + <Table + rowKey="id" + className="custom-table custom-table-left-side-filter" + loading={listQuery.isFetching} + data={listShow} + scroll={{ x: '100%' }} + columns={columns} + pagination={{ + showTotal: true, + pageSizeChangeResetCurrent: true, + hideOnSinglePage: true, + total: listQuery.data?.page_meta?.total_items ?? 
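+ // fall back to undefined when the list response carries no page_meta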
undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + }} + /> + )} + </div> + <Modal + title={t('trusted_center.title_edit_trusted_job', { name: trustedJobGroup?.name })} + id={trustedJobId} + visible={commentVisible} + onOk={() => onCommentModalConfirm()} + onCancel={() => { + setCommentVisible(false); + setComment(''); + }} + autoFocus={false} + focusLock={true} + > + <div className="modal-label">{t('trusted_center.label_trusted_job_comment')}</div> + <Input.TextArea + placeholder={t('trusted_center.placeholder_trusted_job_set_comment')} + autoSize={{ minRows: 3 }} + value={comment} + onChange={setComment} + /> + </Modal> + <TrustedJobDetail + visible={drawerVisible} + group={trustedJobGroup!} + toggleVisible={toggleDrawerVisible} + id={trustedJobId} + jobId={selectedJobId} + /> + </div> + ); + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + function onCheck(record: any) { + setTrustedJobId(record.id); + setSelectedJobId(record.job_id); + toggleDrawerVisible(true); + } + + async function onCommentModalConfirm() { + const [res, error] = await to( + updateTrustedJob(projectId!, trustedJobId!, { + comment: comment, + }), + ); + setCommentVisible(false); + setComment(''); + if (error) { + Message.error(error.message); + return; + } + if (res.data) { + const msg = '编辑成功'; + Message.success(msg); + listQuery.refetch(); + return; + } + } + + function renderUnauthParticipantList(record: any) { + return ( + <div> + {Object.keys(record.participants_info.participants_map).map((key) => { + return ( + <div>{`${key} ${ + AuthStatusMap[ + record.participants_info?.participants_map[key].auth_status as AuthStatus + ] + }`}</div> + ); + })} + </div> + ); + } +}; + +export default ComputingJobTab; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ExportJobDetailDrawer/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ExportJobDetailDrawer/index.tsx new file mode 100644 index 000000000..c39c37618 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ExportJobDetailDrawer/index.tsx @@ -0,0 +1,295 @@ +import React, { FC, useMemo, useState } from 'react'; +import { Drawer, Table, Button, Tooltip, Progress } from '@arco-design/web-react'; +import { Link } from 'react-router-dom'; +import { useTranslation } from 'react-i18next'; +import PropertyList from 'components/PropertyList'; +import { AuthStatus, TicketAuthStatus, TrustedJob, TrustedJobStatus } from 'typings/trustedCenter'; +import { useQuery } from 'react-query'; +import { fetchTrustedJob } from 'services/trustedCenter'; +import { useGetCurrentProjectId } from 'hooks'; +import { Pod, PodState } from 'typings/job'; +import { fetchJobById } from 'services/workflow'; +import { formatTimestamp } from 'shared/date'; +import CONSTANTS from 'shared/constants'; +import CountTime from 'components/CountTime'; +import StateIndicator from 'components/StateIndicator'; +import { getPodState } from 'views/Workflows/shared'; +import dayjs from 'dayjs'; +import { getTicketAuthStatus, getTrustedJobStatus } from 'shared/trustedCenter'; +import { DatasetDetailSubTabs } from 'views/Datasets/DatasetDetail'; + +const AuthStatusMap: Record<AuthStatus, string> = { + [AuthStatus.AUTHORIZED]: '已授权', + [AuthStatus.PENDING]: '待授权', + [AuthStatus.WITHDRAW]: '拒绝授权', +}; + +export type ExportJobProps = { + visible: boolean; + id?: 
ID; + jobId?: ID; + toggleVisible: (val: any) => void; +}; + +const ExportJobDetailDrawer: FC<ExportJobProps> = ({ visible, toggleVisible, id, jobId }) => { + const { t } = useTranslation(); + const [trustedJobInfo, setTrustedJobInfo] = useState<TrustedJob>(); + const projectId = useGetCurrentProjectId(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const trustedJobQuery = useQuery( + ['fetchTrustedJob', id], + () => { + return fetchTrustedJob(projectId!, id!); + }, + { + retry: 1, + refetchOnWindowFocus: false, + enabled: visible && Boolean(id), + onSuccess(res) { + setTrustedJobInfo(res.data); + }, + }, + ); + const jobsQuery = useQuery( + ['fetchJobById', jobId], + () => fetchJobById(jobId).then((res) => res.data.pods), + { + enabled: visible && Boolean(jobId), + retry: 1, + refetchOnWindowFocus: false, + }, + ); + + const jobList = useMemo(() => { + if (!jobsQuery?.data) { + return []; + } + const jobs = jobsQuery.data || []; + return jobs; + }, [jobsQuery.data]); + + const displayedProps = useMemo( + () => [ + { + value: trustedJobInfo?.name, + label: '导出任务', + }, + { + value: trustedJobInfo?.started_at + ? formatTimestamp(trustedJobInfo?.started_at || 0) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '开始时间', + }, + { + value: trustedJobInfo?.finished_at + ? formatTimestamp(trustedJobInfo?.finished_at || 0) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: '结束时间', + }, + { + value: trustedJobInfo?.status ? renderRuntime(trustedJobInfo) : CONSTANTS.EMPTY_PLACEHOLDER, + label: '运行时长', + }, + { + value: (() => { + const data = getTicketAuthStatus(trustedJobInfo!); + return ( + <> + <Tooltip + position="tl" + content={ + trustedJobInfo?.ticket_auth_status === TicketAuthStatus.AUTH_PENDING + ? renderUnauthParticipantList(trustedJobInfo) + : undefined + } + > + <div>{data.text}</div> + </Tooltip> + <Progress + percent={data.percent} + showText={false} + style={{ width: 100 }} + status={data.type} + /> + </> + ); + })(), + label: '审批状态', + }, + { + value: (() => { + return ( + <div className="indicator-with-tip"> + <StateIndicator {...getTrustedJobStatus(trustedJobInfo!)} /> + {trustedJobInfo?.status === TrustedJobStatus.SUCCEEDED && ( + <Link + to={`/datasets/processed/detail/${trustedJobInfo.export_dataset_id}/${DatasetDetailSubTabs.DatasetJobDetail}`} + > + 查看数据集 + </Link> + )} + </div> + ); + })(), + label: '任务状态', + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [trustedJobInfo], + ); + + const columns = useMemo( + () => [ + { + title: t('trusted_center.col_instance_id'), + dataIndex: 'name', + name: 'name', + }, + { + dataIndex: 'state', + title: '状态', + filters: [ + PodState.SUCCEEDED, + PodState.RUNNING, + PodState.FAILED, + PodState.PENDING, + PodState.FAILED_AND_FREED, + PodState.SUCCEEDED_AND_FREED, + PodState.UNKNOWN, + ].map((state) => { + const { text } = getPodState({ state } as Pod); + return { + text, + value: state, + }; + }), + onFilter: (state: PodState, record: Pod) => { + return record?.state === state; + }, + render(state: any, record: Pod) { + return <StateIndicator {...getPodState(record)} />; + }, + }, + { + title: t('trusted_center.col_instance_start_at'), + dataIndex: 'created_at', + name: 'created_at', + render(_: any, record: any) { + return record.creation_timestamp + ? 
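+ // render the pod's creation time once it is reported, otherwise the shared empty placeholder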
formatTimestamp(record.creation_timestamp) + : CONSTANTS.EMPTY_PLACEHOLDER; + }, + }, + { + title: t('trusted_center.col_trusted_job_operation'), + dataIndex: 'operation', + name: 'operation', + render: (_: any, record: any) => { + return ( + <> + <Button + type="text" + onClick={() => { + onLogClick(record); + }} + > + {t('trusted_center.btn_inspect_logs')} + </Button> + </> + ); + }, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [jobId], + ); + + return ( + <Drawer + width={807} + title={ + <span>{t('trusted_center.title_trusted_job_detail', { name: trustedJobInfo?.name })}</span> + } + visible={visible} + onOk={() => { + toggleVisible(false); + }} + onCancel={() => { + toggleVisible(false); + }} + > + {renderBasicInfo()} + {renderInstanceInfo()} + </Drawer> + ); + + function renderBasicInfo() { + return ( + <> + <h3>{t('trusted_center.title_base_info')}</h3> + <PropertyList cols={6} colProportions={[1.5, 1, 1]} properties={displayedProps} /> + </> + ); + } + + function renderInstanceInfo() { + return ( + <> + <h3>{t('trusted_center.title_instance_info')}</h3> + <Table + loading={trustedJobQuery.isFetching} + size="small" + rowKey="name" + scroll={{ x: '100%' }} + columns={columns} + data={jobId ? jobList : []} + pagination={{ + showTotal: true, + pageSizeChangeResetCurrent: true, + hideOnSinglePage: true, + }} + /> + </> + ); + } + + function renderRuntime(trustedJobInfo: TrustedJob) { + let isRunning = false; + let isStopped = true; + let runningTime = 0; + + const { status } = trustedJobInfo; + const { PENDING, RUNNING, STOPPED, SUCCEEDED, FAILED } = TrustedJobStatus; + isRunning = [RUNNING, PENDING].includes(status); + isStopped = [STOPPED, SUCCEEDED, FAILED].includes(status); + + if (isRunning || isStopped) { + const { finished_at, started_at } = trustedJobInfo; + runningTime = isStopped ? finished_at! - started_at! 
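+ // same rule as the computing job detail: recorded span when stopped, live count-up while running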
: dayjs().unix() - started_at!; + } + return <CountTime time={runningTime} isStatic={!isRunning} />; + } + + function onLogClick(pod: Pod) { + const startTime = 0; + // open the pod log viewer in a new tab without handing it a window.opener reference + window.open(`/v2/logs/pod/${jobId}/${pod.name}/${startTime}`, '_blank', 'noopener'); + } + + function renderUnauthParticipantList(record: any) { + return ( + <div> + {Object.keys(record.participants_info.participants_map).map((key) => { + return ( + <div>{`${key} ${ + AuthStatusMap[ + record.participants_info?.participants_map[key].auth_status as AuthStatus + ] + }`}</div> + ); + })} + </div> + ); + } +}; + +export default ExportJobDetailDrawer; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ExportJobTab/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ExportJobTab/index.tsx new file mode 100644 index 000000000..c6aa45979 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/ExportJobTab/index.tsx @@ -0,0 +1,268 @@ +import { Button, Message, Progress, Table, Tooltip } from '@arco-design/web-react'; +import NoResult from 'components/NoResult'; +import GridRow from 'components/_base/GridRow'; +import Modal from 'components/Modal'; +import dayjs from 'dayjs'; +import { useGetCurrentProjectId, useUrlState } from 'hooks'; +import React, { FC, useMemo, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useParams } from 'react-router'; +import { useToggle } from 'react-use'; +import { fetchTrustedJobList, stopTrustedJob } from 'services/trustedCenter'; +import CONSTANTS from 'shared/constants'; +import { formatTimestamp } from 'shared/date'; +import { + AuthStatus, + TicketAuthStatus, + TrustedJobGroupTabType, + TrustedJobListItem, + TrustedJobParamType, + TrustedJobStatus, +} from 'typings/trustedCenter'; +import CountTime from 'components/CountTime'; +import StateIndicator from 'components/StateIndicator'; +import { getTicketAuthStatus, getTrustedJobStatus } from 'shared/trustedCenter'; +import ExportJobDetailDrawer from '../ExportJobDetailDrawer'; +import { AuthStatusMap } from 'views/TrustedCenter/shared'; + +const ExportJobTab: FC = () => { + const projectId = useGetCurrentProjectId(); + const params = useParams<{ id: string; tabType: TrustedJobGroupTabType }>(); + const [trustedJobId, setTrustedJobId] = useState<ID>(); + const [selectedJobId, setSelectedJobId] = useState<ID>(); + const [drawerVisible, toggleDrawerVisible] = useToggle(false); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: '', + }); + + const listQuery = useQuery( + ['trustedJobListQuery', params], + () => { + return fetchTrustedJobList(projectId!, { + trusted_job_group_id: params.id, + type: TrustedJobParamType.EXPORT, + }); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const columns = useMemo( + () => [ + { + title: '名称', + dataIndex: 'name', + name: 'name', + ellipsis: true, + render: (name: string, record: TrustedJobListItem) => { + return ( + <GridRow left={-13}> + <Button type="text" size="mini" onClick={() => onCheck(record)}> + {name} + </Button> + </GridRow> + ); + }, + }, + { + title: '授权状态', + dataIndex: 'ticket_auth_status', + name: 'ticket_auth_status', + render: (_: any, record: TrustedJobListItem) => { + const data = getTicketAuthStatus(record); + return ( + <> + <Tooltip + position="tl" + content={ + record.ticket_auth_status === TicketAuthStatus.AUTH_PENDING + ?
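+ // only surface the per-participant authorization breakdown while the ticket is still pending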
renderUnauthParticipantList(record) + : undefined + } + > + <div>{data.text}</div> + </Tooltip> + <Progress + percent={data.percent} + showText={false} + style={{ width: 100 }} + status={data.type} + /> + </> + ); + }, + }, + { + title: '任务状态', + dataIndex: 'status', + name: 'status', + render: (_: any, record: TrustedJobListItem) => { + return ( + <div className="indicator-with-tip"> + <StateIndicator {...getTrustedJobStatus(record)} /> + </div> + ); + }, + }, + { + title: '运行时长', + dataIndex: 'runtime', + name: 'runtime', + render: (_: any, record: TrustedJobListItem) => { + let isRunning = false; + let isStopped = true; + let runningTime = 0; + + const { status } = record; + const { PENDING, RUNNING, STOPPED, SUCCEEDED, FAILED } = TrustedJobStatus; + isRunning = [RUNNING, PENDING].includes(status); + isStopped = [STOPPED, SUCCEEDED, FAILED].includes(status); + + if (isRunning || isStopped) { + const { finished_at, started_at } = record; + runningTime = isStopped ? finished_at! - started_at! : dayjs().unix() - started_at!; + } + return <CountTime time={runningTime} isStatic={!isRunning} />; + }, + }, + { + title: '开始时间', + dataIndex: 'started_at', + name: 'started_at', + sorter(a: TrustedJobListItem, b: TrustedJobListItem) { + return a.started_at - b.started_at; + }, + render: (date: number) => ( + <div>{date ? formatTimestamp(date) : CONSTANTS.EMPTY_PLACEHOLDER}</div> + ), + }, + { + title: '结束时间', + dataIndex: 'finished_at', + name: 'finished_at', + sorter(a: TrustedJobListItem, b: TrustedJobListItem) { + return a.finished_at - b.finished_at; + }, + render: (date: number) => ( + <div>{date ? formatTimestamp(date) : CONSTANTS.EMPTY_PLACEHOLDER}</div> + ), + }, + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + render: (_: any, record: TrustedJobListItem) => { + return ( + <GridRow left={-15}> + <Button + disabled={record.status !== TrustedJobStatus.RUNNING} + type="text" + size="mini" + onClick={() => { + Modal.terminate({ + title: `确认终止${record.name || ''}吗?`, + content: '终止后,该任务将无法重启,请谨慎操作', + onOk() { + stopTrustedJob(projectId!, record.id) + .then(() => { + Message.success('终止成功!'); + listQuery.refetch(); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + }} + > + {'终止'} + </Button> + </GridRow> + ); + }, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [], + ); + + const listShow = useMemo(() => { + if (!listQuery.data?.data) { + return []; + } + const trustedJobList = listQuery.data.data || []; + return trustedJobList; + }, [listQuery.data]); + + const isEmpty = false; + + return ( + <div> + <div className="list-container"> + {isEmpty ? ( + <NoResult text="暂无导出任务" /> + ) : ( + <Table + rowKey="id" + className="custom-table custom-table-left-side-filter" + loading={listQuery.isFetching} + data={listShow} + scroll={{ x: '100%' }} + columns={columns} + pagination={{ + showTotal: true, + pageSizeChangeResetCurrent: true, + hideOnSinglePage: true, + total: listQuery.data?.page_meta?.total_items ?? 
undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + }} + /> + )} + </div> + <ExportJobDetailDrawer + visible={drawerVisible} + toggleVisible={toggleDrawerVisible} + id={trustedJobId} + jobId={selectedJobId} + /> + </div> + ); + + function renderUnauthParticipantList(record: any) { + return ( + <div> + {Object.keys(record.participants_info.participants_map).map((key) => { + return ( + <div>{`${key} ${ + AuthStatusMap[ + record.participants_info?.participants_map[key].auth_status as AuthStatus + ] + }`}</div> + ); + })} + </div> + ); + } + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + function onCheck(record: any) { + setTrustedJobId(record.id); + setSelectedJobId(record.job_id); + toggleDrawerVisible(true); + } +}; + +export default ExportJobTab; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/index.module.less b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/index.module.less new file mode 100644 index 000000000..9213c366b --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/index.module.less @@ -0,0 +1,62 @@ +.padding_container { + padding: 20px 20px 0; + .header_name { + display: flex; + align-items: center; + height: 24px; + font-size: 16px; + font-weight: 600; + .header_name__tag { + height: 24px; + font-size: 12px; + margin-left: 8px; + border-radius: 40px; + } + } + .header_comment { + font-size: 12px; + color: var(--textColorSecondary); + } + .header_col { + margin-top: 9px; + text-align: right; + } + .display_dataset__tooltip { + margin-top: -4px; + display: flex; + align-items: center; + } + .content { + position: relative; + .list_container { + display: flex; + flex: 1; + width: 100%; + .indicator_with_tip { + display: flex; + flex-direction: row; + align-items: center; + } + } + } +} + +.col_description { + width: 120px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.modal_label { + text-align: left; + margin-bottom: 10px; +} + +.data_detail_tab_pane { + display: grid; +} + +.data_detail_tab { + margin-bottom: 0 !important; +} diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/index.tsx new file mode 100644 index 000000000..72ca57cee --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupDetail/index.tsx @@ -0,0 +1,351 @@ +import React, { FC, useMemo, useState } from 'react'; +import { useHistory, useParams } from 'react-router'; +import { useQuery } from 'react-query'; +import { + Button, + Grid, + Input, + Message, + Progress, + Space, + Tabs, + Tag, + Tooltip, +} from '@arco-design/web-react'; +import { useGetCurrentProjectId } from 'hooks'; +import { useTranslation } from 'react-i18next'; + +import Modal from 'components/Modal'; +import BackButton from 'components/BackButton'; +import SharedPageLayout from 'components/SharedPageLayout'; +import WhichParticipant from 'components/WhichParticipant'; +import MoreActions from 'components/MoreActions'; +import PropertyList from 'components/PropertyList'; +import WhichDataset from 'components/WhichDataset'; + +import atomIcon from 'assets/icons/atom-icon-algorithm-management.svg'; +import { Avatar } from '../shared'; +import routeMaps from '../routes'; +import { formatTimestamp } from 'shared/date'; +import CONSTANTS 
from 'shared/constants'; +import styled from './index.module.less'; + +import { + deleteTrustedJobGroup, + fetchTrustedJobGroupById, + launchTrustedJobGroup, +} from 'services/trustedCenter'; +import { + AuthStatus, + TrustedJobGroup, + TrustedJobGroupStatus, + TrustedJobGroupTabType, +} from 'typings/trustedCenter'; +import ComputingJobTab from './ComputingJobTab'; +import ExportJobTab from './ExportJobTab'; +import { to } from 'shared/helpers'; +import { getTicketAuthStatus } from 'shared/trustedCenter'; + +const Row = Grid.Row; +const Col = Grid.Col; + +export enum CommentModalType { + INITIATE = 'initiate', + EDIT = 'edit', +} + +const TrustedJobGroupDetail: FC<{ isEdit?: boolean }> = ({ isEdit }) => { + const history = useHistory(); + const { t } = useTranslation(); + const projectId = useGetCurrentProjectId(); + const params = useParams<{ id: string; tabType: TrustedJobGroupTabType }>(); + const [trustedJobGroup, setTrustedJobGroup] = useState<TrustedJobGroup>(); + const [commentVisible, setCommentVisible] = useState(false); + const [comment, setComment] = useState(''); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const trustedJobGroupQuery = useQuery( + ['fetchTrustedJobGroupById', params.id], + () => { + return fetchTrustedJobGroupById(projectId!, params.id); + }, + { + retry: 1, + refetchOnWindowFocus: false, + onSuccess(res) { + setTrustedJobGroup(res.data); + }, + }, + ); + + const displayedProps = useMemo( + () => [ + { + value: + trustedJobGroup?.coordinator_id === 0 ? ( + t('trusted_center.label_coordinator_self') + ) : ( + <WhichParticipant id={trustedJobGroup?.coordinator_id} /> + ), + label: t('trusted_center.col_trusted_job_coordinator'), + }, + { + value: ( + <div> + <div>{getTicketAuthStatus(trustedJobGroup!).text}</div> + <Progress + percent={getTicketAuthStatus(trustedJobGroup!).percent} + showText={false} + style={{ width: 100 }} + status={getTicketAuthStatus(trustedJobGroup!).type} + /> + </div> + ), + label: t('trusted_center.col_trusted_job_status'), + }, + { + value: renderDatasetTooltip(trustedJobGroup!), + label: t('trusted_center.col_trusted_job_dataset'), + }, + { + value: trustedJobGroup?.creator_username || CONSTANTS.EMPTY_PLACEHOLDER, + label: t('trusted_center.col_trusted_job_creator'), + }, + { + value: formatTimestamp(trustedJobGroup?.updated_at || 0), + label: t('trusted_center.col_trusted_job_update_at'), + }, + { + value: formatTimestamp(trustedJobGroup?.created_at || 0), + label: t('trusted_center.col_trusted_job_create_at'), + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [trustedJobGroup], + ); + + return ( + <SharedPageLayout + title={ + <BackButton onClick={goBackToListPage}> + {t('trusted_center.label_trusted_center')} + </BackButton> + } + cardPadding={0} + > + <div className={styled.padding_container}> + <Row> + <Col span={12}> + <Space size="medium"> + <Avatar data-name={CONSTANTS.EMPTY_PLACEHOLDER} bgSrc={atomIcon} /> + <div> + <div className={styled.header_name}> + <div>{trustedJobGroup?.name ?? '....'}</div> + <Tag className={styled.header_name__tag}>可信计算</Tag> + <Tag className={styled.header_name__tag}> {`ID ${params.id}`}</Tag> + </div> + <Space className={styled.header_comment}> + {trustedJobGroup?.comment ?? 
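+ // group description under the title; placeholder when none was provided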
CONSTANTS.EMPTY_PLACEHOLDER} + </Space> + </div> + </Space> + </Col> + <Col span={12} className={styled.header_col}> + <Space> + <Button + type="primary" + disabled={ + !trustedJobGroup?.name || + trustedJobGroup.status !== TrustedJobGroupStatus.SUCCEEDED || + trustedJobGroup.auth_status !== AuthStatus.AUTHORIZED || + trustedJobGroup.unauth_participant_ids?.length !== 0 + } + onClick={() => { + setCommentVisible(true); + }} + > + {t('trusted_center.btn_post_task')} + </Button> + <Button + disabled={ + !trustedJobGroup?.name || + trustedJobGroup.status !== TrustedJobGroupStatus.SUCCEEDED + } + onClick={() => + trustedJobGroup?.coordinator_id + ? history.push(`/trusted-center/edit/${params.id}/receiver`) + : history.push(`/trusted-center/edit/${params.id}/sender`) + } + > + {t('edit')} + </Button> + <MoreActions + actionList={[ + { + label: t('delete'), + danger: true, + disabled: !trustedJobGroup?.name || Boolean(trustedJobGroup.coordinator_id), + onClick: () => { + Modal.confirm({ + title: `确认删除${trustedJobGroup?.name || ''}吗?`, + content: '删除后,该可信计算将无法进行操作,请谨慎删除', + onOk() { + deleteTrustedJobGroup(projectId!, params.id) + .then(() => { + Message.success(t('trusted_center.msg_delete_success')); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + }, + }, + ]} + /> + </Space> + </Col> + </Row> + <PropertyList + cols={6} + colProportions={[1, 1, 1, 1, 1.5, 1.5]} + properties={displayedProps} + /> + </div> + <Tabs + defaultActiveTab={params.tabType} + onChange={(tab) => history.push(getTabPath(tab))} + style={{ marginBottom: 0 }} + className={styled.data_detail_tab} + > + <Tabs.TabPane + title="计算任务" + key={TrustedJobGroupTabType.COMPUTING} + className={styled.data_detail_tab_pane} + /> + <Tabs.TabPane + title="导出任务" + key={TrustedJobGroupTabType.EXPORT} + className={styled.data_detail_tab_pane} + /> + </Tabs> + <div style={{ padding: '20px 20px 0' }}> + {params.tabType === TrustedJobGroupTabType.COMPUTING && ( + <ComputingJobTab trustedJobGroup={trustedJobGroup!} /> + )} + {params.tabType === TrustedJobGroupTabType.EXPORT && <ExportJobTab />} + </div> + <Modal + title={t('trusted_center.title_initiate_trusted_job', { name: trustedJobGroup?.name })} + visible={commentVisible} + onOk={() => onCommentModalConfirm()} + onCancel={() => { + setCommentVisible(false); + setComment(''); + }} + autoFocus={false} + focusLock={true} + > + <div className={styled.modal_label}>{t('trusted_center.label_trusted_job_comment')}</div> + <Input.TextArea + placeholder={t('trusted_center.placeholder_trusted_job_set_comment')} + autoSize={{ minRows: 3 }} + value={comment} + onChange={setComment} + /> + </Modal> + </SharedPageLayout> + ); + + function getTabPath(tabType: string) { + let path = `/trusted-center/detail/${params.id}/computing`; + switch (tabType) { + case TrustedJobGroupTabType.COMPUTING: + path = `/trusted-center/detail/${params.id}/${TrustedJobGroupTabType.COMPUTING}`; + break; + case TrustedJobGroupTabType.EXPORT: + path = `/trusted-center/detail/${params.id}/${TrustedJobGroupTabType.EXPORT}`; + break; + default: + break; + } + return path; + } + + function goBackToListPage() { + history.push(routeMaps.TrustedJobGroupList); + } + + function renderDatasetTooltip(record: TrustedJobGroup) { + if (!record) { + return CONSTANTS.EMPTY_PLACEHOLDER; + } + // without participant datasets + if (record.participant_datasets.items?.length === 0) { + return <WhichDataset.DatasetDetail id={record.dataset_id} />; + } + + const hasMyDataset = record.dataset_id !== 0; + let length = 
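+ // participant datasets plus our own (when dataset_id is set) feed the "+N" overflow tag below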
record.participant_datasets.items?.length || 0; + if (hasMyDataset) { + length += 1; + } + const datasets = record.participant_datasets.items!; + const nameList = datasets.map((item) => { + return item.name; + }); + + return ( + <div className={styled.display_dataset__tooltip}> + {hasMyDataset ? ( + <WhichDataset.DatasetDetail id={record.dataset_id} /> + ) : ( + <div style={{ marginTop: '3px' }}>{nameList[0]}</div> + )} + {length > 1 ? ( + <Tooltip + position="top" + trigger="hover" + color="#FFFFFF" + content={nameList.map((item, index) => { + if (!hasMyDataset && index === 0) return <></>; + return ( + <> + <Tag style={{ marginTop: '5px' }} key={index}> + {item} + </Tag> + <br /> + </> + ); + })} + > + <Tag>{`+${length - 1}`}</Tag> + </Tooltip> + ) : ( + <></> + )} + </div> + ); + } + + async function onCommentModalConfirm() { + const [res, error] = await to( + launchTrustedJobGroup(projectId!, params.id, { + comment: comment, + }), + ); + setCommentVisible(false); + setComment(''); + if (error) { + Message.error(error.message); + return; + } + if (res.data) { + const msg = '发布成功'; + Message.success(msg); + return; + } + } +}; + +export default TrustedJobGroupDetail; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupForm/index.less b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupForm/index.less new file mode 100644 index 000000000..0b9390f85 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupForm/index.less @@ -0,0 +1,21 @@ +.group-form-container { + .card { + .arco-card-body { + padding: 32px 40px; + .form { + max-width: 600px; + margin: 0 auto; + .form-section { + margin-bottom: 20px; + overflow: hidden; // bfc + > h3 { + margin-bottom: 20px; + font-weight: 500; + font-size: 14px; + color: #1d252f; + } + } + } + } + } +} diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupForm/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupForm/index.tsx new file mode 100644 index 000000000..763658118 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupForm/index.tsx @@ -0,0 +1,449 @@ +import React, { FC, useState } from 'react'; +import { useHistory, useParams } from 'react-router'; +import { Avatar, Button, Card, Form, Input, Message, Space, Spin } from '@arco-design/web-react'; +import { IconInfoCircle } from '@arco-design/web-react/icon'; +import { useToggle } from 'react-use'; +import { useQuery } from 'react-query'; +import { LabelStrong } from 'styles/elements'; +import { getResourceConfigInitialValues, defaultTrustedJobGroup } from '../shared'; +import { to } from 'shared/helpers'; + +import BackButton from 'components/BackButton'; +import DatasetSelect from 'components/DatasetSelect'; +import SharedPageLayout from 'components/SharedPageLayout'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import ResourceConfig, { Value as ResourceConfigValue } from 'components/ResourceConfig'; +import TitleWithIcon from 'components/TitleWithIcon'; +import { useTranslation } from 'react-i18next'; +import routeMaps from '../routes'; +import './index.less'; + +import { + useGetCurrentProjectId, + useGetCurrentProjectParticipantList, + useGetCurrentProjectParticipantName, + useIsFormValueChange, +} from 'hooks'; +import { + ResourceTemplateType, + TrustedJobGroupPayload, + ParticipantDataset, + AuthStatus, + TrustedJobGroup, + TrustedJobGroupStatus, +} from 'typings/trustedCenter'; +import { EnumAlgorithmProjectType } from 
'typings/algorithm'; +import { Dataset, DatasetDataType, DatasetKindBackEndType } from 'typings/dataset'; +import { + createTrustedJobGroup, + fetchTrustedJobGroupById, + launchTrustedJobGroup, + updateTrustedJobGroup, +} from 'services/trustedCenter'; +import AlgorithmSelect, { AlgorithmSelectValue } from 'components/AlgorithmSelect'; + +type FormData = TrustedJobGroupPayload & { + resource_config: ResourceConfigValue; + algorithm_type: EnumAlgorithmProjectType; + algorithm_info?: AlgorithmSelectValue; + self_dataset_info: Dataset; + participant: any; +}; + +const TrustedJobGroupForm: FC<{ isEdit?: boolean }> = ({ isEdit }) => { + const [formInstance] = Form.useForm<FormData>(); + const history = useHistory(); + const { t } = useTranslation(); + const projectId = useGetCurrentProjectId(); + const participantList = useGetCurrentProjectParticipantList(); + const participantName = useGetCurrentProjectParticipantName(); + const params = useParams<{ id: string; role: 'sender' | 'receiver' }>(); + + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); + const [trustedJobGroup, setTrustedJobGroup] = useState<TrustedJobGroup>(defaultTrustedJobGroup); + const [formData, setFormData] = useState<Partial<FormData>>(); + const [algorithmOwner, setAlgorithmOwner] = useState<string>(''); + const [isLaunch, toggleIsLaunch] = useToggle(false); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const trustedJobGroupQuery = useQuery( + ['fetchTrustedJobGroupById', params.id], + () => { + return fetchTrustedJobGroupById(projectId!, params.id); + }, + { + enabled: Boolean(isEdit), + retry: 1, + refetchOnWindowFocus: false, + onSuccess(res) { + const data = res.data; + setTrustedJobGroup(data); + setAlgorithmOwner(data.algorithm_participant_id === 0 ? 'self' : 'peer'); + const participant_datasets: any[] = []; + data.participant_datasets.items.forEach((item) => { + participant_datasets[item.participant_id as number] = { + dataset_info: { + participant_id: item.participant_id, + uuid: item.uuid, + name: item.name, + }, + }; + }); + formInstance.setFieldsValue({ + name: data.name, + comment: data.comment, + algorithm_id: data.algorithm_id, + algorithm_info: { + //可信计算暂时不需要配置算法超参数 + algorithmId: data.algorithm_id, + algorithmProjectUuid: data.algorithm_project_uuid, + algorithmUuid: data.algorithm_uuid, + participantId: data.algorithm_participant_id, + }, + + self_dataset_info: { + id: data.dataset_id, + }, + participant: participant_datasets, + resource_config: data?.resource + ? getResourceConfigInitialValues(data.resource!) + : undefined, + }); + }, + }, + ); + + const isReceiver = params.role === 'receiver'; + const isPeerUnauthorized = isReceiver && !trustedJobGroup?.resource; + const algorithmType = EnumAlgorithmProjectType.TRUSTED_COMPUTING; + + return ( + <SharedPageLayout + title={ + <BackButton onClick={goBackToListPage}> + {t('trusted_center.label_trusted_center')} + </BackButton> + } + contentWrapByCard={false} + centerTitle={isEdit ? (isPeerUnauthorized ? '授权可信计算' : '编辑可信计算') : '创建可信计算'} + > + <Spin loading={trustedJobGroupQuery.isLoading}> + <div className="group-form-container"> + {isPeerUnauthorized ? 
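+ // an unauthorized receiver gets the banner-plus-form layout; everyone else sees the plain form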
renderReceiverLayout() : renderSenderLayout()} + </div> + </Spin> + </SharedPageLayout> + ); + + function renderReceiverLayout() { + return ( + <> + {isEdit && renderBannerCard()} + {renderContentCard()} + </> + ); + } + function renderSenderLayout() { + return <>{renderContentCard()}</>; + } + function renderBannerCard() { + const title = t('trusted_center.title_authorization_request', { + peerName: participantName, + name: trustedJobGroupQuery.data?.data?.name ?? '', + }); + return ( + <Card className="card" bordered={false} style={{ marginBottom: 20 }}> + <Space size="medium"> + <Avatar /> + <> + <LabelStrong fontSize={16}>{title ?? '....'}</LabelStrong> + <TitleWithIcon + title={t('trusted_center.tip_agree_authorization')} + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + </> + </Space> + </Card> + ); + } + function renderContentCard() { + return ( + <Card className="card" bordered={false}> + <Form + className="form" + form={formInstance} + initialValues={formData} + onSubmit={onSubmit} + onValuesChange={onFormValueChange} + scrollToFirstError + > + {renderBaseInfoConfig()} + {renderComputingConfig()} + {renderResourceConfig()} + {renderFooterButton()} + </Form> + </Card> + ); + } + + function renderBaseInfoConfig() { + return ( + <section className="form-section"> + <h3>{t('trusted_center.title_base_info')}</h3> + <Form.Item + field="name" + label={t('trusted_center.label_computing_name')} + rules={[{ required: true, message: t('trusted_center.msg_required') }]} + disabled={isEdit} + > + <Input placeholder={t('trusted_center.placeholder_input')} /> + </Form.Item> + <Form.Item field="comment" label={t('trusted_center.label_description')}> + <Input.TextArea placeholder={t('trusted_center.placeholder_input_comment')} /> + </Form.Item> + </section> + ); + } + + function renderComputingConfig() { + return ( + <section className="form-section"> + <h3>{t('trusted_center.title_computing_config')}</h3> + <Form.Item + field="algorithm_info" + label={t('trusted_center.label_algorithm_select')} + rules={[{ required: true, message: t('trusted_center.msg_required') }]} + > + <AlgorithmSelect + algorithmType={[algorithmType]} + algorithmOwnerType={algorithmOwner} + onAlgorithmOwnerChange={(value: any) => setAlgorithmOwner(value)} + leftDisabled={isEdit || isReceiver} + rightDisabled={isReceiver} + showHyperParameters={false} + filterReleasedAlgo={true} + /> + </Form.Item> + <Form.Item + field="self_dataset_info" + label={t('trusted_center.label_our_dataset')} + disabled={isEdit} + > + <DatasetSelect + lazyLoad={{ + enable: true, + page_size: 10, + }} + isParticipant={false} + isCreateVisible={!isPeerUnauthorized} + filterOptions={{ + dataset_format: [DatasetDataType.NONE_STRUCTURED], + dataset_kind: [DatasetKindBackEndType.RAW], + }} + placeholder={t('trusted_center.placeholder_select')} + /> + </Form.Item> + {participantList.map((item, index) => { + return ( + <Form.Item + field={`participant.${item.id}.dataset_info`} + label={item.name} + disabled={isEdit} + > + <DatasetSelect + queryParams={{ + //TODO Temporarily obtain full data and will be removed soon + page_size: 0, + }} + isParticipant={true} + isCreateVisible={!isPeerUnauthorized} + filterOptions={{ + dataset_format: [DatasetDataType.NONE_STRUCTURED], + dataset_kind: [DatasetKindBackEndType.RAW], + participant_id: item.id, + }} + placeholder={t('trusted_center.placeholder_select_dataset')} + /> + </Form.Item> + ); + })} + </section> + ); + } + + function renderResourceConfig() { + return ( + <section 
className="form-section"> + <h3>{t('trusted_center.title_resource_config')}</h3> + <Form.Item + field="resource_config" + label={t('model_center.label_resource_template')} + rules={[{ required: true, message: t('model_center.msg_required') }]} + > + <ResourceConfig + isTrustedCenter={true} + defaultResourceType={ResourceTemplateType.CUSTOM} + isIgnoreFirstRender={isReceiver} + /> + </Form.Item> + </section> + ); + } + + function renderFooterButton() { + let submitText = '提交并申请'; + if (isPeerUnauthorized) { + submitText = '确认授权'; + } else if (isEdit) { + submitText = '保存并执行'; + } + + return ( + <> + {!isReceiver && !isEdit && ( + <TitleWithIcon + title={t('trusted_center.msg_trusted_computing_create')} + isLeftIcon={true} + isShowIcon={true} + icon={IconInfoCircle} + /> + )} + <Space> + {isPeerUnauthorized || !isEdit ? ( + <></> + ) : ( + <Button type="primary" onClick={() => formInstance.submit()}> + 保存 + </Button> + )} + <Button + type="primary" + disabled={ + isEdit && + !isPeerUnauthorized && + (trustedJobGroup?.status !== TrustedJobGroupStatus.SUCCEEDED || + trustedJobGroup?.auth_status !== AuthStatus.AUTHORIZED || + trustedJobGroup?.unauth_participant_ids?.length !== 0) + } + onClick={() => { + if (isEdit && !isPeerUnauthorized) { + toggleIsLaunch(); + } + formInstance.submit(); + }} + > + {submitText} + </Button> + <ButtonWithModalConfirm + isShowConfirmModal={isFormValueChanged} + onClick={goBackToListPage} + > + {t('cancel')} + </ButtonWithModalConfirm> + </Space> + </> + ); + } + + async function onSubmit() { + if (!projectId) { + return Message.error(t('select_project_notice')); + } + // validate params + const self_dataset_info = formInstance.getFieldValue('self_dataset_info'); + const participant_datasets: ParticipantDataset[] = []; + const participantParams: any = formInstance.getFieldValue('participant'); + // self and participants dataset empty + if (!self_dataset_info && !participantParams) { + Message.warning('我方数据集及合作伙伴数据集不能全为空!'); + return; + } + participantList.forEach((item) => { + const dataset_info = participantParams?.[item.id]?.dataset_info || {}; + if (Object.keys(dataset_info).length === 0) { + return; + } + participant_datasets.push({ + participant_id: item.id, + name: dataset_info.name, + uuid: dataset_info.uuid, + }); + }); + const resource = formInstance.getFieldValue('resource_config') as ResourceConfigValue; + + if (isEdit) { + // edit + const payload = { + comment: formInstance.getFieldValue('comment'), + algorithm_uuid: isReceiver + ? undefined + : (formInstance.getFieldValue('algorithm_info') as AlgorithmSelectValue).algorithmUuid, + resource: { + cpu: parseInt(resource.worker_cpu?.replace('m', '') || ''), + memory: parseInt(resource.worker_mem?.replace('Gi', '') || ''), + replicas: parseInt(resource.worker_replicas || ''), + }, + auth_status: isReceiver ? 
AuthStatus.AUTHORIZED : undefined, + } as TrustedJobGroupPayload; + + const [res, error] = await to(updateTrustedJobGroup(projectId, params.id, payload)); + if (error) { + Message.error(error.message); + return; + } + if (res.data) { + if (isPeerUnauthorized) { + Message.success('授权成功'); + } else { + Message.success('编辑成功'); + } + // launch trusted computing group + if (isLaunch) { + launchTrustedJobGroup(projectId, params.id, { comment: '' }).catch((error) => { + Message.error(error.message); + }); + } + history.push('/trusted-center/list'); + return; + } + } else { + // create + const payload = { + name: formInstance.getFieldValue('name'), + comment: formInstance.getFieldValue('comment'), + algorithm_uuid: (formInstance.getFieldValue('algorithm_info') as AlgorithmSelectValue) + .algorithmUuid, + dataset_id: self_dataset_info ? self_dataset_info.id : undefined, + participant_datasets: participant_datasets, + resource: { + cpu: parseInt(resource.worker_cpu?.replace('m', '') || ''), + memory: parseInt(resource.worker_mem?.replace('Gi', '') || ''), + replicas: parseInt(resource.worker_replicas || ''), + }, + } as TrustedJobGroupPayload; + + const [res, error] = await to(createTrustedJobGroup(projectId, payload)); + if (error) { + Message.error(error.message); + return; + } + if (res.data) { + Message.success(t('trusted_center.msg_create_success')); + history.push('/trusted-center/list'); + return; + } + } + } + + function goBackToListPage() { + history.push(routeMaps.TrustedJobGroupList); + } + + function onFormChange(_: Partial<TrustedJobGroupPayload>, values: TrustedJobGroupPayload) { + setFormData(values); + } +}; + +export default TrustedJobGroupForm; diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupList/index.less b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupList/index.less new file mode 100644 index 000000000..3f1720ead --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupList/index.less @@ -0,0 +1,5 @@ +.group-list-container { + display: flex; + flex: 1; + width: 100%; +} diff --git a/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupList/index.tsx b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupList/index.tsx new file mode 100644 index 000000000..0f83e8c95 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/TrustedJobGroupList/index.tsx @@ -0,0 +1,430 @@ +import SharedPageLayout from 'components/SharedPageLayout'; +import React, { FC, useMemo, useState } from 'react'; +import { generatePath, useHistory } from 'react-router'; +import { Link } from 'react-router-dom'; +import { useQuery } from 'react-query'; +import { useGetCurrentProjectId, useGetCurrentProjectParticipantList, useUrlState } from 'hooks'; +import './index.less'; + +import { + Button, + Input, + Popconfirm, + Table, + Progress, + Message, + Tooltip, +} from '@arco-design/web-react'; +import { IconPlus } from '@arco-design/web-react/icon'; +import { ColumnProps } from '@arco-design/web-react/es/Table'; + +import i18n from 'i18n'; +import { useTranslation } from 'react-i18next'; + +import GridRow from 'components/_base/GridRow'; +import MoreActions, { ActionItem } from 'components/MoreActions'; +import Modal from 'components/Modal'; +import TodoPopover from 'components/TodoPopover'; +import NoResult from 'components/NoResult'; +import WhichParticipant from 'components/WhichParticipant'; +import routeMaps from '../routes'; + +import { + deleteTrustedJobGroup, + fetchTrustedJobGroupList, + launchTrustedJobGroup, + 
updateTrustedJobGroup, +} from 'services/trustedCenter'; +import { FilterOp } from 'typings/filter'; +import { + AuthStatus, + TrustedJobGroupItem, + TrustedJobGroupStatus, + TicketAuthStatus, +} from 'typings/trustedCenter'; +import { formatTimestamp } from 'shared/date'; +import { getTicketAuthStatus, getLatestJobStatus } from 'shared/trustedCenter'; +import { to } from 'shared/helpers'; +import { constructExpressionTree, expression2Filter } from 'shared/filter'; +import StateIndicator from 'components/StateIndicator'; + +export const LIST_QUERY_KEY = 'task_list_query'; + +type QueryParams = { + name?: string; +}; + +const TrustedJobList: FC = () => { + const { t } = useTranslation(); + const history = useHistory(); + const projectId = useGetCurrentProjectId(); + const participantList = useGetCurrentProjectParticipantList(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: initFilter(), + }); + const [commentVisible, setCommentVisible] = useState(false); + const [comment, setComment] = useState(''); + const [selectedTrustedJobGroup, setSelectedTrustedJobGroup] = useState<TrustedJobGroupItem>(); + const listQueryKey = [LIST_QUERY_KEY, projectId, urlState]; + const initFilterParams = expression2Filter(urlState.filter); + const [filterParams, setFilterParams] = useState<QueryParams>({ + name: initFilterParams.name || '', + }); + + const listQuery = useQuery( + [listQueryKey, urlState], + () => { + return fetchTrustedJobGroupList(projectId!, urlState); + }, + { + retry: 2, + refetchOnWindowFocus: false, + }, + ); + + const trustedJobListShow = useMemo(() => { + if (!listQuery.data?.data) { + return []; + } + const trustedJobList = (listQuery.data.data || []).filter( + (item) => item.is_configured === true, + ); + return trustedJobList; + }, [listQuery.data]); + + const isEmpty = trustedJobListShow.length === 0; + + const columns = useMemo<ColumnProps<TrustedJobGroupItem>[]>( + () => [ + { + title: '名称', + dataIndex: 'name', + name: 'name', + ellipsis: true, + render: (name: string, record: TrustedJobGroupItem) => { + return <Link to={gotoTrustedJobGroupDetail(record)}>{name}</Link>; + }, + }, + { + title: '发起方', + dataIndex: 'creator_name', + name: 'creator_name', + render: (_: any, record: TrustedJobGroupItem) => { + return record.is_creator ? '本方' : <WhichParticipant id={record.creator_id} />; + }, + }, + { + title: '授权状态', + dataIndex: 'ticket_auth_status', + name: 'ticket_auth_status', + render: (_: any, record: TrustedJobGroupItem) => { + const data = getTicketAuthStatus(record); + return ( + <> + <Tooltip + position="tl" + content={ + record.ticket_auth_status === TicketAuthStatus.AUTH_PENDING + ? 
renderUnauthParticipantList(record) + : undefined + } + > + <div>{data.text}</div> + </Tooltip> + <Progress + percent={data.percent} + showText={false} + style={{ width: 100 }} + status={data.type} + /> + </> + ); + }, + }, + { + title: '任务状态', + dataIndex: 'status', + name: 'status', + render: (_: any, record: TrustedJobGroupItem) => { + return ( + <div className="indicator-with-tip"> + <StateIndicator {...getLatestJobStatus(record)} /> + </div> + ); + }, + }, + { + title: '创建时间', + dataIndex: 'created_at', + name: 'created_at', + sorter(a: TrustedJobGroupItem, b: TrustedJobGroupItem) { + return a.created_at - b.created_at; + }, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + render: (_: any, record: TrustedJobGroupItem) => { + const actionList = [ + { + label: '编辑', + disabled: record.status !== TrustedJobGroupStatus.SUCCEEDED, + onClick: () => { + const editPath = generatePath(routeMaps.TrustedJobGroupEdit, { + id: record.id, + role: record.is_creator ? 'sender' : 'receiver', + }); + history.push(editPath); + }, + }, + { + label: '删除', + disabled: !record.is_creator, + onClick: () => { + Modal.delete({ + title: `确认删除${record.name || ''}吗?`, + content: '删除后,该可信计算将无法进行操作,请谨慎删除', + onOk() { + deleteTrustedJobGroup(projectId!, record.id) + .then(() => { + Message.success(t('trusted_center.msg_delete_success')); + listQuery.refetch(); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + }, + danger: true, + }, + ].filter(Boolean) as ActionItem[]; + + return ( + <GridRow left="-20"> + <Button + type="text" + onClick={() => { + setCommentVisible(true); + setSelectedTrustedJobGroup(record); + }} + disabled={ + record.status !== TrustedJobGroupStatus.SUCCEEDED || + record.auth_status !== AuthStatus.AUTHORIZED || + record.unauth_participant_ids?.length !== 0 + } + > + {'发起任务'} + </Button> + {record.auth_status === AuthStatus.AUTHORIZED ? ( + <Popconfirm + title={i18n.t('trusted_center.unauthorized_confirm_title', { + name: record.name, + })} + okText={i18n.t('submit')} + cancelText={i18n.t('cancel')} + onConfirm={() => onUnauthorizedConfirm(record)} + > + <Button type="text" disabled={record.status !== TrustedJobGroupStatus.SUCCEEDED}> + {'撤销'} + </Button> + </Popconfirm> + ) : ( + <Button + type="text" + disabled={record.status !== TrustedJobGroupStatus.SUCCEEDED} + onClick={() => { + updateTrustedJobGroup(projectId!, record.id, { + auth_status: AuthStatus.AUTHORIZED, + }).then(() => { + listQuery.refetch(); + }); + }} + > + {'授权'} + </Button> + )} + <MoreActions actionList={actionList} /> + </GridRow> + ); + }, + }, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [listQuery], + ); + + return ( + <SharedPageLayout + title={i18n.t('menu.label_trusted_center')} + rightTitle={<TodoPopover.TrustedCenter />} + > + <GridRow justify="space-between" align="center"> + <Button + className={'custom-operation-button'} + type="primary" + icon={<IconPlus />} + onClick={onCreateClick} + > + {t('trusted_center.btn_create_trusted_computing')} + </Button> + <Input.Search + allowClear + defaultValue={filterParams.name} + onSearch={onSearch} + onClear={() => onSearch('')} + placeholder={t('trusted_center.placeholder_search_task')} + /> + </GridRow> + <div className="group-list-container"> + {isEmpty ? 
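+ // isEmpty reflects the filtered list of configured groups computed above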
( + <NoResult text="暂无工作可信计算任务" /> + ) : ( + <Table + rowKey="id" + className="custom-table custom-table-left-side-filter" + loading={listQuery.isFetching} + data={trustedJobListShow} + scroll={{ x: '100%' }} + columns={columns} + pagination={{ + showTotal: true, + hideOnSinglePage: true, + pageSizeChangeResetCurrent: true, + total: listQuery.data?.page_meta?.total_items ?? undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + }} + /> + )} + </div> + <Modal + title={t('trusted_center.title_initiate_trusted_job', { + name: selectedTrustedJobGroup?.name, + })} + id={selectedTrustedJobGroup?.id} + visible={commentVisible} + onOk={() => onCommentModalConfirm()} + onCancel={() => { + setCommentVisible(false); + setComment(''); + }} + autoFocus={false} + focusLock={true} + > + <div className="modal-label">{t('trusted_center.label_trusted_job_comment')}</div> + <Input.TextArea + placeholder={t('trusted_center.placeholder_trusted_job_set_comment')} + autoSize={{ minRows: 3 }} + value={comment} + onChange={setComment} + /> + </Modal> + </SharedPageLayout> + ); + + function renderUnauthParticipantList(record: any) { + return ( + <div> + {participantList.map((item) => { + return ( + <div> + {`${item.name} ${ + record.unauth_participant_ids.includes(item.id) ? '未授权' : '已授权' + }`} + </div> + ); + })} + </div> + ); + } + + function onUnauthorizedConfirm(record: any) { + updateTrustedJobGroup(projectId!, record.id, { + auth_status: AuthStatus.PENDING, + }).then(() => { + listQuery.refetch(); + }); + } + + function gotoTrustedJobGroupDetail(record: any) { + return generatePath(routeMaps.TrustedJobGroupDetail, { + id: record.id, + tabType: 'computing', + }); + } + + function onCreateClick() { + const createPath = generatePath(routeMaps.TrustedJobGroupCreate, { + role: 'sender', + }); + history.push(createPath); + } + + function onSearch(value: any) { + constructFilterArray({ name: value }); + } + + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + if (value.name) { + expressionNodes.push({ + field: 'name', + op: FilterOp.CONTAIN, + string_value: value.name, + }); + } + const serialization = constructExpressionTree(expressionNodes); + setFilterParams({ + name: value.name, + }); + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + page: 1, + })); + } + + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + + async function onCommentModalConfirm() { + const [res, error] = await to( + launchTrustedJobGroup(projectId!, selectedTrustedJobGroup!.id, { + comment: comment, + }), + ); + setCommentVisible(false); + if (error) { + Message.error(error.message); + return; + } + if (res.data) { + Message.success(t('trusted_center.msg_publish_success')); + listQuery.refetch(); + return; + } + } + + function initFilter() { + const expressionNodes = []; + expressionNodes.push({ + field: 'name', + op: FilterOp.CONTAIN, + string_value: '', + }); + return constructExpressionTree(expressionNodes); + } +}; + +export default TrustedJobList; diff --git a/web_console_v2/client/src/views/TrustedCenter/index.tsx b/web_console_v2/client/src/views/TrustedCenter/index.tsx new file mode 100644 index 000000000..0e2444482 --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/index.tsx @@ -0,0 +1,43 @@ +import React, { FC } from 'react'; +import { Redirect, Route, Switch, useLocation } from 'react-router-dom'; +import 
ErrorBoundary from 'components/ErrorBoundary'; +import TrustedJobList from './TrustedJobGroupList'; +import CreateTrustedJobGroup from './CreateTrustedJobGroup'; +import EditTrustedJobGroup from './EditTrustedJobGroup'; +import TrustedJobGroupDetail from './TrustedJobGroupDetail'; +import DatasetExportApplication from './DatasetExportApplication'; +import ApplicationResult from './DatasetExportApplication/ApplicationResult'; + +const TrustedCenter: FC = () => { + const location = useLocation(); + return ( + <ErrorBoundary> + <Switch> + <Route path="/trusted-center/list" exact component={TrustedJobList} /> + <Route + path="/trusted-center/create/:role(receiver|sender)" + component={CreateTrustedJobGroup} + /> + <Route + path="/trusted-center/edit/:id/:role(receiver|sender)" + component={EditTrustedJobGroup} + /> + <Route + path="/trusted-center/detail/:id/:tabType(computing|export)" + component={TrustedJobGroupDetail} + /> + <Route + path="/trusted-center/dataset-application/:result(passed|rejected)" + component={ApplicationResult} + /> + <Route + path="/trusted-center/dataset-application/:id/:coordinator_id/:name" + component={DatasetExportApplication} + /> + {location.pathname === '/trusted-center' && <Redirect to="/trusted-center/list" />} + </Switch> + </ErrorBoundary> + ); +}; + +export default TrustedCenter; diff --git a/web_console_v2/client/src/views/TrustedCenter/routes.tsx b/web_console_v2/client/src/views/TrustedCenter/routes.tsx new file mode 100644 index 000000000..9af3d351b --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/routes.tsx @@ -0,0 +1,10 @@ +const INDEX_PATH = '/trusted-center'; + +const routes: Record<string, string> = { + TrustedJobGroupList: `${INDEX_PATH}/list`, + TrustedJobGroupDetail: `${INDEX_PATH}/detail/:id/:tabType(computing|export)`, + TrustedJobGroupCreate: `${INDEX_PATH}/create/:role(receiver|sender)`, + TrustedJobGroupEdit: `${INDEX_PATH}/edit/:id/:role(receiver|sender)`, +}; + +export default routes; diff --git a/web_console_v2/client/src/views/TrustedCenter/shared.test.ts b/web_console_v2/client/src/views/TrustedCenter/shared.test.ts new file mode 100644 index 000000000..2e8d1958c --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/shared.test.ts @@ -0,0 +1,27 @@ +import { getResourceConfigInitialValues } from './shared'; +import { ResourceTemplateType, TrustedJobResource } from 'typings/trustedCenter'; + +describe('getResourceConfigInitialValues', () => { + it('normal', () => { + const resource: TrustedJobResource = { + cpu: 16, + memory: 32, + replicas: 100, + }; + expect(getResourceConfigInitialValues(resource)).toEqual({ + master_cpu: '0m', + master_mem: '0Gi', + master_replicas: '1', + master_roles: 'master', + ps_cpu: '0m', + ps_mem: '0Gi', + ps_replicas: '1', + ps_roles: 'ps', + resource_type: ResourceTemplateType.CUSTOM, + worker_cpu: '16m', + worker_mem: '32Gi', + worker_replicas: '100', + worker_roles: 'worker', + }); + }); +}); diff --git a/web_console_v2/client/src/views/TrustedCenter/shared.ts b/web_console_v2/client/src/views/TrustedCenter/shared.ts new file mode 100644 index 000000000..307a6c5af --- /dev/null +++ b/web_console_v2/client/src/views/TrustedCenter/shared.ts @@ -0,0 +1,74 @@ +import styled from 'styled-components'; +import { MixinSquare } from 'styles/mixins'; +import atomIcon from 'assets/icons/atom-icon-algorithm-management.svg'; +import { + AuthStatus, + ResourceTemplateType, + TicketAuthStatus, + TicketStatus, + TrustedJobGroup, + TrustedJobResource, +} from 'typings/trustedCenter'; + 
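+// Square avatar tile (MixinSquare(48)); bgSrc, or the default algorithm icon, is drawn as a contained ::before background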
+export const Avatar = styled.div<{ bgSrc?: string }>` + ${MixinSquare(48)}; + background-color: var(--primary-1); + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + + &::before { + display: inline-block; + width: 100%; + height: 100%; + content: ''; + background: url(${(props) => props.bgSrc || atomIcon}) no-repeat; + background-size: contain; + } +`; + +export const AuthStatusMap: Record<AuthStatus, string> = { + [AuthStatus.AUTHORIZED]: '已授权', + [AuthStatus.PENDING]: '待授权', + [AuthStatus.WITHDRAW]: '拒绝授权', +}; + +export function getResourceConfigInitialValues(resource: TrustedJobResource) { + return { + master_cpu: '0m', + master_mem: '0Gi', + master_replicas: '1', + master_roles: 'master', + ps_cpu: '0m', + ps_mem: '0Gi', + ps_replicas: '1', + ps_roles: 'ps', + resource_type: ResourceTemplateType.CUSTOM, + worker_cpu: resource.cpu + 'm', + worker_mem: resource.memory + 'Gi', + worker_replicas: String(resource.replicas), + worker_roles: 'worker', + }; +} + +export const defaultTrustedJobGroup: TrustedJobGroup = { + id: 0, + name: '', + uuid: 0, + latest_version: '1', + comment: '', + project_id: 0, + ticket_uuid: 0, + ticket_status: TicketStatus.PENDING, + ticket_auth_status: TicketAuthStatus.CREATE_PENDING, + creator_name: '', + participant_datasets: { + items: [], + }, + resource: { + cpu: 4, + memory: 8, + replicas: 1, + }, +}; diff --git a/web_console_v2/client/src/views/Users/UserCreate/index.tsx b/web_console_v2/client/src/views/Users/UserCreate/index.tsx index 24f6fa9da..3de8f94f8 100644 --- a/web_console_v2/client/src/views/Users/UserCreate/index.tsx +++ b/web_console_v2/client/src/views/Users/UserCreate/index.tsx @@ -1,10 +1,13 @@ -import { message } from 'antd'; import React, { FC } from 'react'; import { useHistory } from 'react-router-dom'; + import { createNewUser } from 'services/user'; -import { FedRoles, FedUserInfo } from 'typings/auth'; + +import { Message } from '@arco-design/web-react'; import UserForm from '../UserForm'; +import { FedRoles, FedUserInfo } from 'typings/auth'; + const UserCreate: FC = () => { const history = useHistory(); @@ -15,8 +18,21 @@ const UserCreate: FC = () => { .then(() => { history.push('/users'); }) - .catch((e) => { - message.error(e.toString()); + .catch((error) => { + const { code, message } = error; + + let displayMessage = message; + + // Get username from error.message + // e.g. 
user username1 already exists => username1 + const regx = /user (.+) already exists/; + const result = String(message).match(regx); + + if (code === 409 && result && result[1]) { + displayMessage = `用户名:${result[1]} 已存在或者被删除`; + } + + Message.error(displayMessage); }); } }; diff --git a/web_console_v2/client/src/views/Users/UserEdit/index.tsx b/web_console_v2/client/src/views/Users/UserEdit/index.tsx index c14fe114b..06f715274 100644 --- a/web_console_v2/client/src/views/Users/UserEdit/index.tsx +++ b/web_console_v2/client/src/views/Users/UserEdit/index.tsx @@ -1,14 +1,15 @@ -import { message } from 'antd'; import React, { FC } from 'react'; -import { useTranslation } from 'react-i18next'; -import { useQuery } from 'react-query'; import { useHistory, useParams } from 'react-router-dom'; +import { useQuery } from 'react-query'; + import { fetchUserInfo, updateUser } from 'services/user'; -import { FedUserInfo } from 'typings/auth'; + +import { Message } from '@arco-design/web-react'; import UserForm from '../UserForm'; +import { FedUserInfo } from 'typings/auth'; + const UserEdit: FC = () => { - const { t } = useTranslation(); const { id } = useParams<{ id: string }>(); const currUserQuery = useQuery(['getCurrUserInfo', id], () => fetchUserInfo(id), { @@ -32,11 +33,11 @@ const UserEdit: FC = () => { await updateUser(initialValues.id!, payload) .then(() => { - message.success(t('users.message_modify_success')); + Message.success('修改成功'); history.push('/users'); }) .catch((e) => { - message.error(e.toString()); + Message.error(e.toString()); }); } diff --git a/web_console_v2/client/src/views/Users/UserForm/index.module.less b/web_console_v2/client/src/views/Users/UserForm/index.module.less new file mode 100644 index 000000000..7c0da6189 --- /dev/null +++ b/web_console_v2/client/src/views/Users/UserForm/index.module.less @@ -0,0 +1,6 @@ +.styled_form { + width: 600px; + margin: 0 auto; + background-color: white; + padding-top: 80px; +} diff --git a/web_console_v2/client/src/views/Users/UserForm/index.tsx b/web_console_v2/client/src/views/Users/UserForm/index.tsx index f287b71b2..6a9f38149 100644 --- a/web_console_v2/client/src/views/Users/UserForm/index.tsx +++ b/web_console_v2/client/src/views/Users/UserForm/index.tsx @@ -1,84 +1,111 @@ -import { Button, Form, Input, Radio } from 'antd'; -import GridRow from 'components/_base/GridRow'; import React, { FC } from 'react'; -import { useTranslation } from 'react-i18next'; import { useHistory } from 'react-router-dom'; import { useToggle } from 'react-use'; -import { validatePassword } from 'shared/validator'; -import styled from 'styled-components'; -import { FedUserInfo } from 'typings/auth'; +import styled from './index.module.less'; + +import { validNamePattern, validEmailPattern, validPasswordPattern } from 'shared/validator'; + +import { Button, Form, Input, Radio } from '@arco-design/web-react'; +import GridRow from 'components/_base/GridRow'; import SharedPageLayout, { FormHeader } from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; -const StyledForm = styled(Form)` - width: 600px; - margin: 0 auto; - background-color: white; - padding-top: 80px; -`; +import { FedUserInfo } from 'typings/auth'; const UserForm: FC<{ isEdit?: boolean; onSubmit?: any; initialValues?: any }> = ({ isEdit, onSubmit, initialValues, }) => { - const { t } = useTranslation(); const [form] = Form.useForm<FedUserInfo>(); const history = useHistory(); const [submitting, toggleSubmitting] = useToggle(false); return ( <SharedPageLayout - 
title={<BackButton onClick={() => history.goBack()}>{t('menu.label_users')}</BackButton>} + title={<BackButton onClick={() => history.goBack()}>{'用户管理'}</BackButton>} > - <FormHeader>{isEdit ? t('users.title_user_edit') : t('users.title_user_create')}</FormHeader> - <StyledForm + <FormHeader>{isEdit ? '编辑用户' : '创建用户'}</FormHeader> + <Form + className={styled.styled_form} labelCol={{ span: 6 }} wrapperCol={{ span: 14 }} initialValues={initialValues} form={form} - onFinish={onFinish} + onSubmit={onFinish} > - <Form.Item name="id" label={t('users.col_id')} hidden={!isEdit}> + <Form.Item field="id" label={'ID'} hidden={!isEdit}> <Input disabled={true} /> </Form.Item> - <Form.Item name="username" label={t('users.col_username')} rules={[{ required: true }]}> - <Input disabled={isEdit} placeholder={t('users.placeholder_username')} /> + <Form.Item + field="username" + label={'用户名'} + rules={[ + { required: true }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input disabled={isEdit} placeholder={'请输入用户名'} /> </Form.Item> - <Form.Item name="password" label={t('users.col_password')} rules={[ - { - async validator(_, value: string) { - return validatePassword(value); + <Form.Item + field="password" + label={'密码'} + rules={[ + { required: !isEdit }, + { + match: validPasswordPattern, + message: + '请输入正确的密码格式,至少包含一个字母、一个数字、一个特殊字符,且长度在8到20之间', }, - }, - ]}> - <Input placeholder={t('users.placeholder_password')} /> + ]} + > + <Input placeholder={'请输入登陆密码'} /> </Form.Item> - <Form.Item name="name" label={t('users.col_name')} rules={[{ required: true }]}> - <Input placeholder={t('users.placeholder_name')} /> + <Form.Item + field="name" + label={'名称'} + rules={[ + { required: true }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} + > + <Input placeholder={'请输入用户昵称'} /> </Form.Item> - <Form.Item name="email" label={t('users.col_email')} rules={[{ required: true }]}> - <Input placeholder={t('users.placeholder_email')} /> + <Form.Item + field="email" + label={'邮箱'} + rules={[ + { required: true }, + { match: validEmailPattern, message: '请输入正确的邮箱格式' }, + ]} + > + <Input placeholder={'请输入用户邮箱'} /> </Form.Item> - <Form.Item name="role" label={t('users.col_role')} rules={[{ required: true }]}> - <Radio.Group> - <Radio.Button value="USER">{t('users.role_user')}</Radio.Button> - <Radio.Button value="ADMIN">{t('users.role_admin')}</Radio.Button> + <Form.Item field="role" label={'角色'} rules={[{ required: true }]}> + <Radio.Group type="button"> + <Radio value="USER">{'普通用户'}</Radio> + <Radio value="ADMIN">{'管理员'}</Radio> </Radio.Group> </Form.Item> <Form.Item wrapperCol={{ offset: 6 }}> <GridRow gap={16} top="12"> <Button loading={submitting} type="primary" htmlType="submit"> - {t('users.btn_submit')} + {'提交'} </Button> <Button disabled={submitting} onClick={backToList}> - {t('cancel')} + {'取消'} </Button> </GridRow> </Form.Item> - </StyledForm> + </Form> </SharedPageLayout> ); async function backToList() { @@ -88,7 +115,9 @@ const UserForm: FC<{ isEdit?: boolean; onSubmit?: any; initialValues?: any }> = async function onFinish(data: any) { try { toggleSubmitting(true); - data.password = btoa(data.password); + if (data.password) { + data.password = btoa(data.password); + } await onSubmit(data); } catch { // ignore error diff --git a/web_console_v2/client/src/views/Users/UserList/index.module.less b/web_console_v2/client/src/views/Users/UserList/index.module.less new file mode 100644 index 000000000..6d3ccfd3a --- 
/dev/null
+++ b/web_console_v2/client/src/views/Users/UserList/index.module.less
@@ -0,0 +1,4 @@
+.list_container {
+  display: flex;
+  flex: 1;
+}
diff --git a/web_console_v2/client/src/views/Users/UserList/index.tsx b/web_console_v2/client/src/views/Users/UserList/index.tsx
index 1b7ca4723..7bd62b943 100644
--- a/web_console_v2/client/src/views/Users/UserList/index.tsx
+++ b/web_console_v2/client/src/views/Users/UserList/index.tsx
@@ -1,37 +1,39 @@
-import React, { FC, useState, useMemo } from 'react';
-import SharedPageLayout from 'components/SharedPageLayout';
-import { useTranslation } from 'react-i18next';
-import { Row, Button, Col, Form, Input, Table, message, Tag, Popconfirm } from 'antd';
+import React, { FC, useMemo } from 'react';
+import styled from './index.module.less';
 import { useHistory, Link } from 'react-router-dom';
-import styled from 'styled-components';
 import { useQuery } from 'react-query';
+
+import { useRecoilQuery } from 'hooks/recoil';
+import { userInfoQuery } from 'stores/user';
+import { transformRegexSpecChar } from 'shared/helpers';
 import { deleteUser, getAllUsers } from 'services/user';
+import { useUrlState, useTablePaginationWithUrlState } from 'hooks';
+
+import { Table, Button, Grid, Input, Message, Tag, Form, Space } from '@arco-design/web-react';
+import SharedPageLayout from 'components/SharedPageLayout';
 import NoResult from 'components/NoResult';
-import i18n from 'i18n';
-import { FedRoles, FedUserInfo } from 'typings/auth';
-import GridRow from 'components/_base/GridRow';
 import UserRoleBadge from 'components/UserRoleBadge';
-import { userInfoQuery } from 'stores/user';
-import { useRecoilQuery } from 'hooks/recoil';
+import MoreActions from 'components/MoreActions';
+import Modal from 'components/Modal';
 
-const ListContainer = styled.div`
-  display: flex;
-  flex: 1;
-`;
+import { FedRoles, FedUserInfo } from 'typings/auth';
+
+const { Row } = Grid;
 
 export const USERS_LIST_QUERY_KEY = 'userList';
 
 const UsersList: FC = () => {
-  const { t } = useTranslation();
-  const history = useHistory();
-  const [params, setParams] = useState({ keyword: '' });
   const [form] = Form.useForm();
+
+  const history = useHistory();
   const userInfo = useRecoilQuery(userInfoQuery);
+  const [urlState, setUrlState] = useUrlState({ keyword: '' });
+  const { paginationProps } = useTablePaginationWithUrlState();
 
   const columns = useMemo(() => {
     return [
       {
-        title: i18n.t('users.col_id'),
+        title: 'ID',
         dataIndex: 'id',
         key: 'id',
         render: (id: number) => {
@@ -39,7 +41,7 @@
         },
       },
       {
-        title: i18n.t('users.col_username'),
+        title: '用户名',
         dataIndex: 'username',
         key: 'username',
         render: (username: string, record: FedUserInfo) => {
@@ -47,8 +49,8 @@
             <>
               <Link to={`/users/edit/${record.id}`}>{username}</Link>
               {record.id === userInfo.data?.id && (
-                <Tag color="geekblue" style={{ marginLeft: '5px' }}>
-                  {i18n.t('users.yourself')}
+                <Tag color="arcoblue" style={{ marginLeft: '5px' }}>
+                  {'本账号'}
                 </Tag>
               )}
             </>
@@ -56,7 +58,7 @@
         },
       },
       {
-        title: i18n.t('users.col_role'),
+        title: '角色',
         dataIndex: 'role',
         key: 'role',
         render: (role: FedRoles) => {
@@ -64,81 +66,123 @@
         },
       },
       {
-        title: i18n.t('users.col_email'),
+        title: '邮箱',
         dataIndex: 'email',
         key: 'email',
       },
       {
-        title: i18n.t('users.col_name'),
+        title: '显示名',
         dataIndex: 'name',
         key: 'name',
       },
       {
-        title: i18n.t('users.col_ops'),
+        title: '操作',
         key: 'operations',
         render: (_: number, record: FedUserInfo) => {
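+          // deletion is offered through MoreActions below and is disabled for the current account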
return ( - <GridRow left={-10} gap="8"> - <Button size="small" onClick={() => goEdit(record)} type="link"> - {t('edit')} - </Button> - {record.id !== userInfo.data?.id && ( - <Popconfirm - title={t('users.message_del_user')} - onConfirm={() => onDeleteClick(record)} - > - <Button danger size="small" type="link"> - {t('delete')} - </Button> - </Popconfirm> - )} - </GridRow> + <Space> + <button className="custom-text-button" onClick={() => goEdit(record)}> + {'编辑'} + </button> + <MoreActions + actionList={[ + { + label: '删除', + disabled: record.id === userInfo.data?.id, + disabledTip: '不能删除自己的账号', + onClick: () => { + Modal.delete({ + title: '确认删除该用户吗?', + content: '删除后,该用户将无法操作,请谨慎删除', + onOk() { + onDeleteClick(record); + }, + }); + }, + danger: true, + }, + ]} + /> + </Space> ); }, }, ]; // eslint-disable-next-line react-hooks/exhaustive-deps - }, [t, userInfo]); + }, [userInfo]); - const query = useQuery([USERS_LIST_QUERY_KEY, params.keyword], () => getAllUsers(), { + const query = useQuery([USERS_LIST_QUERY_KEY], () => getAllUsers(), { retry: 2, cacheTime: 0, }); - const isEmpty = !query.isFetching && query.data?.data.length === 0; + const userListShow = useMemo(() => { + if (!query.data) { + return []; + } + + let userList = query.data?.data || []; + + if (urlState.keyword) { + const regx = new RegExp(`^.*${transformRegexSpecChar(urlState.keyword)}.*$`); // support fuzzy matching + + userList = userList.filter((item) => { + if (regx.test(String(item.name)) || regx.test(item.username)) { + return true; + } + return false; + }); + } + + return userList; + }, [urlState.keyword, query.data]); + + const isEmpty = !query.isFetching && userListShow.length === 0; return ( - <SharedPageLayout title={t('menu.label_users')}> - <Row gutter={16} justify="space-between" align="middle"> - <Col> - <Button size="large" type="primary" onClick={goCreate}> - {t('users.btn_create_user')} - </Button> - </Col> - <Col> - <Form initialValues={{ ...params }} layout="inline" form={form} onFinish={onSearch}> - <Form.Item name="keyword"> - <Input.Search - placeholder={t('users.placeholder_name_searchbox')} - onPressEnter={form.submit} - /> - </Form.Item> - </Form> - </Col> + <SharedPageLayout title={'用户管理'}> + <Row justify="space-between" align="center"> + <Button type="primary" onClick={goCreate}> + {'创建用户'} + </Button> + <Form + initialValues={{ ...urlState }} + form={form} + onSubmit={onSearch} + style={{ width: 280 }} + labelCol={{ span: 0 }} + wrapperCol={{ span: 24 }} + > + <Form.Item + field="keyword" + style={{ + marginBottom: 0, + }} + > + <Input.Search + placeholder={'输入关键词搜索用户'} + onSearch={form.submit} + onClear={form.submit} + allowClear + /> + </Form.Item> + </Form> </Row> - <ListContainer> + <div className={styled.list_container}> {isEmpty ? 
( - <NoResult text={t('users.no_result')} to="/users/modify" /> + <NoResult text={'暂无用户'} to="/users/create" /> ) : ( <Table loading={query.isFetching} - dataSource={query.data?.data || []} + data={userListShow} scroll={{ x: '100%' }} columns={columns} - rowKey="name" + rowKey="id" + pagination={{ ...paginationProps }} /> )} - </ListContainer> + </div> </SharedPageLayout> ); @@ -149,16 +192,16 @@ const UsersList: FC = () => { history.push(`/users/edit/${userInfo.id}`); } function onSearch(values: any) { - setParams(values); + setUrlState({ ...values, page: 1 }); } function onDeleteClick(userInfo: FedUserInfo) { deleteUser(userInfo.id!.toString()) .then(() => { - message.success(t('users.msg_delete_done')); + Message.success('删除成功'); query.refetch(); }) - .catch((err) => { - message.error(err.message); + .catch((error) => { + Message.error(error.message); }); } }; diff --git a/web_console_v2/client/src/views/Users/index.tsx b/web_console_v2/client/src/views/Users/index.tsx index 955e9c46b..bf946261c 100644 --- a/web_console_v2/client/src/views/Users/index.tsx +++ b/web_console_v2/client/src/views/Users/index.tsx @@ -1,6 +1,8 @@ import React, { FC } from 'react'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; import { Route } from 'react-router-dom'; + +import ErrorBoundary from 'components/ErrorBoundary'; + import UserList from './UserList'; import UserCreate from './UserCreate'; import UserEdit from './UserEdit'; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/CreateTemplate/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/CreateTemplate/index.tsx index fa40e0e15..5bbb9e354 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/CreateTemplate/index.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/CreateTemplate/index.tsx @@ -1,8 +1,9 @@ -import React, { FC } from 'react'; +import React, { FC, useRef } from 'react'; import TemplateForm from '../TemplateForm'; const CreateTemplate: FC = () => { - return <TemplateForm />; + const isHydrated = useRef(false); + return <TemplateForm isHydrated={isHydrated} />; }; export default CreateTemplate; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/EditTemplate/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/EditTemplate/index.tsx index 04bfc623b..7d2e0e918 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/EditTemplate/index.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/EditTemplate/index.tsx @@ -3,7 +3,6 @@ import TemplateForm from '../TemplateForm'; const EditTemplate: FC = () => { const isHydrated = useRef(false); - return <TemplateForm isEdit isHydrated={isHydrated} />; }; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index.module.less new file mode 100644 index 000000000..741fc00e1 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index.module.less @@ -0,0 +1,41 @@ +.form_section { + margin-bottom: 20px; + padding-top: 24px; + &:not([data-fill-width]) { + padding-right: 60px; + } + > .section_heading { + background-color: white; + padding: 10px 0; + margin-bottom: 6px; + font-size: 14px; + color: var(--textColorStrong); + } +} + +.perspective_tab { + --error-icon-display: none; + + width: 150px; + text-align: center; + + > img[alt='error-icon'] { + display: var(--error-icon-display); + } + + 
&[data-has-error='true'] { + --error-icon-display: inline-block; + color: var(--errorColor); + } +} + +.has_error_icon { + width: 12px; + margin-right: 4px; +} + +.perspective_container { + &[data-hidden='true'] { + display: none; + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index.tsx new file mode 100644 index 000000000..d74a8aae7 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/DefaultMode/index.tsx @@ -0,0 +1,218 @@ +import { Input, Grid, Switch, Tabs, Form } from '@arco-design/web-react'; +import Modal from 'components/Modal'; +import errorIcon from 'assets/icons/workflow-error.svg'; +import BlockRadio from 'components/_base/BlockRadio'; +import { useSubscribe } from 'hooks'; +import { omit } from 'lodash-es'; +import PubSub from 'pubsub-js'; +import React, { FC, useContext, useEffect, useState } from 'react'; +import { nextTick } from 'shared/helpers'; +import { isValidJobName } from 'shared/validator'; +import styled from './index.module.less'; +import { ValidateErrorEntity } from 'typings/component'; +import { JobType } from 'typings/job'; +import { definitionsStore } from 'views/WorkflowTemplates/TemplateForm/stores'; +import { + ComposeDrawerContext, + COMPOSE_DRAWER_CHANNELS, + HighlightPayload, + InspectPayload, +} from '..'; +import SlotEntryList from '../SloEntrytList'; +import VariableList from '../VariableList'; + +const Row = Grid.Row; + +type Props = { + isGlobal: boolean; + isCheck?: boolean; + onJobTypeChange: (type: JobType) => void; +}; + +const jobTypeOptions = Object.values(omit(JobType, 'UNSPECIFIED')).map((item) => ({ + value: item, + label: item, +})); + +export enum Perspective { + Slots = 'slots', + Variables = 'variables', +} + +const DefaultMode: FC<Props> = ({ isGlobal, isCheck, onJobTypeChange }) => { + const [perspective, setPerspective] = useState<Perspective>( + isGlobal ? 
Perspective.Variables : Perspective.Slots, + ); + const [errorTabs, setErrorTabs] = useState({ + slots: false, + variables: false, + }); + + const context = useContext(ComposeDrawerContext); + + const jobNameRules = [ + { required: true, message: '请输入 Job 名' }, + { + validator(value: any, callback: (error?: string) => void) { + if (!isValidJobName(value)) { + callback('只支持小写字母,数字开头或结尾,可包含“-”,不超过 24 个字符'); + } + }, + }, + { + validator(value: any, callback: (error?: string) => void) { + if ( + definitionsStore.entries + .filter(([uuid]) => uuid !== context.uuid) + .some(([_, jobDef]) => jobDef.name && jobDef.name === value.trim()) + ) { + callback('检测到任务重名'); + } + }, + }, + ]; + // ============ Subscribers ================ + useSubscribe(COMPOSE_DRAWER_CHANNELS.broadcast_error, (_: any, errInfo: ValidateErrorEntity) => { + const errors = { + slots: false, + variables: false, + }; + errors.slots = errInfo.errorFields.some((field) => /_slotEntries/.test(field.name[0])); + errors.variables = errInfo.errorFields.some((field) => /variables/.test(field.name[0])); + setErrorTabs(errors); + }); + useSubscribe(COMPOSE_DRAWER_CHANNELS.validation_passed, () => + setErrorTabs({ slots: false, variables: false }), + ); + useSubscribe( + COMPOSE_DRAWER_CHANNELS.inspect, + (_: string, { perspective, slotName, varUuid }: InspectPayload) => { + if (perspective) { + setPerspective(perspective); + } + + if (slotName || varUuid) { + if (slotName) { + setPerspective(Perspective.Slots); + } else { + setPerspective(Perspective.Variables); + } + nextTick(() => { + PubSub.publish(COMPOSE_DRAWER_CHANNELS.highlight, { + slotName, + varUuid, + } as HighlightPayload); + }); + } + }, + ); + // ============ Subscribers ================ + + useEffect(() => { + setErrorTabs({ slots: false, variables: false }); + }, [context.uuid]); + + useEffect(() => { + if (isGlobal) { + setPerspective(Perspective.Variables); + } + }, [isGlobal]); + + return ( + <> + {!isGlobal && ( + <section className={styled.form_section} style={{ width: 620 }}> + <h4 className={styled.section_heading}>基础配置</h4> + + <Form.Item field="name" label="Job 名称" rules={jobNameRules}> + <Input disabled={isCheck} placeholder="请输入 Job 名" /> + </Form.Item> + + <Form.Item field="is_federated" label="是否联邦" triggerPropName="checked"> + <Switch disabled={isCheck} /> + </Form.Item> + + <h4 className={styled.section_heading}>任务类型</h4> + <Form.Item + labelCol={{ span: 0 }} + field="job_type" + label="任务类型" + rules={[{ required: true, message: '请输入 Job 名' }]} + > + <BlockRadio + disabled={isCheck} + gap={10} + flexGrow={0} + options={jobTypeOptions} + beforeChange={beforeTypeChange} + onChange={onJobTypeChange} + /> + </Form.Item> + </section> + )} + + <section className={styled.form_section} data-fill-width> + {!isGlobal && ( + <> + <Row className={styled.section_heading} justify="space-between"> + <Tabs activeTab={perspective} onChange={onPerspectiveChange}> + <Tabs.TabPane + key={Perspective.Slots} + title={ + <div className={styled.perspective_tab} data-has-error={errorTabs.slots}> + <img className={styled.has_error_icon} src={errorIcon} alt="error-icon" /> + 插槽赋值 + </div> + } + /> + + <Tabs.TabPane + key={Perspective.Variables} + title={ + <div className={styled.perspective_tab} data-has-error={errorTabs.variables}> + <img className={styled.has_error_icon} src={errorIcon} alt="error-icon" /> + 自定义变量 + </div> + } + /> + </Tabs> + </Row> + </> + )} + <div + className={styled.perspective_container} + data-hidden={perspective !== Perspective.Variables} + > + 
<VariableList isCheck={isCheck} /> + </div> + + <div + className={styled.perspective_container} + data-hidden={perspective !== Perspective.Slots} + > + <SlotEntryList isCheck={isCheck} /> + </div> + </section> + </> + ); + + function onPerspectiveChange(val: string) { + setPerspective(val as Perspective); + } + function beforeTypeChange(): Promise<boolean> { + return new Promise((resolve) => { + Modal.confirm({ + title: '确认变更任务类型', + content: '更改任务类型将会使当前配置的插槽值将会丢失,确认这样做吗?', + onOk() { + resolve(true); + }, + onCancel() { + resolve(false); + }, + }); + }); + } +}; + +export default DefaultMode; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/ExpertMode/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/ExpertMode/index.module.less new file mode 100644 index 000000000..ffe3e220c --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/ExpertMode/index.module.less @@ -0,0 +1,14 @@ +.form_section { + margin-bottom: 20px; + padding-top: 24px; + &:not([data-fill-width]) { + padding-right: 60px; + } + > .section_heading { + background-color: white; + padding: 10px 0; + margin-bottom: 6px; + font-size: 14px; + color: var(--textColorStrong); + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/ExpertMode/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/ExpertMode/index.tsx new file mode 100644 index 000000000..e8a2175a2 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/ExpertMode/index.tsx @@ -0,0 +1,76 @@ +import { Input, Select, Switch, Form } from '@arco-design/web-react'; +import YAMLTemplateEditorButton from 'components/YAMLTemplateEditorButton'; +import { omit } from 'lodash-es'; +import React, { FC } from 'react'; +import { isValidJobName } from 'shared/validator'; +import { JobType } from 'typings/job'; +import VariableList from '../VariableList'; +import styled from './index.module.less'; + +type Props = { + isGlobal: boolean; + isCheck?: boolean; +}; + +const jobTypeOptions = Object.values(omit(JobType, 'UNSPECIFIED')); + +const ExpertMode: FC<Props> = ({ isGlobal, isCheck }) => { + return ( + <> + {!isGlobal && ( + <section className={styled.form_section} style={{ width: 620 }}> + <h4 className={styled.section_heading}>基本信息</h4> + <Form.Item + field="job_type" + label="任务类型" + rules={[{ required: true, message: '请输入 Job 名' }]} + > + <Select disabled={isCheck} placeholder="请选择任务类型"> + {jobTypeOptions.map((type) => ( + <Select.Option key={type} value={type}> + {type} + </Select.Option> + ))} + </Select> + </Form.Item> + + <Form.Item + field="name" + label="Job 名称" + rules={[ + { required: true, message: '请输入 Job 名' }, + { + validator(value: any, callback: (error?: string) => void) { + if (!isValidJobName(value)) { + callback('只支持小写字母,数字开头或结尾,可包含“-”,不超过 24 个字符'); + } + }, + }, + ]} + > + <Input disabled={isCheck} placeholder="请输入 Job 名" /> + </Form.Item> + + <Form.Item field="is_federated" label="是否联邦" triggerPropName="checked"> + <Switch disabled={isCheck} /> + </Form.Item> + + <Form.Item + field="yaml_template" + label="YAML 模板" + rules={[{ required: true, message: '请加入 YAML 模板' }]} + > + <YAMLTemplateEditorButton isCheck={isCheck} /> + </Form.Item> + </section> + )} + + <section className={styled.form_section} data-fill-width> + {!isGlobal && <h4 className={styled.section_heading}>自定义变量</h4>} + 
<VariableList isCheck={isCheck} /> + </section> + </> + ); +}; + +export default ExpertMode; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/JobProperty.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/JobProperty.tsx new file mode 100644 index 000000000..df51c02f8 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/JobProperty.tsx @@ -0,0 +1,93 @@ +import React, { FC, useContext, useEffect, useState } from 'react'; +import { OptionLabel } from './elements'; +import { Select, Grid } from '@arco-design/web-react'; +import { + definitionsStore, + JobDefinitionFormWithoutSlots, + TPL_GLOBAL_NODE_UUID, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import { ComposeDrawerContext } from '../../index'; +import { RefModelSharedProps } from './types'; +import { composeJobPropRef, parseJobPropRef } from '../helpers'; + +const Row = Grid.Row; +const Col = Grid.Col; + +type JobList = (JobDefinitionFormWithoutSlots & { uuid: string })[]; + +const jobPropOptions = [ + { + value: 'name', + label: 'name - 任务名', + }, +]; + +const JobProperty: FC<RefModelSharedProps> = ({ isCheck, value, onChange }) => { + const { uuid: selfJobUuid } = useContext(ComposeDrawerContext); + + const [jobUuid, prop] = parseJobPropRef(value); + + const [localJobUuid, setLocalJob] = useState(jobUuid); + const [localProp, setLocalProp] = useState(prop); + + const [jobList, setJobList] = useState<JobList>([]); + + useEffect(() => { + setJobList( + definitionsStore.entries + .filter(([uuid]) => uuid !== TPL_GLOBAL_NODE_UUID) + .map(([uuid, jobDef]) => ({ ...jobDef, uuid })), + ); + }, [selfJobUuid]); + + return ( + <Row gutter={10}> + <Col span={12}> + <Select + disabled={isCheck} + value={jobUuid === '__SELF__' ? selfJobUuid : jobUuid} + placeholder={'目标任务'} + onChange={onJobChangeChange} + > + {jobList.map((item, index: number) => { + return ( + <Select.Option key={item.uuid + index} value={item.uuid}> + {item.uuid === selfJobUuid ? 
( + <OptionLabel>本任务</OptionLabel> + ) : ( + <OptionLabel data-empty-text="//未命名任务">{item.name}</OptionLabel> + )} + </Select.Option> + ); + })} + </Select> + </Col> + <Col span={12}> + <Select value={prop} disabled={!localJobUuid} placeholder={'属性'} onChange={onPropChange}> + {jobPropOptions.map((item, index: number) => { + return ( + <Select.Option key={item.value + index} value={item.value}> + {item.label} + </Select.Option> + ); + })} + </Select> + </Col> + </Row> + ); + + function onJobChangeChange(val: string) { + setLocalJob(val); + onChange && + onChange(composeJobPropRef({ isSelf: selfJobUuid === val, job: val, prop: localProp })); + } + function onPropChange(val: string) { + setLocalProp(val); + onChange && + onChange( + composeJobPropRef({ isSelf: selfJobUuid === localJobUuid, job: localJobUuid, prop: val }), + ); + } +}; + +export default JobProperty; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/OtherJobVariable.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/OtherJobVariable.tsx new file mode 100644 index 000000000..e466d1e4b --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/OtherJobVariable.tsx @@ -0,0 +1,171 @@ +import React, { FC, useContext, useEffect, useState } from 'react'; +import { NoAvailable, OptionLabel } from './elements'; +import { Select, Grid, Button } from '@arco-design/web-react'; +import { + definitionsStore, + JobDefinitionFormWithoutSlots, + TPL_GLOBAL_NODE_UUID, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import { ComposeDrawerContext, COMPOSE_DRAWER_CHANNELS, InspectPayload } from '../..'; +import { RefModelSharedProps } from './types'; +import { composeOtherJobRef, parseOtherJobRef } from '../helpers'; +import VariableLinkAnchor from './VariableLinkAnchor'; +import PubSub from 'pubsub-js'; +import { Perspective } from '../../DefaultMode'; +import { algorithmTypeOptionList } from './shared'; +import { Variable, VariableComponent } from 'typings/variable'; + +const Row = Grid.Row; +const Col = Grid.Col; + +type JobList = (JobDefinitionFormWithoutSlots & { uuid: string })[]; + +const OtherJobVariable: FC<RefModelSharedProps> = ({ isCheck, value, onChange }) => { + const context = useContext(ComposeDrawerContext); + + // workflow.jobs['186d1db127359'].variables.186d1db127359 + const [jobUuid, varUuid, algorithmType] = parseOtherJobRef(value); + + const [localJobUuid, setLocalJob] = useState(jobUuid); + const [localVarUuid, setLocalVar] = useState(varUuid); + const [isShowAlgorithmTypeSelect, setIsShowAlgorithmTypeSelect] = useState( + Boolean(algorithmType), + ); + + const [jobList, setJobList] = useState<JobList>([]); + + useEffect(() => { + const tmp: JobList = []; + definitionsStore.entries.forEach(([uuid, jobDef]) => { + if (uuid !== TPL_GLOBAL_NODE_UUID && uuid !== context.uuid) { + tmp.push({ ...jobDef, uuid }); + } + }); + + setJobList(tmp); + }, [context.uuid]); + + const hasOtherJobs = jobList.length !== 0; + const availableVariables = jobList.find((item) => localJobUuid === item.uuid)?.variables ?? []; + const hasVariables = availableVariables.length !== 0; + + if (!hasOtherJobs) { + return <NoAvailable>暂不存在其他任务</NoAvailable>; + } + + return ( + <Row gutter={5}> + <Col span={isShowAlgorithmTypeSelect ? 
8 : 10}> + <Select + disabled={isCheck} + value={jobUuid} + placeholder={'目标任务'} + onChange={onJobChangeChange} + allowClear + > + {jobList.map((item, index: number) => { + return ( + <Select.Option key={item.uuid + index} value={item.uuid}> + <OptionLabel data-empty-text="//未命名任务">{item.name}</OptionLabel> + </Select.Option> + ); + })} + </Select> + </Col> + + {localJobUuid && ( + <Col span={isShowAlgorithmTypeSelect ? 8 : 12}> + {!hasVariables ? ( + <NoAvailable> + 该任务暂无变量, + <Button + disabled={isCheck} + type="text" + size="small" + onClick={onGoTheJobToCreateVarClick} + > + {'点击前往创建'} + </Button> + </NoAvailable> + ) : ( + <Select + value={varUuid} + disabled={!localJobUuid || isCheck} + placeholder={'目标变量'} + onChange={onVarChangeChange} + allowClear + > + {availableVariables.map((item, index: number) => { + return ( + <Select.Option key={item._uuid + index} value={item._uuid} extra={item}> + <OptionLabel data-empty-text="// 未命名变量">{item.name}</OptionLabel> + </Select.Option> + ); + })} + </Select> + )} + </Col> + )} + + {isShowAlgorithmTypeSelect && ( + <Col span={6}> + <Select + disabled={isCheck} + defaultValue={algorithmTypeOptionList[0].value} + onChange={onAlgorithmTypeSelectChange} + > + {algorithmTypeOptionList.map((item) => { + return ( + <Select.Option key={item.value} value={item.value}> + <OptionLabel>{item.label}</OptionLabel> + </Select.Option> + ); + })} + </Select> + </Col> + )} + + {localJobUuid && hasVariables && ( + <VariableLinkAnchor + jobUuid={jobUuid} + varUuid={varUuid} + disabled={!localJobUuid || !localVarUuid || isCheck} + /> + )} + </Row> + ); + + function onJobChangeChange(val: string) { + setLocalJob(val); + setLocalVar(undefined as any); + setIsShowAlgorithmTypeSelect(false); + + onChange?.(composeOtherJobRef(val, localVarUuid)); + } + function onVarChangeChange(val: string, options: any) { + setLocalVar(val); + + if ( + (options?.extra as Variable)?.widget_schema?.component === VariableComponent.AlgorithmSelect + ) { + setIsShowAlgorithmTypeSelect(true); + onChange?.(`${composeOtherJobRef(localJobUuid, val)}.${algorithmTypeOptionList[0].value}`); + } else { + setIsShowAlgorithmTypeSelect(false); + onChange?.(composeOtherJobRef(localJobUuid, val)); + } + } + function onAlgorithmTypeSelectChange(val: string) { + // workflow.jobs['186d1db127359'].variables.186d1db127359.path + onChange?.(`${composeOtherJobRef(localJobUuid, localVarUuid)}.${val}`); + } + function onGoTheJobToCreateVarClick() { + if (!localJobUuid) return; + PubSub.publish(COMPOSE_DRAWER_CHANNELS.inspect, { + jobUuid: localJobUuid, + perspective: Perspective.Variables, + } as InspectPayload); + } +}; + +export default OtherJobVariable; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/ProjectVariable.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/ProjectVariable.tsx new file mode 100644 index 000000000..e44118be7 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/ProjectVariable.tsx @@ -0,0 +1,41 @@ +import React, { FC, useState } from 'react'; +import { Input } from '@arco-design/web-react'; +import { RefModelSharedProps } from './types'; + +const PREFIX = 'project.variables'; + +const PrjectVariable: FC<RefModelSharedProps> = ({ isCheck, value, onChange }) => { + const varName = _parse(value); + const [localVarname, setLocalVar] = useState(varName); + + return 
(
+    <Input
+      disabled={isCheck}
+      value={localVarname}
+      addBefore={`${PREFIX}.`}
+      onChange={onInputChange}
+      placeholder={'输入工作区变量名'}
+    />
+  );
+
+  function onInputChange(value: string, e: any) {
+    setLocalVar(value);
+    onChange && onChange(_compose(value));
+  }
+};
+
+function _compose(val: string) {
+  if (!val) return '';
+  return `${PREFIX}.${val}`;
+}
+function _parse(reference: string | undefined): string {
+  if (!reference) return '';
+  const fragments = reference.split('.');
+  if (fragments.length !== 3) {
+    return '';
+  }
+  const [, , varName] = fragments;
+  return varName;
+}
+
+export default PrjectVariable;
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/SelfVariable.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/SelfVariable.tsx
new file mode 100644
index 000000000..9228ac8b6
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/SelfVariable.tsx
@@ -0,0 +1,110 @@
+import React, { FC, useContext, useState } from 'react';
+import { COMPOSE_DRAWER_CHANNELS, ComposeDrawerContext, InspectPayload } from '../..';
+import { RefModelSharedProps } from './types';
+import { Button, Select, Grid } from '@arco-design/web-react';
+import { NoAvailable, OptionLabel } from './elements';
+import { composSelfRef, parseSelfRef } from '../helpers';
+import VariableLinkAnchor from './VariableLinkAnchor';
+import PuBSub from 'pubsub-js';
+import { Perspective } from '../../DefaultMode';
+import { Variable, VariableComponent } from 'typings/variable';
+import { algorithmTypeOptionList } from './shared';
+
+const Row = Grid.Row;
+const Col = Grid.Col;
+
+const SelfVariable: FC<RefModelSharedProps> = ({ isCheck, value, onChange }) => {
+  const { formData } = useContext(ComposeDrawerContext);
+  const [algorithmType, setAlgorithmType] = useState('path');
+  const [isShowAlgorithmTypeSelect, setIsShowAlgorithmTypeSelect] = useState(() => {
+    // e.g. self.variables.${uuid}
+    // If the component type of the variable is VariableComponent.AlgorithmSelect, self.variables.${uuid}.path or self.variables.${uuid}.config
+    const list = value?.split('.') ?? [];
+    if (list.length >= 4) {
+      setAlgorithmType(list[list.length - 1]);
+    }
+    return list.length >= 4;
+  });
+
+  if (!formData) {
+    return null;
+  }
+
+  const selectedVarUuid = parseSelfRef(value);
+  const noVariableAvailable = formData.variables.length === 0;
+  return (
+    <Row gutter={5}>
+      <Col span={isShowAlgorithmTypeSelect ? 10 : 20}>
+        {noVariableAvailable ? 
( + <NoAvailable> + 本任务暂无有效变量, + <Button type="text" size="small" onClick={onGoVarTabClick}> + {'点击前往创建'} + </Button> + </NoAvailable> + ) : ( + <Select + disabled={isCheck} + value={selectedVarUuid} + onChange={onSelectChange} + placeholder={'请选择需要引用的变量'} + allowClear + > + {formData.variables.map((variable, index: number) => { + return ( + <Select.Option key={variable._uuid + index} value={variable._uuid} extra={variable}> + <OptionLabel data-empty-text="// 未命名变量">{variable.name}</OptionLabel> + </Select.Option> + ); + })} + </Select> + )} + </Col> + + {isShowAlgorithmTypeSelect && ( + <Col span={10}> + <Select + disabled={isCheck} + defaultValue={algorithmType} + onChange={onAlgorithmTypeSelectChange} + > + {algorithmTypeOptionList.map((item) => { + return ( + <Select.Option key={item.value} value={item.value}> + <OptionLabel>{item.label}</OptionLabel> + </Select.Option> + ); + })} + </Select> + </Col> + )} + + {!noVariableAvailable && ( + <VariableLinkAnchor varUuid={selectedVarUuid} disabled={!selectedVarUuid} /> + )} + </Row> + ); + + function onSelectChange(val: string, options: any) { + if ( + (options?.extra as Variable)?.widget_schema?.component === VariableComponent.AlgorithmSelect + ) { + setIsShowAlgorithmTypeSelect(true); + onChange?.(`${composSelfRef(val)}.${algorithmTypeOptionList[0].value}`); + } else { + setIsShowAlgorithmTypeSelect(false); + onChange?.(composSelfRef(val)); + } + } + function onAlgorithmTypeSelectChange(val: string) { + // self.variables.ag.path + onChange?.(`${composSelfRef(selectedVarUuid)}.${val}`); + } + function onGoVarTabClick() { + PuBSub.publish(COMPOSE_DRAWER_CHANNELS.inspect, { + perspective: Perspective.Variables, + } as InspectPayload); + } +}; + +export default SelfVariable; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/SystemVariable.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/SystemVariable.tsx new file mode 100644 index 000000000..1624f8c82 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/SystemVariable.tsx @@ -0,0 +1,41 @@ +import React, { FC, useState } from 'react'; +import { Input } from '@arco-design/web-react'; +import { RefModelSharedProps } from './types'; + +const PREFIX = 'system.variables'; + +const SystemVariable: FC<RefModelSharedProps> = ({ isCheck, value, onChange }) => { + const varName = _parse(value); + const [localVarname, setLocalVar] = useState(varName); + + return ( + <Input + disabled={isCheck} + value={localVarname} + addBefore={`${PREFIX}.`} + onChange={onInputChange} + placeholder={'输入系统变量名'} + /> + ); + + function onInputChange(value: string, e: any) { + setLocalVar(value); + onChange && onChange(_compose(value)); + } +}; + +function _compose(val: string) { + if (!val) return ''; + return `${PREFIX}.${val}`; +} +function _parse(reference: string | undefined): string { + if (!reference) return ''; + const fragments = reference.split('.'); + if (fragments.length !== 3) { + return ''; + } + const [, , varName] = fragments; + return varName; +} + +export default SystemVariable; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/VariableLinkAnchor.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/VariableLinkAnchor.tsx new file mode 
100644
index 000000000..688a10182
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/VariableLinkAnchor.tsx
@@ -0,0 +1,37 @@
+import React, { FC } from 'react';
+import PubSub from 'pubsub-js';
+import { AnchorIcon } from '../../elements';
+import IconButton from 'components/IconButton';
+import { Grid, Tooltip } from '@arco-design/web-react';
+import { COMPOSE_DRAWER_CHANNELS } from '../..';
+
+const Col = Grid.Col;
+
+export type InspetVariableParams = {
+  jobUuid?: string;
+  varUuid: string;
+  disabled?: boolean;
+};
+
+type Props = InspetVariableParams;
+
+const VariableLinkAnchor: FC<Props> = ({ jobUuid, varUuid, disabled }) => {
+  return (
+    <Col style={{ flex: '1' }}>
+      <Tooltip content={'查看变量'}>
+        <IconButton
+          disabled={disabled}
+          style={{ width: '100%', height: '100%' }}
+          icon={<AnchorIcon style={{ marginLeft: 0 }} />}
+          onClick={inspectVariable}
+        />
+      </Tooltip>
+    </Col>
+  );
+
+  function inspectVariable() {
+    PubSub.publish(COMPOSE_DRAWER_CHANNELS.inspect, { jobUuid, varUuid });
+  }
+};
+
+export default VariableLinkAnchor;
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/WorkflowVariable.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/WorkflowVariable.tsx
new file mode 100644
index 000000000..4d198e3ce
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/WorkflowVariable.tsx
@@ -0,0 +1,109 @@
+import React, { FC, useState } from 'react';
+import { RefModelSharedProps } from './types';
+import { NoAvailable, OptionLabel } from './elements';
+import { Button, Select, Grid } from '@arco-design/web-react';
+import {
+  definitionsStore,
+  TPL_GLOBAL_NODE_UUID,
+} from 'views/WorkflowTemplates/TemplateForm/stores';
+import { composeWorkflowRef, parseWorkflowRef } from '../helpers';
+import VariableLinkAnchor from './VariableLinkAnchor';
+import PubSub from 'pubsub-js';
+import { COMPOSE_DRAWER_CHANNELS } from '../..';
+import { algorithmTypeOptionList } from './shared';
+import { Variable, VariableComponent } from 'typings/variable';
+
+const Row = Grid.Row;
+const Col = Grid.Col;
+
+const WorkflowVariable: FC<RefModelSharedProps> = ({ isCheck, value, onChange }) => {
+  const [isShowAlgorithmTypeSelect, setIsShowAlgorithmTypeSelect] = useState(() => {
+    // e.g. workflow.variables.${uuid}
+    // If the component type of the variable is VariableComponent.AlgorithmSelect, workflow.variables.${uuid}.path or workflow.variables.${uuid}.config
+    const list = value?.split('.') ?? [];
+    return list.length >= 4;
+  });
+
+  const globalNodeDef = definitionsStore.getValueById(TPL_GLOBAL_NODE_UUID);
+  if (!globalNodeDef || !globalNodeDef?.variables || globalNodeDef?.variables.length === 0) {
+    return (
+      <NoAvailable>
+        暂无全局变量,
+        <Button disabled={isCheck} type="text" size="small" onClick={onGoGlobalNodeClick}>
+          {'点击前往创建'}
+        </Button>
+      </NoAvailable>
+    );
+  }
+
+  const selectVal = parseWorkflowRef(value);
+
+  return (
+    <Row gutter={10}>
+      <Col span={isShowAlgorithmTypeSelect ? 
10 : 20}>
+        <Select
+          disabled={isCheck}
+          value={selectVal}
+          onChange={onSelectChange}
+          placeholder={'请选择需要引用的全局变量'}
+          allowClear
+        >
+          {globalNodeDef.variables.map((variable, index: number) => {
+            return (
+              <Select.Option key={variable.name + index} value={variable._uuid} extra={variable}>
+                <OptionLabel data-empty-text="//未命名全局变量">{variable.name}</OptionLabel>
+              </Select.Option>
+            );
+          })}
+        </Select>
+      </Col>
+
+      {isShowAlgorithmTypeSelect && (
+        <Col span={10}>
+          <Select
+            disabled={isCheck}
+            defaultValue={algorithmTypeOptionList[0].value}
+            onChange={onAlgorithmTypeSelectChange}
+          >
+            {algorithmTypeOptionList.map((item) => {
+              return (
+                <Select.Option key={item.value} value={item.value}>
+                  <OptionLabel>{item.label}</OptionLabel>
+                </Select.Option>
+              );
+            })}
+          </Select>
+        </Col>
+      )}
+
+      <VariableLinkAnchor
+        varUuid={selectVal}
+        jobUuid={TPL_GLOBAL_NODE_UUID}
+        disabled={!selectVal}
+      />
+    </Row>
+  );
+
+  function onSelectChange(val: string, options: any) {
+    if (
+      (options?.extra as Variable)?.widget_schema?.component === VariableComponent.AlgorithmSelect
+    ) {
+      setIsShowAlgorithmTypeSelect(true);
+      onChange?.(`${composeWorkflowRef(val)}.${algorithmTypeOptionList[0].value}`);
+    } else {
+      setIsShowAlgorithmTypeSelect(false);
+      onChange?.(composeWorkflowRef(val));
+    }
+  }
+  function onAlgorithmTypeSelectChange(val: string) {
+    // e.g. workflow.variables.${uuid}.path
+    onChange?.(`${composeWorkflowRef(selectVal)}.${val}`);
+  }
+  function onGoGlobalNodeClick() {
+    PubSub.publish(COMPOSE_DRAWER_CHANNELS.inspect, {
+      jobUuid: TPL_GLOBAL_NODE_UUID,
+    });
+  }
+};
+
+export default WorkflowVariable;
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/elements.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/elements.module.less
new file mode 100644
index 000000000..96b0437e6
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/elements.module.less
@@ -0,0 +1,17 @@
+.no_available {
+  padding-left: 5px;
+  font-size: 13px;
+  line-height: 32px;
+  white-space: nowrap;
+  color: var(--textColorSecondary);
+}
+
+.option_label {
+  &:empty {
+    color: var(--textColorSecondary);
+    &::before {
+      content: attr(data-empty-text);
+      font-weight: normal;
+    }
+  }
+}
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/elements.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/elements.tsx
new file mode 100644
index 000000000..aec2a62de
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/elements.tsx
@@ -0,0 +1,18 @@
+import React from 'react';
+import styled from './elements.module.less';
+
+export function NoAvailable({ children, ...props }: any) {
+  return (
+    <div className={styled.no_available} {...props}>
+      {children}
+    </div>
+  );
+}
+
+export function OptionLabel({ children, ...props }: any) {
+  return (
+    <span className={styled.option_label} {...props}>
+      {children}
+    </span>
+  );
+}
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/shared.ts b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/shared.ts
new file mode 100644
index 000000000..9ef030cc0
--- 
/dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/shared.ts @@ -0,0 +1,10 @@ +export const algorithmTypeOptionList = [ + { + label: 'path', + value: 'path', + }, + { + label: 'config', + value: 'config', + }, +]; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/types.ts b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/types.ts new file mode 100644 index 000000000..682a44944 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/RefVariableSelect/types.ts @@ -0,0 +1,7 @@ +/* istanbul ignore file */ + +export type RefModelSharedProps = { + value?: string; + isCheck?: boolean; + onChange?: (refStr: string) => void; +}; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/SlotEntryItem.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/SlotEntryItem.module.less new file mode 100644 index 000000000..9252de15f --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/SlotEntryItem.module.less @@ -0,0 +1,14 @@ +.second_label { + color: var(--textColorSecondary); + font-weight: normal; + font-size: 12px; +} + +.open_indicator { + transition: 0.4s var(--commonTiming); +} +&[data-open='true'] { + .open_indicator { + transform: rotate(180deg); + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/SlotEntryItem.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/SlotEntryItem.tsx new file mode 100644 index 000000000..5fd3f0984 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/SlotEntryItem.tsx @@ -0,0 +1,284 @@ +import React, { ChangeEvent, CSSProperties, FC, useRef, memo, useCallback, useState } from 'react'; +import styled from './SlotEntryItem.module.less'; +import { SlotEntry } from 'views/WorkflowTemplates/TemplateForm/stores'; +import { useToggle } from 'react-use'; +import { IconDown, IconQuestionCircle } from '@arco-design/web-react/icon'; + +import { JobSlotReferenceType } from 'typings/workflow'; +import { Summary, Container, Details, Name } from '../elements'; +import { Tag, Select, Input, Tooltip, Form } from '@arco-design/web-react'; +import { useSubscribe } from 'hooks'; + +import SelfVariable from './RefVariableSelect/SelfVariable'; +import WorkflowVariable from './RefVariableSelect/WorkflowVariable'; +import ProjectVariable from './RefVariableSelect/ProjectVariable'; +import SystemVariable from './RefVariableSelect/SystemVariable'; +import OtherJobVariable from './RefVariableSelect/OtherJobVariable'; +import JobProperty from './RefVariableSelect/JobProperty'; +import { COMPOSE_DRAWER_CHANNELS, HighlightPayload, scrollDrawerBodyTo } from '..'; +import { ValidateErrorEntity } from 'typings/component'; +import { isEqual } from 'lodash-es'; +import { formatValueToString, parseValueFromString } from 'shared/helpers'; + +const { DEFAULT, SELF, OTHER_JOB, WORKFLOW, PROJECT, SYSTEM, JOB_PROPERTY } = JobSlotReferenceType; + +type RefMeta = { color: string; refWidget: any; label: string }; + +const SlotRefMetas: Partial<Record<JobSlotReferenceType, RefMeta>> = { + 
[DEFAULT]: { + color: '', // default + refWidget: null, + label: '模板默认值', + }, + [SELF]: { + color: 'gold', + refWidget: SelfVariable, + label: '本任务变量', + }, + [OTHER_JOB]: { + color: 'cyan', + refWidget: OtherJobVariable, + label: '其他任务变量', + }, + [JOB_PROPERTY]: { + color: 'arcoblue', + refWidget: JobProperty, + label: '任务属性', + }, + [WORKFLOW]: { + color: 'blue', + refWidget: WorkflowVariable, + label: '工作流全局变量', + }, + [PROJECT]: { + color: 'purple', + refWidget: ProjectVariable, + label: '工作区变量', + }, + [SYSTEM]: { + color: 'lime', + refWidget: SystemVariable, + label: '系统变量', + }, +}; + +const refOptions = Object.entries(JobSlotReferenceType); + +type Props = { + isCheck?: boolean; + path: number | string; + value?: SlotEntry; + onChange?: (val: SlotEntry) => any; + style?: CSSProperties; + className?: string; +}; + +const SlotEntryItem: FC<Props> = ({ isCheck, path, value, onChange, ...props }) => { + const [validatePassed, setValidatePassed] = useState<boolean>(true); + const ref = useRef<HTMLDetailsElement>(null); + const [isOpen, toggleOpen] = useToggle(_shouldInitiallyOpen(value)); + const [hasError, toggleError] = useToggle(false); + const [highlighted, setHighlight] = useToggle(false); + const refValTimer = useRef<ReturnType<typeof setTimeout>>(); + + const onRefValChange = useCallback((val: string) => { + refValTimer.current && clearTimeout(refValTimer.current); + + refValTimer.current = setTimeout(() => { + toggleError(!val); + }, 200); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + useSubscribe( + COMPOSE_DRAWER_CHANNELS.broadcast_error, + (_: string, errInfo: ValidateErrorEntity) => { + const hasError = errInfo.errorFields.some((field) => { + const [pathL1] = field.name; + const reg = RegExp(/_slotEntries/g); + return reg.test(pathL1) && pathL1 === path; + }); + + toggleError(hasError); + + if (hasError) { + toggleOpen(true); + } + }, + ); + useSubscribe(COMPOSE_DRAWER_CHANNELS.highlight, (_: string, { slotName }: HighlightPayload) => { + if (value && slotName === value[0]) { + setHighlight(true); + toggleOpen(true); + + // Scroll slot into view + const verticalMiddleY = (window.innerHeight - 60) / 2; + const top = ref.current?.offsetTop || verticalMiddleY; + scrollDrawerBodyTo(top - verticalMiddleY); + + setTimeout(() => { + setHighlight && setHighlight(false); + }, 5000); + } + }); + + if (!value) return null; + + const [slotName, slotConfig] = value; + const currRefType = slotConfig.reference_type; + const currRefMeta = SlotRefMetas[currRefType]!; + const isDefaultRefType = currRefType === DEFAULT; + + return ( + <Details ref={ref as any} data-has-error={hasError} data-open={isOpen} {...props}> + <Summary + data-has-error={hasError} + data-highlighted={highlighted} + onClick={(evt: any) => onToggle(evt as any)} + > + {/* + Certain HTML elements, like <summary>, <fieldset> and <button>, do not work as flex containers. 
+ You can work around this by nesting a div under your summary + https://stackoverflow.com/questions/46156669/safari-flex-item-unwanted-100-width-css/46163405 + */} + <div + style={{ + display: 'flex', + alignItems: 'center', + height: '100%', + }} + > + <Name> + <Tag color={currRefMeta?.color} style={{ marginRight: 5 }} bordered> + {currRefMeta?.label} + </Tag> + {slotConfig.label || slotName} + <small className={styled.second_label}> ({slotName}) </small> + {slotConfig.help && ( + <Tooltip content={slotConfig.help}> + <IconQuestionCircle disabled={isCheck} style={{ marginLeft: 5 }} /> + </Tooltip> + )} + </Name> + <div style={{ marginLeft: 5 }}> + <IconDown className={styled.open_indicator} /> + </div> + </div> + </Summary> + + <Container style={{ display: isOpen ? 'block' : 'none' }}> + <Form.Item + field={getNamePath('reference_type')} + label={'插槽类型'} + rules={[{ required: true }]} + > + <Select disabled={isCheck} onChange={onTypeChange}> + {refOptions.map(([key, value]) => ( + <Select.Option key={key} value={value}> + {`${SlotRefMetas[value]?.label} - ${key}`} + </Select.Option> + ))} + </Select> + </Form.Item> + + {!isDefaultRefType && ( + <Form.Item + field={getNamePath('reference')} + label={'引用路径'} + // dependencies={['variables']} + rules={[ + { required: true }, + { + match: /^[a-zA-Z_0-9]+(?:(\.[a-zA-Z_0-9]+)|(\['[^'"\\]+']))+$/g, + message: '只允许大小写英文字母数字及下划线的组合', + }, + ]} + > + {currRefMeta.refWidget ? ( + <currRefMeta.refWidget + isCheck={isCheck} + key="ref-widget" + onChange={onRefValChange} + placeholder={'请补全引用路径'} + /> + ) : ( + <Input disabled={isCheck} key="ref-default-input" placeholder={'请补全引用路径'} /> + )} + </Form.Item> + )} + + <Form.Item + hidden={!isDefaultRefType} + field={getNamePath('default_value')} + label={'默认值'} + rules={[ + { + validator: (value: string | undefined, callback: (error?: string) => void) => { + if (validatePassed) { + return; + } + callback(`JSON ${slotConfig.value_type} 格式错误`); + }, + }, + ]} + formatter={(value: any) => { + if (validatePassed) { + return formatValueToString(value, slotConfig.value_type); + } + return value; + }} + getValueFromEvent={(value) => { + try { + const res = parseValueFromString(value, slotConfig.value_type); + setValidatePassed(true); + return res; + } catch (error) { + setValidatePassed(false); + return value; + } + }} + > + <Input disabled={isCheck} placeholder={'默认值'} /> + </Form.Item> + </Container> + </Details> + ); + + function getNamePath(name: string) { + // A slot entry consists of [slotName, slotValue] + // so the 1 refers to the entry's value + return [path, 1, name].join('.'); + } + function onTypeChange(val: JobSlotReferenceType) { + // Reset error status + toggleError(false); + // Every time the ref type changes, reset the reference to empty + onChange && onChange([slotName, { ...slotConfig, reference_type: val, reference: '' }]); + } + function onToggle(evt: ChangeEvent<HTMLDetailsElement>) { + toggleOpen(evt.target.open); + } +}; + +function _shouldInitiallyOpen(val: SlotEntry | undefined) { + if (!val) return false; + + const [, slot] = val; + return slot.reference_type !== JobSlotReferenceType.DEFAULT && !slot.reference; +} + +/** + * Decide if the item needs re-rendering + * 1. ignore onChange's ref change + * 2. 
use deep comparison + */ +function _propsAreEqual( + { onChange: _1, value: oldValue, ...prevProps }: Props, + { onChange: _2, value: newValue, ...newProps }: Props, +): boolean { + if (oldValue !== newValue) return false; + + return isEqual(prevProps, newProps); +} + +export default memo(SlotEntryItem, _propsAreEqual); diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/helpers.ts b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/helpers.ts new file mode 100644 index 000000000..ae1bac752 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/helpers.ts @@ -0,0 +1,71 @@ +export function composeOtherJobRef(jobUuid: string, varUuid: string) { + if (!jobUuid || !varUuid) return ''; + + return `workflow.jobs['${jobUuid}'].variables.${varUuid}`; +} + +/** @returns [job-uuid, var-uuid, extra-field-value] or [job-name, var-name, extra-field-name] */ +export function parseOtherJobRef(reference?: string): [string, string, string] { + if (!reference) return [undefined, undefined] as never; + + const fragments = reference.split(/\.|']\.?|\['/); + + if (fragments.length < 5) { + return [undefined, undefined, undefined] as never; + } + + const [, , job, , variable, extraField] = fragments; + + return [job, variable, extraField]; +} + +export function parseJobPropRef(reference?: string) { + if (!reference) return [undefined, undefined] as never; + + if (reference.startsWith('self.')) { + return ['__SELF__', reference.split(/\.|']\.?|\['/)[1]]; + } + + const fragments = reference.split(/\.|']\.?|\['/); + + const [, , job, prop] = fragments; + + return [job, prop]; +} + +export function composeJobPropRef(params: { isSelf: boolean; job?: string; prop?: string }) { + const { isSelf, job, prop } = params; + + if (!prop) return undefined as never; + + if (isSelf) { + return `self.${prop}`; + } + return `workflow.jobs['${job}'].${prop}`; +} + +export function composSelfRef(varUuid?: string) { + if (!varUuid) return undefined as never; + + return `self.variables.${varUuid}`; +} + +export function parseSelfRef(val?: string) { + // e.g. self.variables.${uuid} + // If the component type of the variable is VariableComponent.AlgorithmSelect, the ref is self.variables.${uuid}.path or self.variables.${uuid}.config + const list = val?.split('.') ?? []; + return list[2] || (undefined as never); +} + +export function composeWorkflowRef(varUuid?: string) { + if (!varUuid) return undefined as never; + + return `workflow.variables.${varUuid}`; +} + +export function parseWorkflowRef(val?: string) { + // e.g. workflow.variables.${uuid} + // If the component type of the variable is VariableComponent.AlgorithmSelect, the ref is workflow.variables.${uuid}.path or workflow.variables.${uuid}.config + const list = val?.split('.') ?? 
[]; + return list[2] || (undefined as never); +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/index.module.less new file mode 100644 index 000000000..40f12eb75 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/index.module.less @@ -0,0 +1,9 @@ +.slot_list_row { + display: flex; + flex-wrap: wrap; + gap: 20px; +} + +.slot_list_col { + flex: 1; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/index.tsx new file mode 100644 index 000000000..bcd342526 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/SloEntrytList/index.tsx @@ -0,0 +1,87 @@ +import React, { FC, useContext, memo } from 'react'; +import { Form, Input } from '@arco-design/web-react'; +import NoResult from 'components/NoResult'; +import SlotEntryItem from './SlotEntryItem'; +import styled from './index.module.less'; +import { SlotEntries } from 'views/WorkflowTemplates/TemplateForm/stores'; +import { useSearchBox } from '../hooks'; +import { SearchBox } from '../elements'; +import { ComposeDrawerContext } from '../index'; + +type Props = { + slotList?: SlotEntries; + isCheck?: boolean; +}; + +const SlotEntryList: FC<Props> = ({ isCheck }) => { + const { filter, onFilterChange, onInputKeyPress } = useSearchBox(); + const context = useContext(ComposeDrawerContext); + const slotList = context.formData?._slotEntries; + + return ( + <> + <SearchBox> + <Input.Search + placeholder="按名字搜索插槽" + onChange={onFilterChange} + onKeyPress={onInputKeyPress} + /> + </SearchBox> + + <Form.List field="_slotEntries"> + {(entries) => { + const filteredItems = entries.filter(slotNameFilter); + + return ( + <div className={styled.slot_list_row}> + {/* 2 column layout */} + <div className={styled.slot_list_col}> + {filteredItems + .filter((_, index) => index % 2 === 0) + .map((entry) => ( + <Form.Item noStyle field={entry.field} key={'_slotEntries' + entry.key}> + <SlotEntryItem isCheck={isCheck} path={entry.field} /> + </Form.Item> + ))} + </div> + + <div className={styled.slot_list_col}> + {filteredItems + .filter((_, index) => index % 2 === 1) + .map((entry) => ( + <Form.Item + {...entry} + noStyle + field={entry.field} + key={'_slotEntries' + entry.key} + > + <SlotEntryItem isCheck={isCheck} path={entry.field} /> + </Form.Item> + ))} + </div> + + {entries.length === 0 && ( + <NoResult + noImage + text="该任务没有插槽" + style={{ margin: '50px auto 20px', width: '100%' }} + /> + )} + </div> + ); + }} + </Form.List> + </> + ); + + function slotNameFilter(_: any, index: number) { + if (!slotList) return true; + + const matcher = filter.toLowerCase(); + const [slotName, slot] = slotList[index] ?? 
[]; + return slotName?.toLowerCase().includes(matcher) || slot?.label?.toLowerCase().includes(matcher); + } +}; + +export default memo(SlotEntryList); diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm.module.less new file mode 100644 index 000000000..67b141545 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm.module.less @@ -0,0 +1,8 @@ +.list_container { + transition: 0.4s var(--commonTiming); +} + +.remove_button { + position: absolute; + right: -28px; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm.tsx new file mode 100644 index 000000000..a91d5af67 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/EnvsInputForm.tsx @@ -0,0 +1,98 @@ +import React, { FC, useCallback, useLayoutEffect, useRef } from 'react'; +import styled from './EnvsInputForm.module.less'; + +import { useTranslation } from 'react-i18next'; +import { convertToUnit } from 'shared/helpers'; + +import { Form, Input, Button, Grid } from '@arco-design/web-react'; +import { Delete, PlusCircle } from 'components/IconPark'; + +const Row = Grid.Row; + +const EnvVariablesForm: FC<{ + path: any; + disabled?: boolean; +}> = ({ path, disabled }) => { + const { t } = useTranslation(); + const listInnerRef = useRef<HTMLDivElement>(); + const listContainerRef = useRef<HTMLDivElement>(); + + const setListContainerMaxHeight = useCallback( + (nextHeight: any) => { + listContainerRef.current!.style.maxHeight = convertToUnit(nextHeight); + }, + [listContainerRef], + ); + const getListInnerHeight = useCallback(() => { + return listInnerRef.current!.offsetHeight!; + }, [listInnerRef]); + + useLayoutEffect(() => { + const innerHeight = getListInnerHeight() + 30; + setListContainerMaxHeight(innerHeight); + }); + + return ( + <div> + <div + className={styled.list_container} + ref={listContainerRef as any} + onTransitionEnd={onFoldAnimationEnd} + > + <Form.List field={path.join('.')}> + {(fields, { add, remove }) => { + return ( + <div ref={listInnerRef as any}> + {fields.map((field, index) => ( + <Row key={field.key + index} align="start" style={{ position: 'relative' }}> + <Form.Item + style={{ flex: '0 0 50%' }} + {...field} + label="Name" + field={[field.field, 'name'].join('.')} + key={[field.key, 'name'].join('.')} + rules={[{ required: true }]} + > + <Input placeholder="name" disabled={disabled} /> + </Form.Item> + + <Form.Item + style={{ flex: '0 0 50%' }} + label="Value" + {...field} + field={[field.field, 'value'].join('.')} + key={[field.key, 'value'].join('.')} + rules={[{ required: true }]} + > + <Input.TextArea placeholder="value" disabled={disabled} /> + </Form.Item> + + <Button + className={styled.remove_button} + size="small" + icon={<Delete />} + shape="circle" + type="text" + onClick={() => remove(field.key)} + /> + </Row> + ))} + <Button size="small" icon={<PlusCircle />} onClick={() => add()}> + {t('project.add_parameters')} + </Button> + </div> + ); + }} + </Form.List> + </div> + </div> + ); + + function onFoldAnimationEnd(_: 
React.TransitionEvent) { + // Because the user can adjust the list's inner height by resizing the value textarea or adding/removing variables, + // we MUST set the container's maxHeight to 'initial' once unfolded (after which the user can interact) + listContainerRef.current!.style.maxHeight = 'initial'; + } +}; + +export default EnvVariablesForm; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/SlotLinkAnchor.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/SlotLinkAnchor.module.less new file mode 100644 index 000000000..5d855a349 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/SlotLinkAnchor.module.less @@ -0,0 +1,6 @@ +.container { + display: flex; + align-items: center; + justify-content: space-between; + width: 100%; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/SlotLinkAnchor.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/SlotLinkAnchor.tsx new file mode 100644 index 000000000..f753345c1 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/SlotLinkAnchor.tsx @@ -0,0 +1,131 @@ +import React, { FC } from 'react'; +import styled from './SlotLinkAnchor.module.less'; +import { + definitionsStore, + editorInfosStore, + JobDefinitionForm, + TPL_GLOBAL_NODE_UUID, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import { JobSlotReferenceType } from 'typings/workflow'; +import { Tag } from '@arco-design/web-react'; +import PubSub from 'pubsub-js'; +import { COMPOSE_DRAWER_CHANNELS, InspectPayload } from '../../index'; +import { AnchorIcon } from '../../elements'; + +export enum SlotLinkType { + Self, + OtherJob, +} + +export type SlotLink = { + type: SlotLinkType; + jobUuid: string; + slotName: string; +}; + +type Props = { + link: SlotLink; +}; + +export function collectSlotLinks( + currNodeUuid?: string, + varUuid?: string, + context?: { formData?: JobDefinitionForm }, +) { + if (!varUuid || !currNodeUuid) return []; + + const refSourceList: SlotLink[] = []; + + editorInfosStore.entries.forEach(([nodeUuid, editInfo]) => { + if (!editInfo) return; + + const { slotEntries } = editInfo; + + /** If current node is workflow global variables */ + if (currNodeUuid === TPL_GLOBAL_NODE_UUID) { + slotEntries.forEach(([slotName, slot]) => { + if (slot.reference_type === JobSlotReferenceType.WORKFLOW) { + if (_isVarMatched(slot.reference, varUuid)) { + refSourceList.push({ + type: SlotLinkType.OtherJob, + jobUuid: nodeUuid, + slotName, + }); + } + } + }); + return; + } + + /** If current editorInfo belongs to current node */ + if (currNodeUuid === nodeUuid) { + context?.formData?._slotEntries?.forEach(([slotName, slot]) => { + if (slot.reference_type === JobSlotReferenceType.SELF) { + if (_isVarMatched(slot.reference, varUuid)) { + refSourceList.push({ + type: SlotLinkType.Self, + jobUuid: nodeUuid, + slotName, + }); + } + } + }); + return; + } + + slotEntries.forEach(([slotName, slot]) => { + if (slot.reference_type === JobSlotReferenceType.OTHER_JOB) { + if (_isVarMatched(slot.reference, varUuid) && _isJobMatched(slot.reference, currNodeUuid)) { + refSourceList.push({ + type: SlotLinkType.OtherJob, + jobUuid: nodeUuid, + slotName, + }); + } + } + }); + }); + + return refSourceList; 
+} + +const SlotLinkAnchor: FC<Props> = ({ link }) => { + const isOtherJob = link.type === SlotLinkType.OtherJob; + + return ( + <div className={styled.container} onClick={onLinkClick}> + <div> + {link.type === SlotLinkType.Self && <Tag>本任务</Tag>} + {isOtherJob && ( + <Tag color="magenta">{definitionsStore.getValueById(link.jobUuid)?.name}</Tag> + )} + {link.slotName} + </div> + <AnchorIcon /> + </div> + ); + + function onLinkClick() { + PubSub.publish(COMPOSE_DRAWER_CHANNELS.inspect, { + jobUuid: link.jobUuid, + slotName: link.slotName, + } as InspectPayload); + } +}; + +function _isVarMatched(ref: string, varUuid: string) { + if (!ref) return false; + return ref.endsWith(`.${varUuid}`); +} + +function _isJobMatched(ref: string, uuid: string) { + if (!ref) return false; + + const fragments = ref.split('.'); + if (fragments.length !== 5) return false; + + const [, , jobUuid] = fragments; + return jobUuid === uuid; +} + +export default SlotLinkAnchor; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/WidgetSchema.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/WidgetSchema.module.less new file mode 100644 index 000000000..0c34aaaba --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/WidgetSchema.module.less @@ -0,0 +1,9 @@ +.del_enum_button { + position: absolute; + top: 4px; + right: -30px; +} + +.enum { + position: relative; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/WidgetSchema.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/WidgetSchema.tsx new file mode 100644 index 000000000..73cdebcc5 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/WidgetSchema.tsx @@ -0,0 +1,505 @@ +import React, { FC, useCallback, useState } from 'react'; +import styled from './WidgetSchema.module.less'; +import { set } from 'lodash-es'; +import { formatValueToString } from 'shared/helpers'; + +import { Button, Input, InputNumber, Select, Switch, Form } from '@arco-design/web-react'; +import { DatasetPathSelect } from 'components/DatasetSelect'; +import IconButton from 'components/IconButton'; +import { Delete, PlusCircle } from 'components/IconPark'; +import ModelCodesEditorButton from 'components/ModelCodesEditorButton'; +import { AlgorithmSelect } from 'components/DoubleSelect'; +import EnvsInputForm from './EnvsInputForm'; +import YAMLTemplateEditorButton from 'components/YAMLTemplateEditorButton'; + +import { + Variable, + VariableAccessMode, + VariableComponent, + VariableValueType, + VariableWidgetSchema, +} from 'typings/variable'; + +import { CpuInput, MemInput } from 'components/InputGroup/NumberTextInput'; +import { disabeldPeerWritableComponentTypeList } from '.'; +import { Tag } from 'typings/workflow'; + +const { STRING, CODE, LIST, OBJECT, NUMBER } = VariableValueType; + +/** + * NOTE: [Radio, Checkbox, Switch] are not offered as choices at the moment, + * because Radio and Checkbox can be replaced with Select, + * and Switch's boolean type value is not supported by the server side yet + */ +const WIDGET_COMPONENTS__supported: Partial<Record<VariableComponent, any>> = { + [VariableComponent.Input]: { + use: Input, + label: 'Input - 输入框', + type: STRING, 
allowTypeList: [STRING, LIST, OBJECT], + displayType: STRING, + }, + [VariableComponent.Select]: { + use: Select, + label: 'Select - 选择器', + type: STRING, + allowTypeList: [STRING], + displayType: STRING, + }, + [VariableComponent.NumberPicker]: { + use: InputNumber, + label: 'Number - 数字输入框', + type: NUMBER, + allowTypeList: [NUMBER], + displayType: NUMBER, + }, + [VariableComponent.CPU]: { + use: CpuInput, + label: 'CpuInput - 输入 Cpu 资源参数的输入框', + type: STRING, + allowTypeList: [STRING], + displayType: STRING, + props: { + min: 1000, + max: Number.MAX_SAFE_INTEGER, + }, + }, + [VariableComponent.MEM]: { + use: MemInput, + label: 'MemInput - 输入 Mem 资源参数的输入框', + type: STRING, + allowTypeList: [STRING], + displayType: STRING, + props: { + min: 1, + max: 100, + }, + }, + [VariableComponent.TextArea]: { + use: Input.TextArea, + label: 'TextArea - 多行文本输入框', + type: STRING, + allowTypeList: [STRING], + displayType: STRING, + }, + [VariableComponent.Code]: { + use: ModelCodesEditorButton, + label: 'Code - 代码编辑器', + type: CODE, + allowTypeList: [CODE], + displayType: CODE, + }, + [VariableComponent.JSON]: { + use: YAMLTemplateEditorButton, + label: 'JSON - JSON编辑器', + type: OBJECT, + allowTypeList: [OBJECT], + displayType: STRING, + props: { + language: 'json', + }, + }, + [VariableComponent.DatasetPath]: { + use: DatasetPathSelect, + label: 'DatasetPath - 原始数据集路径选择器', + type: STRING, + allowTypeList: [STRING], + displayType: STRING, + props: { + lazyLoad: { + enable: true, + page_size: 10, + }, + }, + }, + [VariableComponent.FeatureSelect]: { + use: Input, + label: 'FeatureSelect - 特征选择器', + type: OBJECT, + allowTypeList: [OBJECT], + displayType: STRING, + }, + [VariableComponent.EnvsInput]: { + use: EnvsInputForm, + label: 'EnvsInput - 环境变量输入器', + type: LIST, + allowTypeList: [LIST], + displayType: LIST, + }, + [VariableComponent.AlgorithmSelect]: { + use: AlgorithmSelect, + label: 'AlgorithmSelect - 算法选择器', + type: OBJECT, + allowTypeList: [OBJECT], + displayType: OBJECT, + }, +}; + +export const componentOptions = Object.entries(WIDGET_COMPONENTS__supported).map(([key, val]) => ({ + value: key, + label: val.label, +})); + +type Props = { + form: any; + path: (number | string)[]; + value?: VariableWidgetSchema; + isCheck?: boolean; + onChange?: (val: Variable) => any; +}; + +const WidgetSchema: FC<Props> = ({ form, path, value, isCheck }) => { + const data = value; + const variableIdx = path.slice(0, -1); + + const Widget = WIDGET_COMPONENTS__supported[data?.component!]?.use || Input; + const widgetProps = WIDGET_COMPONENTS__supported[data?.component!]?.props || {}; + const allowTypeList: string[] = + WIDGET_COMPONENTS__supported[data?.component!]?.allowTypeList ?? []; + const defaultValueType: VariableValueType = WIDGET_COMPONENTS__supported[data?.component!]?.type; + const displayType: VariableValueType = + WIDGET_COMPONENTS__supported[data?.component!]?.displayType; + const widgetHasEnum = _hasEnum(data?.component); + const isCheckableCompnent = _isCheckableCompnent(data?.component); + const isDisplayTypeSelect = _isDisplayTypeSelect(data?.component); + + const [valueType, setValueType] = useState<VariableValueType>(() => { + // Get the latest valueType value from the form + const variables: Variable[] = form.getFieldValue('variables'); + return variables?.[variableIdx?.[0] as number]?.value_type ?? 
defaultValueType; + }); + const tagList = [ + { + label: '资源配置', + value: Tag.RESOURCE_ALLOCATION, + }, + { + label: '输入参数', + value: Tag.INPUT_PARAM, + }, + { + label: '输入路径', + value: Tag.INPUT_PATH, + }, + { + label: '输出路径', + value: Tag.OUTPUT_PATH, + }, + { + label: '运行参数', + value: Tag.OPERATING_PARAM, + }, + { + label: '系统变量', + value: Tag.SYSTEM_PARAM, + }, + ]; + + const formatValue = useCallback( + (value: any) => { + if (valueType === CODE) { + return value; + } + + if (displayType === VariableValueType.STRING) { + if ((valueType === LIST || valueType === OBJECT) && typeof value === 'object') { + return formatValueToString(value, valueType); + } + + if (typeof value === 'string') { + return value; + } + + // The server only accepts string-typed values + if (typeof value === 'number') { + return value.toString(); + } + } + + if ( + [VariableValueType.CODE, VariableValueType.LIST, VariableValueType.OBJECT].includes( + displayType, + ) + ) { + if (typeof value !== 'object') { + try { + const finalValue = JSON.parse(value); + return finalValue; + } catch (error) { + // Do nothing + } + } + } + + return value; + }, + [valueType, displayType], + ); + if (!data) return null; + + return ( + <div> + <Form.Item + field={[...path, 'component'].join('.')} + label="请选择组件" + rules={[{ required: true, message: '请选择组件' }]} + > + <Select disabled={isCheck} placeholder="请选择组件" onChange={onComponentChange}> + {componentOptions.map((comp) => { + return ( + <Select.Option key={comp.value} value={comp.value}> + {comp.label} + </Select.Option> + ); + })} + </Select> + </Form.Item> + + <Form.Item + field={[...variableIdx, 'value_type'].join('.')} + label="值类型" + hidden={!isDisplayTypeSelect} + > + <Select disabled={isCheck} placeholder="请选择值类型" onChange={onTypeChange}> + {allowTypeList.map((type) => { + return ( + <Select.Option key={type} value={type}> + {type} + </Select.Option> + ); + })} + </Select> + </Form.Item> + + {widgetHasEnum && ( + <Form.Item + field={[...path, 'enum'].join('.')} + label="可选项" + rules={[{ required: true, message: '请添加至少一个选项' }]} + > + <Form.List field={[...path, 'enum'].join('.')}> + {(fields, { add, remove }) => { + return ( + <div> + {fields.map((field, index) => { + return ( + <div className={styled.enum} key={field.key + index}> + <Form.Item rules={[{ required: true, message: '填写选项值' }]} {...field}> + <Input disabled={isCheck} placeholder={`选项 ${index + 1}`} /> + </Form.Item> + <IconButton + className={styled.del_enum_button} + disabled={isCheck} + circle + icon={<Delete />} + onClick={() => remove(field.key)} + /> + </div> + ); + })} + + <Button + disabled={isCheck} + size="small" + icon={<PlusCircle />} + onClick={() => add()} + > + 添加选项 + </Button> + </div> + ); + }} + </Form.List> + </Form.Item> + )} + + {/* The default value path is outside `widget_schema`, so the temp solution is name.slice(0, -1) */} + <Form.Item + field={[...variableIdx, 'value'].join('.')} + label="默认值" + triggerPropName={isCheckableCompnent ? 'checked' : 'value'} + normalize={formatValue} + formatter={(val) => { + let finalValue = val; + // Some values were parsed into objects by the parseComplexDictField function, which conflicts with displayType + if (displayType === VariableValueType.STRING && typeof val === 'object') { + finalValue = JSON.stringify(val); + } + return finalValue; + }} + rules={ + !widgetHasEnum && + (data?.component === VariableComponent.Input || + data?.component === VariableComponent.FeatureSelect) && + (valueType === LIST || valueType === OBJECT) + ? 
[ + { + validator: (value: any, callback: (error?: string) => void) => { + // Hack: value may still be an object even though getValueProps is set to format it to a string; + // this happens on first render + if (typeof value === 'object') { + return; + } + + try { + JSON.parse(value); + return; + } catch (error) { + callback(`JSON ${valueType} 格式错误`); + } + }, + }, + ] + : [] + } + > + {widgetHasEnum ? ( + <Widget disabled={isCheck} placeholder="按需设置变量默认值" allowClear> + {widgetHasEnum && + (data.enum || []).map((opt: any, index: number) => { + return ( + <Select.Option key={opt + index} value={opt}> + {opt || '##请填充选项值##'} + </Select.Option> + ); + })} + </Widget> + ) : ( + <Widget + disabled={isCheck} + placeholder="按需设置变量默认值" + allowClear + path={[...variableIdx, 'value']} + {...widgetProps} + /> + )} + </Form.Item> + + <Form.Item field={[...path, 'tooltip'].join('.')} label="用户输入提示"> + <Input disabled={isCheck} placeholder="输入提示解释该字段作用" /> + </Form.Item> + + <Form.Item field={[...path, 'tag'].join('.')} label="参数类型"> + <Select disabled={isCheck} placeholder="请选择参数类型" onChange={onTagChange}> + {tagList.map((item) => { + return ( + <Select.Option key={item.value} value={item.value}> + {item.label} + </Select.Option> + ); + })} + </Select> + </Form.Item> + + <Form.Item field={[...path, 'required'].join('.')} triggerPropName="checked" label="是否必填"> + <Switch disabled={isCheck} /> + </Form.Item> + <Form.Item field={[...path, 'hidden'].join('.')} triggerPropName="checked" label="是否隐藏"> + <Switch disabled={isCheck} /> + </Form.Item> + </div> + ); + + function onTypeChange(type: VariableValueType) { + setValueType(type); + + const result = { variables: form.getFieldValue('variables') }; + set(result, `[${variableIdx[0]}].value_type`, type); + form.setFieldsValue(result); + } + + function onTagChange(tag: string) { + const result = { variables: form.getFieldValue('variables') }; + set(result, `[${variableIdx[0]}].tag`, tag); + form.setFieldsValue(result); + } + + function onComponentChange(val: VariableComponent) { + setValueType(WIDGET_COMPONENTS__supported[val].type); + + const variables = form.getFieldValue('variables'); + + const valueType = WIDGET_COMPONENTS__supported[val].type; + + const displayType: VariableValueType = WIDGET_COMPONENTS__supported[val]?.displayType; + + let defaultValue: any; + + // TODO: it's not clean to setFieldsValue using lodash-set, find a better way! + set(variables, `[${variableIdx[0]}].value_type`, valueType); + + if (disabeldPeerWritableComponentTypeList.includes(val)) { + set(variables, `[${variableIdx[0]}].access_mode`, VariableAccessMode.PEER_READABLE); + } + + let isChangeValue = false; + + // Set '{}' as the default value of Code components. + if ([CODE, OBJECT].includes(valueType)) { + defaultValue = {}; + if (val === VariableComponent.AlgorithmSelect) { + defaultValue = { path: '', config: [] }; + } + + set( + variables, + `[${variableIdx[0]}].value`, + displayType === VariableValueType.STRING ? JSON.stringify(defaultValue) : defaultValue, + ); + isChangeValue = true; + } + if (valueType === LIST) { + defaultValue = []; + set( + variables, + `[${variableIdx[0]}].value`, + displayType === VariableValueType.STRING ? JSON.stringify(defaultValue) : defaultValue, + ); + isChangeValue = true; + } + /** + * Remove enum value if component is not select/checkbox/radio etc. 
+ * because formily will always render a select if enum is an Array, + * regardless of the component type being 'Input' + */ + if (val !== VariableComponent.Select) { + set(variables, `[${variableIdx[0]}].widget_schema.enum`, undefined); + } + + if (val === VariableComponent.CPU) { + set(variables, `[${variableIdx[0]}].value`, '1000m'); + isChangeValue = true; + } + + if (val === VariableComponent.MEM) { + set(variables, `[${variableIdx[0]}].value`, '1Gi'); + isChangeValue = true; + } + + if (!isChangeValue) { + set(variables, `[${variableIdx[0]}].value`, undefined); + } + + form.setFieldsValue({ + variables, + }); + } +}; + +function _hasEnum(comp?: VariableComponent) { + if (!comp) return false; + const { Select, Radio, Checkbox } = VariableComponent; + return [Select, Radio, Checkbox].includes(comp); +} +function _isCheckableCompnent(comp?: VariableComponent) { + if (!comp) return false; + const { Switch, Radio, Checkbox } = VariableComponent; + return [Switch, Radio, Checkbox].includes(comp); +} +function _isDisplayTypeSelect(comp?: VariableComponent) { + if (!comp) return false; + const { Input } = VariableComponent; + return [Input].includes(comp); +} + +export default WidgetSchema; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/index.module.less new file mode 100644 index 000000000..16aa58f56 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/index.module.less @@ -0,0 +1,37 @@ +.var_name { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: 550px; + flex: 1 1 0%; + padding-left: 10px; + font-size: 13px; + user-select: none; + + &:empty { + color: var(--textColorSecondary); + + &::before { + content: '// 补全变量信息'; + font-weight: normal; + } + } +} + +.action_button { + margin-right: 5px; +} + +.no_link { + color: var(--textColorSecondary); +} + +.open_indicator { + transition: 0.4s var(--commonTiming); +} + +&[data-open='true'] { + .open_indicator { + transform: rotate(180deg); + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/index.tsx new file mode 100644 index 000000000..c711a09c9 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/VariableItem/index.tsx @@ -0,0 +1,282 @@ +import { + Form, + Button, + Radio, + Tooltip, + Message, + Dropdown, + Popconfirm, + Input, +} from '@arco-design/web-react'; +import { IconLink, IconDelete, IconDown } from '@arco-design/web-react/icon'; +import PrettyMenu, { PrettyMenuItem } from 'components/PrettyMenu'; +import { indicators } from 'components/VariableLabel'; +import VariablePermission from 'components/VariblePermission'; +import { useSubscribe } from 'hooks'; +import React, { + ChangeEvent, + FC, + memo, + useContext, + useRef, + useState, + useMemo, + useCallback, +} from 'react'; +import { useToggle } from 'react-use'; +import styled from './index.module.less'; +import { ValidateErrorEntity } from 'typings/component'; +import { Variable, VariableAccessMode, VariableComponent } from 'typings/variable'; +import { VariableDefinitionForm } from 'views/WorkflowTemplates/TemplateForm/stores'; 
+import { Container, Details, Name, Summary } from '../../elements'; +import { + ComposeDrawerContext, + COMPOSE_DRAWER_CHANNELS, + HighlightPayload, + scrollDrawerBodyTo, +} from '../../index'; +import SlotLinkAnchor, { collectSlotLinks, SlotLink } from './SlotLinkAnchor'; +import WidgetSchema from './WidgetSchema'; + +export const disabeldPeerWritableComponentTypeList = [ + VariableComponent.DatasetPath, + VariableComponent.AlgorithmSelect, + VariableComponent.FeatureSelect, +]; + +type Props = { + path: string; + isCheck?: boolean; + value?: VariableDefinitionForm; + onChange?: (val: VariableDefinitionForm) => any; + remover?: any; + removerRef?: any; +}; + +const VariableItem: FC<Props> = ({ path, isCheck, value, removerRef }) => { + const ref = useRef<HTMLDetailsElement>(null); + const [isOpen, toggleOpen] = useToggle(_shouldInitiallyOpen(value)); + const [hasError, toggleError] = useToggle(false); + const [slotLinks, setSlotLinks] = useState<SlotLink[]>([]); + const [highlighted, setHighlight] = useToggle(false); + + const context = useContext(ComposeDrawerContext); + const varsIdentifyStr = context.formData?.variables?.map((item) => item.name).join(); + + const duplicatedNameValidator = useCallback( + (value: any, callback: (error?: string) => void) => { + if ( + context.formData?.variables + .filter((item) => item._uuid !== data._uuid) + .some((item) => item.name === value) + ) { + callback('检测到重名变量'); + } + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [varsIdentifyStr], + ); + + useSubscribe( + COMPOSE_DRAWER_CHANNELS.broadcast_error, + (_: string, errInfo: ValidateErrorEntity) => { + const hasError = errInfo.errorFields.some((field) => { + const [pathL1] = field.name; + return pathL1 === path; + }); + + toggleError(hasError); + + if (hasError) { + toggleOpen(true); + } + }, + ); + useSubscribe(COMPOSE_DRAWER_CHANNELS.highlight, (_: string, { varUuid }: HighlightPayload) => { + if (value && varUuid === value._uuid) { + setHighlight(true); + toggleOpen(true); + + // Scroll slot into view + const verticalMiddleY = (window.innerHeight - 60) / 2; + const top = ref.current?.offsetTop || verticalMiddleY; + scrollDrawerBodyTo(top - verticalMiddleY); + + setTimeout(() => { + setHighlight && setHighlight(false); + }, 5000); + } + }); + + const widtgetSchemaPath = useMemo(() => { + return [path, 'widget_schema']; + }, [path]); + + if (!value) { + return null; + } + + const data = value; + const varName = data.name; + + const PermissionIndicator = indicators[data.access_mode]; + const actionDisabled = !varName; + + const componentType = data?.widget_schema?.component; + + return ( + <Details data-has-error={hasError} ref={ref as any} data-open={isOpen}> + <Summary + data-has-error={hasError} + data-highlighted={highlighted} + onClick={(evt: any) => onToggle(evt as any)} + > + {/* + Certain HTML elements, like <summary>, <fieldset> and <button>, do not work as flex containers. 
+ You can work around this by nesting a div under your summary + https://stackoverflow.com/questions/46156669/safari-flex-item-unwanted-100-width-css/46163405 + */} + <div + style={{ + display: 'flex', + alignItems: 'center', + height: '100%', + }} + > + <PermissionIndicator /> + + <Name className={styled.var_name}>{varName}</Name> + <StopClickPropagation> + <div> + {context.isEasyMode && ( + <Dropdown + trigger={['click']} + position="bl" + disabled={isCheck} + droplist={ + <PrettyMenu style={{ width: 'auto' }}> + {slotLinks.map((link, index) => ( + <PrettyMenuItem key={link.jobUuid + index}> + <SlotLinkAnchor link={link} /> + </PrettyMenuItem> + ))} + {slotLinks.length === 0 && ( + <PrettyMenuItem key="variable"> + <small className={styled.no_link}>该变量暂未被引用</small> + </PrettyMenuItem> + )} + </PrettyMenu> + } + > + <Tooltip content={actionDisabled ? '没有变量名无法查看引用' : ''}> + <Button + className={styled.action_button} + type="text" + size="small" + disabled={actionDisabled} + icon={<IconLink />} + onClick={inspectSlotLinks} + > + 查看引用 + </Button> + </Tooltip> + </Dropdown> + )} + <StopClickPropagation> + <Popconfirm disabled={isCheck} title="确认删除该变量吗" onOk={onRemoveClick as any}> + <Button + className={styled.action_button} + disabled={isCheck} + type="text" + size="small" + icon={<IconDelete />} + > + 删除 + </Button> + </Popconfirm> + </StopClickPropagation> + + <IconDown className={styled.open_indicator} /> + </div> + </StopClickPropagation> + </div> + </Summary> + <Container style={{ display: isOpen ? 'block' : 'none' }}> + <Form.Item + field={[path, 'name'].join('.')} + label="Key" + rules={[ + { required: true, message: '请输入变量 Key' }, + { + match: /^[a-zA-Z_0-9-]+$/g, + message: '只允许大小写英文字母数字及下划线的组合', + }, + { + validator: duplicatedNameValidator, + }, + ]} + > + <Input disabled={isCheck} placeholder="请输入变量名 (仅允许英语及下划线)" /> + </Form.Item> + + <Form.Item + field={[path, 'access_mode'].join('.')} + label="对侧权限" + rules={[{ required: true }]} + > + <Radio.Group disabled={isCheck} type="button"> + <Radio + value={VariableAccessMode.PEER_WRITABLE} + disabled={disabeldPeerWritableComponentTypeList.includes(componentType!)} + > + <VariablePermission.Writable desc /> + </Radio> + <Radio value={VariableAccessMode.PEER_READABLE}> + <VariablePermission.Readable desc /> + </Radio> + <Radio value={VariableAccessMode.PRIVATE}> + <VariablePermission.Private desc /> + </Radio> + </Radio.Group> + </Form.Item> + + <Form.Item field={widtgetSchemaPath.join('.')} noStyle> + <WidgetSchema isCheck={isCheck} form={context.formInstance!} path={widtgetSchemaPath} /> + </Form.Item> + </Container> + </Details> + ); + + function onRemoveClick() { + if (context.isEasyMode) { + const refSrcs = collectSlotLinks(context.uuid, data._uuid, context); + if (refSrcs.length) { + Message.warning('变量仍然被引用,请先解除引用关系'); + setSlotLinks(refSrcs); + return; + } + } + if (!path) return; + + removerRef.current?.(Number(path.slice(path.indexOf('[') + 1, -1))); + } + function onToggle(evt: ChangeEvent<HTMLDetailsElement>) { + toggleOpen(evt.target.open); + } + function inspectSlotLinks() { + const refSrcs = collectSlotLinks(context.uuid, data._uuid, context); + setSlotLinks(refSrcs); + } +}; + +function _shouldInitiallyOpen(val: Variable | undefined) { + if (!val) return false; + + return !val.name; +} + +function StopClickPropagation({ children }: { children: React.ReactNode }) { + return <span onClick={(evt) => evt.stopPropagation()}>{children}</span>; +} + +export default memo(VariableItem); diff --git 
a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/index.module.less new file mode 100644 index 000000000..ad0bfc8b2 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/index.module.less @@ -0,0 +1,16 @@ +.var_list_row { + display: flex; + flex-wrap: wrap; + gap: 20px; +} + +.var_list_col { + flex: 1 0; +} + +.add_button_row { + display: flex; + justify-content: center; + width: 100%; + margin-top: 20px; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/index.tsx new file mode 100644 index 000000000..416e9997b --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/VariableList/index.tsx @@ -0,0 +1,118 @@ +import { Button, Input, Form } from '@arco-design/web-react'; +import { Plus } from 'components/IconPark'; +import NoResult from 'components/NoResult'; +import React, { FC, memo, useCallback, useContext, useRef } from 'react'; +import { useTranslation } from 'react-i18next'; +import styled from './index.module.less'; +import { giveDefaultVariable } from 'views/WorkflowTemplates/TemplateForm/stores'; +import { SearchBox } from '../elements'; +import { useSearchBox } from '../hooks'; +import { ComposeDrawerContext } from '../index'; +import VariableItem from './VariableItem'; + +const VariableList: FC<{ isCheck?: boolean }> = ({ isCheck }) => { + const { t } = useTranslation(); + const { filter, onFilterChange, onInputKeyPress } = useSearchBox(); + + const context = useContext(ComposeDrawerContext); + const varRemoverRef = useRef<((index: number) => void) | undefined>(undefined); + const varNameFilter = useCallback( + (_: any, index: number) => { + const matcher = filter.toLowerCase(); + const variable = context.formData?.variables![index]; + + return variable?.name?.toLowerCase().includes(matcher); + }, + [filter, context], + ); + + return ( + <> + <SearchBox> + <Input.Search + placeholder="按名字搜索变量" + onChange={onFilterChange} + onKeyPress={onInputKeyPress} + /> + </SearchBox> + + <Form.List field="variables"> + {(fields, { add, remove }) => { + const filteredItems = fields.filter(varNameFilter as any); + /** + * Every re-render of the variable list gives the `remove` method a new ref, + * which leads VariableItem to re-render needlessly,
+ * so we wrap `remove` in a ref to reduce redundant renders + */ + varRemoverRef.current = remove; + + return ( + <div className={styled.var_list_row}> + {/* 2 column layout */} + <div className={styled.var_list_col}> + {filteredItems + .filter((_, index) => index % 2 === 0) + .map((field) => ( + <Form.Item + {...field} + key={'var-' + field.key} + noStyle + rules={[{ required: true, message: t('project.msg_var_name') }]} + > + <VariableItem + isCheck={isCheck} + path={field.field} + removerRef={varRemoverRef} + /> + </Form.Item> + ))} + </div> + + <div className={styled.var_list_col}> + {filteredItems + .filter((_, index) => index % 2 === 1) + .map((field) => ( + <Form.Item + {...field} + key={'var-' + field.key} + noStyle + rules={[{ required: true, message: t('project.msg_var_name') }]} + > + <VariableItem + isCheck={isCheck} + path={field.field} + removerRef={varRemoverRef} + /> + </Form.Item> + ))} + </div> + + {fields.length === 0 && ( + <NoResult + noImage + text={t('暂无变量,创建一个吧')} + style={{ margin: '50px auto 20px', width: '100%' }} + /> + )} + + <div className={styled.add_button_row}> + {/* DO NOT simplify `() => add()` to `add`, it will pollute form value with $event */} + <Button + disabled={isCheck} + type="primary" + size="small" + icon={<Plus />} + onClick={() => add(giveDefaultVariable())} + > + {t('workflow.btn_add_var')} + </Button> + </div> + </div> + ); + }} + </Form.List> + </> + ); +}; + +export default memo(VariableList); diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/elements.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/elements.module.less new file mode 100644 index 000000000..e9b6fb4b3 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/elements.module.less @@ -0,0 +1,107 @@ +@import '~styles/mixins.less'; + +.anchor_icon { + transform: translateX(1px) scaleX(-1); + margin-left: 10px; +} + +.details { + --highlightColor: var(--primaryColor); + + margin-bottom: 20px; + border-radius: 2px; + .open_indicator { + transition: 0.4s var(--commonTiming); + } + &[data-open='true'] { + background-color: #fcfcfc; + padding-bottom: 10px; + box-shadow: -2px 0 0 0 var(--highlightColor); + + &[data-has-error='true'] { + --highlightColor: var(--errorColor); + } + + .open_indicator { + transform: rotate(180deg); + } + } +} + +@keyframes HighlightedWave { + 0% { + box-shadow: 0 0 0 2px var(--primaryColor); + } + 33% { + box-shadow: 0 0 0 2px var(--primaryColor), 0 0 0 5px var(--blue2); + } + 66% { + box-shadow: 0 0 0 2px var(--primaryColor), 0 0 0 10px transparent; + } + 100% { + box-shadow: 0 0 0 2px var(--primaryColor); + } +} + +.summary { + transition: 'box-shadow' 0.4s var(--commonTiming), 'background-color' 0.4s var(--commonTiming); + + position: relative; + display: flex; + align-items: center; + height: 46px; + padding-left: 10px; + padding-right: 20px; + background-color: var(--backgroundColor); + border-radius: 2px; + cursor: pointer; + box-shadow: 0 0 0 2px transparent; + list-style: none; + + // Hide safari marker + &::-webkit-details-marker { + display: none; + } + + @supports (overflow: clip) { + overflow: clip; + } + + &:hover { + background-color: #f0f0f0; + } + + &[data-has-error='true'] { + &::before { + content: ''; + position: absolute; + left: 0; + top: 0; + border: 6px solid var(--errorColor); + border-radius: 0 0 50% 0; + } + } + &[data-highlighted='true'] { + background-color: var(--blue1); + animation: 
HighlightedWave 2s linear infinite; + } +} + +.container { + padding-right: 60px; + padding-top: 20px; +} + +.name { + .MixinEllipsis(); + max-width: 550px; + flex: 1; + padding-left: 10px; + font-size: 13px; + user-select: none; +} + +.search_box { + width: 400px; + margin-bottom: 20px; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/elements.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/elements.tsx new file mode 100644 index 000000000..69df8f37e --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/elements.tsx @@ -0,0 +1,49 @@ +/* istanbul ignore file */ + +import React from 'react'; +import { Reply } from 'components/IconPark'; +import styled from './elements.module.less'; + +export function AnchorIcon({ children, ...props }: any) { + return <Reply className={styled.anchor_icon} {...props} />; +} + +export function Details({ children, ...props }: any) { + return ( + <div className={styled.details} {...props}> + {children} + </div> + ); +} + +export function Summary({ children, ...props }: any) { + return ( + <div className={styled.summary} {...props}> + {children} + </div> + ); +} + +export function Container({ children, ...props }: any) { + return ( + <div className={styled.container} {...props}> + {children} + </div> + ); +} + +export function Name({ children, ...props }: any) { + return ( + <strong className={styled.name} {...props}> + {children} + </strong> + ); +} + +export function SearchBox({ children, ...props }: any) { + return ( + <div className={styled.search_box} {...props}> + {children} + </div> + ); +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/hooks.ts b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/hooks.ts new file mode 100644 index 000000000..4def53ad6 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/hooks.ts @@ -0,0 +1,25 @@ +import { useState, useRef, useCallback } from 'react'; + +export function useSearchBox() { + const filterTimer: any = useRef(); + + const [filter, setFilter] = useState(''); + + const onFilterChange = useCallback((value: string, e: any) => { + clearTimeout(filterTimer.current); + + filterTimer.current = setTimeout(() => { + setFilter(value); + }, 400); + }, []); + + const onInputKeyPress = useCallback((evt: React.KeyboardEvent) => { + if (evt.key === 'Enter') evt.preventDefault(); + }, []); + + return { + onFilterChange, + filter, + onInputKeyPress, + }; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/index.module.less new file mode 100644 index 000000000..1da1ab130 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/index.module.less @@ -0,0 +1,48 @@ +.compose_drawer { + :global(.arco-drawer-content) { + padding-top: 0; + padding-bottom: 200px; + } +} + +.drawer_header { + position: sticky; + top: 0; + z-index: 5; + margin: 0 -24px 0; + padding: 20px 16px 20px 24px; + background-color: white; + border-bottom: 1px solid var(--lineColor); +} + +.drawer_title { + position: relative; + margin-bottom: 0; + margin-right: 10px; +} + +.form_section { + margin-bottom: 20px; + padding-top: 24px; + &:not([data-fill-width]) { + padding-right: 60px; + } + > 
.section_heading { + background-color: white; + padding: 10px 0; + margin-bottom: 6px; + font-size: 14px; + color: var(--textColorStrong); + } +} + +.button_grid_row { + position: fixed; + z-index: 1; + bottom: 0; + width: 100%; + padding: 20px 24px; + margin-left: -16px; + background-color: white; + border-top: 1px solid var(--lineColor); +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/index.tsx new file mode 100644 index 000000000..3db6c190e --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/JobComposeDrawer/index.tsx @@ -0,0 +1,376 @@ +/* eslint-disable react-hooks/exhaustive-deps */ +import { + Button, + Spin, + Tooltip, + Message, + Grid, + Switch, + Popconfirm, + Drawer, + Form, + FormInstance, +} from '@arco-design/web-react'; +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; +import Modal from 'components/Modal'; +import { IconClose, IconLeft, IconDelete, IconSwap } from '@arco-design/web-react/icon'; +import { ChartNode } from 'components/WorkflowJobsCanvas/types'; +import GridRow from 'components/_base/GridRow'; +import { useSubscribe } from 'hooks'; +import jobTypeToMetaDatasMap from 'jobMetaDatas'; +import PubSub from 'pubsub-js'; +import React, { + forwardRef, + ForwardRefRenderFunction, + useCallback, + useEffect, + useImperativeHandle, + useState, +} from 'react'; +import { useToggle } from 'react-use'; +import styled from './index.module.less'; +import { ValidateErrorEntity } from 'typings/component'; +import { JobType } from 'typings/job'; +import { + DEFAULT_JOB, + definitionsStore, + editorInfosStore, + JobDefinitionForm, + SlotEntries, + IS_DEFAULT_EASY_MODE, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import DefaultMode, { Perspective } from './DefaultMode'; +import ExpertMode from './ExpertMode'; +import { FieldError } from '@arco-design/web-react/es/Form/interface'; + +const Row = Grid.Row; + +interface Props extends DrawerProps { + /** Is workflow global node */ + isGlobal: boolean; + isCheck?: boolean; + revisionId?: number; + uuid?: string; + prevNode?: ChartNode; + onClose?: any; + onSubmit?: any; + onDelete?: any; + onBack: () => void; + toggleVisible?: any; +} + +export type ExposedRef = { + validate(): Promise<boolean>; + getFormValues(): JobDefinitionForm; + reset(): any; + // isValidating: { current: boolean }; + isValidating: boolean; + isEasyMode: boolean; +}; + +export const COMPOSE_DRAWER_CHANNELS = { + broadcast_error: 'broadcast_error', + validation_passed: 'validation_passed', + inspect: 'inspect', + highlight: 'highlight', +}; + +export type InspectPayload = { + jobUuid?: string; + perspective?: Perspective; +} & HighlightPayload; + +export type HighlightPayload = { + varUuid?: string; + slotName?: string; +}; + +export const ComposeDrawerContext = React.createContext({ + uuid: undefined as string | undefined, + formData: undefined as JobDefinitionForm | undefined, + formInstance: undefined as FormInstance<JobDefinitionForm> | undefined, + isEasyMode: true as boolean, +}); + +const JobComposeDrawer: ForwardRefRenderFunction<ExposedRef, Props> = ( + { + isGlobal, + isCheck, + revisionId, + uuid, + prevNode, + visible, + toggleVisible, + onClose, + onSubmit, + onDelete, + onBack, + ...props + }, + parentRef, +) => { + const [formData, setFormData] = useState<JobDefinitionForm>(undefined as any); + const [isEasyMode, toggleEasyMode] = 
useToggle(IS_DEFAULT_EASY_MODE); + const [formInstance] = Form.useForm<JobDefinitionForm>(); + const [isValidating, toggleValidating] = useToggle(false); + const drawerTitle = () => { + if (isCheck && isGlobal) { + return '全局变量'; + } + if (isCheck && !isGlobal) { + return `${formData?.name || '任务'}`; + } + if (!isCheck && isGlobal) { + return '编辑全局变量'; + } + if (!isCheck && !isGlobal) { + return `编辑${formData?.name || '任务'}`; + } + }; + + // =========== Callbacks ================= + const onFinish = useCallback( + (values: JobDefinitionForm) => { + PubSub.publish(COMPOSE_DRAWER_CHANNELS.validation_passed); + onSubmit && onSubmit(values.variables ? values : formInstance.getFields()); + toggleVisible && toggleVisible(false); + }, + + [onSubmit], + ); + const onValidationFailed = useCallback((errors: { [key: string]: FieldError }) => { + const errorFields: { name: string[] }[] = []; + Object.keys(errors).forEach((key) => { + errorFields.push({ + name: key.split('.'), + }); + }); + const errInfo: ValidateErrorEntity<any> = { + values: undefined, + errorFields: errorFields, + outOfDate: false, + }; + Message.warning('配置有误,请检查'); + PubSub.publish(COMPOSE_DRAWER_CHANNELS.broadcast_error, errInfo); + }, []); + const onValuesChange = useCallback((_: any, values: JobDefinitionForm) => { + setFormData(values); + }, []); + const getFormValues = useCallback(() => { + return formInstance.getFieldsValue() as JobDefinitionForm; + }, []); + const reset = useCallback(() => { + return formInstance.resetFields(); + }, []); + const validateFields = useCallback(() => { + return new Promise<boolean>((resolve) => { + toggleValidating(true); + setTimeout(() => { + formInstance + .validate() + .then(() => { + resolve(true); + }) + .catch(() => { + resolve(false); + }) + .finally(() => { + toggleValidating(false); + }); + }, 50); + }); + }, []); + const onJobTypeChange = useCallback( + (type: JobType) => { + const slotEntries: SlotEntries = []; + + const jobMetaData = jobTypeToMetaDatasMap.get(type); + + if (jobMetaData && uuid) { + slotEntries.push(...Object.entries(jobMetaData.slots)); + editorInfosStore.upsertValue(uuid, { slotEntries, meta_yaml: jobMetaData.metaYamlString }); + } + + const nextFormData = jobMetaData + ? { ...formData, job_type: type, _slotEntries: slotEntries } + : { ...formData, job_type: type, _slotEntries: [] }; + + setFormData(nextFormData); + formInstance.setFieldsValue(nextFormData); + }, + [formData, uuid], + ); + + useImperativeHandle(parentRef, () => { + return { + validate: validateFields, + getFormValues, + reset, + isEasyMode, + isValidating, + }; + }); + + useEffect(() => { + if (uuid && formInstance && visible) { + const slotEntries: SlotEntries = []; + // Get definition and editor info by job uuid + // if either of them not exist, create a new one + const definition = + definitionsStore.getValueById(uuid) ?? definitionsStore.insertNewResource(uuid); + const editorInfo = + editorInfosStore.getValueById(uuid) ?? editorInfosStore.insertNewResource(uuid); + + editorInfo && slotEntries.push(...editorInfo.slotEntries); + + const nextFormData = { ...definition, _slotEntries: slotEntries }; + + // Legacy templates don't have easy_mode field + toggleEasyMode(definition.easy_mode ?? 
IS_DEFAULT_EASY_MODE); + setFormData(nextFormData); + formInstance.setFieldsValue(nextFormData); + } + }, [uuid, visible]); + + useEffect(() => { + toggleVisible(false); + }, [revisionId]); + + useSubscribe( + COMPOSE_DRAWER_CHANNELS.inspect, + (_: string, { jobUuid }: InspectPayload) => { + if (jobUuid) { + const definition = definitionsStore.getValueById(jobUuid); + + if (definition && !definition.easy_mode && !isEasyMode) { + Modal.confirm({ + title: '提示', + content: '任务当前模式为专家模式,不展示插槽,如需查看,请切换至普通模式', + }); + } + } + }, + [isEasyMode], + ); + + return ( + <ComposeDrawerContext.Provider value={{ uuid, formInstance, formData, isEasyMode }}> + <Drawer + className={styled.compose_drawer} + wrapClassName="#app-content" + visible={visible} + mask={false} + width="1200px" + onCancel={closeDrawer} + headerStyle={{ display: 'none' }} + {...props} + footer={null} + > + <Spin loading={isValidating} style={{ width: '100%' }}> + <Row className={styled.drawer_header} align="center" justify="space-between"> + <GridRow align="center" gap={10}> + {prevNode && ( + <Tooltip content="返回上一个浏览的任务"> + <Button icon={<IconLeft />} size="small" onClick={onBack}> + 返回 + </Button> + </Tooltip> + )} + <h3 className={styled.drawer_title}>{drawerTitle()}</h3> + </GridRow> + <GridRow gap="10"> + <> + <Button size="small" icon={<IconSwap />} onClick={onModeToggle}> + {isEasyMode ? '专家模式' : '普通模式'} + </Button> + + {!isGlobal && ( + <Popconfirm + disabled={isCheck} + title="删除后,该 Job 配置的内容都将丢失" + cancelText="取消" + okText="确认" + onConfirm={onDeleteClick} + > + <Button + disabled={isCheck} + size="small" + type="primary" + icon={<IconDelete />} + status="danger" + > + 删除 + </Button> + </Popconfirm> + )} + </> + + <Button size="small" icon={<IconClose />} onClick={closeDrawer} /> + </GridRow> + </Row> + + <Form + labelCol={{ span: 6 }} + wrapperCol={{ span: 18 }} + form={formInstance} + onSubmit={onFinish} + onSubmitFailed={onValidationFailed} + onValuesChange={onValuesChange as any} + initialValues={{ ...DEFAULT_JOB, easy_mode: isEasyMode }} + > + {/* NOTE: easy_mode is also a part of payload, + * but we are changing the value through the button in header, + * not by this form item's switch + */} + <Form.Item field="easy_mode" hidden triggerPropName="checked"> + <Switch disabled={isCheck} /> + </Form.Item> + + {isEasyMode ? 
( + <DefaultMode + isCheck={isCheck} + isGlobal={isGlobal} + onJobTypeChange={onJobTypeChange} + /> + ) : ( + <ExpertMode isCheck={isCheck} isGlobal={isGlobal} /> + )} + + <Form.Item> + <GridRow className={styled.button_grid_row} gap={16}> + <Button disabled={isCheck} type="primary" htmlType="submit" loading={isValidating}> + 确认 + </Button> + <Button onClick={closeDrawer}>取消</Button> + </GridRow> + </Form.Item> + </Form> + </Spin> + </Drawer> + </ComposeDrawerContext.Provider> + ); + + function closeDrawer() { + onClose && onClose(); + toggleVisible && toggleVisible(false); + } + function onDeleteClick() { + onDelete && onDelete(); + } + function onModeToggle() { + toggleEasyMode(); + const nextFormData = { ...formData, easy_mode: !isEasyMode }; + setFormData(nextFormData); + formInstance.setFieldsValue({ easy_mode: !isEasyMode }); + } +}; + +export function scrollDrawerBodyTo(scrollTo: number) { + const target = document.querySelector('#app-content .compose-drawer .arco-drawer-content'); + + if (target) { + target.scrollTop = scrollTo; + } +} + +export default forwardRef(JobComposeDrawer); diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/TemplateCanvas.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateCanvas.tsx similarity index 96% rename from web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/TemplateCanvas.tsx rename to web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateCanvas.tsx index eb8b32e48..7b851bf23 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/TemplateCanvas.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateCanvas.tsx @@ -23,7 +23,6 @@ import { } from 'components/WorkflowJobsCanvas/types'; import { Container } from 'components/WorkflowJobsCanvas/elements'; import { JobNodeRawDataSlim, WorkflowTemplateForm } from 'stores/template'; -import i18n from 'i18n'; import { Variable } from 'typings/variable'; import { ConvertParams, @@ -33,10 +32,11 @@ import { } from 'components/WorkflowJobsCanvas/helpers'; import GlobalConfigNode from 'components/WorkflowJobsCanvas/JobNodes/GlobalConfigNode'; import TemplateConfigNode from './TemplateConfigNode'; -import { TPL_GLOBAL_NODE_UUID } from '../store'; +import { TPL_GLOBAL_NODE_UUID } from 'views/WorkflowTemplates/TemplateForm/stores'; type Props = { isEdit?: boolean; + isCheck?: boolean; template: WorkflowTemplateForm; onCanvasClick?: any; onNodeClick?: any; @@ -53,13 +53,14 @@ export type ExposedRef = { }; const TemplateCanvas: ForwardRefRenderFunction<ExposedRef, Props> = ( - { template, onCanvasClick, onNodeClick, isEdit }, + { template, onCanvasClick, onNodeClick, isEdit, isCheck }, parentRef, ) => { const isInitialConvert = useRef(true); const [chartInstance, setChartInstance] = useState<OnLoadParams>(); const [elements, setElements] = useState<ChartElements>([]); + // ☢️ WARNING: since we using react-flow hooks here, // an ReactFlowProvider is REQUIRED to wrap this component inside const setSelectedNodes = useStoreActions((actions) => actions.setSelectedElements); @@ -103,6 +104,7 @@ const TemplateCanvas: ForwardRefRenderFunction<ExposedRef, Props> = ( groupRows: groupByUuidDeps, }, ); + setElements(jobElements); // Set isInitialConvert to false if (isInitialConvert.current) { @@ -174,7 +176,7 @@ const TemplateCanvas: ForwardRefRenderFunction<ExposedRef, Props> = ( }; function _createTPLGlobalNode(_: Variable[], data: any, options: any) { - const 
name = i18n.t('workflow.label_global_config'); + const name = '全局配置'; return { id: TPL_GLOBAL_NODE_UUID, diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateConfigNode.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateConfigNode.module.less new file mode 100644 index 000000000..04c12b71b --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateConfigNode.module.less @@ -0,0 +1,64 @@ +@import '~styles/mixins.less'; + +.add_job_button { + .MixinCircle(20px); + .MixinFlexAlignCenter(); + position: absolute; + display: flex; + background-color: white; + color: var(--textColorDisabled); + font-size: 20px; + transition: 0.4s cubic-bezier(0.4, 0, 0.2, 1); + + &:hover { + color: var(--primaryColor); + border-color: currentColor; + } + + &::before { + content: ''; + position: absolute; + height: 21px; + padding: 10px 0; + width: 13px; + background-color: currentColor; + background-clip: content-box; + } + + &.left { + left: -32px; + top: calc(50% - 12px); + + &::before { + right: -11px; + } + } + &.right { + right: -32px; + top: calc(50% - 12px); + + &::before { + left: -11px; + } + } + &.bottom { + bottom: -32px; + left: calc(50% - 12px); + + &::before { + top: -15px; + transform: rotate(90deg); + } + } +} + +.container { + &:hover { + z-index: 5; + } + &:not(:hover) { + .add_job_button { + opacity: 0; + } + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateConfigNode.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateConfigNode.tsx new file mode 100644 index 000000000..2368c971f --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/TemplateConfigNode.tsx @@ -0,0 +1,129 @@ +import React, { FC } from 'react'; +import { Handle, Position } from 'react-flow-renderer'; +import { + Container, + JobName, + JobStatusText, + StatusIcon, +} from 'components/WorkflowJobsCanvas/JobNodes/elements'; +import { + configStatusText, + JobNodeProps, + statusIcons, + WORKFLOW_JOB_NODE_CHANNELS, +} from 'components/WorkflowJobsCanvas/JobNodes/shared'; +import { ChartNodeStatus, NodeData } from 'components/WorkflowJobsCanvas/types'; +import GridRow from 'components/_base/GridRow'; +import classNames from 'classnames'; +import { PlusCircle } from 'components/IconPark'; +import styled from './TemplateConfigNode.module.less'; +import PubSub from 'pubsub-js'; +import { Tooltip } from '@arco-design/web-react'; +import { definitionsStore } from 'views/WorkflowTemplates/TemplateForm/stores'; +import { IconLoading } from '@arco-design/web-react/icon'; + +const detailRegx = /detail/g; +const isCheck = detailRegx.test(window.location.href); + +type AddPosition = 'left' | 'right' | 'bottom'; +const AddJobHandle: FC<{ position: AddPosition; onClick: any }> = ({ position, onClick }) => { + let _position = ''; + switch (position) { + case 'left': + _position = styled.left; + break; + case 'right': + _position = styled.right; + break; + case 'bottom': + _position = styled.bottom; + break; + default: + _position = styled.bottom; + break; + } + return ( + <Tooltip content={`Click to add a new job to ${position}`}> + <div + className={classNames([styled.add_job_button, _position])} + style={{ pointerEvents: 'auto' }} + onClick={onButtonClick} + > + <PlusCircle /> + </div> + </Tooltip> + ); + + function onButtonClick(event: React.SyntheticEvent<any>) { + onClick && onClick(position); + + event.stopPropagation(); + } +}; + +export type 
AddJobPayload = { + id: string; + data: NodeData; + position: AddPosition; +}; + +const NodeStatus: FC<{ status: ChartNodeStatus }> = ({ status }) => { + // node status is success in detail page + const icon = statusIcons[status]; + const text = configStatusText[status]; + + const isValidating = status === ChartNodeStatus.Validating; + + return isValidating ? ( + <GridRow gap={5}> + <IconLoading style={{ fontSize: 16, color: 'var(--primaryColor)' }} /> + <JobStatusText>{text}</JobStatusText> + </GridRow> + ) : ( + <GridRow gap={5}> + {icon && <StatusIcon src={icon} />} + + <JobStatusText>{text}</JobStatusText> + </GridRow> + ); +}; + +const TemplateConfigNode: FC<JobNodeProps> = ({ data, id }) => { + const values = definitionsStore.getValueById(id); + return ( + <Container + data-uuid={data.raw.uuid} + className={classNames([ + data.raw.is_federated && 'federated-mark', + data.mark, + styled.container, + ])} + > + <Handle type="target" position={Position.Top} /> + + <JobName data-secondary={Boolean(values?.name)}>{values?.name || '//点击配置'}</JobName> + + <NodeStatus key={data.status} status={data.status} /> + + {!isCheck && ( + <> + <AddJobHandle position="left" onClick={onAddJobClick} /> + <AddJobHandle position="right" onClick={onAddJobClick} /> + <AddJobHandle position="bottom" onClick={onAddJobClick} /> + </> + )} + + <Handle type="source" position={Position.Bottom} /> + </Container> + ); + + function onAddJobClick(position: AddPosition) { + PubSub.publish(WORKFLOW_JOB_NODE_CHANNELS.click_add_job, { + id, + data, + position, + } as AddJobPayload); + } +}; + +export default TemplateConfigNode; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/index.module.less new file mode 100644 index 000000000..9ddf6517c --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/index.module.less @@ -0,0 +1,24 @@ +.container { + height: 594px; + width: 100%; +} + +.chart_header { + height: 48px; + padding: 13px 20px; + font-size: 14px; + line-height: 22px; + background-color: white; +} + +.template_name { + margin-bottom: 0; +} + +.footer { + position: sticky; + bottom: 0; + z-index: 5; // just > react-flow' z-index + padding: 15px 36px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/index.tsx new file mode 100644 index 000000000..b0044b8b0 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateConfig/index.tsx @@ -0,0 +1,574 @@ +import React, { FC, useCallback, useRef, useState } from 'react'; +import { Button, Message, Tooltip } from '@arco-design/web-react'; +import Modal from 'components/Modal'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { WORKFLOW_JOB_NODE_CHANNELS } from 'components/WorkflowJobsCanvas/JobNodes/shared'; +import { ChartNode, ChartNodeStatus } from 'components/WorkflowJobsCanvas/types'; +import GridRow from 'components/_base/GridRow'; +import { useSubscribe } from 'hooks'; +import { cloneDeep, last } from 'lodash-es'; +import { isNode, ReactFlowProvider } from 'react-flow-renderer'; +import { Redirect, useHistory, useParams } from 'react-router'; +import { useToggle } from 'react-use'; +import { useRecoilState } from 'recoil'; +import { + createTemplateRevision, + createWorkflowTemplate, + updateWorkflowTemplate, +} from 'services/workflow'; +import { 
stringifyComplexDictField } from 'shared/formSchema'; +import { giveWeakRandomKey, nextTick, to } from 'shared/helpers'; +import { templateForm } from 'stores/template'; +import styled from './index.module.less'; +import { JobDependency } from 'typings/job'; +import { + JobSlotReferenceType, + WorkflowTemplatePayload, + WorkflowTemplateType, +} from 'typings/workflow'; +import { + definitionsStore, + editorInfosStore, + JobDefinitionForm, + mapUuidDepToJobName, + TPL_GLOBAL_NODE_UUID, + VariableDefinitionForm, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import JobComposeDrawer, { + COMPOSE_DRAWER_CHANNELS, + ExposedRef as DrawerExposedRef, + InspectPayload, + scrollDrawerBodyTo, +} from './JobComposeDrawer'; +import { + parseJobPropRef, + parseOtherJobRef, + parseSelfRef, + parseWorkflowRef, +} from './JobComposeDrawer/SloEntrytList/helpers'; +import WorkflowTemplateCanvas, { ExposedRef as CanVasExposedRef } from './TemplateCanvas'; +import { AddJobPayload } from './TemplateConfigNode'; +import { useGetIsCanEditTemplate } from 'views/WorkflowTemplates/shared'; + +function _createASlimJobRawData({ + uuid, + dependencies, +}: { + uuid?: string; + dependencies: JobDependency[]; +}): any { + return { + uuid: uuid || giveWeakRandomKey(), + dependencies: [...dependencies], + }; +} + +const TemplateConifg: FC<{ + isEdit?: boolean; + isCheck?: boolean; + revisionId?: number; +}> = ({ isEdit, isCheck, revisionId }) => { + const history = useHistory(); + const params = useParams<{ id?: string; revision_id?: string }>(); + + const [drawerVisible, toggleDrawerVisible] = useToggle(false); + + const drawerRef = useRef<DrawerExposedRef>(); + const canvasRef = useRef<CanVasExposedRef>(); + + const rePositionChart = useRef(false); + + const [submitting, setSubmitting] = useToggle(false); + /** Whether is workflow gloabl variables node */ + const [isGlobal, setIsGlobal] = useState(false); + const [prevNode, setPrevNode] = useState<ChartNode | undefined>(); + const [currNode, setCurrNode] = useState<ChartNode>(); + + const [template, setTemplate] = useRecoilState(templateForm); + + const { isCanEdit, tip } = useGetIsCanEditTemplate( + template.kind === WorkflowTemplateType.BUILT_IN, + ); + /** updateTemplate only isEdit is true + * createTemplate when status is isCreate || isRevision || revisionId !== undefined + */ + const isRevision = Boolean(params.revision_id); + + const onDrawerFormSubmit = useCallback( + (values: JobDefinitionForm) => { + saveCurrentValues({ values, isGlobal }); + + canvasRef.current?.updateNodeStatusById({ + id: currNode?.id!, + status: ChartNodeStatus.Success, + }); + + canvasRef.current?.setSelectedNodes([]); + setCurrNode(undefined); + setPrevNode(undefined); + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [currNode, isGlobal], + ); + + const onSubmitClick = useCallback(async () => { + if (!checkIfAllJobConfigCompleted()) { + return Message.warning('未完成配置或有正在编辑的任务,请确认后再次提交'); + } + toggleDrawerVisible(false); + setSubmitting(true); + + const { config, ...basics } = cloneDeep(template); + let payload: WorkflowTemplatePayload<JobDefinitionForm, VariableDefinitionForm> = { + ...basics, + config: {} as any, + }; + + payload.config.variables = cloneDeep( + definitionsStore.getValueById(TPL_GLOBAL_NODE_UUID)?.variables!, + ); + + payload.config.job_definitions = config.job_definitions.map((item) => { + const values = cloneDeep(definitionsStore.getValueById(item.uuid)); + return { + ...values, + dependencies: item.dependencies.map(mapUuidDepToJobName), + } as 
any; + }); + payload.editor_info = { + yaml_editor_infos: Object.fromEntries( + /** + * Convert job & variable uuid in reference to job & variable's name + */ + config.job_definitions.map((item) => { + const { name: selfName, variables: selfVars } = definitionsStore.getValueById(item.uuid)!; + const { slotEntries, meta_yaml } = cloneDeep(editorInfosStore.getValueById(item.uuid)!); + slotEntries.forEach(([_, slot]) => { + if (slot.reference_type === JobSlotReferenceType.OTHER_JOB) { + const [jobUuid, varUuid] = parseOtherJobRef(slot.reference); + const target = definitionsStore.getValueById(jobUuid); + + if (target) { + slot.reference = slot.reference + .replace(jobUuid, target.name) + .replace(varUuid, target.variables.find((item) => item._uuid === varUuid)?.name!); + } + } + + if (slot.reference_type === JobSlotReferenceType.SELF) { + const varUuid = parseSelfRef(slot.reference); + + slot.reference = slot.reference.replace( + varUuid, + selfVars.find((item) => item._uuid === varUuid)?.name!, + ); + } + + if (slot.reference_type === JobSlotReferenceType.JOB_PROPERTY) { + const [jobUuid] = parseJobPropRef(slot.reference); + const target = definitionsStore.getValueById(jobUuid); + + if (target) { + slot.reference = slot.reference.replace(jobUuid, target.name); + } + } + + if (slot.reference_type === JobSlotReferenceType.WORKFLOW) { + const varUuid = parseWorkflowRef(slot.reference); + const globalDef = definitionsStore.getValueById(TPL_GLOBAL_NODE_UUID)!; + slot.reference = slot.reference.replace( + varUuid, + globalDef.variables.find((item) => item._uuid === varUuid)?.name!, + ); + } + }); + + return [selfName, { meta_yaml, slots: Object.fromEntries(slotEntries) }]; + }), + ), + }; + + // === Remove variables' _uuid start === + payload.config.variables.forEach((item: Partial<VariableDefinitionForm>) => { + if (item._uuid) delete item._uuid; + }); + payload.config.job_definitions.forEach((job) => { + job.variables.forEach((variable: Partial<VariableDefinitionForm>) => { + if (variable._uuid) delete variable._uuid; + }); + }); + // === Remove variables' _uuid end === + + payload.config.group_alias = basics.group_alias; + + payload = stringifyComplexDictField(payload); + + const [res, error] = await to( + isEdit && !revisionId && !isRevision + ? updateWorkflowTemplate(params.id!, payload) + : createWorkflowTemplate(payload), + ); + if (error) { + setSubmitting(false); + return Message.error(error.message); + } + + if (isEdit && !revisionId && !isRevision) { + await to(createTemplateRevision(params.id!)); + } else { + await to(createTemplateRevision(res.data.id)); + } + + Message.success(isEdit ? '模板修改成功!' 
: '模板创建成功!');

    if (isEdit && !revisionId && !isRevision) {
      history.push(`/workflow-center/workflow-templates/detail/${params.id}/config`);
    } else {
      history.push(`/workflow-center/workflow-templates`);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [template]);

  useSubscribe(
    WORKFLOW_JOB_NODE_CHANNELS.click_add_job,
    (_: any, payload: AddJobPayload) => {
      const nextVal = cloneDeep(template);
      const jobDefs = nextVal.config.job_definitions;

      const { position, data, id } = payload;
      const rows = data.rows!;

      const rowIdx = rows?.findIndex((row) => row.find((col) => col.raw.uuid === id));
      const hasRowFollowed = Boolean(rows[rowIdx + 1]);

      const uuidOfLastJobInRow = last(rows[rowIdx])!.raw.uuid;
      const uuidOfHeadJobInRow = rows[rowIdx][0].raw.uuid;

      const leftPivotJobIdx = jobDefs.findIndex((item) => item.uuid === uuidOfHeadJobInRow);
      const rightPivotJobIdx = jobDefs.findIndex((item) => item.uuid === uuidOfLastJobInRow);

      const isInsert2Left = position === 'left';
      const isInsert2Bottom = position === 'bottom';

      const preJobs = jobDefs.slice(0, leftPivotJobIdx);
      const midJobs = jobDefs.slice(leftPivotJobIdx, rightPivotJobIdx + 1);
      const postJobs = jobDefs.slice(rightPivotJobIdx + 1, jobDefs.length);

      const newJobDeps: JobDependency[] = [];
      const newJobUuid = giveWeakRandomKey();

      if (isInsert2Bottom) {
        const depRow = rows[rowIdx];
        newJobDeps.push(...depRow.map((col: any) => ({ source: col.raw.uuid })));

        if (hasRowFollowed) {
          const followedRow = rows[rowIdx + 1];
          // The new job forms a row of its own, so jobs in the following row
          // now depend solely on it
          followedRow.forEach((col) => {
            const def = jobDefs.find((def) => def.uuid === col.raw.uuid);
            if (def) {
              def.dependencies = [{ source: newJobUuid }];
            }
          });
        }
      } else {
        const depRow = rows[rowIdx - 1];

        // The new job depends on every (non-global) job in the row above
        if (depRow && depRow.every((item) => !item.isGlobal)) {
          newJobDeps.push(...depRow.map((col: any) => ({ source: col.raw.uuid })));
        }

        // Every job in the following row gains a dependency on the new job
        if (hasRowFollowed) {
          const followedRow = rows[rowIdx + 1];
          followedRow.forEach((col) => {
            const def = jobDefs.find((def) => def.uuid === col.raw.uuid);
            if (def) {
              def.dependencies = def.dependencies.concat([{ source: newJobUuid }]);
            }
          });
        }
      }

      const newJob = _createASlimJobRawData({ uuid: newJobUuid, dependencies: newJobDeps });

      // When inserting to the right or bottom, `before` stays empty
      const before = [isInsert2Left && newJob].filter(Boolean);
      // When inserting to the left, `after` stays empty
      const after = [!isInsert2Left && newJob].filter(Boolean);

      nextVal.config.job_definitions = [...preJobs, ...before, ...midJobs, ...after, ...postJobs];

      setTemplate(nextVal);
    },
    [template.config.job_definitions.length],
  );
  useSubscribe(
    COMPOSE_DRAWER_CHANNELS.inspect,
    (_: string, { jobUuid }: InspectPayload) => {
      // Ignore when it is already the current job or no uuid is given
      if (jobUuid === currNode?.id || !jobUuid) return;

      inspectNode(jobUuid);
    },
    // currNode must be refreshed inside inspectNode > selectNode on every run,
    // otherwise currNode would be undefined
    [currNode?.id],
  );
  if (!template?.name) {
    if (isEdit) {
      return <Redirect to={`/workflow-center/workflow-templates/edit/basic/${params.id}`} />;
    }
    return <Redirect to={'/workflow-center/workflow-templates/create/basic'} />;
  }

  return (
    <ErrorBoundary>
      <main className={styled.container}>
        {isCheck ? 
( + <></> + ) : ( + <header className={styled.chart_header}> + <h3 className={styled.template_name}>{template.name}</h3> + </header> + )} + <ReactFlowProvider> + <WorkflowTemplateCanvas + ref={canvasRef as any} + isEdit={isEdit} + isCheck={isCheck} + template={template} + onNodeClick={selectNode} + onCanvasClick={onCanvasClick} + /> + </ReactFlowProvider> + + <JobComposeDrawer + ref={drawerRef as any} + isGlobal={isGlobal} + isCheck={isCheck} + revisionId={revisionId} + uuid={currNode?.id} + prevNode={prevNode} + visible={drawerVisible} + toggleVisible={toggleDrawerVisible} + onSubmit={onDrawerFormSubmit} + onClose={onCloseDrawer} + onDelete={onDeleteJob} + onBack={onBackToPrevJob} + /> + + {isCheck ? ( + <></> + ) : ( + <footer className={styled.footer}> + <GridRow gap="12"> + <Tooltip content={tip}> + <Button + type="primary" + loading={submitting} + onClick={onSubmitClick} + disabled={!isCanEdit} + > + 确认 + </Button> + </Tooltip> + <Button onClick={onPrevStepClick} disabled={submitting}> + 上一步 + </Button> + <Button onClick={onCancelForkClick} disabled={submitting}> + 取消 + </Button> + </GridRow> + </footer> + )} + </main> + </ErrorBoundary> + ); + + // ---------------- Methods -------------------- + + function saveCurrentValues(payload: { values?: JobDefinitionForm; isGlobal: boolean }) { + const currUuid = currNode?.id; + if (currUuid) { + const { _slotEntries: slotEntries, ...definitionValues } = + payload.values ?? drawerRef.current?.getFormValues()!; + + const editInfo = editorInfosStore.getValueById(currUuid); + + if (drawerRef.current?.isEasyMode && !payload.isGlobal && editInfo) { + const { meta_yaml } = editInfo; + editorInfosStore.upsertValue(currUuid, { slotEntries, meta_yaml }); + } + + definitionsStore.upsertValue(currUuid, definitionValues); + } + } + function checkIfAllJobConfigCompleted() { + const isAllCompleted = canvasRef.current?.chartInstance + .getElements() + .filter(isNode) + .every((node) => { + return node.data.status === ChartNodeStatus.Success; + }); + + return isAllCompleted; + } + + async function validateCurrentForm(nodeId?: string) { + const id = nodeId ?? currNode?.id; + if (id) { + canvasRef.current?.updateNodeStatusById({ + id, + status: ChartNodeStatus.Validating, + }); + const valid = await drawerRef.current?.validate(); + canvasRef.current?.updateNodeStatusById({ + id, + status: valid ? 
ChartNodeStatus.Success : ChartNodeStatus.Error, + }); + } + } + function inspectNode(uuid: string) { + const targetJobNode = canvasRef.current?.chartInstance + .getElements() + .filter(isNode) + .find((node) => { + return node.id === uuid; + }); + + if (targetJobNode && canvasRef.current) { + selectNode(targetJobNode as ChartNode); + canvasRef.current.setSelectedNodes([targetJobNode]); + } + } + async function selectNode(nextNode: ChartNode) { + if (nextNode.id === currNode?.id) return; + + if (currNode) { + setPrevNode(currNode); + } + + saveCurrentValues({ isGlobal }); + validateCurrentForm(currNode?.id).then(() => { + drawerRef.current?.reset(); + setCurrNode(nextNode); + setIsGlobal(!!nextNode?.data.isGlobal); + }); + + canvasRef.current?.updateNodeStatusById({ + id: nextNode?.id!, + status: ChartNodeStatus.Processing, + }); + + scrollDrawerBodyTo(0); + + toggleDrawerVisible(true); + + if (!drawerVisible && !rePositionChart.current) { + // Put whole chart at left side due to the opened drawer will override it, + // And we only do it once then let the user control it + canvasRef.current?.chartInstance.setTransform({ x: 50, y: 50, zoom: 1 }); + rePositionChart.current = true; + } + } + + // ---------------- Handlers -------------------- + + function onBackToPrevJob() { + if (prevNode) { + inspectNode(prevNode?.id); + + nextTick(() => { + setPrevNode(undefined); + }); + } + } + function onPrevStepClick() { + history.goBack(); + } + function onDeleteJob() { + const uuid = currNode?.id; + + if (definitionsStore.size === 2) { + Message.warning('工作流至少需要一个任务'); + return; + } + + if (uuid) { + const nextVal = cloneDeep(template); + const jobDefs = nextVal.config.job_definitions; + const idx = jobDefs.findIndex((def) => def.uuid === uuid); + const jobDefToRemove = jobDefs[idx]; + + const rows = currNode?.data.rows ?? 
[];
      const currNodeRowIdx = rows?.findIndex((row) => row.find((col) => col.raw.uuid === uuid));
      // If the row contains only currNode, the whole row is about to be deleted
      const shouldDeleteRow = rows[currNodeRowIdx].length === 1;

      for (let i = idx + 1; i < jobDefs.length; i++) {
        const def = jobDefs[i];
        // Jobs that depended on the removed job
        if (def.dependencies.some((dep) => dep.source === uuid)) {
          // Drop the removed job from this job's dependencies
          def.dependencies = def.dependencies.filter((dep) => dep.source !== uuid);

          // If the whole row disappears, downstream jobs inherit the removed job's dependencies
          if (shouldDeleteRow) {
            def.dependencies = def.dependencies.concat(jobDefToRemove.dependencies);
          }
        }
      }

      nextVal.config.job_definitions = [
        ...jobDefs.slice(0, idx),
        ...jobDefs.slice(idx + 1, jobDefs.length),
      ];
      setTemplate(nextVal);

      // Remove job from both stores
      definitionsStore.removeValueById(uuid);
      editorInfosStore.removeValueById(uuid);

      setCurrNode(null as any);
      toggleDrawerVisible(false);
    }
  }
  function onCancelForkClick() {
    Modal.confirm({
      title: '确认取消编辑模板吗?',
      content: '取消后,已配置的模板内容将不再保留',
      onOk() {
        history.push(`/workflow-center/workflow-templates`);
      },
    });
  }

  async function onCloseDrawer() {
    canvasRef.current?.setSelectedNodes([]);
    validateCurrentForm(currNode?.id);
    setPrevNode(undefined);
  }

  async function onCanvasClick() {
    // Bail out while the current job form is still validating
    if (drawerRef.current?.isValidating) {
      return;
    }
    saveCurrentValues({ isGlobal });

    validateCurrentForm(currNode?.id).then(() => {
      drawerRef.current?.reset();
      setCurrNode(undefined);
      nextTick(() => {
        toggleDrawerVisible(false);
      });
    });

    setPrevNode(undefined);
  }
};

export default TemplateConifg;
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/RevisionList.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/RevisionList.module.less
new file mode 100644
index 000000000..0d218a736
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/RevisionList.module.less
@@ -0,0 +1,128 @@
+.container {
  height: 634px;
  border-right: 1px solid #e5e8ef;
  border-bottom: 1px solid #e5e8ef;
  .main {
    width: 95%;
    height: 100%;
    .header {
      display: flex;
      align-items: center;
      justify-content: space-between;
      height: 40px;
      .name {
        padding-left: 20px;
        font-weight: 500;
        font-size: 13px;
        line-height: 40px;
        color: #1d2129;
      }
      .number {
        font-weight: 400;
        font-size: 12px;
        line-height: 40px;
        text-align: right;
        color: #86909c;
        margin-right: 36px;
      }
    }
    .list_section {
      height: 594px;
      overflow-y: auto;
      .empty {
        display: flex;
        align-items: center;
        justify-content: center;
        height: 150px;
        width: 100%;
      }
      .item {
        height: 54px;
        width: 100%;
        padding-left: 20px;
        cursor: pointer;
        &:hover {
          background: #f2f3f8;
        }
        .item_name {
          font-weight: 500;
          font-size: 12px;
          line-height: 24px;
        }
        .item_time {
          padding: 2px 8px;
          font-weight: 400;
          font-size: 12px;
          line-height: 20px;
          text-align: center;
          color: #1d2129;
          background: #f6f7fb;
        }
        .description {
          height: 20px;
          font-weight: 400;
          font-size: 12px;
          line-height: 20px;
          color: #4e5969;
          overflow: hidden;
          text-overflow: ellipsis;
          white-space: nowrap;
        }
        .popover_bottom {
          text-align: right;
        }
      }
    }
  }
  .collapse {
    transition: 0.1s 
background-color; + position: absolute; + top: 250px; + left: 256px; + z-index: 10; + display: flex; + justify-content: center; + align-items: center; + width: 25px; + height: 25px; + transform: translate(-50%, 50%); + padding: 2px 0 1px; + border-radius: 50%; + cursor: pointer; + background-color: rgb(var(--gray-1)); + + &:hover { + background-color: rgb(var(--gray-3)); + } + + > .anticon { + margin-top: 1px; + font-size: 10px; + } + } + .is_reverse { + transition: 0.1s background-color cubic-bezier(0.4, 0, 0.2, 1); + position: absolute; + top: 250px; + left: 5px; + z-index: 10; + display: flex; + justify-content: center; + align-items: center; + width: 25px; + height: 25px; + transform: translate(-50%, 50%) rotate(180deg); + padding: 1px 0 2px; + border-radius: 50%; + cursor: pointer; + background-color: rgb(var(--gray-1)); + + &:hover { + background-color: rgb(var(--gray-3)); + } + > .anticon { + margin-top: -1px; + transform: rotate(180deg); + } + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/RevisionList.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/RevisionList.tsx new file mode 100644 index 000000000..0b1af295f --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/RevisionList.tsx @@ -0,0 +1,312 @@ +import { Left } from 'components/IconPark'; +import React, { Dispatch, FC, SetStateAction, useEffect, useMemo, useState } from 'react'; +import { useTranslation } from 'react-i18next'; + +import { + Form, + Grid, + Message, + Modal, + Button, + Input, + Popover, + Divider, +} from '@arco-design/web-react'; +import MoreActions from 'components/MoreActions'; +import { Edit } from 'components/IconPark'; +import InvitionTable from 'components/InvitionTable'; +import { Participant, ParticipantType } from 'typings/participant'; +import { useQuery, UseQueryResult } from 'react-query'; +import { + deleteRevision, + fetchRevisionList, + patchRevisionComment, + getTemplateRevisionDownloadHref, +} from 'services/workflow'; +import { TemplateRevision, WorkflowTemplateMenuType } from 'typings/workflow'; +import CONSTANTS from 'shared/constants'; +import { ResponseInfo } from 'typings/app'; +import { formatTimestamp } from 'shared/date'; +import { saveBlob, to } from 'shared/helpers'; +import request from 'libs/request'; +import { sendTemplateRevision } from 'services/workflow'; +import styled from './RevisionList.module.less'; + +const Row = Grid.Row; +const Col = Grid.Col; + +const REVISION_QUERY_KEY = 'fetchRevisionList'; + +interface ListProps { + id: string; + collapsed: boolean; + name?: string; + ownerType?: WorkflowTemplateMenuType; + setRevisionId: Dispatch<SetStateAction<number>>; + setCollapsed: Dispatch<SetStateAction<boolean>>; +} + +const RevisionList: FC<ListProps> = (props) => { + const { t } = useTranslation(); + const [total, setTotal] = useState(0); + const [choosen, setChoosen] = useState(0); + const { id, name, collapsed, setRevisionId, setCollapsed, ownerType } = props; + + const listQuery = useQuery( + [REVISION_QUERY_KEY, id], + () => { + return fetchRevisionList(id); + }, + { + retry: 1, + refetchOnWindowFocus: false, + onSuccess(res) { + if (res.data.length > 0) { + setRevisionId(res.data[0].id); + } + }, + }, + ); + + const list = useMemo(() => { + if (!listQuery.data?.data) return []; + if (listQuery.data?.page_meta?.total_items) { + setTotal(listQuery.data.page_meta.total_items); + } + return listQuery.data.data; + }, [listQuery.data]); + + useEffect(() => { + if (list[choosen]) { 
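      // Whenever the highlighted item changes, push its revision id up to the parent,
      // so the detail panes (template config / workflow list) refetch for that revision.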
+ setRevisionId(list[choosen].id); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [choosen]); + + return ( + <div + className={styled.container} + style={{ + width: collapsed ? '256px' : '0px', + }} + > + {collapsed ? ( + <div className={styled.main}> + <div className={styled.header}> + <span className={styled.name}>{t('workflow.label_template_version')}</span> + <span className={styled.number}>{`共${total}个`}</span> + </div> + <section className={styled.list_section}> + {list.length === 0 ? ( + <div className={styled.empty}>{t('no_data')}</div> + ) : ( + list.map((item, index) => { + return ( + <RevisionListItem + key={item.id} + params={item} + setChoosen={setChoosen} + name={name} + index={index} + choosen={choosen} + listQuery={listQuery} + ownerType={ownerType} + /> + ); + }) + )} + </section> + </div> + ) : ( + <></> + )} + <div + onClick={() => setCollapsed(!collapsed)} + className={collapsed ? styled.collapse : styled.is_reverse} + > + <Left /> + </div> + </div> + ); +}; + +interface ItemProps { + params: TemplateRevision; + index: number; + choosen: number; + name?: string; + setChoosen: Dispatch<SetStateAction<number>>; + listQuery: UseQueryResult<ResponseInfo<TemplateRevision[]>, unknown>; + ownerType?: WorkflowTemplateMenuType; +} + +export const RevisionListItem: FC<ItemProps> = (props) => { + const { params, index, choosen, name, setChoosen, listQuery, ownerType } = props; + const [form] = Form.useForm(); + const { t } = useTranslation(); + const [visible, setVisible] = useState(false); + const [sendModalVisible, setSendModalVisible] = useState(false); + const [comment, setComment] = useState(params.comment); + const [isSubmitDisable, setIsSubmitDisable] = useState(true); + const [isLoading, setIsLoading] = useState(false); + + const moreActionsList = useMemo(() => { + const tempList = [ + { + label: '下载', + onClick: async () => { + const { id, revision_index } = params; + try { + const blob = await request(getTemplateRevisionDownloadHref(id), { + responseType: 'blob', + }); + saveBlob(blob, `V${revision_index}-${name}.json`); + } catch (error: any) { + Message.error(error.message); + } + }, + }, + { + label: '发送', + onClick: () => { + setSendModalVisible(true); + }, + }, + { + label: t('delete'), + danger: true, + onClick: () => { + Modal.confirm({ + title: `确认删除V${params.revision_index || ''}吗?`, + content: '删除后,该模板将无法进行操作,请谨慎删除', + onOk() { + deleteRevision(params.id) + .then(() => { + Message.success('删除成功'); + setChoosen(0); + listQuery.refetch(); + }) + .catch((error: any) => { + Message.error( + index === 0 + ? '无法删除最新版本模板' + : '删除失败,该模板已关联工作流任务。如需删除,请前往工作流替换模板后再试', + ); + }); + }, + }); + }, + }, + ]; + ownerType === WorkflowTemplateMenuType.PARTICIPANT && tempList.splice(1, 1); + return tempList; + }, [ownerType, listQuery, name, t, setChoosen, params, index]); + + return ( + <div + onClick={() => { + setChoosen(index); + }} + style={{ background: choosen === index ? 
'#f2f3f8' : '' }} + className={styled.item} + > + <Row> + <Col className={styled.item_name} span={4}>{`V${params.revision_index}`}</Col> + <Col className={styled.item_time} span={16}> + {formatTimestamp(params.created_at!)} + </Col> + <Col span={4} style={{ textAlign: 'center' }}> + <MoreActions actionList={moreActionsList} /> + </Col> + </Row> + <Row> + <Col span={21}> + <div className={styled.description}>{params.comment || CONSTANTS.EMPTY_PLACEHOLDER}</div> + </Col> + <Col span={3}> + <Popover + content={ + <> + <Input + defaultValue={params.comment} + placeholder="请输入模板描述" + allowClear={true} + onChange={(value) => { + setComment(value); + }} + /> + <Divider /> + <div className={styled.popover_bottom}> + <Button style={{ marginRight: '10px' }} onClick={() => setVisible(false)}> + 取消 + </Button> + <Button type="primary" onClick={onConfirm}> + {' '} + 确定 + </Button> + </div> + </> + } + title="编辑版本描述" + trigger="click" + popupVisible={visible} + > + <Button size="small" type="text" icon={<Edit />} onClick={() => setVisible(true)} /> + </Popover> + </Col> + </Row> + <Modal + title="发送至合作伙伴" + visible={sendModalVisible} + onOk={onOk} + confirmLoading={isLoading} + onCancel={() => setSendModalVisible(false)} + okButtonProps={{ disabled: isSubmitDisable }} + unmountOnExit={true} + style={{ minWidth: '700px' }} + > + <Form form={form}> + <Form.Item label="模版名称"> + <span>{name}</span> + </Form.Item> + <Form.Item label="模版版本"> + <span>{`V${params.revision_index}`}</span> + </Form.Item> + <Form.Item + label="合作伙伴" + field="participant_ids" + normalize={(value) => value?.map((item: any) => item.id)} + > + <InvitionTable + participantsType={ParticipantType.PLATFORM} + isSupportCheckbox={false} + onChange={(selectedParticipants: Participant[]) => { + setIsSubmitDisable(!selectedParticipants.length); + }} + /> + </Form.Item> + </Form> + </Modal> + </div> + ); + + async function onConfirm() { + await to(patchRevisionComment(params.id, { comment })); + setVisible(false); + listQuery.refetch(); + } + async function onOk() { + const participant_id = form.getFieldValue('participant_ids')?.[0]; + try { + setIsLoading(true); + await sendTemplateRevision(params.id, participant_id); + Message.success('发送成功!'); + setSendModalVisible(false); + } catch (error: any) { + Message.error(error.message); + } + setIsLoading(false); + } +}; + +export default RevisionList; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowList.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowList.module.less new file mode 100644 index 000000000..6527790af --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowList.module.less @@ -0,0 +1,21 @@ +.list_container { + width: 100%; +} + +.name_link { + display: block; + font-size: 16px; + + &[data-invalid='true'] { + color: var(--textColorDisabled); + + &:hover { + color: var(--primaryColor); + } + } +} + +.uuid_container { + display: block; + color: var(--textColorSecondary); +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowList.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowList.tsx new file mode 100644 index 000000000..9eab5e2ba --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowList.tsx @@ -0,0 +1,156 @@ +import React, { FC, useMemo } from 'react'; +import styled from './WorkflowList.module.less'; +import { Table, Message, Spin } from '@arco-design/web-react'; 
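// WorkflowList: the table of workflows that were instantiated from the currently
// selected template revision (see the revisionId prop below).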
+import { Link } from 'react-router-dom'; +import { useQuery } from 'react-query'; +import { fetchWorkflowListByRevisionId } from 'services/workflow'; +import i18n from 'i18n'; +import { formatTimestamp } from 'shared/date'; +import { Workflow, WorkflowState, WorkflowStateFilterParam } from 'typings/workflow'; +import WorkflowStage from './WorkflowStage'; +import WhichProject from 'components/WhichProject'; +import { useUrlState, useTablePaginationWithUrlState, useGetCurrentProjectId } from 'hooks'; +import { TIME_INTERVAL } from 'shared/constants'; + +type TableColumnsOptions = { + onSuccess?: Function; + withoutActions?: boolean; + defaultFavourFilteredValue?: string[]; + onForkableChange?: (record: Workflow, val: boolean) => void; + onFavourSwitchChange?: (record: Workflow) => void; +}; + +export const getWorkflowTableColumns = (options: TableColumnsOptions = {}) => { + const ret = [ + { + title: i18n.t('workflow.name'), + dataIndex: 'name', + key: 'name', + width: 200, + render: (name: string, record: Workflow) => { + const { state } = record; + const { INVALID } = WorkflowState; + return ( + <> + <Link + className={styled.name_link} + to={`/workflow-center/workflows/${record.id}`} + rel="nopener" + data-invalid={state === INVALID} + > + {name} + </Link> + <small className={styled.uuid_container}>uuid: {record.uuid}</small> + </> + ); + }, + }, + { + title: i18n.t('workflow.col_status'), + dataIndex: 'state', + width: 120, + render: (_: string, record: Workflow) => <WorkflowStage workflow={record} />, + }, + { + title: i18n.t('workflow.col_project'), + dataIndex: 'project_id', + width: 150, + render: (project_id: number) => <WhichProject id={project_id} />, + }, + { + title: i18n.t('workflow.col_date'), + dataIndex: 'created_at', + width: 150, + render: (date: number) => <div>{formatTimestamp(date)}</div>, + }, + ]; + + return ret; +}; + +type QueryParams = { + project?: string; + keyword?: string; + uuid?: string; + states?: WorkflowStateFilterParam[]; + page?: number; +}; + +export const WORKFLOW_LIST_QUERY_KEY = 'fetchWorkflowListByRevisionId'; +interface Props { + revisionId: number; +} + +const WorkflowList: FC<Props> = (props) => { + const [urlState, setUrlState] = useUrlState<QueryParams>({ keyword: '', uuid: '', states: [] }); + const projectId = useGetCurrentProjectId(); + const { revisionId } = props; + + const { urlState: pageInfoState, paginationProps } = useTablePaginationWithUrlState(); + const listQueryKey = [ + WORKFLOW_LIST_QUERY_KEY, + urlState.keyword, + urlState.uuid, + urlState.states, + projectId, + pageInfoState.page, + pageInfoState.pageSize, + revisionId, + ]; + const listQuery = useQuery( + listQueryKey, + () => { + return fetchWorkflowListByRevisionId(projectId || 0, { + // template_revision_id is available only Revision Item is clicked + template_revision_id: revisionId, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + const { isLoading, isError, data: res, error, refetch } = listQuery; + + if (isError && error) { + Message.error((error as Error).message); + } + + const workflowListShow = useMemo(() => { + const workflowList = res?.data ?? 
[]; + return workflowList; + }, [res]); + + return ( + <Spin loading={isLoading}> + <div className={styled.list_container}> + <Table + className="custom-table custom-table-left-side-filter" + data={workflowListShow} + columns={getWorkflowTableColumns({ + onSuccess, + })} + onChange={(_, filter, sorter, extra) => { + if (extra.action === 'filter') { + setUrlState({ + page: 1, + }); + } + }} + scroll={{ x: '100%' }} + rowKey="name" + pagination={{ + ...paginationProps, + total: listQuery.data?.page_meta?.total_items ?? undefined, + }} + /> + </div> + </Spin> + ); + + function onSuccess() { + refetch(); + } +}; + +export default WorkflowList; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowStage.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowStage.tsx new file mode 100644 index 000000000..b48ba83cb --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/WorkflowStage.tsx @@ -0,0 +1,10 @@ +import StateIndicator from 'components/StateIndicator'; +import React, { FC } from 'react'; +import { getWorkflowStage } from 'shared/workflow'; +import { Workflow } from 'typings/workflow'; + +const WorkflowStage: FC<{ workflow: Workflow; tag?: boolean }> = ({ workflow, tag }) => { + return <StateIndicator {...getWorkflowStage(workflow)} tag={tag} />; +}; + +export default WorkflowStage; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/index.module.less new file mode 100644 index 000000000..5444feae6 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/index.module.less @@ -0,0 +1,60 @@ +@import '~styles/mixins.less'; + +.padding_container { + width: 100%; + border-bottom: 1px solid var(--lineColor); +} + +.tabs_container { + flex: auto; + min-width: 0; +} + +.name { + margin-top: 0; + margin-bottom: -3px; + font-size: 16px; + font-weight: 600; + line-height: 24px; +} + +.comment { + font-size: 12px; + color: var(--textColorSecondary); +} + +.content { + position: relative; + display: flex; + margin: 0px -20px; +} + +.header_col { + margin-top: 9px; + text-align: right; +} + +.template_create { + position: absolute; + right: 5px; + top: 10px; + z-index: 2; +} + +.avatar { + .MixinSquare(48px); + background-color: var(--primary-1); + color: white; + border-radius: 4px; + font-size: 18px; + text-align: center; + + &::before { + display: inline-block; + width: 100%; + height: 100%; + content: ''; + background: url('../../../assets/icons/atom-icon-algorithm-management.svg') no-repeat; + background-size: contain; + } +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/index.tsx new file mode 100644 index 000000000..021980374 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateDetail/index.tsx @@ -0,0 +1,482 @@ +import React, { FC, useMemo, useState } from 'react'; +import styled from './index.module.less'; +import { useTranslation } from 'react-i18next'; +import { generatePath, useHistory, useParams } from 'react-router'; + +import { Spin, Grid, Button, Space, Tabs, Message, Modal } from '@arco-design/web-react'; +import { useGetIsCanEditTemplate } from '../shared'; +import SharedPageLayout from 'components/SharedPageLayout'; +import BreadcrumbLink from 'components/BreadcrumbLink'; + +import request from 'libs/request'; +import { saveBlob } 
from 'shared/helpers'; +import CONSTANTS from 'shared/constants'; +import MoreActions from 'components/MoreActions'; +import PropertyList from 'components/PropertyList'; +import TemplateConfig from '../TemplateConfig'; +import WorkflowList from './WorkflowList'; +import RevisionList from './RevisionList'; +import routes, { WorkflowTemplateDetailParams, WorkflowTemplateDetailTab } from '../routes'; +import { Rocket } from 'components/IconPark'; + +import { + deleteTemplate, + fetchRevisionDetail, + fetchTemplateById, + getTemplateDownloadHref, +} from 'services/workflow'; +import { useQuery } from 'react-query'; +import { parseComplexDictField } from 'shared/formSchema'; +import { + definitionsStore, + editorInfosStore, + JobDefinitionForm, + preprocessVariables, + TPL_GLOBAL_NODE_UUID, +} from 'views/WorkflowTemplates/TemplateForm/stores'; +import { giveWeakRandomKey } from 'shared/helpers'; +import { omit } from 'lodash-es'; +import { JobSlotReferenceType, WorkflowTemplate, WorkflowTemplateMenuType } from 'typings/workflow'; +import { + parseOtherJobRef, + parseSelfRef, + parseWorkflowRef, +} from '../TemplateConfig/JobComposeDrawer/SloEntrytList/helpers'; +import { useRecoilState } from 'recoil'; +import { templateForm } from 'stores/template'; +import CopyFormModal from '../TemplateList/CopyFormModal'; +import { formatTimestamp } from 'shared/date'; +import { Job } from 'typings/job'; +import { Variable } from 'typings/variable'; +import { useUnmount } from 'react-use'; +import { useResetCreateForm } from 'hooks/template'; + +const Row = Grid.Row; +const Col = Grid.Col; + +const TemplateDetail: FC = () => { + const { t } = useTranslation(); + const params = useParams<WorkflowTemplateDetailParams>(); + const history = useHistory(); + const reset = useResetCreateForm(); + const [template, setTemplateForm] = useRecoilState(templateForm); + const [collapsed, setCollapsed] = useState(true); + const [isShowCopyFormModal, setIsShowCopyFormModal] = useState(false); + const [selectedTemplate, setSelectedTemplate] = useState<WorkflowTemplate>(); + const [revisionId, setRevisionId] = useState(0); + const { isCanEdit } = useGetIsCanEditTemplate( + params.templateType === WorkflowTemplateMenuType.BUILT_IN, + ); + const templateQuery = useQuery( + ['fetchTemplateById', params.id], + () => { + return fetchTemplateById(params.id); + }, + { + retry: 1, + refetchOnWindowFocus: false, + onSuccess(res) { + onQuerySuccess(res.data); + }, + }, + ); + + const templateDetail = useMemo(() => { + if (!templateQuery.data?.data) return undefined; + return templateQuery.data.data; + }, [templateQuery.data]); + + const revisionQuery = useQuery( + ['fetchRevisionDetail', revisionId, templateDetail], + () => { + return fetchRevisionDetail(revisionId); + }, + { + retry: 1, + enabled: templateDetail !== undefined && revisionId !== 0, + refetchOnWindowFocus: false, + keepPreviousData: true, + onSuccess(res) { + const { + name, + kind, + group_alias, + created_at, + updated_at, + creator_username, + comment, + id, + } = (templateDetail as unknown) as WorkflowTemplate<Job, Variable>; + const { is_local, config, editor_info } = parseComplexDictField(res.data); + const revision_id = res.data.id; + const data: WorkflowTemplate<Job, Variable> = { + name: name!, + kind: kind!, + group_alias: group_alias!, + created_at, + updated_at, + creator_username, + comment, + id, + revision_id, + is_local, + config, + editor_info, + }; + onQuerySuccess(data); + }, + }, + ); + + const BreadcrumbLinkPaths = useMemo(() => { + return [ + { 
label: 'menu.label_workflow_tpl', to: '/workflow-center/workflow-templates' }, + { label: 'workflow.template_detail' }, + ]; + }, []); + const displayedProps = useMemo( + () => [ + { + value: template.group_alias, + label: t('workflow.col_group_alias'), + }, + { + value: template.creator_username, + label: t('workflow.col_creator'), + }, + { + value: template.updated_at + ? formatTimestamp(template.updated_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: t('workflow.col_update_time'), + }, + { + value: template.created_at + ? formatTimestamp(template.created_at) + : CONSTANTS.EMPTY_PLACEHOLDER, + label: t('workflow.col_create_time'), + }, + ], + [template, t], + ); + + useUnmount(() => { + reset(); + definitionsStore.clearMap(); + editorInfosStore.clearMap(); + }); + + return ( + <SharedPageLayout title={<BreadcrumbLink paths={BreadcrumbLinkPaths} />}> + <Spin loading={templateQuery.isLoading || revisionQuery.isLoading}> + <div className={styled.padding_container}> + <Row> + <Col span={12}> + <Space size="medium"> + <div + className={styled.avatar} + data-name={ + template?.name ? template.name.slice(0, 1) : CONSTANTS.EMPTY_PLACEHOLDER + } + /> + <div> + <h3 className={styled.name}>{template?.name ?? '....'}</h3> + <Space className={styled.comment}> + {template?.comment ?? CONSTANTS.EMPTY_PLACEHOLDER} + </Space> + </div> + </Space> + </Col> + <Col className={styled.header_col} span={12}> + <Space> + <Button + type="primary" + disabled={!template?.name} + onClick={() => + history.push(`/workflow-center/workflows/initiate/basic/${params.id}`) + } + > + {t('workflow.create_workflow')} + </Button> + {params.templateType !== WorkflowTemplateMenuType.PARTICIPANT && ( + <Button + disabled={!template?.name || !isCanEdit} + onClick={() => + history.push(`/workflow-center/workflow-templates/edit/basic/${params.id}`) + } + > + 编辑 + </Button> + )} + <MoreActions + actionList={[ + { + label: t('workflow.action_download'), + disabled: !template?.name, + onClick: async () => { + const { id, name } = template; + try { + const blob = await request(getTemplateDownloadHref(id!), { + responseType: 'blob', + }); + saveBlob(blob, `${name}.json`); + } catch (error: any) { + Message.error(error.message); + } + }, + }, + { + label: t('copy'), + disabled: !template?.name, + onClick: () => { + setIsShowCopyFormModal((prevState) => true); + }, + }, + { + label: t('delete'), + danger: true, + disabled: !template?.name, + onClick: () => { + Modal.confirm({ + title: `确认删除${template.name || ''}吗?`, + content: '删除后,该模板将无法进行操作,请谨慎删除', + onOk() { + deleteTemplate(template.id!) + .then(() => { + Message.success('删除成功'); + history.push( + `/workflow-center/workflow-templates?tab=${params.templateType}}`, + ); + }) + .catch((error: any) => { + Message.error(error.message); + }); + }, + }); + }, + }, + ]} + /> + </Space> + </Col> + </Row> + <PropertyList cols={6} colProportions={[1, 1, 1, 1]} properties={displayedProps} /> + </div> + <div className={styled.content}> + <RevisionList + id={params.id} + name={template?.name} + ownerType={params.templateType} + collapsed={collapsed} + setCollapsed={setCollapsed} + setRevisionId={setRevisionId} + /> + <Button + className={styled.template_create} + type="text" + size="mini" + icon={<Rocket />} + disabled={revisionId === 0} + onClick={() => { + history.push( + `/workflow-center/workflow-templates/edit/basic/${params.id}/${revisionId}`, + ); + }} + > + 生成新模板 + </Button> + <div + className={styled.tabs_container} + style={{ width: collapsed ? 
'calc(100% - 256px)' : '100%' }} + > + <Tabs + defaultActiveTab={params.tab} + onChange={(tab) => history.push(getTabPath(tab))} + style={{ marginBottom: 0 }} + > + <Tabs.TabPane + title={t('workflow.step_tpl_config')} + key={WorkflowTemplateDetailTab.Config} + /> + <Tabs.TabPane + title={t('workflow.label_workflow_list')} + key={WorkflowTemplateDetailTab.List} + /> + </Tabs> + <div className={styled.padding_container} style={{ paddingTop: 0 }}> + {params.tab === WorkflowTemplateDetailTab.Config && template.name && ( + <TemplateConfig isCheck={true} revisionId={revisionId} /> + )} + {params.tab === WorkflowTemplateDetailTab.List && ( + <WorkflowList revisionId={revisionId} /> + )} + </div> + </div> + </div> + </Spin> + <CopyFormModal + selectedWorkflowTemplate={selectedTemplate} + initialValues={{ + name: selectedTemplate ? `${selectedTemplate.name}${t('workflow.copy')}` : undefined, + }} + visible={isShowCopyFormModal} + onSuccess={onCopyFormModalSuccess} + onCancel={onCopyFormModalClose} + /> + </SharedPageLayout> + ); + + // ------------- Methods --------------- + function getTabPath(tab: string) { + return generatePath(routes.WorkflowTemplateDetail, { + ...params, + tab: tab as WorkflowTemplateDetailTab, + }); + } + function onCopyFormModalSuccess() { + setIsShowCopyFormModal((prevState) => false); + } + function onCopyFormModalClose() { + setIsShowCopyFormModal((prevState) => false); + setSelectedTemplate(() => undefined); + } + + function onQuerySuccess(data: WorkflowTemplate<Job, Variable>) { + /** + * Parse the template data from server: + * 1. basic info like name, group_alias... is written to recoil as-is + * 2. each job_definition will be tagged with a uuid, + * and dep source job names replaced with the corresponding uuid, + * then the {uuid, dependencies} will be saved to recoil while the real job def values go to TemplateForm/stores.ts + */ + const { + name, + kind, + group_alias, + created_at, + updated_at, + creator_username, + comment, + id, + is_local, + config, + editor_info, + revision_id, + } = parseComplexDictField(data); + setSelectedTemplate(data); + definitionsStore.upsertValue(TPL_GLOBAL_NODE_UUID, { + variables: config.variables.map(preprocessVariables), + } as JobDefinitionForm); +
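The JSDoc that follows summarizes a two-step rewrite; here is a rough, self-contained sketch of it first (types simplified, and fakeUuid standing in for giveWeakRandomKey, both illustration-only assumptions):

```typescript
// Sketch only: mirrors the nameToUuidMap reduce below with simplified types.
type JobDependency = { source: string };
type RawJob = { name: string; dependencies: JobDependency[] };

const fakeUuid = () => Math.random().toString(36).slice(2); // stand-in for giveWeakRandomKey

function remapJobsToUuids(jobs: RawJob[]) {
  // Step 1: assign one uuid per job name
  const nameToUuidMap = jobs.reduce((map, job) => {
    map[job.name] = fakeUuid();
    return map;
  }, {} as Record<string, string>);

  // Step 2: dependencies reference uuids instead of job names
  return jobs.map((job) => ({
    uuid: nameToUuidMap[job.name],
    dependencies: job.dependencies.map((dep) => ({ source: nameToUuidMap[dep.source] })),
  }));
}
```

/** + * 1. Generate a Map<job-name, uuid> + * 2. 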
upsert job definition values to store + * - need to stringify code type variable's value + */ + const nameToUuidMap = config.job_definitions.reduce((map, job) => { + const thisJobUuid = giveWeakRandomKey(); + map[job.name] = thisJobUuid; + + const value = omit(job, 'dependencies') as JobDefinitionForm; + value.variables = value.variables.map(preprocessVariables); + // Save job definition values to definitionsStore + definitionsStore.upsertValue(thisJobUuid, { ...value }); + + return map; + }, {} as Record<string, string>); + /** + * Convert job & variable name in reference to + * job & variable's UUID we assign above + */ + config.job_definitions.forEach((job) => { + const jobUuid = nameToUuidMap[job.name]; + const self = definitionsStore.getValueById(jobUuid)!; + + // Save job editor info to editorInfosStore + const targetEditInfo = editor_info?.yaml_editor_infos[job.name]; + + if (targetEditInfo) { + editorInfosStore.upsertValue(jobUuid, { + slotEntries: Object.entries(targetEditInfo.slots) + .sort() + .map(([slotName, slot]) => { + if (slot.reference_type === JobSlotReferenceType.OTHER_JOB) { + const [jobName, varName] = parseOtherJobRef(slot.reference); + const targetJobUuid = nameToUuidMap[jobName]; + const target = definitionsStore.getValueById(targetJobUuid); + + if (target) { + slot.reference = slot.reference + .replace(jobName, targetJobUuid) + .replace( + varName, + target.variables.find((item) => item.name === varName)?._uuid!, + ); + } + } + + if (slot.reference_type === JobSlotReferenceType.JOB_PROPERTY) { + const [jobName] = parseOtherJobRef(slot.reference); + const targetJobUuid = nameToUuidMap[jobName]; + const target = definitionsStore.getValueById(targetJobUuid); + + if (target) { + slot.reference = slot.reference.replace(jobName, targetJobUuid); + } + } + + if (slot.reference_type === JobSlotReferenceType.SELF) { + const varName = parseSelfRef(slot.reference); + + slot.reference = slot.reference.replace( + varName, + self.variables.find((item) => item.name === varName)?._uuid!, + ); + } + + if (slot.reference_type === JobSlotReferenceType.WORKFLOW) { + const varName = parseWorkflowRef(slot.reference); + const globalDef = definitionsStore.getValueById(TPL_GLOBAL_NODE_UUID)!; + + slot.reference = slot.reference.replace( + varName, + globalDef.variables.find((item) => item.name === varName)?._uuid!, + ); + } + + return [slotName, slot]; + }), + meta_yaml: targetEditInfo.meta_yaml, + }); + } else { + editorInfosStore.insertNewResource(jobUuid); + } + }); + + const jobNodeSlimRawDataList = config.job_definitions.map((job) => { + const uuid = nameToUuidMap[job.name]; + + return { + uuid, + dependencies: job.dependencies.map((dep) => ({ source: nameToUuidMap[dep.source] })), + }; + }); + setTemplateForm({ + id, + revision_id, + name, + comment, + is_local, + group_alias, + config: { + variables: [], + job_definitions: jobNodeSlimRawDataList, + }, + kind, + created_at, + updated_at, + creator_username, + }); + } +}; + +export default TemplateDetail; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.module.less new file mode 100644 index 000000000..ccbfe91ec --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.module.less @@ -0,0 +1,21 @@ +.container { + padding-top: 20px; + min-height: 100%; +} + +.styled_form { + width: 500px; + margin: 0 auto; + :global(.arco-form-label-item > label) { + 
font-size: 12px; + white-space: normal; + color: var(--color-text-2); + display: flex; + align-items: center; + justify-content: right; + } +} + +.styled_alert { + margin-bottom: 20px; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.tsx index b380b57b5..4591f5131 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepOneBasic/index.tsx @@ -1,184 +1,359 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Form, Button, Input, Card, Switch, Spin } from 'antd'; -import { useTranslation } from 'react-i18next'; +import React, { FC, useMemo } from 'react'; +import styled from './index.module.less'; +import { Button, Input, Card, Spin, Alert, Form } from '@arco-design/web-react'; import { useHistory, useParams } from 'react-router-dom'; import { useRecoilState } from 'recoil'; import GridRow from 'components/_base/GridRow'; import FormLabel from 'components/FormLabel'; -import { templateForm } from 'stores/template'; -import { WorkflowTemplatePayload } from 'typings/workflow'; +import { templateBaseInfoForm, templateForm, defaultBaseInfoForm } from 'stores/template'; +import { + JobSlotReferenceType, + WorkflowTemplate, + WorkflowTemplatePayload, + WorkflowTemplateType, +} from 'typings/workflow'; import { useQuery } from 'react-query'; -import { fetchTemplateById } from 'services/workflow'; -import { upsertValue, TPL_GLOBAL_NODE_UUID, fillEmptyWidgetSchema } from '../store'; +import { fetchRevisionDetail, fetchTemplateById } from 'services/workflow'; +import { + definitionsStore, + TPL_GLOBAL_NODE_UUID, + preprocessVariables, + editorInfosStore, + JobDefinitionForm, +} from 'views/WorkflowTemplates/TemplateForm/stores'; import { giveWeakRandomKey } from 'shared/helpers'; -import { omit } from 'lodash'; +import { omit } from 'lodash-es'; import { parseComplexDictField } from 'shared/formSchema'; +import { validNamePattern, MAX_COMMENT_LENGTH } from 'shared/validator'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import { useIsFormValueChange } from 'hooks'; +import { useGetIsCanEditTemplate } from 'views/WorkflowTemplates/shared'; -const Container = styled(Card)` - padding-top: 20px; - min-height: 100%; -`; -const StyledForm = styled(Form)` - width: 500px; - margin: 0 auto; -`; +import { + parseOtherJobRef, + parseSelfRef, + parseWorkflowRef, +} from '../../TemplateConfig/JobComposeDrawer/SloEntrytList/helpers'; +import { Job } from 'typings/job'; +import { Variable } from 'typings/variable'; type Props = { isEdit?: boolean; isHydrated?: React.MutableRefObject<boolean>; + onFormValueChange?: () => void; }; -const TemplateStepOneBasic: FC<Props> = ({ isEdit, isHydrated }) => { - const { t } = useTranslation(); +const TemplateStepOneBasic: FC<Props> = ({ + isEdit, + isHydrated, + onFormValueChange: onFormValueChangeFromProps, +}) => { const history = useHistory(); - const params = useParams<{ id?: string }>(); + const params = useParams<{ id?: string; revision_id?: string }>(); const [formInstance] = Form.useForm(); + const [baseInfoTemplate, setBaseInfoTemplate] = useRecoilState(templateBaseInfoForm); const [template, setTemplateForm] = useRecoilState(templateForm); + const { isCanEdit, tip } = useGetIsCanEditTemplate( + template.kind === WorkflowTemplateType.BUILT_IN, + ); + const 
isRevision = Boolean(params.revision_id); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); // DO fetch only when it's edit-mode + const tplQ = useQuery(['fetchTemplate', params.id], () => fetchTemplateById(params.id!), { - enabled: isEdit && !isHydrated?.current, + enabled: Boolean(isEdit && !isHydrated?.current), retry: 1, refetchOnWindowFocus: false, onSuccess(res) { - /** - * Parse the template data from server: - * 1. basic infos like name, group_alias, is_left... write to recoil all the same - * 2. each job_definition will be tagged with a uuid, - * and replace deps souce job name with corresponding uuid, - * then the {uuid, dependencies} will save to recoil and real job def values should go ../store.ts - */ - const { id, name, comment, is_left, group_alias, config } = parseComplexDictField(res.data); - - upsertValue(TPL_GLOBAL_NODE_UUID, { variables: config.variables.map(fillEmptyWidgetSchema) }); - - /** - * 1. Genrate a Map<uuid, job-name> - * 2. upsert job definition values to store - * - need deal variable codes - */ - const nameToUuidMap = config.job_definitions.reduce((map, job) => { - const thisJobUuid = giveWeakRandomKey(); - map[job.name] = thisJobUuid; - - const value = omit(job, 'dependencies'); - value.variables = value.variables.map(fillEmptyWidgetSchema); - - upsertValue(thisJobUuid, value); - - return map; - }, {} as Record<string, string>); - - const jobNodeSlimRawDataList = config.job_definitions.map((job) => { - const uuid = nameToUuidMap[job.name]; - - return { - uuid, - dependencies: job.dependencies.map((dep) => ({ source: nameToUuidMap[dep.source] })), - }; - }); - - setTemplateForm({ - id, - name, - comment, - is_left, - group_alias, - config: { - variables: [], - job_definitions: jobNodeSlimRawDataList, - }, - }); - - formInstance.setFieldsValue({ name, comment, is_left, group_alias }); - - if (isHydrated) { - isHydrated.current = true; - } + onQuerySuccess(res.data); }, }); + const templateDetail = useMemo(() => { + if (!tplQ.data?.data) return undefined; + return tplQ.data.data; + }, [tplQ.data]); + + const revisionQuery = useQuery( + ['fetchRevisionDetail', params.revision_id], + () => fetchRevisionDetail(params.revision_id!), + { + retry: 1, + refetchOnWindowFocus: false, + enabled: Boolean(templateDetail !== undefined && isRevision && !isHydrated?.current), + onSuccess(res) { + const { + name, + kind, + group_alias, + created_at, + updated_at, + creator_username, + } = (templateDetail as unknown) as WorkflowTemplate<Job, Variable>; + const { id, is_local, config, editor_info, comment } = parseComplexDictField(res.data); + const data: WorkflowTemplate<Job, Variable> = { + name: name!, + kind: kind!, + group_alias: group_alias!, + created_at, + updated_at, + creator_username, + comment, + id, + is_local, + config, + editor_info, + }; + onQuerySuccess(data); + }, + }, + ); return ( - <Container bordered={false}> - <Spin spinning={!!isEdit && tplQ.isLoading}> - <StyledForm + <Card className={styled.container} bordered={false}> + <Spin + loading={!!isEdit && tplQ.isLoading && revisionQuery.isLoading} + style={{ width: '100%' }} + > + <Form + className={styled.styled_form} labelCol={{ span: 6 }} wrapperCol={{ span: 18 }} + colon={true} form={formInstance} - onFinish={onFinish} - initialValues={template} - onValuesChange={onFormChange as any} + onSubmit={onFinish} + initialValues={!isEdit && !isHydrated?.current ? 
defaultBaseInfoForm : baseInfoTemplate} + onValuesChange={onFormValueChange} > + {!isCanEdit && <Alert className={styled.styled_alert} type="info" banner content={tip} />} <Form.Item - name="name" - label={t('workflow.label_new_template_name')} - rules={[{ required: true, message: t('workflow.msg_tpl_name_required') }]} + field="name" + label="模板名称" + rules={[ + { required: true, message: '请输入模板名!' }, + { + match: validNamePattern, + message: '只支持大小写字母,数字,中文开头或结尾,可包含“_”和“-”,不超过 63 个字符', + }, + ]} > - <Input placeholder={t('workflow.placeholder_template_name')} /> + <Input placeholder="请输入模板名称" disabled={!isRevision && (!isCanEdit || isEdit)} /> </Form.Item> <Form.Item - name="group_alias" + field="group_alias" label={ <FormLabel - label={t('workflow.label_group_alias')} - tooltip="模版根据该字段进行匹配,启动工作流时双侧必须选择相同 Group 名的两份模版" + label="Group" + tooltip="模板根据该字段进行匹配,启动工作流时双侧必须选择相同 Group 名的两份模板" /> } - rules={[{ required: true, message: t('workflow.msg_group_required') }]} + rules={[{ required: true, message: '请输入 Group 名' }]} > - <Input placeholder={t('workflow.msg_group_required')} /> + <Input placeholder="请输入 Group 名" disabled={!isCanEdit} /> </Form.Item> <Form.Item - name="is_left" - label={ - <FormLabel - label={t('workflow.label_is_left')} - tooltip="模版分为左模版和右模版,双侧能够成功启动一个工作流的条件是两边必须分别用了左和右模版且匹配的模板" - /> - } - valuePropName="checked" + field="comment" + label="工作流模板描述" + rules={[{ max: MAX_COMMENT_LENGTH, message: '最多为 200 个字符' }]} > - <Switch /> - </Form.Item> - - <Form.Item name="comment" label={t('workflow.label_template_comment')}> - <Input.TextArea rows={4} placeholder={t('workflow.placeholder_comment')} /> + <Input.TextArea + rows={4} + placeholder={isCanEdit ? '请输入工作流模板描述' : undefined} + disabled={!isCanEdit} + /> </Form.Item> <Form.Item wrapperCol={{ offset: 6 }}> <GridRow gap={16} top="12"> <Button type="primary" htmlType="submit"> - {t('next_step')} + 下一步 </Button> - <Button onClick={backToList}>{t('cancel')}</Button> + <ButtonWithModalConfirm + onClick={backToList} + isShowConfirmModal={isFormValueChanged || Boolean(template.name)} + > + 取消 + </ButtonWithModalConfirm> </GridRow> </Form.Item> - </StyledForm> + </Form> </Spin> - </Container> + </Card> ); function backToList() { - history.push('/workflow-templates'); + history.push(`/workflow-center/workflow-templates`); } function onFormChange( _: any, - values: Pick<WorkflowTemplatePayload, 'comment' | 'group_alias' | 'is_left' | 'name'>, + values: Pick<WorkflowTemplatePayload, 'comment' | 'group_alias' | 'name'>, ) { + onFormValueChangeFromProps?.(); setTemplateForm({ ...template, ...values, }); + setBaseInfoTemplate({ + ...baseInfoTemplate, + ...values, + }); } function onFinish() { - history.push( - isEdit ? `/workflow-templates/edit/jobs/${params.id}` : `/workflow-templates/create/jobs`, - ); + if (isHydrated) { + isHydrated.current = true; + } + + let path = `/workflow-center/workflow-templates/create/jobs`; + if (isEdit && !isRevision) { + path = `/workflow-center/workflow-templates/edit/jobs/${params.id}`; + } + if (isEdit && isRevision) { + path = `/workflow-center/workflow-templates/edit/jobs/${params.id}/${params.revision_id}`; + } + history.push(path); + } + function onQuerySuccess(data: WorkflowTemplate<Job, Variable>) { + /** + * Parse the template data from server: + * 1. basic infos like name, group_alias... write to recoil all the same + * 2. 
each job_definition will be tagged with a uuid, + * and replace deps source job name with corresponding uuid, + * then the {uuid, dependencies} will be saved to recoil and real job def values should go ../stores.ts + */ + const { + name, + kind, + group_alias, + created_at, + updated_at, + creator_username, + comment, + id, + is_local, + config, + editor_info, + } = parseComplexDictField(data); + + formInstance.setFieldsValue({ name, comment, group_alias }); + + definitionsStore.upsertValue(TPL_GLOBAL_NODE_UUID, { + variables: config.variables.map(preprocessVariables), + } as JobDefinitionForm); + /** + * 1. Generate a Map<job-name, uuid> + * 2. upsert job definition values to store + * - need to stringify code type variable's value + */ + const nameToUuidMap = config.job_definitions.reduce((map, job) => { + const thisJobUuid = giveWeakRandomKey(); + map[job.name] = thisJobUuid; + + const value = omit(job, 'dependencies') as JobDefinitionForm; + value.variables = value.variables.map(preprocessVariables); + // Save job definition values to definitionsStore + definitionsStore.upsertValue(thisJobUuid, { ...value }); + + return map; + }, {} as Record<string, string>); + /** + * Convert job & variable name in reference to + * job & variable's UUID we assign above + */ + config.job_definitions.forEach((job) => { + const jobUuid = nameToUuidMap[job.name]; + const self = definitionsStore.getValueById(jobUuid)!; + + // Save job editor info to editorInfosStore + const targetEditInfo = editor_info?.yaml_editor_infos[job.name]; + if (targetEditInfo) { + editorInfosStore.upsertValue(jobUuid, { + slotEntries: Object.entries(targetEditInfo.slots) + .sort() + .map(([slotName, slot]) => { + if (slot.reference_type === JobSlotReferenceType.OTHER_JOB) { + const [jobName, varName] = parseOtherJobRef(slot.reference); + const targetJobUuid = nameToUuidMap[jobName]; + const target = definitionsStore.getValueById(targetJobUuid); + + if (target) { + slot.reference = slot.reference + .replace(jobName, targetJobUuid) + .replace( + varName, + target.variables.find((item) => item.name === varName)?._uuid!, + ); + } + } + + if (slot.reference_type === JobSlotReferenceType.JOB_PROPERTY) { + const [jobName] = parseOtherJobRef(slot.reference); + const targetJobUuid = nameToUuidMap[jobName]; + const target = definitionsStore.getValueById(targetJobUuid); + + if (target) { + slot.reference = slot.reference.replace(jobName, targetJobUuid); + } + } + + if (slot.reference_type === JobSlotReferenceType.SELF) { + const varName = parseSelfRef(slot.reference); + + slot.reference = slot.reference.replace( + varName, + self.variables.find((item) => item.name === varName)?._uuid!, + ); + } + + if (slot.reference_type === JobSlotReferenceType.WORKFLOW) { + const varName = parseWorkflowRef(slot.reference); + const globalDef = definitionsStore.getValueById(TPL_GLOBAL_NODE_UUID)!; + + slot.reference = slot.reference.replace( + varName, + globalDef.variables.find((item) => item.name === varName)?._uuid!, + ); + } + + return [slotName, slot]; + }), + meta_yaml: targetEditInfo.meta_yaml, + }); + } else { + editorInfosStore.insertNewResource(jobUuid); + } + }); +
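All four reference_type branches above do the same string surgery: the human-readable names inside a slot reference are swapped for the uuids assigned earlier. A toy walk-through with a guessed reference shape (the real grammar belongs to parseOtherJobRef and friends, which this diff does not show):

```typescript
// Illustration only; the actual slot-reference format may differ from this guess.
const nameToUuidMap: Record<string, string> = { 'raw-data': 'u-123' };
const varUuidByName: Record<string, string> = { batch_size: 'v-456' };

let reference = "workflow.jobs['raw-data'].variables.batch_size"; // hypothetical shape
reference = reference
  .replace('raw-data', nameToUuidMap['raw-data'])
  .replace('batch_size', varUuidByName['batch_size']);
// reference is now "workflow.jobs['u-123'].variables.v-456"
```

const jobNodeSlimRawDataList = config.job_definitions.map((job) => { + const uuid = nameToUuidMap[job.name]; + + return { + uuid, + dependencies: job.dependencies.map((dep) => ({ source: nameToUuidMap[dep.source] })), + }; + }); + + setTemplateForm({ + id, + name, + comment, + is_local, + group_alias, + config: { + variables: [], + job_definitions: jobNodeSlimRawDataList, + }, + kind, +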
created_at, + updated_at, + creator_username, + }); + + if (isHydrated) { + isHydrated.current = true; + } } }; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableForm/WidgetSchema.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableForm/WidgetSchema.tsx deleted file mode 100644 index a7a46989e..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableForm/WidgetSchema.tsx +++ /dev/null @@ -1,210 +0,0 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Form, Input, Button, InputNumber, Switch, Select, FormInstance } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { - Variable, - VariableWidgetSchema, - VariableComponent, - VariableValueType, -} from 'typings/variable'; -import { PlusCircle, Delete } from 'components/IconPark'; -import IconButton from 'components/IconButton'; -import ModelCodesEditorButton from 'components/ModelCodesEditorButton'; -import { set } from 'lodash'; -import DatasetSelect from 'components/DatasetSelect'; - -const WidgetFormItem = styled(Form.Item)` - .ant-input-number { - width: 100%; - } -`; -const DelEnumButton = styled(IconButton)` - position: absolute; - top: 4px; - right: -30px; -`; -const Enum = styled.div` - position: relative; -`; - -const { STRING, CODE } = VariableValueType; - -/** - * NOTE: we are not opeing [Radio, Checkbox, Switch] at the moment, - * bacause Radio, Checkbox can be replaced with Select - * and Switch's boolean type value is not supported by server side - */ -const WIDGET_COMPONENTS__supported: Partial<Record<VariableComponent, any>> = { - [VariableComponent.Input]: { use: Input, label: 'Input - 输入框', type: STRING }, - [VariableComponent.Select]: { use: Select, label: 'Select - 选择器', type: STRING }, - [VariableComponent.NumberPicker]: { - use: InputNumber, - label: 'Number - 数字输入框', - type: STRING, // 'number' - }, - [VariableComponent.TextArea]: { - use: Input.TextArea, - label: 'TextArea - 多行文本输入框', - type: STRING, - }, - [VariableComponent.Code]: { use: ModelCodesEditorButton, label: 'Code - 代码', type: CODE }, - [VariableComponent.Dataset]: { - use: DatasetSelect, - label: 'Dataset - 数据集选择器', - type: STRING, - }, -}; - -export const componentOptions = Object.entries(WIDGET_COMPONENTS__supported).map(([key, val]) => ({ - value: key, - label: val.label, -})); - -type Props = { - form: FormInstance; - path: (number | string)[]; - value?: VariableWidgetSchema; - onChange?: (val: Variable) => any; -}; - -const WidgetSchema: FC<Props> = ({ form, path, value }) => { - const { t } = useTranslation(); - - if (!value) return null; - const data = value; - const variableIdx = path.slice(0, -1); - - const Widget = WIDGET_COMPONENTS__supported[data.component!]?.use || Input; - const type = WIDGET_COMPONENTS__supported[data.component!]?.type; - const widgetHasEnum = _hasEnum(data?.component); - const isCheckableCompnent = _isCheckableCompnent(data?.component); - - return ( - <div> - <Form.Item - name={[...path, 'component']} - label={t('workflow.label_var_comp')} - rules={[{ required: true, message: '请选择组件' }]} - > - <Select placeholder="请选择组件" onChange={onComponentChange}> - {componentOptions.map((comp) => { - return ( - <Select.Option key={comp.value} value={comp.value}> - {comp.label} - </Select.Option> - ); - })} - </Select> - </Form.Item> - - <Form.Item hidden name={[...variableIdx, 'value_type']}> - <Input /> - 
</Form.Item> - - {widgetHasEnum && ( - <Form.Item - name={[...path, 'enum']} - label={t('workflow.label_var_enum')} - rules={[{ required: true, message: '请添加至少一个选项' }]} - > - <Form.List name={[...path, 'enum']}> - {(fields, { add, remove }) => { - return ( - <div> - {fields.map((field, index) => { - return ( - <Enum key={field.key + index}> - <Form.Item rules={[{ required: true, message: '填写选项值' }]} {...field}> - <Input placeholder={`选项 ${index + 1}`} /> - </Form.Item> - <DelEnumButton - circle - icon={<Delete />} - onClick={() => remove(field.name)} - /> - </Enum> - ); - })} - - <Button size="small" icon={<PlusCircle />} onClick={() => add()}> - 添加选项 - </Button> - </div> - ); - }} - </Form.List> - </Form.Item> - )} - - {/* The default value path is outside `widget_schema`, so the temp solution is name.slice(0, -1) */} - <WidgetFormItem - name={[...variableIdx, 'value']} - label={t('workflow.label_default_val')} - valuePropName={isCheckableCompnent ? 'checked' : 'value'} - normalize={formatValue} - > - {widgetHasEnum ? ( - <Widget placeholder={t('workflow.placeholder_default_val')} allowClear> - {widgetHasEnum && - (data.enum || []).map((opt: any, index: number) => { - return ( - <Select.Option key={opt + index} value={opt}> - {opt || '##请填充选项值##'} - </Select.Option> - ); - })} - </Widget> - ) : ( - <Widget placeholder={t('workflow.placeholder_default_val')} allowClear /> - )} - </WidgetFormItem> - - <Form.Item name={[...path, 'tooltip']} label={t('用户输入提示')}> - <Input placeholder={t('输入提示解释该字段作用')} /> - </Form.Item> - - <Form.Item name={[...path, 'required']} valuePropName="checked" label="是否必填"> - <Switch /> - </Form.Item> - </div> - ); - - function formatValue(value: any) { - if (type === CODE) { - return value; - } - - if (typeof value === 'string') { - return value; - } - - // Due to server only accept string type value - if (typeof value === 'number') { - return value.toString(); - } - } - - function onComponentChange(val: VariableComponent) { - const variables = form.getFieldValue('variables'); - // TODO: it's not clean to setFieldsValue using lodash-set, find a better way! 
- set(variables, `[${variableIdx[0]}].value_type`, WIDGET_COMPONENTS__supported[val].type); - - form.setFieldsValue({ - variables, - }); - } -}; - -function _hasEnum(comp?: VariableComponent) { - if (!comp) return false; - const { Select, Radio, Checkbox } = VariableComponent; - return [Select, Radio, Checkbox].includes(comp); -} -function _isCheckableCompnent(comp?: VariableComponent) { - if (!comp) return false; - const { Switch, Radio, Checkbox } = VariableComponent; - return [Switch, Radio, Checkbox].includes(comp); -} - -export default WidgetSchema; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableForm/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableForm/index.tsx deleted file mode 100644 index 40d399dad..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableForm/index.tsx +++ /dev/null @@ -1,137 +0,0 @@ -import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Form, Input, Button, Col, Radio, Popconfirm, FormInstance } from 'antd'; -import { useTranslation } from 'react-i18next'; -import { Variable, VariableAccessMode } from 'typings/variable'; -import { Delete, Down } from 'components/IconPark'; -import { MixinCommonTransition } from 'styles/mixins'; -import VariablePermission from 'components/VariblePermission'; -import { indicators } from 'components/VariableLabel'; -import WidgetSchema from './WidgetSchema'; -import { useToggle } from 'react-use'; - -const Details = styled.details` - margin-bottom: 20px; - - .open-indicator { - ${MixinCommonTransition()} - } - &[open] { - .open-indicator { - transform: rotate(180deg); - } - } -`; -const Summary = styled.summary` - display: flex; - align-items: center; - height: 46px; - padding: 0 20px; - background-color: var(--backgroundColor); - cursor: pointer; - - &:hover { - background-color: #f0f0f0; - } -`; -const Name = styled.strong` - flex: 1; - padding-left: 10px; - font-size: 12px; - line-height: 0; - user-select: none; -`; -const Container = styled.div` - padding-right: 60px; - padding-top: 20px; -`; - -type Props = { - path: (number | string)[]; - value?: Variable; - form: FormInstance; - onChange?: (val: Variable) => any; - onRemove: any; -}; - -/** - * Dynamic Variable form list - * MUST wrap with a antd.Form - */ -const VariableForm: FC<Props> = ({ form, path, value, onRemove }) => { - const { t } = useTranslation(); - const [isOpen] = useToggle(!value?.name); - - if (!value) { - return null; - } - - const data = value; - - const PermissionIndicator = indicators[data.access_mode]; - - return ( - <Details open={isOpen}> - <Summary> - <PermissionIndicator /> - - <Name>{data.name || '点击编辑变量'}</Name> - <Col> - <Popconfirm title={t('确认删除该变量吗')} onConfirm={onRemoveClick as any}> - <Button type="link" size="small" icon={<Delete />}> - 删除 - </Button> - </Popconfirm> - - <Down className="open-indicator" /> - </Col> - </Summary> - - <Container> - <Form.Item - name={[...path, 'name']} - label={t('workflow.label_var_name')} - rules={[ - { required: true, message: t('workflow.msg_varname_required') }, - { - pattern: /^[a-zA-Z_0-9-]+$/g, - message: t('workflow.msg_varname_invalid'), - }, - ]} - > - <Input placeholder={t('workflow.placeholder_var_name')} /> - </Form.Item> - - <Form.Item - name={[...path, 'access_mode']} - label={t('workflow.label_peer_access')} - rules={[{ required: true }]} - > - <Radio.Group> - <Radio.Button 
value={VariableAccessMode.PEER_WRITABLE}> - <VariablePermission.Writable desc /> - </Radio.Button> - <Radio.Button value={VariableAccessMode.PEER_READABLE}> - <VariablePermission.Readable desc /> - </Radio.Button> - <Radio.Button value={VariableAccessMode.PRIVATE}> - <VariablePermission.Private desc /> - </Radio.Button> - </Radio.Group> - </Form.Item> - - <Form.Item name={[...path, 'widget_schema']} noStyle> - <WidgetSchema form={form} path={[...path, 'widget_schema']} /> - </Form.Item> - </Container> - </Details> - ); - - function onRemoveClick(event: MouseEvent) { - event.stopPropagation(); - - onRemove && onRemove(value); - } -}; - -export default VariableForm; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableList.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableList.tsx deleted file mode 100644 index 21ef14c72..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/VariableList.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import React, { FC } from 'react'; -import { Form, Button, FormInstance } from 'antd'; -import { Plus } from 'components/IconPark'; -import { useTranslation } from 'react-i18next'; -import VariableForm from './VariableForm'; -import NoResult from 'components/NoResult'; -import { cloneDeep } from 'lodash'; -import { DEFAULT_VARIABLE } from '../../store'; - -const VariableList: FC<{ form: FormInstance }> = ({ form }) => { - const { t } = useTranslation(); - - return ( - <Form.List name="variables"> - {(fields, { add, remove }) => ( - <div> - {fields.map((field) => ( - <Form.Item - {...field} - noStyle - rules={[{ required: true, message: t('project.msg_var_name') }]} - > - <VariableForm form={form} path={[field.name]} onRemove={() => remove(field.name)} /> - </Form.Item> - ))} - {fields.length === 0 && ( - <NoResult - text="暂无变量,新建一个吧" - style={{ margin: '50px auto 20px', width: '30%' }} - /> - )} - <Form.Item wrapperCol={{ offset: 9 }}> - {/* DO NOT simplify `() => add()` to `add`, it will pollute form value with $event */} - <Button - type="primary" - size="small" - icon={<Plus />} - onClick={() => add(cloneDeep(DEFAULT_VARIABLE))} - > - {t('workflow.btn_add_var')} - </Button> - </Form.Item> - </div> - )} - </Form.List> - ); -}; - -export default VariableList; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/index.tsx deleted file mode 100644 index 3bb344ff1..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/JobComposeDrawer/index.tsx +++ /dev/null @@ -1,224 +0,0 @@ -import React, { forwardRef, ForwardRefRenderFunction, useEffect, useImperativeHandle } from 'react'; -import styled from 'styled-components'; -import { Drawer, Row, Button, Form, Switch, Input, Select, Popconfirm } from 'antd'; -import { DrawerProps } from 'antd/lib/drawer'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import GridRow from 'components/_base/GridRow'; -import { Close, Swap, Delete } from 'components/IconPark'; -import { useTranslation } from 'react-i18next'; -import { JobType, JobDefinitionForm } from 'typings/job'; -import { omit } from 'lodash'; -import VariableList from './VariableList'; -import { DEFAULT_JOB, getOrInsertValueById } from '../../store'; -import YAMLTemplateEditorButton from 
'components/YAMLTemplateEditorButton'; - -const Container = styled(Drawer)` - top: 60px; - .ant-drawer-body { - padding-top: 0; - padding-bottom: 200px; - } -`; -const DrawerHeader = styled(Row)` - top: 0; - margin: 0 -24px 0; - padding: 20px 16px 20px 24px; - background-color: white; - border-bottom: 1px solid var(--lineColor); -`; -const DrawerTitle = styled.h3` - position: relative; - margin-bottom: 0; - margin-right: 10px; -`; -const FormSection = styled.section` - margin-bottom: 20px; - padding-top: 24px; - &:not([data-fill]) { - padding-right: 60px; - } - > h4 { - margin-bottom: 16px; - font-size: 15px; - color: var(--textColorStrong); - } -`; -const ButtonGridRow = styled(GridRow)` - position: fixed; - z-index: 1; - bottom: 60px; - width: 100%; - padding: 20px 0; - background-color: white; - border-top: 1px solid var(--lineColor); -`; - -interface Props extends DrawerProps { - isGlobal: boolean; - uuid?: string; - onClose?: any; - onSubmit?: any; - onDelete?: any; - toggleVisible?: any; -} - -export type ExposedRef = { - validate(): Promise<boolean>; - getFormValues(): JobDefinitionForm; - reset(): any; -}; - -const JobComposerDrawer: ForwardRefRenderFunction<ExposedRef, Props> = ( - { isGlobal, uuid, visible, toggleVisible, onClose, onSubmit, onDelete, ...props }, - parentRef, -) => { - const { t } = useTranslation(); - const [formInstance] = Form.useForm<JobDefinitionForm>(); - - useImperativeHandle(parentRef, () => { - return { - validate: validateFields, - getFormValues, - reset: formInstance.resetFields, - }; - }); - - useEffect(() => { - if (uuid && formInstance && visible) { - const newValues = getOrInsertValueById(uuid)!; - formInstance.setFieldsValue(newValues); - } - }, [uuid, formInstance, visible]); - - return ( - <ErrorBoundary> - <Container - getContainer="#app-content" - visible={visible} - mask={false} - width="640px" - push={{ distance: -240 }} - onClose={closeDrawer} - headerStyle={{ display: 'none' }} - {...props} - > - <DrawerHeader align="middle" justify="space-between"> - <Row align="middle"> - <DrawerTitle>编辑 Job</DrawerTitle> - </Row> - <GridRow gap="10"> - <Button size="small" icon={<Swap />} disabled> - 切换至简易模式 - </Button> - {!isGlobal && ( - <Popconfirm - title={t('workflow.msg_del_job_warning')} - cancelText={t('cancel')} - okText={t('submit')} - onConfirm={onDeleteClick} - > - <Button size="small" type="primary" icon={<Delete />} danger> - 删除 - </Button> - </Popconfirm> - )} - - <Button size="small" icon={<Close />} onClick={closeDrawer} /> - </GridRow> - </DrawerHeader> - - <Form - labelCol={{ span: 6 }} - wrapperCol={{ span: 18 }} - form={formInstance} - onFinish={onFinish} - initialValues={DEFAULT_JOB} - > - {!isGlobal && ( - <FormSection> - <h4>基本信息</h4> - <Form.Item - name="name" - label={t('workflow.label_job_name')} - rules={[{ required: true, message: t('workflow.msg_jobname_required') }]} - > - <Input placeholder={t('workflow.placeholder_jobname')} /> - </Form.Item> - - <Form.Item - name="job_type" - label={t('workflow.label_job_type')} - rules={[{ required: true, message: t('workflow.msg_jobname_required') }]} - > - <Select placeholder={t('workflow.placeholder_job_type')}> - {Object.values(omit(JobType, 'UNSPECIFIED')).map((type) => ( - <Select.Option key={type} value={type}> - {type} - </Select.Option> - ))} - </Select> - </Form.Item> - - <Form.Item - name="is_federated" - label={t('workflow.label_job_federated')} - valuePropName="checked" - > - <Switch /> - </Form.Item> - - <Form.Item - name="yaml_template" - 
label={t('workflow.label_job_yaml')} - rules={[{ required: true, message: t('workflow.msg_yaml_required') }]} - > - <YAMLTemplateEditorButton /> - </Form.Item> - </FormSection> - )} - - <FormSection data-fill> - <h4>自定义变量</h4> - {/* Form.List inside VariableList */} - <VariableList form={formInstance} /> - </FormSection> - - <Form.Item wrapperCol={{ offset: 0 }}> - <ButtonGridRow gap={16} top="12"> - <Button type="primary" htmlType="submit"> - {t('confirm')} - </Button> - - <Button onClick={closeDrawer}>{t('cancel')}</Button> - </ButtonGridRow> - </Form.Item> - </Form> - </Container> - </ErrorBoundary> - ); - - function closeDrawer() { - onClose && onClose(); - toggleVisible && toggleVisible(false); - } - function onFinish(values: JobDefinitionForm) { - onSubmit && onSubmit(values); - toggleVisible && toggleVisible(false); - } - function onDeleteClick() { - onDelete && onDelete(); - } - async function validateFields() { - try { - await formInstance.validateFields(); - return true; - } catch (error) { - return false; - } - } - function getFormValues() { - return formInstance.getFieldsValue(true) as JobDefinitionForm; - } -}; - -export default forwardRef(JobComposerDrawer); diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/TemplateConfigNode.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/TemplateConfigNode.tsx deleted file mode 100644 index fe07a49d4..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/TemplateConfigNode.tsx +++ /dev/null @@ -1,144 +0,0 @@ -import React, { FC } from 'react'; -import { Handle, Position } from 'react-flow-renderer'; -import { - Container, - JobName, - JobStatusText, - StatusIcon, -} from 'components/WorkflowJobsCanvas/JobNodes/elements'; -import { - configStatusText, - JobNodeProps, - statusIcons, - WORKFLOW_JOB_NODE_CHANNELS, -} from 'components/WorkflowJobsCanvas/JobNodes/shared'; -import { NodeData } from 'components/WorkflowJobsCanvas/types'; -import GridRow from 'components/_base/GridRow'; -import classNames from 'classnames'; -import { PlusCircle } from 'components/IconPark'; -import styled from 'styled-components'; -import { MixinCircle, MixinCommonTransition, MixinFlexAlignCenter } from 'styles/mixins'; -import PubSub from 'pubsub-js'; -import { Tooltip } from 'antd'; -import { getOrInsertValueById } from '../store'; - -const AddJobButton = styled.div` - ${MixinCircle(20)} - ${MixinFlexAlignCenter()} - ${MixinCommonTransition()} - position: absolute; - display: flex; - background-color: white; - color: var(--textColorDisabled); - font-size: 20px; - - &:hover { - color: var(--primaryColor); - border-color: currentColor; - } - - &::before { - content: ''; - position: absolute; - height: 21px; - padding: 10px 0; - width: 13px; - background-color: currentColor; - background-clip: content-box; - } - - &.left { - left: -32px; - top: calc(50% - 12px); - - &::before { - right: -11px; - } - } - &.right { - right: -32px; - top: calc(50% - 12px); - - &::before { - left: -11px; - } - } - &.bottom { - bottom: -32px; - left: calc(50% - 12px); - - &::before { - top: -15px; - transform: rotate(90deg); - } - } -`; -const StyledContainer = styled(Container)` - &:hover { - z-index: 5; - } - &:not(:hover) { - .${AddJobButton.styledComponentId} { - opacity: 0; - } - } -`; - -type AddPosition = 'left' | 'right' | 'bottom'; -const AddJobHandle: FC<{ position: AddPosition; onClick: any }> = ({ position, onClick }) => { - return ( - <Tooltip title="Click to add a new 
job"> - <AddJobButton className={position} onClick={onButtonClick}> - <PlusCircle /> - </AddJobButton> - </Tooltip> - ); - - function onButtonClick(event: React.SyntheticEvent<any>) { - onClick && onClick(position); - - event.stopPropagation(); - } -}; - -export type AddJobPayload = { - id: string; - data: NodeData; - position: AddPosition; -}; - -const TemplateConfigNode: FC<JobNodeProps> = ({ data, id }) => { - const icon = statusIcons[data.status]; - const text = configStatusText[data.status]; - - const values = getOrInsertValueById(id); - - return ( - <StyledContainer className={classNames([data.raw.is_federated && 'federated-mark', data.mark])}> - <Handle type="target" position={Position.Top} /> - - <JobName data-secondary={Boolean(values?.name)}>{values?.name || '// 点击配置'}</JobName> - - <GridRow gap={5}> - {icon && <StatusIcon src={icon} />} - <JobStatusText>{text}</JobStatusText> - </GridRow> - - <AddJobHandle position="left" onClick={onAddJobClick} /> - <AddJobHandle position="right" onClick={onAddJobClick} /> - <AddJobHandle position="bottom" onClick={onAddJobClick} /> - - <Handle type="source" position={Position.Bottom} /> - </StyledContainer> - ); - - function onAddJobClick(position: AddPosition) { - PubSub.publish(WORKFLOW_JOB_NODE_CHANNELS.click_add_job, { - id, - data, - position, - } as AddJobPayload); - } -}; - -export default TemplateConfigNode; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/index.tsx deleted file mode 100644 index f4143b23a..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/StepTwoJobs/index.tsx +++ /dev/null @@ -1,352 +0,0 @@ -import React, { FC, useState, useRef } from 'react'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import { WORKFLOW_JOB_NODE_CHANNELS } from 'components/WorkflowJobsCanvas/JobNodes/shared'; -import { AddJobPayload } from './TemplateConfigNode'; -import WorkflowTemplateCanvas, { ExposedRef as CanVasExposedRef } from './TemplateCanvas'; -import { isNode, ReactFlowProvider } from 'react-flow-renderer'; -import { ChartNode, ChartNodeStatus } from 'components/WorkflowJobsCanvas/types'; -import { useSubscribe } from 'hooks'; -import { cloneDeep, last } from 'lodash'; -import { useToggle } from 'react-use'; -import { useRecoilState } from 'recoil'; -import { giveWeakRandomKey, to } from 'shared/helpers'; -import { templateForm } from 'stores/template'; -import { Modal, Button, message } from 'antd'; -import styled from 'styled-components'; -import { Job, JobDefinitionForm, JobDependency } from 'typings/job'; -import JobComposeDrawer, { ExposedRef as DrawerExposedRef } from './JobComposeDrawer'; -import { - getOrInsertValueById, - TPL_GLOBAL_NODE_UUID, - turnUuidDepToJobName, - upsertValue, - removeValueById, -} from '../store'; -import { Redirect, useHistory, useParams } from 'react-router'; -import { ExclamationCircle } from 'components/IconPark'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import { useTranslation } from 'react-i18next'; -import GridRow from 'components/_base/GridRow'; -import { WorkflowTemplatePayload } from 'typings/workflow'; -import { createWorkflowTemplate, updateWorkflowTemplate } from 'services/workflow'; -import { stringifyComplexDictField } from 'shared/formSchema'; - -const Container = styled.main` - height: 100%; -`; - -const ChartHeader = styled.header` - height: 48px; - padding: 13px 20px; - font-size: 14px; - 
line-height: 22px; - background-color: white; -`; -const TemplateName = styled.h3` - margin-bottom: 0; -`; -const Footer = styled.footer` - position: sticky; - bottom: 0; - z-index: 5; // just > react-flow' z-index - padding: 15px 36px; - background-color: white; -`; - -function _createASlimJobRawData({ - uuid, - dependencies, -}: { - uuid?: string; - dependencies: JobDependency[]; -}): any { - return { - uuid: uuid || giveWeakRandomKey(), - dependencies: [...dependencies], - }; -} - -const TemplateStepTowJobs: FC<{ isEdit?: boolean }> = ({ isEdit }) => { - const history = useHistory(); - const { t } = useTranslation(); - const params = useParams<{ id?: string }>(); - - const [drawerVisible, toggleDrawerVisible] = useToggle(false); - - const drawerRef = useRef<DrawerExposedRef>(); - const canvasRef = useRef<CanVasExposedRef>(); - - const [submitting, setSubmitting] = useToggle(false); - const [isGlobal, setIsGlobal] = useState(false); - const [currNode, setCurrNode] = useState<ChartNode>(); - - const [template, setTemplate] = useRecoilState(templateForm); - - useSubscribe(WORKFLOW_JOB_NODE_CHANNELS.click_add_job, (_: any, payload: AddJobPayload) => { - const nextVal = cloneDeep(template); - const jobDefs = nextVal.config.job_definitions; - - const { position, data, id } = payload; - const rows = data.rows!; - - const rowIdx = rows?.findIndex((row) => row.find((col) => col.raw.uuid === id)); - const hasRowFollowed = Boolean(rows[rowIdx + 1]); - - const uuidOfLastJobInRow = last(rows[rowIdx])!.raw.uuid; - const uuidOfHeadJobInRow = last(rows[rowIdx])!.raw.uuid; - - const leftPivotJobIdx = jobDefs.findIndex((item) => item.uuid === uuidOfHeadJobInRow); - const rightPivotJobIdx = jobDefs.findIndex((item) => item.uuid === uuidOfLastJobInRow); - - const isInsert2Left = position === 'left'; - const isInsert2Bottom = position === 'bottom'; - - const preJobs = jobDefs.slice(0, leftPivotJobIdx); - const midJobs = jobDefs.slice(leftPivotJobIdx, rightPivotJobIdx + 1); - const postJobs = jobDefs.slice(rightPivotJobIdx + 1, jobDefs.length); - - const newJobDeps: JobDependency[] = []; - const newJobUuid = giveWeakRandomKey(); - - if (isInsert2Bottom) { - const depRow = rows[rowIdx]; - newJobDeps.push(...depRow.map((col: any) => ({ source: col.raw.uuid }))); - - if (hasRowFollowed) { - const followedRow = rows[rowIdx + 1]; - - followedRow.forEach((col) => { - const def = jobDefs.find((def) => def.uuid === col.raw.uuid); - if (def) { - def.dependencies = [{ source: newJobUuid }]; - } - }); - } - } else { - const depRow = rows[rowIdx - 1]; - if (depRow && depRow.every((item) => !item.isGlobal)) { - newJobDeps.push(...depRow.map((col: any) => ({ source: col.raw.uuid }))); - } - } - - const newJob = _createASlimJobRawData({ uuid: newJobUuid, dependencies: newJobDeps }); - - // If insert to right or bottom, before should be empty - const before = [isInsert2Left && newJob].filter(Boolean); - // If insert to left, after should be empty - const after = [!isInsert2Left && newJob].filter(Boolean); - - nextVal.config.job_definitions = [...preJobs, ...before, ...midJobs, ...after, ...postJobs]; - - setTemplate(nextVal); - }); - - if (!template?.name) { - if (isEdit) { - return <Redirect to={`/workflow-templates/edit/basic/${params.id}`} />; - } - return <Redirect to={'/workflow-templates/create/basic'} />; - } - - return ( - <ErrorBoundary> - <Container> - <ChartHeader> - <TemplateName>{template.name}</TemplateName> - </ChartHeader> - - <ReactFlowProvider> - <WorkflowTemplateCanvas - ref={canvasRef as any} - 
isEdit={isEdit} - template={template} - onNodeClick={onNodeClick} - onCanvasClick={onCanvasClick} - /> - </ReactFlowProvider> - - <JobComposeDrawer - ref={drawerRef as any} - isGlobal={isGlobal} - uuid={currNode?.id} - visible={drawerVisible} - toggleVisible={toggleDrawerVisible} - onSubmit={onDrawerFormSubmit} - onClose={onCloseDrawer} - onDelete={onDeleteJob} - /> - - <Footer> - <GridRow gap="12"> - <Button type="primary" loading={submitting} onClick={onSubmitClick}> - {t('submit')} - </Button> - <Button onClick={onPrevStepClick} disabled={submitting}> - {t('previous_step')} - </Button> - <Button onClick={onCancelForkClick} disabled={submitting}> - {t('cancel')} - </Button> - </GridRow> - </Footer> - </Container> - </ErrorBoundary> - ); - - // ---------------- Methods -------------------- - - function checkIfAllJobConfigCompleted() { - const isAllCompleted = canvasRef.current?.chartInstance - .getElements() - .filter(isNode) - .every((node) => { - return node.data.status === ChartNodeStatus.Success; - }); - - return isAllCompleted; - } - function saveCurrentValues(values?: JobDefinitionForm) { - if (currNode?.id) { - upsertValue(currNode?.id, values || drawerRef.current?.getFormValues()); - } - } - async function validateCurrentForm() { - if (currNode?.id) { - const valid = await drawerRef.current?.validate(); - canvasRef.current?.updateNodeStatusById({ - id: currNode?.id, - status: valid ? ChartNodeStatus.Success : ChartNodeStatus.Warning, - }); - } - } - - // ---------------- Handlers -------------------- - - function onPrevStepClick() { - history.goBack(); - } - function onDeleteJob() { - const uuid = currNode?.id; - - if (uuid) { - const nextVal = cloneDeep(template); - const jobDefs = nextVal.config.job_definitions; - const idx = jobDefs.findIndex((def) => def.uuid === uuid); - const jobDefToRemove = jobDefs[idx]; - - for (let i = idx + 1; i < jobDefs.length; i++) { - const def = jobDefs[i]; - - if (def.dependencies.some((dep) => dep.source === uuid)) { - def.dependencies = def.dependencies - .filter((dep) => dep.source !== uuid) - .concat(jobDefToRemove.dependencies); - } - } - - nextVal.config.job_definitions = [ - ...jobDefs.slice(0, idx), - ...jobDefs.slice(idx + 1, jobDefs.length), - ]; - setTemplate(nextVal); - // Remove job from store - removeValueById(uuid); - setCurrNode(null as any); - toggleDrawerVisible(false); - } - } - function onCancelForkClick() { - Modal.confirm({ - title: t('workflow.msg_sure_2_cancel_tpl'), - icon: <ExclamationCircle />, - zIndex: Z_INDEX_GREATER_THAN_HEADER, - content: t('workflow.msg_will_drop_tpl_config'), - style: { - top: '30%', - }, - onOk() { - history.push('/workflow-templates'); - }, - }); - } - function onDrawerFormSubmit(values: JobDefinitionForm) { - saveCurrentValues(values); - - canvasRef.current?.updateNodeStatusById({ - id: currNode?.id!, - status: ChartNodeStatus.Success, - }); - - canvasRef.current?.setSelectedNodes([]); - } - async function onCloseDrawer() { - canvasRef.current?.setSelectedNodes([]); - await validateCurrentForm(); - } - async function onNodeClick(nextNode: ChartNode) { - if (nextNode.id === currNode?.id) return; - - saveCurrentValues(); - await validateCurrentForm(); - - drawerRef.current?.reset(); - - setCurrNode(nextNode); - - canvasRef.current?.updateNodeStatusById({ - id: nextNode?.id!, - status: ChartNodeStatus.Processing, - }); - - setIsGlobal(!!nextNode?.data.isGlobal); - toggleDrawerVisible(true); - } - async function onCanvasClick() { - saveCurrentValues(); - await validateCurrentForm(); - - 
drawerRef.current?.reset(); - - toggleDrawerVisible(false); - setCurrNode(null as any); - } - async function onSubmitClick() { - if (!checkIfAllJobConfigCompleted()) { - return message.warn(t('workflow.msg_config_unfinished')); - } - - toggleDrawerVisible(false); - setSubmitting(true); - - const { config, ...basics } = cloneDeep(template); - let payload: WorkflowTemplatePayload = { ...basics, config: {} as any }; - - payload.config.variables = getOrInsertValueById(TPL_GLOBAL_NODE_UUID)?.variables!; - - payload.config.job_definitions = config.job_definitions.map((item) => { - const values = getOrInsertValueById(item.uuid); - return { - ...values, - dependencies: item.dependencies.map(turnUuidDepToJobName), - } as Job; - }); - - payload.config.group_alias = basics.group_alias; - payload.config.is_left = basics.is_left || false; - - payload = stringifyComplexDictField(payload); - - const [, error] = await to( - isEdit ? updateWorkflowTemplate(params.id!, payload) : createWorkflowTemplate(payload), - ); - - if (error) { - setSubmitting(false); - return message.error(error.message); - } - message.success(isEdit ? '模板修改成功!' : '模板创建成功!'); - history.push('/workflow-templates'); - } -}; - -export default TemplateStepTowJobs; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.module.less new file mode 100644 index 000000000..ebea3e9e7 --- /dev/null +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.module.less @@ -0,0 +1,8 @@ +.step_container { + width: 350px; +} + +.form_area { + flex: 1; + margin-top: 12px; +} diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.tsx index edb06a744..8c03adff6 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/index.tsx @@ -1,25 +1,17 @@ import React, { FC, useEffect, useState } from 'react'; -import { useTranslation } from 'react-i18next'; -import { Steps, Row, Card } from 'antd'; -import styled from 'styled-components'; +import { Steps, Grid, Card } from '@arco-design/web-react'; +import styled from './index.module.less'; import { useParams, useHistory } from 'react-router-dom'; import { useUnmount } from 'react-use'; import { useResetCreateForm } from 'hooks/template'; import StepOneBasic from './StepOneBasic'; -import StepTwoJobs from './StepTwoJobs'; -import { clearMap } from './store'; import SharedPageLayout from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; +import { definitionsStore, editorInfosStore } from './stores'; +import TemplateConifg from '../TemplateConfig'; const { Step } = Steps; - -const StepContainer = styled.div` - width: 350px; -`; -const FormArea = styled.section` - flex: 1; - margin-top: 12px; -`; +const Row = Grid.Row; enum CreateSteps { basic, @@ -30,45 +22,64 @@ const TemplateForm: FC<{ isEdit?: boolean; isHydrated?: React.MutableRefObject<b isEdit, isHydrated, }) => { - const { t } = useTranslation(); const history = useHistory(); const params = useParams<{ step: keyof typeof CreateSteps }>(); - const [currentStep, setStep] = useState(CreateSteps[params.step || 'basic']); + const [currentStep, setStep] = useState(1); + const [isFormValueChanged, setIsFormValueChanged] = useState(false); + const reset = useResetCreateForm(); useEffect(() => { - 
setStep(CreateSteps[params.step || 'basic']); + setStep(params.step === 'basic' ? 1 : 2); }, [params.step]); useUnmount(() => { reset(); - clearMap(); + definitionsStore.clearMap(); + editorInfosStore.clearMap(); }); return ( <SharedPageLayout title={ - <BackButton onClick={() => history.goBack()}>{t('menu.label_workflow_tpl')}</BackButton> + <BackButton + onClick={() => history.replace(`/workflow-center/workflow-templates`)} + isShowConfirmModal={isFormValueChanged} + > + 模板管理 + </BackButton> } contentWrapByCard={false} > <Card> <Row justify="center"> - <StepContainer> + <div className={styled.step_container}> <Steps current={currentStep}> - <Step title={t('workflow.step_tpl_basic')} /> - <Step title={t('workflow.step_tpl_config')} /> + <Step title="基础信息" /> + <Step title="任务配置" /> </Steps> - </StepContainer> + </div> </Row> </Card> - <FormArea> - {params.step === 'basic' && <StepOneBasic isEdit={isEdit} isHydrated={isHydrated} />} - {params.step === 'jobs' && <StepTwoJobs isEdit={isEdit} />} - </FormArea> + <div className={styled.form_area}> + {params.step === 'basic' && ( + <StepOneBasic + isEdit={isEdit} + isHydrated={isHydrated} + onFormValueChange={onFormValueChange} + /> + )} + {params.step === 'jobs' && <TemplateConifg isEdit={isEdit} />} + </div> </SharedPageLayout> ); + + function onFormValueChange() { + if (!isFormValueChanged) { + setIsFormValueChanged(true); + } + } }; export default TemplateForm; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/store.ts b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/store.ts deleted file mode 100644 index 1ff46854e..000000000 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/store.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { clone, isEmpty, isNil } from 'lodash'; -import { giveWeakRandomKey } from 'shared/helpers'; -import { JobType, JobDefinitionForm, JobDependency } from 'typings/job'; -import { - Variable, - VariableAccessMode, - VariableComponent, - VariableValueType, -} from 'typings/variable'; - -export const TPL_GLOBAL_NODE_UUID = giveWeakRandomKey(); - -// You can note that we don't have `dependencies` field here -// since the job form doesn't decide the value, but the TemplateCanvas do -export const DEFAULT_JOB: JobDefinitionForm = { - name: '', - job_type: JobType.DATA_JOIN, - is_federated: false, - variables: [], - yaml_template: '{}', -}; - -export const DEFAULT_GLOBAL_VARS: { variables: Variable[] } = { variables: [] }; - -export const DEFAULT_VARIABLE: Variable = { - name: '', - value: '', - access_mode: VariableAccessMode.PEER_WRITABLE, - value_type: VariableValueType.STRING, - widget_schema: { - component: VariableComponent.Input, - required: true, - }, -}; - -export function turnUuidDepToJobName(dep: JobDependency): JobDependency { - return { - source: getOrInsertValueById(dep.source)?.name!, - }; -} - -export function fillEmptyWidgetSchema(variable: Variable) { - if (!variable.widget_schema || isEmpty(variable.widget_schema)) { - const copy = clone(variable); - copy.widget_schema = clone(DEFAULT_VARIABLE.widget_schema); - - return copy; - } - - return variable; -} - -// ------------------------------------- Store of job defintiions & variables --------------------------------------------- - -const storedJobNGlbalValues: Map<string, JobDefinitionForm> = new Map(); - -/** - * NOTE: will create a default job def or global vars if not exist - */ -export function getOrInsertValueById(id?: string) { - if (isNil(id)) return null; - - if (!storedJobNGlbalValues.has(id)) 
{
-    upsertValue(id, id === TPL_GLOBAL_NODE_UUID ? clone(DEFAULT_GLOBAL_VARS) : clone(DEFAULT_JOB));
-  }
-
-  return storedJobNGlbalValues.get(id)!;
-}
-
-export function removeValueById(id: string) {
-  return storedJobNGlbalValues.delete(id);
-}
-
-export function upsertValue(id: string, val: any) {
-  return storedJobNGlbalValues.set(id, val);
-}
-
-export function clearMap() {
-  return storedJobNGlbalValues.clear();
-}
-
-export default storedJobNGlbalValues;
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/stores.ts b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/stores.ts
new file mode 100644
index 000000000..17e39335f
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateForm/stores.ts
@@ -0,0 +1,165 @@
+import { clone, isEmpty, isNil, cloneDeep } from 'lodash-es';
+import { giveWeakRandomKey } from 'shared/helpers';
+import { JobType, Job, JobDependency } from 'typings/job';
+import {
+  Variable,
+  VariableAccessMode,
+  VariableComponent,
+  VariableValueType,
+} from 'typings/variable';
+import { JobSlot } from 'typings/workflow';
+import { Overwrite } from 'utility-types';
+import jobTypeToMetaDatasMap from 'jobMetaDatas';
+
+export type SlotName = string;
+export type SlotEntry = [SlotName, JobSlot];
+export type SlotEntries = [SlotName, JobSlot][];
+export type VariableDefinitionForm = Variable & { _uuid: string };
+export type JobDefinitionForm = Overwrite<
+  Omit<Job, 'dependencies'>,
+  { variables: VariableDefinitionForm[] }
+> & { _slotEntries: SlotEntries };
+export type JobDefinitionFormWithoutSlots = Omit<JobDefinitionForm, '_slotEntries'>;
+
+export const TPL_GLOBAL_NODE_SYMBOL = Symbol('Template-global-node');
+export const TPL_GLOBAL_NODE_UUID = giveWeakRandomKey(TPL_GLOBAL_NODE_SYMBOL);
+
+export const IS_DEFAULT_EASY_MODE = true;
+
+// You can note that we don't have `dependencies` field here
+// since the job form doesn't decide the value, but the TemplateCanvas do
+export const DEFAULT_JOB: JobDefinitionForm = {
+  name: '',
+  job_type: JobType.RAW_DATA,
+  is_federated: false,
+  easy_mode: IS_DEFAULT_EASY_MODE,
+  yaml_template: '{}',
+  variables: [],
+  _slotEntries: [],
+};
+
+export const DEFAULT_GLOBAL_VARS: { variables: VariableDefinitionForm[] } = { variables: [] };
+
+export const DEFAULT_VARIABLE: VariableDefinitionForm = {
+  _uuid: '',
+  name: '',
+  value: '',
+  tag: '',
+  access_mode: VariableAccessMode.PEER_WRITABLE,
+  value_type: VariableValueType.STRING,
+  widget_schema: {
+    component: VariableComponent.Input,
+    required: true,
+  },
+};
+
+export function giveDefaultVariable() {
+  const newVar = cloneDeep(DEFAULT_VARIABLE);
+  newVar._uuid = giveWeakRandomKey();
+  return newVar;
+}
+
+/**
+ * Create a store that contains a map with a <tpl-canvas-node-uuid, any values> struct,
+ * plus some helpers to control the map
+ */
+class TplNodeToAnyResourceStore<ResourceT> {
+  map = new Map<string, ResourceT>();
+
+  defaultResource: (id: string) => ResourceT;
+
+  constructor(options: { defaultResource: (id: string) => ResourceT }) {
+    this.defaultResource = options.defaultResource;
+  }
+
+  get entries() {
+    return Array.from(this.map.entries());
+  }
+
+  get size() {
+    return this.map.size;
+  }
+
+  getValueById(id: string) {
+    if (isNil(id)) return undefined;
+
+    return this.map.get(id)!;
+  }
+
+  insertNewResource(id: string) {
+    const newResrc = this.defaultResource(id);
+    this.upsertValue(id, newResrc);
+
+    return newResrc;
+  }
+
+  removeValueById(id: string) {
+    return this.map.delete(id);
+  }
+
+  upsertValue(id: string, val: ResourceT) {
+    return this.map.set(id, val);
+  }
+
+  clearMap() {
+    return this.map.clear();
+  }
+}
+
+/** Store of job definitions & variables */
+export const definitionsStore = new TplNodeToAnyResourceStore<JobDefinitionFormWithoutSlots>({
+  defaultResource(id: string) {
+    return id === TPL_GLOBAL_NODE_UUID
+      ? (clone(DEFAULT_GLOBAL_VARS) as JobDefinitionForm)
+      : clone(DEFAULT_JOB);
+  },
+});
+
+(window as any).definitionsStore = definitionsStore;
+
+export function mapUuidDepToJobName(dep: JobDependency): JobDependency {
+  return {
+    source: definitionsStore.getValueById(dep.source)?.name!,
+  };
+}
+
+/**
+ * 1. Fill empty widget_schema
+ * 2. Add _uuid to each variable
+ */
+export function preprocessVariables(variable: Variable): VariableDefinitionForm {
+  const copy = clone(variable) as VariableDefinitionForm;
+
+  if (!variable.widget_schema || isEmpty(variable.widget_schema)) {
+    copy.widget_schema = clone(DEFAULT_VARIABLE.widget_schema);
+  }
+
+  copy._uuid = giveWeakRandomKey();
+
+  return copy;
+}
+
+/** Store of job editor infos */
+export const editorInfosStore = new TplNodeToAnyResourceStore<{
+  slotEntries: SlotEntries;
+  meta_yaml: string;
+}>({
+  defaultResource(id: string) {
+    if (id === TPL_GLOBAL_NODE_UUID) {
+      return null as never;
+    }
+
+    const jobType = definitionsStore.getValueById(id)?.job_type;
+
+    if (!jobType) return null as never;
+
+    const jobMetaData = jobTypeToMetaDatasMap.get(jobType);
+
+    if (!jobMetaData) return null as never;
+
+    return {
+      slotEntries: Object.entries(jobMetaData.slots),
+      meta_yaml: jobMetaData.metaYamlString,
+    };
+  },
+});
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/CopyFormModal.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/CopyFormModal.tsx
new file mode 100644
index 000000000..cce001630
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/CopyFormModal.tsx
@@ -0,0 +1,122 @@
+import React, { FC, useEffect, useState } from 'react';
+import { useTranslation } from 'react-i18next';
+import { Modal, Form, Input, Button, Message } from '@arco-design/web-react';
+import {
+  createTemplateRevision,
+  createWorkflowTemplate,
+  fetchTemplateById,
+} from 'services/workflow';
+import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm';
+import { WorkflowTemplate } from 'typings/workflow';
+import { validNamePattern } from 'shared/validator';
+import { to } from 'shared/helpers';
+
+export interface Props {
+  visible: boolean;
+  initialValues?: any;
+  selectedWorkflowTemplate?: WorkflowTemplate;
+  onSuccess?: () => void;
+  onFail?: () => void;
+  onCancel?: () => void;
+}
+
+const CopyFormModal: FC<Props> = ({
+  selectedWorkflowTemplate,
+  visible,
+  onSuccess,
+  onFail,
+  onCancel,
+  initialValues,
+}) => {
+  const { t } = useTranslation();
+
+  const [isLoading, setIsLoading] = useState(false);
+
+  const [formInstance] = Form.useForm<any>();
+
+  useEffect(() => {
+    if (visible && initialValues && formInstance) {
+      formInstance.setFieldsValue({
+        ...initialValues,
+      });
+    }
+  }, [visible, initialValues, formInstance]);
+
+  return (
+    <Modal
+      title={t('workflow.title_copy_template')}
+      visible={visible}
+      maskClosable={false}
+      maskStyle={{ backdropFilter: 'blur(4px)' }}
+      afterClose={afterClose}
+      onCancel={onCancel}
+      onOk={onSubmit}
+      footer={[
+        <ButtonWithPopconfirm buttonText={t('cancel')} onConfirm={onCancel} />,
+        <Button type="primary" htmlType="submit" loading={isLoading} onClick={onSubmit}>
+          {t('confirm')}
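A minimal usage sketch of the uuid-keyed stores added in `stores.ts` above, assuming a hypothetical canvas-node uuid (real ones come from `giveWeakRandomKey()`):

```ts
import {
  definitionsStore,
  editorInfosStore,
} from 'views/WorkflowTemplates/TemplateForm/stores';

// Hypothetical canvas-node uuid, for illustration only.
const nodeUuid = 'node-uuid-123';

// Lazily create a default job definition for the node, then read it back.
definitionsStore.insertNewResource(nodeUuid);
const jobDef = definitionsStore.getValueById(nodeUuid);

// editorInfosStore derives its default resource from the job_type already
// recorded in definitionsStore, so the definition must be inserted first.
const editorInfo = editorInfosStore.insertNewResource(nodeUuid);

// TemplateForm clears both maps on unmount (see the useUnmount call above).
definitionsStore.clearMap();
editorInfosStore.clearMap();
```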
</Button>, + ]} + > + <Form + labelCol={{ span: 6 }} + wrapperCol={{ span: 16 }} + style={{ width: '500px' }} + colon={true} + form={formInstance} + > + <Form.Item + field="name" + label={t('workflow.title_template_name')} + rules={[ + { required: true }, + { match: validNamePattern, message: t('valid_error.name_invalid') }, + ]} + > + <Input /> + </Form.Item> + </Form> + </Modal> + ); + + async function onSubmit() { + const templateName = formInstance.getFieldValue('name'); + if (!selectedWorkflowTemplate) { + return; + } + + const { id } = selectedWorkflowTemplate!; + setIsLoading(true); + const [res, err] = await to(fetchTemplateById(id)); + + if (err) { + setIsLoading(false); + onFail?.(); + return Message.error(t('workflow.msg_get_tpl_detail_failed')); + } + + const newTplPayload: WorkflowTemplate = res.data; + newTplPayload.name = templateName; + newTplPayload.kind = 0; + const [resp, error] = await to(createWorkflowTemplate(newTplPayload)); + + if (error) { + setIsLoading(false); + onFail?.(); + return Message.error(error.message); + } + + await to(createTemplateRevision(resp.data.id)); + + Message.success(t('app.copy_success')); + setIsLoading(false); + onSuccess?.(); + } + + function afterClose() { + // Clear all fields + formInstance.resetFields(); + } +}; + +export default CopyFormModal; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/TemplateUploadDialog.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/TemplateUploadDialog.tsx index fc039c515..2ef7920e3 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/TemplateUploadDialog.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/TemplateUploadDialog.tsx @@ -1,82 +1,95 @@ -import React, { FC, useCallback } from 'react'; -import { Form, Input, message, Modal } from 'antd'; +import React, { FC, useCallback, useState } from 'react'; +import { Message, Button, Input, Modal, Form } from '@arco-design/web-react'; import ReadFile from 'components/ReadFile'; -import { useTranslation } from 'react-i18next'; import { CreateTemplateForm } from 'stores/workflow'; import { createWorkflowTemplate } from 'services/workflow'; import { to } from 'shared/helpers'; import { removePrivate } from 'shared/object'; import { readAsJSONFromFile } from 'shared/file'; -import { WorkflowTemplatePayload, WorkflowTemplate } from 'typings/workflow'; +import { + WorkflowTemplatePayload, + WorkflowTemplate, + WorkflowTemplateEditInfo, +} from 'typings/workflow'; import { stringifyComplexDictField } from 'shared/formSchema'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import i18n from 'i18n'; import { useToggle } from 'react-use'; import { useHistory } from 'react-router'; import { forceToRefreshQuery } from 'shared/queryClient'; import { TPL_LIST_QUERY_KEY } from '.'; +import i18n from 'i18n'; +import ButtonWithPopconfirm from 'components/ButtonWithPopconfirm'; const CreateTemplate: FC = () => { - const { t } = useTranslation(); const history = useHistory(); const [formInstance] = Form.useForm<CreateTemplateForm>(); const [visible, toggleVisible] = useToggle(true); - + const [editorInfo, setEditorInfo] = useState<WorkflowTemplateEditInfo>(); const createNewTpl = useCallback(async () => { - const [values, validError] = await to(formInstance.validateFields()); + const [values, validError] = await to(formInstance.validate()); if (validError) { return; } + // Get editor_info from uploaded template. 
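Condensed, the copy flow `CopyFormModal.onSubmit` implements above looks like this (a sketch, not the component code; `to` wraps a promise into an `[result, error]` tuple):

```ts
import { to } from 'shared/helpers';
import {
  createTemplateRevision,
  createWorkflowTemplate,
  fetchTemplateById,
} from 'services/workflow';
import { WorkflowTemplate } from 'typings/workflow';

// Fetch the source template, rename it, force kind = 0 ("my templates"),
// create the copy, then snapshot an initial revision for it.
async function copyTemplate(sourceId: number, newName: string): Promise<void> {
  const [res, fetchErr] = await to(fetchTemplateById(sourceId));
  if (fetchErr) throw fetchErr;

  const payload: WorkflowTemplate = res.data;
  payload.name = newName;
  payload.kind = 0;

  const [created, createErr] = await to(createWorkflowTemplate(payload));
  if (createErr) throw createErr;

  // Best-effort, mirroring the component: errors from this call are ignored.
  await to(createTemplateRevision(created.data.id));
}
```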
+ values.editor_info = editorInfo; + const payload = stringifyComplexDictField(removePrivate(values) as WorkflowTemplatePayload); const [, error] = await to(createWorkflowTemplate(payload)); if (error) { - return message.error(error.message); + return Message.error(error.message); } forceToRefreshQuery(TPL_LIST_QUERY_KEY); toggleVisible(false); - }, [formInstance, toggleVisible]); + }, [formInstance, toggleVisible, editorInfo]); return ( <Modal - title={t('workflow.btn_upload_tpl')} + title="上传模板" visible={visible} - style={{ top: '20%' }} + style={{ width: '600px' }} maskStyle={{ backdropFilter: 'blur(4px)' }} - width="600px" - closable={false} + closable={true} maskClosable={false} - keyboard={false} afterClose={afterClose} - getContainer="body" - zIndex={Z_INDEX_GREATER_THAN_HEADER} - onCancel={() => toggleVisible(false)} + onCancel={onCancel} onOk={onSubmit} + footer={[ + <ButtonWithPopconfirm key="back" buttonText={i18n.t('cancel')} onConfirm={onCancel} />, + <Button key="submit" type="primary" onClick={onSubmit}> + {i18n.t('submit')} + </Button>, + ]} > - <Form labelCol={{ span: 6 }} wrapperCol={{ span: 16 }} form={formInstance} labelAlign="left"> + <Form + labelCol={{ span: 6 }} + wrapperCol={{ span: 16 }} + colon={true} + form={formInstance} + labelAlign="left" + > <Form.Item - name="name" - label={t('workflow.label_new_template_name')} - rules={[{ required: true, message: t('workflow.msg_tpl_name_required') }]} + field="name" + label="模板名称" + rules={[{ required: true, message: '请输入模板名!' }]} > - <Input placeholder={t('workflow.placeholder_template_name')} /> + <Input placeholder="请输入模板名称" /> </Form.Item> <Form.Item - name="config" - label={t('workflow.label_upload_template')} - rules={[{ required: true, message: t('workflow.msg_tpl_file_required') }]} + field="config" + label="上传模板文件" + rules={[{ required: true, message: '请选择一个合适的模板文件!' 
}]}
          >
            <ReadFile accept=".json" reader={readConfig} maxSize={20} />
          </Form.Item>

-        <Form.Item name="comment" label={t('workflow.label_template_comment')}>
-          <Input.TextArea rows={4} placeholder={t('workflow.placeholder_comment')} />
+        <Form.Item field="comment" label="工作流模板描述">
+          <Input.TextArea rows={4} placeholder="请输入工作流模板描述" />
        </Form.Item>
      </Form>
    </Modal>
@@ -85,31 +98,31 @@ const CreateTemplate: FC = () => {
  async function onSubmit() {
    createNewTpl();
  }
+  function onCancel() {
+    toggleVisible(false);
+  }

  function afterClose() {
-    history.push('/workflow-templates');
+    history.push(`/workflow-center/workflow-templates`);
  }

  async function readConfig(file: File) {
    try {
      const template = await readAsJSONFromFile<WorkflowTemplate>(file);
      if (!template.config) {
-        message.error(i18n.t('workflow.msg_tpl_config_missing'));
+        Message.error('模板格式错误,缺少 config 字段!');
        return;
      }
-      const { config } = template;
+      const { config, editor_info } = template;
      if (!config.group_alias) {
-        message.error(i18n.t('workflow.msg_tpl_alias_missing'));
-        return;
-      }
-      if (config.is_left === undefined) {
-        message.error(i18n.t('workflow.msg_tpl_is_left_missing'));
+        Message.error('模板格式错误,缺少 config.group_alias 字段!');
        return;
      }
+      setEditorInfo(editor_info);
      return config;
    } catch (error) {
-      message.error(error.message);
+      Message.error(error.message);
    }
  }
};
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.module.less b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.module.less
new file mode 100644
index 000000000..cf7484f47
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.module.less
@@ -0,0 +1,13 @@
+.list_container {
+  display: flex;
+  flex: 1;
+  width: 100%;
+}
+
+.upload_menu_item {
+  width: 150px;
+}
+
+.template_name {
+  font-size: 12px;
+}
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.tsx
index 6915b50b9..3169200b2 100644
--- a/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.tsx
+++ b/web_console_v2/client/src/views/WorkflowTemplates/TemplateList/index.tsx
@@ -1,246 +1,377 @@
 import React, { FC, useMemo, useState } from 'react';
+import { generatePath, Link, Route, useHistory } from 'react-router-dom';
+import styled from './index.module.less';
 import { useTranslation } from 'react-i18next';
 import { useQuery } from 'react-query';
-import { Link, Route, useHistory } from 'react-router-dom';
+
+import request from 'libs/request';
 import {
-  createWorkflowTemplate,
   deleteTemplate,
-  fetchTemplateById,
   fetchWorkflowTemplateList,
   getTemplateDownloadHref,
 } from 'services/workflow';
-import styled from 'styled-components';
-import SharedPageLayout from 'components/SharedPageLayout';
-import NoResult from 'components/NoResult';
-import {
-  Col,
-  Input,
-  Row,
-  Table,
-  Form,
-  Button,
-  Tag,
-  Popconfirm,
-  message,
-  Dropdown,
-  Menu,
-} from 'antd';
-import { WorkflowTemplate, WorkflowTemplatePayload } from 'typings/workflow';
-import GridRow from 'components/_base/GridRow';
-import { to } from 'shared/helpers';
-import { useToggle } from 'react-use';
-import { forceToRefreshQuery } from 'shared/queryClient';
-import { CloudUploadOutlined } from '@ant-design/icons';
-import TemplateUploadDialog from './TemplateUploadDialog';
-import request from 'libs/request';
-import { saveBlob } from 'shared/helpers';
-const ListContainer = styled.div`
-  display: flex;
-  flex: 1;
-  width: 100%;
-`;
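For reference, the shape `readConfig` above expects from an uploaded template file, sketched as a type (only the fields the dialog actually checks or forwards are listed):

```ts
import { WorkflowConfig, WorkflowTemplateEditInfo } from 'typings/workflow';

// Sketch: `config` must exist and carry a group_alias, otherwise the upload
// is rejected; `editor_info` is optional and, when present, is stashed via
// setEditorInfo and merged into the create payload on submit.
interface UploadedTemplateFile {
  config: WorkflowConfig & { group_alias: string };
  editor_info?: WorkflowTemplateEditInfo;
}
```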
-const UploadMenuItem = styled(Menu.Item)` - width: 150; - padding: 10px 15px; -`; -const TemplateName = styled(Link)` - font-size: 16px; -`; +import { useIsAdminRole } from 'hooks/user'; +import { useUrlState } from 'hooks'; +import { saveBlob } from 'shared/helpers'; +import { TIME_INTERVAL } from 'shared/constants'; +import { useGetIsCanEditTemplate } from 'views/WorkflowTemplates/shared'; +import routeMaps, { WorkflowTemplateDetailTab } from '../routes'; -export const TPL_LIST_QUERY_KEY = 'fetchTemplateList'; +import { Table, Grid, Input, Tag, Message, Dropdown, Menu, Tabs } from '@arco-design/web-react'; +import { IconUpload } from '@arco-design/web-react/icon'; +import GridRow from 'components/_base/GridRow'; +import NoResult from 'components/NoResult'; +import SharedPageLayout, { RemovePadding } from 'components/SharedPageLayout'; +import MoreActions, { ActionItem } from 'components/MoreActions'; +import Modal from 'components/Modal'; +import TemplateUploadDialog from './TemplateUploadDialog'; +import CopyFormModal from './CopyFormModal'; -const DownloadTemplate: FC<{ template: WorkflowTemplate }> = ({ template: { id, name } }) => { - const { t } = useTranslation(); +import { WorkflowTemplate, WorkflowTemplateMenuType } from 'typings/workflow'; +import { FilterOp } from 'typings/filter'; +import { constructExpressionTree, expression2Filter } from 'shared/filter'; - return ( - <Button type="link" size="small" onClick={onClick}> - {t('workflow.action_download')} - </Button> - ); +const Row = Grid.Row; +const Col = Grid.Col; - async function onClick() { - try { - const blob = await request(getTemplateDownloadHref(id), { - responseType: 'blob', - }); - saveBlob(blob, `${name}.json`); - } catch (error) { - message.error(error.message); - } - } +type QueryParams = { + name?: string; + kind?: string; }; -const DuplicateTemplate: FC<{ template: WorkflowTemplate }> = ({ template: { id } }) => { - const { t } = useTranslation(); - - const [loading, setLoading] = useToggle(false); - - return ( - <Button type="link" size="small" loading={loading} onClick={onClick}> - {t('workflow.action_fork')} - </Button> - ); - - async function onClick() { - setLoading(true); - const [res, err] = await to(fetchTemplateById(id)); - setLoading(false); - - if (err) { - return message.error(t('workflow.msg_get_tpl_detail_failed')); - } - - const newTplPayload: WorkflowTemplatePayload = res.data; - newTplPayload.name = res.data.name + '-copy'; - - const [, error] = await to(createWorkflowTemplate(newTplPayload)); - - if (error) { - return message.error(error.message); - } - - forceToRefreshQuery(TPL_LIST_QUERY_KEY); - } +export const TPL_LIST_QUERY_KEY = 'fetchTemplateList'; +export const KIND_VALUE_MAPPER: Record<WorkflowTemplateMenuType, number> = { + [WorkflowTemplateMenuType.MY]: 0, + [WorkflowTemplateMenuType.BUILT_IN]: 1, + [WorkflowTemplateMenuType.PARTICIPANT]: 2, }; const TemplateList: FC = () => { const { t } = useTranslation(); const history = useHistory(); - const [form] = Form.useForm(); - const [params, setParams] = useState({ keyword: '' }); + const [isShowCopyFormModal, setIsShowCopyFormModal] = useState(false); + const [selectedTemplate, setSelectedTemplate] = useState<WorkflowTemplate>(); - const listQ = useQuery(TPL_LIST_QUERY_KEY, () => fetchWorkflowTemplateList(), { - refetchOnWindowFocus: false, + const isAdminRole = useIsAdminRole(); + const [urlState, setUrlState] = useUrlState({ + page: 1, + pageSize: 10, + filter: initFilter(), + tab: WorkflowTemplateMenuType.MY, }); - const listData = 
listQ.data?.data; - const isEmpty = listData?.length === 0; + const initFilterParams = expression2Filter(urlState.filter); + const [filterParams, setFilterParams] = useState<QueryParams>({ + name: initFilterParams.name || '', + kind: initFilterParams.kind || urlState.tab, + }); + const { isCanEdit } = useGetIsCanEditTemplate(urlState.tab === WorkflowTemplateMenuType.BUILT_IN); + + const listQ = useQuery( + [TPL_LIST_QUERY_KEY, urlState], + () => + fetchWorkflowTemplateList({ + page: urlState.page, + pageSize: urlState.pageSize, + filter: urlState.filter, + }), + { + refetchOnWindowFocus: false, + keepPreviousData: true, + refetchInterval: TIME_INTERVAL.LIST, + }, + ); + + // Filter the display list by the search string + const templateListShow = useMemo(() => { + if (!listQ.data?.data) { + return []; + } + const templateList = listQ.data.data || []; + return templateList; + }, [listQ.data]); const columns = useMemo( - () => [ - { - title: t('workflow.col_tpl_name'), - dataIndex: 'name', - name: 'name', - render: (name: string, record: WorkflowTemplate) => ( - <TemplateName to={`/workflow-templates/edit/basic/${record.id}`}>{name}</TemplateName> - ), - }, - { - title: t('workflow.col_group_alias'), - dataIndex: 'group_alias', - name: 'group_alias', - }, - { - title: t('workflow.label_is_left'), - dataIndex: 'is_left', - name: 'is_left', - render: (isLeft: string) => ( - <Tag color={isLeft ? 'green' : 'warning'}>{String(isLeft)}</Tag> - ), - }, - { - title: t('workflow.col_actions'), - dataIndex: 'operation', - name: 'operation', - render: (_: any, record: WorkflowTemplate) => { - return ( - <GridRow left="-10" gap="8"> - <DownloadTemplate template={record} /> - - <DuplicateTemplate template={record} /> - - <Button size="small" type="link"> - <Link to={`/workflow-templates/edit/basic/${record.id}`}>{t('edit')}</Link> - </Button> - - <Popconfirm - title={t('workflow.msg_sure_to_delete')} - onConfirm={() => onDeleteConfirm(record)} - > - <Button size="small" type="link" danger> - {t('delete')} - </Button> - </Popconfirm> + () => { + const tempColumns = [ + { + title: '模板名', + dataIndex: 'name', + name: 'name', + render: (name: string, record: WorkflowTemplate) => ( + <GridRow gap={8}> + <Link className={styled.template_name} to={gotoTemplateDetail(record)}> + {name} + </Link> + {record.is_local && <Tag color={'cyan'}>单侧</Tag>} </GridRow> - ); + ), + }, + { + title: 'Group 别名', + dataIndex: 'group_alias', + name: 'group_alias', + }, + { + title: '创建方', + dataIndex: 'coordinator_pure_domain_name', + name: 'coordinator_pure_domain_name', + render: (value: any) => <span>{value || '-'}</span>, }, - }, - ], + { + title: '操作', + dataIndex: 'operation', + name: 'operation', + width: 200, + render: (_: any, record: WorkflowTemplate) => { + const isInBuiltInTab = urlState.tab === WorkflowTemplateMenuType.BUILT_IN; + + const isShowEditButton = + isCanEdit && urlState.tab !== WorkflowTemplateMenuType.PARTICIPANT; + + const actionList = [ + { + label: '下载', + onClick: async () => { + const { id, name } = record; + try { + const blob = await request(getTemplateDownloadHref(id), { + responseType: 'blob', + }); + saveBlob(blob, `${name}.json`); + } catch (error: any) { + Message.error(error.message); + } + }, + }, + !isInBuiltInTab && { + label: '复制', + onClick: () => { + setIsShowCopyFormModal((prevState) => true); + setSelectedTemplate((prevState) => record); + }, + }, + !isInBuiltInTab && { + label: '删除', + onClick: () => { + Modal.delete({ + title: `确认删除${record.name || ''}吗?`, + content: 
'删除后,该模板将无法进行操作,请谨慎删除', + onOk() { + deleteTemplate(record.id) + .then(() => { + Message.success('删除成功'); + listQ.refetch(); + }) + .catch((error) => { + Message.error(error.message); + }); + }, + }); + }, + danger: true, + }, + ].filter(Boolean) as ActionItem[]; + + return ( + <GridRow left={isShowEditButton ? '-10' : '0'} gap="8"> + {isShowEditButton && ( + <Link to={`/workflow-center/workflow-templates/edit/basic/${record.id}`}> + 编辑 + </Link> + )} + <MoreActions actionList={actionList} /> + </GridRow> + ); + }, + }, + ]; + if (urlState.tab !== WorkflowTemplateMenuType.PARTICIPANT) { + tempColumns.splice(2, 1); + } + return tempColumns; + }, + // eslint-disable-next-line react-hooks/exhaustive-deps - [t], + [listQ, isAdminRole, urlState.tab, isCanEdit], ); + const isEmpty = templateListShow.length === 0; + const isInBuiltInTab = urlState.tab !== WorkflowTemplateMenuType.MY; + return ( <> - <Route path="/workflow-templates/upload" exact component={TemplateUploadDialog} /> - - <SharedPageLayout title={t('menu.label_workflow_tpl')} tip="This feature is experimental"> - <Row gutter={16} justify="space-between" align="middle"> - <Col> - <Dropdown.Button - placement="bottomCenter" - overlay={ - <Menu> - <UploadMenuItem key="1" icon={<CloudUploadOutlined />} onClick={onUploadClick}> - {t('workflow.btn_upload_tpl')} - </UploadMenuItem> - </Menu> - } - size="large" - type="primary" - onClick={goCreate} - > - {t('workflow.create_tpl')} - </Dropdown.Button> + <Route + path={`/workflow-center/workflow-templates/upload`} + exact + component={TemplateUploadDialog} + /> + + <SharedPageLayout + title="模板管理" + tip="创建工作流时需要依赖于模板,模板用于定义工作流流程与参数。" + > + <RemovePadding style={{ height: 46 }}> + <Tabs activeTab={urlState.tab} onChange={onTabChange}> + <Tabs.TabPane + title={t('workflow.label_tab_workflow_tpl_my')} + key={WorkflowTemplateMenuType.MY} + /> + <Tabs.TabPane + title={t('workflow.label_tab_workflow_tpl_built_in')} + key={WorkflowTemplateMenuType.BUILT_IN} + /> + <Tabs.TabPane title={'合作伙伴模版'} key={WorkflowTemplateMenuType.PARTICIPANT} /> + </Tabs> + </RemovePadding> + <Row justify="space-between" align="center"> + <Col flex={6}> + {!isInBuiltInTab && ( + <Dropdown.Button + className="custom-operation-button" + type="primary" + position="bottom" + droplist={ + <Menu> + <Menu.Item className={styled.upload_menu_item} key="1" onClick={onUploadClick}> + <IconUpload /> + 上传模板 + </Menu.Item> + </Menu> + } + onClick={goCreate} + > + 创建模板 + </Dropdown.Button> + )} </Col> - <Col> - <Form initialValues={{ ...params }} layout="inline" form={form} onFinish={onSearch}> - <Form.Item name="keyword"> - <Input.Search - placeholder={t('dataset.placeholder_name_searchbox')} - onPressEnter={form.submit} - /> - </Form.Item> - </Form> + <Col flex="200px"> + <Input.Search + className="custom-input" + defaultValue={filterParams.name} + placeholder="请输入模板名搜索" + onSearch={onSearch} + allowClear + /> </Col> </Row> - - <ListContainer> + <div className={styled.list_container}> {isEmpty ? ( - <NoResult text={t('workflow.no_tpl')} to="/workflow-templates/create/basic" /> + <NoResult text="暂无工作流模板" to="/workflow-center/workflow-templates/create/basic" /> ) : ( <Table + className="custom-table custom-table-left-side-filter" loading={listQ.isFetching} - dataSource={listData} + data={templateListShow} columns={columns} scroll={{ x: '100%' }} - rowKey="name" + rowKey="id" + pagination={{ + total: listQ.data?.page_meta?.total_items ?? 
undefined, + current: Number(urlState.page), + pageSize: Number(urlState.pageSize), + onChange: onPageChange, + showTotal: (total: number) => `共 ${total} 条记录`, + }} /> )} - </ListContainer> + </div> + <CopyFormModal + selectedWorkflowTemplate={selectedTemplate} + initialValues={{ + name: selectedTemplate ? `${selectedTemplate.name}${t('workflow.copy')}` : undefined, + }} + visible={isShowCopyFormModal} + onSuccess={onCopyFormModalSuccess} + onCancel={onCopyFormModalClose} + /> </SharedPageLayout> </> ); function goCreate() { - history.push('/workflow-templates/create/basic'); + history.push('/workflow-center/workflow-templates/create/basic'); } - function onSearch(values: any) { - setParams(values); + function onSearch(val: any) { + setFilterParams({ + name: val, + }); + setUrlState((prevState) => ({ + ...prevState, + page: 1, + })); + constructFilterArray({ ...filterParams, name: val }); } - async function onDeleteConfirm(record: WorkflowTemplate) { - const [, error] = await to(deleteTemplate(record.id)); - - if (error) { - return message.error(error.message); + function onPageChange(page: number, pageSize: number | undefined) { + setUrlState((prevState) => ({ + ...prevState, + page, + pageSize, + })); + } + function onUploadClick() { + history.push(`/workflow-center/workflow-templates/upload`); + } + function onCopyFormModalSuccess() { + setIsShowCopyFormModal((prevState) => false); + listQ.refetch(); + } + function onCopyFormModalClose() { + setIsShowCopyFormModal((prevState) => false); + setSelectedTemplate((prevState) => undefined); + } + function onTabChange(val: string) { + setUrlState((prevState) => ({ + ...prevState, + tab: val, + })); + constructFilterArray({ ...filterParams, kind: val }); + } + function gotoTemplateDetail(record: any) { + return generatePath(routeMaps.WorkflowTemplateDetail, { + id: record.id, + tab: 'config' as WorkflowTemplateDetailTab, + templateType: urlState.tab, + }); + } + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + if (value.kind) { + expressionNodes.push({ + field: 'kind', + op: FilterOp.EQUAL, + number_value: KIND_VALUE_MAPPER?.[value.kind as WorkflowTemplateMenuType], + }); + } + if (value.name) { + expressionNodes.push({ + field: 'name', + op: FilterOp.CONTAIN, + string_value: value.name, + }); } - listQ.refetch(); + const serialization = constructExpressionTree(expressionNodes); + setFilterParams({ + name: value.name, + kind: value.kind, + }); + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + tab: value.kind, + page: 1, + })); } - async function onUploadClick() { - history.push('/workflow-templates/upload'); + function initFilter() { + const expressionNodes = []; + expressionNodes.push({ + field: 'kind', + op: FilterOp.EQUAL, + number_value: 0, + }); + return constructExpressionTree(expressionNodes); } }; diff --git a/web_console_v2/client/src/views/WorkflowTemplates/index.tsx b/web_console_v2/client/src/views/WorkflowTemplates/index.tsx index 7cf7a707f..de175effa 100644 --- a/web_console_v2/client/src/views/WorkflowTemplates/index.tsx +++ b/web_console_v2/client/src/views/WorkflowTemplates/index.tsx @@ -1,18 +1,24 @@ import React from 'react'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import ErrorBoundary from 'components/ErrorBoundary'; import { Route } from 'react-router-dom'; import TemplateList from './TemplateList'; import CreateTemplate from './CreateTemplate'; import EditTemplate from './EditTemplate'; +import TemplateDetail from './TemplateDetail'; +import 
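The list page above serializes its filters into an expression tree kept in the URL; a sketch of the round trip (the template name is a made-up example):

```ts
import { constructExpressionTree, expression2Filter } from 'shared/filter';
import { FilterOp } from 'typings/filter';

// kind is compared numerically (see KIND_VALUE_MAPPER above: my = 0,
// built-in = 1, participant = 2); name uses a substring match.
const filter = constructExpressionTree([
  { field: 'kind', op: FilterOp.EQUAL, number_value: 0 },
  { field: 'name', op: FilterOp.CONTAIN, string_value: 'my-template' },
]);

// On reload, expression2Filter parses the URL value back into plain fields.
const { name, kind } = expression2Filter(filter);
```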
routesMap from './routes';

 function WorkflowsPage() {
   return (
     <ErrorBoundary>
-      <Route path="/workflow-templates" exact component={TemplateList} />
-      <Route path="/workflow-templates/upload" exact component={TemplateList} />
-      <Route path="/workflow-templates/create/:step" component={CreateTemplate} />
-      <Route path="/workflow-templates/edit/:step/:id" component={EditTemplate} />
+      <Route path="/workflow-center/workflow-templates" exact component={TemplateList} />
+      <Route path="/workflow-center/workflow-templates/upload" exact component={TemplateList} />
+      <Route path="/workflow-center/workflow-templates/create/:step" component={CreateTemplate} />
+      <Route
+        path="/workflow-center/workflow-templates/edit/:step/:id/:revision_id?"
+        component={EditTemplate}
+      />
+      <Route path={routesMap.WorkflowTemplateDetail} exact component={TemplateDetail} />
     </ErrorBoundary>
   );
 }
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/routes.tsx b/web_console_v2/client/src/views/WorkflowTemplates/routes.tsx
new file mode 100644
index 000000000..039ca9548
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/routes.tsx
@@ -0,0 +1,21 @@
+import { WorkflowTemplateMenuType } from 'typings/workflow';
+
+const INDEX_PATH = '/workflow-center';
+const WorkflowTemplate = `${INDEX_PATH}/workflow-templates`;
+
+const routes: Record<string, string> = {
+  WorkflowTemplateDetail: `${WorkflowTemplate}/detail/:id/:tab(config|list)/:templateType?`,
+};
+
+export default routes;
+
+export enum WorkflowTemplateDetailTab {
+  Config = 'config',
+  List = 'list',
+}
+
+export interface WorkflowTemplateDetailParams {
+  tab: WorkflowTemplateDetailTab;
+  id: string;
+  templateType: WorkflowTemplateMenuType;
+}
diff --git a/web_console_v2/client/src/views/WorkflowTemplates/shared.ts b/web_console_v2/client/src/views/WorkflowTemplates/shared.ts
new file mode 100644
index 000000000..2a33c2b3d
--- /dev/null
+++ b/web_console_v2/client/src/views/WorkflowTemplates/shared.ts
@@ -0,0 +1,28 @@
+/* istanbul ignore file */
+
+import { useRecoilValue } from 'recoil';
+import i18n from 'i18n';
+
+import { appFlag } from 'stores/app';
+import { useIsAdminRole } from 'hooks/user';
+
+import { FlagKey } from 'typings/flag';
+
+export function useGetIsCanEditTemplate(isPresetTemplate = false) {
+  const appFlagValue = useRecoilValue(appFlag);
+  const isAdminRole = useIsAdminRole();
+
+  const isCanEdit =
+    !isPresetTemplate ||
+    (isPresetTemplate &&
+      isAdminRole &&
+      Boolean(appFlagValue[FlagKey.PRESET_TEMPLATE_EDIT_ENABLED]));
+
+  const tip = isCanEdit
+    ? ''
+    : !appFlagValue[FlagKey.PRESET_TEMPLATE_EDIT_ENABLED]
+    ?
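Filling the parameterized detail route from `routes.tsx` above, as `gotoTemplateDetail` does; a sketch with made-up values (the exact string value of `WorkflowTemplateMenuType.MY` is assumed here):

```ts
import { generatePath } from 'react-router-dom';
import routes, { WorkflowTemplateDetailTab } from 'views/WorkflowTemplates/routes';
import { WorkflowTemplateMenuType } from 'typings/workflow';

// `:tab(config|list)` only matches those two literals; `:templateType?`
// is optional, so it may be omitted.
const url = generatePath(routes.WorkflowTemplateDetail, {
  id: 42, // hypothetical template id
  tab: WorkflowTemplateDetailTab.Config,
  templateType: WorkflowTemplateMenuType.MY,
});
// e.g. /workflow-center/workflow-templates/detail/42/config/my
```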
i18n.t('workflow.msg_can_not_edit_preset_template') + : i18n.t('workflow.msg_only_admin_edit_preset_template'); + + return { isCanEdit, tip }; +} diff --git a/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.module.less b/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.module.less new file mode 100644 index 000000000..d2babadc8 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.module.less @@ -0,0 +1,13 @@ +.container { + min-height: 100%; + padding-top: 20px; +} + +.form_container { + width: 500px; + margin: 0 auto; +} + +.no_available_tpl { + line-height: 32px; +} diff --git a/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.tsx b/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.tsx index b2a94311f..4a7e2dcc7 100644 --- a/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.tsx +++ b/web_console_v2/client/src/views/Workflows/CreateWorkflow/StepOneBasic/index.tsx @@ -1,72 +1,92 @@ -import React, { FC, useState, useEffect } from 'react'; -import styled from 'styled-components'; -import { Form, Select, Radio, Button, Input, Spin, Card, notification, message } from 'antd'; -import { useTranslation } from 'react-i18next'; -import GridRow from 'components/_base/GridRow'; -import { useHistory, useLocation, useParams } from 'react-router-dom'; +import React, { FC, useEffect, useMemo, useState } from 'react'; import { - CreateWorkflowBasicForm, - workflowBasicForm, - workflowInEditing, - workflowConfigForm, - peerConfigInPairing, - templateInUsing, -} from 'stores/workflow'; -import { useRecoilState, useSetRecoilState } from 'recoil'; + Button, + Card, + Input, + Message, + Notification, + Radio, + Select, + Spin, + Typography, + Form, +} from '@arco-design/web-react'; +import styled from './index.module.less'; +import FormLabel from 'components/FormLabel'; +import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; +import GridRow from 'components/_base/GridRow'; import { useRecoilQuery } from 'hooks/recoil'; -import { Workflow, WorkflowConfig, WorkflowTemplate } from 'typings/workflow'; -import { projectListQuery } from 'stores/project'; +import { useTranslation } from 'react-i18next'; import { useQuery } from 'react-query'; +import { useHistory, useLocation, useParams } from 'react-router-dom'; +import { useToggle } from 'react-use'; +import { useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil'; import { + fetchRevisionDetail, + fetchRevisionList, + fetchTemplateById, fetchWorkflowTemplateList, getPeerWorkflowsConfig, getWorkflowDetailById, - fetchTemplateById, + PEER_WORKFLOW_DETAIL_QUERY_KEY, } from 'services/workflow'; -import { WorkflowCreateProps } from '..'; import { parseComplexDictField } from 'shared/formSchema'; import { to } from 'shared/helpers'; -import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; +import { parseSearch } from 'shared/url'; +import { validNamePattern, isWorkflowNameUniqWithDebounce } from 'shared/validator'; +import { projectListQuery, projectState } from 'stores/project'; +import { + CreateWorkflowBasicForm, + peerConfigInPairing, + templateInUsing, + workflowBasicForm, + workflowConfigForm, + workflowInEditing, +} from 'stores/workflow'; +import { Workflow, WorkflowConfig, WorkflowTemplate } from 'typings/workflow'; import ScheduledWorkflowRunning, { scheduleIntervalValidator, } from 'views/Workflows/ScheduledWorkflowRunning'; -import FormLabel 
from 'components/FormLabel'; +import { WorkflowCreateProps } from '..'; +import { useIsFormValueChange } from 'hooks'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import { FilterOp } from 'typings/filter'; +import { constructExpressionTree } from 'shared/filter'; -const Container = styled(Card)` - padding-top: 20px; -`; -const StyledForm = styled.div` - width: 500px; - margin: 0 auto; -`; -const NoAvailableTpl = styled.span` - line-height: 32px; -`; - -const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ +type FilterParams = { + groupAlias?: string; +}; + +const WorkflowsCreateStepOne: FC<WorkflowCreateProps> = ({ isInitiate, isAccept, - onSuccess, + onFormValueChange: onFormValueChangeFromProps, }) => { const { t } = useTranslation(); const history = useHistory(); const location = useLocation(); - const params = useParams<{ id: string }>(); - + const params = useParams<{ id: string; template_id?: string }>(); const [groupAlias, setGroupAlias] = useState(''); + const [tplDetailLoading, toggleTplLoading] = useToggle(false); + const [revisionDetailLoading, toggleRevisionLoading] = useToggle(false); const [formInstance] = Form.useForm<CreateWorkflowBasicForm>(); const { data: projectList } = useRecoilQuery(projectListQuery); + const project = useRecoilValue(projectState); const [formData, setFormData] = useRecoilState(workflowBasicForm); - const setTemplateInUsing = useSetRecoilState(templateInUsing); + const [currTemplate, setTemplateInUsing] = useRecoilState(templateInUsing); + // eslint-disable-next-line @typescript-eslint/no-unused-vars const [workflowConfig, setWorkflowConfigForm] = useRecoilState(workflowConfigForm); + const [tplId, setTplId] = useState(params.template_id || 0); const setPeerConfig = useSetRecoilState(peerConfigInPairing); // Using when Participant accept the initiation // it should be null if it's Coordinator side initiate a workflow const [workflow, setWorkflow] = useRecoilState(workflowInEditing); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); + const workflowQuery = useQuery(['getWorkflow', params.id], getWorkflowDetail, { // Only do workflow fetch if: // 1. id existed in url @@ -75,76 +95,108 @@ const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ enabled: Boolean(params.id) && !!isAccept && !Boolean(workflow), refetchOnWindowFocus: false, }); - const peerWorkflowQuery = useQuery(['getPeerWorkflow', params.id], getPeerWorkflow, { + const peerWorkflowQuery = useQuery([PEER_WORKFLOW_DETAIL_QUERY_KEY, params.id], getPeerWorkflow, { enabled: Boolean(params.id) && !!isAccept, refetchOnWindowFocus: false, retry: false, }); - const allowedIsLeftValue = isInitiate ? 'ALL' : !peerWorkflowQuery.data?.config?.is_left; - const tplListQuery = useQuery( - ['getTemplateList', allowedIsLeftValue, groupAlias], + ['getTemplateList', groupAlias], async () => fetchWorkflowTemplateList({ - isLeft: allowedIsLeftValue === 'ALL' ? 
undefined : allowedIsLeftValue, - groupAlias, + filter: constructFilterExpression({ groupAlias }), }), { enabled: isInitiate || Boolean(!!peerWorkflowQuery.data && groupAlias), refetchOnWindowFocus: false, }, ); + const revisionListQuery = useQuery(['getRevisionList', tplId], () => fetchRevisionList(tplId), { + enabled: Boolean(tplId), + refetchOnWindowFocus: false, + }); const peerErrorMsg = (peerWorkflowQuery.error as Error)?.message; + useEffect(() => { if (peerErrorMsg) { - notification.error({ - message: t('workflow.msg_peer_config_failed'), - description: `${peerErrorMsg} ${t('pls_try_again_later')}`, + Notification.error({ + title: t('workflow.msg_peer_config_failed'), + content: `${peerErrorMsg} ${t('pls_try_again_later')}`, duration: 0, }); } }, [peerErrorMsg, t]); + useEffect(() => { + if (params.template_id) { + onTemplateSelectChange(Number(params.template_id)); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const tplList = tplListQuery.data?.data || []; const noAvailableTpl = tplList.length === 0; - const projectId = Number(new URLSearchParams(location.search).get('project')) || undefined; - const initValues = _getInitialValues(formData, workflow, projectId); + const revisionList = useMemo(() => { + if (!revisionListQuery.data) return []; + return revisionListQuery.data?.data || []; + }, [revisionListQuery.data]); + const noAvailableRevision = revisionList.length === 0; + const projectId = + project.current?.id ?? Number(parseSearch(location).get('project')) ?? undefined; + const initValues = params.template_id + ? _getInitialValues(formData, workflow, projectId, params.template_id) + : _getInitialValues(formData, workflow, projectId); const pairingPrefix = isAccept ? 'pairing_' : ''; - + const isLocalRun = currTemplate?.is_local; return ( - <Spin spinning={workflowQuery.isLoading}> - <Container bordered={false}> - <StyledForm> + <Card bordered={false} className={styled.container}> + <Spin loading={workflowQuery.isLoading} style={{ width: '100%' }}> + <div className={styled.form_container}> <Form labelCol={{ span: 6 }} wrapperCol={{ span: 18 }} form={formInstance} - onValuesChange={onFormChange as any} - initialValues={initValues} + onValuesChange={onFormValueChange} + initialValues={initValues as Partial<CreateWorkflowBasicForm> | undefined} > <Form.Item - name="name" + field="name" hasFeedback label={t('workflow.label_name')} rules={[ { required: true, message: t('workflow.msg_name_required') }, { max: 255, message: t('workflow.msg_workflow_name_invalid') }, + { match: validNamePattern, message: t('valid_error.name_invalid') }, + // If isAccept = true, don't check whether workflow name is unique + !isAccept + ? 
{ + validator: isWorkflowNameUniqWithDebounce, + } + : {}, ]} > <Input disabled={isAccept} placeholder={t('workflow.placeholder_name')} /> </Form.Item> <Form.Item - name="project_id" + field="project_id" label={t('workflow.label_project')} hasFeedback rules={[{ required: true, message: t('workflow.msg_project_required') }]} > - <Select disabled={isAccept} placeholder={t('workflow.placeholder_project')}> + <Select + disabled={isAccept} + placeholder={t('workflow.placeholder_project')} + showSearch + allowClear + filterOption={(inputValue, option) => + option.props.children.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0 + } + > {projectList && projectList.map((pj) => ( <Select.Option key={pj.id} value={pj.id}> @@ -156,21 +208,32 @@ const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ <Form.Item label={t('workflow.label_template')} - name="_templateSelected" + field="_templateSelected" hasFeedback + help={ + isLocalRun ? ( + <Typography.Text disabled> + 当前模板仅支持本地运行,无需对侧参与配置 + </Typography.Text> + ) : undefined + } rules={[{ required: true, message: t('workflow.msg_template_required') }]} > {noAvailableTpl && !tplListQuery.isLoading && !tplListQuery.isIdle ? ( - <NoAvailableTpl> + <span className={styled.no_available_tpl}> {t(`workflow.msg_${pairingPrefix}no_abailable_tpl`)} - </NoAvailableTpl> + </span> ) : ( <Select - loading={tplListQuery.isLoading} + loading={tplListQuery.isLoading || tplDetailLoading} disabled={Boolean(tplListQuery.error) || noAvailableTpl} onChange={onTemplateSelectChange} placeholder={t('workflow.placeholder_template')} + showSearch allowClear + filterOption={(inputValue, option) => + option.props.children.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0 + } > {tplList?.map((tpl) => ( <Select.Option key={tpl.id} value={tpl.id}> @@ -181,32 +244,67 @@ const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ )} </Form.Item> - <Form.Item name="forkable" label={t('workflow.label_peer_forkable')}> - <Radio.Group disabled={isAccept}> - <Radio.Button value={true}>{t('workflow.label_allow')}</Radio.Button> - <Radio.Button value={false}>{t('workflow.label_not_allow')}</Radio.Button> - </Radio.Group> - </Form.Item> - - {workflowConfig?.is_left && ( + {noAvailableTpl && !tplListQuery.isLoading && !tplListQuery.isIdle ? 
( + <></> + ) : ( <Form.Item - name="batch_update_interval" - label={ - <FormLabel - label={t('workflow.label_enable_batch_update_interval')} - tooltip={t('workflow.msg_schduled_run')} - /> - } - rules={[ - { - validator: scheduleIntervalValidator, - message: t('workflow.msg_min_10_interval'), - }, - ]} + label={t('workflow.label_revision')} + field="_revisionSelected" + hasFeedback + // rules={[{ required: true, message: t('workflow.msg_revision_required') }]} > - <ScheduledWorkflowRunning /> + <Select + loading={revisionListQuery.isLoading || revisionDetailLoading} + disabled={Boolean(revisionListQuery.error || noAvailableRevision)} + onChange={onRevisionSelectChange} + placeholder={t('workflow.placeholder_revision')} + showSearch + allowClear + filterOption={(inputValue, option) => + option.props.children + .join('') + .toLowerCase() + .indexOf(inputValue.toLowerCase()) >= 0 + } + > + {revisionList?.map((revision) => ( + <Select.Option key={revision.id} value={revision.id}> + V{revision.revision_index} + </Select.Option> + ))} + </Select> </Form.Item> )} + + <Form.Item + hidden={isLocalRun} + field="forkable" + label={t('workflow.label_peer_forkable')} + > + <Radio.Group disabled={isAccept} type="button"> + <Radio value={true}>{t('workflow.label_allow')}</Radio> + <Radio value={false}>{t('workflow.label_not_allow')}</Radio> + </Radio.Group> + </Form.Item> + + <Form.Item + field="cron_config" + label={ + <FormLabel + label={t('workflow.label_enable_batch_update_interval')} + tooltip={t('workflow.msg_schduled_run')} + /> + } + rules={[ + { + validator: scheduleIntervalValidator, + message: t('workflow.msg_time_required'), + validateTrigger: 'onSubmit', + }, + ]} + > + <ScheduledWorkflowRunning /> + </Form.Item> </Form> <Form.Item wrapperCol={{ offset: 6 }}> @@ -215,26 +313,33 @@ const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ {t('next_step')} </Button> - <Button onClick={backToList}>{t('cancel')}</Button> + <ButtonWithModalConfirm + onClick={backToList} + isShowConfirmModal={isFormValueChanged || Boolean(formData.name)} + > + {t('cancel')} + </ButtonWithModalConfirm> </GridRow> </Form.Item> - </StyledForm> - </Container> - </Spin> + </div> + </Spin> + </Card> ); async function goNextStep() { - onSuccess && onSuccess(); - const nextRoute = isInitiate - ? '/workflows/initiate/config' - : `/workflows/accept/config/${params.id}`; + ? 
'/workflow-center/workflows/initiate/config' + : `/workflow-center/workflows/accept/config/${params.id}`; history.push(nextRoute); } function backToList() { - history.push('/workflows'); + history.push('/workflow-center/workflows'); } - function setCurrentUsingTemplate(tpl: WorkflowTemplate<any>) { + function setCurrentUsingTemplate(tpl?: WorkflowTemplate<any>) { + if (!tpl) { + setTemplateInUsing(null as any); + return; + } // Widget schemas of the template from backend side are JSON-string type // parse it before using const parsedTpl = parseComplexDictField(tpl); @@ -264,27 +369,54 @@ const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ } // --------- Handlers ----------- function onFormChange(_: any, values: CreateWorkflowBasicForm) { + onFormValueChangeFromProps?.(); setFormData(values); } async function onTemplateSelectChange(id: number) { + formInstance.setFieldsValue({ _revisionSelected: undefined }); if (!id) { // If user clear select + setCurrentUsingTemplate(undefined); + setTplId(0); return; } - + setTplId(id); + toggleTplLoading(true); const [res, error] = await to(fetchTemplateById(id)); + toggleTplLoading(false); if (error) { - message.error(t('workflow.msg_get_tpl_detail_failed')); + Message.error(t('workflow.msg_get_tpl_detail_failed')); return; } if (!res.data) return; setCurrentUsingTemplate(res.data); } + async function onRevisionSelectChange(revision_id: number) { + // fetch revision detail + if (!revision_id) return; + + toggleRevisionLoading(true); + const [res, error] = await to(fetchRevisionDetail(revision_id)); + toggleRevisionLoading(false); + + if (error) { + Message.error(t('workflow.msg_get_revision_detail_failed')); + return; + } + + setCurrentUsingTemplate({ + ...currTemplate, + revision_id, + config: res.data.config, + editor_info: res.data.editor_info, + }); + } + async function onNextStepClick() { try { // Any form invalidation happens will throw error to stop the try block - await formInstance.validateFields(); + await formInstance.validate(); goNextStep(); } catch { /** ignore validation error */ @@ -292,10 +424,16 @@ const WorkflowsCreateStepOne: FC<WorkflowCreateProps & { onSuccess?: any }> = ({ } }; -function _getInitialValues(form: CreateWorkflowBasicForm, workflow: Workflow, projectId?: number) { +function _getInitialValues( + form: CreateWorkflowBasicForm, + workflow: Workflow, + projectId?: ID, + _templateSelected?: ID, +) { return Object.assign( { ...form, + _templateSelected: Number(_templateSelected) || form._templateSelected, }, // When user landing from clicking create workflow button // in Project page, hydrate project_ud @@ -304,4 +442,16 @@ function _getInitialValues(form: CreateWorkflowBasicForm, workflow: Workflow, pr ); } +function constructFilterExpression(value: FilterParams) { + const expressionNodes = []; + if (value.groupAlias) { + expressionNodes.push({ + field: 'group_alias', + op: FilterOp.EQUAL, + string_value: value.groupAlias, + }); + } + return constructExpressionTree(expressionNodes); +} + export default WorkflowsCreateStepOne; diff --git a/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.module.less b/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.module.less new file mode 100644 index 000000000..34ac197c9 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.module.less @@ -0,0 +1,20 @@ +.container { + height: 100%; + .chart_header { + height: 48px; + padding: 13px 20px; + font-size: 14px; + 
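The revision override in `onRevisionSelectChange` above boils down to this sketch (`currTemplate` and `setCurrentUsingTemplate` are the surrounding component's Recoil state and setter):

```ts
import { to } from 'shared/helpers';
import { fetchRevisionDetail } from 'services/workflow';

// Picking a revision swaps its frozen config/editor_info into the template
// held in Recoil while keeping the template's identity (id, name) intact,
// so step two renders the revision's job graph.
async function applyRevision(revisionId: number) {
  const [res, error] = await to(fetchRevisionDetail(revisionId));
  if (error) return;

  setCurrentUsingTemplate({
    ...currTemplate,
    revision_id: revisionId,
    config: res.data.config,
    editor_info: res.data.editor_info,
  });
}
```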
line-height: 22px; + background-color: white; + } + .footer { + position: sticky; + bottom: 0; + z-index: 5; // just above react-flow' z-index + padding: 15px 36px; + background-color: white; + } + .chart_title { + margin-bottom: 0; + } +} diff --git a/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.tsx b/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.tsx index 4e5a22fd1..9c8b78c8c 100644 --- a/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.tsx +++ b/web_console_v2/client/src/views/Workflows/CreateWorkflow/SteptTwoConfig/index.tsx @@ -1,5 +1,5 @@ import React, { FC, useEffect, useRef, useState } from 'react'; -import styled from 'styled-components'; +import styled from './index.module.less'; import { ReactFlowProvider, useStoreState, @@ -16,7 +16,7 @@ import { NodeData, } from 'components/WorkflowJobsCanvas/types'; import GridRow from 'components/_base/GridRow'; -import { Button, message, Modal, Spin } from 'antd'; +import { Button, Message, Spin } from '@arco-design/web-react'; import { Redirect, useHistory, useParams } from 'react-router-dom'; import { useRecoilValue, useRecoilState } from 'recoil'; import { @@ -27,42 +27,21 @@ import { } from 'stores/workflow'; import { useTranslation } from 'react-i18next'; import i18n from 'i18n'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import ErrorBoundary from 'components/ErrorBoundary'; import { acceptNFillTheWorkflowConfig, initiateAWorkflow } from 'services/workflow'; import { to } from 'shared/helpers'; import { WorkflowCreateProps } from '..'; import { WorkflowAcceptPayload, WorkflowInitiatePayload } from 'typings/workflow'; -import { Variable } from 'typings/variable'; import InspectPeerConfigs from '../../InspectPeerConfig'; -import { ExclamationCircle } from 'components/IconPark'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; +import Modal from 'components/Modal'; import { stringifyComplexDictField } from 'shared/formSchema'; import { removePrivate } from 'shared/object'; -import { cloneDeep, Dictionary } from 'lodash'; +import { cloneDeep } from 'lodash-es'; import { useSubscribe } from 'hooks'; import { WORKFLOW_JOB_NODE_CHANNELS } from 'components/WorkflowJobsCanvas/JobNodes/shared'; import { CreateJobFlag } from 'typings/job'; - -const Container = styled.section` - height: 100%; -`; -const ChartHeader = styled.header` - height: 48px; - padding: 13px 20px; - font-size: 14px; - line-height: 22px; - background-color: white; -`; -const Footer = styled.footer` - position: sticky; - bottom: 0; - z-index: 5; // just above react-flow' z-index - padding: 15px 36px; - background-color: white; -`; -const ChartTitle = styled.h3` - margin-bottom: 0; -`; +import { hydrate } from 'views/Workflows/shared'; +import { Variable } from 'typings/variable'; const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { const history = useHistory(); @@ -98,14 +77,14 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [jobNodes[0]?.type]); - useSubscribe(WORKFLOW_JOB_NODE_CHANNELS.disable_job, onNodeDisabledChange); + useSubscribe(WORKFLOW_JOB_NODE_CHANNELS.disable_job, onNodeDisabledChange, [chartRef.current]); const isDisabled = { disabled: submitting }; if (!template?.config) { const redirectTo = isInitiate - ? '/workflows/initiate/basic' - : `/workflows/accept/basic/${params.id}`; + ? 
'/workflow-center/workflows/initiate/basic' + : `/workflow-center/workflows/accept/basic/${params.id}`; return <Redirect to={redirectTo} />; } @@ -116,11 +95,11 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { return ( <ErrorBoundary> - <Container> - <Spin spinning={submitting}> - <ChartHeader> - <ChartTitle>{t('workflow.our_config')}</ChartTitle> - </ChartHeader> + <section className={styled.container}> + <Spin loading={submitting}> + <header className={styled.chart_header}> + <h3 className={styled.chart_title}>{t('workflow.our_config')}</h3> + </header> </Spin> <WorkflowJobsCanvas @@ -151,10 +130,10 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { toggleVisible={togglePeerCfgVisible} /> - <Footer> + <footer className={styled.footer}> <GridRow gap="12"> <Button type="primary" loading={submitting} onClick={submitToCreate}> - {t('workflow.btn_send_2_ptcpt')} + {template.is_local ? '创建单侧工作流' : t('workflow.btn_send_2_ptcpt')} </Button> <Button onClick={onPrevStepClick} {...isDisabled}> {t('previous_step')} @@ -163,8 +142,8 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { {t('cancel')} </Button> </GridRow> - </Footer> - </Container> + </footer> + </section> </ErrorBoundary> ); @@ -181,19 +160,18 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { async function saveCurrentValues() { const values = await drawerRef.current?.getFormValues(); - - let nextValue = cloneDeep(configValue); + const nextValue = cloneDeep(configValue); if (currNode?.type === 'global') { // Hydrate values to workflow global variables - nextValue.variables = _hydrate(nextValue.variables, values); + nextValue.variables = hydrate(nextValue.variables, values) as Variable[]; } if (currNode?.type === 'config') { // Hydrate values to target job const targetJob = nextValue.job_definitions.find((job) => job.name === currNode.id); if (targetJob) { - targetJob.variables = _hydrate(targetJob.variables, values); + targetJob.variables = hydrate(targetJob.variables, values) as Variable[]; } } @@ -210,14 +188,13 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { /** 🚀 Initiate create request */ async function submitToCreate() { if (!checkIfAllJobConfigCompleted()) { - return message.warn(i18n.t('workflow.msg_config_unfinished')); + return Message.warning(i18n.t('workflow.msg_config_unfinished')); } toggleDrawerVisible(false); setSubmitting(true); let resError: Error | null = null; - if (isInitiate) { const payload = stringifyComplexDictField( removePrivate({ @@ -230,7 +207,11 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { payload.create_job_flags = _mapJobFlags(chartRef.current?.nodes); - const [, error] = await to(initiateAWorkflow(payload)); + payload.template_id = template.id; + + payload.template_revision_id = template.revision_id; + + const [, error] = await to(initiateAWorkflow(payload, payload.project_id)); resError = error; } @@ -244,16 +225,20 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { payload.create_job_flags = _mapJobFlags(chartRef.current?.nodes); - const [, error] = await to(acceptNFillTheWorkflowConfig(params.id, payload)); + payload.template_id = template.id; + + const [, error] = await to( + acceptNFillTheWorkflowConfig(params.id, payload, basicPayload.project_id!), + ); resError = error; } setSubmitting(false); if (!resError) { - history.push('/workflows'); + 
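The `hydrate` helper imported from `views/Workflows/shared` replaces the local `_hydrate` deleted further down in this diff; assuming the shared version mirrors the deleted one, it behaves like:

```ts
import { Dictionary } from 'lodash';
import { Variable } from 'typings/variable';

// Copy each variable shell and fill `value` from the drawer's form values,
// keyed by variable name; no form values means nothing to hydrate.
function hydrate(variableShells: Variable[], formValues?: Dictionary<any>): Variable[] {
  if (!formValues) return [];

  return variableShells.map((item) => ({
    ...item,
    value: formValues[item.name],
  }));
}
```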
history.push('/workflow-center/workflows'); } else { - message.error(resError.message); + Message.error(resError.message); } } async function selectNode(nextNode: ChartNode) { @@ -306,14 +291,9 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { function onCancelCreationClick() { Modal.confirm({ title: i18n.t('workflow.msg_sure_2_cancel_create'), - icon: <ExclamationCircle />, - zIndex: Z_INDEX_GREATER_THAN_HEADER, content: i18n.t('workflow.msg_effect_of_cancel_create'), - style: { - top: '30%', - }, onOk() { - history.push('/workflows'); + history.push('/workflow-center/workflows'); }, }); } @@ -322,21 +302,6 @@ const CanvasAndForm: FC<WorkflowCreateProps> = ({ isInitiate, isAccept }) => { } }; -/** - * @param variableShells Variable defintions without any user input value - * @param formValues User inputs - */ -function _hydrate(variableShells: Variable[], formValues?: Dictionary<any>): Variable[] { - if (!formValues) return []; - - return variableShells.map((item) => { - return { - ...item, - value: formValues[item.name], - }; - }); -} - function _mapJobFlags(nodes?: ChartNodes) { if (!nodes) return []; diff --git a/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.module.less b/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.module.less new file mode 100644 index 000000000..9bd6a20a7 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.module.less @@ -0,0 +1,8 @@ +.step_container { + width: 350px; +} +.form_area { + flex: 1; + margin-top: 12px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.tsx b/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.tsx index 3c699bec4..27b01e5be 100644 --- a/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.tsx +++ b/web_console_v2/client/src/views/Workflows/CreateWorkflow/index.tsx @@ -1,6 +1,6 @@ -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Steps, Row, Card } from 'antd'; +import React, { FC, useState, useEffect } from 'react'; +import styled from './index.module.less'; +import { Steps, Grid, Card } from '@arco-design/web-react'; import StepOneBasic from './StepOneBasic'; import SteptTwoConfig from './SteptTwoConfig'; import { Route, useHistory, useParams } from 'react-router-dom'; @@ -11,15 +11,7 @@ import SharedPageLayout from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; const { Step } = Steps; - -const StepContainer = styled.div` - width: 350px; -`; -const FormArea = styled.section` - flex: 1; - margin-top: 12px; - background-color: white; -`; +const Row = Grid.Row; enum CreateSteps { basic, @@ -31,6 +23,7 @@ export type WorkflowCreateProps = { isInitiate?: boolean; // is Participant accepting a workflow from Coordinator isAccept?: boolean; + onFormValueChange?: () => void; }; /** @@ -42,60 +35,83 @@ const WorkflowsCreate: FC<WorkflowCreateProps> = (workflowCreateProps) => { const { t } = useTranslation(); const history = useHistory(); const params = useParams<{ step: keyof typeof CreateSteps; id?: string }>(); - const [currentStep, setStep] = useState(CreateSteps[params.step || 'basic']); + const [currentStep, setStep] = useState(CreateSteps[params.step || 'basic'] + 1); + const [isFormValueChanged, setIsFormValueChanged] = useState(false); + const reset = useResetCreateForms(); + useEffect(() => { + setStep(CreateSteps[params.step || 'basic'] + 1); + }, [params.step]); + useUnmount(() => { 
reset(); }); return ( <SharedPageLayout - title={<BackButton onClick={() => history.goBack()}>{t('menu.label_workflow')}</BackButton>} + title={ + <BackButton + onClick={() => history.replace(`/workflow-center/workflows`)} + isShowConfirmModal={isFormValueChanged} + > + {t('menu.label_workflow')} + </BackButton> + } contentWrapByCard={false} > <Card> <Row justify="center"> - <StepContainer> + <div className={styled.step_container}> <Steps current={currentStep}> <Step title={t('workflow.step_basic')} /> <Step title={t('workflow.step_config')} /> </Steps> - </StepContainer> + </div> </Row> </Card> - <FormArea> + <section className={styled.form_area}> <Route - path={`/workflows/initiate/basic`} + path={`/workflow-center/workflows/initiate/basic/:template_id?`} exact render={(props) => ( - <StepOneBasic onSuccess={setToConfigStep} {...props} {...workflowCreateProps} /> + <StepOneBasic + {...props} + {...workflowCreateProps} + onFormValueChange={onFormValueChange} + /> )} /> <Route - path={`/workflows/initiate/config`} + path={`/workflow-center/workflows/initiate/config`} exact render={(props) => <SteptTwoConfig {...props} {...workflowCreateProps} />} /> <Route - path={`/workflows/accept/basic/:id`} + path={`/workflow-center/workflows/accept/basic/:id`} exact render={(props) => ( - <StepOneBasic onSuccess={setToConfigStep} {...props} {...workflowCreateProps} /> + <StepOneBasic + {...props} + {...workflowCreateProps} + onFormValueChange={onFormValueChange} + /> )} /> <Route - path={`/workflows/accept/config/:id`} + path={`/workflow-center/workflows/accept/config/:id`} exact render={(props) => <SteptTwoConfig {...props} {...workflowCreateProps} />} /> - </FormArea> + </section> </SharedPageLayout> ); - function setToConfigStep() { - setStep(CreateSteps.config); + function onFormValueChange() { + if (!isFormValueChanged) { + setIsFormValueChanged(true); + } } }; diff --git a/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.module.less b/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.module.less new file mode 100644 index 000000000..106a4ace0 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.module.less @@ -0,0 +1,17 @@ +.container { + min-height: 100%; + padding-top: 20px; +} + +.form_container { + width: 500px; + margin: 0 auto; +} + +.local_alert { + margin-bottom: 20px; +} + +.no_available_tpl { + line-height: 32px; +} diff --git a/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.tsx b/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.tsx index 90c12a33d..ea5a76eb4 100644 --- a/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.tsx +++ b/web_console_v2/client/src/views/Workflows/EditWorkflow/StepOneBasic/index.tsx @@ -1,6 +1,17 @@ -import React, { FC, useState, useEffect } from 'react'; -import styled from 'styled-components'; -import { Form, Select, Radio, Button, Input, Spin, Card, notification, message, Modal } from 'antd'; +import React, { FC, useState, useEffect, useMemo } from 'react'; +import styled from './index.module.less'; +import { + Form, + Select, + Radio, + Button, + Input, + Spin, + Card, + Notification, + Message, + Alert, +} from '@arco-design/web-react'; import { useTranslation } from 'react-i18next'; import GridRow from 'components/_base/GridRow'; import { useHistory, useLocation, useParams, Link } from 'react-router-dom'; @@ -22,6 +33,9 @@ import { getPeerWorkflowsConfig, getWorkflowDetailById, 
fetchTemplateById, + PEER_WORKFLOW_DETAIL_QUERY_KEY, + fetchRevisionList, + fetchRevisionDetail, } from 'services/workflow'; import { parseComplexDictField } from 'shared/formSchema'; import { to } from 'shared/helpers'; @@ -30,33 +44,34 @@ import ScheduledWorkflowRunning, { scheduleIntervalValidator, } from 'views/Workflows/ScheduledWorkflowRunning'; import FormLabel from 'components/FormLabel'; -import { ExclamationCircle } from 'components/IconPark'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; - -const Container = styled(Card)` - padding-top: 20px; -`; -const StyledForm = styled.div` - width: 500px; - margin: 0 auto; -`; -const NoAvailableTpl = styled.span` - line-height: 32px; -`; - -const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { +import Modal from 'components/Modal'; +import { parseSearch } from 'shared/url'; +import { useIsFormValueChange } from 'hooks'; +import ButtonWithModalConfirm from 'components/ButtonWithModalConfirm'; +import { FilterOp } from 'typings/filter'; +import { constructExpressionTree } from 'shared/filter'; + +type FilterParams = { + groupAlias?: string; +}; + +const WorkflowsEditStepOne: FC<{ + onFormValueChange?: () => void; +}> = ({ onFormValueChange: onFormValueChangeFromProps }) => { const { t } = useTranslation(); const history = useHistory(); const location = useLocation(); const params = useParams<{ id: string }>(); const [groupAlias, setGroupAlias] = useState(''); + const [tplId, setTplId] = useState(0); const [formInstance] = Form.useForm<CreateWorkflowBasicForm>(); const { data: projectList } = useRecoilQuery(projectListQuery); const [formData, setFormData] = useRecoilState(workflowBasicForm); - const setTemplateInUsing = useSetRecoilState(templateInUsing); + const [currTemplate, setTemplateInUsing] = useRecoilState(templateInUsing); + // eslint-disable-next-line @typescript-eslint/no-unused-vars const [workflowConfig, setWorkflowConfigForm] = useRecoilState(workflowConfigForm); const setPeerConfig = useSetRecoilState(peerConfigInPairing); @@ -64,6 +79,8 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { // it should be null if it's Coordinator side initiate a workflow const [workflow, setWorkflow] = useRecoilState(workflowInEditing); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); + const workflowQuery = useQuery(['getWorkflow', params.id], getWorkflowDetail, { // Only do workflow fetch if: // 1. 
id existed in url @@ -71,34 +88,39 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { enabled: Boolean(params.id) && !Boolean(workflow), refetchOnWindowFocus: false, }); - const peerWorkflowQuery = useQuery(['getPeerWorkflow', params.id], getPeerWorkflow, { - enabled: Boolean(params.id), + + const isLocalWorkflow = workflow?.is_local; + + const peerWorkflowQuery = useQuery([PEER_WORKFLOW_DETAIL_QUERY_KEY, params.id], getPeerWorkflow, { + enabled: Boolean(params.id) && !isLocalWorkflow && !workflowQuery.isFetching, refetchOnWindowFocus: false, retry: false, }); - const allowedIsLeftValue = !peerWorkflowQuery.data?.config?.is_left; - const tplListQuery = useQuery( - ['getTemplateList', allowedIsLeftValue, groupAlias], + ['getTemplateList', groupAlias], async () => fetchWorkflowTemplateList({ - isLeft: allowedIsLeftValue, - groupAlias, + filter: constructFilterExpression({ groupAlias }), }), { - enabled: Boolean(!!peerWorkflowQuery.data && groupAlias), + enabled: Boolean(!!peerWorkflowQuery.data && groupAlias) || isLocalWorkflow, refetchOnWindowFocus: false, }, ); + const revisionListQuery = useQuery(['getRevisionList', tplId], () => fetchRevisionList(tplId), { + enabled: Boolean(tplId), + refetchOnWindowFocus: false, + }); + const peerErrorMsg = (peerWorkflowQuery.error as Error)?.message; useEffect(() => { if (peerErrorMsg) { - notification.error({ - message: t('workflow.msg_peer_config_failed'), - description: `${peerErrorMsg} ${t('pls_try_again_later')}`, + Notification.error({ + title: t('workflow.msg_peer_config_failed'), + content: `${peerErrorMsg} ${t('pls_try_again_later')}`, duration: 0, }); } @@ -107,24 +129,39 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { const tplList = tplListQuery.data?.data || []; const noAvailableTpl = tplList.length === 0; - const projectId = Number(new URLSearchParams(location.search).get('project')) || undefined; + const revisionList = useMemo(() => { + if (!revisionListQuery.data) return []; + return revisionListQuery.data?.data || []; + }, [revisionListQuery.data]); + + const noAvailableRevision = revisionList.length === 0; + + const projectId = Number(parseSearch(location).get('project')) || undefined; const initValues = _getInitialValues(formData, workflow, projectId); const pairingPrefix = 'pairing_'; return ( - <Spin spinning={workflowQuery.isLoading}> - <Container bordered={false}> - <StyledForm> + <Card bordered={false} className={styled.container}> + <Spin loading={workflowQuery.isLoading} style={{ width: '100%' }}> + <div className={styled.form_container}> + {isLocalWorkflow && ( + <Alert + className={styled.local_alert} + type="info" + content="该任务为本地任务,仅支持单侧模板选择" + banner + /> + )} <Form labelCol={{ span: 6 }} wrapperCol={{ span: 18 }} form={formInstance} - onValuesChange={onFormChange as any} + onValuesChange={onFormValueChange} initialValues={initValues} > <Form.Item - name="name" + field="name" hasFeedback label={t('workflow.label_name')} rules={[ @@ -136,7 +173,7 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { </Form.Item> <Form.Item - name="project_id" + field="project_id" label={t('workflow.label_project')} hasFeedback rules={[{ required: true, message: t('workflow.msg_project_required') }]} @@ -151,10 +188,10 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { </Select> </Form.Item> - <Form.Item name="_keepUsingOriginalTemplate" label={t('workflow.label_template')}> - <Radio.Group> - <Radio.Button 
value={true}>{t('workflow.label_use_original_tpl')}</Radio.Button> - <Radio.Button value={false}>{t('workflow.label_choose_new_tpl')}</Radio.Button> + <Form.Item field="_keepUsingOriginalTemplate" label={t('workflow.label_template')}> + <Radio.Group type="button"> + <Radio value={true}>{t('workflow.label_use_original_tpl')}</Radio> + <Radio value={false}>{t('workflow.label_choose_new_tpl')}</Radio> </Radio.Group> </Form.Item> @@ -162,23 +199,27 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { <> <Form.Item wrapperCol={{ offset: 6 }} - name="_templateSelected" + field="_templateSelected" hasFeedback rules={[{ required: true, message: t('workflow.msg_template_required') }]} > {noAvailableTpl && !tplListQuery.isLoading && !tplListQuery.isIdle ? ( - <NoAvailableTpl> + <span className={styled.no_available_tpl}> {t(`workflow.msg_${pairingPrefix}no_abailable_tpl`)} - </NoAvailableTpl> + </span> ) : ( <Select loading={tplListQuery.isLoading} - disabled={Boolean(tplListQuery.error) || noAvailableTpl} + disabled={Boolean(tplListQuery.error)} onChange={onTemplateSelectChange} placeholder={t('workflow.placeholder_template')} + showSearch allowClear + filterOption={(inputValue, option) => + option.props.children.toLowerCase().indexOf(inputValue.toLowerCase()) >= 0 + } > - {tplList?.map((tpl) => ( + {tplList.map((tpl) => ( <Select.Option key={tpl.id} value={tpl.id}> {tpl.name} </Select.Option> @@ -191,39 +232,68 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { wrapperCol={{ offset: 6 }} style={{ marginBottom: 0, marginTop: '-10px' }} > - <Link to="/workflow-templates/create/basic" style={{ fontSize: '12px' }}> + <Link + to="/workflow-center/workflow-templates/create/basic" + style={{ fontSize: '12px' }} + > {t('workflow.btn_go_create_new_tpl')} </Link> </Form.Item> + + {noAvailableTpl && !tplListQuery.isLoading && !tplListQuery.isIdle ? 
( + <></> + ) : ( + <Form.Item + wrapperCol={{ offset: 6 }} + field="_revisionSelected" + hasFeedback + // rules={[{ required: true, message: t('workflow.msg_revision_required') }]} + > + <Select + loading={revisionListQuery.isLoading} + disabled={Boolean(revisionListQuery.error || noAvailableRevision)} + onChange={onRevisionSelectChange} + placeholder={t('workflow.placeholder_revision')} + showSearch + allowClear + filterOption={(inputValue, option) => + option.props.children + .join('') + .toLowerCase() + .indexOf(inputValue.toLowerCase()) >= 0 + } + > + {revisionList?.map((revision) => ( + <Select.Option key={revision.id} value={revision.id}> + V{revision.revision_index} + </Select.Option> + ))} + </Select> + </Form.Item> + )} </> )} - <Form.Item name="forkable" label={t('workflow.label_peer_forkable')}> - <Radio.Group> - <Radio.Button value={true}>{t('workflow.label_allow')}</Radio.Button> - <Radio.Button value={false}>{t('workflow.label_not_allow')}</Radio.Button> - </Radio.Group> - </Form.Item> + {renderForkableItem()} - {workflowConfig?.is_left && ( - <Form.Item - name="batch_update_interval" - label={ - <FormLabel - label={t('workflow.label_enable_batch_update_interval')} - tooltip={t('workflow.msg_schduled_run')} - /> - } - rules={[ - { - validator: scheduleIntervalValidator, - message: t('workflow.msg_min_10_interval'), - }, - ]} - > - <ScheduledWorkflowRunning /> - </Form.Item> - )} + <Form.Item + field="cron_config" + label={ + <FormLabel + label={t('workflow.label_enable_batch_update_interval')} + tooltip={t('workflow.msg_schduled_run')} + /> + } + rules={[ + { + validator: scheduleIntervalValidator, + message: t('workflow.msg_time_required'), + validateTrigger: 'onSubmit', + }, + ]} + > + <ScheduledWorkflowRunning /> + </Form.Item> </Form> <Form.Item wrapperCol={{ offset: 6 }}> @@ -232,22 +302,32 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { {t('next_step')} </Button> - <Button onClick={backToList}>{t('cancel')}</Button> + <ButtonWithModalConfirm onClick={backToList} isShowConfirmModal={isFormValueChanged}> + {t('cancel')} + </ButtonWithModalConfirm> </GridRow> </Form.Item> - </StyledForm> - </Container> - </Spin> + </div> + </Spin> + </Card> ); function goNextStep() { - onSuccess && onSuccess(); - history.push(`/workflows/edit/config/${params.id}`); + history.push(`/workflow-center/workflows/edit/config/${params.id}`); } function backToList() { - history.push('/workflows'); + // If no other page has been opened in this tab, go straight back to the workflows list page + if (history.length <= 2) { + history.push('/workflow-center/workflows'); + return; + } + history.go(-1); } - function setCurrentUsingTemplate(tpl: WorkflowTemplate<any>) { + function setCurrentUsingTemplate(tpl?: WorkflowTemplate<any>) { + if (!tpl) { + setTemplateInUsing(null as any); + return; + } // Widget schemas of the template from backend side are JSON-string type // parse it before using const parsedTpl = parseComplexDictField(tpl); @@ -263,6 +343,10 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { setWorkflow(data); setWorkflowConfigForm(data.config as WorkflowConfig<JobNodeRawData>); formInstance.setFieldsValue((data as any) as CreateWorkflowBasicForm); + setFormData({ + ...formData, + cron_config: data.cron_config, + }); } async function getPeerWorkflow() { const res = await getPeerWorkflowsConfig(params.id); @@ -277,28 +361,75 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { return anyPeerWorkflow; }
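// NOTE: the step-two files in this change replace their local `_hydrate` helpers
// (removed further down in this diff) with a shared `hydrate` imported from
// 'views/Workflows/shared', whose body does not appear in this patch. A minimal
// sketch of that helper, assuming it simply generalizes the removed `_hydrate`
// implementations (the call sites cast its result back to `Variable[]`, so the
// looser return type here is an assumption):
//
//   function hydrate(variableShells: Variable[], formValues?: Dictionary<any>) {
//     // Nothing to hydrate without user input
//     if (!formValues) return [];
//     // Copy each variable definition and fill in the user's value by variable name
//     return variableShells.map((item) => ({ ...item, value: formValues[item.name] }));
//   }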
// --------- Handlers ----------- + function constructFilterExpression(value: FilterParams) { + const expressionNodes = []; + if (value.groupAlias) { + expressionNodes.push({ + field: 'group_alias', + op: FilterOp.EQUAL, + string_value: value.groupAlias, + }); + } + + return constructExpressionTree(expressionNodes); + } + function onFormChange(_: any, values: CreateWorkflowBasicForm) { + onFormValueChangeFromProps?.(); setFormData(values); } async function onTemplateSelectChange(id: number) { + formInstance.setFieldsValue({ _revisionSelected: undefined }); if (!id) { // If user clear select + setCurrentUsingTemplate(undefined); + setTplId(0); return; } + setTplId(id); const [res, error] = await to(fetchTemplateById(id)); if (error) { - message.error(t('workflow.msg_get_tpl_detail_failed')); + Message.error(t('workflow.msg_get_tpl_detail_failed')); return; } if (!res.data) return; setCurrentUsingTemplate(res.data); } + + async function onRevisionSelectChange(revision_id: number) { + // fetch revision detail + if (!revision_id) return; + + const [res, error] = await to(fetchRevisionDetail(revision_id)); + + if (error) { + Message.error(t('workflow.msg_get_revision_detail_failed')); + return; + } + + setCurrentUsingTemplate({ + ...currTemplate, + revision_id, + config: res.data.config, + editor_info: res.data.editor_info, + }); + } + async function onNextStepClick() { + if ( + (!formData.cron_config && !initValues.cron_config) || + formData.cron_config === initValues.cron_config + ) { + setFormData({ + ...formData, + cron_config: undefined, + }); + } try { // Any form invalidation happens will throw error to stop the try block - await formInstance.validateFields(); + await formInstance.validate(); if (formData._keepUsingOriginalTemplate) { goNextStep(); @@ -307,12 +438,7 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { Modal.confirm({ title: t('workflow.msg_sure_2_replace_tpl'), - icon: <ExclamationCircle />, - zIndex: Z_INDEX_GREATER_THAN_HEADER, content: t('workflow.msg_loose_origin_vars_vals'), - style: { - top: '30%', - }, onOk() { goNextStep(); }, @@ -321,6 +447,22 @@ const WorkflowsCreateStepOne: FC<{ onSuccess?: any }> = ({ onSuccess }) => { /** ignore validation error */ } } + + // ---------- Renders -------- + function renderForkableItem() { + if (isLocalWorkflow) { + return null; + } + + return ( + <Form.Item field="forkable" label={t('workflow.label_peer_forkable')}> + <Radio.Group type="button"> + <Radio value={true}>{t('workflow.label_allow')}</Radio> + <Radio value={false}>{t('workflow.label_not_allow')}</Radio> + </Radio.Group> + </Form.Item> + ); + } }; function _getInitialValues(form: CreateWorkflowBasicForm, workflow: Workflow, projectId?: number) { @@ -335,4 +477,4 @@ function _getInitialValues(form: CreateWorkflowBasicForm, workflow: Workflow, pr ); } -export default WorkflowsCreateStepOne; +export default WorkflowsEditStepOne; diff --git a/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.module.less b/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.module.less new file mode 100644 index 000000000..f1643be0d --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.module.less @@ -0,0 +1,33 @@ +.container { + height: 100%; + display: flex; +} + +.chart_container { + height: 100%; + flex: 1; + + & + & { + margin-left: 16px; + } +} + +.chart_header { + height: 48px; + padding: 13px 20px; + font-size: 14px; + line-height: 22px; + background-color: white; 
+} + +.chart_title { + margin-bottom: 0; +} + +.footer { + position: sticky; + bottom: 0; + z-index: 5; // just above react-flow' z-index + padding: 15px 36px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.tsx b/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.tsx index 21c1cc97e..4937a6c52 100644 --- a/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.tsx +++ b/web_console_v2/client/src/views/Workflows/EditWorkflow/SteptTwoConfig/index.tsx @@ -1,23 +1,17 @@ import React, { FC, useMemo, useRef, useState } from 'react'; -import styled from 'styled-components'; -import { - ReactFlowProvider, - useStoreState, - useStoreActions, - ReactFlowState, -} from 'react-flow-renderer'; +import styled from './index.module.less'; +import { ReactFlowProvider } from 'react-flow-renderer'; import { useToggle } from 'react-use'; import JobFormDrawer, { JobFormDrawerExposedRef } from '../../JobFormDrawer'; import WorkflowJobsCanvas, { ChartExposedRef } from 'components/WorkflowJobsCanvas'; import { ChartNode, - ChartNodes, ChartNodeStatus, JobNodeRawData, NodeData, } from 'components/WorkflowJobsCanvas/types'; import GridRow from 'components/_base/GridRow'; -import { Button, message, Modal, Spin } from 'antd'; +import { Button, Message, Spin, Grid } from '@arco-design/web-react'; import { Redirect, useHistory, useParams } from 'react-router-dom'; import { useRecoilValue, useRecoilState } from 'recoil'; import { @@ -25,68 +19,53 @@ import { workflowBasicForm, peerConfigInPairing, workflowInEditing, + templateInUsing, } from 'stores/workflow'; import { useTranslation } from 'react-i18next'; import i18n from 'i18n'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import { patchWorkflow } from 'services/workflow'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { patchWorkflow, patchPeerWorkflow } from 'services/workflow'; import { to } from 'shared/helpers'; import { WorkflowAcceptPayload } from 'typings/workflow'; -import { Variable } from 'typings/variable'; import InspectPeerConfigs from '../../InspectPeerConfig'; -import { ExclamationCircle } from 'components/IconPark'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; +import Modal from 'components/Modal'; import { stringifyComplexDictField } from 'shared/formSchema'; -import { cloneDeep, Dictionary } from 'lodash'; +import { cloneDeep } from 'lodash-es'; import { useSubscribe } from 'hooks'; import { WORKFLOW_JOB_NODE_CHANNELS } from 'components/WorkflowJobsCanvas/JobNodes/shared'; import { CreateJobFlag } from 'typings/job'; +import { hydrate } from 'views/Workflows/shared'; +import LocalWorkflowNote from 'views/Workflows/LocalWorkflowNote'; +import { Side } from 'typings/app'; +import { Variable } from 'typings/variable'; -const Container = styled.section` - height: 100%; -`; -const ChartHeader = styled.header` - height: 48px; - padding: 13px 20px; - font-size: 14px; - line-height: 22px; - background-color: white; -`; -const Footer = styled.footer` - position: sticky; - bottom: 0; - z-index: 5; // just above react-flow' z-index - padding: 15px 36px; - background-color: white; -`; -const ChartTitle = styled.h3` - margin-bottom: 0; -`; - -const CanvasAndForm: FC = () => { +const Row = Grid.Row; + +const WorkflowEditStepTwoConfig: FC = () => { const history = useHistory(); const params = useParams<{ id: string }>(); const { t } = useTranslation(); const drawerRef = 
useRef<JobFormDrawerExposedRef>(); - const chartRef = useRef<ChartExposedRef>(); + const selfConfigChartRef = useRef<ChartExposedRef>(); + const peerConfigChartRef = useRef<ChartExposedRef>(); + const [side, setSide] = useState<Side>('self'); const [submitting, setSubmitting] = useToggle(false); const [drawerVisible, toggleDrawerVisible] = useToggle(false); const [peerCfgVisible, togglePeerCfgVisible] = useToggle(false); const [currNode, setCurrNode] = useState<ChartNode>(); - /** - * Here we could use react-flow hooks is because we - * wrap CanvasAndForm with ReactFlowProvider in lines at the bottom - */ - const jobNodes = useStoreState((store: ReactFlowState) => store.nodes as ChartNodes); - const setSelectedElements = useStoreActions((actions) => actions.setSelectedElements); - - const workflow = useRecoilValue(workflowInEditing); + const isChangedPeerConfigValue = useRef(false); + + const [workflow, setWorkflow] = useRecoilState(workflowInEditing); const [configValue, setConfigValue] = useRecoilState(workflowConfigForm); + const [peerConfigValue, setPeerConfigValue] = useRecoilState(peerConfigInPairing); const basicPayload = useRecoilValue(workflowBasicForm); - const peerConfig = useRecoilValue(peerConfigInPairing); + const template = useRecoilValue(templateInUsing); useSubscribe(WORKFLOW_JOB_NODE_CHANNELS.disable_job, onNodeDisabledChange); + const targetConfigValueState = getConfigValueState(side); + const targetChartRef = getChartRef(side); + const processedConfig = useMemo(() => { // When using original template, we have the flags tell jobs' reuse/disable status // mark them on the job raw data @@ -100,83 +79,131 @@ const CanvasAndForm: FC = () => { }, [workflow?.create_job_flags]); if (!workflow) { - const redirectTo = `/workflows/edit/basic/${params.id}`; + const redirectTo = `/workflow-center/workflows/edit/basic/${params.id}`; return <Redirect to={redirectTo} />; } const currNodeVars = currNode?.type === 'global' - ? configValue.variables - : configValue.job_definitions.find((item) => item.name === currNode?.id)?.variables; + ? targetConfigValueState[0].variables + : targetConfigValueState[0].job_definitions.find((item) => item.name === currNode?.id) + ?.variables; const isDisabled = { disabled: submitting }; const isCurrNodeReused = currNode && (currNode.data.raw as JobNodeRawData).reused; + const isLocalWorkflow = workflow.is_local; return ( <ErrorBoundary> - <Container> - <Spin spinning={submitting}> - <ChartHeader> - <ChartTitle>{t('workflow.our_config')}</ChartTitle> - </ChartHeader> - </Spin> - - <WorkflowJobsCanvas - ref={chartRef as any} - nodeType="edit" - nodeInitialStatus={ - basicPayload._keepUsingOriginalTemplate - ? ChartNodeStatus.Success - : ChartNodeStatus.Pending - } - workflowConfig={processedConfig} - onJobClick={selectNode} - onCanvasClick={onCanvasClick} - /> - - <JobFormDrawer - ref={drawerRef as any} - visible={drawerVisible} - // Reused job cannot edit any variables - readonly={isCurrNodeReused} - message={isCurrNodeReused ? 
t('workflow.msg_resued_job_cannot_edit') : ''} - toggleVisible={toggleDrawerVisible} - showPeerConfigButton={Boolean(peerConfig)} - currentIdx={currNode?.data.index} - nodesCount={jobNodes.length} - jobDefinition={currNode?.data.raw} - initialValues={currNodeVars} - onGoNextJob={onGoNextJob} - onCloseDrawer={onCloseDrawer} - onViewPeerConfigClick={onViewPeerConfigClick} - /> - - <InspectPeerConfigs - config={peerConfig} - visible={peerCfgVisible} - toggleVisible={togglePeerCfgVisible} - /> - - <Footer> - <GridRow gap="12"> - <Button type="primary" loading={submitting} onClick={submitToPatch}> - {t('workflow.btn_submit_edit')} - </Button> - <Button onClick={onPrevStepClick} {...isDisabled}> - {t('previous_step')} - </Button> - <Button onClick={onCancelEditClick} {...isDisabled}> - {t('cancel')} - </Button> - </GridRow> - </Footer> - </Container> + <section className={styled.container}> + <div className={styled.chart_container}> + <Spin loading={submitting}> + <Row className={styled.chart_header} justify="space-between" align="center"> + <h3 className={styled.chart_title}>{t('workflow.our_config')}</h3> + {isLocalWorkflow && <LocalWorkflowNote />} + </Row> + </Spin> + <ReactFlowProvider> + <WorkflowJobsCanvas + ref={selfConfigChartRef as any} + nodeType="edit" + nodeInitialStatus={ + basicPayload._keepUsingOriginalTemplate + ? ChartNodeStatus.Success + : ChartNodeStatus.Pending + } + workflowConfig={processedConfig} + onJobClick={(node) => selectNode(node, 'self')} + onCanvasClick={onCanvasClick} + /> + </ReactFlowProvider> + </div> + + {!workflow.is_local && Boolean(peerConfigValue) && ( + <div className={styled.chart_container}> + <Spin loading={submitting}> + <Row className={styled.chart_header} justify="space-between" align="center"> + <h3 className={styled.chart_title}>{t('workflow.peer_config')}</h3> + </Row> + </Spin> + + <ReactFlowProvider> + <WorkflowJobsCanvas + ref={peerConfigChartRef} + side="peer" + nodeType="edit" + nodeInitialStatus={ + basicPayload._keepUsingOriginalTemplate + ? ChartNodeStatus.Success + : ChartNodeStatus.Pending + } + workflowConfig={peerConfigValue as any} + onCanvasClick={onCanvasClick} + onJobClick={(node) => selectNode(node, 'peer')} + /> + </ReactFlowProvider> + </div> + )} + </section> + + <JobFormDrawer + ref={drawerRef as any} + visible={drawerVisible} + isPeerSide={side !== 'self'} + // Reused job cannot edit any variables + readonly={isCurrNodeReused} + message={isCurrNodeReused ?
t('workflow.msg_resued_job_cannot_edit') : ''} + toggleVisible={toggleDrawerVisible} + showPeerConfigButton={side === 'self' && !workflow.is_local && Boolean(peerConfigValue)} + currentIdx={currNode?.data.index} + nodesCount={targetChartRef?.nodes.length || 0} + jobDefinition={currNode?.data.raw} + initialValues={currNodeVars} + onGoNextJob={onGoNextJob} + onCloseDrawer={onCloseDrawer} + onViewPeerConfigClick={onViewPeerConfigClick} + /> + + <InspectPeerConfigs + config={peerConfigValue} + visible={peerCfgVisible} + toggleVisible={togglePeerCfgVisible} + /> + + <footer className={styled.footer}> + <GridRow gap="12"> + <Button type="primary" loading={submitting} onClick={submitToPatch}> + {t('workflow.btn_submit_edit')} + </Button> + <Button onClick={onPrevStepClick} {...isDisabled}> + {t('previous_step')} + </Button> + <Button onClick={onCancelEditClick} {...isDisabled}> + {t('cancel')} + </Button> + </GridRow> + </footer> </ErrorBoundary> ); // --------- Methods --------------- + function getConfigValueState(side: Side) { + return ({ + self: [configValue, setConfigValue], + peer: [peerConfigValue, setPeerConfigValue], + } as const)[side]; + } + function getChartRef(side: Side) { + return ({ + self: selfConfigChartRef.current, + peer: peerConfigChartRef.current, + } as const)[side]; + } function checkIfAllJobConfigCompleted() { - const isAllCompleted = jobNodes.every((node: ChartNode) => { + const allNodes = + selfConfigChartRef.current?.nodes.concat(peerConfigChartRef.current?.nodes || []) || []; + + const isAllCompleted = allNodes.every((node: ChartNode) => { // Whether a node has Success status or it's been disabled // we recognize it as complete! return node.data.status === ChartNodeStatus.Success || node.data.disabled; @@ -188,27 +215,33 @@ const CanvasAndForm: FC = () => { async function saveCurrentValues() { const values = await drawerRef.current?.getFormValues(); - let nextValue = cloneDeep(configValue); + const [innerConfigValue, setInnerConfigValue] = getConfigValueState(side); + + const nextValue = cloneDeep(innerConfigValue); if (currNode?.type === 'global') { // Hydrate values to workflow global variables - nextValue.variables = _hydrate(nextValue.variables, values); + nextValue.variables = hydrate(nextValue.variables, values) as Variable[]; } if (currNode?.type === 'edit') { // Hydrate values to target job const targetJob = nextValue.job_definitions.find((job) => job.name === currNode.id); if (targetJob) { - targetJob.variables = _hydrate(targetJob.variables, values); + targetJob.variables = hydrate(targetJob.variables, values) as Variable[]; } } - setConfigValue(nextValue); + setInnerConfigValue(nextValue as any); + + if (side === 'peer') { + isChangedPeerConfigValue.current = true; + } } async function validateCurrentValues() { if (!currNode) return; const isValid = await drawerRef.current?.validateCurrentForm(); - chartRef.current?.updateNodeStatusById({ + targetChartRef?.updateNodeStatusById({ id: currNode.id, status: isValid ? 
ChartNodeStatus.Success : ChartNodeStatus.Warning, }); @@ -216,31 +249,54 @@ const CanvasAndForm: FC = () => { /** 🚀 Initiate patch request */ async function submitToPatch() { if (!checkIfAllJobConfigCompleted()) { - return message.warn(i18n.t('workflow.msg_config_unfinished')); + return Message.warning(i18n.t('workflow.msg_config_unfinished')); } toggleDrawerVisible(false); setSubmitting(true); + if (isChangedPeerConfigValue.current) { + const peerPayload = stringifyComplexDictField({ + config: peerConfigValue, + } as WorkflowAcceptPayload); + + const [, error] = await to( + patchPeerWorkflow(params.id, peerPayload as any, workflow.project_id), + ); + + if (error) { + Message.error(error.message); + } + } + const payload = stringifyComplexDictField({ config: configValue, forkable: basicPayload.forkable!, - batch_update_interval: basicPayload.batch_update_interval!, + cron_config: basicPayload.cron_config!, } as WorkflowAcceptPayload); - payload.create_job_flags = _mapJobFlags(chartRef.current?.nodes); + payload.template_id = template?.id || workflow.template_info?.id; - const [, error] = await to(patchWorkflow(params.id, payload)); + payload.template_revision_id = template?.revision_id; + + const [, error] = await to(patchWorkflow(params.id, payload, workflow.project_id)); setSubmitting(false); if (!error) { - history.push('/workflows'); + setWorkflow(null as any); + history.push('/workflow-center/workflows'); } else { - message.error(error.message); + Message.error(error.message); } } - async function selectNode(nextNode: ChartNode) { + async function selectNode(nextNode: ChartNode, nextSide: Side) { + // When switching from one side's job chart to the other, deselect the current side's node first + targetChartRef?.setSelectedNodes([]); + + // Since setState is asynchronous, fetch the target chart ref manually instead of relying on the predefined one + const nextTargetChartRef = getChartRef(nextSide); + const prevNode = currNode; if (currNode && prevNode) { // Validate & Save current form before go another job @@ -249,12 +305,19 @@ const CanvasAndForm: FC = () => { } // Turn target node status to configuring - chartRef.current?.updateNodeStatusById({ id: nextNode.id, status: ChartNodeStatus.Processing }); + nextTargetChartRef?.updateNodeStatusById({ + id: nextNode.id, + status: ChartNodeStatus.Processing, + }); setCurrNode(nextNode); - setSelectedElements([nextNode]); + + nextTargetChartRef?.setSelectedNodes([nextNode]); toggleDrawerVisible(true); + + // Call setSide last so the preceding code isn't confused by a stale side value + setSide(nextSide); } // ---------- Handlers ---------------- @@ -267,15 +330,15 @@ const CanvasAndForm: FC = () => { async function onCloseDrawer() { await validateCurrentValues(); saveCurrentValues(); - setSelectedElements([]); + targetChartRef?.setSelectedNodes([]); } function onGoNextJob() { if (!currNode) return; - const nextNodeToSelect = jobNodes.find( + const nextNodeToSelect = targetChartRef?.nodes.find( (node: ChartNode) => node.data.index === currNode.data.index + 1, ); - nextNodeToSelect && selectNode(nextNodeToSelect); + nextNodeToSelect && selectNode(nextNodeToSelect, side); } function onPrevStepClick() { history.goBack(); } @@ -285,20 +348,25 @@ const CanvasAndForm: FC = () => { _: string, { data, ...payload }: { id: string; data: NodeData; disabled: boolean }, ) { - chartRef.current?.updateNodeDisabledById(payload); + const sideOfNode = data.side as Side; + if (!sideOfNode) { + console.error('[WorkflowEditStepTwoConfig]: assign a `side` prop to chart under
forking'); + return; + } + getChartRef(sideOfNode)?.updateNodeDisabledById(payload); } function onCancelEditClick() { Modal.confirm({ title: i18n.t('workflow.msg_sure_2_cancel_edit'), - icon: <ExclamationCircle />, - zIndex: Z_INDEX_GREATER_THAN_HEADER, content: i18n.t('workflow.msg_sure_2_exist_edit'), - style: { - top: '30%', - }, onOk() { - history.push('/workflows'); + // If no other page has been opened in this tab, go straight back to the workflows list page + if (history.length <= 3) { + history.push('/workflow-center/workflows'); + return; + } + history.go(-2); }, }); } @@ -307,35 +375,6 @@ const CanvasAndForm: FC = () => { } }; -/** - * @param variableShells Variable defintions without any user input value - * @param formValues User inputs - */ -function _hydrate(variableShells: Variable[], formValues?: Dictionary<any>): Variable[] { - if (!formValues) return []; - - return variableShells.map((item) => { - return { - ...item, - value: formValues[item.name], - }; - }); -} - -function _mapJobFlags(nodes?: ChartNodes) { - if (!nodes) return []; - - return nodes - .filter((node) => node.type !== 'global') - .map((node) => { - if (node.data.disabled) { - return CreateJobFlag.DISABLED; - } - - return CreateJobFlag.NEW; - }); -} - function _markJobFlags(jobs: JobNodeRawData[], flags: CreateJobFlag[] = []) { if (!flags) return jobs; @@ -351,12 +390,4 @@ function _markJobFlags(jobs: JobNodeRawData[], flags: CreateJobFlag[] = []) { }); } -const WorkflowsEditStepTwo: FC = () => { - return ( - <ReactFlowProvider> - <CanvasAndForm /> - </ReactFlowProvider> - ); -}; - -export default WorkflowsEditStepTwo; +export default WorkflowEditStepTwoConfig; diff --git a/web_console_v2/client/src/views/Workflows/EditWorkflow/index.module.less b/web_console_v2/client/src/views/Workflows/EditWorkflow/index.module.less new file mode 100644 index 000000000..9bd6a20a7 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/EditWorkflow/index.module.less @@ -0,0 +1,8 @@ +.step_container { + width: 350px; +} +.form_area { + flex: 1; + margin-top: 12px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/Workflows/EditWorkflow/index.tsx b/web_console_v2/client/src/views/Workflows/EditWorkflow/index.tsx index 1272f00c1..5658ff7e2 100644 --- a/web_console_v2/client/src/views/Workflows/EditWorkflow/index.tsx +++ b/web_console_v2/client/src/views/Workflows/EditWorkflow/index.tsx @@ -1,6 +1,6 @@ -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Steps, Row, Card } from 'antd'; +import React, { FC, useState, useEffect } from 'react'; +import styled from './index.module.less'; +import { Steps, Grid, Card } from '@arco-design/web-react'; import StepOneBasic from './StepOneBasic'; import SteptTwoConfig from './SteptTwoConfig'; import { Route, useHistory, useParams } from 'react-router-dom'; @@ -11,15 +11,7 @@ import SharedPageLayout from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; const { Step } = Steps; - -const StepContainer = styled.div` - width: 350px; -`; -const FormArea = styled.section` - flex: 1; - margin-top: 12px; - background-color: white; -`; +const Row = Grid.Row; enum CreateSteps { basic, @@ -30,42 +22,60 @@ const WorkflowsEdit: FC = () => { const { t } = useTranslation(); const history = useHistory(); const params = useParams<{ step: keyof typeof CreateSteps; id?: string }>(); - const [currentStep, setStep] = useState(CreateSteps[params.step || 'basic']); + const [currentStep, setStep] =
useState(CreateSteps[params.step || 'basic'] + 1); + const [isFormValueChanged, setIsFormValueChanged] = useState(false); const reset = useResetCreateForms(); + useEffect(() => { + setStep(CreateSteps[params.step || 'basic'] + 1); + }, [params.step]); + useUnmount(() => { reset(); }); return ( <SharedPageLayout - title={<BackButton onClick={() => history.goBack()}>{t('menu.label_workflow')}</BackButton>} + title={ + <BackButton + onClick={() => history.replace(`/workflow-center/workflows`)} + isShowConfirmModal={isFormValueChanged} + > + {t('menu.label_workflow')} + </BackButton> + } contentWrapByCard={false} > <Card> <Row justify="center"> - <StepContainer> + <div className={styled.step_container}> <Steps current={currentStep}> <Step title={t('workflow.step_basic')} /> <Step title={t('workflow.step_config')} /> </Steps> - </StepContainer> + </div> </Row> </Card> - <FormArea> + <section className={styled.form_area}> + <Route + path={`/workflow-center/workflows/edit/basic/:id`} + exact + render={() => <StepOneBasic onFormValueChange={onFormValueChange} />} + /> <Route - path={`/workflows/edit/basic/:id`} + path={`/workflow-center/workflows/edit/config/:id`} exact - render={(props) => <StepOneBasic onSuccess={setToConfigStep} {...props} />} + component={SteptTwoConfig} /> - <Route path={`/workflows/edit/config/:id`} exact component={SteptTwoConfig} /> - </FormArea> + </section> </SharedPageLayout> ); - function setToConfigStep() { - setStep(CreateSteps.config); + function onFormValueChange() { + if (!isFormValueChanged) { + setIsFormValueChanged(true); + } } }; diff --git a/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.module.less b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.module.less new file mode 100644 index 000000000..f1b6259aa --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.module.less @@ -0,0 +1,8 @@ +.container { + padding-top: 20px; + min-height: 100%; +} +.styled_form { + width: 500px; + margin: 0 auto; +} diff --git a/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.tsx b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.tsx index 225cbff70..15f90a219 100644 --- a/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.tsx +++ b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepOneBasic/index.tsx @@ -1,6 +1,6 @@ import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Form, Button, Input, Card, Radio } from 'antd'; +import styled from './index.module.less'; +import { Form, Button, Input, Card, Radio } from '@arco-design/web-react'; import { useTranslation } from 'react-i18next'; import { forkWorkflowForm } from 'stores/workflow'; import { useHistory, useParams } from 'react-router-dom'; @@ -9,21 +9,23 @@ import GridRow from 'components/_base/GridRow'; import { useQuery } from 'react-query'; import { getWorkflowDetailById } from 'services/workflow'; import WhichProject from 'components/WhichProject'; - -const Container = styled(Card)` - padding-top: 20px; - min-height: 100%; -`; -const StyledForm = styled(Form)` - width: 500px; - margin: 0 auto; -`; +import FormLabel from 'components/FormLabel'; +import ScheduledWorkflowRunning, { + scheduleIntervalValidator, +} from 'views/Workflows/ScheduledWorkflowRunning'; +import { validNamePattern, isWorkflowNameUniqWithDebounce } from 'shared/validator'; +import { useIsFormValueChange } from 'hooks'; +import ButtonWithModalConfirm 
from 'components/ButtonWithModalConfirm'; type Props = { onSuccess: any; + onFormValueChange?: () => void; }; -const WorkflowForkStepOneBaic: FC<Props> = ({ onSuccess }) => { +const WorkflowForkStepOneBaic: FC<Props> = ({ + onSuccess, + onFormValueChange: onFormValueChangeFromProps, +}) => { const { t } = useTranslation(); const history = useHistory(); const params = useParams<{ id: string }>(); @@ -32,13 +34,19 @@ const WorkflowForkStepOneBaic: FC<Props> = ({ onSuccess }) => { const [formData, setFormData] = useRecoilState(forkWorkflowForm); + const { isFormValueChanged, onFormValueChange } = useIsFormValueChange(onFormChange); + const workflowQuery = useQuery(['getWorkflow', params.id], getWorkflowDetail, { enabled: Boolean(params.id), refetchOnWindowFocus: false, onSuccess(workflow) { const newName = workflow.name + '-copy'; - formInstance.setFieldsValue({ name: newName, forkable: workflow.forkable }); + formInstance.setFieldsValue({ + name: newName, + forkable: workflow.forkable, + cron_config: workflow.cron_config, + }); setFormData({ ...formData, @@ -48,24 +56,30 @@ const WorkflowForkStepOneBaic: FC<Props> = ({ onSuccess }) => { }); }, }); - // TODO: if peer workflow unforable, redirect user back ! + + const isLocalWorkflow = workflowQuery.data?.is_local; return ( - <Container bordered={false}> - <StyledForm + <Card bordered={false} className={styled.container}> + <Form + className={styled.styled_form} labelCol={{ span: 6 }} wrapperCol={{ span: 18 }} form={formInstance} - onFinish={onFinish} - onValuesChange={onFormChange as any} + onSubmit={onFinish} + onValuesChange={onFormValueChange} > <Form.Item - name="name" + field="name" hasFeedback label={t('workflow.label_name')} rules={[ { required: true, message: t('workflow.msg_name_required') }, { max: 255, message: t('workflow.msg_workflow_name_invalid') }, + { match: validNamePattern, message: t('valid_error.name_invalid') }, + { + validator: isWorkflowNameUniqWithDebounce, + }, ]} > <Input placeholder={t('workflow.placeholder_name')} disabled={workflowQuery.isFetching} /> @@ -75,15 +89,36 @@ const WorkflowForkStepOneBaic: FC<Props> = ({ onSuccess }) => { <WhichProject id={workflowQuery.data?.project_id} loading={workflowQuery.isFetching} /> </Form.Item> - <Form.Item label={t('workflow.label_template_name')}> + <Form.Item label={t('workflow.label_template_group')}> {workflowQuery.data?.config?.group_alias} </Form.Item> - <Form.Item name="forkable" label={t('workflow.label_peer_forkable')}> - <Radio.Group> - <Radio.Button value={true}>{t(`workflow.label_allow`)}</Radio.Button> - <Radio.Button value={false}>{t(`workflow.label_not_allow`)}</Radio.Button> - </Radio.Group> + {!isLocalWorkflow && ( + <Form.Item field="forkable" label={t('workflow.label_peer_forkable')}> + <Radio.Group type="button"> + <Radio value={true}>{t(`workflow.label_allow`)}</Radio> + <Radio value={false}>{t(`workflow.label_not_allow`)}</Radio> + </Radio.Group> + </Form.Item> + )} + + <Form.Item + field="cron_config" + label={ + <FormLabel + label={t('workflow.label_enable_batch_update_interval')} + tooltip={t('workflow.msg_schduled_run')} + /> + } + rules={[ + { + validator: scheduleIntervalValidator, + message: t('workflow.msg_time_required'), + validateTrigger: 'onSubmit', + }, + ]} + > + <ScheduledWorkflowRunning /> </Form.Item> <Form.Item wrapperCol={{ offset: 6 }}> @@ -92,20 +127,23 @@ const WorkflowForkStepOneBaic: FC<Props> = ({ onSuccess }) => { {t('next_step')} </Button> - <Button onClick={backToList}>{t('cancel')}</Button> + <ButtonWithModalConfirm 
onClick={backToList} isShowConfirmModal={isFormValueChanged}> + {t('cancel')} + </ButtonWithModalConfirm> </GridRow> </Form.Item> - </StyledForm> - </Container> + </Form> + </Card> ); function backToList() { - history.push('/workflows'); + history.push('/workflow-center/workflows'); } - function onFormChange(_: any, values: { name: string }) { + function onFormChange(_: any, values: { name: string; cron_config?: string }) { + onFormValueChangeFromProps?.(); setFormData({ ...formData, - name: values.name, + ...values, }); } async function getWorkflowDetail() { @@ -116,7 +154,7 @@ const WorkflowForkStepOneBaic: FC<Props> = ({ onSuccess }) => { function onFinish() { onSuccess(); - history.push(`/workflows/fork/config/${params.id}`); + history.push(`/workflow-center/workflows/fork/config/${params.id}`); } }; diff --git a/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.module.less b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.module.less new file mode 100644 index 000000000..7bed9b3aa --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.module.less @@ -0,0 +1,42 @@ +@import '~styles/mixins.less'; + +.loadind_container { + .MixinFlexAlignCenter(); + height: 100%; + display: flex; +} + +.chart_container { + height: 100%; + flex: 1; + + & + & { + margin-left: 16px; + } +} + +.chart_section { + position: relative; + display: flex; + height: 100%; +} + +.chart_header { + height: 48px; + padding: 0 20px; + font-size: 14px; + line-height: 22px; + background-color: white; +} + +.chart_title { + margin-bottom: 0; +} + +.footer { + position: sticky; + bottom: 0; + z-index: 5; // just above react-flow' z-index + padding: 15px 36px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.tsx b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.tsx index d952e2187..aff13eefd 100644 --- a/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.tsx +++ b/web_console_v2/client/src/views/Workflows/ForkWorkflow/StepTwoConfig/index.tsx @@ -1,76 +1,45 @@ -import React, { FC, useState, useRef } from 'react'; -import { Row, Modal, Button, message, Spin } from 'antd'; -import { useQuery } from 'react-query'; -import { Redirect, useHistory, useParams } from 'react-router-dom'; -import { useRecoilState } from 'recoil'; -import { forkTheWorkflow, getPeerWorkflowsConfig, getWorkflowDetailById } from 'services/workflow'; -import { forkWorkflowForm } from 'stores/workflow'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { ReactFlowProvider } from 'react-flow-renderer'; +import { Button, Message, Grid, Spin } from '@arco-design/web-react'; +import Modal from 'components/Modal'; import WorkflowJobsCanvas, { ChartExposedRef } from 'components/WorkflowJobsCanvas'; +import { useMarkFederatedJobs } from 'components/WorkflowJobsCanvas/hooks'; +import { WORKFLOW_JOB_NODE_CHANNELS } from 'components/WorkflowJobsCanvas/JobNodes/shared'; import { ChartNode, ChartNodes, ChartNodeStatus, - NodeData, JobNodeRawData, + NodeData, } from 'components/WorkflowJobsCanvas/types'; -import { useMarkFederatedJobs } from 'components/WorkflowJobsCanvas/hooks'; -import { cloneDeep, Dictionary, omit } from 'lodash'; -import JobFormDrawer, { JobFormDrawerExposedRef } from '../../JobFormDrawer'; +import GridRow from 'components/_base/GridRow'; +import { useSubscribe } from 'hooks'; +import i18n from 
'i18n'; +import { cloneDeep, omit } from 'lodash-es'; +import React, { FC, useRef, useState } from 'react'; +import { ReactFlowProvider } from 'react-flow-renderer'; +import { useTranslation } from 'react-i18next'; +import { useQuery } from 'react-query'; +import { Redirect, useHistory, useParams } from 'react-router-dom'; import { useToggle } from 'react-use'; -import { WorkflowExecutionDetails, ChartWorkflowConfig } from 'typings/workflow'; -import { Variable } from 'typings/variable'; +import { useRecoilState } from 'recoil'; +import { + forkTheWorkflow, + getPeerWorkflow, + getWorkflowDetailById, + PEER_WORKFLOW_DETAIL_QUERY_KEY, +} from 'services/workflow'; import { parseComplexDictField, stringifyComplexDictField } from 'shared/formSchema'; -import i18n from 'i18n'; -import { ExclamationCircle } from 'components/IconPark'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import GridRow from 'components/_base/GridRow'; import { to } from 'shared/helpers'; -import { MixinFlexAlignCenter } from 'styles/mixins'; -import { useSubscribe } from 'hooks'; -import { WORKFLOW_JOB_NODE_CHANNELS } from 'components/WorkflowJobsCanvas/JobNodes/shared'; +import { forkWorkflowForm } from 'stores/workflow'; +import styled from './index.module.less'; import { Side } from 'typings/app'; import { CreateJobFlag } from 'typings/job'; +import { ChartWorkflowConfig } from 'typings/workflow'; +import LocalWorkflowNote from 'views/Workflows/LocalWorkflowNote'; +import JobFormDrawer, { JobFormDrawerExposedRef } from '../../JobFormDrawer'; +import { hydrate } from 'views/Workflows/shared'; +import { Variable } from 'typings/variable'; -const LoadingContainer = styled.div` - ${MixinFlexAlignCenter()} - - display: flex; - height: 100%; - background-color: white; -`; -const ChartSection = styled.section` - position: relative; - display: flex; - height: 100%; -`; -const ChartContainer = styled.div` - height: 100%; - flex: 1; - - & + & { - margin-left: 16px; - } -`; -const ChartHeader = styled(Row)` - height: 48px; - padding: 0 20px; - font-size: 14px; - line-height: 22px; - background-color: white; -`; -const ChartTitle = styled.h3` - margin-bottom: 0; -`; -const Footer = styled.footer` - position: sticky; - bottom: 0; - z-index: 5; // just above react-flow' z-index - padding: 15px 36px; - background-color: white; -`; +const Row = Grid.Row; // We only have two side so far const ALL_SIDES: Side[] = ['self', 'peer']; @@ -91,7 +60,7 @@ const WorkflowForkStepTwoConfig: FC = () => { const { markThem } = useMarkFederatedJobs(); - useQuery(['getWorkflow', params.id], () => getWorkflowDetailById(params.id), { + const selfQuery = useQuery(['getWorkflow', params.id], () => getWorkflowDetailById(params.id), { refetchOnWindowFocus: false, onSuccess(data) { const config = parseComplexDictField(data.data).config! as ChartWorkflowConfig; @@ -104,32 +73,38 @@ const WorkflowForkStepTwoConfig: FC = () => { }); }, }); - const peerQuery = useQuery(['getPeerWorkflow', params.id], getPeerWorkflow, { - refetchOnWindowFocus: false, - onSuccess(data) { - const fork_proposal_config = parseComplexDictField(data).config! 
as ChartWorkflowConfig; - markThem(fork_proposal_config.job_definitions); - setFormData({ - ...formData, - fork_proposal_config, - }); + const peerQuery = useQuery( + [PEER_WORKFLOW_DETAIL_QUERY_KEY, params.id], + () => getPeerWorkflow(params.id), + { + refetchOnWindowFocus: false, + enabled: !formData.is_local && !selfQuery.isFetching, + onSuccess(data) { + const fork_proposal_config = parseComplexDictField(data).config! as ChartWorkflowConfig; + markThem(fork_proposal_config.job_definitions); + + setFormData({ + ...formData, + fork_proposal_config, + }); + }, }, - }); + ); useSubscribe(WORKFLOW_JOB_NODE_CHANNELS.change_inheritance, onNodeInheritanceChange); useSubscribe(WORKFLOW_JOB_NODE_CHANNELS.disable_job, onNodeDisabledChange); if (peerQuery.data?.forkable === false) { - message.warning(t('workflow.msg_unforkable')); - return <Redirect to={'/workflows'} />; + Message.warning(t('workflow.msg_unforkable')); + return <Redirect to={'/workflow-center/workflows'} />; } - if (!formData.config || !formData.fork_proposal_config) { + if (selfQuery.isFetching || peerQuery.isFetching) { return ( - <LoadingContainer> + <div className={styled.loadind_container}> <Spin /> - </LoadingContainer> + </div> ); } @@ -146,11 +121,12 @@ const WorkflowForkStepTwoConfig: FC = () => { return ( <> - <ChartSection> - <ChartContainer> - <ChartHeader justify="space-between" align="middle"> - <ChartTitle>{t('workflow.our_config')}</ChartTitle> - </ChartHeader> + <section className={styled.chart_section}> + <div className={styled.chart_container}> + <Row className={styled.chart_header} justify="space-between" align="center"> + <h3 className={styled.chart_title}>{t('workflow.our_config')}</h3> + {formData.is_local && <LocalWorkflowNote />} + </Row> <ReactFlowProvider> <WorkflowJobsCanvas ref={selfConfigChartRef} @@ -162,25 +138,27 @@ const WorkflowForkStepTwoConfig: FC = () => { onJobClick={(node) => selectNode(node, 'self')} /> </ReactFlowProvider> - </ChartContainer> - - <ChartContainer> - <ChartHeader justify="space-between" align="middle"> - <ChartTitle>{t('workflow.peer_config')}</ChartTitle> - </ChartHeader> - - <ReactFlowProvider> - <WorkflowJobsCanvas - ref={peerConfigChartRef} - side="peer" - nodeType="fork" - nodeInitialStatus={ChartNodeStatus.Success} - workflowConfig={formData.fork_proposal_config} - onCanvasClick={onCanvasClick} - onJobClick={(node) => selectNode(node, 'peer')} - /> - </ReactFlowProvider> - </ChartContainer> + </div> + + {!formData.is_local && formData.fork_proposal_config && ( + <div className={styled.chart_container}> + <Row className={styled.chart_header} justify="space-between" align="center"> + <h3 className={styled.chart_title}>{t('workflow.peer_config')}</h3> + </Row> + + <ReactFlowProvider> + <WorkflowJobsCanvas + ref={peerConfigChartRef} + side="peer" + nodeType="fork" + nodeInitialStatus={ChartNodeStatus.Success} + workflowConfig={formData.fork_proposal_config} + onCanvasClick={onCanvasClick} + onJobClick={(node) => selectNode(node, 'peer')} + /> + </ReactFlowProvider> + </div> + )} <JobFormDrawer ref={drawerRef as any} @@ -194,12 +172,12 @@ const WorkflowForkStepTwoConfig: FC = () => { onCloseDrawer={onCloseDrawer} toggleVisible={toggleDrawerVisible} /> - </ChartSection> + </section> - <Footer> + <footer className={styled.footer}> <GridRow gap="12"> <Button type="primary" loading={submitting} onClick={submitToFork}> - {t('workflow.btn_send_2_ptcpt')} + {formData.is_local ? 
'复制工作流' : t('workflow.btn_send_2_ptcpt')} </Button> <Button onClick={onPrevStepClick} {...isDisabled}> {t('previous_step')} </Button> @@ -208,7 +186,7 @@ {t('cancel')} </Button> </GridRow> - </Footer> + </footer> </> ); @@ -225,13 +203,6 @@ const WorkflowForkStepTwoConfig: FC = () => { peer: peerConfigChartRef.current, } as const)[side]; } - - async function getPeerWorkflow(): Promise<WorkflowExecutionDetails> { - const res = await getPeerWorkflowsConfig(params.id); - const anyPeerWorkflow = Object.values(res.data).find((item) => !!item.config)!; - - return anyPeerWorkflow; - } async function selectNode(nextNode: ChartNode, nextSide: Side) { // If switching from one side's job chart to the other side's, deselect the current side's node first targetChartRef?.setSelectedNodes([]); @@ -272,11 +243,14 @@ const WorkflowForkStepTwoConfig: FC = () => { async function saveCurrentValues() { const values = await drawerRef.current?.getFormValues(); - let nextValue = cloneDeep(formData); + const nextValue = cloneDeep(formData); if (currNode?.type === 'global') { // Hydrate values to workflow global variables - nextValue[targetConfigKey].variables = _hydrate(nextValue[targetConfigKey].variables, values); + nextValue[targetConfigKey].variables = hydrate( + nextValue[targetConfigKey].variables, + values, + ) as Variable[]; } if (currNode?.type === 'fork') { @@ -285,7 +259,7 @@ const WorkflowForkStepTwoConfig: FC = () => { (job) => job.name === currNode.id, ); if (targetJob) { - targetJob.variables = _hydrate(targetJob.variables, values); + targetJob.variables = hydrate(targetJob.variables, values) as Variable[]; } } @@ -304,7 +278,7 @@ const WorkflowForkStepTwoConfig: FC = () => { /** 🚀 Initiate fork request */ async function submitToFork() { if (!checkIfAllJobConfigCompleted()) { - return message.warn(i18n.t('workflow.msg_config_unconfirm_or_unfinished')); + return Message.warning(i18n.t('workflow.msg_config_unconfirm_or_unfinished')); } toggleDrawerVisible(false); @@ -314,36 +288,34 @@ const WorkflowForkStepTwoConfig: FC = () => { // Omit unused props payload.config.job_definitions = _omitJobsColorMark(payload.config.job_definitions); - payload.fork_proposal_config.job_definitions = _omitJobsColorMark( - payload.fork_proposal_config.job_definitions, - ); - // Find reusable job names for both peer and self side payload.create_job_flags = _mapJobFlags(selfConfigChartRef.current?.nodes!); - payload.peer_create_job_flags = _mapJobFlags(peerConfigChartRef.current?.nodes!); - const [, error] = await to(forkTheWorkflow(payload)); + // If not a local workflow, do the same for the peer side + if (!formData.is_local) { + payload.peer_create_job_flags = _mapJobFlags(peerConfigChartRef.current?.nodes!); + payload.fork_proposal_config.job_definitions = _omitJobsColorMark( + payload.fork_proposal_config.job_definitions, + ); + } + + const [, error] = await to(forkTheWorkflow(payload, payload.project_id)); setSubmitting(false); if (!error) { - history.push('/workflows'); + history.push('/workflow-center/workflows'); } else { - message.error(error.message); + Message.error(error.message); } } // ------------ Handlers ------------- function onCancelForkClick() { Modal.confirm({ title: i18n.t('workflow.msg_sure_2_cancel_fork'), - icon: <ExclamationCircle />, - zIndex: Z_INDEX_GREATER_THAN_HEADER, content: i18n.t('workflow.msg_effect_of_cancel_create'), - style: { - top: '30%', - }, onOk() { - history.push('/workflows'); + history.push('/workflow-center/workflows'); }, }); } @@ -410,17 +382,6 @@ const 
WorkflowForkStepTwoConfig: FC = () => { } }; -function _hydrate(variableShells: Variable[], formValues?: Dictionary<any>): Variable[] { - if (!formValues) return []; - - return variableShells.map((item) => { - return { - ...item, - value: formValues[item.name], - }; - }); -} - function _mapJobFlags(nodes?: ChartNodes) { if (!nodes) return []; diff --git a/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.module.less b/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.module.less new file mode 100644 index 000000000..9bd6a20a7 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.module.less @@ -0,0 +1,8 @@ +.step_container { + width: 350px; +} +.form_area { + flex: 1; + margin-top: 12px; + background-color: white; +} diff --git a/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.tsx b/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.tsx index f79eb7e53..c386d3a40 100644 --- a/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.tsx +++ b/web_console_v2/client/src/views/Workflows/ForkWorkflow/index.tsx @@ -1,6 +1,6 @@ import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Steps, Row, Card } from 'antd'; +import styled from './index.module.less'; +import { Steps, Grid, Card } from '@arco-design/web-react'; import { Route, useParams, useHistory } from 'react-router-dom'; import { useTranslation } from 'react-i18next'; import { useUnmount } from 'react-use'; @@ -11,14 +11,7 @@ import SharedPageLayout from 'components/SharedPageLayout'; import BackButton from 'components/BackButton'; const { Step } = Steps; - -const StepContainer = styled.div` - width: 350px; -`; -const FormArea = styled.section` - flex: 1; - margin-top: 12px; -`; +const Row = Grid.Row; enum ForkSteps { basic, @@ -29,7 +22,9 @@ const ForkWorkflow: FC = () => { const { t } = useTranslation(); const history = useHistory(); const params = useParams<{ step: keyof typeof ForkSteps; id?: string }>(); - const [currentStep, setStep] = useState(ForkSteps[params.step || 'basic']); + const [currentStep, setStep] = useState(ForkSteps[params.step || 'basic'] + 1); + const [isFormValueChanged, setIsFormValueChanged] = useState(false); + const reset = useResetForkForms(); useUnmount(() => { @@ -39,33 +34,55 @@ const ForkWorkflow: FC = () => { return ( <SharedPageLayout - title={<BackButton onClick={() => history.goBack()}>{t('menu.label_workflow')}</BackButton>} + title={ + <BackButton + onClick={() => history.replace(`/workflow-center/workflows`)} + isShowConfirmModal={isFormValueChanged} + > + {t('menu.label_workflow')} + </BackButton> + } contentWrapByCard={false} > <Card> <Row justify="center"> - <StepContainer> + <div className={styled.step_container}> <Steps current={currentStep}> <Step title={t('workflow.step_basic')} /> <Step title={t('workflow.step_config')} /> </Steps> - </StepContainer> + </div> </Row> </Card> - <FormArea> + <section className={styled.form_area}> + <Route + path={`/workflow-center/workflows/fork/basic/:id`} + exact + render={(props) => ( + <StepOneBasic + onSuccess={setToConfigStep} + onFormValueChange={onFormValueChange} + {...props} + /> + )} + /> <Route - path={`/workflows/fork/basic/:id`} + path={`/workflow-center/workflows/fork/config/:id`} exact - render={(props) => <StepOneBasic onSuccess={setToConfigStep} {...props} />} + component={StepTwoConfig} /> - <Route path={`/workflows/fork/config/:id`} exact component={StepTwoConfig} /> - </FormArea> + </section> </SharedPageLayout> ); function 
setToConfigStep() { - setStep(ForkSteps.config); + setStep(ForkSteps['config'] + 1); + } + function onFormValueChange() { + if (!isFormValueChanged) { + setIsFormValueChanged(true); + } } }; diff --git a/web_console_v2/client/src/views/Workflows/InspectPeerConfig.module.less b/web_console_v2/client/src/views/Workflows/InspectPeerConfig.module.less new file mode 100644 index 000000000..575e7865e --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/InspectPeerConfig.module.less @@ -0,0 +1,9 @@ +.inspect_modal { + top: 20%; +} +.modal_header { + font-size: 16px; +} +.no_job { + margin: 30px auto; +} diff --git a/web_console_v2/client/src/views/Workflows/InspectPeerConfig.tsx b/web_console_v2/client/src/views/Workflows/InspectPeerConfig.tsx index 7d6581116..5a9244f82 100644 --- a/web_console_v2/client/src/views/Workflows/InspectPeerConfig.tsx +++ b/web_console_v2/client/src/views/Workflows/InspectPeerConfig.tsx @@ -1,41 +1,12 @@ import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Modal, Tabs, Button } from 'antd'; -import { ModalProps } from 'antd/lib/modal/Modal'; -import { Close } from 'components/IconPark'; +import styled from './InspectPeerConfig.module.less'; +import { Modal, Tabs } from '@arco-design/web-react'; +import { ModalProps } from '@arco-design/web-react/es/Modal/modal'; import { useTranslation } from 'react-i18next'; import { WorkflowConfig } from 'typings/workflow'; import PropertyList from 'components/PropertyList'; -import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; import NoResult from 'components/NoResult'; -const InspectModal = styled(Modal)` - top: 20%; - - .ant-modal-header { - display: none; - } - .ant-modal-body { - padding: 0; - } -`; -const ModalHeader = styled.h2` - padding: 20px; - padding-bottom: 10px; - margin-bottom: 0; - font-size: 16px; - line-height: 24px; -`; -const JobTabs = styled(Tabs)` - &.ant-tabs-top > .ant-tabs-nav { - margin-bottom: 9px; - } -`; -const NoJob = styled(NoResult)` - margin: 30px auto; -`; - const InspectPeerConfig: FC< ModalProps & { toggleVisible: Function; config: WorkflowConfig | null } > = ({ config, toggleVisible, ...props }) => { @@ -48,44 +19,48 @@ const InspectPeerConfig: FC< const hasJob = Boolean(jobs.length); return ( - <ErrorBoundary> - <InspectModal - {...props} - zIndex={Z_INDEX_GREATER_THAN_HEADER} - onOk={closeModal} - okText={t('close')} - cancelButtonProps={{ - style: { display: 'none' }, - }} - width={455} - closeIcon={<Button size="small" icon={<Close />} onClick={closeModal} />} - > - <ModalHeader>{t('workflow.peer_config')}</ModalHeader> + <Modal + className={styled.inspect_modal} + {...props} + onOk={closeModal} + okText={t('close')} + cancelButtonProps={{ + style: { display: 'none' }, + }} + onCancel={closeModal} + > + <h2 className={styled.modal_header}>{t('workflow.peer_config')}</h2> - {config && hasJob && ( - <JobTabs type="card" defaultActiveKey={jobs[0].name}> - {jobs.map((item) => { - return ( - <Tabs.TabPane tab={item.name} key={item.name}> - <PropertyList - cols={1} - labelWidth={100} - properties={item.variables.map((vari) => { - return { - label: vari.name, - value: vari.value, - }; - })} - /> - </Tabs.TabPane> - ); - })} - </JobTabs> - )} + {config && hasJob && ( + <Tabs type="card" defaultActiveTab={jobs[0].name}> + {jobs.map((item) => { + return ( + <Tabs.TabPane title={item.name} key={item.name}> + <PropertyList + cols={1} + labelWidth={100} + properties={item.variables.map((vari) => 
{ + return { + label: vari.name, + value: + vari.value && typeof vari.value === 'object' + ? JSON.stringify(vari.value) + : vari.value, + }; + })} + /> + </Tabs.TabPane> + ); + })} + </Tabs> + )} - {!hasJob && <NoJob text="对侧暂无任务" />} - </InspectModal> - </ErrorBoundary> + {!hasJob && ( + <div className={styled.no_job}> + <NoResult text="对侧暂无任务" /> + </div> + )} + </Modal> ); function closeModal() { diff --git a/web_console_v2/client/src/views/Workflows/JobFormDrawer.module.less b/web_console_v2/client/src/views/Workflows/JobFormDrawer.module.less new file mode 100644 index 000000000..0fd21f5f0 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/JobFormDrawer.module.less @@ -0,0 +1,33 @@ +.container { + padding-top: 0; +} + +.drawer_header { + height: 68px; + padding-left: 8px; + padding-right: 6px; +} + +.drawer_title { + margin-bottom: 0; +} + +.message { + display: block; + margin-top: -17px; + margin-bottom: 10px; + line-height: 1; + color: var(--textColorSecondary); +} + +.permission_display { + margin: 0 -16px 38px; + padding: 14px 0 14px 24px; + font-size: 12px; + background-color: rgb(var(--gray-1)); +} + +.form_container { + padding-right: 68px; + padding-bottom: 200px; +} diff --git a/web_console_v2/client/src/views/Workflows/JobFormDrawer.tsx b/web_console_v2/client/src/views/Workflows/JobFormDrawer.tsx index 2ef31457b..0162c7357 100644 --- a/web_console_v2/client/src/views/Workflows/JobFormDrawer.tsx +++ b/web_console_v2/client/src/views/Workflows/JobFormDrawer.tsx @@ -5,56 +5,21 @@ import React, { useState, ForwardRefRenderFunction, } from 'react'; -import styled from 'styled-components'; -import { Drawer, Row, Button } from 'antd'; +import styled from './JobFormDrawer.module.less'; +import { Drawer, DrawerProps, Grid, Button } from '@arco-design/web-react'; import buildFormSchemaFromJobDef from 'shared/formSchema'; -import VariableSchemaForm, { formActions } from 'components/VariableSchemaForm'; import { FormilySchema } from 'typings/formily'; import GridRow from 'components/_base/GridRow'; import VariablePermission from 'components/VariblePermission'; -import { DrawerProps } from 'antd/lib/drawer'; import { JobNodeRawData, GlobalNodeRawData } from 'components/WorkflowJobsCanvas/types'; -import { cloneDeep, Dictionary, noop } from 'lodash'; -import { IFormState } from '@formily/antd'; -import { giveWeakRandomKey, to } from 'shared/helpers'; +import { cloneDeep, noop } from 'lodash-es'; +import { giveWeakRandomKey } from 'shared/helpers'; import { useTranslation } from 'react-i18next'; -import { removeUndefined } from 'shared/object'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import { Close, Eye } from 'components/IconPark'; -import { Variable } from 'typings/variable'; - -const Container = styled(Drawer)` - top: 60px; - .ant-drawer-body { - padding-top: 0; - } -`; -const DrawerHeader = styled(Row)` - height: 68px; - margin: 0 -24px 0; - padding-left: 24px; - padding-right: 16px; -`; -const DrawerTitle = styled.h3` - margin-bottom: 0; -`; -const Message = styled.small` - display: block; - margin-top: -17px; - margin-bottom: 10px; - line-height: 1; - color: var(--textColorSecondary); -`; -const PermissionDisplay = styled.div` - margin: 0 -24px 38px; - padding: 14px 24px; - font-size: 12px; - background-color: var(--gray1); -`; -const FormContainer = styled.div` - padding-right: 68px; - padding-bottom: 200px; -`; +import { IconClose, IconEye } from '@arco-design/web-react/icon'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { 
Variable, VariableComponent } from 'typings/variable'; + +const Row = Grid.Row; interface Props extends DrawerProps { message?: string; @@ -72,7 +37,7 @@ interface Props extends DrawerProps { } export type JobFormDrawerExposedRef = { validateCurrentForm(): Promise<boolean>; - getFormValues(): Promise<Dictionary<any>>; + getFormValues(): Promise<Record<string, any>>; }; const JobFormDrawer: ForwardRefRenderFunction<JobFormDrawerExposedRef, Props> = ( @@ -95,17 +60,29 @@ const JobFormDrawer: ForwardRefRenderFunction<JobFormDrawerExposedRef, Props> = parentRef, ) => { const { t } = useTranslation(); - const [randomKey, setRandomKey] = useState<string>(giveWeakRandomKey()); - const [formSchema, setFormSchema] = useState<FormilySchema>(null as any); + const [, setRandomKey] = useState<string>(giveWeakRandomKey()); + const [, setFormSchema] = useState<FormilySchema>(null as any); useEffect(() => { if (jobDefinition) { // Force the schema form to re-render so it does not remember previous values setRandomKey(giveWeakRandomKey()); + + // Hide the FeatureSelect component + const normalizedJobDefinition = { + ...jobDefinition, + variables: (jobDefinition.variables || []).filter((item) => { + if (item?.widget_schema?.component === VariableComponent.FeatureSelect) { + return false; + } + return true; + }), + }; + // prop 'node' comes from `templateInUsing`, which only has the job definition // in order to hydrate the Form, we need to get the user inputs (which are stored on `workflowConfigForm`) // and merge the user inputs into the definition - const jobDefWithValues = _hydrate(jobDefinition, initialValues); + const jobDefWithValues = _hydrate(normalizedJobDefinition, initialValues); const schema = buildFormSchemaFromJobDef(jobDefWithValues, { withPermissions: isPeerSide, readonly, @@ -125,63 +102,42 @@ const JobFormDrawer: ForwardRefRenderFunction<JobFormDrawerExposedRef, Props> = return null; } - const currentJobIdxDisplay = (currentIdx || 0) + 1; - const isFinalStep = currentJobIdxDisplay === nodesCount; - const confirmButtonText = isFinalStep - ? 
t('workflow.btn_conf_done') - : t('workflow.btn_conf_next_step', { - current: currentJobIdxDisplay, - total: nodesCount || 0, - }); - return ( <ErrorBoundary> - <Container + <Drawer + className={styled.container} {...props} - getContainer="#app-content" mask={false} width="640px" - push={{ distance: -240 }} headerStyle={{ display: 'none' }} - onClose={closeDrawer} + onCancel={closeDrawer} + closable={false} + wrapClassName="#app-content" + footer={null} > - <DrawerHeader align="middle" justify="space-between"> - <DrawerTitle>{jobDefinition.name}</DrawerTitle> + <Row className={styled.drawer_header} align="center" justify="space-between"> + <h3 className={styled.drawer_title}>{jobDefinition.name}</h3> <GridRow gap="10"> {showPeerConfigButton && ( - <Button size="small" icon={<Eye />} onClick={onViewPeerConfigClick || noop}> + <Button size="small" icon={<IconEye />} onClick={onViewPeerConfigClick || noop}> {t('workflow.btn_see_peer_config')} </Button> )} - <Button size="small" icon={<Close />} onClick={closeDrawer} /> + <Button size="small" icon={<IconClose />} onClick={closeDrawer} /> </GridRow> - </DrawerHeader> + </Row> - {message && <Message>{message}</Message>} + {message && <small className={styled.message}>{message}</small>} - <PermissionDisplay> + <div className={styled.permission_display}> <GridRow gap="20"> <label>{t('workflow.ptcpt_permission')}:</label> <VariablePermission.Writable desc /> <VariablePermission.Readable desc /> <VariablePermission.Private desc /> </GridRow> - </PermissionDisplay> - - {/* ☢️ Form Area */} - <FormContainer> - {formSchema && ( - <VariableSchemaForm - key={randomKey} - schema={formSchema} - onConfirm={confirmAndGoNextJob} - onCancel={closeDrawer as any} - confirmText={confirmButtonText} - cancelText={t('workflow.btn_close')} - /> - )} - </FormContainer> - </Container> + </div> + </Drawer> </ErrorBoundary> ); @@ -189,9 +145,9 @@ const JobFormDrawer: ForwardRefRenderFunction<JobFormDrawerExposedRef, Props> = // When no Node opened yet if (!jobDefinition) return true; - const [, error] = await to(formActions.validate()); + // const [, error] = await to(form.validate()); - return !error; + return false; } function closeDrawer() { // validate current form and mark Node with corresponding status @@ -199,26 +155,10 @@ const JobFormDrawer: ForwardRefRenderFunction<JobFormDrawerExposedRef, Props> = toggleVisible && toggleVisible(false); onCloseDrawer(); } - async function confirmAndGoNextJob() { - if (nodesCount === 0) return; - - const valid = await validateCurrentForm(); - if (!valid) return; - - if (isFinalStep) { - return closeDrawer(); - } - - // Tell parent component that need to point next job - onGoNextJob && onGoNextJob(jobDefinition); - } - - function getFormValues(): Promise<Dictionary<any>> { + function getFormValues(): Promise<Record<string, any>> { return new Promise((resolve) => { - formActions.getFormState((state: IFormState) => { - resolve(removeUndefined(state.values)); - }); + resolve({}); }); } }; diff --git a/web_console_v2/client/src/views/Workflows/LocalWorkflowNode.module.less b/web_console_v2/client/src/views/Workflows/LocalWorkflowNode.module.less new file mode 100644 index 000000000..d862b42a3 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/LocalWorkflowNode.module.less @@ -0,0 +1,5 @@ +.container { + margin-left: 5px; + font-size: 12px; + color: var(--textColorSecondary); +} diff --git a/web_console_v2/client/src/views/Workflows/LocalWorkflowNote.tsx b/web_console_v2/client/src/views/Workflows/LocalWorkflowNote.tsx new 
file mode 100644 index 000000000..671a5e83b --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/LocalWorkflowNote.tsx @@ -0,0 +1,14 @@ +import { IconExclamationCircle } from '@arco-design/web-react/icon'; +import React, { FC, memo } from 'react'; +import styled from './LocalWorkflowNode.module.less'; + +const LocalWorkflowNote: FC = () => { + return ( + <div className={styled.container}> + <IconExclamationCircle style={{ marginRight: 3 }} /> + 该任务为本地任务,故无对侧配置 + </div> + ); +}; + +export default memo(LocalWorkflowNote); diff --git a/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.module.less b/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.module.less new file mode 100644 index 000000000..1528b17d5 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.module.less @@ -0,0 +1,4 @@ +.switch_container { + margin-top: 5px; + margin-bottom: 15px; +} diff --git a/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.tsx b/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.tsx index d8eeb60cb..168e98e5e 100644 --- a/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.tsx +++ b/web_console_v2/client/src/views/Workflows/ScheduledWorkflowRunning/index.tsx @@ -1,22 +1,16 @@ import React, { FC, useEffect } from 'react'; -import styled from 'styled-components'; -import { InputNumber, Switch } from 'antd'; +import styled from './index.module.less'; +import { Switch } from '@arco-design/web-react'; import { useToggle } from 'react-use'; - -export const MIN_SCHEDULED_MINUTES = 1; - -const SwitchContainer = styled.div` - margin-top: 5px; - margin-bottom: 15px; -`; +import CronTimePicker, { parseCron, PickerValue, toCron } from 'components/CronTimePicker'; type Props = { - value?: number; - onChange?: (v: number) => void; + value?: string; + onChange?: (value: string) => void; }; const ScheduledWorkflowRunning: FC<Props> = ({ value, onChange }) => { - const isEnabled = value !== -1 || value >= MIN_SCHEDULED_MINUTES; + const isEnabled = !!value; const [inputVisible, toggleVisible] = useToggle(isEnabled); useEffect(() => { @@ -27,17 +21,16 @@ const ScheduledWorkflowRunning: FC<Props> = ({ value, onChange }) => { return ( <> - <SwitchContainer> + <div className={styled.switch_container}> <Switch checked={inputVisible} onChange={onSwitchChange} /> - </SwitchContainer> + </div> {inputVisible && ( - <InputNumber - min={MIN_SCHEDULED_MINUTES} - value={value} - onChange={onValueChange} - formatter={(value: any) => `${value}min`} - parser={(value: any) => value.replace('min', '')} + <CronTimePicker + value={parseCron(value || '')} + onChange={(value: PickerValue) => { + onValueChange(toCron(value)); + }} /> )} </> @@ -45,23 +38,21 @@ const ScheduledWorkflowRunning: FC<Props> = ({ value, onChange }) => { function onSwitchChange(val: boolean) { toggleVisible(val); - if (val === false) { - onChange && onChange(-1); + onValueChange(''); } else { - onChange && onChange(MIN_SCHEDULED_MINUTES); + onValueChange('null'); } } - function onValueChange(val: number) { + function onValueChange(val: string) { onChange && onChange(val); } }; - -export function scheduleIntervalValidator(_: any, value: number) { - if (value >= MIN_SCHEDULED_MINUTES || value === -1) { - return Promise.resolve(); +export function scheduleIntervalValidator(value: any, callback: (error?: string) => void) { + if (!value || value !== 'null') { + return; } - return Promise.reject(); + 
callback('请选择时间'); } export default ScheduledWorkflowRunning; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/AccessSwitch.tsx b/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/AccessSwitch.tsx index db8829032..ad767c1de 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/AccessSwitch.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/AccessSwitch.tsx @@ -1,4 +1,4 @@ -import { message, Switch } from 'antd'; +import { Message, Switch } from '@arco-design/web-react'; import React, { FC } from 'react'; import { useToggle } from 'react-use'; import { to } from 'shared/helpers'; @@ -8,7 +8,7 @@ const AccessSwitch: FC<{ workflow: Workflow; keyOfSource: keyof Workflow; onSuccess: any; - patcher: (id: ID, val: boolean) => any; + patcher: (id: ID, val: boolean, projectId: ID) => any; }> = ({ workflow, onSuccess, patcher, keyOfSource }) => { // Q: Why is there a local copy of workflow.forkable? // A: After switching, there would be a noticeable delay before the workflow.forkable change is reflected @@ -27,13 +27,13 @@ const AccessSwitch: FC<{ async function onForkableChange(val: boolean) { toggle(true); - const [res, error] = await to(patcher(workflow.id, val)); + const [res, error] = await to(patcher(workflow.id, val, workflow.project_id)); toggle(false); if (error) { toggleUseLocal(false); - message.error(error.message); + Message.error(error.message); return; } diff --git a/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.module.less b/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.module.less new file mode 100644 index 000000000..258540900 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.module.less @@ -0,0 +1,3 @@ +.resource_name { + font-size: 14px; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.tsx b/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.tsx index 483ba51de..55a653755 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowAccessControl/index.tsx @@ -1,43 +1,22 @@ import React, { FC, useEffect, useMemo, useRef } from 'react'; -import styled from 'styled-components'; +import styled from './index.module.less'; import { useTranslation } from 'react-i18next'; -import { Button, ButtonProps, Dropdown, Menu, Tooltip } from 'antd'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import { Button, ButtonProps, Dropdown, Tooltip } from '@arco-design/web-react'; +import ErrorBoundary from 'components/ErrorBoundary'; import AccessSwitch from './AccessSwitch'; import { Workflow } from 'typings/workflow'; import { useToggle } from 'react-use'; import { toggleMetricsPublic, toggleWofklowForkable } from 'services/workflow'; import { giveWeakRandomKey } from 'shared/helpers'; -import { QuestionCircle } from 'components/IconPark'; +import { IconQuestionCircle } from '@arco-design/web-react/icon'; import GridRow from 'components/_base/GridRow'; +import PrettyMenu, { PrettyMenuItem } from 'components/PrettyMenu'; -const StyledMenu = styled(Menu)` - width: 250px; - padding: 8px 5px; - background-color: #edeeee; - border-radius: 5px; -`; - -const ControlItem = styled(Menu.Item)` - display: flex; - justify-content: space-between; - align-items: center; - height: 38px; - padding: 0 13px; - border-radius: 5px; - - &:hover { - box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); - } -`; - -const 
ResourceName = styled.strong` - font-size: 14px; -`; - -const WorkflowAccessControl: FC<ButtonProps & { workflow: Workflow; onSuccess?: any }> = ( - props, -) => { +const WorkflowAccessControl: FC<ButtonProps & { workflow: Workflow; onSuccess?: any }> = ({ + onSuccess, + workflow, + ...restProps +}) => { const uuid = useMemo(() => giveWeakRandomKey(), []); const { t } = useTranslation(); const buttonRef = useRef<HTMLButtonElement>(null); @@ -64,38 +43,39 @@ const WorkflowAccessControl: FC<ButtonProps & { workflow: Workflow; onSuccess?: return ( <ErrorBoundary> <Dropdown - visible={visible} - overlay={ - <StyledMenu id={uuid}> - <ControlItem> - <ResourceName>{t('workflow.label_forkable')}</ResourceName> + popupVisible={visible} + droplist={ + <PrettyMenu id={uuid}> + <PrettyMenuItem key="forkable"> + <strong className={styled.resource_name}>{t('workflow.label_forkable')}</strong> <AccessSwitch keyOfSource="forkable" patcher={toggleWofklowForkable} - workflow={props.workflow} + workflow={workflow} onSuccess={onAccessChange} /> - </ControlItem> - <Menu.Divider /> - <ControlItem> + </PrettyMenuItem> + <PrettyMenuItem key="metric"> <GridRow gap="5"> - <ResourceName>{t('workflow.label_metric_public')}</ResourceName> - <Tooltip title={t('workflow.msg_metric_public')}> - <QuestionCircle style={{ fontSize: '12px' }} /> + <strong className={styled.resource_name}> + {t('workflow.label_metric_public')} + </strong> + <Tooltip content={t('workflow.msg_metric_public')}> + <IconQuestionCircle style={{ fontSize: '12px' }} /> </Tooltip> </GridRow> <AccessSwitch keyOfSource="metric_is_public" patcher={toggleMetricsPublic} - workflow={props.workflow} + workflow={workflow} onSuccess={onAccessChange} /> - </ControlItem> - </StyledMenu> + </PrettyMenuItem> + </PrettyMenu> } - placement="bottomCenter" + position="bottom" > - <Button ref={buttonRef} {...props} onClick={() => toggleVisible()}> + <Button ref={buttonRef} {...restProps} onClick={() => toggleVisible()}> {t('workflow.btn_access_ctrl')} </Button> </Dropdown> @@ -103,7 +83,7 @@ const WorkflowAccessControl: FC<ButtonProps & { workflow: Workflow; onSuccess?: ); function onAccessChange() { - props.onSuccess && props.onSuccess(); + onSuccess?.(); } }; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowActions.tsx b/web_console_v2/client/src/views/Workflows/WorkflowActions.tsx index f43999f8b..862beaf75 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowActions.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowActions.tsx @@ -1,92 +1,103 @@ import React, { FC } from 'react'; -import styled, { createGlobalStyle } from 'styled-components'; -import { - isAwaitParticipantConfig, - isCompleted, - isStopped, - isRunning, - isFailed, - isPendingAccpet, - isReadyToRun, - isOperable, - isForkable, - isInvalid, -} from 'shared/workflow'; -import { Workflow } from 'typings/workflow'; +import styled from 'styled-components'; +import { isOperable, isForkable, isEditable } from 'shared/workflow'; +import { Workflow, WorkflowState } from 'typings/workflow'; import { useTranslation } from 'react-i18next'; -import { Button, message, Spin, Popconfirm } from 'antd'; +import { Button, Message, Spin, Popconfirm } from '@arco-design/web-react'; import { useHistory } from 'react-router-dom'; import { getPeerWorkflowsConfig, runTheWorkflow, stopTheWorkflow, invalidTheWorkflow, + getWorkflowDetailById, } from 'services/workflow'; import WorkflowAccessControl from './WorkflowAccessControl'; import GridRow from 'components/_base/GridRow'; -import { 
Copy, Sync, TableReport, Tool, PlayCircle, Pause, Edit } from 'components/IconPark'; +import { + IconCopy, + IconSync, + IconTool, + IconPlayCircle, + IconPause, + IconEdit, + IconMindMapping, +} from '@arco-design/web-react/icon'; import { useToggle } from 'react-use'; import { to } from 'shared/helpers'; -import { ControlOutlined } from '@ant-design/icons'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import ErrorBoundary from 'components/ErrorBoundary'; +import MoreActions, { ActionItem } from 'components/MoreActions'; +import Modal from 'components/Modal'; const Container = styled(GridRow)` width: fit-content; margin-left: ${(props: any) => (props.type === 'link' ? '-10px !important' : 0)}; `; -const SpinnerWrapperStyle = createGlobalStyle` - .spinnerWrapper { - width: fit-content; - } +const Link = styled.span` + color: var(--primaryColor); + cursor: pointer; + margin-left: 8px; `; -type Action = - | 'edit' - | 'report' - | 'configure' - | 'run' - | 'rerun' - | 'stop' - | 'fork' - | 'invalid' - | 'accessCtrl'; +type Action = 'edit' | 'configure' | 'run' | 'rerun' | 'stop' | 'fork' | 'invalid' | 'accessCtrl'; type Props = { workflow: Workflow; - type?: 'link' | 'default'; + type?: 'text' | 'default'; + size?: 'small' | 'mini'; + isShowMoreAction?: boolean; without?: Action[]; showIcon?: boolean; onSuccess?: Function; + onEditClick?: Function; + onAcceptClick?: Function; + onForkClick?: Function; }; const icons: Partial<Record<Action, any>> = { - report: TableReport, - configure: Tool, - run: PlayCircle, - rerun: Sync, - stop: Pause, - fork: Copy, - edit: Edit, - accessCtrl: ControlOutlined, + configure: IconTool, + run: IconPlayCircle, + rerun: IconSync, + stop: IconPause, + fork: IconCopy, + edit: IconEdit, + accessCtrl: IconMindMapping, }; -const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], onSuccess }) => { +const WorkflowActions: FC<Props> = ({ + workflow, + type = 'default', + isShowMoreAction = false, + without = [], + onSuccess, + size, + ...restProps +}) => { const { t } = useTranslation(); const history = useHistory(); const [loading, toggleLoading] = useToggle(false); + const { + RUNNING, + STOPPED, + INVALID, + COMPLETED, + FAILED, + PENDING_ACCEPT, + READY_TO_RUN, + PARTICIPANT_CONFIGURING, + } = WorkflowState; + + const { state } = workflow; const visible: Partial<Record<Action, boolean>> = { - configure: isPendingAccpet(workflow) && !without?.includes('configure'), - run: - (isReadyToRun(workflow) || isAwaitParticipantConfig(workflow)) && !without?.includes('run'), - stop: - (isFailed(workflow) || isRunning(workflow) || isCompleted(workflow)) && - !without?.includes('stop'), - rerun: isStopped(workflow) && !without?.includes('rerun'), - report: isCompleted(workflow) && !without?.includes('report'), + configure: state === PENDING_ACCEPT && !without?.includes('configure'), + run: (state === READY_TO_RUN || state === PARTICIPANT_CONFIGURING) && !without?.includes('run'), + stop: state === RUNNING && !without?.includes('stop'), + rerun: + (state === STOPPED || state === COMPLETED || state === FAILED) && !without?.includes('rerun'), fork: !without?.includes('fork'), accessCtrl: !without?.includes('accessCtrl'), - invalid: !without?.includes('fork') && !isInvalid(workflow), + invalid: !without?.includes('fork') && !(state === INVALID), edit: !without?.includes('edit'), }; @@ -99,21 +110,45 @@ const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], rerun: isDisabled, fork: !isForkable(workflow), invalid: false, 
- report: true, accessCtrl: false, - edit: isRunning(workflow), + edit: !isEditable(workflow), }; const isDefaultType = type === 'default'; + let actionList: ActionItem[] = []; + + if (isShowMoreAction) { + actionList = [ + { + label: t('workflow.action_fork'), + disabled: !visible.fork || disabled.fork, + onClick: onForkClick, + }, + { + label: t('workflow.action_invalid'), + disabled: !visible.invalid || disabled.invalid, + onClick: () => { + Modal.confirm({ + title: t('workflow.msg_sure_to_invalidate_title'), + content: t('workflow.msg_sure_to_invalidate_content'), + onOk() { + onInvalidClick(); + }, + }); + }, + danger: true, + }, + ]; + } + return ( <ErrorBoundary> - <Spin spinning={loading} size="small" wrapperClassName="spinnerWrapper"> - <SpinnerWrapperStyle /> + <Spin loading={loading} style={{ width: 'fit-content' }}> <Container {...{ type }} gap={isDefaultType ? 8 : 0}> {visible.edit && ( <Button - size="small" + size={size || 'small'} type={type} icon={withIcon('edit')} disabled={disabled.edit} @@ -122,20 +157,19 @@ const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], {t('workflow.action_edit')} </Button> )} - {visible.report && ( - // TODO: workflow model report - <Button size="small" type={type} icon={withIcon('report')} disabled={disabled.report}> - {t('workflow.action_show_report')} - </Button> - )} {visible.configure && ( - <Button size="small" type={type} icon={withIcon('configure')} onClick={onAcceptClick}> + <Button + size={size || 'small'} + type={type} + icon={withIcon('configure')} + onClick={onAcceptClick} + > {t('workflow.action_configure')} </Button> )} {visible.run && ( <Button - size="small" + size={size || 'small'} type={type} icon={withIcon('run')} onClick={onRunClick} @@ -145,15 +179,26 @@ const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], </Button> )} {visible.stop && ( - <Popconfirm title={t('workflow.msg_sure_to_stop')} onConfirm={onStopClick}> - <Button size="small" type={type} icon={withIcon('stop')} disabled={disabled.stop}> + <Popconfirm + title={t('workflow.msg_sure_to_stop')} + onConfirm={onStopClick} + disabled={disabled.stop} + okText="确 定" + cancelText="取 消" + > + <Button + size={size || 'small'} + type={type} + icon={withIcon('stop')} + disabled={disabled.stop} + > {t('workflow.action_stop_running')} </Button> </Popconfirm> )} {visible.rerun && ( <Button - size="small" + size={size || 'small'} type={type} icon={withIcon('rerun')} onClick={onRunClick} @@ -162,9 +207,9 @@ const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], {t('workflow.action_re_run')} </Button> )} - {visible.fork && ( + {!isShowMoreAction && visible.fork && ( <Button - size="small" + size={size || 'small'} type={type} icon={withIcon('fork')} onClick={onForkClick} @@ -173,26 +218,34 @@ const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], {t('workflow.action_fork')} </Button> )} - {visible.invalid && ( - <Button - size="small" - type={type} - onClick={onInvalidClick} - danger - disabled={disabled.invalid} + {!isShowMoreAction && visible.invalid && ( + <Popconfirm + title={t('workflow.msg_sure_to_invalidate_title')} + onConfirm={onInvalidClick} + okText="确 定" + cancelText="取 消" > - {t('workflow.action_invalid')} - </Button> + <Button + size={size || 'small'} + type={type} + status="danger" + disabled={disabled.invalid} + > + {t('workflow.action_invalid')} + </Button> + </Popconfirm> )} {visible.accessCtrl && ( <WorkflowAccessControl icon={withIcon('accessCtrl')} - 
size="small" + size={size || 'small'} type={type} workflow={workflow} disabled={disabled.accessCtrl} + onSuccess={onWorkflowAccessControlSuccess} /> )} + {isShowMoreAction && <MoreActions actionList={actionList} />} </Container> </Spin> </ErrorBoundary> @@ -208,58 +261,114 @@ const WorkflowActions: FC<Props> = ({ workflow, type = 'default', without = [], return <Ico />; } function onEditClick() { - history.push(`/workflows/edit/basic/${workflow.id}`); + if (restProps.onEditClick) { + restProps.onEditClick(); + return; + } + history.push(`/workflow-center/workflows/edit/basic/${workflow.id}`); } function onAcceptClick() { - history.push(`/workflows/accept/basic/${workflow.id}`); + if (restProps.onAcceptClick) { + restProps.onAcceptClick(); + return; + } + history.push(`/workflow-center/workflows/accept/basic/${workflow.id}`); } async function onForkClick() { - toggleLoading(true); - const [res, error] = await to(getPeerWorkflowsConfig(workflow.id)); - toggleLoading(false); - - if (error) { - return message.error(t('workflow.msg_get_peer_cfg_failed') + error.message); - } - - const anyPeerWorkflow = Object.values(res.data).find((item) => !!item.uuid)!; - if (!anyPeerWorkflow.forkable) { - message.warning(t('workflow.msg_unforkable')); + if (restProps.onForkClick) { + restProps.onForkClick(); return; } - history.push(`/workflows/fork/basic/${workflow.id}`); + try { + // Get is_local field by workflow detail api + toggleLoading(true); + const workflowDetail = await getWorkflowDetailById(workflow.id); + + const isLocal = workflowDetail.data.is_local; + + if (!isLocal) { + const [res, error] = await to(getPeerWorkflowsConfig(workflow.id)); + toggleLoading(false); + + if (error) { + return Message.error(t('workflow.msg_get_peer_cfg_failed') + error.message); + } + + const anyPeerWorkflow = Object.values(res.data).find((item) => !!item.uuid)!; + if (!anyPeerWorkflow.forkable) { + Message.warning(t('workflow.msg_unforkable')); + return; + } + } + + history.push(`/workflow-center/workflows/fork/basic/${workflow.id}`); + } catch (error) { + toggleLoading(false); + Message.error(error.message); + } } async function onRunClick() { toggleLoading(true); try { - await runTheWorkflow(workflow.id); - onSuccess && onSuccess(workflow); + await runTheWorkflow(workflow.id, workflow.project_id); + onSuccess?.(workflow); } catch (error) { - message.error(error.message); + // Hard code tip when project.variables is missing + // i.e. 
error.message = Invalid Variable when try to format the job u8e63b65b8ee941c8b94-raw:Unknown placeholder: project.variables.xyx-test + if (error.message) { + const regx = /project\.variables\.([^\s]*)/; + const result = String(error.message).match(regx); + if (result && result[1]) { + Message.warning({ + content: ( + <> + <span> + {t('workflow.msg_project_variables_required', { + var: result[1], + })} + </span> + <Link + onClick={(e) => { + history.push(`/projects/edit/${workflow.project_id}`); + }} + > + {t('workflow.msg_project_variables_link')} + </Link> + </> + ), + }); + } else { + Message.error(error.message); + } + } } toggleLoading(false); } async function onStopClick() { toggleLoading(true); try { - await stopTheWorkflow(workflow.id); - onSuccess && onSuccess(workflow); + await stopTheWorkflow(workflow.id, workflow.project_id); + onSuccess?.(workflow); } catch (error) { - message.error(error.message); + Message.error(error.message); } toggleLoading(false); } async function onInvalidClick() { toggleLoading(true); try { - await invalidTheWorkflow(workflow.id); - onSuccess && onSuccess(workflow); + await invalidTheWorkflow(workflow.id, workflow.project_id); + onSuccess?.(workflow); } catch (error) { - message.error(error.message); + Message.error(error.message); } toggleLoading(false); } + + function onWorkflowAccessControlSuccess() { + onSuccess?.(workflow); + } }; export default WorkflowActions; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.module.less new file mode 100644 index 000000000..9f7488da8 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.module.less @@ -0,0 +1,23 @@ +.container { + top: 0; + :global(.arco-drawer-content) { + padding-top: 0; + padding-bottom: 200px; + } +} + +.drawer_header { + position: sticky; + z-index: 2; + top: 0; + margin: 0 -16px 0; + padding: 20px 16px 20px 24px; + background-color: white; + box-shadow: 0 2px 12px rgba(0, 0, 0, 0.12); +} + +.drawer_title { + position: relative; + margin-bottom: 0; + margin-right: 10px; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.tsx index 06db160d1..56c725c8b 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/GlobalConfigDrawer.tsx @@ -1,40 +1,20 @@ -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import ErrorBoundary from 'components/ErrorBoundary'; import React, { ForwardRefRenderFunction } from 'react'; -import styled from 'styled-components'; -import { Drawer, Row, Button, Tag } from 'antd'; -import { DrawerProps } from 'antd/lib/drawer'; +import styled from './GlobalConfigDrawer.module.less'; +import { Drawer, Grid, Button, Tag } from '@arco-design/web-react'; +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; import { NodeData } from 'components/WorkflowJobsCanvas/types'; import { useTranslation } from 'react-i18next'; -import { Close } from 'components/IconPark'; +import { IconClose } from '@arco-design/web-react/icon'; import GridRow from 'components/_base/GridRow'; import PropertyList from 'components/PropertyList'; import { WorkflowExecutionDetails } from 'typings/workflow'; import { Link } from 'react-router-dom'; import WhichProject from 'components/WhichProject'; import 
{ useStoreActions } from 'react-flow-renderer'; +import { VariablePermissionLegend } from 'components/VariblePermission'; -const Container = styled(Drawer)` - top: 60px; - - .ant-drawer-body { - padding-top: 0; - padding-bottom: 200px; - } -`; -const DrawerHeader = styled(Row)` - position: sticky; - z-index: 2; - top: 0; - margin: 0 -24px 0; - padding: 20px 16px 20px 24px; - background-color: white; - box-shadow: 0 2px 12px rgba(0, 0, 0, 0.12); -`; -const DrawerTitle = styled.h3` - position: relative; - margin-bottom: 0; - margin-right: 10px; -`; +const Row = Grid.Row; interface Props extends DrawerProps { isPeerSide?: boolean; @@ -64,37 +44,41 @@ const GlobalConfigDrawer: ForwardRefRenderFunction<JobExecutionDetailsExposedRef const workflowProps = [ { - label: t('workflow.label_template_name'), + label: t('workflow.label_template_group'), value: workflow?.config?.group_alias || ( - <Link to={`/workflows/accept/basic/${workflow?.id}`}>{t('workflow.job_node_pending')}</Link> + <Link to={`/workflow-center/workflows/accept/basic/${workflow?.id}`}> + {t('workflow.job_node_pending')} + </Link> ), }, { label: t('workflow.label_project'), // TODO: peerWorkflow no project id - value: <WhichProject id={workflow?.project_id || 0} />, + value: <WhichProject id={workflow?.project_id} />, }, // Display workflow global variables ...(workflow?.config?.variables || []).map((item) => ({ label: item.name, value: item.value, + accessMode: item.access_mode, })), ]; return ( <ErrorBoundary> - <Container - getContainer="#app-content" + <Drawer + className={styled.container} mask={false} width="980px" - onClose={closeDrawer} + onCancel={closeDrawer} headerStyle={{ display: 'none' }} - maskClosable={true} + wrapClassName="#app-content" + footer={null} {...props} > - <DrawerHeader align="middle" justify="space-between"> - <Row align="middle"> - <DrawerTitle>{job.name}</DrawerTitle> + <Row className={styled.drawer_header} align="center" justify="space-between"> + <Row align="center"> + <h3 className={styled.drawer_title}>{job.name}</h3> {isPeerSide ? 
( <Tag color="orange">{t('workflow.peer_config')}</Tag> @@ -103,11 +87,12 @@ const GlobalConfigDrawer: ForwardRefRenderFunction<JobExecutionDetailsExposedRef )} </Row> <GridRow gap="10"> - <Button size="small" icon={<Close />} onClick={closeDrawer} /> + <Button size="small" icon={<IconClose />} onClick={closeDrawer} /> </GridRow> - </DrawerHeader> + </Row> + <VariablePermissionLegend desc={true} prefix="对侧" style={{ marginTop: 15 }} /> <PropertyList initialVisibleRows={3} cols={2} properties={workflowProps} labelWidth={90} /> - </Container> + </Drawer> </ErrorBoundary> ); diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.module.less new file mode 100644 index 000000000..16eb2a94e --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.module.less @@ -0,0 +1,56 @@ +.container { + top: 0; + :global(.arco-drawer-content) { + padding-top: 0; + padding-bottom: 200px; + } +} + +.drawer_header { + position: sticky; + z-index: 2; + top: 0; + margin: 0 -16px 0; + padding: 20px 16px 20px 24px; + background-color: white; + box-shadow: 0 2px 12px rgba(0, 0, 0, 0.12); +} + +.cover_header_shadow_if_not_sticky { + position: sticky; + bottom: 0; + z-index: 5; + top: 50px; + margin: 0 -16px 0; + height: 12px; + background-color: #fff; +} + +.drawer_title { + position: relative; + margin-bottom: 0; + margin-right: 10px; +} + +.id_text { + margin-left: 10px; + color: var(--textColorSecondary); +} + +.tab_panel { + display: none; + margin-top: 16px; + + &[data-visible='true'] { + display: block; + } +} + +.code_editor_container { + height: 550px; +} + +.check_text { + cursor: pointer; + color: var(--primaryColor); +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.tsx index b49cab50d..2bbb72af0 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionDetailsDrawer.tsx @@ -1,71 +1,41 @@ -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; +import ErrorBoundary from 'components/ErrorBoundary'; import React, { ForwardRefRenderFunction, useState } from 'react'; -import styled from 'styled-components'; -import { Drawer, Row, Button, Tag, Tabs } from 'antd'; -import { DrawerProps } from 'antd/lib/drawer'; +import styled from './JobExecutionDetailsDrawer.module.less'; +import { Drawer, Grid, Button, Tag, Tabs } from '@arco-design/web-react'; +import { DrawerProps } from '@arco-design/web-react/es/Drawer'; +import { IconClose } from '@arco-design/web-react/icon'; +import { useQuery } from 'react-query'; import { NodeData, ChartNodeStatus, JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; -import { useTranslation } from 'react-i18next'; -import { Close } from 'components/IconPark'; +import { executionStatusText } from 'components/WorkflowJobsCanvas/JobNodes/shared'; +import { convertExecutionStateToStatus } from 'components/WorkflowJobsCanvas/helpers'; +import { VariablePermissionLegend } from 'components/VariblePermission'; +import CodeEditorButton from 'components/ModelCodesEditorButton'; +import AlgorithmDrawer from 'components/AlgorithmDrawer'; +import CodeEditor from 'components/CodeEditor'; +import CountTime from 'components/CountTime'; import GridRow from 
'components/_base/GridRow'; -import { formatTimestamp } from 'shared/date'; import PropertyList from 'components/PropertyList'; +import { useTranslation } from 'react-i18next'; +import { formatTimestamp } from 'shared/date'; +import { formatJSONValue } from 'shared/helpers'; import JobExecutionLogs from './JobExecutionLogs'; import JobExecutionMetrics from './JobExecutionMetrics'; import JobExecutionPods from './JobExecutionPods'; -import { executionStatusText } from 'components/WorkflowJobsCanvas/JobNodes/shared'; -import { convertExecutionStateToStatus } from 'components/WorkflowJobsCanvas/helpers'; +import { fetchJobById } from 'services/workflow'; import { WorkflowExecutionDetails } from 'typings/workflow'; -import defaultTheme from 'styles/_theme'; -import JobKibanaMetrics from './JobKibanaMetrics/index.tsx'; +import { VariableComponent, VariableWidgetSchema, VariableAccessMode } from 'typings/variable'; +import { JobState } from 'typings/job'; +import JobKibanaMetrics from './JobKibanaMetrics'; +import dayjs from 'dayjs'; -const Container = styled(Drawer)` - top: 60px; - - .ant-drawer-body { - padding-top: 0; - padding-bottom: 200px; - } -`; -const DrawerHeader = styled(Row)` - position: sticky; - z-index: 2; - top: 0; - margin: 0 -24px 0; - padding: 20px 16px 20px 24px; - background-color: white; - box-shadow: 0 2px 12px rgba(0, 0, 0, 0.12); -`; -const CoverHeaderShadowIfNotSticky = styled.div` - position: sticky; - bottom: 0; - z-index: 5; - top: 50px; - margin: 0 -24px 0; - height: 12px; - background-color: #fff; -`; -const DrawerTitle = styled.h3` - position: relative; - margin-bottom: 0; - margin-right: 10px; -`; -const ID = styled.small` - margin-left: 10px; - color: var(--textColorSecondary); -`; -const TabPanel = styled.div` - display: none; - - &[data-visible='true'] { - display: block; - } -`; +const Row = Grid.Row; interface Props extends DrawerProps { isPeerSide?: boolean; jobData?: NodeData; workflow?: WorkflowExecutionDetails; toggleVisible?: Function; + participantId?: ID; } export type JobExecutionDetailsExposedRef = {}; @@ -73,19 +43,22 @@ export const JobExecutionDetailsContext = React.createContext({ job: (undefined as unknown) as JobNodeRawData, workflow: undefined as WorkflowExecutionDetails | undefined, isPeerSide: undefined as boolean | undefined, + participantId: undefined as ID | undefined, }); enum JobDetailTabs { Basic = 'basic', Kibana = 'kibana', + Yaml = 'yaml', } const tagColors = { [ChartNodeStatus.Pending]: 'default', - [ChartNodeStatus.Processing]: 'processing', - [ChartNodeStatus.Warning]: 'warning', - [ChartNodeStatus.Success]: 'success', - [ChartNodeStatus.Error]: 'error', + [ChartNodeStatus.Processing]: 'blue', + [ChartNodeStatus.Validating]: 'blue', + [ChartNodeStatus.Warning]: 'orange', + [ChartNodeStatus.Success]: 'green', + [ChartNodeStatus.Error]: 'red', }; const JobExecutionDetailsDrawer: ForwardRefRenderFunction<JobExecutionDetailsExposedRef, Props> = ({ @@ -93,108 +66,181 @@ const JobExecutionDetailsDrawer: ForwardRefRenderFunction<JobExecutionDetailsExp workflow, isPeerSide = false, toggleVisible, + participantId, ...props }) => { const { t } = useTranslation(); const [currTab, setTab] = useState(JobDetailTabs.Basic); + const jobQuery = useQuery( + ['fetchJobById', jobData?.raw.id], + () => fetchJobById(jobData?.raw.id), + { + enabled: Boolean(jobData?.raw.id) && props.visible, + }, + ); + + // in peerSide, jobData.raw.id is null, jobQuery is idle, so use jobData?.raw to help <JobExecutionPods/> show Pod table list + const data = 
isPeerSide ? (jobData?.raw as any) : jobQuery.data?.data; if (!jobData || !jobData.raw) { return null; } - const job = jobData.raw; - - const displayedProps = [ + const yaml = data ? data.snapshot : ''; + const displayedProps: Array<{ + label: string; + value: any; + hidden?: boolean; + accessMode?: VariableAccessMode; + }> = [ { label: 'K8s Job name', value: job.k8sName, }, { - label: t('workflow.label_job_type'), + label: '任务类型', value: job.job_type, }, { - label: t('workflow.label_job_created'), + label: '任务创建时间', value: formatTimestamp(job.created_at || 0), }, + { + label: '运行时长', + value: renderRuntime(), + }, ]; job.variables.forEach((item) => { + const widgetSchemaString = (item.widget_schema as unknown) as string; + let configValue = item.value || <span style={{ color: 'var(--textColorDisabled)' }}>N/A</span>; + try { + const parsedWidgetSchema = JSON.parse(widgetSchemaString) as VariableWidgetSchema; + // Set the value of the list item "code_tar" to a button to open the editor in read-only mode + if (parsedWidgetSchema.component === VariableComponent.Code) { + configValue = ( + <CodeEditorButton + value={JSON.parse(item.value)} + disabled={true} + buttonText="查看全部代码" + buttonType="text" + buttonIcon={null} + buttonStyle={{ padding: 0 }} + /> + ); + } + + if (parsedWidgetSchema.component === VariableComponent.AlgorithmSelect) { + const { algorithmProjectId, algorithmId, config = [] } = JSON.parse(item.value); + configValue = ( + <AlgorithmDrawer.Button + algorithmProjectId={algorithmProjectId} + algorithmId={algorithmId} + parameterVariables={config} + > + <span className={styled.check_text}>{t('check')}</span> + </AlgorithmDrawer.Button> + ); + } + } catch (error) {} + displayedProps.push({ label: item.name, - value: item.value || <span style={{ color: defaultTheme.textColorDisabled }}>N/A</span>, + value: configValue, + accessMode: item.access_mode, }); }); const jobStatus = convertExecutionStateToStatus(job.state!); - return ( <ErrorBoundary> - <JobExecutionDetailsContext.Provider value={{ job, workflow, isPeerSide }}> - <Container - getContainer="#app-content" + <JobExecutionDetailsContext.Provider value={{ job, workflow, isPeerSide, participantId }}> + <Drawer + className={styled.container} mask={false} width="980px" - onClose={closeDrawer} + onCancel={closeDrawer} headerStyle={{ display: 'none' }} + wrapClassName="#app-content" + footer={null} {...props} > - <DrawerHeader align="middle" justify="space-between"> - <Row align="middle"> - <DrawerTitle> + <Row className={styled.drawer_header} align="center" justify="space-between"> + <Row align="center"> + <h3 className={styled.drawer_title}> {job.name} - <ID>ID: {job.id}</ID> - </DrawerTitle> + <small className={styled.id_text}>ID: {job.id}</small> + </h3> {isPeerSide ? 
( - <Tag color="orange">{t('workflow.peer_config')}</Tag> + <Tag color="orange" bordered> + {t('workflow.peer_config')} + </Tag> ) : ( - <Tag color="cyan">{t('workflow.our_config')}</Tag> + <Tag color="cyan" bordered> + {t('workflow.our_config')} + </Tag> )} - <Tag color={tagColors[jobStatus]}>{executionStatusText[jobStatus]}</Tag> + <Tag style={{ marginLeft: '5px' }} color={tagColors[jobStatus]} bordered> + {executionStatusText[jobStatus]} + </Tag> </Row> <GridRow gap="10"> - <Button size="small" icon={<Close />} onClick={closeDrawer} /> + <Button size="small" icon={<IconClose />} onClick={closeDrawer} /> </GridRow> - </DrawerHeader> + </Row> - <CoverHeaderShadowIfNotSticky /> + <div className={styled.cover_header_shadow_if_not_sticky} /> - <Tabs defaultActiveKey={currTab} onChange={onTabChange as any}> - <Tabs.TabPane tab={t('workflow.label_job_basics')} key={JobDetailTabs.Basic} /> - <Tabs.TabPane tab={t('workflow.label_job_kibana_metrics')} key={JobDetailTabs.Kibana} /> + <Tabs defaultActiveTab={currTab} onChange={onTabChange as any}> + <Tabs.TabPane title={t('workflow.label_job_basics')} key={JobDetailTabs.Basic} /> + <Tabs.TabPane title={t('workflow.label_job_yaml_detail')} key={JobDetailTabs.Yaml} /> + <Tabs.TabPane + title={t('workflow.label_job_kibana_metrics')} + key={JobDetailTabs.Kibana} + /> </Tabs> - <TabPanel data-visible={currTab === JobDetailTabs.Basic}> + <div className={styled.tab_panel} data-visible={currTab === JobDetailTabs.Basic}> + <VariablePermissionLegend desc={true} prefix="对侧" /> <PropertyList initialVisibleRows={3} cols={2} properties={displayedProps} labelWidth={90} /> - <JobExecutionMetrics job={job} workflow={workflow} isPeerSide={isPeerSide} visible={props.visible} + participantId={participantId} /> - <JobExecutionLogs isPeerSide={isPeerSide} job={job} workflow={workflow} enabled={Boolean(props.visible)} + participantId={participantId} /> + <JobExecutionPods job={data} isPeerSide={isPeerSide} loading={jobQuery.isFetching} /> + </div> - <JobExecutionPods job={job} isPeerSide={isPeerSide} /> - </TabPanel> - - <TabPanel data-visible={currTab === JobDetailTabs.Kibana}> + <div className={styled.tab_panel} data-visible={currTab === JobDetailTabs.Kibana}> <JobKibanaMetrics job={job} isPeerSide={isPeerSide} /> - </TabPanel> - </Container> + </div> + + <div className={styled.tab_panel} data-visible={currTab === JobDetailTabs.Yaml}> + <div className={styled.code_editor_container}> + <CodeEditor + value={yaml ? 
formatJSONValue(yaml) : ''} + language="json" + isReadOnly={true} + /> + </div> + </div> + </Drawer> </JobExecutionDetailsContext.Provider> </ErrorBoundary> ); @@ -205,6 +251,19 @@ const JobExecutionDetailsDrawer: ForwardRefRenderFunction<JobExecutionDetailsExp function onTabChange(activeTab: JobDetailTabs) { setTab(activeTab); } + + function renderRuntime() { + let runningTime: number = 0; + const isRunning_ = job.state === JobState.STARTED; + const isStopped_ = [JobState.STOPPED, JobState.COMPLETED, JobState.FAILED].includes(job.state); + const startedAt = job.start_at || job.updated_at; + if (isStopped_) { + runningTime = (job.completed_at || job.updated_at) - startedAt!; /* end time (falling back to updated_at) minus start time */ + } else if (isRunning_) { + runningTime = dayjs().unix() - startedAt!; + } + return <CountTime time={runningTime} isStatic={!isRunning_} isResetOnChange={true} />; + } }; export default JobExecutionDetailsDrawer; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.module.less new file mode 100644 index 000000000..c4a1ae768 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.module.less @@ -0,0 +1,14 @@ +.container { + position: relative; + margin-top: 20px; + margin-bottom: 20px; +} + +.heading_row { + margin-bottom: 10px; +} + +.heading { + margin-bottom: 0; + margin-right: 10px; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.tsx index 08ea6f154..1472ecb1d 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionLogs.tsx @@ -2,31 +2,19 @@ import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; import React, { FC, useCallback, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { fetchJobLogs, fetchJobEvents, fetchPeerJobEvents } from 'services/workflow'; -import styled from 'styled-components'; +import styled from './JobExecutionLogs.module.less'; import { WorkflowExecutionDetails } from 'typings/workflow'; import PrintLogs from 'components/PrintLogs'; -import { Row, Radio, RadioChangeEvent } from 'antd'; - -const Container = styled.div` - position: relative; - margin-bottom: 20px; -`; -const HeadingRow = styled(Row)` - margin-bottom: 10px; -`; -const Heading = styled.h3` - margin-bottom: 0; - margin-right: 10px; -`; -const PrintJobLogs = styled(PrintLogs)` - border-radius: 4px; -`; +import { Grid, Radio } from '@arco-design/web-react'; + +const Row = Grid.Row; type Props = { enabled: boolean; job: JobNodeRawData; isPeerSide: boolean; workflow?: WorkflowExecutionDetails; + participantId?: ID; }; enum LogType { @@ -34,54 +22,57 @@ enum LogType { Events, } -const JobExecutionLogs: FC<Props> = ({ job, enabled, isPeerSide, workflow }) => { +const JobExecutionLogs: FC<Props> = ({ job, enabled, isPeerSide, workflow, participantId }) => { const { t } = useTranslation(); const [currType, setType] = useState(isPeerSide ?
LogType.Events : LogType.Logs); - const fetchLogsOrEvents = useCallback(async () => { - if (isPeerSide) { - if (!job.k8sName) { - return { data: ['K8s Job name invalid!'] }; + const fetchLogsOrEvents = useCallback( + async (maxLines = 5000) => { + if (isPeerSide) { + if (!job.k8sName) { + return { data: ['K8s Job name invalid!'] }; + } + + return fetchPeerJobEvents(workflow?.uuid!, job.k8sName!, participantId ?? 0, { + maxLines, + }).catch((error) => ({ + data: [`[Error occurred during fetchPeerJobEvents]: \n\n${error.message}`], + })); } - return fetchPeerJobEvents(workflow?.uuid!, job.k8sName!, { - maxLines: 500, - }).catch((error) => ({ - data: [`[Error occurred during fetchPeerJobEvents]: \n\n${error.message}`], - })); - } + if (!job.id) { + return { data: ['Job ID invalid!'] }; + } - if (!job.id) { - return { data: ['Job ID invalid!'] }; - } + if (currType === LogType.Events) { + return fetchJobEvents(job.id, { + maxLines, + }).catch((error) => ({ + data: [`[Error occurred during fetchJobEvents]: \n\n${error.message}`], + })); + } - if (currType === LogType.Events) { - return fetchJobEvents(job.id, { - maxLines: 500, + return fetchJobLogs(job.id, { + maxLines, }).catch((error) => ({ - data: [`[Error occurred during fetchJobEvents]: \n\n${error.message}`], + data: [`[Error occurred during fetchJobLogs]: \n\n${error.message}`], })); - } - - return fetchJobLogs(job.id, { - maxLines: 500, - }).catch((error) => ({ - data: [`[Error occurred during fetchJobLogs]: \n\n${error.message}`], - })); - }, [currType, isPeerSide, job.id, job.k8sName, workflow?.uuid]); + }, + [currType, isPeerSide, job.id, job.k8sName, workflow?.uuid, participantId], + ); return ( - <Container> - <HeadingRow align="middle"> - <Heading>{t('workflow.label_job_logs')}</Heading> + <div className={styled.container}> + <Row align="center" className={styled.heading_row}> + <h3 className={styled.heading}>{t('workflow.label_job_logs')}</h3> - <Radio.Group value={currType} size="small" onChange={onTypeChange}> - {!isPeerSide && <Radio.Button value={LogType.Logs}>Logs</Radio.Button>} - <Radio.Button value={LogType.Events}>Events</Radio.Button> + <Radio.Group value={currType} size="small" onChange={onTypeChange} type="button"> + {!isPeerSide && <Radio value={LogType.Logs}>Logs</Radio>} + <Radio value={LogType.Events}>Events</Radio> </Radio.Group> - </HeadingRow> + </Row> - <PrintJobLogs + <PrintLogs height="350" queryKey={['getJobLogs', currType, job.id]} logsFetcher={fetchLogsOrEvents} @@ -90,13 +81,13 @@ const JobExecutionLogs: FC<Props> = ({ job, enabled, isPeerSide, workflow }) => fullscreenVisible onFullscreenClick={goFullScreen} /> - </Container> + </div> ); async function goFullScreen() { if (isPeerSide) { return window.open( - `/v2/logs/job/events/peer/${job.k8sName}/${workflow?.uuid}`, + `/v2/logs/job/events/peer/${job.k8sName}/${workflow?.uuid}/${participantId ?? 
''}`, '_blank noopener', ); } @@ -108,8 +99,8 @@ const JobExecutionLogs: FC<Props> = ({ job, enabled, isPeerSide, workflow }) => window.open(`/v2/logs/job/${job.id}`, '_blank noopener'); } - function onTypeChange(event: RadioChangeEvent) { - setType(event.target.value); + function onTypeChange(value: any) { + setType(value); } }; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.module.less new file mode 100644 index 000000000..83fe705fd --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.module.less @@ -0,0 +1,43 @@ +@import '~styles/mixins.less'; + +.container { + margin-top: 30px; +} + +.chart_container { + height: 480px; + overflow: hidden; + text-align: center; + line-height: 200px; +} + +.placeholder { + display: flex; + flex-direction: column; + width: 280px; + height: 400px; + margin: auto; + + > img { + width: 230px; + margin-top: auto; + } +} + +.metric_not_public { + .MixinFlexAlignCenter(); + display: flex; + height: 160px; +} + +.explaination { + margin-top: 10px; + font-size: 12px; + text-align: center; + color: var(--textColorSecondary); +} + +.cta_button { + display: block; + margin: 0 auto auto; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.tsx index 640c76697..740fe5fba 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionMetrics.tsx @@ -1,96 +1,40 @@ -import { loadScript } from 'shared/helpers'; import React, { FC, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import getMetricsSVG from 'assets/images/get-metrics.svg'; import emptySVG from 'assets/images/empty.svg'; -import styled from 'styled-components'; -import { Button, message, Spin } from 'antd'; +import styled from './JobExecutionMetrics.module.less'; +import { Button, Message, Spin } from '@arco-design/web-react'; import { useQuery } from 'react-query'; import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; import { fetchJobMpld3Metrics, fetchPeerJobMpld3Metrics } from 'services/workflow'; import queryClient from 'shared/queryClient'; import { Workflow } from 'typings/workflow'; -import { MixinFlexAlignCenter } from 'styles/mixins'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; - -const Container = styled.div` - margin-top: 30px; - margin-bottom: 20px; -`; -const Header = styled.h3``; -const ChartContainer = styled.div` - height: 480px; - overflow: hidden; - text-align: center; - line-height: 200px; -`; -const Placeholder = styled.div` - display: flex; - flex-direction: column; - width: 280px; - height: 400px; - margin: auto; - - > img { - width: 230px; - margin-top: auto; - } -`; -const MetricsNotPublic = styled.div` - ${MixinFlexAlignCenter()} - display: flex; - height: 160px; -`; -const Explaination = styled.p` - margin-top: 10px; - font-size: 12px; - text-align: center; - color: var(--textColorSecondary); -`; -const CTAButton = styled(Button)` - display: block; - margin: 0 auto auto; -`; - -declare global { - interface Window { - mpld3: { - draw_figure: (containerId: string, data: any) => void; - remove_figure: (containerId: string) => void; - }; - } -} +import ErrorBoundary from 'components/ErrorBoundary'; + type Props = { 
workflow?: Workflow; job: JobNodeRawData; isPeerSide: boolean; visible?: boolean; + participantId?: ID; }; -const JobExecutionMetrics: FC<Props> = ({ job, workflow, visible, isPeerSide }) => { +const JobExecutionMetrics: FC<Props> = ({ job, workflow, visible, isPeerSide, participantId }) => { const { t } = useTranslation(); - const [mpld3, setMpld3Intance] = useState((null as any) as Window['mpld3']); const [chartsVisible, setChartsVisible] = useState(false); - // Load deps only one time - useEffect(() => { - _loadDependencies().then(() => { - setMpld3Intance(window.mpld3); - }); - }, []); - const metricsQ = useQuery(['fetchMetrics', job.id, chartsVisible, isPeerSide], fetcher, { refetchOnWindowFocus: false, cacheTime: 60 * 60 * 1000, retry: 2, - enabled: chartsVisible && visible && Boolean(mpld3), + enabled: chartsVisible && visible, }); const chartMetrics = metricsQ.data; if (metricsQ.isError) { - message.error((metricsQ.error as any)?.message); + Message.error((metricsQ.error as any)?.message); } /** @@ -105,25 +49,37 @@ const JobExecutionMetrics: FC<Props> = ({ job, workflow, visible, isPeerSide }) // Chart render effect useEffect(() => { - if (chartMetrics?.data) { - if (!mpld3) { - message.warn(t('workflow.msg_chart_deps_loading')); - return; - } - clearChart(); - try { - chartMetrics.data.forEach((metric, index) => { - const chartId = _targetChartId(index); - setTimeout(() => { - mpld3.draw_figure(chartId, metric); - }, 20); - }); - } catch (error) { - message.error(error.message); - } - } + import('mpld3/d3.v5.min.js') + .then((d3) => { + window.d3 = d3; + }) + .then(() => import('mpld3')) + .then((mpld3) => { + if (chartMetrics?.data) { + if (!mpld3) { + Message.warning(t('workflow.msg_chart_deps_loading')); + return; + } + chartMetrics.data.forEach((_, index) => { + setTimeout(() => { + mpld3.remove_figure(_targetChartId(index)); + }, 20); + }); + try { + chartMetrics.data.forEach((metric, index) => { + const chartId = _targetChartId(index); + setTimeout(() => { + mpld3.draw_figure(chartId, metric); + }, 20); + }); + } catch (error) { + Message.error(error.message); + } + } + }); + // eslint-disable-next-line react-hooks/exhaustive-deps - }, [chartMetrics, mpld3]); + }, [chartMetrics]); const isEmpty = chartMetrics?.data?.length === 0; const isPeerMetricsPublic = isPeerSide && workflow?.metric_is_public; @@ -131,62 +87,57 @@ const JobExecutionMetrics: FC<Props> = ({ job, workflow, visible, isPeerSide }) return ( <ErrorBoundary> - <Container data-display-chart={chartsVisible}> - <Header>{t('workflow.label_job_metrics')}</Header> + <div data-display-chart={chartsVisible} className={styled.container}> + <h3>{t('workflow.label_job_metrics')}</h3> {!metricsVisible && ( - <MetricsNotPublic> - <Explaination>{t('workflow.placeholder_metric_not_public')}</Explaination> - </MetricsNotPublic> + <div className={styled.metric_not_public}> + <p className={styled.explaination}>{t('workflow.placeholder_metric_not_public')}</p> + </div> )} {!chartMetrics && metricsVisible && ( - <Spin spinning={metricsQ.isFetching}> - <Placeholder> + <Spin loading={metricsQ.isFetching} style={{ width: '100%' }}> + <div className={styled.placeholder}> <img src={getMetricsSVG} alt="fetch-metrics" /> - <Explaination>{t('workflow.placeholder_fetch_metrics')}</Explaination> - <CTAButton type="primary" onClick={() => setChartsVisible(true)}> + <p className={styled.explaination}>{t('workflow.placeholder_fetch_metrics')}</p> + <Button + className={styled.cta_button} + type="primary" + onClick={() => 
setChartsVisible(true)} + > {t('workflow.btn_fetch_metrics')} - </CTAButton> - </Placeholder> + </Button> + </div> </Spin> )} {isEmpty && ( - <Placeholder> + <div className={styled.placeholder}> <img src={emptySVG} alt="fetch-metrics" /> - <Explaination> {t('workflow.placeholder_no_metrics')}</Explaination> - <CTAButton + <p className={styled.explaination}> {t('workflow.placeholder_no_metrics')}</p> + <Button + className={styled.cta_button} loading={metricsQ.isFetching} type="primary" onClick={() => metricsQ.refetch()} > {t('workflow.btn_retry')} - </CTAButton> - </Placeholder> + </Button> + </div> )} {chartMetrics?.data?.map((_, index) => { - return <ChartContainer id={_targetChartId(index)} />; + return <div className={styled.chart_container} id={_targetChartId(index)} />; })} - </Container> + </div> </ErrorBoundary> ); - function clearChart() { - if (!chartMetrics || !mpld3) return; - - chartMetrics.data.forEach((_, index) => { - setTimeout(() => { - mpld3.remove_figure(_targetChartId(index)); - }, 20); - }); - } - function fetcher() { if (isPeerSide) { if (workflow && workflow.uuid) { - return fetchPeerJobMpld3Metrics(workflow.uuid, job.k8sName || job.name); + return fetchPeerJobMpld3Metrics(workflow.uuid, job.k8sName || job.name, participantId ?? 0); } throw new Error(t('workflow.msg_lack_workflow_infos')); @@ -195,12 +146,6 @@ const JobExecutionMetrics: FC<Props> = ({ job, workflow, visible, isPeerSide }) } }; -function _loadDependencies() { - return loadScript('https://d3js.org/d3.v5.min.js').then(() => { - return loadScript('https://mpld3.github.io/js/mpld3.v0.5.2.js'); - }); -} - function _targetChartId(index: number) { return `chart_${index}`; } diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.module.less new file mode 100644 index 000000000..13986a3ca --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.module.less @@ -0,0 +1,3 @@ +.container { + margin-top: 30px; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.tsx index 753113176..e9b41c8a6 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobExecutionPods.tsx @@ -1,67 +1,26 @@ import React, { FC } from 'react'; -import styled from 'styled-components'; -import { Table } from 'antd'; -import { Pod, PodState } from 'typings/job'; +import styled from './JobExecutionPods.module.less'; +import { Table, Button } from '@arco-design/web-react'; +import { JobExecutionDetalis, Pod, PodState } from 'typings/job'; import i18n from 'i18n'; -import { Button } from 'antd'; -import StateIndicator, { StateTypes } from 'components/StateIndicator'; +import StateIndicator from 'components/StateIndicator'; import { useTranslation } from 'react-i18next'; -import { JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; import ClickToCopy from 'components/ClickToCopy'; - -const Container = styled.div` - margin-top: 30px; -`; - -const stateType: { [key: string]: StateTypes } = { - [PodState.SUCCEEDED]: 'success', - [PodState.RUNNING]: 'processing', - [PodState.FAILED]: 'error', - [PodState.PENDING]: 'warning', - [PodState.UNKNOWN]: 'default', - [PodState.FAILED_AND_FREED]: 'warning', - [PodState.SUCCEEDED_AND_FREED]: 'success', - // Deprecated state values - 
[PodState.SUCCEEDED__deprecated]: 'success', - [PodState.RUNNING__deprecated]: 'processing', - [PodState.FAILED__deprecated]: 'error', - [PodState.PENDING__deprecated]: 'warning', - [PodState.UNKNOWN__deprecated]: 'default', - [PodState.SUCCEEDED_AND_FREED__deprecated]: 'warning', - [PodState.FAILED_AND_FREED__deprecated]: 'success', -}; -const stateText: { [key: string]: string } = { - [PodState.SUCCEEDED]: i18n.t('workflow.job_node_success'), - [PodState.RUNNING]: i18n.t('workflow.job_node_running'), - [PodState.FAILED]: i18n.t('workflow.job_node_failed'), - [PodState.PENDING]: i18n.t('workflow.job_node_waiting'), - [PodState.UNKNOWN]: i18n.t('workflow.pod_unknown'), - [PodState.FAILED_AND_FREED]: i18n.t('workflow.pod_failed_cleared'), - [PodState.SUCCEEDED_AND_FREED]: i18n.t('workflow.pod_success_cleared'), - // Deprecated state values - [PodState.SUCCEEDED__deprecated]: i18n.t('workflow.job_node_success'), - [PodState.RUNNING__deprecated]: i18n.t('workflow.job_node_running'), - [PodState.FAILED__deprecated]: i18n.t('workflow.job_node_failed'), - [PodState.PENDING__deprecated]: i18n.t('workflow.job_node_waiting'), - [PodState.UNKNOWN__deprecated]: i18n.t('workflow.pod_unknown'), - [PodState.SUCCEEDED_AND_FREED__deprecated]: i18n.t('workflow.pod_failed_cleared'), - [PodState.FAILED_AND_FREED__deprecated]: i18n.t('workflow.pod_success_cleared'), -}; +import { getPodState } from '../shared'; type Props = { - job: JobNodeRawData; + job?: JobExecutionDetalis; isPeerSide: boolean; + loading: boolean; }; -const JobExecutionPods: FC<Props> = ({ job, isPeerSide }) => { +const JobExecutionPods: FC<Props> = ({ job, isPeerSide, loading }) => { const { t } = useTranslation(); - let data = job.pods; - - if (!Array.isArray(job.pods)) { + let data = job?.pods; + if (!Array.isArray(data)) { data = []; } - const tablecolumns = [ { title: i18n.t('workflow.col_pod_name'), @@ -72,31 +31,35 @@ const JobExecutionPods: FC<Props> = ({ job, isPeerSide }) => { return <ClickToCopy text={val}>{val}</ClickToCopy>; }, }, - { - title: i18n.t('workflow.col_pod_ip'), - dataIndex: 'pod_ip', - key: 'pod_ip', - }, + !isPeerSide && + ({ + title: i18n.t('workflow.col_pod_ip'), + dataIndex: 'pod_ip', + key: 'pod_ip', + sorter(a: Pod, b: Pod) { + return a.pod_ip.localeCompare(b.pod_ip); + }, + } as any), { title: i18n.t('workflow.col_worker_status'), dataIndex: 'state', key: 'state', + sorter(a: Pod, b: Pod) { + return a.state.localeCompare(b.state); + }, render: (_: PodState, record: Pod) => { - const val = record.state ?? 
record.status; - - let tip: string = ''; - if ([PodState.FAILED, PodState.PENDING].includes(record.state)) { - tip = record.message || ''; - } - return <StateIndicator type={stateType[val]} text={stateText[val]} tip={tip} />; + return <StateIndicator {...getPodState(record)} />; }, }, { title: i18n.t('workflow.col_worker_type'), dataIndex: 'pod_type', key: 'pod_type', + sorter(a: Pod, b: Pod) { + return a.pod_type.localeCompare(b.pod_type); + }, }, - ]; + ].filter(Boolean); if (!isPeerSide) { tablecolumns.push({ @@ -107,7 +70,7 @@ const JobExecutionPods: FC<Props> = ({ job, isPeerSide }) => { render: (_: any, record: Pod) => { return ( <div style={{ marginLeft: '-13px' }}> - <Button type="link" size="small" onClick={() => goInspectLogs(record)}> + <Button type="text" size="small" onClick={() => goInspectLogs(record)}> {i18n.t('workflow.btn_inspect_logs')} </Button> </div> @@ -117,14 +80,14 @@ const JobExecutionPods: FC<Props> = ({ job, isPeerSide }) => { } return ( - <Container> + <div className={styled.container}> <h3>{t('workflow.label_pod_list')}</h3> - <Table dataSource={data || []} columns={tablecolumns} size="small" /> - </Container> + <Table loading={loading} data={data || []} columns={tablecolumns} size="small" /> + </div> ); function goInspectLogs(pod: Pod) { - window.open(`/v2/logs/pod/${job.id}/${pod.name}`, '_blank noopener'); + window.open(`/v2/logs/pod/${job?.id}/${pod.name}`, '_blank noopener'); } }; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/AggregatorSelect.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/AggregatorSelect.tsx index 9399e253f..2c8ab4c77 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/AggregatorSelect.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/AggregatorSelect.tsx @@ -1,7 +1,7 @@ import React, { FC } from 'react'; import { useTranslation } from 'react-i18next'; import { KibanaAggregator, KibanaChartType } from 'typings/kibana'; -import { Select } from 'antd'; +import { Select } from '@arco-design/web-react'; const AggregatorChoices: Partial<Record<KibanaChartType, any[]>> = { [KibanaChartType.Timer]: [ diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/IntervalInput.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/IntervalInput.tsx index e7316b8ea..bce202f0c 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/IntervalInput.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/IntervalInput.tsx @@ -1,6 +1,6 @@ -import React, { ChangeEvent, FC, useState } from 'react'; +import React, { FC, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { Input, Select } from 'antd'; +import { Input, Select } from '@arco-design/web-react'; const { Option } = Select; @@ -21,7 +21,7 @@ const IntervalInput: FC<Props> = ({ value, onChange }) => { placeholder={t('workflow.placeholder_interval')} defaultValue={n} onChange={onInputChange} - addonAfter={ + addAfter={ <Select defaultValue={unit} onChange={onSelectChange} style={{ width: '66px' }}> <Option value="m">分</Option> <Option value="h">小时</Option> @@ -33,15 +33,14 @@ const IntervalInput: FC<Props> = ({ value, onChange }) => { } /> ); - function onInputChange(evt: 
ChangeEvent<HTMLInputElement>) { - const val = evt.target.value; - setN(val); + function onInputChange(value: string) { + setN(value); if (!localUnit) { onChange && onChange(undefined); return; } - onChange && onChange(val + localUnit); + onChange && onChange(value + localUnit); } function onSelectChange(value: string) { setUnit(value); diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/JsonStringInput.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/JsonStringInput.tsx index b5b421ba4..eb6d62237 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/JsonStringInput.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/JsonStringInput.tsx @@ -1,6 +1,6 @@ import React, { FC } from 'react'; import { useTranslation } from 'react-i18next'; -import { Input } from 'antd'; +import { Input } from '@arco-design/web-react'; type Props = { value?: string; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/TimerNameInput.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/TimerNameInput.tsx index e4117c71a..3148eb6b6 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/TimerNameInput.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/TimerNameInput.tsx @@ -1,6 +1,5 @@ import React, { FC } from 'react'; -import { useTranslation } from 'react-i18next'; -import { Select } from 'antd'; +import { Select } from '@arco-design/web-react'; type Props = { value?: string; @@ -8,15 +7,14 @@ type Props = { }; const TimerNameInput: FC<Props> = ({ value, onChange }) => { - const { t } = useTranslation(); - return ( <Select - mode="tags" + mode="multiple" + allowCreate={true} defaultValue={value?.split(',')} onChange={onTimersChange} - placeholder={t('workflow.placeholder_timers')} - notFoundContent={t('workflow.placeholder_kibana_timer')} + placeholder="Timers" + notFoundContent={<span style={{ paddingLeft: '10px' }}>{'Enter multiple timer names'}</span>} /> ); diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/UnixTimePicker.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/UnixTimePicker.tsx index 7744e93ac..cffe36d11 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/UnixTimePicker.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/UnixTimePicker.tsx @@ -1,5 +1,5 @@ import React, { FC } from 'react'; -import { DatePicker } from 'antd'; +import { DatePicker } from '@arco-design/web-react'; import { disableFuture } from 'shared/date'; import dayjs from 'dayjs'; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/XAxisInput.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/XAxisInput.tsx index 0a03cdefa..f1183d5c9 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/XAxisInput.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/FieldComponents/XAxisInput.tsx @@ -1,6 +1,6 @@ import React, { FC } from 'react'; import { useTranslation } from 'react-i18next'; -import { Input } from
'antd'; +import { Input } from '@arco-design/web-react'; type Props = { value?: string; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.module.less new file mode 100644 index 000000000..f3bf6795e --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.module.less @@ -0,0 +1,15 @@ +.embedded_frame { + width: 200%; + height: 600px; + border: none; + flex-shrink: 0; + transform: scale(0.5); + transform-origin: 0 0; +} + +.controls_container { + position: absolute; + z-index: 2; + right: 10px; + bottom: 20px; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.tsx index 4a8fc004d..5a62d912e 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/EmbeddedChart.tsx @@ -1,19 +1,9 @@ import React, { FC, useContext, useEffect, useRef } from 'react'; -import styled from 'styled-components'; +import styled from './EmbeddedChart.module.less'; import { useToggle } from 'react-use'; import { JobExecutionDetailsContext } from '../../JobExecutionDetailsDrawer'; import { ControlButton } from 'styles/elements'; -import { ControlsContainer } from '../elements'; -import { Pen, ShareInternal } from 'components/IconPark'; - -const EmbeddedFrame = styled.iframe` - width: 200%; - height: 600px; - border: none; - flex-shrink: 0; - transform: scale(0.5); - transform-origin: 0 0; -`; +import { IconPen, IconShareInternal } from '@arco-design/web-react/icon'; type Props = { src?: string; isFill?: boolean; onEditParams: any; onOpenNewWindow: any }; @@ -29,21 +19,24 @@ const KibanaEmbeddedChart: FC<Props> = ({ src, isFill, onEditParams, onOpenNewWi return ( <div style={{ width: '100%' }}> - <EmbeddedFrame + <iframe + title="kibana-iframe" + className={styled.embedded_frame} + // eslint-disable-next-line jsx-a11y/aria-role role="kibana-iframe" ref={(el) => (iframeRef.current = el)} src={src ? 
`${src}&embed=true` : undefined} onLoad={onLoaded} /> {isFill && ( - <ControlsContainer> + <div className={styled.controls_container}> <ControlButton onClick={onEditParams}> - <Pen /> + <IconPen /> </ControlButton> <ControlButton onClick={() => onOpenNewWindow(src)}> - <ShareInternal /> + <IconShareInternal /> </ControlButton> - </ControlsContainer> + </div> )} </div> ); diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.module.less new file mode 100644 index 000000000..b067ec5ff --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.module.less @@ -0,0 +1,6 @@ +.controls_container { + position: absolute; + z-index: 2; + right: 10px; + bottom: 20px; +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.tsx index 3e8a021a9..dab316861 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaChart/LineChart.tsx @@ -2,10 +2,10 @@ import React, { FC, memo, useMemo } from 'react'; import { Line } from 'react-chartjs-2'; import { KiabanaMetrics } from 'typings/kibana'; import { formatTimestamp } from 'shared/date'; -import defaultTheme from 'styles/_theme'; -import { ControlsContainer } from '../elements'; -import { Pen } from 'components/IconPark'; +import { IconPen } from '@arco-design/web-react/icon'; import { ControlButton } from 'styles/elements'; +import defaultTheme from 'styles/theme'; +import styled from './LineChart.module.less'; type Props = { metrics: KiabanaMetrics; @@ -44,24 +44,17 @@ const KibanaLineChart: FC<Props> = memo(({ metrics, label, isFill, onEditParams * we give two size of chart on purpose for different size of container */} {isFill ? 
( - <Line key="fullscreen" type="line" data={data} options={OPTIONS} width={900} height={300} /> + <Line key="fullscreen" data={data} options={OPTIONS} width={900} height={300} /> ) : ( - <Line - key="non-fullscreen" - type="line" - data={data} - options={OPTIONS} - width={435} - height={300} - /> + <Line key="non-fullscreen" data={data} options={OPTIONS} width={435} height={300} /> )} {isFill && ( - <ControlsContainer> + <div className={styled.controls_container}> <ControlButton onClick={() => onEditParams()}> - <Pen /> + <IconPen /> </ControlButton> - </ControlsContainer> + </div> )} </div> ); diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.module.less new file mode 100644 index 000000000..13e92a78c --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.module.less @@ -0,0 +1,39 @@ +.container { + position: relative; + padding: 20px 20px 10px; + border: 1px solid var(--lineColor); + border-radius: 4px; + margin-top: 20px; +} + +.not_loaded_placeholder { + position: absolute; + max-width: 50%; + left: 50%; + top: 50%; + transform: translate(-50%, -50%); + font-size: 12px; + color: var(--textColorSecondary); + text-align: center; +} + +.chart_container { + position: relative; + display: flex; + align-items: center; + flex-direction: column; + width: 100%; + height: 300px; + overflow: hidden; + margin-top: 15px; + background-color: var(--backgroundColor); + transition: height 0.4s cubic-bezier(0.4, 0, 0.2, 1); + + &[data-is-fill='true'] { + > [role='kibana-iframe'] { + width: 100%; + height: 100%; + transform: none; + } + } +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.tsx index 069e008e6..af8a2ef3c 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaItem.tsx @@ -1,8 +1,8 @@ import React, { FC, memo, useContext, useEffect, useState } from 'react'; -import styled from 'styled-components'; +import styled from './KibanaItem.module.less'; import KibanaParamsForm from './KibanaParamsForm'; import KibanaEmbeddedChart from './KibanaChart/EmbeddedChart'; -import { Col, message, Row, Spin } from 'antd'; +import { Grid, Message, Spin } from '@arco-design/web-react'; import { JobType } from 'typings/job'; import { fetchJobEmbedKibanaSrc, fetchPeerKibanaMetrics } from 'services/workflow'; import { JobExecutionDetailsContext } from '../JobExecutionDetailsDrawer'; @@ -11,7 +11,9 @@ import { KiabanaMetrics, KibanaChartType, KibanaQueryParams } from 'typings/kiba import { useToggle } from 'react-use'; import { useTranslation } from 'react-i18next'; import KibanaLineChart from './KibanaChart/LineChart'; -import { NotLoadedPlaceholder, ChartContainer } from './elements'; + +const Row = Grid.Row; +const Col = Grid.Col; const { Rate, Ratio, Numeric, Time, Timer } = KibanaChartType; @@ -19,17 +21,6 @@ const typesForPeerSideJob = [Ratio, Numeric]; const typesForDataJoinJob = [Rate, Ratio, Numeric, Time, Timer]; const typesForNonDataJoinJob = [Ratio, Numeric, Time, Timer]; -const Container = styled.div` - position: relative; - padding: 20px 20px 10px; - border: 1px solid var(--lineColor); - border-radius: 4px; - - & + & { - margin-top: 20px; 
- } -`; - const KibanaItem: FC = memo(() => { /** Need a empty string as placeholder on left side */ const { t } = useTranslation(); @@ -38,7 +29,7 @@ const KibanaItem: FC = memo(() => { const [configuring, toggleConfiguring] = useToggle(true); const [fetching, toggleFetching] = useToggle(false); - const { isPeerSide, job, workflow } = useContext(JobExecutionDetailsContext); + const { isPeerSide, job, workflow, participantId } = useContext(JobExecutionDetailsContext); useEffect(() => { setEmbedSrcs([]); @@ -49,15 +40,15 @@ const KibanaItem: FC = memo(() => { const isEmpty = isPeerSide ? metrics.length === 0 : embedSrcs.length === 0; return ( - <Container> + <div className={styled.container}> <Row gutter={20}> <Col span={configuring ? 12 : 24}> - <Spin spinning={fetching}> - <ChartContainer data-is-fill={!configuring}> + <Spin loading={fetching} style={{ width: '100%' }}> + <div className={styled.chart_container} data-is-fill={!configuring}> {isEmpty ? ( - <NotLoadedPlaceholder> + <div className={styled.not_loaded_placeholder}> {t('workflow.placeholder_fill_kibana_form')} - </NotLoadedPlaceholder> + </div> ) : isPeerSide ? ( <KibanaLineChart isFill={!configuring} @@ -77,7 +68,7 @@ const KibanaItem: FC = memo(() => { ))} </> )} - </ChartContainer> + </div> </Spin> </Col> @@ -98,7 +89,7 @@ const KibanaItem: FC = memo(() => { </Col> )} </Row> - </Container> + </div> ); async function fetchEmbedSrcList(values: KibanaQueryParams): Promise<string[]> { @@ -106,7 +97,7 @@ const KibanaItem: FC = memo(() => { const [res, err] = await to(fetchJobEmbedKibanaSrc(job.id, values)); toggleFetching(false); if (err) { - message.error(err.message); + Message.error(err.message); return []; } @@ -114,7 +105,7 @@ const KibanaItem: FC = memo(() => { return res.data; } - message.warn(t('workflow.msg_no_available_kibana')); + Message.warning(t('workflow.msg_no_available_kibana')); return []; } @@ -123,11 +114,11 @@ const KibanaItem: FC = memo(() => { async function fetchMetrics(values: KibanaQueryParams) { toggleFetching(true); const [res, err] = await to( - fetchPeerKibanaMetrics(workflow?.uuid!, job.k8sName || job.name, values), + fetchPeerKibanaMetrics(workflow?.uuid!, job.k8sName || job.name, participantId ?? 
0, values), ); toggleFetching(false); if (err) { - message.error(err.message); + Message.error(err.message); return; } diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaParamsForm.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaParamsForm.tsx index 32518c9f9..2251d272f 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaParamsForm.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/KibanaParamsForm.tsx @@ -1,7 +1,7 @@ import React, { FC, useContext, useState } from 'react'; import styled from 'styled-components'; import { useTranslation } from 'react-i18next'; -import { Form, Select, Input, Row, Col, Button, Switch } from 'antd'; +import { Form, Select, Input, Grid, Button, Switch } from '@arco-design/web-react'; import { KibanaChartType, KibanaQueryFields, KibanaQueryParams } from 'typings/kibana'; import { JobType } from 'typings/job'; import IntervalInput from './FieldComponents/IntervalInput'; @@ -12,9 +12,12 @@ import TimerNameInput from './FieldComponents/TimerNameInput'; import AggregatorSelect from './FieldComponents/AggregatorSelect'; import GridRow from 'components/_base/GridRow'; import FormLabel from 'components/FormLabel'; -import { ShareInternal } from 'components/IconPark'; +import { IconShareInternal } from '@arco-design/web-react/icon'; import { JobExecutionDetailsContext } from '../JobExecutionDetailsDrawer'; +const Row = Grid.Row; +const Col = Grid.Col; + const Container = styled.div``; const FieldToComponentMap: Partial<Record<KibanaQueryFields, { use: any; help?: string }>> = { @@ -134,12 +137,12 @@ const KibanaParamsForm: FC<Props> = ({ types, onPreview, onNewWindowPreview, onC form={formInstance} layout="vertical" initialValues={initialValues} - onFinish={onFinish} + onSubmit={onFinish} onValuesChange={onValuesChange} > <Row gutter={20}> <Col span={12}> - <Form.Item label="Type" name="type"> + <Form.Item label="Type" field="type"> <Select> {types.map((type) => ( <Select.Option key={type} value={type}> @@ -152,12 +155,11 @@ const KibanaParamsForm: FC<Props> = ({ types, onPreview, onNewWindowPreview, onC {fieldsConfig && fieldsConfig.fields.map((field) => { const Component = FieldToComponentMap[field]?.use || Input; - return ( <Col key={field} span={12}> <Form.Item label={<FormLabel label={field} tooltip={FieldToComponentMap[field]?.help} />} - name={field} + field={field} > <Component type={chartType} /> </Form.Item> @@ -169,13 +171,13 @@ const KibanaParamsForm: FC<Props> = ({ types, onPreview, onNewWindowPreview, onC <Form.Item> <GridRow gap={16} top="12" justify="end"> {!isPeerSide && ( - <Button type="link" icon={<ShareInternal />} onClick={onNewWindowPreviewClick}> + <Button type="text" icon={<IconShareInternal />} onClick={onNewWindowPreviewClick}> {t('workflow.btn_preview_kibana_fullscreen')} </Button> )} <Button onClick={onPreviewClick}>{t('workflow.btn_preview_kibana')}</Button> - <Button type="primary" htmlType="submit" size="middle"> + <Button type="primary" htmlType="submit" size="small"> {t('confirm')} </Button> </GridRow> @@ -191,10 +193,10 @@ const KibanaParamsForm: FC<Props> = ({ types, onPreview, onNewWindowPreview, onC setFormData(values); } function onPreviewClick() { - onPreview(formInstance.getFieldsValue(true)); + onPreview(formInstance.getFieldsValue()); } function onNewWindowPreviewClick() { - onNewWindowPreview(formInstance.getFieldsValue(true)); + 
onNewWindowPreview(formInstance.getFieldsValue()); } }; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/elements.ts b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/elements.ts deleted file mode 100644 index d5c9ab0ec..000000000 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/elements.ts +++ /dev/null @@ -1,43 +0,0 @@ -import styled from 'styled-components'; -import { MixinCommonTransition } from 'styles/mixins'; - -export const ChartContainer = styled.div` - ${MixinCommonTransition('height')}; - - position: relative; - display: flex; - align-items: center; - flex-direction: column; - width: 100%; - height: 300px; - overflow: hidden; - background-color: var(--backgroundColor); - - & + & { - margin-top: 15px; - } - - &[data-is-fill='true'] { - > [role='kibana-iframe'] { - width: 100%; - height: 100%; - transform: none; - } - } -`; -export const NotLoadedPlaceholder = styled.div` - position: absolute; - max-width: 50%; - left: 50%; - top: 50%; - transform: translate(-50%, -50%); - font-size: 12px; - color: var(--textColorSecondary); - text-align: center; -`; -export const ControlsContainer = styled.div` - position: absolute; - z-index: 2; - right: 10px; - bottom: 20px; -`; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.module.less new file mode 100644 index 000000000..df37b5a4c --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.module.less @@ -0,0 +1,13 @@ +@import '~styles/mixins.less'; + +.add_chart_button { + width: 250px; +} + +.metric_not_public { + .MixinFlexAlignCenter(); + display: flex; + height: 160px; + font-size: 12px; + color: var(--textColorSecondary); +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.tsx new file mode 100644 index 000000000..7774cafc0 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.tsx @@ -0,0 +1,62 @@ +import React, { FC, useContext, useEffect, useState } from 'react'; +import styled from './index.module.less'; +import { useTranslation } from 'react-i18next'; +import { JobExecutionDetalis } from 'typings/job'; +import { Button } from '@arco-design/web-react'; +import { IconPlus } from '@arco-design/web-react/icon'; +import KibanaItem from './KibanaItem'; +import GridRow from 'components/_base/GridRow'; +import { giveWeakRandomKey } from 'shared/helpers'; +import { JobExecutionDetailsContext } from '../JobExecutionDetailsDrawer'; + +type Props = { + job: JobExecutionDetalis; + isPeerSide?: boolean; +}; + +const JobKibanaMetrics: FC<Props> = ({ job }) => { + const { t } = useTranslation(); + const [queryList, setQueryList] = useState([giveWeakRandomKey()]); + + const { isPeerSide, workflow } = useContext(JobExecutionDetailsContext); + + useEffect(() => { + setQueryList([giveWeakRandomKey()]); + }, [job?.id]); + + const isPeerMetricsPublic = (isPeerSide && workflow?.metric_is_public) || !isPeerSide; + + if (!isPeerMetricsPublic) { + return ( + <div className={styled.metric_not_public}>{t('workflow.placeholder_metric_not_public')}</div> + ); + } + + return ( + <section> + {queryList.map((key) => ( + <KibanaItem key={key} /> + ))} + + <GridRow justify="center" top="20"> + <Button + 
className={styled.add_chart_button} + type="primary" + icon={<IconPlus />} + size="large" + onClick={onAddClick} + > + {t('workflow.btn_add_kibana_chart')} + </Button> + </GridRow> + </section> + ); + + function onAddClick() { + const nextList = [...queryList, giveWeakRandomKey()]; + + setQueryList(nextList); + } +}; + +export default JobKibanaMetrics; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.tsx.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.tsx.tsx deleted file mode 100644 index 1ab0f4390..000000000 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/JobKibanaMetrics/index.tsx.tsx +++ /dev/null @@ -1,66 +0,0 @@ -import React, { FC, useContext, useEffect, useState } from 'react'; -import styled from 'styled-components'; -import { useTranslation } from 'react-i18next'; -import { JobExecutionDetalis } from 'typings/job'; -import { Button } from 'antd'; -import { Plus } from 'components/IconPark'; -import KibanaItem from './KibanaItem'; -import GridRow from 'components/_base/GridRow'; -import { giveWeakRandomKey } from 'shared/helpers'; -import { MixinFlexAlignCenter } from 'styles/mixins'; -import { JobExecutionDetailsContext } from '../JobExecutionDetailsDrawer'; - -const AddChartButton = styled(Button)` - width: 250px; -`; -const MetricsNotPublic = styled.div` - ${MixinFlexAlignCenter()} - display: flex; - height: 160px; - font-size: 12px; - color: var(--textColorSecondary); -`; - -type Props = { - job: JobExecutionDetalis; - isPeerSide?: boolean; -}; - -const JobKibanaMetrics: FC<Props> = ({ job }) => { - const { t } = useTranslation(); - const [queryList, setQueryList] = useState([giveWeakRandomKey()]); - - const { isPeerSide, workflow } = useContext(JobExecutionDetailsContext); - - useEffect(() => { - setQueryList([giveWeakRandomKey()]); - }, [job?.id]); - - const isPeerMetricsPublic = isPeerSide && workflow?.metric_is_public; - - if (!isPeerMetricsPublic) { - return <MetricsNotPublic>{t('workflow.placeholder_metric_not_public')}</MetricsNotPublic>; - } - - return ( - <section> - {queryList.map((key) => ( - <KibanaItem key={key} /> - ))} - - <GridRow justify="center" top="20"> - <AddChartButton type="primary" icon={<Plus />} size="large" onClick={onAddClick}> - {t('workflow.btn_add_kibana_chart')} - </AddChartButton> - </GridRow> - </section> - ); - - function onAddClick() { - const nextList = [...queryList, giveWeakRandomKey()]; - - setQueryList(nextList); - } -}; - -export default JobKibanaMetrics; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.module.less b/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.module.less new file mode 100644 index 000000000..5a2c033d7 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.module.less @@ -0,0 +1,72 @@ +@import '~styles/mixins.less'; + +.chart_section { + --marginBottom: 12px; + display: flex; + margin-bottom: var(--marginBottom); +} + +.chart_container { + --chartHeaderHeight: 48px; + height: calc(var(--contentMinHeight) - var(--chartHeaderHeight) + var(--marginBottom)); + flex: 1; + + & + & { + margin-left: 16px; + } +} + +.header_card { + width: 100%; + padding: 8px; +} + +.header_row { + margin-bottom: 15px; + + &[data-forked='true'] { + margin-bottom: 25px; + } +} + +.name { + .MixinEllipsis(); + + max-width: 400px; + margin-bottom: 0; + font-size: 20px; +} + +.forked_form { + margin-top: -20px; + font-size: 12px; +} + +.origin_workflow_link { + margin-left: 5px; 
+} + +.no_jobs { + display: flex; + height: calc(100% - 48px); + background-color: rgb(var(--gray-1)); +} + +.chart_header { + height: var(--chartHeaderHeight); + padding: 0 20px; + font-size: 14px; + line-height: 22px; + background-color: white; +} + +.chart_title { + margin-bottom: 0; + + &::after { + margin-left: 25px; + content: attr(data-note); + font-size: 12px; + color: rgb(var(--gray-6)); + } +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.tsx b/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.tsx index bf3237fd0..6ef2f5360 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowDetail/index.tsx @@ -1,9 +1,14 @@ -import React, { FC, useState } from 'react'; -import styled from 'styled-components'; -import { Card, Spin, Row, Button, Col } from 'antd'; -import { useParams, Link } from 'react-router-dom'; +import React, { FC, useState, useMemo } from 'react'; +import styled from './index.module.less'; +import { Card, Spin, Grid, Button, Message } from '@arco-design/web-react'; +import { useParams, Link, useHistory } from 'react-router-dom'; import { useQuery } from 'react-query'; -import { getPeerWorkflowsConfig, getWorkflowDetailById } from 'services/workflow'; +import { + getPeerWorkflow, + getWorkflowDetailById, + getWorkflowDownloadHref, + PEER_WORKFLOW_DETAIL_QUERY_KEY, +} from 'services/workflow'; import WorkflowJobsCanvas from 'components/WorkflowJobsCanvas'; import { useTranslation } from 'react-i18next'; import WhichProject from 'components/WhichProject'; @@ -17,142 +22,170 @@ import { useToggle } from 'react-use'; import { JobNode, NodeData, JobNodeRawData } from 'components/WorkflowJobsCanvas/types'; import { useMarkFederatedJobs } from 'components/WorkflowJobsCanvas/hooks'; import PropertyList from 'components/PropertyList'; -import { Eye, EyeInvisible, Branch } from 'components/IconPark'; -import { WorkflowExecutionDetails } from 'typings/workflow'; +import { IconEye, IconEyeInvisible, IconBranch } from '@arco-design/web-react/icon'; +import { Workflow, WorkflowExecutionDetails, WorkflowState } from 'typings/workflow'; import { ReactFlowProvider } from 'react-flow-renderer'; -import { findJobExeInfoByJobDef, isRunning, isStopped } from 'shared/workflow'; +import { findJobExeInfoByJobDef } from 'shared/workflow'; import dayjs from 'dayjs'; import NoResult from 'components/NoResult'; import { CreateJobFlag } from 'typings/job'; import SharedPageLayout from 'components/SharedPageLayout'; - -const Container = styled.div` - display: flex; - flex-direction: column; - height: var(--contentMinHeight); -`; -const ChartSection = styled.section` - display: flex; - margin-top: 12px; - flex: 1; -`; -const ChartContainer = styled.div` - height: 100%; - flex: 1; - - & + & { - margin-left: 16px; - } -`; -const HeaderRow = styled(Row)` - margin-bottom: 15px; - - &[data-forked='true'] { - margin-bottom: 25px; - } -`; -const Name = styled.h3` - margin-bottom: 0; - font-size: 20px; - line-height: 1; -`; -const ForkedFrom = styled.div` - margin-top: -20px; - font-size: 12px; -`; -const OriginWorkflowLink = styled(Link)` - margin-left: 5px; -`; -// TODO: find a better way to define no-result's container -const NoJobs = styled.div` - display: flex; - height: calc(100% - 48px); - background-color: var(--gray1); -`; -const ChartHeader = styled(Row)` - height: 48px; - padding: 0 20px; - font-size: 14px; - line-height: 22px; - background-color: white; -`; -const ChartTitle = 
styled.h3` - margin-bottom: 0; - - &::after { - margin-left: 25px; - content: attr(data-note); - font-size: 12px; - color: var(--darkGray6); - } -`; +import GlobalConfigDrawer from './GlobalConfigDrawer'; +import LocalWorkflowNote from '../LocalWorkflowNote'; +import request from 'libs/request'; +import { saveBlob } from 'shared/helpers'; +import { formatExtra } from 'shared/modelCenter'; +import { VariablePermissionLegend } from 'components/VariblePermission'; +import { parseCron } from 'components/CronTimePicker'; +import { projectListQuery } from 'stores/project'; +import { useRecoilQuery } from 'hooks/recoil'; +import { TIME_INTERVAL, CONSTANTS } from 'shared/constants'; +import ClickToCopy from 'components/ClickToCopy'; +import { Tooltip } from '@arco-design/web-react'; + +const Row = Grid.Row; +const Col = Grid.Col; const WorkflowDetail: FC = () => { const { t } = useTranslation(); - const params = useParams<{ id: string }>(); + const history = useHistory(); + const params = useParams<{ + id: string; + type?: 'model-management' | 'model-evaluation' | 'offline-prediction'; + /** dataset kind */ + kind_label?: 'raw' | 'processed'; + }>(); const [peerJobsVisible, togglePeerJobsVisible] = useToggle(false); - const [drawerVisible, toggleDrawerVisible] = useToggle(false); + const [jobExecutionDetailsDrawerVisible, toggleJobExecutionDetailsDrawerVisible] = useToggle( + false, + ); + const [globalConfigDrawerVisible, toggleGlobalConfigDrawerVisible] = useToggle(false); + const [isPeerSide, setIsPeerSide] = useState(false); const [data, setData] = useState<NodeData>(); const detailQuery = useQuery( ['getWorkflowDetailById', params.id], () => getWorkflowDetailById(params.id), - { cacheTime: 1 }, + { cacheTime: 1, refetchInterval: TIME_INTERVAL.CONNECTION_CHECK }, ); - const peerWorkflowQuery = useQuery(['getPeerWorkflow', params.id], getPeerWorkflow, { - retry: false, - }); + + const { data: projectList } = useRecoilQuery(projectListQuery); const workflow = detailQuery.data?.data; - const peerWorkflow = peerWorkflowQuery.data; const isForked = Boolean(workflow?.forked_from); + const isLocalRun = Boolean(workflow?.is_local); + + const peerWorkflowQuery = useQuery( + [PEER_WORKFLOW_DETAIL_QUERY_KEY, params.id], + () => getPeerWorkflow(params.id), + { + retry: false, + enabled: Boolean(workflow) && !isLocalRun, // Wait for workflow response + }, + ); + const peerWorkflow = peerWorkflowQuery.data; const originWorkflowQuery = useQuery( - ['getPeerWorkflow', workflow?.forked_from], + ['getWorkflowDetailById', workflow?.forked_from], () => getWorkflowDetailById(workflow?.forked_from!), { enabled: isForked }, ); + const { RUNNING, STOPPED, COMPLETED, FAILED } = WorkflowState; + let isRunning_ = false; let isStopped_ = false; let runningTime: number = 0; if (workflow) { - isRunning_ = isRunning(workflow); - isStopped_ = isStopped(workflow); + const { state } = workflow; + isRunning_ = state === RUNNING; + isStopped_ = [STOPPED, COMPLETED, FAILED].includes(state); if (isRunning_ || isStopped_) { const { stop_at, start_at } = workflow; runningTime = isStopped_ ? stop_at! - start_at! 
: dayjs().unix() - start_at!; } } - + async function onDownloadTemplate() { + try { + const blob = await request( + getWorkflowDownloadHref(workflow?.id || '', workflow?.project_id), + { + responseType: 'blob', + }, + ); + saveBlob(blob, `${workflow?.name}-template.json`); + } catch (error: any) { + Message.error(error.message); + } + } const workflowProps = [ { - label: t('workflow.label_template_name'), + label: t('workflow.label_uuid'), + value: workflow?.uuid ?? CONSTANTS.EMPTY_PLACEHOLDER, + }, + { + label: t('workflow.label_template_group'), value: workflow?.config?.group_alias || ( - <Link to={`/workflows/accept/basic/${workflow?.id}`}>{t('workflow.job_node_pending')}</Link> + <Link to={`/workflow-center/workflows/accept/basic/${workflow?.id}`}> + {t('workflow.job_node_pending')} + </Link> + ), + }, + { + label: t('workflow.label_new_template_name'), + value: ( + <> + {(Boolean(workflow?.template_info?.name) && ( + <Link + to={`/workflow-center/workflow-templates/detail/${workflow?.template_info?.id}/config`} + > + {workflow?.template_info?.name} + </Link> + )) || + (workflow?.config && t('workflow.tpl_deleted')) || + t('workflow.tpl_config_pending')} + {workflow?.template_info?.is_modified && workflow?.config && ( + <Button type="text" size="mini" onClick={onDownloadTemplate}> + {t('workflow.tpl_modified')} + </Button> + )} + </> ), }, { label: t('workflow.label_project'), - value: <WhichProject id={workflow?.project_id || 0} />, + value: <WhichProject id={workflow?.project_id} />, }, { label: t('workflow.label_running_time'), - value: Boolean(workflow) && <CountTime time={runningTime} isStatic={!isRunning_} />, }, + { + label: t('workflow.label_revision'), + value: workflow?.template_info?.revision_index + ? `V${workflow?.template_info?.revision_index}` + : '-', + }, + { + label: t('workflow.label_creator'), + value: workflow?.creator || '-', + }, { label: t('workflow.label_batch_update_interval'), - value: `${workflow?.batch_update_interval} min`, - hidden: !workflow?.batch_update_interval || workflow?.batch_update_interval === -1, + value: `${workflow?.cron_config && stringifyCron(workflow.cron_config)}`, + hidden: !workflow?.cron_config, }, // Display workflow global variables - ...(workflow?.config?.variables || []).map((item) => ({ label: item.name, value: item.value })), + ...(workflow?.config?.variables || []).map((item) => ({ + label: item.name, + value: item.value, + accessMode: item.access_mode, + })), ]; const { markThem } = useMarkFederatedJobs(); @@ -164,76 +197,163 @@ const WorkflowDetail: FC = () => { markThem(jobsWithExeDetails, peerJobsWithExeDetails); + const BreadcrumbLinkPaths = useMemo(() => { + if (params.type === 'model-management') { + const formattedExtraWorkflow = workflow ?
formatExtra(workflow) : ({} as Workflow); + return [ + { + label: 'menu.label_model_center_model_training', + to: '/model-center/model-management', + }, + { + label: formattedExtraWorkflow['model_group.name'] || CONSTANTS.EMPTY_PLACEHOLDER, + to: `/model-center/model-management/model-set/${formattedExtraWorkflow['model.group_id']}`, + }, + { label: formattedExtraWorkflow.name || CONSTANTS.EMPTY_PLACEHOLDER }, + ]; + } + if (params.kind_label === 'processed') { + return [ + { + label: 'menu.label_datasets', + to: '/datasets/processed/my', + }, + { + label: 'menu.label_datasets_job', + to: '/datasets/processed/my', + }, + ]; + } + + return [ + { label: 'menu.label_workflow', to: '/workflow-center/workflows' }, + { label: 'workflow.execution_detail' }, + ]; + }, [params, workflow]); + + const participantIdList = useMemo(() => { + if (workflow?.project_id && projectList && projectList.length > 0) { + const currentWorkflowProject = projectList.find((item) => item.id === workflow.project_id); + if (currentWorkflowProject && currentWorkflowProject.participants) { + return currentWorkflowProject.participants.map((item) => item.id); + } + } + return []; + }, [workflow, projectList]); + + const isModelCenterMode = + params.type === 'model-management' || + params.type === 'model-evaluation' || + params.type === 'offline-prediction'; + + function onEditClick() { + if (!workflow) { + return; + } + const formattedExtraWorkflow = workflow ? formatExtra(workflow) : ({} as Workflow); + + history.push( + `/model-center/${params.type}/${ + formattedExtraWorkflow.isReceiver ? 'receiver' : 'sender' + }/edit/global/${formattedExtraWorkflow.id}/${formattedExtraWorkflow['model.group_id'] || ''}`, + ); + } + function onAcceptClick() { + if (!workflow) { + return; + } + + history.push(`/model-center/${params.type}/receiver/edit/global/${workflow.id}`); + } + return ( <SharedPageLayout - title={ - <BreadcrumbLink - paths={[ - { label: 'menu.label_workflow', to: '/workflows' }, - { label: 'workflow.execution_detail' }, - ]} - /> - } + title={<BreadcrumbLink paths={BreadcrumbLinkPaths} />} contentWrapByCard={false} > - <Spin spinning={detailQuery.isLoading}> - <Container> - <Card> - <HeaderRow justify="space-between" align="middle" data-forked={isForked}> - <GridRow gap="8"> - <Name>{workflow?.name}</Name> - - {workflow && <WorkflowStage workflow={workflow} tag />} - </GridRow> - {workflow && ( - <Col> - <WorkflowActions workflow={workflow} onSuccess={detailQuery.refetch} /> - </Col> + <Spin loading={detailQuery.isLoading}> + <> + <section className={styled.chart_section}> + <Card className={styled.header_card}> + <Row + className={styled.header_row} + justify="space-between" + align="center" + data-forked={isForked} + > + <GridRow gap="8"> + <ClickToCopy text={workflow?.name || ''}> + <Tooltip position="top" content={workflow?.name}> + <h3 className={styled.name}>{workflow?.name}</h3> + </Tooltip> + </ClickToCopy> + {workflow && <WorkflowStage workflow={workflow} tag />} + </GridRow> + {workflow && ( + <Col flex="200px"> + <WorkflowActions + size="small" + workflow={workflow} + onSuccess={detailQuery.refetch} + onEditClick={isModelCenterMode ? onEditClick : undefined} + onAcceptClick={isModelCenterMode ? 
onAcceptClick : undefined} + /> + </Col> + )} + </Row> + + {isForked && originWorkflowQuery.isSuccess && ( + <div className={styled.forked_form}> + <IconBranch /> + {t('workflow.forked_from')} + <Link + className={styled.origin_workflow_link} + to={`/workflow-center/workflows/${originWorkflowQuery.data?.data.id}`} + > + {originWorkflowQuery.data?.data.name} + </Link> + </div> )} - </HeaderRow> - - {isForked && originWorkflowQuery.isSuccess && ( - <ForkedFrom> - <Branch /> - {t('workflow.forked_from')} - <OriginWorkflowLink to={`/workflows/${originWorkflowQuery.data?.data.id}`}> - {originWorkflowQuery.data?.data.name} - </OriginWorkflowLink> - </ForkedFrom> - )} - - <PropertyList - labelWidth={100} - initialVisibleRows={3} - cols={3} - properties={workflowProps} - style={{ marginBottom: '0' }} - /> - </Card> - - <ChartSection> + <VariablePermissionLegend desc={true} prefix="对侧" /> + + <PropertyList + labelWidth={100} + initialVisibleRows={3} + cols={3} + colProportions={[1, 2, 1]} + properties={workflowProps} + style={{ marginBottom: '0' }} + /> + </Card> + </section> + <section className={styled.chart_section}> {/* Our config */} - <ChartContainer> - <ChartHeader justify="space-between" align="middle"> - <ChartTitle data-note={peerJobsVisible ? t('workflow.federated_note') : ''}> + <div className={styled.chart_container}> + <Row className={styled.chart_header} justify="space-between" align="center"> + <h3 + className={styled.chart_title} + data-note={peerJobsVisible ? t('workflow.federated_note') : ''} + > {t('workflow.our_config')} - </ChartTitle> + </h3> - {!peerJobsVisible && ( - <Button icon={<Eye />} onClick={() => togglePeerJobsVisible(true)}> + {isLocalRun && <LocalWorkflowNote />} + + {!peerJobsVisible && !isLocalRun && ( + <Button icon={<IconEye />} onClick={() => togglePeerJobsVisible(true)}> {t('workflow.btn_see_peer_config')} </Button> )} - </ChartHeader> + </Row> {jobsWithExeDetails.length === 0 ? ( - <NoJobs> + <div className={styled.no_jobs}> <NoResult text={t('workflow.msg_not_config')} CTAText={t('workflow.action_configure')} - to={`/workflows/accept/basic/${params.id}`} + to={`/workflow-center/workflows/accept/basic/${params.id}`} /> - </NoJobs> + </div> ) : ( <ReactFlowProvider> <WorkflowJobsCanvas @@ -241,36 +361,46 @@ const WorkflowDetail: FC = () => { workflowConfig={{ ...workflow?.config!, job_definitions: jobsWithExeDetails, - variables: [], + variables: workflow?.config?.variables || [], }} onJobClick={viewJobDetail} - onCanvasClick={() => toggleDrawerVisible(false)} + onCanvasClick={hideAllDrawer} + /> + <GlobalConfigDrawer + key="self" + visible={globalConfigDrawerVisible && !isPeerSide} + toggleVisible={toggleGlobalConfigDrawerVisible} + jobData={data} + workflow={workflow} /> </ReactFlowProvider> )} - </ChartContainer> + </div> {/* Peer config */} {peerJobsVisible && ( - <ChartContainer> - <ChartHeader justify="space-between" align="middle"> - <ChartTitle data-note={peerJobsVisible ? t('workflow.federated_note') : ''}> + <div className={styled.chart_container}> + <Row className={styled.chart_header} justify="space-between" align="center"> + <h3 + className={styled.chart_title} + data-note={peerJobsVisible ? 
t('workflow.federated_note') : ''} + > {t('workflow.peer_config')} - </ChartTitle> + </h3> - <Button icon={<EyeInvisible />} onClick={() => togglePeerJobsVisible(false)}> + <Button icon={<IconEyeInvisible />} onClick={() => togglePeerJobsVisible(false)}> {t('workflow.btn_hide_peer_config')} </Button> - </ChartHeader> + </Row> {peerJobsWithExeDetails.length === 0 ? ( - <NoJobs> + <div className={styled.no_jobs}> {peerWorkflowQuery.isFetching ? ( <Spin style={{ margin: 'auto' }} /> ) : ( <NoResult text={t('workflow.msg_peer_not_ready')} /> )} - </NoJobs> + </div> ) : ( <ReactFlowProvider> <WorkflowJobsCanvas @@ -278,56 +408,121 @@ const WorkflowDetail: FC = () => { workflowConfig={{ ...peerWorkflowQuery.data?.config!, job_definitions: peerJobsWithExeDetails, - variables: [], + variables: peerWorkflow?.config?.variables || [], }} onJobClick={viewPeerJobDetail} - onCanvasClick={() => toggleDrawerVisible(false)} + onCanvasClick={hideAllDrawer} + /> + <GlobalConfigDrawer + key="peer" + visible={globalConfigDrawerVisible && isPeerSide} + toggleVisible={toggleGlobalConfigDrawerVisible} + jobData={data} + placement="left" + workflow={peerWorkflow} + isPeerSide /> </ReactFlowProvider> )} - </ChartContainer> + </div> )} - </ChartSection> - + </section> <JobExecutionDetailsDrawer key="self" - visible={drawerVisible && !isPeerSide} - toggleVisible={toggleDrawerVisible} + visible={jobExecutionDetailsDrawerVisible && !isPeerSide} + toggleVisible={toggleJobExecutionDetailsDrawerVisible} jobData={data} workflow={workflow} /> - + {/* Note: Only support one participant now, it will support 2+ participant soon */} <JobExecutionDetailsDrawer key="peer" - visible={drawerVisible && isPeerSide} - toggleVisible={toggleDrawerVisible} + visible={jobExecutionDetailsDrawerVisible && isPeerSide} + toggleVisible={toggleJobExecutionDetailsDrawerVisible} jobData={data} placement="left" workflow={peerWorkflow} + participantId={participantIdList?.[0] ?? 
0}
          isPeerSide
        />
-      </Container>
+      </>
     </Spin>
   </SharedPageLayout>
  );

  function viewJobDetail(jobNode: JobNode) {
+    if (jobNode?.type === 'global') {
+      setIsPeerSide(false);
+      showGlobalConfigDrawer(jobNode);
+      return;
+    }
     setIsPeerSide(false);
-    showJobDetailesDrawer(jobNode);
+    showJobDetailsDrawer(jobNode);
  }
  function viewPeerJobDetail(jobNode: JobNode) {
+    if (jobNode?.type === 'global') {
+      setIsPeerSide(true);
+      showGlobalConfigDrawer(jobNode);
+      return;
+    }
     setIsPeerSide(true);
-    showJobDetailesDrawer(jobNode);
+    showJobDetailsDrawer(jobNode);
  }
-  function showJobDetailesDrawer(jobNode: JobNode) {
+  function showJobDetailsDrawer(jobNode: JobNode) {
     setData(jobNode.data);
-    toggleDrawerVisible(true);
+    toggleJobExecutionDetailsDrawerVisible(true);
  }
-  async function getPeerWorkflow() {
-    const res = await getPeerWorkflowsConfig(params.id);
-    const anyPeerWorkflow = Object.values(res.data).find((item) => !!item.config)!;
-
-    return anyPeerWorkflow;
+  function showGlobalConfigDrawer(jobNode: JobNode) {
+    setData(jobNode.data);
+    toggleGlobalConfigDrawerVisible(true);
+  }
+  function hideAllDrawer() {
+    toggleGlobalConfigDrawerVisible(false);
+    toggleJobExecutionDetailsDrawerVisible(false);
+  }
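+  // Renders a parsed cron config as a human-readable schedule string:
+  // weekly crons become a weekday label plus 'HH:mm' (e.g. '星期一 08:30'),
+  // hourly crons '每时 mm:ss', and daily crons '每天 HH:mm'. Note that the
+  // labels below are hard-coded strings and bypass the i18n t() helper used elsewhere.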
+  function stringifyCron(cron: string) {
+    const { method, weekday, time } = parseCron(cron);
+    if (method === 'week') {
+      let weekdayString;
+      switch (weekday) {
+        case 0:
+          weekdayString = '星期天';
+          break;
+        case 1:
+          weekdayString = '星期一';
+          break;
+
+        case 2:
+          weekdayString = '星期二';
+          break;
+
+        case 3:
+          weekdayString = '星期三';
+          break;
+
+        case 4:
+          weekdayString = '星期四';
+          break;
+
+        case 5:
+          weekdayString = '星期五';
+          break;
+
+        case 6:
+          weekdayString = '星期六';
+          break;
+
+        default:
+          weekdayString = '未知时间';
+      }
+      const timeString = time!.format('HH:mm');
+      return `${weekdayString} ${timeString}`;
+    } else if (method === 'hour') {
+      const timeString = time!.format('mm:ss');
+      return `每时 ${timeString}`;
+    }
+    const timeString = time!.format('HH:mm');
+    return `每天 ${timeString}`;
  }
};
diff --git a/web_console_v2/client/src/views/Workflows/WorkflowList/List/index.module.less b/web_console_v2/client/src/views/Workflows/WorkflowList/List/index.module.less
new file mode 100644
index 000000000..9b04d176e
--- /dev/null
+++ b/web_console_v2/client/src/views/Workflows/WorkflowList/List/index.module.less
@@ -0,0 +1,25 @@
+.workflow_list_form_item {
+  margin-bottom: 0 !important;
+}
+
+.workflow_list_container {
+  display: flex;
+  flex: 1;
+  width: 100%;
+  .col_name_link {
+    display: block;
+    font-size: 12px;
+
+    &[data-invalid='true'] {
+      color: var(--textColorDisabled);
+
+      &:hover {
+        color: var(--primaryColor);
+      }
+    }
+  }
+  .col_uuid {
+    display: block;
+    color: var(--textColorSecondary);
+  }
+}
diff --git a/web_console_v2/client/src/views/Workflows/WorkflowList/List/index.tsx b/web_console_v2/client/src/views/Workflows/WorkflowList/List/index.tsx
new file mode 100644
index 000000000..c2a8a57e1
--- /dev/null
+++ b/web_console_v2/client/src/views/Workflows/WorkflowList/List/index.tsx
@@ -0,0 +1,371 @@
+import React, { FC, useMemo, useState } from 'react';
+import { Form, Grid, Button, Input, Table, Message, Spin, Switch } from '@arco-design/web-react';
+import { Link, useHistory } from 'react-router-dom';
+import { useMutation, useQuery, useQueryClient } from 'react-query';
+import { fetchWorkflowList, favourTheWorkFlow } from 'services/workflow';
+import styled from './index.module.less';
+import i18n from 'i18n';
+import { formatTimestamp } from 'shared/date';
+import { useTranslation } from 'react-i18next';
+import { Workflow, WorkflowState, WorkflowStateFilterParam, WorkflowType } from 'typings/workflow';
+import WorkflowStage from '../WorkflowStage';
+import WorkflowActions from '../../WorkflowActions';
+import WhichProject from 'components/WhichProject';
+import MultiSelect from 'components/MultiSelect';
+import { workflowStateOptionList } from 'shared/workflow';
+import { useUrlState, useTablePaginationWithUrlState, useGetCurrentProjectId } from 'hooks';
+import { TIME_INTERVAL } from 'shared/constants';
+import { FilterOp } from 'typings/filter';
+import { constructExpressionTree, expression2Filter } from 'shared/filter';
+
+const Row = Grid.Row;
+const Col = Grid.Col;
+
+type TableColumnsOptions = {
+  onSuccess?: () => void;
+  withoutActions?: boolean;
+  withoutFavour?: boolean;
+  defaultFavourFilteredValue?: string[];
+  onForkableChange?: (record: Workflow, val: boolean) => void;
+  onFavourSwitchChange?: (record: Workflow) => void;
+};
+
+export const getWorkflowTableColumns = (options: TableColumnsOptions = {}) => {
+  const ret = [
+    {
+      title: i18n.t('workflow.name'),
+      dataIndex: 'name',
+      key: 'name',
+      width: 300,
+      render: (name: string, record: Workflow) => {
+        const { state } = record;
+        const { INVALID } = WorkflowState;
+        return (
+          <>
+            <Link
+              to={`/workflow-center/workflows/${record.id}`}
+              rel="noopener"
+              className={styled.col_name_link}
+              data-invalid={state === INVALID}
+            >
+              {name}
+            </Link>
+            <small className={styled.col_uuid}>uuid: {record.uuid}</small>
+          </>
+        );
+      },
+    },
+    {
+      title: i18n.t('workflow.col_status'),
+      dataIndex: 'state',
+      width: 150,
+      render: (_: string, record: Workflow) => <WorkflowStage workflow={record} />,
+    },
+    {
+      title: i18n.t('workflow.col_project'),
+      dataIndex: 'project_id',
+      width: 150,
+      render: (project_id: number) => <WhichProject id={project_id} />,
+    },
+    {
+      title: i18n.t('workflow.col_date'),
+      dataIndex: 'created_at',
+      width: 200,
+      render: (date: number) => <div>{formatTimestamp(date)}</div>,
+    },
+    {
+      title: i18n.t('workflow.col_favorite'),
+      dataIndex: 'favour',
+      defaultFilteredValue: options.defaultFavourFilteredValue,
+      width: 120,
+      filters: [
+        {
+          text: i18n.t('term_favored'),
+          value: '1',
+        },
+        {
+          text: i18n.t('term_unfavored'),
+          value: '0',
+        },
+      ],
+      filterMultiple: false,
+      render(favorite: number, record: Workflow) {
+        return (
+          <Switch
+            size="small"
+            onChange={() => options.onFavourSwitchChange?.(record)}
+            checked={Boolean(favorite)}
+          />
+        );
+      },
+    },
+  ];
+
+  if (options.withoutFavour) {
+    ret.splice(ret.length - 1, 1);
+  }
+
+  if (!options.withoutActions) {
+    ret.push({
+      title: i18n.t('workflow.col_actions'),
+      dataIndex: 'operation',
+      width: 400,
+      render: (_: any, record: Workflow) => (
+        <WorkflowActions onSuccess={options.onSuccess} workflow={record} type="text" size="mini" />
+      ),
+    });
+  }
+
+  return ret;
+};
+
+type QueryParams = {
+  project?: string;
+  keyword?: string;
+  uuid?: string;
+  states?: WorkflowStateFilterParam[];
+  page?: number;
+  favour?: '0' | '1';
+  system?: string;
+};
+
+type TWorkflowListRes = {
+  data: Workflow[];
+};
+
+type ListProps = {
+  type: WorkflowType;
+};
+
+export const WORKFLOW_LIST_QUERY_KEY = 'fetchWorkflowList';
+
+const List: FC<ListProps> = ({ type }) => {
+  const { t } = useTranslation();
+  const [form] = Form.useForm<QueryParams>();
+  const history = useHistory();
+  const [urlState, setUrlState] = useUrlState({
+    page: 1,
+    pageSize: 10,
+    keyword: '',
+    uuid: '',
+    states: [],
+    filter:
initFilter(), + favour: undefined, + }); + const projectId = useGetCurrentProjectId(); + + const { urlState: pageInfoState, paginationProps } = useTablePaginationWithUrlState(); + + const initFilterParams = expression2Filter(urlState.filter); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const [filterParams, setFilterParams] = useState<QueryParams>({ + states: initFilterParams.states || [], + keyword: initFilterParams.keyword || '', + uuid: initFilterParams.uuid || '', + system: initFilterParams.system || false, + }); + + const queryClient = useQueryClient(); + const listQueryKey = [ + WORKFLOW_LIST_QUERY_KEY, + urlState.keyword, + urlState.uuid, + urlState.states, + urlState.favour, + projectId, + pageInfoState.page, + pageInfoState.pageSize, + ]; + const listQuery = useQuery( + listQueryKey, + () => { + if (!projectId) { + Message.info(t('select_project_notice')); + } + return fetchWorkflowList({ + ...urlState, + project: projectId, + page: pageInfoState.page, + pageSize: pageInfoState.pageSize, + }); + }, + { + retry: 2, + refetchInterval: TIME_INTERVAL.LIST, + keepPreviousData: true, + }, + ); + const { isLoading, isError, data: res, error, refetch } = listQuery; + const favourMutation = useMutation( + async (workflow: Workflow) => { + await favourTheWorkFlow(projectId ?? 0, workflow.id, !Boolean(workflow.favour)); + }, + { + onMutate(workflow) { + // cancel ongoing list queries + queryClient.cancelQueries(listQueryKey); + const oldData = queryClient.getQueryData<TWorkflowListRes>(listQueryKey); + const operatingIndex = oldData?.data.findIndex((item) => item.id === workflow.id); + + if (operatingIndex === -1 || operatingIndex === undefined) { + return oldData; + } + + // temporarily update list. + queryClient.setQueryData<TWorkflowListRes>(listQueryKey, (oldData) => { + const copied = oldData?.data ? [...oldData.data] : []; + copied.splice(operatingIndex, 1, { + ...workflow, + favour: !workflow.favour, + }); + + return { + data: copied, + }; + }); + + return oldData; + }, + onSuccess() { + refetch(); + }, + onError(_: any, __: any, oldData) { + // if failed, reverse list data to the old one. + queryClient.setQueryData(listQueryKey, oldData); + }, + }, + ); + + if (isError && error) { + Message.error((error as Error).message); + } + + const workflowListShow = useMemo(() => { + const workflowList = res?.data ?? []; + return workflowList; + }, [res]); + + return ( + <> + <Row justify="space-between" align="center"> + <Col span={4}> + {type === WorkflowType.MY ? 
( + <Button className={'custom-operation-button'} type="primary" onClick={goCreate}> + {t('workflow.create_workflow')} + </Button> + ) : ( + <></> + )} + </Col> + <Col span={20}> + <Form + initialValues={{ ...urlState }} + layout="inline" + form={form} + onChange={onParamsChange} + style={{ justifyContent: 'flex-end' }} + > + <Form.Item field="states" className={styled.workflow_list_form_item}> + <MultiSelect + isHideIndex={true} + placeholder="任务状态" + optionList={workflowStateOptionList || []} + onChange={form.submit} + allowClear + style={{ minWidth: '227px', maxWidth: '500px', fontSize: '12px' }} + /> + </Form.Item> + <Form.Item field="uuid" className={styled.workflow_list_form_item}> + <Input.Search + className={'custom-input'} + placeholder={t('workflow.placeholder_uuid_searchbox')} + onSearch={form.submit} + allowClear + /> + </Form.Item> + <Form.Item field="keyword" className={styled.workflow_list_form_item}> + <Input.Search + className={'custom-input'} + placeholder={t('workflow.placeholder_name_searchbox')} + onSearch={form.submit} + allowClear + /> + </Form.Item> + </Form> + </Col> + </Row> + <Spin loading={isLoading}> + <div className={styled.workflow_list_container}> + <Table + className="custom-table custom-table-left-side-filter" + data={workflowListShow} + columns={getWorkflowTableColumns({ + onSuccess, + onFavourSwitchChange: (workflow: Workflow) => favourMutation.mutate(workflow), + defaultFavourFilteredValue: urlState.favour ? [urlState.favour] : [], + })} + onChange={(_, sorter, filter, extra) => { + if (extra.action === 'filter') { + setUrlState({ + page: 1, + favour: filter.favour?.[0] ?? undefined, + }); + } + }} + scroll={{ x: '100%' }} + rowKey="id" + pagination={{ + ...paginationProps, + total: listQuery.data?.page_meta?.total_items ?? 
undefined, + }} + style={{ minWidth: '800px' }} + /> + </div> + </Spin> + </> + ); + function onParamsChange(values: QueryParams) { + // Set urlState will auto-trigger list query + setUrlState({ ...values, page: 1 }); + } + function onSuccess() { + refetch(); + } + + function goCreate() { + history.push('/workflow-center/workflows/initiate/basic'); + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + function constructFilterArray(value: QueryParams) { + const expressionNodes = []; + expressionNodes.push({ + field: 'system', + op: FilterOp.EQUAL, + bool_value: type === WorkflowType.SYSTEM, + }); + + const serialization = constructExpressionTree(expressionNodes); + setFilterParams({ + system: value.system, + }); + setUrlState((prevState) => ({ + ...prevState, + filter: serialization, + page: 1, + })); + } + + function initFilter() { + const expressionNodes = []; + expressionNodes.push({ + field: 'system', + op: FilterOp.EQUAL, + bool_value: type === WorkflowType.SYSTEM, + }); + return constructExpressionTree(expressionNodes); + } +}; + +export default List; diff --git a/web_console_v2/client/src/views/Workflows/WorkflowList/index.module.less b/web_console_v2/client/src/views/Workflows/WorkflowList/index.module.less new file mode 100644 index 000000000..8e42ae6f5 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/WorkflowList/index.module.less @@ -0,0 +1,9 @@ +.workflow_tabs { + .arco-tabs-header-nav-horizontal { + padding-left: 4px; + } + .arco-tabs-header-title { + margin-top: 4px; + margin-bottom: 4px; + } +} diff --git a/web_console_v2/client/src/views/Workflows/WorkflowList/index.tsx b/web_console_v2/client/src/views/Workflows/WorkflowList/index.tsx index 6f3af6088..1f027f732 100644 --- a/web_console_v2/client/src/views/Workflows/WorkflowList/index.tsx +++ b/web_console_v2/client/src/views/Workflows/WorkflowList/index.tsx @@ -1,201 +1,38 @@ -import React, { FC, useEffect, useState } from 'react'; -import styled from 'styled-components'; -import { Row, Col, Button, Form, Input, Table, message, Spin } from 'antd'; -import { Link, useHistory } from 'react-router-dom'; -import { useQuery } from 'react-query'; -import { fetchWorkflowList } from 'services/workflow'; -import i18n from 'i18n'; -import { formatTimestamp } from 'shared/date'; +import React, { FC, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import SharedPageLayout from 'components/SharedPageLayout'; -import { Workflow } from 'typings/workflow'; -import WorkflowStage from './WorkflowStage'; -import WorkflowActions from '../WorkflowActions'; -import WhichProject from 'components/WhichProject'; -import NoResult from 'components/NoResult'; -import { projectState } from 'stores/project'; -import { isInvalid } from 'shared/workflow'; -import { useRecoilValue } from 'recoil'; +import { useParams, useHistory } from 'react-router'; -const FilterItem = styled(Form.Item)` - > .ant-form-item-control { - width: 227px; - } -`; -const ListContainer = styled.div` - display: flex; - flex: 1; - width: 100%; -`; -const NameLink = styled(Link)` - display: block; - margin-bottom: 3px; - font-size: 16px; - - &[data-invalid='true'] { - color: var(--textColorDisabled); - - &:hover { - color: var(--primaryColor); - } - } -`; -const UUID = styled.small` - display: block; - color: var(--textColorSecondary); -`; - -export const getWorkflowTableColumns = ( - options: { - onSuccess?: Function; - withoutActions?: boolean; - onForkableChange?: (record: Workflow, val: boolean) => void; - } = {}, -) => { - 
const ret = [ - { - title: i18n.t('workflow.name'), - dataIndex: 'name', - key: 'name', - render: (name: string, record: Workflow) => { - return ( - <> - <NameLink to={`/workflows/${record.id}`} rel="nopener" data-invalid={isInvalid(record)}> - {name} - </NameLink> - <UUID>UUID: {record.uuid}</UUID> - </> - ); - }, - }, - { - title: i18n.t('workflow.col_status'), - dataIndex: 'state', - name: 'state', - render: (_: string, record: Workflow) => <WorkflowStage workflow={record} />, - }, - { - title: i18n.t('workflow.col_project'), - dataIndex: 'project_id', - name: 'project_id', - width: 150, - render: (project_id: number) => <WhichProject id={project_id} />, - }, - { - title: i18n.t('workflow.col_date'), - dataIndex: 'created_at', - name: 'created_at', - render: (date: number) => <div>{formatTimestamp(date)}</div>, - }, - ]; - if (!options.withoutActions) { - ret.push({ - title: i18n.t('workflow.col_actions'), - dataIndex: 'operation', - name: 'operation', - render: (_: any, record: Workflow) => ( - <WorkflowActions - onSuccess={options.onSuccess} - workflow={record} - type="link" - without={['report']} - /> - ), - }); - } +import { WorkflowType } from 'typings/workflow'; - return ret; -}; - -type QueryParams = { - project?: string; - keyword?: string; - uuid?: string; -}; +import { Tabs } from '@arco-design/web-react'; +import SharedPageLayout, { RemovePadding } from 'components/SharedPageLayout'; +import List from './List'; +import styled from './index.module.less'; const WorkflowList: FC = () => { const { t } = useTranslation(); - const [form] = Form.useForm<QueryParams>(); const history = useHistory(); + const { tabType } = useParams<{ tabType: WorkflowType }>(); - const [listData, setList] = useState<Workflow[]>([]); - const [params, setParams] = useState<QueryParams>({ keyword: '', uuid: '' }); - - const project = useRecoilValue(projectState); - - const { isLoading, isError, data: res, error, refetch } = useQuery( - ['fetchWorkflowList', params.keyword, params.uuid, project.current?.id], - () => fetchWorkflowList({ ...params, project: project.current?.id }), - ); - - if (isError && error) { - message.error((error as Error).message); - } - - useEffect(() => { - setList(res?.data || []); - }, [res?.data]); - - const isEmpty = listData.length === 0; + const [activeKey, setActiveKey] = useState<WorkflowType>(tabType || WorkflowType.MY); return ( - <Spin spinning={isLoading}> - <SharedPageLayout title={t('menu.label_workflow')}> - <Row gutter={16} justify="space-between" align="middle"> - <Col> - <Button size="large" type="primary" onClick={goCreate}> - {t('workflow.create_workflow')} - </Button> - </Col> - <Col> - <Form - initialValues={{ ...params }} - layout="inline" - form={form} - onFinish={onParamsChange} - > - <FilterItem name="uuid"> - <Input.Search - placeholder={t('workflow.placeholder_uuid_searchbox')} - onPressEnter={form.submit} - /> - </FilterItem> - <FilterItem name="keyword"> - <Input.Search - placeholder={t('workflow.placeholder_name_searchbox')} - onPressEnter={form.submit} - /> - </FilterItem> - </Form> - </Col> - </Row> - - <ListContainer> - {isEmpty ? 
( - <NoResult text={t('workflow.no_result')} to="/workflows/initiate/basic" /> - ) : ( - <Table - dataSource={listData} - columns={getWorkflowTableColumns({ onSuccess })} - scroll={{ x: '100%' }} - rowKey="name" - /> - )} - </ListContainer> - </SharedPageLayout> - </Spin> + <SharedPageLayout title={t('menu.label_workflow')}> + <RemovePadding style={{ height: 46 }}> + <Tabs className={styled.workflow_tabs} defaultActiveTab={activeKey} onChange={onTabChange}> + <Tabs.TabPane title={t('workflow.label_tab_my_workflow')} key={WorkflowType.MY} /> + <Tabs.TabPane title={t('workflow.label_tab_system_workflow')} key={WorkflowType.SYSTEM} /> + </Tabs> + </RemovePadding> + + {tabType === WorkflowType.MY ? <List type={WorkflowType.MY} /> : null} + {tabType === WorkflowType.SYSTEM ? <List type={WorkflowType.SYSTEM} /> : null} + </SharedPageLayout> ); - function onParamsChange(values: QueryParams) { - // Set params will auto-trigger list query - setParams(values); - } - function onSuccess() { - refetch(); - } - - function goCreate() { - history.push('/workflows/initiate/basic'); + function onTabChange(val: string) { + setActiveKey(val as WorkflowType); + history.replace(`/workflow-center/workflows/list/${val}`); } }; diff --git a/web_console_v2/client/src/views/Workflows/index.tsx b/web_console_v2/client/src/views/Workflows/index.tsx index a597e9852..f187b4f95 100644 --- a/web_console_v2/client/src/views/Workflows/index.tsx +++ b/web_console_v2/client/src/views/Workflows/index.tsx @@ -1,48 +1,59 @@ import React from 'react'; -import ErrorBoundary from 'antd/lib/alert/ErrorBoundary'; -import { Route, Redirect } from 'react-router-dom'; +import ErrorBoundary from 'components/ErrorBoundary'; +import { Route, Redirect, useLocation } from 'react-router-dom'; import WorkflowsList from './WorkflowList'; import CreateWorkflow from './CreateWorkflow'; import ForkWorkflow from './ForkWorkflow'; import EditWorkflow from './EditWorkflow'; import WorkflowDetail from './WorkflowDetail'; +import { WorkflowType } from 'typings/workflow'; function WorkflowsPage() { + const location = useLocation(); + return ( <ErrorBoundary> - <Route path="/workflows" exact component={WorkflowsList} /> <Route - path="/workflows/initiate" + path={`/workflow-center/workflows/list/:tabType(${WorkflowType.MY}|${WorkflowType.SYSTEM})`} exact - render={() => <Redirect to="/workflows/initiate/basic" />} + component={WorkflowsList} + /> + <Route + path="/workflow-center/workflows/initiate" + exact + render={() => <Redirect to="/workflow-center/workflows/initiate/basic" />} /> {/* Coordinator initiate a worklflow */} <Route - path="/workflows/initiate/:step" + path="/workflow-center/workflows/initiate/:step/:template_id?" 
exact render={(props: any) => <CreateWorkflow {...props} isInitiate={true} />} /> {/* Participant accept and fill the workflow config */} <Route - path="/workflows/accept/:step/:id" + path="/workflow-center/workflows/accept/:step/:id" exact render={(props: any) => <CreateWorkflow {...props} isAccept={true} />} /> <Route - path="/workflows/fork/:step/:id" + path="/workflow-center/workflows/fork/:step/:id" exact render={(props: any) => <ForkWorkflow {...props} />} /> <Route - path="/workflows/edit/:step/:id" + path="/workflow-center/workflows/edit/:step/:id" exact render={(props: any) => <EditWorkflow {...props} />} /> - <Route path="/workflows/:id" exact component={WorkflowDetail} /> + {location.pathname === '/workflow-center/workflows' && ( + <Redirect to={`/workflow-center/workflows/list/${WorkflowType.MY}`} /> + )} + + <Route path="/workflow-center/workflows/:id" exact component={WorkflowDetail} /> </ErrorBoundary> ); } diff --git a/web_console_v2/client/src/views/Workflows/shared.test.ts b/web_console_v2/client/src/views/Workflows/shared.test.ts new file mode 100644 index 000000000..dd9af5def --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/shared.test.ts @@ -0,0 +1,156 @@ +import { hydrate } from './shared'; + +import { Variable } from 'typings/variable'; + +import { + nameInput, + codeEditor, + featureSelect, + envsInput, + forceObjectInput, + forceListInput, +} from 'services/mocks/v2/variables/examples'; + +describe('hydrate', () => { + const testVariables: Variable[] = [ + nameInput, + codeEditor, + featureSelect, + envsInput, + forceObjectInput, + forceListInput, + ]; + it('normal', () => { + const testVariablesAllWithNewValue: Variable[] = [ + { ...nameInput, value: 'new value' }, + { ...codeEditor, value: { 'main.js': 'var a = 2;' } }, + { ...featureSelect, value: { a: 2 } }, + { + ...envsInput, + value: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + }, + { ...forceObjectInput, value: { a: 2 } }, + { ...forceListInput, value: [{ a: 2 }] }, + ]; + + const testVariablesSomeWithNewValue: Variable[] = [ + nameInput, + codeEditor, + { ...featureSelect, value: { a: 2 } }, + { + ...envsInput, + value: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + }, + { ...forceObjectInput, value: { a: 2 } }, + { ...forceListInput, value: [{ a: 2 }] }, + ]; + + expect(hydrate(testVariables, undefined)).toEqual([]); + expect(hydrate(testVariables, {})).toEqual(testVariables); + expect( + hydrate(testVariables, { + [nameInput.name]: 'new value', + [codeEditor.name]: { 'main.js': 'var a = 2;' }, + [featureSelect.name]: { a: 2 }, + [envsInput.name]: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + [forceObjectInput.name]: { a: 2 }, + [forceListInput.name]: [{ a: 2 }], + }), + ).toEqual(testVariablesAllWithNewValue); + expect( + hydrate(testVariables, { + [featureSelect.name]: { a: 2 }, + [envsInput.name]: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + [forceObjectInput.name]: { a: 2 }, + [forceListInput.name]: [{ a: 2 }], + }), + ).toEqual(testVariablesSomeWithNewValue); + }); + it('isStringifyVariableValue', () => { + expect(testVariables.every((item) => typeof item.value === 'string')).toBeFalsy(); + const finalVariables = hydrate( + testVariables, + { + [featureSelect.name]: { a: 2 }, + [envsInput.name]: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + [forceObjectInput.name]: { a: 2 }, + [forceListInput.name]: [{ a: 2 }], + }, + { + 
isStringifyVariableValue: true, + }, + ); + expect(finalVariables.every((item) => typeof item.value === 'string')).toBeTruthy(); + }); + it('isProcessVariableTypedValue', () => { + const finalVariables = hydrate( + testVariables, + { + [nameInput.name]: 'namename', + [featureSelect.name]: { a: 2 }, + [envsInput.name]: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + [forceObjectInput.name]: { a: 2 }, + [forceListInput.name]: [{ a: 2 }], + }, + { + isProcessVariableTypedValue: true, + }, + ); + + expect(finalVariables).toEqual([ + { ...nameInput, typed_value: 'namename', value: 'namename' }, + codeEditor, + { ...featureSelect, typed_value: { a: 2 }, value: { a: 2 } }, + { + ...envsInput, + typed_value: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + value: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + }, + { ...forceObjectInput, typed_value: { a: 2 }, value: { a: 2 } }, + { ...forceListInput, typed_value: [{ a: 2 }], value: [{ a: 2 }] }, + ]); + }); + it('isStringifyVariableWidgetSchema', () => { + expect(testVariables.every((item) => typeof item.widget_schema === 'string')).toBeFalsy(); + const finalVariables = hydrate( + testVariables, + { + [featureSelect.name]: { a: 2 }, + [envsInput.name]: [ + { name: 'nn1', value: 'nv1' }, + { name: 'nn2', value: 'nv2' }, + ], + [forceObjectInput.name]: { a: 2 }, + [forceListInput.name]: [{ a: 2 }], + }, + { + isStringifyVariableWidgetSchema: true, + }, + ); + expect(finalVariables.every((item) => typeof item.widget_schema === 'string')).toBeTruthy(); + }); +}); diff --git a/web_console_v2/client/src/views/Workflows/shared.ts b/web_console_v2/client/src/views/Workflows/shared.ts new file mode 100644 index 000000000..f89481a31 --- /dev/null +++ b/web_console_v2/client/src/views/Workflows/shared.ts @@ -0,0 +1,136 @@ +import { StateTypes } from 'components/StateIndicator'; +import i18n from 'i18n'; +import { Pod, PodState } from 'typings/job'; +import { cloneDeep } from 'lodash-es'; +import { processVariableTypedValue, stringifyVariableValue } from 'shared/formSchema'; +import { DataJobVariable } from 'typings/dataset'; +import { Variable } from 'typings/variable'; +import { TableColumnProps } from '@arco-design/web-react'; + +type TableFilterConfig = Pick<TableColumnProps, 'filters' | 'onFilter'>; + +/** + * @param variableShells Variable defintions without any user input value + * @param formValues User inputs + */ +export function hydrate( + variableShells: Array<Variable | DataJobVariable>, + formValues?: Record<string, any>, + options: { + isStringifyVariableValue?: boolean; + isProcessVariableTypedValue?: boolean; + isStringifyVariableWidgetSchema?: boolean; + } = { + isStringifyVariableValue: false, + isProcessVariableTypedValue: false, + isStringifyVariableWidgetSchema: false, + }, +): Array<Variable | DataJobVariable> { + if (!formValues) return []; + return variableShells.map((item) => { + const newVariable = cloneDeep({ ...item, value: formValues[item.name] ?? 
item.value });
+
+    if (options?.isStringifyVariableValue) {
+      stringifyVariableValue(newVariable as Variable);
+    }
+    if (options?.isProcessVariableTypedValue) {
+      processVariableTypedValue(newVariable as Variable);
+    }
+    if (options?.isStringifyVariableWidgetSchema) {
+      if (typeof newVariable.widget_schema === 'object') {
+        newVariable.widget_schema = JSON.stringify(newVariable.widget_schema);
+      }
+    }
+
+    return newVariable;
+  });
+}
+
+export const podStateType: { [key: string]: StateTypes } = {
+  [PodState.SUCCEEDED]: 'success',
+  [PodState.RUNNING]: 'processing',
+  [PodState.FAILED]: 'error',
+  [PodState.PENDING]: 'warning',
+  [PodState.UNKNOWN]: 'default',
+  [PodState.FAILED_AND_FREED]: 'warning',
+  [PodState.SUCCEEDED_AND_FREED]: 'success',
+  // Deprecated state values, mapped the same way as their current counterparts
+  [PodState.SUCCEEDED__deprecated]: 'success',
+  [PodState.RUNNING__deprecated]: 'processing',
+  [PodState.FAILED__deprecated]: 'error',
+  [PodState.PENDING__deprecated]: 'warning',
+  [PodState.UNKNOWN__deprecated]: 'default',
+  [PodState.SUCCEEDED_AND_FREED__deprecated]: 'success',
+  [PodState.FAILED_AND_FREED__deprecated]: 'warning',
+};
+export const podStateText: { [key: string]: string } = {
+  [PodState.SUCCEEDED]: i18n.t('workflow.job_node_success'),
+  [PodState.RUNNING]: i18n.t('workflow.job_node_running'),
+  [PodState.FAILED]: i18n.t('workflow.job_node_failed'),
+  [PodState.PENDING]: i18n.t('workflow.job_node_waiting'),
+  [PodState.UNKNOWN]: i18n.t('workflow.pod_unknown'),
+  [PodState.FAILED_AND_FREED]: i18n.t('workflow.pod_failed_cleared'),
+  [PodState.SUCCEEDED_AND_FREED]: i18n.t('workflow.pod_success_cleared'),
+  // Deprecated state values, mapped the same way as their current counterparts
+  [PodState.SUCCEEDED__deprecated]: i18n.t('workflow.job_node_success'),
+  [PodState.RUNNING__deprecated]: i18n.t('workflow.job_node_running'),
+  [PodState.FAILED__deprecated]: i18n.t('workflow.job_node_failed'),
+  [PodState.PENDING__deprecated]: i18n.t('workflow.job_node_waiting'),
+  [PodState.UNKNOWN__deprecated]: i18n.t('workflow.pod_unknown'),
+  [PodState.SUCCEEDED_AND_FREED__deprecated]: i18n.t('workflow.pod_success_cleared'),
+  [PodState.FAILED_AND_FREED__deprecated]: i18n.t('workflow.pod_failed_cleared'),
+};
+
+/* istanbul ignore next */
+export function getPodState(pod: Pod): { type: StateTypes; text: string; tip: string } {
+  let tip: string = '';
+  if ([PodState.FAILED, PodState.PENDING].includes(pod.state)) {
+    tip = pod.message || '';
+  }
+  return {
+    text: podStateText[pod.state],
+    type: podStateType[pod.state],
+    tip,
+  };
+}
+
+export const podStateOptions = [
+  {
+    label: podStateText[PodState.SUCCEEDED],
+    value: PodState.SUCCEEDED,
+  },
+  {
+    label: podStateText[PodState.RUNNING],
+    value: PodState.RUNNING,
+  },
+  {
+    label: podStateText[PodState.FAILED],
+    value: PodState.FAILED,
+  },
+  {
+    label: podStateText[PodState.PENDING],
+    value: PodState.PENDING,
+  },
+  {
+    label: podStateText[PodState.UNKNOWN],
+    value: PodState.UNKNOWN,
+  },
+  {
+    label: podStateText[PodState.FAILED_AND_FREED],
+    value: PodState.FAILED_AND_FREED,
+  },
+  {
+    label: podStateText[PodState.SUCCEEDED_AND_FREED],
+    value: PodState.SUCCEEDED_AND_FREED,
+  },
+];
+
+export const podStateFilters: TableFilterConfig = {
+  filters: podStateOptions.map((item) => ({
+    text: item.label,
+    value: item.value,
+  })),
+  onFilter: (value: string, record: Pod) => {
+    return value === record.state;
+  },
+};
diff --git a/web_console_v2/client/src/views/index.module.less b/web_console_v2/client/src/views/index.module.less
new file mode 100644
index 000000000..318c52a6f
--- /dev/null
+++ b/web_console_v2/client/src/views/index.module.less
@@ -0,0 +1,6 @@
+.route_spin {
+  height: 100%;
+  display: flex !important;
+  justify-content: center;
+  align-items: center;
+}
diff --git a/web_console_v2/client/src/views/index.tsx b/web_console_v2/client/src/views/index.tsx
index 0c0488106..3dbb1320a 100644
--- a/web_console_v2/client/src/views/index.tsx
+++ 
b/web_console_v2/client/src/views/index.module.less @@ -0,0 +1,6 @@ +.route_spin{ + height: 100%; + display: flex !important; + justify-content: center; + align-items: center; +} diff --git a/web_console_v2/client/src/views/index.tsx b/web_console_v2/client/src/views/index.tsx index 0c0488106..3dbb1320a 100644 --- a/web_console_v2/client/src/views/index.tsx +++ b/web_console_v2/client/src/views/index.tsx @@ -1,9 +1,72 @@ -import React from 'react'; -import routes from './routes'; - +import React, { FC, CSSProperties, Suspense } from 'react'; +import routes, { noSidebarRoutes } from './routes'; +import { Spin } from '@arco-design/web-react'; import { Switch, Route } from 'react-router-dom'; import ProtectedRoute from './ProtectedRoute'; +import styled from 'styled-components'; +import Header from 'components/Header'; +import Sidebar from 'components/Sidebar'; + +import { appGetters } from 'stores/app'; +import { useRecoilValue } from 'recoil'; +import { convertToUnit } from 'shared/helpers'; +import styles from './index.module.less'; + +const AppLayout = styled.div` + display: grid; + min-height: 100vh; + max-height: 100vh; + grid-template-areas: + 'header header' + 'sidebar main-content'; + grid-template-rows: auto 1fr; + grid-template-columns: auto 1fr; + overflow: hidden; +`; + +const AppHeader = styled(Header)` + grid-area: header; + align-self: start; +`; + +const AppSidebar = styled(Sidebar)` + grid-area: sidebar; +`; + +const AppMainContent = styled.main` + position: relative; + display: flex; + flex-direction: column; + grid-area: main-content; + + overflow: auto; + overflow-anchor: auto; + height: 100%; +`; + +const BaseLayout: FC<{}> = ({ children }) => { + const preferenceGetters = useRecoilValue(appGetters); + + return ( + <AppLayout + style={{ '--SidebarWidth': convertToUnit(preferenceGetters.sidebarWidth) } as CSSProperties} + > + <AppHeader /> + <AppSidebar /> + <AppMainContent id="app-content">{children}</AppMainContent> + </AppLayout> + ); +}; +const NoSidebarLayout: FC<{}> = ({ children }) => { + return ( + <AppLayout style={{ '--SidebarWidth': 0 } as CSSProperties}> + <AppHeader /> + <AppMainContent id="app-content">{children}</AppMainContent> + </AppLayout> + ); +}; + function RouterViews() { if (!routes) { return null; @@ -11,7 +74,8 @@ function RouterViews() { return ( <Switch> - {routes.map((route, index) => { + {noSidebarRoutes.map((route, index) => { + const { component: Component } = route; const RouteComponent = route.auth ? ProtectedRoute : Route; return ( @@ -19,8 +83,43 @@ function RouterViews() { key={index} path={route.path} exact={route.exact} - render={(props: any) => <route.component {...props} />} + render={(props: any) => ( + <NoSidebarLayout> + <Component {...props} /> + </NoSidebarLayout> + )} + roles={route.roles} + flagKeys={route.flagKeys} + abilitiesSupport={route.abilitiesSupport} + /> + ); + })} + {routes.map((route, index) => { + const { component: Component, async } = route; + const RouteComponent = route.auth ? ProtectedRoute : Route; + let AsyncComponent: any; + if (async) { + AsyncComponent = React.lazy(Component as any); + } + return ( + <RouteComponent + key={index} + path={route.path} + exact={route.exact} + render={(props: any) => ( + <BaseLayout> + {async ? 
( + <Suspense fallback={<Spin loading={true} className={styles.route_spin} />}> + <AsyncComponent {...props} /> + </Suspense> + ) : ( + <Component {...props} /> + )} + </BaseLayout> + )} roles={route.roles} + flagKeys={route.flagKeys} + abilitiesSupport={route.abilitiesSupport} /> ); })} diff --git a/web_console_v2/client/src/views/routes.tsx b/web_console_v2/client/src/views/routes.tsx index 63380001e..d2619ea58 100644 --- a/web_console_v2/client/src/views/routes.tsx +++ b/web_console_v2/client/src/views/routes.tsx @@ -1,13 +1,28 @@ import React from 'react'; import { Redirect } from 'react-router-dom'; import Projects from 'views/Projects'; -import Workflows from 'views/Workflows'; -import WorkflowTemplates from 'views/WorkflowTemplates'; import Datasets from 'views/Datasets'; -import Settings from 'views/Settings'; import { FedRouteConfig } from 'typings/app'; +import { FlagKey } from 'typings/flag'; import { FedRoles } from 'typings/auth'; -import Users from './Users'; +import NewModelCenter from 'views/ModelCenter'; +import ModelServing from 'views/ModelServing'; +import Workflows from 'views/Workflows'; +import WorkflowTemplates from 'views/WorkflowTemplates'; +import AlgorithmAcceptance from 'views/AlgorithmManagement/AlgorithmAcceptance'; + +import CreateCentralization from './ModelCenter/ModelTrain/CreateCentralization'; +import NewModelEvaluationCreate from 'views/ModelCenter/ModelEvaluation/ModelEvaluationCreate'; +import NewModelTrainCreate from 'views/ModelCenter/ModelTrain/Create'; +import ModelTrainJobCreate from './ModelCenter/ModelTrain/ModelTrainJobCreate'; +import NewModelCenterRoutes from 'views/ModelCenter/routes'; +import CreatePartner from 'views/Partner/CreatePartner'; +import EditPartner from 'views/Partner/EditPartner'; +import CreateRawDataset from 'views/Datasets/CreateDataset'; +import CreateProcessedDataset from 'views/Datasets/CreateProcessedDataset'; +import DatasetCenterRoutes from 'views/Datasets/routes'; +import CreateDataSource from './Datasets/CreateDataSource'; +import { ABILITIES_SIDEBAR_MENU_MAPPER } from 'shared/router'; const routes: FedRouteConfig[] = [ { @@ -16,38 +31,192 @@ const routes: FedRouteConfig[] = [ component: () => <Redirect to="/projects" />, auth: true, }, - { path: '/projects', component: Projects, auth: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.projects, }, { - path: '/workflows', + path: '/workflow-center/workflows', component: Workflows, auth: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.workflowCenter, }, { - path: '/workflow-templates', + path: '/workflow-center/workflow-templates', component: WorkflowTemplates, auth: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.workflowCenter, }, { path: '/datasets', component: Datasets, auth: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.datasets, }, { path: '/settings', - component: Settings, + component: () => import('views/Settings'), auth: true, + async: true, + roles: [FedRoles.Admin], }, { path: '/users', - component: Users, + component: () => import('views/Users'), + auth: true, + roles: [FedRoles.Admin], + async: true, + flagKeys: [FlagKey.USER_MANAGEMENT_ENABLED], + }, + { + path: '/model-center', + component: NewModelCenter, + auth: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelCenter, + }, + { + path: '/trusted-center', + component: () => import('views/TrustedCenter'), + auth: true, + async: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.trustedCenter, + }, + { + path: '/algorithm-management', + component: () => 
import('views/AlgorithmManagement'), + auth: true, + async: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.algorithmManagement, + }, + { + path: '/model-serving', + component: ModelServing, + auth: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelServing, + }, + { + path: '/audit', + component: () => import('views/Audit'), auth: true, + async: true, roles: [FedRoles.Admin], }, -]; + { + path: '/partners', + component: () => import('views/Partner'), + async: true, + auth: true, + }, + { + path: '/operation', + component: () => import('views/OperationMaintenance'), + auth: true, + async: true, + roles: [FedRoles.Admin], + }, + { + path: '/dashboard', + component: () => import('views/Dashboard'), + auth: true, + async: true, + roles: [FedRoles.Admin], + }, + { + path: '/data_fix', + component: () => import('views/DataFix'), + auth: true, + async: true, + roles: [FedRoles.Admin], + }, + { + path: '/composer', + component: () => import('views/Composer'), + auth: true, + async: true, + roles: [FedRoles.Admin], + }, + { + path: '/cleanup', + component: () => import('views/Cleanup'), + auth: true, + async: true, + roles: [FedRoles.Admin], + }, +].filter(Boolean) as FedRouteConfig[]; + +export const noSidebarRoutes: FedRouteConfig[] = [ + { + path: '/datasets/raw/create', + component: CreateRawDataset, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.datasets, + }, + { + path: '/datasets/processed/:action(create|edit|authorize)/:id?', + component: CreateProcessedDataset, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.datasets, + }, + { + path: NewModelCenterRoutes.ModelTrainCreateCentralization, + component: CreateCentralization, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelCenter, + }, + { + path: NewModelCenterRoutes.ModelTrainCreate, + component: NewModelTrainCreate, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelCenter, + }, + { + path: NewModelCenterRoutes.ModelTrainJobCreate, + component: ModelTrainJobCreate, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelCenter, + }, + { + path: NewModelCenterRoutes.ModelEvaluationCreate, + component: NewModelEvaluationCreate, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.modelCenter, + }, + { + path: '/partners/create', + component: CreatePartner, + auth: true, + exact: true, + roles: [FedRoles.Admin], + }, + { + path: '/partners/edit/:id', + component: EditPartner, + auth: true, + exact: true, + roles: [FedRoles.Admin], + }, + { + path: '/algorithm-management/acceptance/:id', + component: AlgorithmAcceptance, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.algorithmManagement, + }, + { + path: DatasetCenterRoutes.DatasetCreate, + component: CreateDataSource, + auth: true, + exact: true, + abilitiesSupport: ABILITIES_SIDEBAR_MENU_MAPPER.datasets, + }, +].filter(Boolean) as FedRouteConfig[]; export default routes; diff --git a/web_console_v2/client/tests/setup.ts b/web_console_v2/client/tests/setup.ts index 22ca48370..9326069cf 100644 --- a/web_console_v2/client/tests/setup.ts +++ b/web_console_v2/client/tests/setup.ts @@ -3,6 +3,7 @@ // expect(element).toHaveTextContent(/react/i) // learn more: https://github.com/testing-library/jest-dom import '@testing-library/jest-dom'; +import 'jest-styled-components'; /** * Since i18next doesn't support jest environment @@ -17,4 +18,17 @@ jest.mock('i18next', () 
=> ({ }; }, t: (k: any) => k, + useTranslation: () => ({ + t: (k: any) => k, + }), })); + +Object.defineProperty(window, 'matchMedia', { + value: () => { + return { + matches: false, + addListener: () => {}, + removeListener: () => {}, + }; + }, +}); diff --git a/web_console_v2/client/tsconfig.json b/web_console_v2/client/tsconfig.json index 45b5731f6..7914775ba 100644 --- a/web_console_v2/client/tsconfig.json +++ b/web_console_v2/client/tsconfig.json @@ -9,15 +9,15 @@ "strict": true, "forceConsistentCasingInFileNames": true, "noFallthroughCasesInSwitch": true, - "module": "esnext", + "module": "commonjs", "moduleResolution": "node", "resolveJsonModule": true, "isolatedModules": true, "noEmit": true, "jsx": "react", "baseUrl": "./src", - "typeRoots": ["./src/typings", "./node_modules/@types"] + "typeRoots": ["node_modules/@types", "./src/typings"] }, - "include": ["./src", "./src/typings", "tests"], - "exclude": ["node_modules", "./node_modules", "./node_modules/*", "build", "build/*"] + "include": ["./src", "tests"], + "exclude": ["build", "build/*"] } diff --git a/web_console_v2/docker/spark/Dockerfile b/web_console_v2/docker/spark/Dockerfile deleted file mode 100644 index c7251fb14..000000000 --- a/web_console_v2/docker/spark/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -FROM registry.cn-beijing.aliyuncs.com/fedlearner/spark-py:v3.0.0 -LABEL maintainers="Wang Sen <wangsen.0914@bytedance.com>, Runyu Yu<yurunyu@bytedance.com>" - -USER root -ARG DEBIAN_FRONTEND=noninteractive -RUN mkdir -p /usr/share/man/man1/ && apt update && apt install -y software-properties-common \ - && apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' \ - && apt update && apt install -y maven openjdk-8-jdk git \ - && apt-get clean && rm -rf /var/lib/apt/lists/* - -RUN git clone https://github.com/tensorflow/ecosystem.git /opt/ecosystem - -ENV ROOT_DIR /opt/ecosystem -ENV SPARK_HOME /opt/spark -ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 -ENV PATH ${JAVA_HOME}/bin:${PATH} -ENV PYSPARK_PYTHON=/usr/bin/python3 -ENV PYSPARK_DRIVER_PYTHON=/usr/bin/python3 - -# NOTE: scala version is 2.12 -RUN cd ${ROOT_DIR}/hadoop && mvn clean install -DskipTests && cp target/tensorflow-hadoop-1.10.0.jar ${SPARK_HOME}/jars/ -RUN cd ${ROOT_DIR}/spark/spark-tensorflow-connector && mvn clean install -DskipTests && cp target/spark-tensorflow-connector_2.12-1.11.0.jar ${SPARK_HOME}/jars/ \ - && rm -rf /opt/ecosystem - -COPY ./requirements.txt /opt/env/requirements.txt -RUN pip3 install -U pip -i https://pypi.doubanio.com/simple \ - && /usr/bin/python3 -m pip install -r /opt/env/requirements.txt -i https://pypi.doubanio.com/simple diff --git a/web_console_v2/docker/spark/requirements.txt b/web_console_v2/docker/spark/requirements.txt deleted file mode 100644 index 18d08731d..000000000 --- a/web_console_v2/docker/spark/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -pandas==1.1.5 -fsspec==2021.4.0 \ No newline at end of file diff --git a/web_console_v2/inspection/BUILD.bazel b/web_console_v2/inspection/BUILD.bazel new file mode 100644 index 000000000..9bb322c5a --- /dev/null +++ b/web_console_v2/inspection/BUILD.bazel @@ -0,0 +1,67 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = [":data_inspection_package"]) + +package_group( + name = "data_inspection_package", + packages = ["//web_console_v2/inspection/..."], +) + +py_library( + name = "error_code_lib", + srcs = [ + "error_code.py", + ], + imports = ["."], + visibility = ["//visibility:public"], + deps = [ + 
":envs_lib", + "@common_fsspec//:pkg", + ], +) + +py_library( + name = "dataset_directory_lib", + srcs = ["dataset_directory.py"], + imports = ["."], + visibility = ["//visibility:public"], +) + +py_library( + name = "envs_lib", + srcs = ["envs.py"], + imports = ["."], + visibility = ["//web_console_v2/inspection:data_inspection_package"], +) + +py_test( + name = "error_code_test", + size = "small", + srcs = [ + "error_code_test.py", + ], + visibility = ["//web_console_v2/inspection:data_inspection_package"], + deps = [ + ":error_code_lib", + "@common_fsspec//:pkg", + ], +) + +py_test( + name = "dataset_directory_test", + size = "small", + srcs = [ + "dataset_directory_test.py", + ], + visibility = ["//web_console_v2/inspection:data_inspection_package"], + deps = [ + ":dataset_directory_lib", + ], +) + +# TODO(liuhehan): bazelize this part after we copy two tfrecords jar and hist jars to docker +filegroup( + name = "inspection_srcs", + srcs = glob(["**"]), + visibility = ["//visibility:public"], +) diff --git a/web_console_v2/inspection/__init__.py b/web_console_v2/inspection/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/inspection/analyzer/analyzer_task.py b/web_console_v2/inspection/analyzer/analyzer_task.py new file mode 100644 index 000000000..4daf3b7c3 --- /dev/null +++ b/web_console_v2/inspection/analyzer/analyzer_task.py @@ -0,0 +1,229 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import json +import logging +from typing import Optional +import fsspec +import cv2 +import numpy as np +from numpy import array +from copy import deepcopy + +from pyspark import SparkContext +from pyspark.sql import SparkSession +from pyspark.sql import functions as spark_func +from pyspark.sql.dataframe import DataFrame +from pyspark.sql.column import _to_java_column, _to_seq, Column +from dataset_directory import DatasetDirectory + +from util import load_tfrecords, is_file_matched +from error_code import AreaCode, ErrorType, JobException + +_DEFAULT_BUCKETS_NUM = 10 +_DEFAULT_SAMPLES_NUM = 20 +_MAX_SIZE = (256, 256) +_THUMBNAIL_EXTENSION = '.png' +_METRICS_UNSUPPORT_COLUMN_TYPE = [ + 'binary', +] +_HIST_UNSUPPORT_COLUMN_TYPE = ['string', 'binary'] + + +def _is_metrics_support_type(column_type: str): + return column_type not in _METRICS_UNSUPPORT_COLUMN_TYPE and not column_type.startswith('array') + + +def _is_hist_support_type(column_type: str): + return column_type not in _HIST_UNSUPPORT_COLUMN_TYPE and not column_type.startswith('array') + + +def _hist_func(feat_col: Column, min_num: float, max_num: float, bins_num: int, interval: float, sc: SparkContext): + hist = sc._jvm.com.bytedance.aml.enterprise.sparkudaf.Hist.getFunc() # pylint: disable=protected-access + return Column( + hist.apply( + _to_seq(sc, [ + feat_col, + spark_func.lit(min_num), + spark_func.lit(max_num), + spark_func.lit(bins_num), + spark_func.lit(interval) + ], _to_java_column))) + + +def _decode_binary_to_array(data: bytearray, h: int, w: int, c: int) -> array: + return np.reshape(data, (h, w, c)) + + +def _get_thumbnail(img: array, height: int, width: int) -> array: + size = (min(height, _MAX_SIZE[0]), min(width, _MAX_SIZE[1])) + return cv2.resize(img, size, interpolation=cv2.INTER_AREA) + + +# TODO(liuhehan): seperate this analyzer task to meta_task and preview_task +class AnalyzerTask(object): + + def _extract_feature_metrics(self, df: DataFrame, dtypes_dict: dict) -> dict: + df_missing = df.select(*(spark_func.sum(spark_func.col(c).isNull().cast('int')).alias(c) + for c in df.columns + if _is_metrics_support_type(dtypes_dict[c]))).withColumn( + 'summary', spark_func.lit('missing_count')) + df_stats = df.describe().unionByName(df_missing) + df_stats = df_stats.toPandas().set_index('summary').transpose() + return df_stats.to_dict(orient='index') + + def _extract_metadata(self, df: DataFrame, is_image: bool) -> dict: + """ + meta = { + label_count: [dict] + count: int + dtypes: [{'key': feature_name, 'value': feature_type},..], + sample: [row], + } + """ + meta = {} + # dtypes + logging.info('### loading dtypes...') + dtypes = [] + for d in df.dtypes: + k, v = d # (feature, type) + dtypes.append({'key': k, 'value': v}) + # TODO(wangzeju): refactor the key names + meta['dtypes'] = deepcopy(dtypes) + # remove binary in image metadata, add label-count + if is_image: + meta['dtypes'] = [item for item in meta['dtypes'] if item['key'] != 'data'] + # TODO(wangzeju): hard code to count for each category + # need to support more data/label formats on more columns. 
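+        # e.g. (illustrative values only) a batch whose 'label' column holds
+        # ['cat', 'dog', 'cat'] collects to
+        #     label_count = [{'label': 'cat', 'count': 2}, {'label': 'dog', 'count': 1}]
+        # because groupBy('label').count() returns Row(label=..., count=...) objects.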
+ if 'label' in df.columns: + label_count_rows = df.groupBy('label').count().collect() + label_count = [row.asDict() for row in label_count_rows] + meta['label_count'] = label_count + # sample count + logging.info('### loading count...') + meta['count'] = df.count() + # sample and thumbnail + logging.info('### loading sample...') + rows = df.head(_DEFAULT_SAMPLES_NUM) + samples = [] + for row in rows: + sample = [row[col_map['key']] for col_map in meta['dtypes']] + samples.append(sample) + meta['sample'] = samples + return meta + + def _extract_thumbnail(self, df: DataFrame, thumbnail_path: str): + samples = df.head(_DEFAULT_SAMPLES_NUM) + for row in samples: + item = row.asDict() + h, w, c = item['height'], item['width'], item['nChannels'] + file_name, raw_data = item['file_name'], item['data'] + img_array = _decode_binary_to_array(raw_data, h, w, c) + logging.info(f'### process with {file_name}') + img = _get_thumbnail(img_array, h, w) + logging.info(f'### {file_name} shape is:{img.shape}') + # get thumbnail file_name in _THUMBNAIL_EXTENSION + thumbnail_file_name = file_name.split('.')[0] + _THUMBNAIL_EXTENSION + img_path = os.path.join(thumbnail_path, thumbnail_file_name) + success, encoded_image = cv2.imencode(_THUMBNAIL_EXTENSION, img) + bytes_content = encoded_image.tobytes() + logging.info(f'### will write bytes_content to {img_path}') + with fsspec.open(img_path, mode='wb') as f: + f.write(bytes_content) + + def _extract_feature_hist(self, spark: SparkSession, df: DataFrame, dtypes_dict: dict, buckets_num: int) -> dict: + # feature histogram + logging.info('### loading hist...') + hist = {} + feat_col_list = [(col_idx, col_name) + for col_idx, col_name in enumerate(df.columns) + if _is_hist_support_type(dtypes_dict[col_name])] + if feat_col_list: + # When there is a NaN value, spark's max function will get a NaN result + # so it needs to be filled with the default value of zero + filled_df = df.na.fill(0) + min_col_list = [ + spark_func.min(spark_func.col(col_name)).alias(f'{col_name}-min') + for (col_idx, col_name) in feat_col_list + ] + max_col_list = [ + spark_func.max(spark_func.col(col_name)).alias(f'{col_name}-max') + for (col_idx, col_name) in feat_col_list + ] + minmax_df = filled_df.select(*(min_col_list + max_col_list)) + minmax_row = minmax_df.collect()[0] + hist_args_map = {} + for col_idx, col_name in feat_col_list: + min_num = minmax_row[f'{col_name}-min'] + max_num = minmax_row[f'{col_name}-max'] + hist_args = (spark_func.col(col_name), min_num, max_num, buckets_num, (max_num - min_num) / buckets_num, + spark.sparkContext) + hist_args_map[col_name] = hist_args + logging.info(f'### will get the hist statistics for feat cols: {feat_col_list}') + hist_result = df.select( + *[_hist_func(*hist_args_map[col_name]).alias(col_name) for (_, col_name) in feat_col_list]).collect() + for (_, col_name), col_result in zip(feat_col_list, hist_result[0]): + hist[col_name] = {'x': col_result['bins'], 'y': col_result['counts']} + return hist + + def run(self, spark: SparkSession, dataset_path: str, wildcard: str, is_image: bool, batch_name: str, + buckets_num: Optional[int], thumbnail_path: Optional[str]): + """extract metadata, features' metrics, hists and thumbnail for image format + + Args: + spark: spark session + dataset_path: path of dataset. 
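+                (the root directory wrapped by DatasetDirectory, from which the
+                batch data path and the batch meta file are derived)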
+ wildcard: the wildcard used to match all tfrecord files + is_image: whether the dataset is an image dataset + batch_name: name of the target batch to analyze + buckets_num: the number of buckets used to build the feature histogram + thumbnail_path: dir path to save the thumbnails + + Raises: + JobException: read data by pyspark failed + """ + if buckets_num is None: + buckets_num = _DEFAULT_BUCKETS_NUM + dataset_directory = DatasetDirectory(dataset_path=dataset_path) + files = dataset_directory.batch_path(batch_name=batch_name) + meta_path = dataset_directory.batch_meta_file(batch_name=batch_name) + if not is_file_matched(files): + # this is a hack to allow empty intersection dataset + # no file matched, just skip analyzer + logging.warning(f'input_dataset_path {files} matches 0 files, skip analyzer task') + return + # load data + try: + df = load_tfrecords(spark, files, dataset_path) + except Exception as e: # pylint: disable=broad-except + raise JobException(AreaCode.ANALYZER, ErrorType.DATA_LOAD_ERROR, + f'failed to read input data, err: {str(e)}') from e + if df.count() == 0: + # this is a hack to allow empty intersection dataset + # all files are empty, just skip analyzer + logging.warning(f'got 0 data items in {files}, skip analyzer task') + return + dtypes_dict = dict(df.dtypes) + # extract pipeline + meta = self._extract_metadata(df, is_image) + meta['features'] = self._extract_feature_metrics(df, dtypes_dict) + if is_image: + self._extract_thumbnail(df, thumbnail_path) + meta['hist'] = self._extract_feature_hist(spark, df, dtypes_dict, buckets_num) + # save metadata to file + logging.info(f'### writing meta, path is {meta_path}') + with fsspec.open(meta_path, mode='w') as f: + f.write(json.dumps(meta)) diff --git a/web_console_v2/inspection/analyzer/analyzer_task_test.py b/web_console_v2/inspection/analyzer/analyzer_task_test.py new file mode 100644 index 000000000..348ff4797 --- /dev/null +++ b/web_console_v2/inspection/analyzer/analyzer_task_test.py @@ -0,0 +1,533 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +# pylint: disable=protected-access +import unittest +import json +import os +from unittest import mock +import fsspec + +from pyspark.sql.types import BinaryType, IntegerType, StructField, StructType, StringType + +from testing.spark_test_case import PySparkTestCase +from analyzer.analyzer_task import AnalyzerTask +from dataset_directory import DatasetDirectory + + +class AnalyzerTaskTest(PySparkTestCase): + + def setUp(self) -> None: + super().setUp() + self._data_path = os.path.join(self.tmp_dataset_path, 'test_dataset') + self._dataset_directory = DatasetDirectory(self._data_path) + self._batch_name = 'test_batch' + self._filesystem = fsspec.filesystem('file') + self.maxDiff = None + self.analyzer_task = AnalyzerTask() + + def tearDown(self) -> None: + self._clear_up() + return super().tearDown() + + def _generate_dataframe(self): + data = [ + (1, b'01001100111', 256, 256, 3, 'cat', 'image_1'), + (2, b'01001100111', 245, 246, None, 'dog', 'image_2'), + (3, b'01001100111', None, 314, 3, 'cat', 'image_3'), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('data', BinaryType(), False), + StructField('rows', IntegerType(), True), + StructField('cols', IntegerType(), False), + StructField('channel', IntegerType(), True), + StructField('label', StringType(), False), + StructField('file_name', StringType(), False), + ]) + return self.spark.createDataFrame(data=data, schema=schema) + + def _generate_all_string_and_binary_dataframe(self): + data = [ + ('1', b'01001100111', 'cat'), + ('2', b'01001100111', 'dog'), + ('3', b'01001100111', 'cat'), + ] + schema = StructType([ + StructField('raw_id', StringType(), False), + StructField('image', BinaryType(), False), + StructField('label', StringType(), False), + ]) + return self.spark.createDataFrame(data=data, schema=schema) + + def _generate_fake_image_dataframe(self): + data = [ + (1, b'010011001110', 2, 2, 3, 'cat', 'image_1.jpg'), + (2, b'010011', 1, 2, 3, 'dog', 'image_2.jpg'), + (3, b'010011001110100111', 3, 2, 3, 'cat', 'image_3.jpg'), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('data', BinaryType(), False), + StructField('height', IntegerType(), False), + StructField('width', IntegerType(), False), + StructField('nChannels', IntegerType(), False), + StructField('label', StringType(), False), + StructField('file_name', StringType(), False), + ]) + return self.spark.createDataFrame(data=data, schema=schema) + + def _generate_tfrecords_image(self): + df = self._generate_fake_image_dataframe() + df.repartition(3).write.format('tfrecords').option('compression', 'none').save( + self._dataset_directory.batch_path(self._batch_name), mode='overwrite') + with fsspec.open(self._dataset_directory.schema_file, mode='w') as f: + json.dump(df.schema.jsonValue(), f) + + def _generate_tfrecords_tabular(self): + data = [ + (1, 2, 2, 3, 'cat', 'image_1.jpg'), + (2, 1, 2, 3, 'dog', 'image_2.jpg'), + (3, 3, 2, 3, 'cat', 'image_3.jpg'), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('height', IntegerType(), False), + StructField('width', IntegerType(), False), + StructField('nChannels', IntegerType(), False), + StructField('label', StringType(), False), + StructField('file_name', StringType(), False), + ]) + df = self.spark.createDataFrame(data=data, schema=schema) + df.repartition(3).write.format('tfrecords').option('compression', 'none').save( + self._dataset_directory.batch_path(self._batch_name), mode='overwrite') + with 
fsspec.open(self._dataset_directory.schema_file, mode='w') as f: + json.dump(df.schema.jsonValue(), f) + + def _clear_up(self): + if self._filesystem.isdir(self.tmp_dataset_path): + self._filesystem.rm(self.tmp_dataset_path, recursive=True) + + def test_analyzer_image(self): + self._generate_tfrecords_image() + self.analyzer_task.run(spark=self.spark, + dataset_path=self._data_path, + wildcard='batch/test_batch/**', + is_image=True, + batch_name=self._batch_name, + buckets_num=10, + thumbnail_path=self._dataset_directory.thumbnails_path(self._batch_name)) + expected_meta = { + 'dtypes': [{ + 'key': 'raw_id', + 'value': 'int' + }, { + 'key': 'height', + 'value': 'int' + }, { + 'key': 'width', + 'value': 'int' + }, { + 'key': 'nChannels', + 'value': 'int' + }, { + 'key': 'label', + 'value': 'string' + }, { + 'key': 'file_name', + 'value': 'string' + }], + 'label_count': mock.ANY, + 'count': 3, + 'sample': mock.ANY, + 'features': { + 'raw_id': { + 'count': '3', + 'mean': '2.0', + 'stddev': '1.0', + 'min': '1', + 'max': '3', + 'missing_count': '0' + }, + 'height': { + 'count': '3', + 'mean': '2.0', + 'stddev': '1.0', + 'min': '1', + 'max': '3', + 'missing_count': '0' + }, + 'width': { + 'count': '3', + 'mean': '2.0', + 'stddev': '0.0', + 'min': '2', + 'max': '2', + 'missing_count': '0' + }, + 'nChannels': { + 'count': '3', + 'mean': '3.0', + 'stddev': '0.0', + 'min': '3', + 'max': '3', + 'missing_count': '0' + }, + 'label': { + 'count': '3', + 'mean': None, + 'stddev': None, + 'min': 'cat', + 'max': 'dog', + 'missing_count': '0' + }, + 'file_name': { + 'count': '3', + 'mean': None, + 'stddev': None, + 'min': 'image_1.jpg', + 'max': 'image_3.jpg', + 'missing_count': '0' + } + }, + 'hist': { + 'raw_id': { + 'x': [1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4000000000000004, 2.6, 2.8, 3.0], + 'y': [1, 0, 0, 0, 0, 1, 0, 0, 0, 1] + }, + 'height': { + 'x': [1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4000000000000004, 2.6, 2.8, 3.0], + 'y': [1, 0, 0, 0, 0, 1, 0, 0, 0, 1] + }, + 'width': { + 'x': [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0], + 'y': [0, 0, 0, 0, 0, 0, 0, 0, 0, 3] + }, + 'nChannels': { + 'x': [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0], + 'y': [0, 0, 0, 0, 0, 0, 0, 0, 0, 3] + } + } + } + expected_sample = [[1, 2, 2, 3, 'cat', 'image_1.jpg'], [2, 1, 2, 3, 'dog', 'image_2.jpg'], + [3, 3, 2, 3, 'cat', 'image_3.jpg']] + expected_label_count = [{'label': 'dog', 'count': 1}, {'label': 'cat', 'count': 2}] + expected_file_names = ['image_1.png', 'image_2.png', 'image_3.png'] + files = self._filesystem.ls(self._dataset_directory.thumbnails_path(self._batch_name)) + file_names = [f.split('/')[-1] for f in files] + self.assertCountEqual(file_names, expected_file_names) + batch_level_meta_path = self._dataset_directory.batch_meta_file(batch_name=self._batch_name) + self.assertTrue(self._filesystem.isfile(batch_level_meta_path)) + with fsspec.open(batch_level_meta_path, 'r') as f: + batch_level_meta = json.load(f) + self.assertEqual(batch_level_meta, expected_meta) + self.assertCountEqual(batch_level_meta.get('sample'), expected_sample) + self.assertCountEqual(batch_level_meta.get('label_count'), expected_label_count) + + def test_analyzer_tabular(self): + self._generate_tfrecords_tabular() + self.analyzer_task.run(spark=self.spark, + dataset_path=self._data_path, + wildcard='batch/test_batch/**', + is_image=False, + batch_name=self._batch_name, + buckets_num=10, + thumbnail_path=self._dataset_directory.thumbnails_path(self._batch_name)) + expected_meta = { + 'dtypes': [{ + 'key': 
'raw_id', + 'value': 'int' + }, { + 'key': 'height', + 'value': 'int' + }, { + 'key': 'width', + 'value': 'int' + }, { + 'key': 'nChannels', + 'value': 'int' + }, { + 'key': 'label', + 'value': 'string' + }, { + 'key': 'file_name', + 'value': 'string' + }], + 'count': 3, + 'sample': mock.ANY, + 'features': { + 'raw_id': { + 'count': '3', + 'mean': '2.0', + 'stddev': '1.0', + 'min': '1', + 'max': '3', + 'missing_count': '0' + }, + 'height': { + 'count': '3', + 'mean': '2.0', + 'stddev': '1.0', + 'min': '1', + 'max': '3', + 'missing_count': '0' + }, + 'width': { + 'count': '3', + 'mean': '2.0', + 'stddev': '0.0', + 'min': '2', + 'max': '2', + 'missing_count': '0' + }, + 'nChannels': { + 'count': '3', + 'mean': '3.0', + 'stddev': '0.0', + 'min': '3', + 'max': '3', + 'missing_count': '0' + }, + 'label': { + 'count': '3', + 'mean': None, + 'stddev': None, + 'min': 'cat', + 'max': 'dog', + 'missing_count': '0' + }, + 'file_name': { + 'count': '3', + 'mean': None, + 'stddev': None, + 'min': 'image_1.jpg', + 'max': 'image_3.jpg', + 'missing_count': '0' + } + }, + 'hist': { + 'raw_id': { + 'x': [1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4000000000000004, 2.6, 2.8, 3.0], + 'y': [1, 0, 0, 0, 0, 1, 0, 0, 0, 1] + }, + 'height': { + 'x': [1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4000000000000004, 2.6, 2.8, 3.0], + 'y': [1, 0, 0, 0, 0, 1, 0, 0, 0, 1] + }, + 'width': { + 'x': [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0], + 'y': [0, 0, 0, 0, 0, 0, 0, 0, 0, 3] + }, + 'nChannels': { + 'x': [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0], + 'y': [0, 0, 0, 0, 0, 0, 0, 0, 0, 3] + } + } + } + batch_level_meta_path = self._dataset_directory.batch_meta_file(batch_name=self._batch_name) + self.assertTrue(self._filesystem.isfile(batch_level_meta_path)) + with fsspec.open(batch_level_meta_path, 'r') as f: + batch_level_meta = json.load(f) + self.assertEqual(batch_level_meta, expected_meta) + expected_sample = [[1, 2, 2, 3, 'cat', 'image_1.jpg'], [2, 1, 2, 3, 'dog', 'image_2.jpg'], + [3, 3, 2, 3, 'cat', 'image_3.jpg']] + self.assertCountEqual(batch_level_meta.get('sample'), expected_sample) + + def test_extract_feature_hist(self): + df = self._generate_dataframe() + hist = self.analyzer_task._extract_feature_hist(self.spark, df, dict(df.dtypes), 10) + expect_hist = { + 'raw_id': { + 'x': [1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4000000000000004, 2.6, 2.8, 3.0], + 'y': [1, 0, 0, 0, 0, 1, 0, 0, 0, 1] + }, + 'rows': { + 'x': [ + 0.0, 25.6, 51.2, 76.80000000000001, 102.4, 128.0, 153.60000000000002, 179.20000000000002, 204.8, + 230.4, 256.0 + ], + 'y': [1, 0, 0, 0, 0, 0, 0, 0, 0, 2] + }, + 'cols': { + 'x': [246.0, 252.8, 259.6, 266.4, 273.2, 280.0, 286.8, 293.6, 300.4, 307.2, 314.0], + 'y': [1, 1, 0, 0, 0, 0, 0, 0, 0, 1] + }, + 'channel': { + 'x': [ + 0.0, 0.3, 0.6, 0.8999999999999999, 1.2, 1.5, 1.7999999999999998, 2.1, 2.4, 2.6999999999999997, 3.0 + ], + 'y': [1, 0, 0, 0, 0, 0, 0, 0, 0, 2] + } + } + self.assertDictEqual(hist, expect_hist) + + # this case is specially designed for a corner case: + # if the input data columns are all string/binary, hist should be empty + df = self._generate_all_string_and_binary_dataframe() + hist = self.analyzer_task._extract_feature_hist(self.spark, df, dict(df.dtypes), 10) + self.assertDictEqual(hist, {}) + + def test_extract_thumbnail(self): + df = self._generate_fake_image_dataframe() + self.analyzer_task._extract_thumbnail(df, self._dataset_directory.thumbnails_path(self._batch_name)) + files = self._filesystem.ls(self._dataset_directory.thumbnails_path(self._batch_name)) +
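# ls returns absolute paths, so only the basenames are compared below +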
file_names = [f.split('/')[-1] for f in files] + golden_file_names = ['image_1.png', 'image_2.png', 'image_3.png'] + self.assertCountEqual(file_names, golden_file_names) + + def test_extract_feature_metrics(self): + df = self._generate_dataframe() + df_stats_dict = self.analyzer_task._extract_feature_metrics(df, dict(df.dtypes)) + expect_stats_dict = { + 'raw_id': { + 'count': '3', + 'mean': '2.0', + 'stddev': '1.0', + 'min': '1', + 'max': '3', + 'missing_count': '0' + }, + 'rows': { + 'count': '2', + 'mean': '250.5', + 'stddev': mock.ANY, + 'min': '245', + 'max': '256', + 'missing_count': '1' + }, + 'cols': { + 'count': '3', + 'mean': '272.0', + 'stddev': mock.ANY, + 'min': '246', + 'max': '314', + 'missing_count': '0' + }, + 'channel': { + 'count': '2', + 'mean': '3.0', + 'stddev': '0.0', + 'min': '3', + 'max': '3', + 'missing_count': '1' + }, + 'label': { + 'count': '3', + 'mean': None, + 'stddev': None, + 'min': 'cat', + 'max': 'dog', + 'missing_count': '0' + }, + 'file_name': { + 'count': '3', + 'mean': None, + 'stddev': None, + 'min': 'image_1', + 'max': 'image_3', + 'missing_count': '0' + } + } + self.assertDictEqual(df_stats_dict, expect_stats_dict) + + def test_extract_metadata_image(self): + df = self._generate_fake_image_dataframe() + meta = self.analyzer_task._extract_metadata(df, True) + expect_meta = { + 'dtypes': [{ + 'key': 'raw_id', + 'value': 'int' + }, { + 'key': 'height', + 'value': 'int' + }, { + 'key': 'width', + 'value': 'int' + }, { + 'key': 'nChannels', + 'value': 'int' + }, { + 'key': 'label', + 'value': 'string' + }, { + 'key': 'file_name', + 'value': 'string' + }], + 'label_count': [{ + 'label': 'dog', + 'count': 1 + }, { + 'label': 'cat', + 'count': 2 + }], + 'count': + 3, + 'sample': [[1, 2, 2, 3, 'cat', 'image_1.jpg'], [2, 1, 2, 3, 'dog', 'image_2.jpg'], + [3, 3, 2, 3, 'cat', 'image_3.jpg']] + } + self.assertEqual(meta, expect_meta) + + def test_extract_metadata_tabular(self): + df = self._generate_dataframe() + meta = self.analyzer_task._extract_metadata(df, False) + expect_meta = { + 'dtypes': [{ + 'key': 'raw_id', + 'value': 'int' + }, { + 'key': 'data', + 'value': 'binary' + }, { + 'key': 'rows', + 'value': 'int' + }, { + 'key': 'cols', + 'value': 'int' + }, { + 'key': 'channel', + 'value': 'int' + }, { + 'key': 'label', + 'value': 'string' + }, { + 'key': 'file_name', + 'value': 'string' + }], + 'count': + 3, + 'sample': [[1, bytearray(b'01001100111'), 256, 256, 3, 'cat', 'image_1'], + [2, bytearray(b'01001100111'), 245, 246, None, 'dog', 'image_2'], + [3, bytearray(b'01001100111'), None, 314, 3, 'cat', 'image_3']] + } + self.assertEqual(meta, expect_meta) + + def test_empty_file(self): + batch_path = self._dataset_directory.batch_path(self._batch_name) + fs = fsspec.get_mapper(batch_path).fs + fs.mkdir(batch_path) + fs.touch(os.path.join(batch_path, 'empty_file')) + self.analyzer_task.run(spark=self.spark, + dataset_path=self._data_path, + wildcard='batch/test_batch/**', + is_image=False, + batch_name=self._batch_name, + buckets_num=10, + thumbnail_path=self._dataset_directory.thumbnails_path(self._batch_name)) + meta_path = self._dataset_directory.batch_meta_file(batch_name=self._batch_name) + self.assertFalse(self._filesystem.exists(meta_path)) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/analyzer_v2.py b/web_console_v2/inspection/analyzer_v2.py new file mode 100644 index 000000000..95c372971 --- /dev/null +++ b/web_console_v2/inspection/analyzer_v2.py @@ -0,0 +1,95 @@ +# Copyright 2023 The FedLearner 
Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import argparse + +from pyspark.sql import SparkSession +from pyspark.conf import SparkConf + +from analyzer.analyzer_task import AnalyzerTask +from util import normalize_file_path, build_spark_conf +from error_code import AreaCode, ErrorType, JobException, write_termination_message + + +def get_args(args=None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description='analyzer scripts arguments') + subparsers = parser.add_subparsers(dest='command', help='sub-command help') + # image parser + image_parser = subparsers.add_parser('image', help='analyzer image format dataset') + image_parser.add_argument('--data_path', type=str, required=True, help='path of dataset') + image_parser.add_argument('--file_wildcard', type=str, required=True, help='file wildcard') + image_parser.add_argument('--buckets_num', '-n', type=int, required=False, help='the number of buckets for hist') + image_parser.add_argument('--thumbnail_path', type=str, required=True, help='dir path to save the thumbnails') + image_parser.add_argument('--batch_name', type=str, required=False, default='', help='batch_name of target batch') + image_parser.add_argument('--skip', action='store_true', help='skip analyzer task') + # tabular parser + tabular_parser = subparsers.add_parser('tabular', help='analyzer tabular format dataset') + tabular_parser.add_argument('--data_path', type=str, required=True, help='path of dataset') + tabular_parser.add_argument('--file_wildcard', type=str, required=True, help='file wildcard') + tabular_parser.add_argument('--buckets_num', '-n', type=int, required=False, help='the number of buckets for hist') + tabular_parser.add_argument('--batch_name', type=str, required=False, default='', help='batch_name of target batch') + tabular_parser.add_argument('--skip', action='store_true', help='skip analyzer task') + # none_structured parser + none_structured_parser = subparsers.add_parser('none_structured', help='analyzer none_structured format dataset') + none_structured_parser.add_argument('--skip', action='store_true', help='skip analyzer task') + # all needed args for both image/tabular will be given, so we use known_args to ignore unnecessary args + args, _ = parser.parse_known_args(args) + + return args + + +def analyzer(): + try: + args = get_args() + except SystemExit: + write_termination_message(AreaCode.ANALYZER, ErrorType.INPUT_PARAMS_ERROR, + 'input params error, check details in logs') + raise + logging.info(f'[analyzer]:\n' + '----------------------\n' + 'Input params:\n' + f'{args.__dict__}\n' + '----------------------\n') + if args.skip: + logging.info('[analyzer]: skip analyzer job [SKIP]') + return + conf: SparkConf = build_spark_conf() + spark = SparkSession.builder.config(conf=conf).getOrCreate() + thumbnail_path_parameter = None + if args.command == 'image': + thumbnail_path_parameter = args.thumbnail_path + try: + if not args.batch_name: + raise 
JobException(area_code=AreaCode.ANALYZER, + error_type=ErrorType.INPUT_PARAMS_ERROR, + message='failed to find batch_name') + AnalyzerTask().run(spark=spark, + dataset_path=normalize_file_path(args.data_path), + wildcard=args.file_wildcard, + is_image=(args.command == 'image'), + batch_name=args.batch_name, + buckets_num=int(args.buckets_num) if args.buckets_num else None, + thumbnail_path=normalize_file_path(thumbnail_path_parameter)) + except JobException as e: + write_termination_message(e.area_code, e.error_type, e.message) + raise + finally: + spark.stop() + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + analyzer() diff --git a/web_console_v2/inspection/analyzer_v2_test.py b/web_console_v2/inspection/analyzer_v2_test.py new file mode 100644 index 000000000..0d45f3131 --- /dev/null +++ b/web_console_v2/inspection/analyzer_v2_test.py @@ -0,0 +1,119 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import unittest +from unittest.mock import ANY, MagicMock, patch + +from pyspark import SparkConf + +from analyzer_v2 import get_args, analyzer +from error_code import AreaCode, ErrorType, JobException + + +class AnalyzerTest(unittest.TestCase): + + def test_get_args(self): + data_path = '/data/fake_path' + file_wildcard = 'batch/**/**' + buckets_num = '10' + thumbnail_path = 'thumbnail' + batch_name = '20220101' + + # test image + args = get_args(['image', f'--data_path={data_path}', f'--file_wildcard={file_wildcard}', \ + f'--buckets_num={buckets_num}', f'--thumbnail_path={thumbnail_path}', '--skip', \ + f'--batch_name={batch_name}']) + self.assertEqual(args.command, 'image') + self.assertEqual(args.data_path, data_path) + self.assertEqual(args.file_wildcard, file_wildcard) + self.assertEqual(args.buckets_num, int(buckets_num)) + self.assertEqual(args.thumbnail_path, thumbnail_path) + self.assertTrue(args.skip) + self.assertEqual(args.batch_name, batch_name) + + # test tabular + args = get_args(['tabular', f'--data_path={data_path}', f'--file_wildcard={file_wildcard}', \ + f'--buckets_num={buckets_num}', f'--thumbnail_path={thumbnail_path}', f'--batch_name={batch_name}']) + self.assertEqual(args.command, 'tabular') + self.assertEqual(args.data_path, data_path) + self.assertEqual(args.file_wildcard, file_wildcard) + self.assertEqual(args.buckets_num, int(buckets_num)) + self.assertFalse(args.skip) + self.assertEqual(args.batch_name, batch_name) + + # test none_structured + args = get_args(['none_structured', '--skip']) + self.assertEqual(args.command, 'none_structured') + self.assertTrue(args.skip) + + # test no required args + with self.assertRaises(SystemExit): + get_args(['image', '--skip']) + + @patch('analyzer_v2.write_termination_message') + @patch('analyzer_v2.get_args') + @patch('analyzer_v2.build_spark_conf') + @patch('analyzer_v2.AnalyzerTask.run') + def test_analyzer(self, mock_run: MagicMock, mock_build_spark_conf: MagicMock, mock_get_args: MagicMock, + 
mock_write_termination_message: MagicMock): + # set local spark + mock_build_spark_conf.return_value = SparkConf().setMaster('local') + + # test skip + mock_get_args.return_value = argparse.Namespace(skip=True) + analyzer() + mock_build_spark_conf.assert_not_called() + + # test no batch_name + mock_get_args.reset_mock() + data_path = '/data/fake_path' + file_wildcard = 'batch/**/**' + buckets_num = 10 + thumbnail_path = 'thumbnail' + mock_get_args.return_value = argparse.Namespace(command='image', + data_path=data_path, + file_wildcard=file_wildcard, + buckets_num=buckets_num, + batch_name='', + thumbnail_path=thumbnail_path, + skip=False) + with self.assertRaises(JobException): + analyzer() + mock_write_termination_message.assert_called_once_with(AreaCode.ANALYZER, ErrorType.INPUT_PARAMS_ERROR, + 'failed to find batch_name') + + # test not skip + mock_get_args.reset_mock() + batch_name = '20220101' + mock_get_args.return_value = argparse.Namespace(command='image', + data_path=data_path, + file_wildcard=file_wildcard, + buckets_num=buckets_num, + batch_name=batch_name, + thumbnail_path=thumbnail_path, + skip=False) + analyzer() + mock_run.assert_called_once_with(spark=ANY, + dataset_path='file://' + data_path, + wildcard=file_wildcard, + is_image=True, + batch_name=batch_name, + buckets_num=buckets_num, + thumbnail_path=thumbnail_path) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/converter_v2.py b/web_console_v2/inspection/converter_v2.py new file mode 100644 index 000000000..1338095f6 --- /dev/null +++ b/web_console_v2/inspection/converter_v2.py @@ -0,0 +1,332 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import enum +import logging +import json +import argparse +import fsspec +import os + +from pyspark.conf import SparkConf +from pyspark.sql import SparkSession +from pyspark.sql import functions as spark_func +from pyspark.sql.types import StringType, ArrayType, StructType +from pyspark.sql.dataframe import DataFrame +from dataset_directory import DatasetDirectory + +from util import FileFormat, normalize_file_path, dataset_schema_path, build_spark_conf, is_file_matched, \ + load_by_file_format, EXAMPLE_ID +from error_code import AreaCode, ErrorType, JobException, write_termination_message + +DEFAULT_MANIFEST_PATH = 'manifest.json' +DEFAULT_IMAGES_PATH = 'images' +RAW_ID = 'raw_id' + + +class ImportType(enum.Enum): + COPY = 'COPY' + NO_COPY = 'NO_COPY' + + +@spark_func.udf(StringType()) +def get_file_basename(origin_name: str) -> str: + return os.path.basename(origin_name) + + +def flatten(schema, prefix=None): + fields = [] + for field in schema.fields: + name = prefix + '.' 
+ field.name if prefix else field.name + dtype = field.dataType + if isinstance(dtype, ArrayType): + dtype = dtype.elementType + + if isinstance(dtype, StructType): + fields += flatten(dtype, prefix=name) + else: + fields.append(name) + + return fields + + +def convert_timestamp_cols_type_to_string(df: DataFrame) -> DataFrame: + dtypes_dict = dict(df.dtypes) + ts_col_list = [col_name for col_name in df.columns if dtypes_dict[col_name] == 'timestamp'] + logging.info(f'will convert the timestamp type columns to string type:{ts_col_list}') + for col_name in ts_col_list: + df = df.withColumn(col_name, spark_func.col(col_name).cast('string')) + return df + + +def convert_image(spark: SparkSession, output_dataset_path: str, output_batch_path: str, input_batch_path: str, + manifest_name: str, images_dir_name: str): + """convert source data to tfrecords format and save to output_batch_path. + + Args: + spark: spark session + output_dataset_path: path of output dataset. + output_batch_path: path of output data_batch in this dataset. + input_batch_path: input batch path of the image dataset directory. + manifest_name: relative path of manifest file in zip archive. + images_dir_name: relative path of images directory in zip archive + + Raises: + JobException: read/write data by pyspark failed + + manifest json schema: + { + images:[ + { + name: str + file_name: str + created_at: str + annotation:{ + label: str + } + }, + ... + ] + } + + saved tfrecord schema: + +----------+-----+------+---------+-----+------+------+-----------+------+ + |file_name |width|height|nChannels|mode |data |name |created_at |label | + +----------+-----+------+---------+-----+------+------+-----------+------+ + |string |int |int |int |int |binary|string|string |string| + +----------+-----+------+---------+-----+------+------+-----------+------+ + """ + images_path = os.path.join(input_batch_path, images_dir_name) + logging.info(f'### will load the images dir into dataframe:{images_path}') + # spark.read.format('image') schema: + # origin: StringType (represents the file path of the image) + # height: IntegerType (height of the image) + # width: IntegerType (width of the image) + # nChannels: IntegerType (number of image channels) + # mode: IntegerType (OpenCV-compatible type) + # data: BinaryType (Image bytes in OpenCV-compatible order: row-wise BGR in most cases) + # document ref: https://spark.apache.org/docs/latest/ml-datasource.html + try: + images_df = spark.read.format('image').load(images_path).select('image.origin', 'image.width', 'image.height', + 'image.nChannels', 'image.mode', 'image.data') + except Exception as e: # pylint: disable=broad-except + raise JobException(AreaCode.CONVERTER, ErrorType.DATA_LOAD_ERROR, + f'failed to read input data, err: {str(e)}') from e + images_df = images_df.withColumn('file_name', get_file_basename(spark_func.col('origin'))).drop('origin') + manifest_path = os.path.join(input_batch_path, manifest_name) + logging.info(f'### will load the manifest json file into dataframe:{manifest_path}') + manifest_df = spark.read.json(manifest_path, multiLine=True) + manifest_df.printSchema() + manifest_df = manifest_df.select(spark_func.explode('images')).toDF('images').select( + 'images.name', 'images.file_name', 'images.created_at', 'images.annotation') + manifest_df = manifest_df.select(flatten(manifest_df.schema)) + logging.info('### will join the images_df with the manifest_df on file_name column') + df = images_df.join(manifest_df, 'file_name', 'inner') + df = 
convert_timestamp_cols_type_to_string(df) + logging.info(f'### saving to {output_batch_path}, in tfrecords') + try: + df.write.format('tfrecords').option('compression', 'none').save(output_batch_path, mode='overwrite') + except Exception as e: # pylint: disable=broad-except + raise JobException(AreaCode.CONVERTER, ErrorType.DATA_WRITE_ERROR, + f'failed to write data, err: {str(e)}') from e + with fsspec.open(dataset_schema_path(output_dataset_path), mode='w') as f: + json.dump(df.schema.jsonValue(), f) + + +def convert_tabular(spark: SparkSession, output_dataset_path: str, output_batch_path: str, input_batch_path: str, + file_format: FileFormat): + """convert source data to tfrecords format and save to output_batch_path. + + Args: + spark: spark session + output_dataset_path: path of output dataset. + output_batch_path: path of output data_batch in this dataset. + input_batch_path: input batch path of the tabular dataset. + file_format: FileFormat['csv', 'tfrecords'] + + Raises: + JobException: read/write data by pyspark failed + """ + if not is_file_matched(input_batch_path): + # this is a hack to allow empty intersection dataset + # no file matched, just mkdir output_batch_path and skip converter + fsspec.get_mapper(output_batch_path).fs.mkdir(output_batch_path) + logging.warning(f'input_dataset_path {input_batch_path} matches 0 files, skip converter task') + return + try: + df = load_by_file_format(spark, input_batch_path, file_format) + except Exception as e: # pylint: disable=broad-except + raise JobException(AreaCode.CONVERTER, ErrorType.DATA_LOAD_ERROR, + f'failed to read input data, err: {str(e)}') from e + # force raw_id to string type, as raw_id will be converted to bytes type in the raw_data job + # and a bigint type may cause a MemoryError + if RAW_ID in df.columns: + df = df.withColumn(RAW_ID, df[RAW_ID].cast(StringType())) + # force example_id to string type as example_id will be read as stringType in training + if EXAMPLE_ID in df.columns: + df = df.withColumn(EXAMPLE_ID, df[EXAMPLE_ID].cast(StringType())) + logging.info(f'### saving to {output_batch_path}, in tfrecords') + try: + df.write.format('tfrecords').option('compression', 'none').save(output_batch_path, mode='overwrite') + except Exception as e: # pylint: disable=broad-except + raise JobException(AreaCode.CONVERTER, ErrorType.DATA_WRITE_ERROR, + f'failed to write data, err: {str(e)}') from e + with fsspec.open(dataset_schema_path(output_dataset_path), mode='w') as f: + json.dump(df.schema.jsonValue(), f) + + +def convert_no_copy(output_dataset_path: str, output_batch_path: str, input_batch_path: str): + """save the source data path to the source_batch_path file in output_batch_path. + + Args: + output_dataset_path: path of output dataset. + output_batch_path: path of output data_batch in this dataset. + input_batch_path: input batch path of the source dataset.
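+ + Note: no data file is copied here; only a small pointer file (source_batch_path), + recording input_batch_path, is written under the output batch directory.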
+ + """ + batch_name = os.path.basename(output_batch_path) + with fsspec.open(DatasetDirectory(output_dataset_path).source_batch_path_file(batch_name), mode='w') as f: + f.write(input_batch_path) + + +def get_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description='converter scripts arguments') + subparsers = parser.add_subparsers(dest='command', help='sub-command help') + # image parser + image_parser = subparsers.add_parser('image', help='converter image format dataset') + image_parser.add_argument('--output_dataset_path', required=True, type=str, help='path of the output dataset') + image_parser.add_argument('--output_batch_path', + required=True, + type=str, + help='path of output data_batch in this dataset') + image_parser.add_argument('--input_batch_path', + required=True, + type=str, + help='input batch path of the image dataset') + image_parser.add_argument('--manifest_name', + type=str, + required=False, + default=DEFAULT_MANIFEST_PATH, + help='manifest file name in image dataset directory') + image_parser.add_argument('--images_dir_name', + type=str, + required=False, + default=DEFAULT_IMAGES_PATH, + help='images directory name in image dataset directory') + image_parser.add_argument('--import_type', + type=str, + choices=[import_type.value for import_type in ImportType], + required=False, + default=ImportType.COPY.value, + help='import type') + # tabular parser + tabular_parser = subparsers.add_parser('tabular', help='converter tabular format dataset') + tabular_parser.add_argument('--output_dataset_path', type=str, required=True, help='path of output dataset') + tabular_parser.add_argument('--output_batch_path', + type=str, + required=True, + help='path of output data_batch in this dataset') + tabular_parser.add_argument('--input_batch_path', + type=str, + required=True, + help='input batch path of the tabular dataset') + tabular_parser.add_argument('--format', + type=FileFormat, + choices=list(FileFormat), + required=True, + help='file format') + tabular_parser.add_argument('--import_type', + type=str, + choices=[import_type.value for import_type in ImportType], + required=False, + default=ImportType.COPY.value, + help='import type') + # none_structured parser + none_structured_parser = subparsers.add_parser('none_structured', help='converter none_structured format dataset') + none_structured_parser.add_argument('--output_dataset_path', type=str, required=True, help='path of output dataset') + none_structured_parser.add_argument('--output_batch_path', + type=str, + required=True, + help='path of output data_batch in this dataset') + none_structured_parser.add_argument('--input_batch_path', + type=str, + required=True, + help='input batch path of the none_structured dataset') + none_structured_parser.add_argument('--import_type', + type=str, + choices=[import_type.value for import_type in ImportType], + required=False, + default=ImportType.COPY.value, + help='import type') + # all needed args for both image/tabular/none_structured will be given, + # so we use known_args to ignore unnecessary args + args, _ = parser.parse_known_args() + + return args + + +def converter_task(): + try: + args = get_args() + except SystemExit: + write_termination_message(AreaCode.CONVERTER, ErrorType.INPUT_PARAMS_ERROR, + 'input params error, check details in logs') + raise + logging.info(f'[converter]:\n' + '----------------------\n' + 'Input params:\n' + f'{args.__dict__}\n' + '----------------------\n') + conf: SparkConf = build_spark_conf() + spark = 
SparkSession.builder.config(conf=conf).getOrCreate() + output_dataset_path = normalize_file_path(args.output_dataset_path) + output_batch_path = normalize_file_path(args.output_batch_path) + input_batch_path = normalize_file_path(args.input_batch_path) + try: + if args.import_type == ImportType.NO_COPY.value: + convert_no_copy(output_dataset_path=output_dataset_path, + output_batch_path=output_batch_path, + input_batch_path=input_batch_path) + else: + if args.command == 'none_structured': + raise JobException(AreaCode.CONVERTER, ErrorType.INPUT_PARAMS_ERROR, + 'none_structured dataset only supports no_copy import') + if args.command == 'image': + # image data + logging.info('will convert image format dataset') + convert_image(spark=spark, + output_dataset_path=output_dataset_path, + output_batch_path=output_batch_path, + input_batch_path=input_batch_path, + manifest_name=args.manifest_name, + images_dir_name=args.images_dir_name) + else: + # tabular data + logging.info('will convert tabular format dataset') + convert_tabular(spark=spark, + output_dataset_path=output_dataset_path, + output_batch_path=output_batch_path, + input_batch_path=input_batch_path, + file_format=args.format) + except JobException as e: + write_termination_message(e.area_code, e.error_type, e.message) + raise + finally: + spark.stop() + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + converter_task() diff --git a/web_console_v2/inspection/converter_v2_test.py b/web_console_v2/inspection/converter_v2_test.py new file mode 100644 index 000000000..d071a102f --- /dev/null +++ b/web_console_v2/inspection/converter_v2_test.py @@ -0,0 +1,272 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest +import os +import fsspec +import json + +from pyspark.sql.dataframe import DataFrame +from pyspark.sql import functions as spark_func +from pyspark.sql.types import StringType, StructType, Row + +from testing.spark_test_case import PySparkTestCase + +from converter_v2 import convert_image, convert_no_copy, convert_tabular +from dataset_directory import DatasetDirectory +from util import FileFormat, dataset_schema_path, load_tfrecords + + +@spark_func.udf(StringType()) +def get_file_basename(origin_name: str) -> str: + return os.path.basename(origin_name) + + +class ConverterTest(PySparkTestCase): + + def tearDown(self) -> None: + self._clear_up() + return super().tearDown() + + def _clear_up(self): + fs = fsspec.filesystem('file') + if fs.isdir(self.tmp_dataset_path): + fs.rm(self.tmp_dataset_path, recursive=True) + + def assertDataframeEqual(self, df1: DataFrame, df2: DataFrame): + self.assertEqual(df1.subtract(df2).count(), 0) + self.assertEqual(df2.subtract(df1).count(), 0) + + def test_convert_image(self): + input_batch_path = os.path.join(self.test_data, 'image') + output_dataset_path = os.path.join(self.tmp_dataset_path, 'output_dataset') + batch_name = 'batch_test' + output_batch_path = DatasetDirectory(output_dataset_path).batch_path(batch_name) + manifest_name = 'manifest.json' + images_dir_name = 'images' + convert_image(self.spark, output_dataset_path, output_batch_path, input_batch_path, manifest_name, + images_dir_name) + fs = fsspec.filesystem('file') + files = fs.ls(output_batch_path) + file_names = {f.split('/')[-1] for f in files} + expect_file_names = {'part-r-00000', 'part-r-00001', 'part-r-00002', '_SUCCESS'} + self.assertTrue(expect_file_names.issubset(file_names)) + with fsspec.open(dataset_schema_path(output_dataset_path), 'r') as f: + output_schema = json.load(f) + expect_schema = { + 'type': + 'struct', + 'fields': [{ + 'name': 'file_name', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'width', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'height', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'nChannels', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'mode', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'data', + 'type': 'binary', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'name', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'created_at', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'caption', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'label', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }] + } + self.assertEqual(output_schema, expect_schema) + image_df = self.spark.read.format('image').load(os.path.join(input_batch_path, images_dir_name)) + converter_df = load_tfrecords(self.spark, output_batch_path, output_dataset_path) + self.assertDataframeEqual(image_df.select('image.data'), converter_df.select('data')) + converter_struct_df = converter_df.select('file_name', 'width', 'height', 'nChannels', 'mode', 'name', + 'created_at', 'caption', 'label') + expect_struct = [ + Row(file_name='000000005756.jpg', + width=640, + height=361, + nChannels=3, + mode=16, + name='000000005756.jpg', + created_at='2021-08-30T16:52:15.501516', + caption='A group of people holding umbrellas looking at graffiti.', + label='A'), + Row(file_name='000000018425.jpg', + width=640, + height=480, + nChannels=3, + 
mode=16, + name='000000018425.jpg', + created_at='2021-08-30T16:52:15.501516', + caption='Two giraffe grazing on tree leaves under a hazy sky.', + label='B'), + Row(file_name='000000008181.jpg', + width=640, + height=480, + nChannels=3, + mode=16, + name='000000008181.jpg', + created_at='2021-08-30T16:52:15.501516', + caption='A motorcycle is parked on a gravel lot', + label='C') + ] + self.assertCountEqual(converter_struct_df.collect(), expect_struct) + + def test_convert_tabular(self): + input_batch_path = os.path.join(self.test_data, 'csv/medium_csv') + output_dataset_path = os.path.join(self.tmp_dataset_path, 'output_dataset') + batch_name = 'batch_test' + output_batch_path = DatasetDirectory(output_dataset_path).batch_path(batch_name) + file_format = FileFormat.CSV + convert_tabular(self.spark, output_dataset_path, output_batch_path, input_batch_path, file_format) + fs = fsspec.filesystem('file') + files = fs.ls(output_batch_path) + file_names = {f.split('/')[-1] for f in files} + expect_file_names = {'part-r-00000', '_SUCCESS'} + self.assertTrue(expect_file_names.issubset(file_names)) + with fsspec.open(dataset_schema_path(output_dataset_path), 'r') as f: + output_schema = json.load(f) + expect_schema = { + 'type': + 'struct', + 'fields': [{ + 'name': 'example_id', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'raw_id', + 'type': 'string', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'event_time', + 'type': 'integer', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x0', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x1', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x2', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x3', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x4', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x5', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x6', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x7', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x8', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }, { + 'name': 'x9', + 'type': 'double', + 'nullable': True, + 'metadata': {} + }] + } + self.assertEqual(output_schema, expect_schema) + raw_df = self.spark.read.format('csv').option('header', 'true').schema( + StructType.fromJson(expect_schema)).load(input_batch_path) + converter_df = load_tfrecords(self.spark, output_batch_path, output_dataset_path) + self.assertEqual(raw_df.schema, converter_df.schema) + # TODO(liuhehan): add df content check + self.assertEqual(raw_df.count(), converter_df.count()) + + def test_convert_empty(self): + input_batch_path = os.path.join(self.test_data, 'csv/*.fake') + output_dataset_path = os.path.join(self.tmp_dataset_path, 'output_dataset') + batch_name = 'batch_test' + output_batch_path = DatasetDirectory(output_dataset_path).batch_path(batch_name) + file_format = FileFormat.CSV + convert_tabular(self.spark, output_dataset_path, output_batch_path, input_batch_path, file_format) + self.assertTrue(fsspec.get_mapper(output_batch_path).fs.isdir(output_batch_path)) + + def test_converter_no_copy(self): + input_batch_path = os.path.join(self.test_data, 'csv/medium_csv') + output_dataset_path = os.path.join(self.tmp_dataset_path, 'output_dataset') + batch_name = 'batch_test' + output_batch_path = DatasetDirectory(output_dataset_path).batch_path(batch_name) + 
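# a NO_COPY import only records the source location; no data files are copied +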
convert_no_copy(output_dataset_path, output_batch_path, input_batch_path) + with fsspec.open(DatasetDirectory(output_dataset_path).source_batch_path_file(batch_name), 'r') as f: + self.assertEqual(f.read(), input_batch_path) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/dataset_alignment.py b/web_console_v2/inspection/dataset_alignment.py new file mode 100644 index 000000000..1dc59afc7 --- /dev/null +++ b/web_console_v2/inspection/dataset_alignment.py @@ -0,0 +1,152 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import logging +import argparse +from typing import List, Dict +import json +import fsspec + +from pyspark.rdd import RDD +from pyspark.sql import SparkSession +from pyspark.conf import SparkConf +from pyspark.sql.types import Row, StringType, StructType, StructField +from util import build_spark_conf, load_tfrecords, dataset_schema_path +from json_schema_checker import SchemaChecker + +from error_code import AreaCode, ErrorType, JobException, write_termination_message + +_DATASET_ALIGNMENT_LOG = 'dataset_alignment' + + +class DatasetAlignment(object): + """ + ProcessedDataset struct + | + |--- batch ---- batch_name_1 --- real data files + | | + | |- batch_name_2 --- real data files + | | + | |- batch_name_3 --- real data files + | + |--- errors --- batch_name_1 --- error message files (.csv) + | | + | |- batch_name_2 --- error message files (.csv) + | | + | |- batch_name_3 --- error message files (.csv) + | + |--- _META + """ + + def __init__(self, spark: SparkSession, input_dataset_path: str, input_batch_path: str, wildcard: str, + output_batch_path: str, json_schema: str, output_dataset_path: str, output_error_path: str): + self._spark = spark + self._input_dataset_path = input_dataset_path + self._input_batch_path = input_batch_path + self._wildcard = wildcard + self._output_batch_path = output_batch_path + self._json_schema = json.loads(json_schema) + self._output_dataset_path = output_dataset_path + self._output_error_path = output_error_path + + def _dump_error_msgs(self, rdd_errors: RDD): + error_schema = StructType([ + StructField('field', StringType(), True), + StructField('message', StringType(), True), + ]) + df_error_msgs = self._spark.createDataFrame(rdd_errors, schema=error_schema) + logging.info(f'[{_DATASET_ALIGNMENT_LOG}]: start to dump error message') + df_error_msgs.coalesce(1).write.format('json').save(self._output_error_path, mode='overwrite') + logging.info(f'[{_DATASET_ALIGNMENT_LOG}]: dump error message finished') + + def run(self): + try: + checker = SchemaChecker(self._json_schema) + except RuntimeError as e: + raise JobException(AreaCode.ALIGNMENT, ErrorType.INPUT_PARAMS_ERROR, 'json_schema is illegal') from e + + files = os.path.join(self._input_batch_path, self._wildcard) + try: + df = load_tfrecords(spark=self._spark, files=files, dataset_path=self._input_dataset_path) + except Exception as e: # pylint: disable=broad-except + raise
JobException(AreaCode.ALIGNMENT, ErrorType.DATA_LOAD_ERROR, + f'failed to read input data, err: {str(e)}') from e + # broadcast has better performance by keeping a read-only variable cached on each machine, + # rather than shipping a copy of it with tasks + # Ref: https://spark.apache.org/docs/latest/api/python/reference/api/pyspark.Broadcast.html + broadcast_vals = self._spark.sparkContext.broadcast({'checker': checker}) + + def check_schema(row: Row) -> List[Dict[str, str]]: + data = row.asDict() + checker = broadcast_vals.value['checker'] + error_message = checker.check(data) + return error_message + + # use flatMap to flatten the result from list[dict] into dicts like {'field': 'xxx', 'message': 'xxx'} + rdd_errors = df.rdd.flatMap(check_schema) + if rdd_errors.count() > 0: + self._dump_error_msgs(rdd_errors) + message = f'[{_DATASET_ALIGNMENT_LOG}]: schema check failed!' + logging.error(message) + raise JobException(AreaCode.ALIGNMENT, ErrorType.SCHEMA_CHECK_ERROR, message) + logging.info(f'[{_DATASET_ALIGNMENT_LOG}]: schema check succeeded!') + df.write.format('tfrecords').option('compression', 'none').save(self._output_batch_path, mode='overwrite') + with fsspec.open(dataset_schema_path(self._output_dataset_path), mode='w') as f: + json.dump(df.schema.jsonValue(), f) + logging.info(f'[{_DATASET_ALIGNMENT_LOG}]: dataset alignment task is finished!') + + +def get_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description='dataset alignment task') + + parser.add_argument('--input_dataset_path', type=str, required=True, help='path of input dataset') + parser.add_argument('--input_batch_path', type=str, required=True, help='path of input databatch') + parser.add_argument('--json_schema', type=str, required=True, help='json schema in string type') + parser.add_argument('--wildcard', type=str, required=True, help='wildcard') + parser.add_argument('--output_dataset_path', type=str, required=True, help='path of output dataset') + parser.add_argument('--output_batch_path', type=str, required=True, help='path of output databatch') + parser.add_argument('--output_error_path', type=str, required=True, help='path of output error') + + return parser.parse_args() + + +def alignment_task(): + try: + args = get_args() + except SystemExit: + write_termination_message(AreaCode.ALIGNMENT, ErrorType.INPUT_PARAMS_ERROR, + 'input params error, check details in logs') + raise + logging.info(f'[{_DATASET_ALIGNMENT_LOG}]:\n' + '----------------------\n' + 'Input params:\n' + f'{args.__dict__}\n' + '----------------------\n') + conf: SparkConf = build_spark_conf() + spark = SparkSession.builder.config(conf=conf).getOrCreate() + try: + DatasetAlignment(spark, args.input_dataset_path, args.input_batch_path, args.wildcard, args.output_batch_path, + args.json_schema, args.output_dataset_path, args.output_error_path).run() + except JobException as e: + write_termination_message(e.area_code, e.error_type, e.message) + raise + finally: + spark.stop() + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + alignment_task() diff --git a/web_console_v2/inspection/dataset_alignment_test.py b/web_console_v2/inspection/dataset_alignment_test.py new file mode 100644 index 000000000..5ae4ac0d9 --- /dev/null +++ b/web_console_v2/inspection/dataset_alignment_test.py @@ -0,0 +1,166 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +import unittest +import os +import fsspec + +from pyspark.sql.types import BinaryType, IntegerType, StructField, StructType, StringType +from error_code import JobException + +from testing.spark_test_case import PySparkTestCase +from dataset_alignment import DatasetAlignment +from util import dataset_schema_path + + +class DatasetAlignmentTest(PySparkTestCase): + + def setUp(self) -> None: + super().setUp() + self._generate_tfrecords() + + def tearDown(self) -> None: + self._clear_up() + return super().tearDown() + + def _generate_tfrecords(self): + data = [ + (1, b'01001100111', 256, 256, 3, 'cat'), + (2, b'01001100111', 244, 246, 3, 'dog'), + (3, b'01001100111', 255, 312, 3, 'cat'), + (4, b'01001100111', 256, 255, 3, 'cat'), + (5, b'01001100111', 201, 241, 3, 'cat'), + (6, b'01001100111', 255, 221, 3, 'dog'), + (7, b'01001100111', 201, 276, 3, 'dog'), + (8, b'01001100111', 258, 261, 3, 'dog'), + (9, b'01001100111', 198, 194, 3, 'cat'), + (10, b'01001100111', 231, 221, 3, 'cat'), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('image', BinaryType(), False), + StructField('rows', IntegerType(), False), + StructField('cols', IntegerType(), False), + StructField('channel', IntegerType(), False), + StructField('label', StringType(), False), + ]) + df = self.spark.createDataFrame(data=data, schema=schema) + df.repartition(3).write.format('tfrecords').option('compression', 'none').save(os.path.join( + self.tmp_dataset_path, 'input_dataset/batch/batch_test'), + mode='overwrite') + with fsspec.open(dataset_schema_path(os.path.join(self.tmp_dataset_path, 'input_dataset')), mode='w') as f: + json.dump(df.schema.jsonValue(), f) + + def _clear_up(self): + fs = fsspec.filesystem('file') + if fs.isdir(self.tmp_dataset_path): + fs.rm(self.tmp_dataset_path, recursive=True) + + def test_dataset_alignment(self): + input_dataset_path = os.path.join(self.tmp_dataset_path, 'input_dataset') + input_batch_path = os.path.join(input_dataset_path, 'batch/batch_test') + wildcard = '**' + output_dataset_path = os.path.join(self.tmp_dataset_path, 'output_dataset') + output_batch_path = os.path.join(output_dataset_path, 'batch/batch_test') + input_schema_path = os.path.join(self.test_data, 'alignment_schema.json') + output_error_path = os.path.join(output_dataset_path, 'errors/batch_test') + with fsspec.open(input_schema_path, 'r') as f: + json_schema = f.read() + # the test passes if run() raises no exception + DatasetAlignment(self.spark, input_dataset_path, input_batch_path, wildcard, output_batch_path, json_schema, + output_dataset_path, output_error_path).run() + fs = fsspec.filesystem('file') + self.assertFalse(fs.isdir(output_error_path)) + files = fs.ls(output_batch_path) + file_names = [] + for file in files: + # skip cyclic redundancy check (.crc) files + if file.endswith('.crc'): + continue + file_name = file.split('/')[-1] + if file_name != '_SUCCESS': + self.assertNotEqual(fs.size(file), 0) + file_names.append(file_name) + golden_file_names = ['part-r-00000', 'part-r-00001', 'part-r-00002', '_SUCCESS'] +
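# the fixture wrote the input with repartition(3), so three part files plus the _SUCCESS marker are expected +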
self.assertCountEqual(file_names, golden_file_names) + input_schema_path = dataset_schema_path(input_dataset_path) + self.assertTrue(fs.isfile(input_schema_path)) + with fsspec.open(input_schema_path, 'r') as f: + input_schema = f.read() + output_schema_path = dataset_schema_path(output_dataset_path) + self.assertTrue(fs.isfile(output_schema_path)) + with fsspec.open(output_schema_path, 'r') as f: + output_schema = f.read() + self.assertEqual(input_schema, output_schema) + + def test_dataset_alignment_error(self): + input_dataset_path = os.path.join(self.tmp_dataset_path, 'input_dataset') + input_batch_path = os.path.join(input_dataset_path, 'batch/batch_test') + wildcard = '**' + output_dataset_path = os.path.join(self.tmp_dataset_path, 'output_dataset') + output_batch_patch = os.path.join(output_dataset_path, 'batch/batch_test') + input_schema_path = os.path.join(self.test_data, 'alignment_schema_error.json') + output_error_path = os.path.join(output_dataset_path, 'errors/batch_test') + with fsspec.open(input_schema_path, 'r') as f: + json_schema = f.read() + # test passed while no exception raise + with self.assertRaises(JobException): + DatasetAlignment(self.spark, input_dataset_path, input_batch_path, wildcard, output_batch_patch, + json_schema, output_dataset_path, output_error_path).run() + fs = fsspec.filesystem('file') + self.assertTrue(fs.isdir(output_error_path)) + golden_data = [{ + 'field': 'raw_id', + 'message': '3 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '8 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '10 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '4 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '6 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '7 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '9 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '1 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '2 is not of type \'string\'' + }, { + 'field': 'raw_id', + 'message': '5 is not of type \'string\'' + }] + error_file = fs.glob(output_error_path + '/part-*.json')[0] + error_msgs = [] + with fs.open(error_file) as f: + for line in f: + error_msgs.append(json.loads(line)) + self.assertCountEqual(error_msgs, golden_data) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/dataset_directory.py b/web_console_v2/inspection/dataset_directory.py new file mode 100644 index 000000000..1eec014f9 --- /dev/null +++ b/web_console_v2/inspection/dataset_directory.py @@ -0,0 +1,99 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import os
+
+
+class DatasetDirectory(object):
+    """
+    Dataset struct
+    |
+    |--- batch ---- batch_name_1 --- real data files
+    |         |
+    |         |- batch_name_2 --- real data files
+    |         |
+    |         |- batch_name_3 --- real data files
+    |
+    |--- meta --- batch_name_1 --- thumbnails (only for image) --- preview image (.png)
+    |        |                 |
+    |        |                 |- _META
+    |        |
+    |        |- batch_name_2 --- thumbnails (only for image) --- preview image (.png)
+    |        |                |
+    |        |                |- _META
+    |        |
+    |        |- batch_name_3 --- thumbnails (only for image) --- preview image (.png)
+    |                         |
+    |                         |- _META
+    |
+    |--- errors --- batch_name_1 --- error message files (.csv)
+    |          |
+    |          |- batch_name_2 --- error message files (.csv)
+    |          |
+    |          |- batch_name_3 --- error message files (.csv)
+    |
+    |--- side_output --- batch_name_1 --- intermediate data
+    |               |
+    |               |- batch_name_2 --- intermediate data
+    |               |
+    |               |- batch_name_3 --- intermediate data
+    |
+    |--- _META (now moved to meta/batch_name, to be deleted in the future)
+    |
+    |--- schema.json
+
+    """
+    _BATCH_DIR = 'batch'
+    _META_DIR = 'meta'
+    _ERRORS_DIR = 'errors'
+    _SIDE_OUTPUT_DIR = 'side_output'
+    _THUMBNAILS_DIR = 'thumbnails'
+    _META_FILE = '_META'
+    _SCHEMA_FILE = 'schema.json'
+    _SOURCE_BATCH_PATH_FILE = 'source_batch_path'
+
+    def __init__(self, dataset_path: str):
+        self._dataset_path = dataset_path
+
+    @property
+    def dataset_path(self) -> str:
+        return self._dataset_path
+
+    def batch_path(self, batch_name: str) -> str:
+        return os.path.join(self._dataset_path, self._BATCH_DIR, batch_name)
+
+    def errors_path(self, batch_name: str) -> str:
+        return os.path.join(self._dataset_path, self._ERRORS_DIR, batch_name)
+
+    def thumbnails_path(self, batch_name: str) -> str:
+        return os.path.join(self._dataset_path, self._META_DIR, batch_name, self._THUMBNAILS_DIR)
+
+    def side_output_path(self, batch_name: str) -> str:
+        return os.path.join(self._dataset_path, self._SIDE_OUTPUT_DIR, batch_name)
+
+    def source_batch_path_file(self, batch_name: str) -> str:
+        return os.path.join(self.batch_path(batch_name), self._SOURCE_BATCH_PATH_FILE)
+
+    def batch_meta_file(self, batch_name: str) -> str:
+        return os.path.join(self._dataset_path, self._META_DIR, batch_name, self._META_FILE)
+
+    @property
+    def schema_file(self) -> str:
+        return os.path.join(self._dataset_path, self._SCHEMA_FILE)
+
+    # TODO(liuhehan): remove it in the future
+    @property
+    def meta_file(self) -> str:
+        return os.path.join(self._dataset_path, self._META_FILE)
diff --git a/web_console_v2/inspection/dataset_directory_test.py b/web_console_v2/inspection/dataset_directory_test.py
new file mode 100644
index 000000000..b1ff39ac8
--- /dev/null
+++ b/web_console_v2/inspection/dataset_directory_test.py
@@ -0,0 +1,64 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from dataset_directory import DatasetDirectory
+
+
+class DatasetDirectoryTest(unittest.TestCase):
+    _DATASET_PATH = '/fakepath/test_dataset'
+    _BATCH_NAME = 'test_batch_name'
+
+    def setUp(self) -> None:
+        super().setUp()
+        self._dataset_dir = DatasetDirectory(dataset_path=self._DATASET_PATH)
+
+    def test_dataset_path(self):
+        self.assertEqual(self._dataset_dir.dataset_path, self._DATASET_PATH)
+
+    def test_batch_path(self):
+        self.assertEqual(self._dataset_dir.batch_path(self._BATCH_NAME),
+                         f'{self._DATASET_PATH}/batch/{self._BATCH_NAME}')
+
+    def test_errors_path(self):
+        self.assertEqual(self._dataset_dir.errors_path(self._BATCH_NAME),
+                         f'{self._DATASET_PATH}/errors/{self._BATCH_NAME}')
+
+    def test_thumbnails_path(self):
+        self.assertEqual(self._dataset_dir.thumbnails_path(self._BATCH_NAME),
+                         f'{self._DATASET_PATH}/meta/{self._BATCH_NAME}/thumbnails')
+
+    def test_batch_meta_file(self):
+        self.assertEqual(self._dataset_dir.batch_meta_file(self._BATCH_NAME),
+                         f'{self._DATASET_PATH}/meta/{self._BATCH_NAME}/_META')
+
+    def test_side_output_path(self):
+        self.assertEqual(self._dataset_dir.side_output_path(self._BATCH_NAME),
+                         f'{self._DATASET_PATH}/side_output/{self._BATCH_NAME}')
+
+    def test_schema_file(self):
+        self.assertEqual(self._dataset_dir.schema_file, f'{self._DATASET_PATH}/schema.json')
+
+    def test_meta_file(self):
+        self.assertEqual(self._dataset_dir.meta_file, f'{self._DATASET_PATH}/_META')
+
+    def test_source_batch_path_file(self):
+        self.assertEqual(self._dataset_dir.source_batch_path_file(self._BATCH_NAME),
+                         f'{self._DATASET_PATH}/batch/{self._BATCH_NAME}/source_batch_path')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/inspection/dataset_format_checker.py b/web_console_v2/inspection/dataset_format_checker.py
new file mode 100644
index 000000000..2902c9c77
--- /dev/null
+++ b/web_console_v2/inspection/dataset_format_checker.py
@@ -0,0 +1,135 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+import logging
+import argparse
+from typing import List
+
+from pyspark.conf import SparkConf
+from pyspark.sql import SparkSession
+from pyspark.sql.dataframe import DataFrame
+from error_code import AreaCode, ErrorType, JobException, write_termination_message
+
+from util import FileFormat, normalize_file_path, build_spark_conf, load_by_file_format, is_file_matched
+
+RAW_ID_COLUMN = 'raw_id'
+DEFAULT_IGNORED_NUMERIC_COLUMNS = frozenset(['raw_id', 'example_id', 'event_time'])
+NUMERIC_TYPES = frozenset(['bigint', 'int', 'smallint', 'tinyint', 'double', 'float'])
+
+RAW_ID_CHECKER = 'RAW_ID_CHECKER'
+NUMERIC_COLUMNS_CHECKER = 'NUMERIC_COLUMNS_CHECKER'
+
+
+def check_raw_id(df: DataFrame):
+    if RAW_ID_COLUMN not in df.columns:
+        raise JobException(AreaCode.FORMAT_CHECKER, ErrorType.NO_KEY_COLUMN_ERROR,
+                           f'[check_raw_id] failed to find {RAW_ID_COLUMN} in dataset')
+
+    df_count = df.count()
+    distinct_count = df.dropDuplicates([RAW_ID_COLUMN]).count()
+    if df_count != distinct_count:
+        raise JobException(AreaCode.FORMAT_CHECKER, ErrorType.DATA_FORMAT_ERROR,
+                           f'[check_raw_id] found {df_count - distinct_count} duplicated items in raw_id')
+
+
+def check_numeric_columns(df: DataFrame):
+    illegal_columns = []
+    for column_name, column_type in df.dtypes:
+        if column_name in DEFAULT_IGNORED_NUMERIC_COLUMNS:
+            continue
+        if column_type not in NUMERIC_TYPES:
+            illegal_column_msg = f'[column]: {column_name}, [type]: {column_type}'
+            illegal_columns.append(illegal_column_msg)
+    if len(illegal_columns) > 0:
+        raise JobException(AreaCode.FORMAT_CHECKER, ErrorType.DATA_FORMAT_ERROR,
+                           f'[check_numeric_columns] found {len(illegal_columns)} illegal columns: {illegal_columns}')
+
+
+def check_format(df: DataFrame, checkers: List[str]):
+    if RAW_ID_CHECKER in checkers:
+        check_raw_id(df)
+    if NUMERIC_COLUMNS_CHECKER in checkers:
+        check_numeric_columns(df)
+
+
+def get_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description='dataset checker task')
+    subparsers = parser.add_subparsers(dest='command', help='sub-command help')
+
+    # image parser
+    image_parser = subparsers.add_parser('image', help='check image format dataset')
+
+    # tabular parser
+    tabular_parser = subparsers.add_parser('tabular', help='check tabular format dataset')
+
+    tabular_parser.add_argument('--input_batch_path',
+                                type=str,
+                                required=True,
+                                help='input batch path of the tabular dataset')
+    tabular_parser.add_argument('--format',
+                                type=FileFormat,
+                                choices=list(FileFormat),
+                                required=True,
+                                help='file format')
+    tabular_parser.add_argument('--checkers', type=str, required=True, help='checkers')
+
+    # none_structured parser
+    none_structured_parser = subparsers.add_parser('none_structured', help='check none_structured format dataset')
+
+    # all needed args for each sub-command will be given, so we use known_args to ignore the unnecessary ones
+    known_args, _ = parser.parse_known_args()
+    return known_args
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO)
+    try:
+        args = get_args()
+    except SystemExit:
+        write_termination_message(AreaCode.FORMAT_CHECKER, ErrorType.INPUT_PARAMS_ERROR,
+                                  'input params error, check details in logs')
+        raise
+    logging.info(f'[format checker]:\n'
+                 '----------------------\n'
+                 'Input params:\n'
+                 f'{args.__dict__}\n'
+                 '----------------------\n')
+    conf: SparkConf = build_spark_conf()
+    spark = SparkSession.builder.config(conf=conf).getOrCreate()
+    try:
+        if args.command == 'image':
+            # image data
+            logging.info('[format checker]: image type has no 
checker now, [SKIP]') + elif args.command == 'none_structured': + # none_structured data + logging.info('[format checker]: none_structured type has no checker now, [SKIP]') + else: + input_batch_path = normalize_file_path(args.input_batch_path) + # tabular data + if not is_file_matched(input_batch_path): + logging.warning(f'input_dataset_path {input_batch_path} matches 0 files') + sys.exit() + try: + dataframe = load_by_file_format(spark, input_batch_path, args.format) + except Exception as e: # pylint: disable=broad-except + raise JobException(AreaCode.FORMAT_CHECKER, ErrorType.DATA_LOAD_ERROR, + f'failed to read input data, err: {str(e)}') from e + check_format(dataframe, args.checkers.split(',')) + except JobException as e: + write_termination_message(e.area_code, e.error_type, e.message) + raise + finally: + spark.stop() diff --git a/web_console_v2/inspection/dataset_format_checker_test.py b/web_console_v2/inspection/dataset_format_checker_test.py new file mode 100644 index 000000000..758fcd334 --- /dev/null +++ b/web_console_v2/inspection/dataset_format_checker_test.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest +import os +import fsspec + +from testing.spark_test_case import PySparkTestCase +from pyspark.sql.types import IntegerType, StructField, StructType, StringType, LongType, DoubleType +from pyspark.sql.dataframe import DataFrame + +from dataset_format_checker import check_format, JobException +from dataset_directory import DatasetDirectory +from util import FileFormat, load_by_file_format + + +class FormatCheckerTest(PySparkTestCase): + + def setUp(self) -> None: + super().setUp() + self._data_path = os.path.join(self.tmp_dataset_path, 'test_dataset') + self._dataset_directory = DatasetDirectory(self._data_path) + self._batch_name = 'test_batch' + self.maxDiff = None + + def tearDown(self) -> None: + fs = fsspec.filesystem('file') + if fs.isdir(self._data_path): + fs.rm(self._data_path, recursive=True) + return super().tearDown() + + def _generate_tfrecords_tabular(self) -> DataFrame: + data = [ + (1, 2, 2, 3, 'cat', 'image_1.jpg', 3.21), + (2, 1, 2, 3, 'dog', 'image_2.jpg', 3.23), + (3, 3, 2, 3, 'cat', 'image_3.jpg', 3.26), + ] + schema = StructType([ + StructField('example_id', IntegerType(), False), + StructField('height', LongType(), False), + StructField('width', IntegerType(), False), + StructField('nChannels', IntegerType(), False), + StructField('label', StringType(), False), + StructField('file_name', StringType(), False), + StructField('score', DoubleType(), False), + ]) + return self.spark.createDataFrame(data=data, schema=schema) + + def _generate_tfrecords_tabular_duplicate_raw_id(self) -> DataFrame: + data = [ + (1, 2, 2, 3, 3.21), + (1, 1, 2, 3, 3.23), + (3, 3, 2, 3, 3.26), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('height', LongType(), False), + StructField('width', IntegerType(), False), + StructField('nChannels', 
IntegerType(), False), + StructField('score', DoubleType(), False), + ]) + return self.spark.createDataFrame(data=data, schema=schema) + + def test_format_checker(self): + # check succeeded + input_batch_path = os.path.join(self.test_data, 'csv/medium_csv') + file_format = FileFormat.CSV + checkers = ['RAW_ID_CHECKER', 'NUMERIC_COLUMNS_CHECKER'] + df = load_by_file_format(self.spark, input_batch_path, file_format) + check_format(df, checkers) + + def test_no_raw_id(self): + df = self._generate_tfrecords_tabular() + checkers = ['RAW_ID_CHECKER'] + with self.assertRaises(JobException): + check_format(df, checkers) + + def test_duplicated_raw_id(self): + df = self._generate_tfrecords_tabular_duplicate_raw_id() + checkers = ['RAW_ID_CHECKER'] + with self.assertRaises(JobException): + check_format(df, checkers) + + def test_numeric_check_failed(self): + df = self._generate_tfrecords_tabular() + checkers = ['NUMERIC_COLUMNS_CHECKER'] + with self.assertRaises(JobException): + check_format(df, checkers) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/debug.py b/web_console_v2/inspection/debug.py new file mode 100644 index 000000000..daf1f57e4 --- /dev/null +++ b/web_console_v2/inspection/debug.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import time + +time.sleep(86400 * 7) diff --git a/web_console_v2/inspection/envs.py b/web_console_v2/inspection/envs.py new file mode 100644 index 000000000..9bd9b820a --- /dev/null +++ b/web_console_v2/inspection/envs.py @@ -0,0 +1,18 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os + +TERMINATION_LOG_PATH = os.getenv('TERMINATION_LOG_PATH', '/dev/termination-log') diff --git a/web_console_v2/inspection/error_code.py b/web_console_v2/inspection/error_code.py new file mode 100644 index 000000000..350011eff --- /dev/null +++ b/web_console_v2/inspection/error_code.py @@ -0,0 +1,79 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import enum
+import fsspec
+
+import envs
+
+
+@enum.unique
+class AreaCode(enum.IntEnum):
+    UNKNOWN = 0
+    PARTITION = 1
+    FEATURE_EXTRACTION = 2
+    FORMAT_CHECKER = 3
+    CONVERTER = 4
+    ANALYZER = 5
+    ALIGNMENT = 6
+    PSI_OT = 7
+    PSI_RSA = 8
+    PSI_HASH = 9
+    TRAINER = 10
+    EXPORT_DATASET = 11
+
+
+@enum.unique
+class ErrorType(enum.IntEnum):
+    # system error
+    OUT_OF_MEMORY = 1
+    CHANNEL_ERROR = 2
+    # params error
+    DATA_FORMAT_ERROR = 1001
+    NO_KEY_COLUMN_ERROR = 1002
+    DATA_NOT_FOUND = 1003
+    DATA_LOAD_ERROR = 1004
+    INPUT_PARAMS_ERROR = 1005
+    DATA_WRITE_ERROR = 1006
+    SCHEMA_CHECK_ERROR = 1007
+
+    # other error
+    RESULT_ERROR = 2001
+
+
+def build_full_error_code(area_code: AreaCode, error_type: ErrorType) -> str:
+    return str(area_code.value).zfill(4) + str(error_type.value).zfill(4)
+
+
+class JobException(Exception):
+
+    def __init__(self, area_code: AreaCode, error_type: ErrorType, message: str):
+        super().__init__(message)
+        self.area_code = area_code
+        self.error_type = error_type
+        self.message = message
+
+    def __repr__(self):
+        return f'{type(self).__name__}({build_full_error_code(self.area_code, self.error_type)}-{self.message})'
+
+    def __str__(self) -> str:
+        return f'{build_full_error_code(self.area_code, self.error_type)}-{self.message}'
+
+
+def write_termination_message(area_code: AreaCode, error_type: ErrorType, error_message: str):
+    error_code = build_full_error_code(area_code, error_type)
+    termination_message = f'{error_code}-{error_message}'
+    with fsspec.open(envs.TERMINATION_LOG_PATH, 'w') as f:
+        f.write(termination_message)
diff --git a/web_console_v2/inspection/error_code_test.py b/web_console_v2/inspection/error_code_test.py
new file mode 100644
index 000000000..c0a7ca979
--- /dev/null
+++ b/web_console_v2/inspection/error_code_test.py
@@ -0,0 +1,38 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+import os
+import tempfile
+import fsspec
+
+import envs
+from error_code import AreaCode, ErrorType, write_termination_message
+
+
+class ErrorCodeTest(unittest.TestCase):
+
+    def test_write_termination_message(self):
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            envs.TERMINATION_LOG_PATH = os.path.join(tmp_dir, 'log_file')
+            write_termination_message(AreaCode.FORMAT_CHECKER, ErrorType.DATA_FORMAT_ERROR, 'format check failed')
+            expected_errors = '00031001-format check failed'
+            with fsspec.open(envs.TERMINATION_LOG_PATH, mode='r') as f:
+                errors = f.read()
+            self.assertEqual(expected_errors, errors)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/inspection/export_dataset.py b/web_console_v2/inspection/export_dataset.py
new file mode 100644
index 000000000..3057ffb42
--- /dev/null
+++ b/web_console_v2/inspection/export_dataset.py
@@ -0,0 +1,103 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import logging
+import argparse
+import fsspec
+
+from pyspark.sql import SparkSession
+from pyspark.conf import SparkConf
+from dataset_directory import DatasetDirectory
+from error_code import AreaCode, ErrorType, JobException, write_termination_message
+from util import FileFormat, build_spark_conf, load_by_file_format
+
+
+def export_dataset_for_structured_type(input_path: str, export_path: str, file_format: FileFormat):
+    # create the session before the try block so spark is always defined in the finally clause
+    conf: SparkConf = build_spark_conf()
+    spark = SparkSession.builder.config(conf=conf).getOrCreate()
+    try:
+        try:
+            df = load_by_file_format(spark=spark, input_batch_path=input_path, file_format=file_format)
+        except Exception as e:  # pylint: disable=broad-except
+            raise JobException(AreaCode.EXPORT_DATASET, ErrorType.DATA_LOAD_ERROR,
+                               f'failed to read input data, err: {str(e)}') from e
+        try:
+            df.write.format('csv').option('compression', 'none').option('header', 'true').save(path=export_path,
+                                                                                               mode='overwrite')
+        except Exception as e:  # pylint: disable=broad-except
+            raise JobException(AreaCode.EXPORT_DATASET, ErrorType.DATA_WRITE_ERROR,
+                               f'failed to write data, err: {str(e)}') from e
+    finally:
+        spark.stop()
+
+
+def export_dataset_for_unknown_type(input_path: str, export_path: str):
+    fs: fsspec.spec.AbstractFileSystem = fsspec.get_mapper(export_path).fs
+    if fs.exists(export_path):
+        fs.rm(export_path, recursive=True)
+    fs.copy(input_path, export_path, recursive=True)
+
+
+def get_args(args=None) -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description='export dataset')
+    parser.add_argument('--data_path', type=str, help='path of input')
+    parser.add_argument('--export_path', type=str, help='path of output')
+    parser.add_argument('--batch_name', type=str, required=False, help='name of the batch to export')
+    parser.add_argument('--file_format',
+                        type=FileFormat,
+                        choices=list(FileFormat),
+                        default=FileFormat.TFRECORDS,
+                        required=False,
+                        help='file format')
+    # TODO(liuhehan): delete file_wildcard input after export job 
support batch + parser.add_argument('--file_wildcard', type=str, required=False, help='input file wildcard') + + return parser.parse_args(args) + + +def export_dataset(): + try: + args = get_args() + except SystemExit: + write_termination_message(AreaCode.EXPORT_DATASET, ErrorType.INPUT_PARAMS_ERROR, + 'input params error, check details in logs') + raise + logging.info(f'[export_dataset]:\n' + '----------------------\n' + 'Input params:\n' + f'{args.__dict__}\n' + '----------------------\n') + + if args.batch_name: + input_path = DatasetDirectory(dataset_path=args.data_path).batch_path(batch_name=args.batch_name) + else: + # TODO(liuhehan): delete after export job support batch + input_path = os.path.join(args.data_path, args.file_wildcard) + try: + if args.file_format == FileFormat.UNKNOWN: + export_dataset_for_unknown_type(input_path=input_path, export_path=args.export_path) + else: + export_dataset_for_structured_type(input_path=input_path, + export_path=args.export_path, + file_format=args.file_format) + except JobException as e: + write_termination_message(e.area_code, e.error_type, e.message) + raise + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + export_dataset() diff --git a/web_console_v2/inspection/export_dataset_test.py b/web_console_v2/inspection/export_dataset_test.py new file mode 100644 index 000000000..489ed15a6 --- /dev/null +++ b/web_console_v2/inspection/export_dataset_test.py @@ -0,0 +1,122 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import argparse
+import os
+import fsspec
+import unittest
+from unittest.mock import MagicMock, patch
+
+from pyspark import SparkConf
+
+from export_dataset import get_args, export_dataset, export_dataset_for_unknown_type, export_dataset_for_structured_type
+from testing.spark_test_case import PySparkTestCase
+from util import FileFormat
+
+
+class ExportDatasetTest(PySparkTestCase):
+
+    def test_get_args(self):
+        data_path = '/data/fake_path'
+        file_wildcard = 'batch/**/**'
+        batch_name = '20220101'
+        export_path = '/data/export'
+        file_format_unknown = 'unknown'
+
+        # test file_wildcard
+        args = get_args([f'--data_path={data_path}', f'--file_wildcard={file_wildcard}',
+                         f'--export_path={export_path}'])
+        self.assertEqual(args.data_path, data_path)
+        self.assertEqual(args.file_wildcard, file_wildcard)
+        self.assertEqual(args.export_path, export_path)
+        self.assertEqual(args.file_format, FileFormat.TFRECORDS)
+        self.assertIsNone(args.batch_name)
+
+        # test batch_name
+        args = get_args([f'--data_path={data_path}', f'--batch_name={batch_name}', f'--export_path={export_path}',
+                         f'--file_format={file_format_unknown}'])
+        self.assertEqual(args.data_path, data_path)
+        self.assertIsNone(args.file_wildcard)
+        self.assertEqual(args.export_path, export_path)
+        self.assertEqual(args.file_format, FileFormat.UNKNOWN)
+        self.assertEqual(args.batch_name, batch_name)
+
+    @patch('export_dataset.export_dataset_for_unknown_type')
+    @patch('export_dataset.export_dataset_for_structured_type')
+    @patch('export_dataset.get_args')
+    def test_export_dataset(self, mock_get_args: MagicMock, mock_export_dataset_for_structured_type: MagicMock,
+                            mock_export_dataset_for_unknown_type: MagicMock):
+        data_path = '/data/fake_path'
+        file_wildcard = 'batch/**/**'
+        batch_name = '20220101'
+        export_path = '/data/export/20220101'
+        file_format_tf = 'tfrecords'
+        file_format_unknown = 'unknown'
+
+        # test use spark
+        mock_get_args.return_value = argparse.Namespace(data_path=data_path,
+                                                        file_wildcard=file_wildcard,
+                                                        export_path=export_path,
+                                                        batch_name=None,
+                                                        file_format=file_format_tf)
+        export_dataset()
+        mock_export_dataset_for_structured_type.assert_called_once_with(input_path='/data/fake_path/batch/**/**',
+                                                                        export_path='/data/export/20220101',
+                                                                        file_format=FileFormat.TFRECORDS)
+        mock_export_dataset_for_unknown_type.assert_not_called()
+
+        mock_get_args.reset_mock()
+        mock_export_dataset_for_structured_type.reset_mock()
+        mock_export_dataset_for_unknown_type.reset_mock()
+
+        # test use fsspec
+        mock_get_args.return_value = argparse.Namespace(data_path=data_path,
+                                                        export_path=export_path,
+                                                        batch_name=batch_name,
+                                                        file_format=file_format_unknown)
+        export_dataset()
+        mock_export_dataset_for_unknown_type.assert_called_once_with(input_path='/data/fake_path/batch/20220101',
+                                                                     export_path='/data/export/20220101')
+        mock_export_dataset_for_structured_type.assert_not_called()
+
+    @patch('export_dataset.build_spark_conf')
+    def test_export_dataset_for_structured_type(self, mock_build_spark_conf: MagicMock):
+        # set local spark
+        mock_build_spark_conf.return_value = SparkConf().setMaster('local')
+
+        input_path = os.path.join(self.test_data, 'csv/medium_csv')
+        export_path = self.tmp_dataset_path
+
+        export_dataset_for_structured_type(input_path=input_path, export_path=export_path, file_format=FileFormat.CSV)
+        fs = fsspec.filesystem('file')
+        files = fs.ls(export_path)
+        file_names = {f.split('/')[-1] for f in files}
+        expect_file_names = {'_SUCCESS'}
+        self.assertTrue(expect_file_names.issubset(file_names))
+
+    def test_export_dataset_for_unknown_type(self):
+        input_path = os.path.join(self.test_data, 'csv/medium_csv')
+        export_path = self.tmp_dataset_path
+
+        export_dataset_for_unknown_type(input_path=input_path, export_path=export_path)
+        fs = fsspec.filesystem('file')
+        files = fs.ls(export_path)
+        file_names = {f.split('/')[-1] for f in files}
+        expect_file_names = {'default_credit_hetero_guest.csv'}
+        self.assertEqual(expect_file_names, file_names)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/web_console_v2/inspection/fake_data.py b/web_console_v2/inspection/fake_data.py
new file mode 100644
index 000000000..0bcc2ec10
--- /dev/null
+++ b/web_console_v2/inspection/fake_data.py
@@ -0,0 +1,87 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# pylint: skip-file
+import logging
+import argparse
+
+from pyspark.sql import SparkSession
+from pyspark.sql.types import StringType
+from pyspark.mllib.random import RandomRDDs
+import pyspark.sql.functions as f
+
+_RAW_ID = 'raw_id'
+_DEFAULT_PARTITIONS = 16
+
+# note: only 'normal' and 'uniform' match the (sc, numRows, numCols, numPartitions)
+# calling convention used in make_data below; the exponential/log/gamma generators
+# take distribution parameters (mean, mean and std, shape and scale) before the size args
+DISTRIBUTION_FUNC_MAP = {
+    'normal': RandomRDDs.normalVectorRDD,
+    'uniform': RandomRDDs.uniformVectorRDD,
+    'exponential': RandomRDDs.exponentialVectorRDD,
+    'log': RandomRDDs.logNormalVectorRDD,
+    'gamma': RandomRDDs.gammaVectorRDD
+}
+
+
+def make_data(output_dir_path: str,
+              items_num: int,
+              features_num: int,
+              partitions_num: int = _DEFAULT_PARTITIONS,
+              distribution: str = 'uniform'):
+    logging.info('========start========')
+    output_dir_path = output_dir_path.strip()
+    spark = SparkSession.builder.getOrCreate()
+    sc = spark.sparkContext
+    if distribution in DISTRIBUTION_FUNC_MAP:
+        feature_df = DISTRIBUTION_FUNC_MAP[distribution](sc, items_num, features_num,
+                                                         partitions_num).map(lambda a: a.tolist()).toDF()
+    else:
+        logging.error(f'### no valid distribution: {distribution}')
+        return
+    df = feature_df.withColumn(_RAW_ID, f.md5(feature_df._1.cast(StringType())))
+    df.write.format('csv').option('compression', 'none').option('header', 'true').save(path=output_dir_path,
+                                                                                       mode='overwrite')
+    spark.stop()
+    logging.info('========done========')
+
+
+def get_args():
+    parser = argparse.ArgumentParser(description='generate a fake dataset with random features.')
+    parser.add_argument('--output_dir_path',
+                        '-o',
+                        required=True,
+                        type=str,
+                        dest='output_dir_path',
+                        help='dir of output')
+    parser.add_argument('--items', '-i', type=int, required=True, dest='items_num', help='number of items')
+    parser.add_argument('--features', '-f', type=int, required=True, dest='features_num', help='number of features')
+    parser.add_argument('--partitions_num', '-p', type=int, required=False, dest='partitions_num')
+    parser.add_argument('--distribution',
+                        '-d',
+                        required=False,
+                        type=str,
+                        dest='distribution',
+                        help='the distribution of the feature, could be: normal/uniform/exponential/log/gamma')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
+    args = get_args()
+    logging.info(f'\toutput dir: {args.output_dir_path}\n'
+                 f'\titems_num is: {args.items_num}\n'
+                 f'\tfeature_num is: {args.features_num}\n'
+                 f'\tpartitions_num is: {args.partitions_num if args.partitions_num else _DEFAULT_PARTITIONS}\n'
+                 f'\tdistribution is: {args.distribution}')
+    # pass the parsed distribution through and fall back to the defaults when flags are omitted
+    make_data(args.output_dir_path, args.items_num, args.features_num,
+              args.partitions_num if args.partitions_num else _DEFAULT_PARTITIONS,
+              args.distribution if args.distribution else 'uniform')
diff --git a/web_console_v2/inspection/feature_extraction_v2.py b/web_console_v2/inspection/feature_extraction_v2.py
new file mode 100644
index 000000000..98b44f96d
--- /dev/null
+++ b/web_console_v2/inspection/feature_extraction_v2.py
@@ -0,0 +1,93 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# pylint: disable=redefined-outer-name
+import json
+import fsspec
+import logging
+import argparse
+from cityhash import CityHash64  # pylint: disable=no-name-in-module
+
+from pyspark.conf import SparkConf
+from pyspark.sql import SparkSession
+from pyspark.sql.types import StringType
+
+from dataset_directory import DatasetDirectory
+from util import build_spark_conf, FileFormat, EXAMPLE_ID, is_file_matched
+
+
+# TODO(hangweiqiang): implement partition-wise join in parallel
+def feature_extraction(spark: SparkSession, original_data_path: str, joined_data_path: str, part_num: int,
+                       part_key: str, file_format: FileFormat, output_file_format: FileFormat, output_batch_name: str,
+                       output_dataset_path: str):
+    dataset_dir = DatasetDirectory(output_dataset_path)
+    output_batch_path = dataset_dir.batch_path(output_batch_name)
+    if not is_file_matched(joined_data_path):
+        # this is a hack to allow an empty intersection dataset:
+        # no file matched, so just mkdir output_batch_path and skip the converter
+        fs: fsspec.AbstractFileSystem = fsspec.get_mapper(output_batch_path).fs
+        fs.mkdir(output_batch_path)
+        logging.warning(f'[feature_extraction]: joined_dataset_path {joined_data_path} matches 0 files, [SKIP]')
+        return
+    joined_df = spark.read.format(FileFormat.CSV.value).option('header', 'true').load(joined_data_path).toDF(part_key)
+    original_df = spark.read.format(file_format.value).option('header', 'true').load(original_data_path)
+    df = joined_df.join(original_df, on=part_key)
+    # use a customized partition method to guarantee sample consistency between the parties
+    # TODO(liuhehan): unify the partitioning method of output batches
+    sorted_df = df.rdd.keyBy(lambda v: v[part_key]) \
+        .partitionBy(part_num, CityHash64) \
+        .map(lambda v: v[1]) \
+        .toDF(schema=df.schema) \
+        .sortWithinPartitions(part_key)
+    sorted_df = sorted_df.withColumn(part_key, sorted_df[part_key].cast(StringType()))
+    # a hack to generate the example_id column if it does not exist, as training needs an example id
+    if EXAMPLE_ID not in sorted_df.columns:
+        sorted_df = sorted_df.withColumn(EXAMPLE_ID, sorted_df[part_key])
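+    # note: reusing the partition key as example_id assumes part_key values uniquely
+    # identify rows; duplicated keys would yield colliding example_ids downstream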
+    sorted_df.write.format(output_file_format.value).option('compression',
+                                                            'none').option('header', 'true').save(output_batch_path,
+                                                                                                  mode='overwrite')
+    with fsspec.open(dataset_dir.schema_file, mode='w') as f:
+        json.dump(sorted_df.schema.jsonValue(), f)
+    spark.stop()
+
+
+def get_args():
+    parser = argparse.ArgumentParser(description='extract feature from dataset')
+    parser.add_argument('--original_data_path', type=str, help='original data path')
+    parser.add_argument('--joined_data_path', type=str, help='path of joined ids')
+    parser.add_argument('--part_key', type=str, help='partition key')
+    parser.add_argument('--part_num', type=int, help='partition number')
+    parser.add_argument('--file_format', type=FileFormat, choices=list(FileFormat), help='format of original file')
+    parser.add_argument('--output_file_format', type=FileFormat, choices=list(FileFormat), help='format of output file')
+    parser.add_argument('--output_batch_name', type=str, help='name of output batch')
+    parser.add_argument('--output_dataset_path', type=str, help='path of output dataset')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = get_args()
+    for arg, value in vars(args).items():
+        logging.info(f'Arg: {arg}, value: {value}')
+    conf: SparkConf = build_spark_conf()
+    spark = SparkSession.builder.config(conf=conf).getOrCreate()
+    feature_extraction(spark=spark,
+                       original_data_path=args.original_data_path,
+                       joined_data_path=args.joined_data_path,
+                       part_key=args.part_key,
+                       part_num=args.part_num,
+                       file_format=args.file_format,
+                       output_file_format=args.output_file_format,
+                       output_batch_name=args.output_batch_name,
+                       output_dataset_path=args.output_dataset_path)
diff --git a/web_console_v2/inspection/feature_extraction_v2_test.py b/web_console_v2/inspection/feature_extraction_v2_test.py
new file mode 100644
index 000000000..b3015fcbd
--- /dev/null
+++ b/web_console_v2/inspection/feature_extraction_v2_test.py
@@ -0,0 +1,87 @@
+# Copyright 2023 The FedLearner Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +import os +import csv +import unittest + +from cityhash import CityHash64 # pylint: disable=no-name-in-module + +from testing.spark_test_case import PySparkTestCase +from dataset_directory import DatasetDirectory +from feature_extraction_v2 import feature_extraction, FileFormat +from util import EXAMPLE_ID + + +class FeatureExtractionV2Test(PySparkTestCase): + + def test_feature_extract(self): + part_num = 3 + file_format = FileFormat.CSV + dataset_dir = DatasetDirectory(self.tmp_dataset_path) + batch_name = '20220331-1200' + side_output_path = dataset_dir.side_output_path(batch_name) + raw_data_path = os.path.join(side_output_path, 'raw') + joined_data_path = os.path.join(side_output_path, 'joined') + output_batch_path = dataset_dir.batch_path(batch_name) + # test no data + os.makedirs(joined_data_path, exist_ok=True) + feature_extraction(self.spark, + original_data_path=raw_data_path, + joined_data_path=joined_data_path, + part_num=part_num, + part_key='raw_id', + file_format=file_format, + output_file_format=FileFormat.CSV, + output_batch_name=batch_name, + output_dataset_path=self.tmp_dataset_path) + self.assertTrue(os.path.exists(output_batch_path)) + # write raw data + data = [(str(i), f'x{str(i)}', i + 1) for i in range(1000)] + df = self.spark.createDataFrame(data=data, schema=['raw_id', 'name', 'age']) + df.write.format(file_format.value).option('compression', 'none').option('header', 'true').save(raw_data_path, + mode='overwrite') + # write joined id + joined_id = [(str(i)) for i in range(0, 1000, 4)] + os.makedirs(joined_data_path, exist_ok=True) + expected_ids_list = [] + for part_id in range(part_num): + expected_ids_list.append([i for i in joined_id if CityHash64(i) % part_num == part_id]) + with open(os.path.join(joined_data_path, f'partition_{part_id}'), 'w', encoding='utf-8') as f: + f.write('raw_id\n') + f.write('\n'.join(expected_ids_list[part_id])) + + feature_extraction(self.spark, + original_data_path=raw_data_path, + joined_data_path=joined_data_path, + part_num=part_num, + part_key='raw_id', + file_format=file_format, + output_file_format=FileFormat.CSV, + output_batch_name=batch_name, + output_dataset_path=self.tmp_dataset_path) + # check raw_id and example_id from extracted data + filenames = list(filter(lambda file: file.startswith('part'), sorted(os.listdir(output_batch_path)))) + for part_id, file in enumerate(filenames): + with open(os.path.join(output_batch_path, file), 'r', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual([row['raw_id'] for row in reader], sorted(expected_ids_list[part_id])) + with open(os.path.join(output_batch_path, file), 'r', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual([row[EXAMPLE_ID] for row in reader], sorted(expected_ids_list[part_id])) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/json_schema_checker.py b/web_console_v2/inspection/json_schema_checker.py new file mode 100644 index 000000000..5b2d88e6a --- /dev/null +++ b/web_console_v2/inspection/json_schema_checker.py @@ -0,0 +1,46 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, Dict, List +import logging + +import jsonschema + +_SCHEMA_CHECK_LOG = 'schema check' + + +class SchemaChecker(object): + + def __init__(self, schema: Dict[Any, Any]): + self._schema = schema + try: + # spark broadcast use pickle serialization, cannot store draft7validator as a broadcast value + jsonschema.Draft7Validator(self._schema) + except Exception as e: # pylint: disable=broad-except + message = f'[{_SCHEMA_CHECK_LOG}] schema format error: schema format is invalid' + logging.error(message) + raise RuntimeError(message) from e + + def check(self, data: Dict) -> List[Dict[str, str]]: + error_msgs = [] + # convert bytearray to string as json_schema only support string + check_data = data.copy() + for key, value in check_data.items(): + if isinstance(value, (bytes, bytearray)): + check_data[key] = str(value) + for error in jsonschema.Draft7Validator(self._schema).iter_errors(check_data): + # TODO(lhh): schema check add example id to location row number + error_msgs.append({'field': '.'.join(error.absolute_path), 'message': error.message}) + return error_msgs diff --git a/web_console_v2/inspection/json_schema_checker_test.py b/web_console_v2/inspection/json_schema_checker_test.py new file mode 100644 index 000000000..789b041dc --- /dev/null +++ b/web_console_v2/inspection/json_schema_checker_test.py @@ -0,0 +1,94 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import unittest + +from json_schema_checker import SchemaChecker + + +class JsonSchemaCheckerTest(unittest.TestCase): + + def test_schema_check(self): + json_schema = { + 'type': 'object', + 'properties': { + 'name': { + 'type': 'string', + }, + 'age': { + 'type': 'integer', + }, + 'father': { + 'type': 'string', + }, + 'son': { + 'type': 'null', + }, + 'is_student': { + 'type': 'boolean', + }, + 'video': { + 'type': 'string' + } + }, + 'additionalProperties': False, + 'required': [ + 'name', + 'age', + 'father', + 'son', + 'is_student', + ] + } + checker = SchemaChecker(json_schema) + + data_1 = { + 'age': 10, + 'name': 'xiaoming', + 'father': 'old xiaoming', + 'son': None, + 'is_student': True, + 'video': bytearray([1, 2, 3]), + } + self.assertEqual(checker.check(data_1), []) + + data_2 = { + 'age': '10', + 'name': 'xiaoming', + 'father': 'old xiaoming', + 'mother': 'old xiaoming', + 'son': None, + 'video': b'test video', + } + error_msgs = [{ + 'field': 'age', + 'message': '\'10\' is not of type \'integer\'' + }, { + 'field': '', + 'message': 'Additional properties are not allowed (\'mother\' was unexpected)' + }, { + 'field': '', + 'message': '\'is_student\' is a required property' + }] + self.assertEqual(checker.check(data_2), error_msgs) + + def test_init_exception(self): + json_schema_error_format = {'this is illegal format'} + with self.assertRaises(RuntimeError): + SchemaChecker(json_schema_error_format) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/partition.py b/web_console_v2/inspection/partition.py new file mode 100644 index 000000000..610a069d9 --- /dev/null +++ b/web_console_v2/inspection/partition.py @@ -0,0 +1,102 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import os
+import logging
+import argparse
+
+from cityhash import CityHash64  # pylint: disable=no-name-in-module
+from pyspark.conf import SparkConf
+from pyspark.sql import SparkSession
+from pyspark.sql.types import StringType
+import fsspec
+
+from util import FileFormat, build_spark_conf, is_file_matched
+
+
+# pylint: disable=redefined-outer-name
+def partition(spark: SparkSession, input_path: str, file_format: FileFormat, output_file_format: FileFormat,
+              output_dir: str, part_num: int, part_key: str, write_raw_data: bool):
+    logging.info(f'[Partition] loading df..., input files path: {input_path}')
+    raw_path = os.path.join(output_dir, 'raw')
+    id_path = os.path.join(output_dir, 'ids')
+    if not is_file_matched(input_path):
+        # this is a hack to allow an empty intersection dataset:
+        # no file matched, so just mkdir the output paths and skip the converter
+        fs: fsspec.AbstractFileSystem = fsspec.get_mapper(output_dir).fs
+        if write_raw_data:
+            fs.mkdir(raw_path)
+        fs.mkdir(id_path)
+        logging.warning(f'[partition]: input_dataset_path {input_path} matches 0 files, [SKIP]')
+        return
+    df = spark.read.format(file_format.value).load(input_path, header=True, inferSchema=True)
+    if part_key not in df.columns:
+        raise ValueError(f'[Partition] error: part_key {part_key} not in df columns')
+    df = df.dropDuplicates([part_key])
+    df = df.withColumn(part_key, df[part_key].cast(StringType()))
+    df.printSchema()
+
+    logging.info('[Partition] start partitioning')
+    # spark operation steps explanation:
+    # keyBy: use the specified column as the index key used in partitioning,
+    # partitionBy(partition_num, func): pass the index key to func and use the result mod partition_num,
+    # map: remove the index key,
+    # toDF: change back to a dataframe with the origin df schema
+    # partitionBy code ref:
+    # https://github.com/apache/spark/blob/master/python/pyspark/rdd.py#L2114
+    # https://github.com/apache/spark/blob/7d88f1c5c7f38c0f1a2bd5e3116c668d9cbd98b1/python/pyspark/rdd.py#L251
+    # a customized partition method is defined to ensure partition consistency between the two parties
+    # values go missing when writing the df as tfrecords without sortWithinPartitions; the root cause is unknown
+    df = df.rdd.keyBy(lambda v: v[part_key]) \
+        .partitionBy(part_num, CityHash64) \
+        .map(lambda v: v[1]) \
+        .toDF(schema=df.schema) \
+        .sortWithinPartitions(part_key)
+    if write_raw_data:
+        logging.info(f'[Partition] writing to raw path: {raw_path}')
+        df.write.format(output_file_format.value).option('compression', 'none').option('header',
+                                                                                       'true').save(raw_path,
+                                                                                                    mode='overwrite')
+    id_df = df.select(part_key)
+    id_df.write.format('csv').option('compression', 'none').option('header', 'true').save(id_path, mode='overwrite')
+
+
+def get_args():
+    parser = argparse.ArgumentParser(description='Partition the dataset')
+    parser.add_argument('--input_path', type=str, help='input data path with wildcard')
+    parser.add_argument('--file_format', type=FileFormat, choices=list(FileFormat), help='format of input data')
+    parser.add_argument('--part_num', type=int, help='partition number')
+    parser.add_argument('--part_key', type=str, help='partition key')
+    parser.add_argument('--output_file_format', type=FileFormat, choices=list(FileFormat), help='format of output file')
+    parser.add_argument('--output_dir', type=str, help='output directory')
+    # NOTE: argparse's type=bool treats any non-empty string as True,
+    # so pass an empty string to disable writing raw data
+    parser.add_argument('--write_raw_data', type=bool, default=True, help='whether to write partitioned raw data')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = get_args()
+    for arg, value in 
vars(args).items(): + logging.info(f'Arg: {arg}, value: {value}') + conf: SparkConf = build_spark_conf() + spark = SparkSession.builder.config(conf=conf).getOrCreate() + partition(spark, + input_path=args.input_path, + file_format=args.file_format, + output_file_format=args.output_file_format, + output_dir=args.output_dir, + part_num=args.part_num, + part_key=args.part_key, + write_raw_data=args.write_raw_data) + spark.stop() diff --git a/web_console_v2/inspection/partition_test.py b/web_console_v2/inspection/partition_test.py new file mode 100644 index 000000000..e217f9718 --- /dev/null +++ b/web_console_v2/inspection/partition_test.py @@ -0,0 +1,104 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import csv +import shutil +import unittest +import tempfile + +from cityhash import CityHash64 # pylint: disable=no-name-in-module + +from testing.spark_test_case import PySparkTestCase +from dataset_directory import DatasetDirectory +from partition import partition, FileFormat + + +class FeatureExtractionV2Test(PySparkTestCase): + + def test_partition(self): + part_num = 3 + file_format = FileFormat.CSV + data = [(str(i), f'x{str(i)}', i + 1) for i in range(1000)] + df = self.spark.createDataFrame(data=data, schema=['raw_id', 'name', 'age']) + expected_ids_list = [] + for part_id in range(part_num): + expected_ids_list.append([d[0] for d in data if CityHash64(d[0]) % part_num == part_id]) + # test write raw data + with tempfile.TemporaryDirectory() as input_path: + df.write.format(file_format.value).option('header', 'true').save(input_path, mode='overwrite') + dataset_dir = DatasetDirectory(self.tmp_dataset_path) + batch_name = '20220331-1200' + side_output_path = dataset_dir.side_output_path(batch_name) + partition(spark=self.spark, + input_path=input_path, + file_format=file_format, + output_file_format=FileFormat.CSV, + output_dir=side_output_path, + part_num=part_num, + part_key='raw_id', + write_raw_data=True) + raw_data_path = os.path.join(side_output_path, 'raw') + filenames = list(filter(lambda file: file.startswith('part'), sorted(os.listdir(raw_data_path)))) + for part_id, file in enumerate(filenames): + with open(os.path.join(raw_data_path, file), 'r', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual(sorted([row['raw_id'] for row in reader]), sorted(expected_ids_list[part_id])) + ids_data_path = os.path.join(side_output_path, 'ids') + filenames = list(filter(lambda file: file.startswith('part'), sorted(os.listdir(ids_data_path)))) + for part_id, file in enumerate(filenames): + with open(os.path.join(ids_data_path, file), 'r', encoding='utf-8') as f: + reader = csv.DictReader(f) + self.assertEqual(sorted([row['raw_id'] for row in reader]), sorted(expected_ids_list[part_id])) + shutil.rmtree(self.tmp_dataset_path) + # test not write raw data + with tempfile.TemporaryDirectory() as input_path: + df.write.format(file_format.value).option('header', 'true').save(input_path, 
mode='overwrite') + dataset_dir = DatasetDirectory(self.tmp_dataset_path) + batch_name = '20220331-1200' + side_output_path = dataset_dir.side_output_path(batch_name) + partition(spark=self.spark, + input_path=input_path, + file_format=file_format, + output_file_format=FileFormat.CSV, + output_dir=side_output_path, + part_num=part_num, + part_key='raw_id', + write_raw_data=False) + raw_data_path = os.path.join(side_output_path, 'raw') + self.assertFalse(os.path.exists(raw_data_path)) + shutil.rmtree(self.tmp_dataset_path) + # test no data + with tempfile.TemporaryDirectory() as input_path: + dataset_dir = DatasetDirectory(self.tmp_dataset_path) + batch_name = '20220331-1200' + side_output_path = dataset_dir.side_output_path(batch_name) + partition(spark=self.spark, + input_path=input_path, + file_format=file_format, + output_file_format=FileFormat.CSV, + output_dir=side_output_path, + part_num=part_num, + part_key='raw_id', + write_raw_data=True) + raw_data_path = os.path.join(side_output_path, 'raw') + self.assertTrue(os.path.exists(raw_data_path)) + ids_data_path = os.path.join(side_output_path, 'ids') + self.assertTrue(os.path.exists(ids_data_path)) + shutil.rmtree(self.tmp_dataset_path) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/psi.py b/web_console_v2/inspection/psi.py new file mode 100644 index 000000000..00cdd8ea6 --- /dev/null +++ b/web_console_v2/inspection/psi.py @@ -0,0 +1,153 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# pylint: skip-file +import os +import logging + +import rsa +import fsspec +from cityhash import CityHash64 +from gmpy2 import powmod +from pyspark.sql import SparkSession, DataFrame +from pyspark.sql.types import StringType +from pyspark.sql.functions import udf + +from util import getenv, FileFormat + + +class PSI(object): + + def __init__(self, + input_dir: str, + wildcard: str, + file_format: FileFormat, + output_dir: str, + part_num: int, + part_key: str, + rsa_key_path: str, + rsa_key_bits: int = 2048): + """ + Args: + input_dir: directory of the origin raw data + wildcard: wildcard pattern used to match the input files + file_format: file format used when reading the input + output_dir: save path of the signed data + part_num: number of partitions + part_key: key column used for partitioning + rsa_key_path: path of the RSA private key; if it does not exist, a new key is generated + rsa_key_bits: key size in bits used when a new key is generated + """ + self._input_dir = input_dir + self._wildcard = wildcard + self._file_format = file_format + self._output_dir = output_dir + self._part_num = part_num + self._part_key = part_key + self._rsa_key_path = rsa_key_path + self._rsa_key_bits = rsa_key_bits + + self._rsa = None + + def run(self, spark: SparkSession): + self._load_rsa_key() + self._partition_and_sign(spark) + + def _partition_and_sign(self, spark: SparkSession): + files = os.path.join(self._input_dir, self._wildcard) + logging.info(f'### loading df..., input files path: {files}') + df = spark.read.format(self._file_format).load(files, header=True, inferSchema=True) + if self._part_key not in df.columns: + raise ValueError(f'### error: part_key {self._part_key} not in df columns') + df = df.dropDuplicates([self._part_key]) + df = df.withColumn(self._part_key, df[self._part_key].cast(StringType())) + df.printSchema() + + logging.info('### partitioning and sorting') + part_idx = df.columns.index(self._part_key) + + sorted_df = df.rdd.keyBy(lambda v: v[part_idx]) \ + .partitionBy(self._part_num, self.partition_fn) \ + .map(lambda v: v[1]) \ + .toDF(schema=df.schema).sortWithinPartitions(self._part_key) + raw_path = os.path.join(self._output_dir, 'raw') + logging.info(f'### writing to raw path: {raw_path}') + self.write_df(sorted_df, raw_path) + d, n = self._rsa.d, self._rsa.n + + @udf() + def sign(v: str): + # RSA-sign the hashed key: s = H(v)^d mod n, with H = CityHash64 + s = powmod(self.partition_fn(v), d, n).digits() + # hash and hex to save space + return hex(self.partition_fn(s))[2:] + + logging.info('### signing') + part_df = sorted_df.select(self._part_key) + sign_df = part_df.withColumn('signed', sign(part_df[self._part_key])) + sign_path = os.path.join(self._output_dir, 'signed') + logging.info(f'### writing to sign path: {sign_path}') + self.write_df(sign_df, sign_path) + + def _load_rsa_key(self): + fs = fsspec.filesystem('hdfs') + if not fs.exists(self._rsa_key_path): + logging.info('[Signer] key does not exist, generate one') + _, private_key = rsa.newkeys(self._rsa_key_bits) + with fs.open(self._rsa_key_path, 'wb') as f: + f.write(private_key.save_pkcs1(format='PEM')) + + with fs.open(self._rsa_key_path) as f: + logging.info('[Signer] Reading private key.') + self._rsa = rsa.PrivateKey.load_pkcs1(f.read()) + + @staticmethod + def partition_fn(v: str) -> int: + return CityHash64(v) + + @staticmethod + def write_df(data: 'DataFrame', path: str, fmt: str = 'csv'): + data.write.format(fmt).option('compression', 'none').option('header', 'true').save(path, mode='overwrite')
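+ +# Example usage (a sketch; the paths and session below are hypothetical): +# spark = SparkSession.builder.getOrCreate() +# PSI(input_dir='/data/raw', wildcard='*.csv', file_format=FileFormat.CSV, output_dir='/data/psi_out', +# part_num=8, part_key='raw_id', rsa_key_path='/keys/psi.pem').run(spark)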
+ + +def main(): + input_dir = getenv('INPUT_DIR') + wildcard = getenv('WILDCARD', '*') + file_format = FileFormat(os.getenv('FILE_FORMAT', 'tfrecords').lower()) + output_dir = getenv('OUTPUT_DIR') + part_num = int(getenv('PART_NUM', '8')) + part_key = getenv('PART_KEY', 'raw_id') + rsa_key_path = getenv('RSA_KEY_PATH') + rsa_key_bits = int(getenv('RSA_KEY_BITS')) + + logging.info(f'preparing psi, input_dir: {input_dir}, wildcard: {wildcard}, output_dir: {output_dir}, ' + f'part_num: {part_num}, part_key: {part_key}, rsa_key_path: {rsa_key_path}, ' + f'rsa_key_bits: {rsa_key_bits}') + + spark = SparkSession.builder.getOrCreate() + PSI(input_dir=input_dir, + wildcard=wildcard, + file_format=file_format, + output_dir=output_dir, + part_num=part_num, + part_key=part_key, + rsa_key_path=rsa_key_path, + rsa_key_bits=rsa_key_bits).run(spark) + spark.stop() + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + main() diff --git a/web_console_v2/inspection/requirements.txt b/web_console_v2/inspection/requirements.txt new file mode 100644 index 000000000..3e18b5d8f --- /dev/null +++ b/web_console_v2/inspection/requirements.txt @@ -0,0 +1,10 @@ +pandas==1.1.5 +fsspec==2022.1.0 +pyarrow==6.0.0 +jsonschema==3.2.0 +rsa==4.7.2 +cityhash==0.2.3 +gmpy2==2.0.8 +gmssl==3.2.1 +# opencv-python cannot be used in docker; use this pre-built CPU-only OpenCV package instead +opencv-python-headless==4.5.5.62 diff --git a/web_console_v2/inspection/sm4_encrypt.py b/web_console_v2/inspection/sm4_encrypt.py new file mode 100644 index 000000000..57854a30e --- /dev/null +++ b/web_console_v2/inspection/sm4_encrypt.py @@ -0,0 +1,76 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import logging +import argparse + +from gmssl.sm4 import CryptSM4, SM4_ENCRYPT +from pyspark.sql import SparkSession +from pyspark.sql.functions import udf +from pyspark.sql.types import StringType +from pyspark.conf import SparkConf +from util import build_spark_conf + +# initial vector for CBC encryption +INITIAL_VECTOR = '00000000000000000000000000000000'
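+ +# NOTE: a fixed key with an all-zero IV makes SM4-CBC deterministic, so the same plaintext +# id always yields the same ciphertext (presumably required for matching ids across parties); +# the trade-off is that the output is not semantically secure.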
+ + +def sm4_encrypt(input_path: str, output_path: str, key_str: str): + key = bytes.fromhex(key_str) + iv = bytes.fromhex(INITIAL_VECTOR) + crypt_sm4 = CryptSM4() + crypt_sm4.set_key(key, SM4_ENCRYPT) + + conf: SparkConf = build_spark_conf() + spark = SparkSession.builder.config(conf=conf).getOrCreate() + broadcast_vals = spark.sparkContext.broadcast({'crypt_sm4': crypt_sm4, 'iv': iv}) + + @udf(StringType()) + def sm4(value_string: str) -> str: + crypt_sm4 = broadcast_vals.value['crypt_sm4'] + iv = broadcast_vals.value['iv'] + encrypt_value = crypt_sm4.crypt_cbc(iv, value_string.encode('utf-8')) + return encrypt_value.hex() + + df = spark.read.format('csv').load(input_path) + df = df.withColumnRenamed('_c0', 'raw_id') + df = df.withColumn('raw_id', sm4(df['raw_id'])) + df.write.format('csv').option('compression', 'none').option('header', 'true').save(path=output_path, + mode='overwrite') + + +def get_args(): + parser = argparse.ArgumentParser(description='sm4 encrypt') + parser.add_argument('--input', type=str, help='path of input sha256') + parser.add_argument('--output', type=str, help='path of output sm4') + return parser.parse_args() + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + args = get_args() + logging.info('Input Params:\n' + '--------------------------------\n' + f'\tinput: {args.input}\n' + f'\toutput: {args.output}\n' + '--------------------------------') + # key for CBC encryption + enc_string = '64EC7C763AB7BF64E2D75FF83A319910' + + sm4_encrypt(args.input.strip(), args.output.strip(), enc_string) diff --git a/web_console_v2/inspection/testing/__init__.py b/web_console_v2/inspection/testing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/web_console_v2/inspection/testing/spark_test_case.py b/web_console_v2/inspection/testing/spark_test_case.py new file mode 100644 index 000000000..ff53ba834 --- /dev/null +++ b/web_console_v2/inspection/testing/spark_test_case.py @@ -0,0 +1,38 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +import unittest +import tempfile +import os +from pathlib import Path + +from pyspark import SparkConf +from pyspark.sql import SparkSession + + +class PySparkTestCase(unittest.TestCase): + + @classmethod + def setUpClass(cls): + # Uses threads to run spark + # Ref: https://spark.apache.org/docs/latest/submitting-applications.html#master-urls + conf = SparkConf().setMaster('local[*]').setAppName('test') + cls.spark = SparkSession.builder.config(conf=conf).getOrCreate() + cls.tmp_dataset_path = os.path.join(tempfile.gettempdir(), 'tmp_data') + cls.test_data = str(Path(__file__, '../test_data').resolve()) + + @classmethod + def tearDownClass(cls): + cls.spark.stop() diff --git a/web_console_v2/inspection/testing/spark_test_case_test.py b/web_console_v2/inspection/testing/spark_test_case_test.py new file mode 100644 index 000000000..4ea5eb901 --- /dev/null +++ b/web_console_v2/inspection/testing/spark_test_case_test.py @@ -0,0 +1,30 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import unittest + +from testing.spark_test_case import PySparkTestCase + + +class SparkDemoTest(PySparkTestCase): + + def test_with_df(self): + df = self.spark.createDataFrame(data=[('Alice', 10), ('Bob', 21)], schema=['name', 'age']) + self.assertEqual(df.count(), 2) + self.assertEqual(df.agg({'age': 'sum'}).collect()[0][0], 31) + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/inspection/testing/test_data/alignment_schema.json b/web_console_v2/inspection/testing/test_data/alignment_schema.json new file mode 100644 index 000000000..f13e7131c --- /dev/null +++ b/web_console_v2/inspection/testing/test_data/alignment_schema.json @@ -0,0 +1,32 @@ +{ + "additionalProperties": false, + "properties": { + "channel": { + "type": "integer" + }, + "cols": { + "type": "integer" + }, + "image": { + "type": "string" + }, + "label": { + "type": "string" + }, + "raw_id": { + "type": "integer" + }, + "rows": { + "type": "integer" + } + }, + "required": [ + "raw_id", + "image", + "rows", + "cols", + "channel", + "label" + ], + "type": "object" +} \ No newline at end of file diff --git a/web_console_v2/inspection/testing/test_data/alignment_schema_error.json b/web_console_v2/inspection/testing/test_data/alignment_schema_error.json new file mode 100644 index 000000000..43c778b7e --- /dev/null +++ b/web_console_v2/inspection/testing/test_data/alignment_schema_error.json @@ -0,0 +1,32 @@ +{ + "additionalProperties": false, + "properties": { + "channel": { + "type": "integer" + }, + "cols": { + "type": "integer" + }, + "image": { + "type": "string" + }, + "label": { + "type": "string" + }, + "raw_id": { + "type": "string" + }, + "rows": { + "type": "integer" + } + }, + "required": [ + "raw_id", + "image", + "rows", + "cols", + "channel", + "label" + ], + "type": "object" +} \ No newline at end of file diff --git a/web_console_v2/inspection/testing/test_data/csv/medium_csv/default_credit_hetero_guest.csv 
b/web_console_v2/inspection/testing/test_data/csv/medium_csv/default_credit_hetero_guest.csv new file mode 100644 index 000000000..68ef0352b --- /dev/null +++ b/web_console_v2/inspection/testing/test_data/csv/medium_csv/default_credit_hetero_guest.csv @@ -0,0 +1,102 @@ +example_id,raw_id,event_time,x0,x1,x2,x3,x4,x5,x6,x7,x8,x9 +1,1,20210621,-1.13672,0.810161,0.185828,-1.057295,-1.24602,1.794564,1.782348,-0.696663,-0.666599,-1.530046 +2,2,20210621,-0.365981,0.810161,0.185828,0.858557,-1.029047,-0.874991,1.782348,0.138865,0.188746,0.234917 +3,3,20210621,-0.597202,0.810161,0.185828,0.858557,-0.161156,0.014861,0.111736,0.138865,0.188746,0.234917 +4,4,20210621,-0.905498,0.810161,0.185828,-1.057295,0.164303,0.014861,0.111736,0.138865,0.188746,0.234917 +5,5,20210621,-0.905498,-1.234323,0.185828,-1.057295,2.334029,-0.874991,0.111736,-0.696663,0.188746,0.234917 +6,6,20210621,-0.905498,-1.234323,-1.079457,0.858557,0.164303,0.014861,0.111736,0.138865,0.188746,0.234917 +7,7,20210621,2.56283,-1.234323,-1.079457,0.858557,-0.703588,0.014861,0.111736,0.138865,0.188746,0.234917 +8,8,20210621,-0.520128,0.810161,0.185828,0.858557,-1.354506,0.014861,-0.72357,-0.696663,0.188746,0.234917 +9,9,20210621,-0.211833,0.810161,1.451114,-1.057295,-0.812074,0.014861,0.111736,1.809921,0.188746,0.234917 +10,10,20210621,-1.13672,-1.234323,1.451114,0.858557,-0.05267,-1.764843,-1.558876,-1.532192,-1.521944,-0.647565 +11,11,20210621,0.250611,0.810161,1.451114,0.858557,-0.161156,0.014861,0.111736,1.809921,0.188746,0.234917 +12,12,20210621,0.713055,0.810161,-1.079457,0.858557,1.683111,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +13,13,20210621,3.564792,0.810161,0.185828,0.858557,0.598248,-0.874991,0.111736,-0.696663,-0.666599,-0.647565 +14,14,20210621,-0.75135,-1.234323,0.185828,0.858557,-0.595102,0.904712,1.782348,1.809921,0.188746,0.234917 +15,15,20210621,0.635981,-1.234323,-1.079457,0.858557,-0.703588,0.014861,0.111736,0.138865,0.188746,0.234917 +16,16,20210621,-0.905498,0.810161,1.451114,2.77441,-1.354506,0.904712,1.782348,0.138865,0.188746,0.234917 +17,17,20210621,-1.13672,-1.234323,-1.079457,0.858557,-1.24602,0.014861,0.111736,1.809921,1.899436,1.999879 +18,18,20210621,1.175499,-1.234323,-1.079457,-1.057295,1.466139,0.014861,0.111736,0.138865,-0.666599,-0.647565 +19,19,20210621,1.483795,0.810161,-1.079457,-1.057295,1.466139,0.904712,-1.558876,-1.532192,-1.521944,-1.530046 +20,20,20210621,0.096463,0.810161,-1.079457,0.858557,-0.703588,0.904712,-1.558876,-1.532192,-1.521944,-1.530046 +21,21,20210621,-0.288907,0.810161,1.451114,0.858557,0.381275,0.014861,0.111736,0.138865,0.188746,0.234917 +22,22,20210621,-0.365981,0.810161,0.185828,-1.057295,0.381275,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +23,23,20210621,-0.75135,0.810161,0.185828,0.858557,-1.029047,1.794564,0.111736,0.138865,1.899436,1.999879 +24,24,20210621,2.17746,0.810161,-1.079457,-1.057295,0.489762,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 +25,25,20210621,-0.597202,-1.234323,-1.079457,0.858557,-1.354506,0.014861,0.111736,0.138865,-0.666599,0.234917 +26,26,20210621,-0.905498,-1.234323,1.451114,0.858557,-1.354506,0.014861,0.111736,0.138865,0.188746,0.234917 +27,27,20210621,-0.828424,-1.234323,-1.079457,0.858557,-0.920561,0.904712,-1.558876,-0.696663,-0.666599,-0.647565 +28,28,20210621,-0.905498,0.810161,1.451114,0.858557,-0.595102,0.014861,0.111736,0.138865,0.188746,0.234917 +29,29,20210621,-0.905498,0.810161,1.451114,-1.057295,1.249166,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 
+30,30,20210621,-0.905498,-1.234323,-1.079457,0.858557,-1.029047,0.014861,0.111736,0.138865,0.188746,0.234917 +31,31,20210621,0.481833,0.810161,-1.079457,0.858557,-0.920561,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +32,32,20210621,-0.905498,-1.234323,0.185828,0.858557,-0.269643,1.794564,0.111736,0.138865,0.188746,0.234917 +33,33,20210621,-0.520128,-1.234323,-1.079457,0.858557,-0.378129,0.014861,0.111736,0.138865,0.188746,0.234917 +34,34,20210621,2.56283,0.810161,0.185828,-1.057295,2.00857,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 +35,35,20210621,2.56283,-1.234323,-1.079457,-1.057295,2.442516,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 +36,36,20210621,-0.057685,-1.234323,-1.079457,0.858557,-0.595102,-0.874991,-0.72357,-1.532192,-1.521944,-1.530046 +37,37,20210621,0.867203,-1.234323,0.185828,-1.057295,0.489762,0.014861,0.111736,0.138865,0.188746,0.234917 +38,38,20210621,-0.828424,0.810161,0.185828,0.858557,-1.462993,0.014861,0.111736,0.138865,0.188746,0.234917 +39,39,20210621,-0.905498,-1.234323,-1.079457,0.858557,-1.137534,0.904712,-0.72357,-0.696663,-1.521944,-1.530046 +40,40,20210621,0.867203,-1.234323,-1.079457,0.858557,-0.486615,-0.874991,-0.72357,1.809921,-0.666599,0.234917 +41,41,20210621,1.483795,-1.234323,-1.079457,0.858557,-0.269643,0.014861,0.111736,0.138865,0.188746,0.234917 +42,42,20210621,-0.75135,0.810161,-1.079457,0.858557,-1.137534,0.014861,0.111736,0.138865,0.188746,0.234917 +43,43,20210621,-1.213794,-1.234323,0.185828,0.858557,-1.462993,0.014861,0.111736,0.138865,0.188746,0.234917 +44,44,20210621,-0.211833,0.810161,0.185828,-1.057295,0.164303,0.014861,0.111736,0.138865,0.188746,0.234917 +45,45,20210621,-0.982572,0.810161,-1.079457,0.858557,-0.595102,0.014861,0.111736,0.138865,1.899436,0.234917 +46,46,20210621,0.327685,-1.234323,-1.079457,0.858557,-0.703588,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 +47,47,20210621,-1.13672,0.810161,-1.079457,0.858557,-1.462993,0.014861,0.111736,1.809921,-0.666599,0.234917 +48,48,20210621,-0.134759,0.810161,3.981685,0.858557,1.14068,0.014861,0.111736,-0.696663,0.188746,0.234917 +49,49,20210621,1.637943,-1.234323,0.185828,0.858557,-0.378129,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +50,50,20210621,-1.13672,-1.234323,-1.079457,0.858557,-1.24602,0.014861,0.111736,0.138865,0.188746,0.234917 +51,51,20210621,-0.75135,-1.234323,1.451114,0.858557,0.706734,0.904712,1.782348,1.809921,1.899436,1.999879 +52,52,20210621,-0.520128,0.810161,1.451114,2.77441,0.815221,0.014861,0.111736,0.138865,0.188746,0.234917 +53,53,20210621,1.098425,0.810161,0.185828,-1.057295,1.466139,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 +54,54,20210621,0.096463,0.810161,-1.079457,0.858557,-1.137534,0.904712,1.782348,0.138865,0.188746,0.234917 +55,55,20210621,-0.134759,0.810161,-1.079457,0.858557,-0.703588,1.794564,0.111736,0.138865,0.188746,0.234917 +56,56,20210621,2.56283,0.810161,-1.079457,-1.057295,1.032193,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 +57,57,20210621,0.096463,0.810161,1.451114,-1.057295,-0.161156,0.014861,0.111736,0.138865,-0.666599,-0.647565 +58,58,20210621,0.096463,0.810161,0.185828,-1.057295,-0.161156,0.014861,0.111736,0.138865,0.188746,0.234917 +59,59,20210621,0.250611,0.810161,-1.079457,0.858557,-0.161156,-0.874991,2.617654,1.809921,1.899436,1.999879 +60,60,20210621,1.792091,0.810161,0.185828,-1.057295,-0.703588,0.014861,0.111736,0.138865,0.188746,0.234917 +61,61,20210621,2.56283,0.810161,1.451114,-1.057295,-0.812074,0.014861,0.111736,0.138865,0.188746,0.234917 
+62,62,20210621,-0.75135,-1.234323,0.185828,-1.057295,0.381275,0.014861,0.111736,0.138865,0.188746,0.234917 +63,63,20210621,-0.905498,-1.234323,-1.079457,0.858557,-0.703588,1.794564,1.782348,1.809921,1.899436,1.999879 +64,64,20210621,-0.905498,0.810161,0.185828,-1.057295,1.14068,0.014861,0.111736,0.138865,-1.521944,-1.530046 +65,65,20210621,-0.288907,0.810161,0.185828,-1.057295,1.683111,-0.874991,-0.72357,-1.532192,-1.521944,-0.647565 +66,66,20210621,0.250611,-1.234323,-1.079457,-1.057295,2.334029,-1.764843,-1.558876,-1.532192,-0.666599,1.999879 +67,67,20210621,-1.213794,-1.234323,0.185828,-1.057295,2.225543,1.794564,1.782348,1.809921,0.188746,0.234917 +68,68,20210621,0.327685,0.810161,-1.079457,0.858557,-0.595102,1.794564,-0.72357,-0.696663,-0.666599,-0.647565 +69,69,20210621,-0.288907,0.810161,1.451114,0.858557,-0.703588,0.904712,-1.558876,-1.532192,-0.666599,1.999879 +70,70,20210621,-1.13672,-1.234323,3.981685,0.858557,-1.462993,1.794564,0.111736,0.138865,0.188746,0.234917 +71,71,20210621,-0.674276,-1.234323,-1.079457,0.858557,-0.486615,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +72,72,20210621,1.175499,-1.234323,0.185828,0.858557,-0.703588,1.794564,1.782348,1.809921,1.899436,1.999879 +73,73,20210621,0.250611,0.810161,0.185828,-1.057295,-0.378129,-0.874991,-0.72357,-0.696663,-0.666599,1.999879 +74,74,20210621,0.944277,0.810161,-1.079457,0.858557,0.164303,0.904712,-1.558876,-0.696663,-0.666599,-0.647565 +75,75,20210621,1.329647,-1.234323,-1.079457,0.858557,-0.378129,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +76,76,20210621,-1.13672,-1.234323,0.185828,0.858557,-1.24602,0.014861,0.111736,1.809921,0.188746,0.234917 +77,77,20210621,-0.905498,-1.234323,1.451114,0.858557,-1.137534,-0.874991,0.111736,0.138865,0.188746,0.234917 +78,78,20210621,1.021351,0.810161,-1.079457,-1.057295,1.032193,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +79,79,20210621,-1.059646,0.810161,0.185828,0.858557,-1.462993,0.014861,0.111736,0.138865,0.188746,0.234917 +80,80,20210621,0.558907,0.810161,0.185828,0.858557,0.923707,0.904712,-1.558876,-1.532192,-1.521944,-1.530046 +81,81,20210621,2.331608,0.810161,1.451114,2.77441,-0.269643,0.014861,0.111736,0.138865,0.188746,0.234917 +82,82,20210621,1.483795,0.810161,-1.079457,0.858557,-1.029047,0.014861,0.111736,0.138865,0.188746,0.234917 +83,83,20210621,-0.828424,-1.234323,1.451114,0.858557,-0.595102,0.014861,0.111736,0.138865,0.188746,0.234917 +84,84,20210621,1.792091,0.810161,0.185828,-1.057295,0.923707,0.014861,0.111736,1.809921,0.188746,0.234917 +85,85,20210621,-0.905498,0.810161,1.451114,0.858557,1.466139,0.014861,0.111736,0.138865,0.188746,0.234917 +86,86,20210621,-0.057685,-1.234323,0.185828,0.858557,-0.269643,0.014861,0.111736,0.138865,0.188746,0.234917 +87,87,20210621,1.483795,0.810161,-1.079457,-1.057295,1.032193,-0.874991,-0.72357,1.809921,0.188746,-0.647565 +88,88,20210621,-0.057685,0.810161,0.185828,0.858557,-0.378129,0.014861,0.111736,0.138865,0.188746,0.234917 +89,89,20210621,-0.288907,0.810161,-1.079457,-1.057295,-0.05267,0.014861,0.111736,0.138865,-0.666599,-0.647565 +90,90,20210621,-1.13672,-1.234323,1.451114,0.858557,0.923707,1.794564,1.782348,0.138865,0.188746,0.234917 +91,91,20210621,0.250611,-1.234323,-1.079457,-1.057295,1.900084,1.794564,1.782348,1.809921,1.899436,1.999879 +92,92,20210621,0.867203,0.810161,-1.079457,0.858557,0.381275,-0.874991,-0.72357,-0.696663,0.188746,0.234917 +93,93,20210621,-0.520128,0.810161,-1.079457,0.858557,-0.920561,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 
+94,94,20210621,-0.057685,0.810161,0.185828,-1.057295,0.164303,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +95,95,20210621,-0.828424,0.810161,0.185828,0.858557,-1.354506,0.014861,0.111736,0.138865,0.188746,0.234917 +96,96,20210621,-0.597202,-1.234323,0.185828,0.858557,-0.05267,0.014861,0.111736,0.138865,0.188746,0.234917 +97,97,20210621,1.483795,-1.234323,-1.079457,-1.057295,0.815221,-0.874991,-0.72357,-0.696663,-0.666599,-0.647565 +98,98,20210621,-0.134759,-1.234323,-1.079457,0.858557,-0.920561,0.014861,0.111736,0.138865,0.188746,0.234917 +99,99,20210621,-0.905498,0.810161,1.451114,-1.057295,-1.462993,0.014861,0.111736,0.138865,0.188746,0.234917 +100,100,20210621,-1.13672,-1.234323,0.185828,-1.057295,0.272789,0.014861,0.111736,0.138865,0.188746,0.234917 +101,101,20210621,-0.211833,-1.234323,-1.079457,0.858557,-0.378129,-1.764843,-1.558876,-1.532192,-1.521944,-1.530046 \ No newline at end of file diff --git a/web_console_v2/inspection/testing/test_data/csv/small_csv/default_small_data.csv b/web_console_v2/inspection/testing/test_data/csv/small_csv/default_small_data.csv new file mode 100644 index 000000000..f011e7c32 --- /dev/null +++ b/web_console_v2/inspection/testing/test_data/csv/small_csv/default_small_data.csv @@ -0,0 +1,6 @@ +example_id,raw_id,event_time,x0,x1,x2,label +1,1,20210621,-1.13672,0.810161,0.185828,cat +2,2,20210621,-0.365981,0.810161,0.185828,dog +3,3,20210621,-0.597202,0.810161,0.185828,frog +4,4,20210621,-0.905498,0.810161,0.185828,cat +5,5,20210621,-0.905498,-1.234323,0.185828,dog \ No newline at end of file diff --git a/web_console_v2/inspection/testing/test_data/image/images/000000005756.jpg b/web_console_v2/inspection/testing/test_data/image/images/000000005756.jpg new file mode 100644 index 000000000..5ef11dfe7 Binary files /dev/null and b/web_console_v2/inspection/testing/test_data/image/images/000000005756.jpg differ diff --git a/web_console_v2/inspection/testing/test_data/image/images/000000008181.jpg b/web_console_v2/inspection/testing/test_data/image/images/000000008181.jpg new file mode 100644 index 000000000..b71a457b8 Binary files /dev/null and b/web_console_v2/inspection/testing/test_data/image/images/000000008181.jpg differ diff --git a/web_console_v2/inspection/testing/test_data/image/images/000000018425.jpg b/web_console_v2/inspection/testing/test_data/image/images/000000018425.jpg new file mode 100644 index 000000000..2edae28c6 Binary files /dev/null and b/web_console_v2/inspection/testing/test_data/image/images/000000018425.jpg differ diff --git a/web_console_v2/inspection/testing/test_data/image/manifest.json b/web_console_v2/inspection/testing/test_data/image/manifest.json new file mode 100644 index 000000000..4962e4f4a --- /dev/null +++ b/web_console_v2/inspection/testing/test_data/image/manifest.json @@ -0,0 +1,40 @@ +{ + "images": [ + { + "name": "000000018425.jpg", + "file_name": "000000018425.jpg", + "width": 640, + "height": 480, + "file_size": 209720, + "created_at": "2021-08-30T16:52:15.501516", + "annotation": { + "caption": "Two giraffe grazing on tree leaves under a hazy sky.", + "label": "B" + } + }, + { + "name": "000000005756.jpg", + "file_name": "000000005756.jpg", + "width": 640, + "height": 361, + "file_size": 215758, + "created_at": "2021-08-30T16:52:15.501516", + "annotation": { + "caption": "A group of people holding umbrellas looking at graffiti.", + "label": "A" + } + }, + { + "name": "000000008181.jpg", + "file_name": "000000008181.jpg", + "width": 640, + "height": 480, + "file_size": 214617, + "created_at": 
"2021-08-30T16:52:15.501516", + "annotation": { + "caption": "A motorcycle is parked on a gravel lot", + "label": "C" + } + } + ] +} \ No newline at end of file diff --git a/web_console_v2/inspection/testing/test_data/tfrecords/small_tfrecords/default_small_data.tfrecords b/web_console_v2/inspection/testing/test_data/tfrecords/small_tfrecords/default_small_data.tfrecords new file mode 100644 index 000000000..8ae5232ce Binary files /dev/null and b/web_console_v2/inspection/testing/test_data/tfrecords/small_tfrecords/default_small_data.tfrecords differ diff --git a/web_console_v2/inspection/util.py b/web_console_v2/inspection/util.py new file mode 100644 index 000000000..4770f8b05 --- /dev/null +++ b/web_console_v2/inspection/util.py @@ -0,0 +1,130 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import enum +from typing import Optional +import json +import logging + +import fsspec +from pyspark.conf import SparkConf +from pyspark.sql import SparkSession +from pyspark.sql.dataframe import DataFrame +from pyspark.sql.types import StructType +from urllib.parse import urlparse + +EXAMPLE_ID = 'example_id' +DEFAULT_SCHEME_TYPE = 'file' + + +class FileSuffixError(Exception): + + def __init__(self, message): + super().__init__() + self.message = message + + def __repr__(self): + return f'{type(self).__name__}({self.message})' + + +def dataset_schema_path(dataset_path: str) -> str: + return os.path.join(dataset_path, 'schema.json') + + +def build_spark_conf(conf: Optional[SparkConf] = None) -> SparkConf: + """Builds spark config by following our practices.""" + if not conf: + conf = SparkConf() + + # Ref: https://spark.apache.org/docs/3.1.1/configuration.html + + # ---------- speculation related + # Re-launches tasks if they are running slowly in a stage + conf.set('spark.speculation', 'true') + # Checks tasks to speculate every 100ms + conf.set('spark.speculation.interval', 100) + # Fraction of tasks which must be complete before speculation is enabled for a particular stage. + conf.set('spark.speculation.quantile', 0.9) + # How many times slower a task is than the median to be considered for speculation. 
+ conf.set('spark.speculation.multiplier', 2) + # ---------- end of speculation related + + # disable snappy output compression, as model training does not support reading snappy files + conf.set('spark.hadoop.mapred.output.compress', 'false') + + return conf + + +def getenv(name: str, default: Optional[str] = None) -> str: + value = os.getenv(name) + if value is not None: + return value + if default is None: + raise ValueError(f'Environment variable {name} is not set') + return default + + +def load_tfrecords(spark: SparkSession, files: str, dataset_path: str) -> DataFrame: + logging.info(f'### loading df..., input files path: {files}') + # read schema if present + try: + with fsspec.open(dataset_schema_path(dataset_path)) as f: + schema = StructType.fromJson(json.load(f)) + return spark.read.format('tfrecords').schema(schema).load(files) + except Exception as e: # pylint: disable=broad-except + # intersection datasets generated by FLApp have no schema, so ignore the error and load without one + logging.info(f'### failed to load dataset schema, err: {e}') + return spark.read.format('tfrecords').load(files) + + +def is_file_matched(path: str) -> bool: + fs: fsspec.AbstractFileSystem = fsspec.get_mapper(path).fs + glob_path = path + # the input path of spark might be a dir, a file or a wildcard, while fsspec glob can only match files and wildcards, + # so we manually append the wildcard ** to dirs + if fs.isdir(path): + glob_path = os.path.join(path, '**') + files = fs.glob(glob_path) + # ignore _SUCCESS file and xxx._SUCCESS file + data_files = [file for file in files if os.path.split(file)[1] != '_SUCCESS' and not file.endswith('._SUCCESS')] + return len(data_files) > 0 + + +class FileFormat(str, enum.Enum): + CSV = 'csv' + TFRECORDS = 'tfrecords' + UNKNOWN = 'unknown' + + +def load_by_file_format(spark: SparkSession, input_batch_path: str, file_format: FileFormat) -> DataFrame: + if file_format == FileFormat.CSV: + return spark.read.format('csv').option('header', 'true').option('inferSchema', 'true').load(input_batch_path) + if file_format == FileFormat.TFRECORDS: + return spark.read.format('tfrecords').load(input_batch_path) + err_msg = f'### no valid file format, format: {file_format}' + logging.error(err_msg) + raise ValueError(err_msg) + + +def normalize_file_path(url: Optional[str]) -> Optional[str]: + if url is None: + return url + url_parser = urlparse(url) + scheme = url_parser.scheme + # prepend the default scheme if none is present + if scheme == '' and url.startswith('/'): + url = f'{DEFAULT_SCHEME_TYPE}://{url}' + return url diff --git a/web_console_v2/inspection/util_test.py b/web_console_v2/inspection/util_test.py new file mode 100644 index 000000000..7cebe7dac --- /dev/null +++ b/web_console_v2/inspection/util_test.py @@ -0,0 +1,216 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +import unittest +import fsspec +import os +import json + +from util import FileFormat, dataset_schema_path, load_tfrecords, is_file_matched, load_by_file_format, \ + normalize_file_path +from pyspark.sql.types import BinaryType, IntegerType, StructField, StructType, StringType +from testing.spark_test_case import PySparkTestCase + + +class UtilTest(PySparkTestCase): + + def tearDown(self) -> None: + self._clear_up() + return super().tearDown() + + def _generate_tfrecords_with_schema(self, dataset_path: str, batch_path: str): + data = [ + (1, b'01001100111', 256, 256, 3, 'cat'), + (2, b'01001100111', 244, 246, 3, 'dog'), + (3, b'01001100111', 255, 312, 3, 'cat'), + (4, b'01001100111', 256, 255, 3, 'cat'), + (5, b'01001100111', 201, 241, 3, 'cat'), + (6, b'01001100111', 255, 221, 3, 'dog'), + (7, b'01001100111', 201, 276, 3, 'dog'), + (8, b'01001100111', 258, 261, 3, 'dog'), + (9, b'01001100111', 198, 194, 3, 'cat'), + (10, b'01001100111', 231, 221, 3, 'cat'), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('image', BinaryType(), False), + StructField('rows', IntegerType(), False), + StructField('cols', IntegerType(), False), + StructField('channel', IntegerType(), False), + StructField('label', StringType(), False), + ]) + df = self.spark.createDataFrame(data=data, schema=schema) + df.repartition(3).write.format('tfrecords').option('compression', 'none').save(batch_path, mode='overwrite') + with fsspec.open(dataset_schema_path(dataset_path), mode='w') as f: + json.dump(df.schema.jsonValue(), f) + + def _generate_tfrecords_no_schema(self, batch_path: str): + data = [ + (1, b'01001100111', 256, 256, 3, 'cat'), + (2, b'01001100111', 244, 246, 3, 'dog'), + (3, b'01001100111', 255, 312, 3, 'cat'), + (4, b'01001100111', 256, 255, 3, 'cat'), + (5, b'01001100111', 201, 241, 3, 'cat'), + (6, b'01001100111', 255, 221, 3, 'dog'), + (7, b'01001100111', 201, 276, 3, 'dog'), + (8, b'01001100111', 258, 261, 3, 'dog'), + (9, b'01001100111', 198, 194, 3, 'cat'), + (10, b'01001100111', 231, 221, 3, 'cat'), + ] + df = self.spark.createDataFrame(data=data) + df.repartition(3).write.format('tfrecords').option('compression', 'none').save(batch_path, mode='overwrite') + + def _clear_up(self): + fs = fsspec.filesystem('file') + if fs.isdir(self.tmp_dataset_path): + fs.rm(self.tmp_dataset_path, recursive=True) + + def test_load_tfrecords_with_schema(self): + dataset_path = os.path.join(self.tmp_dataset_path, 'input_dataset') + batch_path = os.path.join(dataset_path, 'batch/batch_test') + self._generate_tfrecords_with_schema(dataset_path, batch_path) + df = load_tfrecords(spark=self.spark, files=batch_path, dataset_path=dataset_path) + data = [ + (1, b'01001100111', 256, 256, 3, 'cat'), + (2, b'01001100111', 244, 246, 3, 'dog'), + (3, b'01001100111', 255, 312, 3, 'cat'), + (4, b'01001100111', 256, 255, 3, 'cat'), + (5, b'01001100111', 201, 241, 3, 'cat'), + (6, b'01001100111', 255, 221, 3, 'dog'), + (7, b'01001100111', 201, 276, 3, 'dog'), + (8, b'01001100111', 258, 261, 3, 'dog'), + (9, b'01001100111', 198, 194, 3, 'cat'), + (10, b'01001100111', 231, 221, 3, 'cat'), + ] + schema = StructType([ + StructField('raw_id', IntegerType(), False), + StructField('image', BinaryType(), False), + StructField('rows', IntegerType(), False), + StructField('cols', IntegerType(), False), + StructField('channel', IntegerType(), False), + StructField('label', StringType(), False), + ]) + expect_df = self.spark.createDataFrame(data=data, schema=schema) + 
self.assertCountEqual(df.select('*').collect(), expect_df.select('*').collect()) + + def test_load_tfrecords_no_schema(self): + dataset_path = os.path.join(self.tmp_dataset_path, 'input_dataset') + batch_path = os.path.join(dataset_path, 'batch/batch_test') + self._generate_tfrecords_no_schema(batch_path) + df = load_tfrecords(spark=self.spark, files=batch_path, dataset_path=dataset_path) + expect_data = [ + set([1, '01001100111', 256, 256, 3, 'cat']), + set([2, '01001100111', 244, 246, 3, 'dog']), + set([3, '01001100111', 255, 312, 3, 'cat']), + set([4, '01001100111', 256, 255, 3, 'cat']), + set([5, '01001100111', 201, 241, 3, 'cat']), + set([6, '01001100111', 255, 221, 3, 'dog']), + set([7, '01001100111', 201, 276, 3, 'dog']), + set([8, '01001100111', 258, 261, 3, 'dog']), + set([9, '01001100111', 198, 194, 3, 'cat']), + set([10, '01001100111', 231, 221, 3, 'cat']), + ] + data = [] + for row in df.select('*').collect(): + row_data = [] + for key, value in row.asDict().items(): + row_data.append(value) + data.append(set(row_data)) + self.assertCountEqual(data, expect_data) + + def test_is_file_matched(self): + dataset_path = os.path.join(self.tmp_dataset_path, 'input_dataset') + batch_path = os.path.join(dataset_path, 'batch/batch_test') + # test no data + os.makedirs(batch_path, exist_ok=True) + fs: fsspec.AbstractFileSystem = fsspec.get_mapper(dataset_path).fs + fs.touch(os.path.join(batch_path, '_SUCCESS')) + fs.touch(os.path.join(batch_path, 'part-0000._SUCCESS')) + fs.touch(os.path.join(batch_path, 'part-0001._SUCCESS')) + self.assertFalse(is_file_matched(batch_path)) + + self._generate_tfrecords_with_schema(dataset_path, batch_path) + self.assertTrue(is_file_matched(batch_path)) + batch_path_csv = os.path.join(batch_path, '*.csv') + self.assertFalse(is_file_matched(batch_path_csv)) + batch_path_all = os.path.join(batch_path, '**') + self.assertTrue(is_file_matched(batch_path_all)) + + def test_load_by_file_format_csv(self): + dataset_path = os.path.join(self.test_data, 'csv/small_csv') + df = load_by_file_format(spark=self.spark, input_batch_path=dataset_path, file_format=FileFormat.CSV) + expect_dtypes = [('example_id', 'int'), ('raw_id', 'int'), ('event_time', 'int'), ('x0', 'double'), + ('x1', 'double'), ('x2', 'double'), ('label', 'string')] + self.assertCountEqual(expect_dtypes, df.dtypes) + expect_data = [ + [1, 1, 20210621, -1.13672, 0.810161, 0.185828, 'cat'], + [2, 2, 20210621, -0.365981, 0.810161, 0.185828, 'dog'], + [3, 3, 20210621, -0.597202, 0.810161, 0.185828, 'frog'], + [4, 4, 20210621, -0.905498, 0.810161, 0.185828, 'cat'], + [5, 5, 20210621, -0.905498, -1.234323, 0.185828, 'dog'], + ] + data = [] + for row in df.select('*').collect(): + row_data = [] + for key, _ in expect_dtypes: + row_data.append(row.asDict().get(key)) + data.append(row_data) + self.assertCountEqual(data, expect_data) + + def test_load_by_file_format_tfrecords(self): + dataset_path = os.path.join(self.test_data, 'tfrecords/small_tfrecords') + df = load_by_file_format(spark=self.spark, input_batch_path=dataset_path, file_format=FileFormat.TFRECORDS) + expect_dtypes = [('example_id', 'bigint'), ('raw_id', 'bigint'), ('event_time', 'bigint'), ('label', 'string')] + self.assertCountEqual(expect_dtypes, df.dtypes) + expect_data = [ + [1, 1, 20210621, 'cat'], + [2, 2, 20210621, 'dog'], + [3, 3, 20210621, 'frog'], + [4, 4, 20210621, 'cat'], + [5, 5, 20210621, 'dog'], + ] + data = [] + for row in df.select('*').collect(): + row_data = [] + for key, _ in expect_dtypes: + 
row_data.append(row.asDict().get(key)) + data.append(row_data) + self.assertCountEqual(data, expect_data) + + def test_load_by_file_format_failed(self): + dataset_path = os.path.join(self.tmp_dataset_path, 'input_dataset') + batch_path = os.path.join(dataset_path, 'batch/batch_test') + with self.assertRaises(ValueError): + load_by_file_format(spark=self.spark, input_batch_path=batch_path, file_format='unknown') + + def test_normalize_file_path(self): + path = normalize_file_path('') + self.assertEqual(path, '') + + path = normalize_file_path('/') + self.assertEqual(path, 'file:///') + + path = normalize_file_path('/test/123') + self.assertEqual(path, 'file:///test/123') + + path = normalize_file_path('test/123') + self.assertEqual(path, 'test/123') + + path = normalize_file_path('hdfs:///test/123') + self.assertEqual(path, 'hdfs:///test/123') + + +if __name__ == '__main__': + unittest.main() diff --git a/web_console_v2/nginx.conf b/web_console_v2/nginx.conf index 7b507bd8b..cd98cfb97 100644 --- a/web_console_v2/nginx.conf +++ b/web_console_v2/nginx.conf @@ -1,6 +1,6 @@ server { listen 1989; - client_max_body_size 20m; + client_max_body_size 200m; # Static files location ^~ /v2/ { root /app/client/build; @@ -8,6 +8,15 @@ server { try_files $uri /index.html = 404; } + location ^~ /libs/ { + root /app/client/build; + } + + # favicon + location ^~ /icon/ { + root /app/client/build; + } + location /api/v2/ { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; diff --git a/web_console_v2/run_dev.sh b/web_console_v2/run_dev.sh new file mode 100644 index 000000000..82b26befc --- /dev/null +++ b/web_console_v2/run_dev.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -e + +# Whether API or Client, always start nginx +diff_result=$(diff ../tools/dev_workspace/nginx.conf /etc/nginx/conf.d/nginx.conf || true) +if [[ $diff_result != '' ]] +then + echo "detected difference, restarting nginx" + cp -f ../tools/dev_workspace/nginx.conf /etc/nginx/conf.d/nginx.conf + service nginx restart +fi + +if [[ $ROLE == client ]] +then + # Starts Client server + cd ./client + npm install -g pnpm@6.4.0 && pnpm install && PORT=3000 pnpm run start +elif [[ $ROLE == api ]] +then + # Starts API server + cd ./api + bash run_dev.sh +else + echo "Invalid ROLE: $ROLE" + exit 1 +fi diff --git a/web_console_v2/run_prod.sh b/web_console_v2/run_prod.sh index b5a5a28f0..ae00f5792 100644 --- a/web_console_v2/run_prod.sh +++ b/web_console_v2/run_prod.sh @@ -1,11 +1,24 @@ +# Copyright 2023 The FedLearner Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + #!/bin/bash set -e service nginx restart -code-server & - # Starts API server cd /app/api -sh run_prod.sh --migrate +./run_prod --migrate diff --git a/web_console_v2/tools/start_db.sh b/web_console_v2/tools/start_db.sh deleted file mode 100644 index 77da922df..000000000 --- a/web_console_v2/tools/start_db.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -# -# Copyright 2021 The FedLearner Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -PORT=33600 - -docker rm -f mysql-fedlearner &> /dev/null -docker run -it --name mysql-fedlearner -p $PORT:3306 --rm -d -e MYSQL_ROOT_PASSWORD=root mysql:5.7 --default-authentication-plugin=mysql_native_password - -while : -do - mysql -h 0.0.0.0 --port 33600 -uroot -proot -e "CREATE DATABASE IF NOT EXISTS fedlearner;" &> /dev/null - if [ $? -eq 0 ] - then - break - fi -done - - -echo URI: mysql+pymysql://root:root@localhost:$PORT/fedlearner \ No newline at end of file